gcc/dwarf2out.c
1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2019 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47 information that is common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
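/* Illustrative example (not taken from this file): for a typical x86-64
   prologue that pushes %rbp and sets up the frame pointer, the CFI emitted
   through .cfi_* directives reads roughly:

       .cfi_startproc
       pushq   %rbp
       .cfi_def_cfa_offset 16          # CFA is now SP + 16
       .cfi_offset 6, -16              # %rbp saved at CFA - 16
       movq    %rsp, %rbp
       .cfi_def_cfa_register 6         # CFA now computed from %rbp
       ...
       .cfi_endproc

   Each .cfi_* line corresponds to one CFI (a DW_CFA_... instruction) in the
   FDE, and the CIE/FDE pair records how to recover the CFA and the saved
   registers at any point in the function.  */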
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105 static bool is_redundant_typedef (const_tree);
106
107 #ifndef XCOFF_DEBUGGING_INFO
108 #define XCOFF_DEBUGGING_INFO 0
109 #endif
110
111 #ifndef HAVE_XCOFF_DWARF_EXTRAS
112 #define HAVE_XCOFF_DWARF_EXTRAS 0
113 #endif
114
115 #ifdef VMS_DEBUGGING_INFO
116 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
117
118 /* Define this macro to be a nonzero value if the directory specifications
119 which are output in the debug info should end with a separator. */
120 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
121 /* Define this macro to evaluate to a nonzero value if GCC should refrain
122 from generating indirect strings in DWARF2 debug information, for instance
123 if your target is stuck with an old version of GDB that is unable to
124 process them properly or uses VMS Debug. */
125 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
126 #else
127 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
128 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
129 #endif
130
131 /* ??? Poison these here until it can be done generically. They've been
132 totally replaced in this file; make sure it stays that way. */
133 #undef DWARF2_UNWIND_INFO
134 #undef DWARF2_FRAME_INFO
135 #if (GCC_VERSION >= 3000)
136 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
137 #endif
138
139 /* The size of the target's pointer type. */
140 #ifndef PTR_SIZE
141 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
142 #endif
143
144 /* Array of RTXes referenced by the debugging information, which therefore
145 must be kept around forever. */
146 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
147
148 /* A pointer to the base of a list of incomplete types which might be
149 completed at some later time. incomplete_types needs to be a
150 vec<tree, va_gc> * because we want to tell the garbage collector about
151 it. */
152 static GTY(()) vec<tree, va_gc> *incomplete_types;
153
154 /* Pointers to various DWARF2 sections. */
155 static GTY(()) section *debug_info_section;
156 static GTY(()) section *debug_skeleton_info_section;
157 static GTY(()) section *debug_abbrev_section;
158 static GTY(()) section *debug_skeleton_abbrev_section;
159 static GTY(()) section *debug_aranges_section;
160 static GTY(()) section *debug_addr_section;
161 static GTY(()) section *debug_macinfo_section;
162 static const char *debug_macinfo_section_name;
163 static unsigned macinfo_label_base = 1;
164 static GTY(()) section *debug_line_section;
165 static GTY(()) section *debug_skeleton_line_section;
166 static GTY(()) section *debug_loc_section;
167 static GTY(()) section *debug_pubnames_section;
168 static GTY(()) section *debug_pubtypes_section;
169 static GTY(()) section *debug_str_section;
170 static GTY(()) section *debug_line_str_section;
171 static GTY(()) section *debug_str_dwo_section;
172 static GTY(()) section *debug_str_offsets_section;
173 static GTY(()) section *debug_ranges_section;
174 static GTY(()) section *debug_frame_section;
175
176 /* Maximum size (in bytes) of an artificially generated label. */
177 #define MAX_ARTIFICIAL_LABEL_BYTES 40
178
179 /* According to the (draft) DWARF 3 specification, the initial length
180 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
181 bytes are 0xffffffff, followed by the length stored in the next 8
182 bytes.
183
184 However, the SGI/MIPS ABI uses an initial length which is equal to
185 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
186
187 #ifndef DWARF_INITIAL_LENGTH_SIZE
188 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
189 #endif
190
191 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
192 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
193 #endif
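/* Illustrative sketch (assumed labels and comment strings, mirroring the
   pattern used by output_fde and output_call_frame_info below): emitting an
   initial length field that is valid for both 32-bit and 64-bit DWARF.

       if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
         dw2_asm_output_data (4, 0xffffffff,
                              "Initial length escape value indicating "
                              "64-bit DWARF extension");
       dw2_asm_output_delta (DWARF_OFFSET_SIZE, end_label, begin_label,
                             "Unit length");

   With 32-bit DWARF this emits a single 4-byte length; with 64-bit DWARF it
   emits the 0xffffffff escape followed by an 8-byte length.  */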
194
195 /* Round SIZE up to the nearest BOUNDARY. */
196 #define DWARF_ROUND(SIZE,BOUNDARY) \
197 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
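/* For example, with a 4-byte boundary: DWARF_ROUND (5, 4) == 8,
   DWARF_ROUND (8, 4) == 8 and DWARF_ROUND (9, 4) == 12.  */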
198
199 /* CIE identifier. */
200 #if HOST_BITS_PER_WIDE_INT >= 64
201 #define DWARF_CIE_ID \
202 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
203 #else
204 #define DWARF_CIE_ID DW_CIE_ID
205 #endif
206
207
208 /* A vector for a table that contains frame description
209 information for each routine. */
210 #define NOT_INDEXED (-1U)
211 #define NO_INDEX_ASSIGNED (-2U)
212
213 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
214
215 struct GTY((for_user)) indirect_string_node {
216 const char *str;
217 unsigned int refcount;
218 enum dwarf_form form;
219 char *label;
220 unsigned int index;
221 };
222
223 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
224 {
225 typedef const char *compare_type;
226
227 static hashval_t hash (indirect_string_node *);
228 static bool equal (indirect_string_node *, const char *);
229 };
230
231 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
232
233 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
234
235 /* With split_debug_info, both the comp_dir and dwo_name go in the
236 main object file, rather than the dwo, similar to the force_direct
237 parameter elsewhere but with additional complications:
238
239 1) The string is needed in both the main object file and the dwo.
240 That is, the comp_dir and dwo_name will appear in both places.
241
242 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
243 DW_FORM_line_strp or DW_FORM_strx/GNU_str_index.
244
245 3) GCC chooses the form to use late, depending on the size and
246 reference count.
247
248 Rather than forcing all the debug string handling functions and
249 callers to deal with these complications, simply use a separate,
250 special-cased string table for any attribute that should go in the
251 main object file. This limits the complexity to just the places
252 that need it. */
253
254 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
255
256 static GTY(()) int dw2_string_counter;
257
258 /* True if the compilation unit places functions in more than one section. */
259 static GTY(()) bool have_multiple_function_sections = false;
260
261 /* Whether the default text and cold text sections have been used at all. */
262 static GTY(()) bool text_section_used = false;
263 static GTY(()) bool cold_text_section_used = false;
264
265 /* The default cold text section. */
266 static GTY(()) section *cold_text_section;
267
268 /* The DIE for C++14 'auto' in a function return type. */
269 static GTY(()) dw_die_ref auto_die;
270
271 /* The DIE for C++14 'decltype(auto)' in a function return type. */
272 static GTY(()) dw_die_ref decltype_auto_die;
273
274 /* Forward declarations for functions defined in this file. */
275
276 static void output_call_frame_info (int);
277 static void dwarf2out_note_section_used (void);
278
279 /* Personality decl of current unit. Used only when assembler does not support
280 personality CFI. */
281 static GTY(()) rtx current_unit_personality;
282
283 /* Whether an eh_frame section is required. */
284 static GTY(()) bool do_eh_frame = false;
285
286 /* .debug_rnglists next index. */
287 static unsigned int rnglist_idx;
288
289 /* Data and reference forms for relocatable data. */
290 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
291 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
292
293 #ifndef DEBUG_FRAME_SECTION
294 #define DEBUG_FRAME_SECTION ".debug_frame"
295 #endif
296
297 #ifndef FUNC_BEGIN_LABEL
298 #define FUNC_BEGIN_LABEL "LFB"
299 #endif
300
301 #ifndef FUNC_SECOND_SECT_LABEL
302 #define FUNC_SECOND_SECT_LABEL "LFSB"
303 #endif
304
305 #ifndef FUNC_END_LABEL
306 #define FUNC_END_LABEL "LFE"
307 #endif
308
309 #ifndef PROLOGUE_END_LABEL
310 #define PROLOGUE_END_LABEL "LPE"
311 #endif
312
313 #ifndef EPILOGUE_BEGIN_LABEL
314 #define EPILOGUE_BEGIN_LABEL "LEB"
315 #endif
316
317 #ifndef FRAME_BEGIN_LABEL
318 #define FRAME_BEGIN_LABEL "Lframe"
319 #endif
320 #define CIE_AFTER_SIZE_LABEL "LSCIE"
321 #define CIE_END_LABEL "LECIE"
322 #define FDE_LABEL "LSFDE"
323 #define FDE_AFTER_SIZE_LABEL "LASFDE"
324 #define FDE_END_LABEL "LEFDE"
325 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
326 #define LINE_NUMBER_END_LABEL "LELT"
327 #define LN_PROLOG_AS_LABEL "LASLTP"
328 #define LN_PROLOG_END_LABEL "LELTP"
329 #define DIE_LABEL_PREFIX "DW"
330 \f
331 /* Match the base name of a file to the base name of a compilation unit. */
332
333 static int
334 matches_main_base (const char *path)
335 {
336 /* Cache the last query. */
337 static const char *last_path = NULL;
338 static int last_match = 0;
339 if (path != last_path)
340 {
341 const char *base;
342 int length = base_of_path (path, &base);
343 last_path = path;
344 last_match = (length == main_input_baselength
345 && memcmp (base, main_input_basename, length) == 0);
346 }
347 return last_match;
348 }
349
350 #ifdef DEBUG_DEBUG_STRUCT
351
352 static int
353 dump_struct_debug (tree type, enum debug_info_usage usage,
354 enum debug_struct_file criterion, int generic,
355 int matches, int result)
356 {
357 /* Find the type name. */
358 tree type_decl = TYPE_STUB_DECL (type);
359 tree t = type_decl;
360 const char *name = 0;
361 if (TREE_CODE (t) == TYPE_DECL)
362 t = DECL_NAME (t);
363 if (t)
364 name = IDENTIFIER_POINTER (t);
365
366 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
367 criterion,
368 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
369 matches ? "bas" : "hdr",
370 generic ? "gen" : "ord",
371 usage == DINFO_USAGE_DFN ? ";" :
372 usage == DINFO_USAGE_DIR_USE ? "." : "*",
373 result,
374 (void*) type_decl, name);
375 return result;
376 }
377 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
378 dump_struct_debug (type, usage, criterion, generic, matches, result)
379
380 #else
381
382 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
383 (result)
384
385 #endif
386
387 /* Get the number of HOST_WIDE_INTs needed to represent the precision
388 of the number. Some constants have a large uniform precision, so
389 we get the precision needed for the actual value of the number. */
390
391 static unsigned int
392 get_full_len (const wide_int &op)
393 {
394 int prec = wi::min_precision (op, UNSIGNED);
395 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
396 / HOST_BITS_PER_WIDE_INT);
397 }
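/* For example, with 64-bit HOST_WIDE_INTs a value whose minimum precision
   is anywhere from 1 to 64 bits needs one HOST_WIDE_INT, while a 65- to
   128-bit value needs two.  */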
398
399 static bool
400 should_emit_struct_debug (tree type, enum debug_info_usage usage)
401 {
402 enum debug_struct_file criterion;
403 tree type_decl;
404 bool generic = lang_hooks.types.generic_p (type);
405
406 if (generic)
407 criterion = debug_struct_generic[usage];
408 else
409 criterion = debug_struct_ordinary[usage];
410
411 if (criterion == DINFO_STRUCT_FILE_NONE)
412 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
413 if (criterion == DINFO_STRUCT_FILE_ANY)
414 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
415
416 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
417
418 if (type_decl != NULL)
419 {
420 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
421 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
422
423 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
424 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
425 }
426
427 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
428 }
429 \f
430 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
431 switch to the data section instead, and write out a synthetic start label
432 for collect2 the first time around. */
433
434 static void
435 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
436 {
437 if (eh_frame_section == 0)
438 {
439 int flags;
440
441 if (EH_TABLES_CAN_BE_READ_ONLY)
442 {
443 int fde_encoding;
444 int per_encoding;
445 int lsda_encoding;
446
447 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
448 /*global=*/0);
449 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
450 /*global=*/1);
451 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
452 /*global=*/0);
453 flags = ((! flag_pic
454 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
455 && (fde_encoding & 0x70) != DW_EH_PE_aligned
456 && (per_encoding & 0x70) != DW_EH_PE_absptr
457 && (per_encoding & 0x70) != DW_EH_PE_aligned
458 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
459 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
460 ? 0 : SECTION_WRITE);
461 }
462 else
463 flags = SECTION_WRITE;
464
465 #ifdef EH_FRAME_SECTION_NAME
466 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
467 #else
468 eh_frame_section = ((flags == SECTION_WRITE)
469 ? data_section : readonly_data_section);
470 #endif /* EH_FRAME_SECTION_NAME */
471 }
472
473 switch_to_section (eh_frame_section);
474
475 #ifdef EH_FRAME_THROUGH_COLLECT2
476 /* We have no special eh_frame section. Emit special labels to guide
477 collect2. */
478 if (!back)
479 {
480 tree label = get_file_function_name ("F");
481 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
482 targetm.asm_out.globalize_label (asm_out_file,
483 IDENTIFIER_POINTER (label));
484 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
485 }
486 #endif
487 }
488
489 /* Switch [BACK] to the eh or debug frame table section, depending on
490 FOR_EH. */
491
492 static void
493 switch_to_frame_table_section (int for_eh, bool back)
494 {
495 if (for_eh)
496 switch_to_eh_frame_section (back);
497 else
498 {
499 if (!debug_frame_section)
500 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
501 SECTION_DEBUG, NULL);
502 switch_to_section (debug_frame_section);
503 }
504 }
505
506 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
507
508 enum dw_cfi_oprnd_type
509 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
510 {
511 switch (cfi)
512 {
513 case DW_CFA_nop:
514 case DW_CFA_GNU_window_save:
515 case DW_CFA_remember_state:
516 case DW_CFA_restore_state:
517 return dw_cfi_oprnd_unused;
518
519 case DW_CFA_set_loc:
520 case DW_CFA_advance_loc1:
521 case DW_CFA_advance_loc2:
522 case DW_CFA_advance_loc4:
523 case DW_CFA_MIPS_advance_loc8:
524 return dw_cfi_oprnd_addr;
525
526 case DW_CFA_offset:
527 case DW_CFA_offset_extended:
528 case DW_CFA_def_cfa:
529 case DW_CFA_offset_extended_sf:
530 case DW_CFA_def_cfa_sf:
531 case DW_CFA_restore:
532 case DW_CFA_restore_extended:
533 case DW_CFA_undefined:
534 case DW_CFA_same_value:
535 case DW_CFA_def_cfa_register:
536 case DW_CFA_register:
537 case DW_CFA_expression:
538 case DW_CFA_val_expression:
539 return dw_cfi_oprnd_reg_num;
540
541 case DW_CFA_def_cfa_offset:
542 case DW_CFA_GNU_args_size:
543 case DW_CFA_def_cfa_offset_sf:
544 return dw_cfi_oprnd_offset;
545
546 case DW_CFA_def_cfa_expression:
547 return dw_cfi_oprnd_loc;
548
549 default:
550 gcc_unreachable ();
551 }
552 }
553
554 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
555
556 enum dw_cfi_oprnd_type
557 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
558 {
559 switch (cfi)
560 {
561 case DW_CFA_def_cfa:
562 case DW_CFA_def_cfa_sf:
563 case DW_CFA_offset:
564 case DW_CFA_offset_extended_sf:
565 case DW_CFA_offset_extended:
566 return dw_cfi_oprnd_offset;
567
568 case DW_CFA_register:
569 return dw_cfi_oprnd_reg_num;
570
571 case DW_CFA_expression:
572 case DW_CFA_val_expression:
573 return dw_cfi_oprnd_loc;
574
575 case DW_CFA_def_cfa_expression:
576 return dw_cfi_oprnd_cfa_loc;
577
578 default:
579 return dw_cfi_oprnd_unused;
580 }
581 }
582
583 /* Output one FDE. */
584
585 static void
586 output_fde (dw_fde_ref fde, bool for_eh, bool second,
587 char *section_start_label, int fde_encoding, char *augmentation,
588 bool any_lsda_needed, int lsda_encoding)
589 {
590 const char *begin, *end;
591 static unsigned int j;
592 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
593
594 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
595 /* empty */ 0);
596 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
597 for_eh + j);
598 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
599 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
600 if (!XCOFF_DEBUGGING_INFO || for_eh)
601 {
602 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
603 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
604 " indicating 64-bit DWARF extension");
605 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
606 "FDE Length");
607 }
608 ASM_OUTPUT_LABEL (asm_out_file, l1);
609
610 if (for_eh)
611 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
612 else
613 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
614 debug_frame_section, "FDE CIE offset");
615
616 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
617 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
618
619 if (for_eh)
620 {
621 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
622 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
623 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
624 "FDE initial location");
625 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
626 end, begin, "FDE address range");
627 }
628 else
629 {
630 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
631 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
632 }
633
634 if (augmentation[0])
635 {
636 if (any_lsda_needed)
637 {
638 int size = size_of_encoded_value (lsda_encoding);
639
640 if (lsda_encoding == DW_EH_PE_aligned)
641 {
642 int offset = ( 4 /* Length */
643 + 4 /* CIE offset */
644 + 2 * size_of_encoded_value (fde_encoding)
645 + 1 /* Augmentation size */ );
646 int pad = -offset & (PTR_SIZE - 1);
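  /* -offset & (PTR_SIZE - 1) is the distance from OFFSET up to the next
     multiple of PTR_SIZE; e.g. with PTR_SIZE == 8, an offset of 13 needs
     3 bytes of padding to reach 16.  */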
647
648 size += pad;
649 gcc_assert (size_of_uleb128 (size) == 1);
650 }
651
652 dw2_asm_output_data_uleb128 (size, "Augmentation size");
653
654 if (fde->uses_eh_lsda)
655 {
656 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
657 fde->funcdef_number);
658 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
659 gen_rtx_SYMBOL_REF (Pmode, l1),
660 false,
661 "Language Specific Data Area");
662 }
663 else
664 {
665 if (lsda_encoding == DW_EH_PE_aligned)
666 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
667 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
668 "Language Specific Data Area (none)");
669 }
670 }
671 else
672 dw2_asm_output_data_uleb128 (0, "Augmentation size");
673 }
674
675 /* Loop through the Call Frame Instructions associated with this FDE. */
676 fde->dw_fde_current_label = begin;
677 {
678 size_t from, until, i;
679
680 from = 0;
681 until = vec_safe_length (fde->dw_fde_cfi);
682
683 if (fde->dw_fde_second_begin == NULL)
684 ;
685 else if (!second)
686 until = fde->dw_fde_switch_cfi_index;
687 else
688 from = fde->dw_fde_switch_cfi_index;
689
690 for (i = from; i < until; i++)
691 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
692 }
693
694 /* If we are to emit a ref/link from function bodies to their frame tables,
695 do it now. This is typically performed to make sure that tables
696 associated with functions are dragged along with them and not discarded by
697 link-time garbage collection. We need to do this on a per-function basis to
698 cope with -ffunction-sections. */
699
700 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
701 /* Switch to the function section, emit the ref to the tables, and
702 switch *back* into the table section. */
703 switch_to_section (function_section (fde->decl));
704 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
705 switch_to_frame_table_section (for_eh, true);
706 #endif
707
708 /* Pad the FDE out to an address sized boundary. */
709 ASM_OUTPUT_ALIGN (asm_out_file,
710 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
711 ASM_OUTPUT_LABEL (asm_out_file, l2);
712
713 j += 2;
714 }
715
716 /* Return true if frame description entry FDE is needed for EH. */
717
718 static bool
719 fde_needed_for_eh_p (dw_fde_ref fde)
720 {
721 if (flag_asynchronous_unwind_tables)
722 return true;
723
724 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
725 return true;
726
727 if (fde->uses_eh_lsda)
728 return true;
729
730 /* If exceptions are enabled, we have collected nothrow info. */
731 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
732 return false;
733
734 return true;
735 }
736
737 /* Output the call frame information, which records how to calculate the
738 frame address and where the saved registers are located within each
739 frame. */
740
741 static void
742 output_call_frame_info (int for_eh)
743 {
744 unsigned int i;
745 dw_fde_ref fde;
746 dw_cfi_ref cfi;
747 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
748 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
749 bool any_lsda_needed = false;
750 char augmentation[6];
751 int augmentation_size;
752 int fde_encoding = DW_EH_PE_absptr;
753 int per_encoding = DW_EH_PE_absptr;
754 int lsda_encoding = DW_EH_PE_absptr;
755 int return_reg;
756 rtx personality = NULL;
757 int dw_cie_version;
758
759 /* Don't emit a CIE if there won't be any FDEs. */
760 if (!fde_vec)
761 return;
762
763 /* Nothing to do if the assembler's doing it all. */
764 if (dwarf2out_do_cfi_asm ())
765 return;
766
767 /* If we don't have any functions we'll want to unwind out of, don't emit
768 any EH unwind information. If we make FDEs linkonce, we may have to
769 emit an empty label for an FDE that wouldn't otherwise be emitted. We
770 want to avoid having an FDE kept around when the function it refers to
771 is discarded. Example where this matters: a primary function template
772 in C++ requires EH information, an explicit specialization doesn't. */
773 if (for_eh)
774 {
775 bool any_eh_needed = false;
776
777 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
778 {
779 if (fde->uses_eh_lsda)
780 any_eh_needed = any_lsda_needed = true;
781 else if (fde_needed_for_eh_p (fde))
782 any_eh_needed = true;
783 else if (TARGET_USES_WEAK_UNWIND_INFO)
784 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
785 }
786
787 if (!any_eh_needed)
788 return;
789 }
790
791 /* We're going to be generating comments, so turn on app. */
792 if (flag_debug_asm)
793 app_enable ();
794
795 /* Switch to the proper frame section, first time. */
796 switch_to_frame_table_section (for_eh, false);
797
798 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
799 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
800
801 /* Output the CIE. */
802 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
803 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
804 if (!XCOFF_DEBUGGING_INFO || for_eh)
805 {
806 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
807 dw2_asm_output_data (4, 0xffffffff,
808 "Initial length escape value indicating 64-bit DWARF extension");
809 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
810 "Length of Common Information Entry");
811 }
812 ASM_OUTPUT_LABEL (asm_out_file, l1);
813
814 /* Now that the CIE pointer is PC-relative for EH,
815 use 0 to identify the CIE. */
816 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
817 (for_eh ? 0 : DWARF_CIE_ID),
818 "CIE Identifier Tag");
819
820 /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to
821 use CIE version 1, unless that would produce incorrect results
822 due to overflowing the return register column. */
823 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
824 dw_cie_version = 1;
825 if (return_reg >= 256 || dwarf_version > 2)
826 dw_cie_version = 3;
827 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
828
829 augmentation[0] = 0;
830 augmentation_size = 0;
831
832 personality = current_unit_personality;
833 if (for_eh)
834 {
835 char *p;
836
837 /* Augmentation:
838 z Indicates that a uleb128 is present to size the
839 augmentation section.
840 L Indicates the encoding (and thus presence) of
841 an LSDA pointer in the FDE augmentation.
842 R Indicates a non-default pointer encoding for
843 FDE code pointers.
844 P Indicates the presence of an encoding + language
845 personality routine in the CIE augmentation. */
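      /* Illustrative example: a CIE that has a personality routine, an LSDA
         pointer in its FDEs and a non-default FDE pointer encoding gets the
         augmentation string "zPLR"; one that only customizes the FDE
         encoding gets "zR"; a CIE with no augmentation data gets "".  */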
846
847 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
848 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
849 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
850
851 p = augmentation + 1;
852 if (personality)
853 {
854 *p++ = 'P';
855 augmentation_size += 1 + size_of_encoded_value (per_encoding);
856 assemble_external_libcall (personality);
857 }
858 if (any_lsda_needed)
859 {
860 *p++ = 'L';
861 augmentation_size += 1;
862 }
863 if (fde_encoding != DW_EH_PE_absptr)
864 {
865 *p++ = 'R';
866 augmentation_size += 1;
867 }
868 if (p > augmentation + 1)
869 {
870 augmentation[0] = 'z';
871 *p = '\0';
872 }
873
874 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
875 if (personality && per_encoding == DW_EH_PE_aligned)
876 {
877 int offset = ( 4 /* Length */
878 + 4 /* CIE Id */
879 + 1 /* CIE version */
880 + strlen (augmentation) + 1 /* Augmentation */
881 + size_of_uleb128 (1) /* Code alignment */
882 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
883 + 1 /* RA column */
884 + 1 /* Augmentation size */
885 + 1 /* Personality encoding */ );
886 int pad = -offset & (PTR_SIZE - 1);
887
888 augmentation_size += pad;
889
890 /* Augmentations should be small, so there's scarce need to
891 iterate for a solution. Die if we exceed one uleb128 byte. */
892 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
893 }
894 }
895
896 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
897 if (dw_cie_version >= 4)
898 {
899 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
900 dw2_asm_output_data (1, 0, "CIE Segment Size");
901 }
902 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
903 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
904 "CIE Data Alignment Factor");
905
906 if (dw_cie_version == 1)
907 dw2_asm_output_data (1, return_reg, "CIE RA Column");
908 else
909 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
910
911 if (augmentation[0])
912 {
913 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
914 if (personality)
915 {
916 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
917 eh_data_format_name (per_encoding));
918 dw2_asm_output_encoded_addr_rtx (per_encoding,
919 personality,
920 true, NULL);
921 }
922
923 if (any_lsda_needed)
924 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
925 eh_data_format_name (lsda_encoding));
926
927 if (fde_encoding != DW_EH_PE_absptr)
928 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
929 eh_data_format_name (fde_encoding));
930 }
931
932 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
933 output_cfi (cfi, NULL, for_eh);
934
935 /* Pad the CIE out to an address sized boundary. */
936 ASM_OUTPUT_ALIGN (asm_out_file,
937 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
938 ASM_OUTPUT_LABEL (asm_out_file, l2);
939
940 /* Loop through all of the FDE's. */
941 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
942 {
943 unsigned int k;
944
945 /* Don't emit EH unwind info for leaf functions that don't need it. */
946 if (for_eh && !fde_needed_for_eh_p (fde))
947 continue;
948
949 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
950 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
951 augmentation, any_lsda_needed, lsda_encoding);
952 }
953
954 if (for_eh && targetm.terminate_dw2_eh_frame_info)
955 dw2_asm_output_data (4, 0, "End of Table");
956
957 /* Turn off app to make assembly quicker. */
958 if (flag_debug_asm)
959 app_disable ();
960 }
961
962 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
963
964 static void
965 dwarf2out_do_cfi_startproc (bool second)
966 {
967 int enc;
968 rtx ref;
969
970 fprintf (asm_out_file, "\t.cfi_startproc\n");
971
972 targetm.asm_out.post_cfi_startproc (asm_out_file, current_function_decl);
973
974 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
975 eh unwinders. */
976 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
977 return;
978
979 rtx personality = get_personality_function (current_function_decl);
980
981 if (personality)
982 {
983 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
984 ref = personality;
985
986 /* ??? The GAS support isn't entirely consistent. We have to
987 handle indirect support ourselves, but PC-relative is done
988 in the assembler. Further, the assembler can't handle any
989 of the weirder relocation types. */
990 if (enc & DW_EH_PE_indirect)
991 ref = dw2_force_const_mem (ref, true);
992
993 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
994 output_addr_const (asm_out_file, ref);
995 fputc ('\n', asm_out_file);
996 }
997
998 if (crtl->uses_eh_lsda)
999 {
1000 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
1001
1002 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
1003 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
1004 current_function_funcdef_no);
1005 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
1006 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1007
1008 if (enc & DW_EH_PE_indirect)
1009 ref = dw2_force_const_mem (ref, true);
1010
1011 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1012 output_addr_const (asm_out_file, ref);
1013 fputc ('\n', asm_out_file);
1014 }
1015 }
1016
1017 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1018 this allocation may be done before pass_final. */
1019
1020 dw_fde_ref
1021 dwarf2out_alloc_current_fde (void)
1022 {
1023 dw_fde_ref fde;
1024
1025 fde = ggc_cleared_alloc<dw_fde_node> ();
1026 fde->decl = current_function_decl;
1027 fde->funcdef_number = current_function_funcdef_no;
1028 fde->fde_index = vec_safe_length (fde_vec);
1029 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1030 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1031 fde->nothrow = crtl->nothrow;
1032 fde->drap_reg = INVALID_REGNUM;
1033 fde->vdrap_reg = INVALID_REGNUM;
1034
1035 /* Record the FDE associated with this function. */
1036 cfun->fde = fde;
1037 vec_safe_push (fde_vec, fde);
1038
1039 return fde;
1040 }
1041
1042 /* Output a marker (i.e. a label) for the beginning of a function, before
1043 the prologue. */
1044
1045 void
1046 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1047 unsigned int column ATTRIBUTE_UNUSED,
1048 const char *file ATTRIBUTE_UNUSED)
1049 {
1050 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1051 char * dup_label;
1052 dw_fde_ref fde;
1053 section *fnsec;
1054 bool do_frame;
1055
1056 current_function_func_begin_label = NULL;
1057
1058 do_frame = dwarf2out_do_frame ();
1059
1060 /* ??? current_function_func_begin_label is also used by except.c for
1061 call-site information. We must emit this label if it might be used. */
1062 if (!do_frame
1063 && (!flag_exceptions
1064 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1065 return;
1066
1067 fnsec = function_section (current_function_decl);
1068 switch_to_section (fnsec);
1069 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1070 current_function_funcdef_no);
1071 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1072 current_function_funcdef_no);
1073 dup_label = xstrdup (label);
1074 current_function_func_begin_label = dup_label;
1075
1076 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1077 if (!do_frame)
1078 return;
1079
1080 /* Unlike the debug version, the EH version of frame unwind info is a per-
1081 function setting so we need to record whether we need it for the unit. */
1082 do_eh_frame |= dwarf2out_do_eh_frame ();
1083
1084 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1085 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1086 would include pass_dwarf2_frame. If we've not created the FDE yet,
1087 do so now. */
1088 fde = cfun->fde;
1089 if (fde == NULL)
1090 fde = dwarf2out_alloc_current_fde ();
1091
1092 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1093 fde->dw_fde_begin = dup_label;
1094 fde->dw_fde_current_label = dup_label;
1095 fde->in_std_section = (fnsec == text_section
1096 || (cold_text_section && fnsec == cold_text_section));
1097
1098 /* We only want to output line number information for the genuine dwarf2
1099 prologue case, not the eh frame case. */
1100 #ifdef DWARF2_DEBUGGING_INFO
1101 if (file)
1102 dwarf2out_source_line (line, column, file, 0, true);
1103 #endif
1104
1105 if (dwarf2out_do_cfi_asm ())
1106 dwarf2out_do_cfi_startproc (false);
1107 else
1108 {
1109 rtx personality = get_personality_function (current_function_decl);
1110 if (!current_unit_personality)
1111 current_unit_personality = personality;
1112
1113 /* We cannot keep a current personality per function as without CFI
1114 asm, at the point where we emit the CFI data, there is no current
1115 function anymore. */
1116 if (personality && current_unit_personality != personality)
1117 sorry ("multiple EH personalities are supported only with assemblers "
1118 "supporting .cfi_personality directive");
1119 }
1120 }
1121
1122 /* Output a marker (i.e. a label) for the end of the generated code
1123 for a function prologue. This gets called *after* the prologue code has
1124 been generated. */
1125
1126 void
1127 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1128 const char *file ATTRIBUTE_UNUSED)
1129 {
1130 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1131
1132 /* Output a label to mark the end of the prologue code generated for this
1133 function. */
1134 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1135 current_function_funcdef_no);
1136 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1137 current_function_funcdef_no);
1138 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1139 }
1140
1141 /* Output a marker (i.e. a label) for the beginning of the generated code
1142 for a function epilogue. This gets called *before* the epilogue code has
1143 been generated. */
1144
1145 void
1146 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1147 const char *file ATTRIBUTE_UNUSED)
1148 {
1149 dw_fde_ref fde = cfun->fde;
1150 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1151
1152 if (fde->dw_fde_vms_begin_epilogue)
1153 return;
1154
1155 /* Output a label to mark the beginning of the epilogue code generated for
1156 this function. */
1157 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1158 current_function_funcdef_no);
1159 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1160 current_function_funcdef_no);
1161 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1162 }
1163
1164 /* Output a marker (i.e. a label) for the absolute end of the generated code
1165 for a function definition. This gets called *after* the epilogue code has
1166 been generated. */
1167
1168 void
1169 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1170 const char *file ATTRIBUTE_UNUSED)
1171 {
1172 dw_fde_ref fde;
1173 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1174
1175 last_var_location_insn = NULL;
1176 cached_next_real_insn = NULL;
1177
1178 if (dwarf2out_do_cfi_asm ())
1179 fprintf (asm_out_file, "\t.cfi_endproc\n");
1180
1181 /* Output a label to mark the endpoint of the code generated for this
1182 function. */
1183 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1184 current_function_funcdef_no);
1185 ASM_OUTPUT_LABEL (asm_out_file, label);
1186 fde = cfun->fde;
1187 gcc_assert (fde != NULL);
1188 if (fde->dw_fde_second_begin == NULL)
1189 fde->dw_fde_end = xstrdup (label);
1190 }
1191
1192 void
1193 dwarf2out_frame_finish (void)
1194 {
1195 /* Output call frame information. */
1196 if (targetm.debug_unwind_info () == UI_DWARF2)
1197 output_call_frame_info (0);
1198
1199 /* Output another copy for the unwinder. */
1200 if (do_eh_frame)
1201 output_call_frame_info (1);
1202 }
1203
1204 /* Note that the current function section is being used for code. */
1205
1206 static void
1207 dwarf2out_note_section_used (void)
1208 {
1209 section *sec = current_function_section ();
1210 if (sec == text_section)
1211 text_section_used = true;
1212 else if (sec == cold_text_section)
1213 cold_text_section_used = true;
1214 }
1215
1216 static void var_location_switch_text_section (void);
1217 static void set_cur_line_info_table (section *);
1218
1219 void
1220 dwarf2out_switch_text_section (void)
1221 {
1222 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1223 section *sect;
1224 dw_fde_ref fde = cfun->fde;
1225
1226 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1227
1228 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_SECOND_SECT_LABEL,
1229 current_function_funcdef_no);
1230
1231 fde->dw_fde_second_begin = ggc_strdup (label);
1232 if (!in_cold_section_p)
1233 {
1234 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1235 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1236 }
1237 else
1238 {
1239 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1240 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1241 }
1242 have_multiple_function_sections = true;
1243
1244 /* There is no need to mark used sections when not debugging. */
1245 if (cold_text_section != NULL)
1246 dwarf2out_note_section_used ();
1247
1248 if (dwarf2out_do_cfi_asm ())
1249 fprintf (asm_out_file, "\t.cfi_endproc\n");
1250
1251 /* Now do the real section switch. */
1252 sect = current_function_section ();
1253 switch_to_section (sect);
1254
1255 fde->second_in_std_section
1256 = (sect == text_section
1257 || (cold_text_section && sect == cold_text_section));
1258
1259 if (dwarf2out_do_cfi_asm ())
1260 dwarf2out_do_cfi_startproc (true);
1261
1262 var_location_switch_text_section ();
1263
1264 if (cold_text_section != NULL)
1265 set_cur_line_info_table (sect);
1266 }
1267 \f
1268 /* And now, the subset of the debugging information support code necessary
1269 for emitting location expressions. */
1270
1271 /* Data about a single source file. */
1272 struct GTY((for_user)) dwarf_file_data {
1273 const char * filename;
1274 int emitted_number;
1275 };
1276
1277 /* Describe an entry into the .debug_addr section. */
1278
1279 enum ate_kind {
1280 ate_kind_rtx,
1281 ate_kind_rtx_dtprel,
1282 ate_kind_label
1283 };
1284
1285 struct GTY((for_user)) addr_table_entry {
1286 enum ate_kind kind;
1287 unsigned int refcount;
1288 unsigned int index;
1289 union addr_table_entry_struct_union
1290 {
1291 rtx GTY ((tag ("0"))) rtl;
1292 char * GTY ((tag ("1"))) label;
1293 }
1294 GTY ((desc ("%1.kind"))) addr;
1295 };
1296
1297 typedef unsigned int var_loc_view;
1298
1299 /* Location lists are address ranges paired with the location description
1300 valid over each range, so you can track variables that are in different
1301 places over their entire lifetime. */
1302 typedef struct GTY(()) dw_loc_list_struct {
1303 dw_loc_list_ref dw_loc_next;
1304 const char *begin; /* Label and addr_entry for start of range */
1305 addr_table_entry *begin_entry;
1306 const char *end; /* Label for end of range */
1307 char *ll_symbol; /* Label for beginning of location list.
1308 Only on head of list. */
1309 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1310 const char *section; /* Section this loclist is relative to */
1311 dw_loc_descr_ref expr;
1312 var_loc_view vbegin, vend;
1313 hashval_t hash;
1314 /* True if all addresses in this and subsequent lists are known to be
1315 resolved. */
1316 bool resolved_addr;
1317 /* True if this list has been replaced by dw_loc_next. */
1318 bool replaced;
1319 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1320 section. */
1321 unsigned char emitted : 1;
1322 /* True if hash field is index rather than hash value. */
1323 unsigned char num_assigned : 1;
1324 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1325 unsigned char offset_emitted : 1;
1326 /* True if note_variable_value_in_expr has been called on it. */
1327 unsigned char noted_variable_value : 1;
1328 /* True if the range should be emitted even if begin and end
1329 are the same. */
1330 bool force;
1331 } dw_loc_list_node;
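/* Illustrative example (hypothetical labels): a variable that lives in a
   register for part of its lifetime and on the stack for the rest might get
   a two-node list along the lines of

       [.LVL1, .LVL2) -> DW_OP_reg3
       [.LVL2, .LVL3) -> DW_OP_fbreg -16

   i.e. each node pairs a half-open address range with the location
   expression that is valid over that range.  */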
1332
1333 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1334 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1335
1336 /* Convert a DWARF stack opcode into its string name. */
1337
1338 static const char *
1339 dwarf_stack_op_name (unsigned int op)
1340 {
1341 const char *name = get_DW_OP_name (op);
1342
1343 if (name != NULL)
1344 return name;
1345
1346 return "OP_<unknown>";
1347 }
1348
1349 /* Return TRUE iff we're to output location view lists as a separate
1350 attribute next to the location lists, as an extension compatible
1351 with DWARF 2 and above. */
1352
1353 static inline bool
1354 dwarf2out_locviews_in_attribute ()
1355 {
1356 return debug_variable_location_views == 1;
1357 }
1358
1359 /* Return TRUE iff we're to output location view lists as part of the
1360 location lists, as proposed for standardization after DWARF 5. */
1361
1362 static inline bool
1363 dwarf2out_locviews_in_loclist ()
1364 {
1365 #ifndef DW_LLE_view_pair
1366 return false;
1367 #else
1368 return debug_variable_location_views == -1;
1369 #endif
1370 }
1371
1372 /* Return a pointer to a newly allocated location description. Location
1373 descriptions are simple expression terms that can be strung
1374 together to form more complicated location (address) descriptions. */
1375
1376 static inline dw_loc_descr_ref
1377 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1378 unsigned HOST_WIDE_INT oprnd2)
1379 {
1380 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1381
1382 descr->dw_loc_opc = op;
1383 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1384 descr->dw_loc_oprnd1.val_entry = NULL;
1385 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1386 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1387 descr->dw_loc_oprnd2.val_entry = NULL;
1388 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1389
1390 return descr;
1391 }
1392
1393 /* Add a location description term to a location description expression. */
1394
1395 static inline void
1396 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1397 {
1398 dw_loc_descr_ref *d;
1399
1400 /* Find the end of the chain. */
1401 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1402 ;
1403
1404 *d = descr;
1405 }
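/* Illustrative sketch (hypothetical usage): the two helpers above compose
   into multi-term expressions.  For instance, "the value stored at
   frame-base minus 16" could be built as

       dw_loc_descr_ref expr = new_loc_descr (DW_OP_fbreg, -16, 0);
       add_loc_descr (&expr, new_loc_descr (DW_OP_deref, 0, 0));

   giving the expression DW_OP_fbreg -16; DW_OP_deref.  */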
1406
1407 /* Compare two location operands for exact equality. */
1408
1409 static bool
1410 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1411 {
1412 if (a->val_class != b->val_class)
1413 return false;
1414 switch (a->val_class)
1415 {
1416 case dw_val_class_none:
1417 return true;
1418 case dw_val_class_addr:
1419 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1420
1421 case dw_val_class_offset:
1422 case dw_val_class_unsigned_const:
1423 case dw_val_class_const:
1424 case dw_val_class_unsigned_const_implicit:
1425 case dw_val_class_const_implicit:
1426 case dw_val_class_range_list:
1427 /* These are all HOST_WIDE_INT, signed or unsigned. */
1428 return a->v.val_unsigned == b->v.val_unsigned;
1429
1430 case dw_val_class_loc:
1431 return a->v.val_loc == b->v.val_loc;
1432 case dw_val_class_loc_list:
1433 return a->v.val_loc_list == b->v.val_loc_list;
1434 case dw_val_class_view_list:
1435 return a->v.val_view_list == b->v.val_view_list;
1436 case dw_val_class_die_ref:
1437 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1438 case dw_val_class_fde_ref:
1439 return a->v.val_fde_index == b->v.val_fde_index;
1440 case dw_val_class_symview:
1441 return strcmp (a->v.val_symbolic_view, b->v.val_symbolic_view) == 0;
1442 case dw_val_class_lbl_id:
1443 case dw_val_class_lineptr:
1444 case dw_val_class_macptr:
1445 case dw_val_class_loclistsptr:
1446 case dw_val_class_high_pc:
1447 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1448 case dw_val_class_str:
1449 return a->v.val_str == b->v.val_str;
1450 case dw_val_class_flag:
1451 return a->v.val_flag == b->v.val_flag;
1452 case dw_val_class_file:
1453 case dw_val_class_file_implicit:
1454 return a->v.val_file == b->v.val_file;
1455 case dw_val_class_decl_ref:
1456 return a->v.val_decl_ref == b->v.val_decl_ref;
1457
1458 case dw_val_class_const_double:
1459 return (a->v.val_double.high == b->v.val_double.high
1460 && a->v.val_double.low == b->v.val_double.low);
1461
1462 case dw_val_class_wide_int:
1463 return *a->v.val_wide == *b->v.val_wide;
1464
1465 case dw_val_class_vec:
1466 {
1467 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1468 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1469
1470 return (a_len == b_len
1471 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1472 }
1473
1474 case dw_val_class_data8:
1475 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1476
1477 case dw_val_class_vms_delta:
1478 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1479 && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1480
1481 case dw_val_class_discr_value:
1482 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1483 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1484 case dw_val_class_discr_list:
1485 /* It makes no sense to compare two discriminant value lists. */
1486 return false;
1487 }
1488 gcc_unreachable ();
1489 }
1490
1491 /* Compare two location atoms for exact equality. */
1492
1493 static bool
1494 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1495 {
1496 if (a->dw_loc_opc != b->dw_loc_opc)
1497 return false;
1498
1499 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1500 address size, but since we always allocate cleared storage it
1501 should be zero for other types of locations. */
1502 if (a->dtprel != b->dtprel)
1503 return false;
1504
1505 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1506 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1507 }
1508
1509 /* Compare two complete location expressions for exact equality. */
1510
1511 bool
1512 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1513 {
1514 while (1)
1515 {
1516 if (a == b)
1517 return true;
1518 if (a == NULL || b == NULL)
1519 return false;
1520 if (!loc_descr_equal_p_1 (a, b))
1521 return false;
1522
1523 a = a->dw_loc_next;
1524 b = b->dw_loc_next;
1525 }
1526 }
1527
1528
1529 /* Add a constant POLY_OFFSET to a location expression. */
1530
1531 static void
1532 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1533 {
1534 dw_loc_descr_ref loc;
1535 HOST_WIDE_INT *p;
1536
1537 gcc_assert (*list_head != NULL);
1538
1539 if (known_eq (poly_offset, 0))
1540 return;
1541
1542 /* Find the end of the chain. */
1543 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1544 ;
1545
1546 HOST_WIDE_INT offset;
1547 if (!poly_offset.is_constant (&offset))
1548 {
1549 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1550 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1551 return;
1552 }
1553
1554 p = NULL;
1555 if (loc->dw_loc_opc == DW_OP_fbreg
1556 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1557 p = &loc->dw_loc_oprnd1.v.val_int;
1558 else if (loc->dw_loc_opc == DW_OP_bregx)
1559 p = &loc->dw_loc_oprnd2.v.val_int;
1560
1561 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1562 offset. Don't optimize if a signed integer overflow would happen. */
1563 if (p != NULL
1564 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1565 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1566 *p += offset;
1567
1568 else if (offset > 0)
1569 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1570
1571 else
1572 {
1573 loc->dw_loc_next
1574 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1575 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1576 }
1577 }
1578
1579 /* Return a pointer to a newly allocated location description for
1580 REG and OFFSET. */
1581
1582 static inline dw_loc_descr_ref
1583 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1584 {
1585 HOST_WIDE_INT const_offset;
1586 if (offset.is_constant (&const_offset))
1587 {
1588 if (reg <= 31)
1589 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1590 const_offset, 0);
1591 else
1592 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1593 }
1594 else
1595 {
1596 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1597 loc_descr_plus_const (&ret, offset);
1598 return ret;
1599 }
1600 }
1601
1602 /* Add a constant OFFSET to a location list. */
1603
1604 static void
1605 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1606 {
1607 dw_loc_list_ref d;
1608 for (d = list_head; d != NULL; d = d->dw_loc_next)
1609 loc_descr_plus_const (&d->expr, offset);
1610 }
1611
1612 #define DWARF_REF_SIZE \
1613 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1614
1615 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1616 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1617 DW_FORM_data16 with 128 bits. */
1618 #define DWARF_LARGEST_DATA_FORM_BITS \
1619 (dwarf_version >= 5 ? 128 : 64)
1620
1621 /* Utility inline function for construction of ops that were GNU extensions
1622 before DWARF 5. */
1623 static inline enum dwarf_location_atom
1624 dwarf_OP (enum dwarf_location_atom op)
1625 {
1626 switch (op)
1627 {
1628 case DW_OP_implicit_pointer:
1629 if (dwarf_version < 5)
1630 return DW_OP_GNU_implicit_pointer;
1631 break;
1632
1633 case DW_OP_entry_value:
1634 if (dwarf_version < 5)
1635 return DW_OP_GNU_entry_value;
1636 break;
1637
1638 case DW_OP_const_type:
1639 if (dwarf_version < 5)
1640 return DW_OP_GNU_const_type;
1641 break;
1642
1643 case DW_OP_regval_type:
1644 if (dwarf_version < 5)
1645 return DW_OP_GNU_regval_type;
1646 break;
1647
1648 case DW_OP_deref_type:
1649 if (dwarf_version < 5)
1650 return DW_OP_GNU_deref_type;
1651 break;
1652
1653 case DW_OP_convert:
1654 if (dwarf_version < 5)
1655 return DW_OP_GNU_convert;
1656 break;
1657
1658 case DW_OP_reinterpret:
1659 if (dwarf_version < 5)
1660 return DW_OP_GNU_reinterpret;
1661 break;
1662
1663 case DW_OP_addrx:
1664 if (dwarf_version < 5)
1665 return DW_OP_GNU_addr_index;
1666 break;
1667
1668 case DW_OP_constx:
1669 if (dwarf_version < 5)
1670 return DW_OP_GNU_const_index;
1671 break;
1672
1673 default:
1674 break;
1675 }
1676 return op;
1677 }
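/* For example, new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0) yields a
   DW_OP_GNU_entry_value term when emitting DWARF 4 or earlier and a
   standard DW_OP_entry_value term for DWARF 5 and later.  */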
1678
1679 /* Similarly for attributes. */
1680 static inline enum dwarf_attribute
1681 dwarf_AT (enum dwarf_attribute at)
1682 {
1683 switch (at)
1684 {
1685 case DW_AT_call_return_pc:
1686 if (dwarf_version < 5)
1687 return DW_AT_low_pc;
1688 break;
1689
1690 case DW_AT_call_tail_call:
1691 if (dwarf_version < 5)
1692 return DW_AT_GNU_tail_call;
1693 break;
1694
1695 case DW_AT_call_origin:
1696 if (dwarf_version < 5)
1697 return DW_AT_abstract_origin;
1698 break;
1699
1700 case DW_AT_call_target:
1701 if (dwarf_version < 5)
1702 return DW_AT_GNU_call_site_target;
1703 break;
1704
1705 case DW_AT_call_target_clobbered:
1706 if (dwarf_version < 5)
1707 return DW_AT_GNU_call_site_target_clobbered;
1708 break;
1709
1710 case DW_AT_call_parameter:
1711 if (dwarf_version < 5)
1712 return DW_AT_abstract_origin;
1713 break;
1714
1715 case DW_AT_call_value:
1716 if (dwarf_version < 5)
1717 return DW_AT_GNU_call_site_value;
1718 break;
1719
1720 case DW_AT_call_data_value:
1721 if (dwarf_version < 5)
1722 return DW_AT_GNU_call_site_data_value;
1723 break;
1724
1725 case DW_AT_call_all_calls:
1726 if (dwarf_version < 5)
1727 return DW_AT_GNU_all_call_sites;
1728 break;
1729
1730 case DW_AT_call_all_tail_calls:
1731 if (dwarf_version < 5)
1732 return DW_AT_GNU_all_tail_call_sites;
1733 break;
1734
1735 case DW_AT_dwo_name:
1736 if (dwarf_version < 5)
1737 return DW_AT_GNU_dwo_name;
1738 break;
1739
1740 case DW_AT_addr_base:
1741 if (dwarf_version < 5)
1742 return DW_AT_GNU_addr_base;
1743 break;
1744
1745 default:
1746 break;
1747 }
1748 return at;
1749 }
1750
1751 /* And similarly for tags. */
1752 static inline enum dwarf_tag
1753 dwarf_TAG (enum dwarf_tag tag)
1754 {
1755 switch (tag)
1756 {
1757 case DW_TAG_call_site:
1758 if (dwarf_version < 5)
1759 return DW_TAG_GNU_call_site;
1760 break;
1761
1762 case DW_TAG_call_site_parameter:
1763 if (dwarf_version < 5)
1764 return DW_TAG_GNU_call_site_parameter;
1765 break;
1766
1767 default:
1768 break;
1769 }
1770 return tag;
1771 }
1772
1773 /* And similarly for forms. */
1774 static inline enum dwarf_form
1775 dwarf_FORM (enum dwarf_form form)
1776 {
1777 switch (form)
1778 {
1779 case DW_FORM_addrx:
1780 if (dwarf_version < 5)
1781 return DW_FORM_GNU_addr_index;
1782 break;
1783
1784 case DW_FORM_strx:
1785 if (dwarf_version < 5)
1786 return DW_FORM_GNU_str_index;
1787 break;
1788
1789 default:
1790 break;
1791 }
1792 return form;
1793 }
1794
1795 static unsigned long int get_base_type_offset (dw_die_ref);
1796
1797 /* Return the size of a location descriptor. */
1798
1799 static unsigned long
1800 size_of_loc_descr (dw_loc_descr_ref loc)
1801 {
1802 unsigned long size = 1;
1803
1804 switch (loc->dw_loc_opc)
1805 {
1806 case DW_OP_addr:
1807 size += DWARF2_ADDR_SIZE;
1808 break;
1809 case DW_OP_GNU_addr_index:
1810 case DW_OP_addrx:
1811 case DW_OP_GNU_const_index:
1812 case DW_OP_constx:
1813 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1814 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1815 break;
1816 case DW_OP_const1u:
1817 case DW_OP_const1s:
1818 size += 1;
1819 break;
1820 case DW_OP_const2u:
1821 case DW_OP_const2s:
1822 size += 2;
1823 break;
1824 case DW_OP_const4u:
1825 case DW_OP_const4s:
1826 size += 4;
1827 break;
1828 case DW_OP_const8u:
1829 case DW_OP_const8s:
1830 size += 8;
1831 break;
1832 case DW_OP_constu:
1833 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1834 break;
1835 case DW_OP_consts:
1836 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1837 break;
1838 case DW_OP_pick:
1839 size += 1;
1840 break;
1841 case DW_OP_plus_uconst:
1842 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1843 break;
1844 case DW_OP_skip:
1845 case DW_OP_bra:
1846 size += 2;
1847 break;
1848 case DW_OP_breg0:
1849 case DW_OP_breg1:
1850 case DW_OP_breg2:
1851 case DW_OP_breg3:
1852 case DW_OP_breg4:
1853 case DW_OP_breg5:
1854 case DW_OP_breg6:
1855 case DW_OP_breg7:
1856 case DW_OP_breg8:
1857 case DW_OP_breg9:
1858 case DW_OP_breg10:
1859 case DW_OP_breg11:
1860 case DW_OP_breg12:
1861 case DW_OP_breg13:
1862 case DW_OP_breg14:
1863 case DW_OP_breg15:
1864 case DW_OP_breg16:
1865 case DW_OP_breg17:
1866 case DW_OP_breg18:
1867 case DW_OP_breg19:
1868 case DW_OP_breg20:
1869 case DW_OP_breg21:
1870 case DW_OP_breg22:
1871 case DW_OP_breg23:
1872 case DW_OP_breg24:
1873 case DW_OP_breg25:
1874 case DW_OP_breg26:
1875 case DW_OP_breg27:
1876 case DW_OP_breg28:
1877 case DW_OP_breg29:
1878 case DW_OP_breg30:
1879 case DW_OP_breg31:
1880 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1881 break;
1882 case DW_OP_regx:
1883 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1884 break;
1885 case DW_OP_fbreg:
1886 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1887 break;
1888 case DW_OP_bregx:
1889 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1890 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1891 break;
1892 case DW_OP_piece:
1893 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1894 break;
1895 case DW_OP_bit_piece:
1896 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1897 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1898 break;
1899 case DW_OP_deref_size:
1900 case DW_OP_xderef_size:
1901 size += 1;
1902 break;
1903 case DW_OP_call2:
1904 size += 2;
1905 break;
1906 case DW_OP_call4:
1907 size += 4;
1908 break;
1909 case DW_OP_call_ref:
1910 case DW_OP_GNU_variable_value:
1911 size += DWARF_REF_SIZE;
1912 break;
1913 case DW_OP_implicit_value:
1914 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1915 + loc->dw_loc_oprnd1.v.val_unsigned;
1916 break;
1917 case DW_OP_implicit_pointer:
1918 case DW_OP_GNU_implicit_pointer:
1919 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1920 break;
1921 case DW_OP_entry_value:
1922 case DW_OP_GNU_entry_value:
1923 {
1924 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1925 size += size_of_uleb128 (op_size) + op_size;
1926 break;
1927 }
1928 case DW_OP_const_type:
1929 case DW_OP_GNU_const_type:
1930 {
1931 unsigned long o
1932 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1933 size += size_of_uleb128 (o) + 1;
1934 switch (loc->dw_loc_oprnd2.val_class)
1935 {
1936 case dw_val_class_vec:
1937 size += loc->dw_loc_oprnd2.v.val_vec.length
1938 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1939 break;
1940 case dw_val_class_const:
1941 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1942 break;
1943 case dw_val_class_const_double:
1944 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1945 break;
1946 case dw_val_class_wide_int:
1947 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1948 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1949 break;
1950 default:
1951 gcc_unreachable ();
1952 }
1953 break;
1954 }
1955 case DW_OP_regval_type:
1956 case DW_OP_GNU_regval_type:
1957 {
1958 unsigned long o
1959 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1960 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1961 + size_of_uleb128 (o);
1962 }
1963 break;
1964 case DW_OP_deref_type:
1965 case DW_OP_GNU_deref_type:
1966 {
1967 unsigned long o
1968 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1969 size += 1 + size_of_uleb128 (o);
1970 }
1971 break;
1972 case DW_OP_convert:
1973 case DW_OP_reinterpret:
1974 case DW_OP_GNU_convert:
1975 case DW_OP_GNU_reinterpret:
1976 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1977 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1978 else
1979 {
1980 unsigned long o
1981 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1982 size += size_of_uleb128 (o);
1983 }
1984 break;
1985 case DW_OP_GNU_parameter_ref:
1986 size += 4;
1987 break;
1988 default:
1989 break;
1990 }
1991
1992 return size;
1993 }
1994
1995 /* Return the size of a series of location descriptors. */
1996
1997 unsigned long
1998 size_of_locs (dw_loc_descr_ref loc)
1999 {
2000 dw_loc_descr_ref l;
2001 unsigned long size;
2002
2003 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
2004 field, to avoid writing to a PCH file. */
2005 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2006 {
2007 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
2008 break;
2009 size += size_of_loc_descr (l);
2010 }
2011 if (! l)
2012 return size;
2013
2014 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2015 {
2016 l->dw_loc_addr = size;
2017 size += size_of_loc_descr (l);
2018 }
2019
2020 return size;
2021 }
2022
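/* Illustrative worked example, not part of the implementation: for the
   two-operation expression DW_OP_fbreg -24; DW_OP_deref,

     size_of_loc_descr (DW_OP_fbreg -24) = 1 + size_of_sleb128 (-24) = 2
     size_of_loc_descr (DW_OP_deref)     = 1
     size_of_locs (expr)                 = 3

   and, since neither DW_OP_skip nor DW_OP_bra appears, the dw_loc_addr
   fields are left untouched.  */
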
2023 /* Return the size of the value in a DW_AT_discr_value attribute. */
2024
2025 static int
2026 size_of_discr_value (dw_discr_value *discr_value)
2027 {
2028 if (discr_value->pos)
2029 return size_of_uleb128 (discr_value->v.uval);
2030 else
2031 return size_of_sleb128 (discr_value->v.sval);
2032 }
2033
2034 /* Return the size of the value in a DW_AT_discr_list attribute. */
2035
2036 static int
2037 size_of_discr_list (dw_discr_list_ref discr_list)
2038 {
2039 int size = 0;
2040
2041 for (dw_discr_list_ref list = discr_list;
2042 list != NULL;
2043 list = list->dw_discr_next)
2044 {
2045 /* One byte for the discriminant value descriptor, and then one or two
2046 LEB128 numbers, depending on whether it's a single case label or a
2047 range label. */
2048 size += 1;
2049 size += size_of_discr_value (&list->dw_discr_lower_bound);
2050 if (list->dw_discr_range != 0)
2051 size += size_of_discr_value (&list->dw_discr_upper_bound);
2052 }
2053 return size;
2054 }
2055
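/* Illustrative worked example, not part of the implementation: a
   DW_AT_discr_list holding the single unsigned label 3 followed by the
   range 10..20 takes 1 + 1 bytes (descriptor, ULEB128 3) plus
   1 + 1 + 1 bytes (descriptor, ULEB128 10, ULEB128 20), i.e. 5 bytes
   according to the two helpers above.  */
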
2056 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2057 static void get_ref_die_offset_label (char *, dw_die_ref);
2058 static unsigned long int get_ref_die_offset (dw_die_ref);
2059
2060 /* Output location description stack opcode's operands (if any).
2061 The for_eh_or_skip parameter controls whether register numbers are
2062 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2063 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2064 info). This should be suppressed for the cases that have not been converted
2065 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2066
2067 static void
2068 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2069 {
2070 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2071 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2072
2073 switch (loc->dw_loc_opc)
2074 {
2075 #ifdef DWARF2_DEBUGGING_INFO
2076 case DW_OP_const2u:
2077 case DW_OP_const2s:
2078 dw2_asm_output_data (2, val1->v.val_int, NULL);
2079 break;
2080 case DW_OP_const4u:
2081 if (loc->dtprel)
2082 {
2083 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2084 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2085 val1->v.val_addr);
2086 fputc ('\n', asm_out_file);
2087 break;
2088 }
2089 /* FALLTHRU */
2090 case DW_OP_const4s:
2091 dw2_asm_output_data (4, val1->v.val_int, NULL);
2092 break;
2093 case DW_OP_const8u:
2094 if (loc->dtprel)
2095 {
2096 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2097 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2098 val1->v.val_addr);
2099 fputc ('\n', asm_out_file);
2100 break;
2101 }
2102 /* FALLTHRU */
2103 case DW_OP_const8s:
2104 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2105 dw2_asm_output_data (8, val1->v.val_int, NULL);
2106 break;
2107 case DW_OP_skip:
2108 case DW_OP_bra:
2109 {
2110 int offset;
2111
2112 gcc_assert (val1->val_class == dw_val_class_loc);
2113 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2114
2115 dw2_asm_output_data (2, offset, NULL);
2116 }
2117 break;
2118 case DW_OP_implicit_value:
2119 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2120 switch (val2->val_class)
2121 {
2122 case dw_val_class_const:
2123 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2124 break;
2125 case dw_val_class_vec:
2126 {
2127 unsigned int elt_size = val2->v.val_vec.elt_size;
2128 unsigned int len = val2->v.val_vec.length;
2129 unsigned int i;
2130 unsigned char *p;
2131
2132 if (elt_size > sizeof (HOST_WIDE_INT))
2133 {
2134 elt_size /= 2;
2135 len *= 2;
2136 }
2137 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2138 i < len;
2139 i++, p += elt_size)
2140 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2141 "fp or vector constant word %u", i);
2142 }
2143 break;
2144 case dw_val_class_const_double:
2145 {
2146 unsigned HOST_WIDE_INT first, second;
2147
2148 if (WORDS_BIG_ENDIAN)
2149 {
2150 first = val2->v.val_double.high;
2151 second = val2->v.val_double.low;
2152 }
2153 else
2154 {
2155 first = val2->v.val_double.low;
2156 second = val2->v.val_double.high;
2157 }
2158 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2159 first, NULL);
2160 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2161 second, NULL);
2162 }
2163 break;
2164 case dw_val_class_wide_int:
2165 {
2166 int i;
2167 int len = get_full_len (*val2->v.val_wide);
2168 if (WORDS_BIG_ENDIAN)
2169 for (i = len - 1; i >= 0; --i)
2170 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2171 val2->v.val_wide->elt (i), NULL);
2172 else
2173 for (i = 0; i < len; ++i)
2174 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2175 val2->v.val_wide->elt (i), NULL);
2176 }
2177 break;
2178 case dw_val_class_addr:
2179 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2180 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2181 break;
2182 default:
2183 gcc_unreachable ();
2184 }
2185 break;
2186 #else
2187 case DW_OP_const2u:
2188 case DW_OP_const2s:
2189 case DW_OP_const4u:
2190 case DW_OP_const4s:
2191 case DW_OP_const8u:
2192 case DW_OP_const8s:
2193 case DW_OP_skip:
2194 case DW_OP_bra:
2195 case DW_OP_implicit_value:
2196 /* We currently don't make any attempt to make sure these are
2197 aligned properly like we do for the main unwind info, so
2198 don't support emitting things larger than a byte if we're
2199 only doing unwinding. */
2200 gcc_unreachable ();
2201 #endif
2202 case DW_OP_const1u:
2203 case DW_OP_const1s:
2204 dw2_asm_output_data (1, val1->v.val_int, NULL);
2205 break;
2206 case DW_OP_constu:
2207 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2208 break;
2209 case DW_OP_consts:
2210 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2211 break;
2212 case DW_OP_pick:
2213 dw2_asm_output_data (1, val1->v.val_int, NULL);
2214 break;
2215 case DW_OP_plus_uconst:
2216 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2217 break;
2218 case DW_OP_breg0:
2219 case DW_OP_breg1:
2220 case DW_OP_breg2:
2221 case DW_OP_breg3:
2222 case DW_OP_breg4:
2223 case DW_OP_breg5:
2224 case DW_OP_breg6:
2225 case DW_OP_breg7:
2226 case DW_OP_breg8:
2227 case DW_OP_breg9:
2228 case DW_OP_breg10:
2229 case DW_OP_breg11:
2230 case DW_OP_breg12:
2231 case DW_OP_breg13:
2232 case DW_OP_breg14:
2233 case DW_OP_breg15:
2234 case DW_OP_breg16:
2235 case DW_OP_breg17:
2236 case DW_OP_breg18:
2237 case DW_OP_breg19:
2238 case DW_OP_breg20:
2239 case DW_OP_breg21:
2240 case DW_OP_breg22:
2241 case DW_OP_breg23:
2242 case DW_OP_breg24:
2243 case DW_OP_breg25:
2244 case DW_OP_breg26:
2245 case DW_OP_breg27:
2246 case DW_OP_breg28:
2247 case DW_OP_breg29:
2248 case DW_OP_breg30:
2249 case DW_OP_breg31:
2250 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2251 break;
2252 case DW_OP_regx:
2253 {
2254 unsigned r = val1->v.val_unsigned;
2255 if (for_eh_or_skip >= 0)
2256 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2257 gcc_assert (size_of_uleb128 (r)
2258 == size_of_uleb128 (val1->v.val_unsigned));
2259 dw2_asm_output_data_uleb128 (r, NULL);
2260 }
2261 break;
2262 case DW_OP_fbreg:
2263 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2264 break;
2265 case DW_OP_bregx:
2266 {
2267 unsigned r = val1->v.val_unsigned;
2268 if (for_eh_or_skip >= 0)
2269 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2270 gcc_assert (size_of_uleb128 (r)
2271 == size_of_uleb128 (val1->v.val_unsigned));
2272 dw2_asm_output_data_uleb128 (r, NULL);
2273 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2274 }
2275 break;
2276 case DW_OP_piece:
2277 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2278 break;
2279 case DW_OP_bit_piece:
2280 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2281 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2282 break;
2283 case DW_OP_deref_size:
2284 case DW_OP_xderef_size:
2285 dw2_asm_output_data (1, val1->v.val_int, NULL);
2286 break;
2287
2288 case DW_OP_addr:
2289 if (loc->dtprel)
2290 {
2291 if (targetm.asm_out.output_dwarf_dtprel)
2292 {
2293 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2294 DWARF2_ADDR_SIZE,
2295 val1->v.val_addr);
2296 fputc ('\n', asm_out_file);
2297 }
2298 else
2299 gcc_unreachable ();
2300 }
2301 else
2302 {
2303 #ifdef DWARF2_DEBUGGING_INFO
2304 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2305 #else
2306 gcc_unreachable ();
2307 #endif
2308 }
2309 break;
2310
2311 case DW_OP_GNU_addr_index:
2312 case DW_OP_addrx:
2313 case DW_OP_GNU_const_index:
2314 case DW_OP_constx:
2315 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2316 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2317 "(index into .debug_addr)");
2318 break;
2319
2320 case DW_OP_call2:
2321 case DW_OP_call4:
2322 {
2323 unsigned long die_offset
2324 = get_ref_die_offset (val1->v.val_die_ref.die);
2325 /* Make sure the offset has been computed and that we can encode it as
2326 an operand. */
2327 gcc_assert (die_offset > 0
2328 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2329 ? 0xffff
2330 : 0xffffffff));
2331 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2332 die_offset, NULL);
2333 }
2334 break;
2335
2336 case DW_OP_call_ref:
2337 case DW_OP_GNU_variable_value:
2338 {
2339 char label[MAX_ARTIFICIAL_LABEL_BYTES
2340 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2341 gcc_assert (val1->val_class == dw_val_class_die_ref);
2342 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2343 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2344 }
2345 break;
2346
2347 case DW_OP_implicit_pointer:
2348 case DW_OP_GNU_implicit_pointer:
2349 {
2350 char label[MAX_ARTIFICIAL_LABEL_BYTES
2351 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2352 gcc_assert (val1->val_class == dw_val_class_die_ref);
2353 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2354 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2355 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2356 }
2357 break;
2358
2359 case DW_OP_entry_value:
2360 case DW_OP_GNU_entry_value:
2361 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2362 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2363 break;
2364
2365 case DW_OP_const_type:
2366 case DW_OP_GNU_const_type:
2367 {
2368 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2369 gcc_assert (o);
2370 dw2_asm_output_data_uleb128 (o, NULL);
2371 switch (val2->val_class)
2372 {
2373 case dw_val_class_const:
2374 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2375 dw2_asm_output_data (1, l, NULL);
2376 dw2_asm_output_data (l, val2->v.val_int, NULL);
2377 break;
2378 case dw_val_class_vec:
2379 {
2380 unsigned int elt_size = val2->v.val_vec.elt_size;
2381 unsigned int len = val2->v.val_vec.length;
2382 unsigned int i;
2383 unsigned char *p;
2384
2385 l = len * elt_size;
2386 dw2_asm_output_data (1, l, NULL);
2387 if (elt_size > sizeof (HOST_WIDE_INT))
2388 {
2389 elt_size /= 2;
2390 len *= 2;
2391 }
2392 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2393 i < len;
2394 i++, p += elt_size)
2395 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2396 "fp or vector constant word %u", i);
2397 }
2398 break;
2399 case dw_val_class_const_double:
2400 {
2401 unsigned HOST_WIDE_INT first, second;
2402 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2403
2404 dw2_asm_output_data (1, 2 * l, NULL);
2405 if (WORDS_BIG_ENDIAN)
2406 {
2407 first = val2->v.val_double.high;
2408 second = val2->v.val_double.low;
2409 }
2410 else
2411 {
2412 first = val2->v.val_double.low;
2413 second = val2->v.val_double.high;
2414 }
2415 dw2_asm_output_data (l, first, NULL);
2416 dw2_asm_output_data (l, second, NULL);
2417 }
2418 break;
2419 case dw_val_class_wide_int:
2420 {
2421 int i;
2422 int len = get_full_len (*val2->v.val_wide);
2423 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2424
2425 dw2_asm_output_data (1, len * l, NULL);
2426 if (WORDS_BIG_ENDIAN)
2427 for (i = len - 1; i >= 0; --i)
2428 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2429 else
2430 for (i = 0; i < len; ++i)
2431 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2432 }
2433 break;
2434 default:
2435 gcc_unreachable ();
2436 }
2437 }
2438 break;
2439 case DW_OP_regval_type:
2440 case DW_OP_GNU_regval_type:
2441 {
2442 unsigned r = val1->v.val_unsigned;
2443 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2444 gcc_assert (o);
2445 if (for_eh_or_skip >= 0)
2446 {
2447 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2448 gcc_assert (size_of_uleb128 (r)
2449 == size_of_uleb128 (val1->v.val_unsigned));
2450 }
2451 dw2_asm_output_data_uleb128 (r, NULL);
2452 dw2_asm_output_data_uleb128 (o, NULL);
2453 }
2454 break;
2455 case DW_OP_deref_type:
2456 case DW_OP_GNU_deref_type:
2457 {
2458 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2459 gcc_assert (o);
2460 dw2_asm_output_data (1, val1->v.val_int, NULL);
2461 dw2_asm_output_data_uleb128 (o, NULL);
2462 }
2463 break;
2464 case DW_OP_convert:
2465 case DW_OP_reinterpret:
2466 case DW_OP_GNU_convert:
2467 case DW_OP_GNU_reinterpret:
2468 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2469 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2470 else
2471 {
2472 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2473 gcc_assert (o);
2474 dw2_asm_output_data_uleb128 (o, NULL);
2475 }
2476 break;
2477
2478 case DW_OP_GNU_parameter_ref:
2479 {
2480 unsigned long o;
2481 gcc_assert (val1->val_class == dw_val_class_die_ref);
2482 o = get_ref_die_offset (val1->v.val_die_ref.die);
2483 dw2_asm_output_data (4, o, NULL);
2484 }
2485 break;
2486
2487 default:
2488 /* Other codes have no operands. */
2489 break;
2490 }
2491 }
2492
2493 /* Output a sequence of location operations.
2494 The for_eh_or_skip parameter controls whether register numbers are
2495 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2496 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2497 info). This should be suppressed for the cases that have not been converted
2498 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2499
2500 void
2501 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2502 {
2503 for (; loc != NULL; loc = loc->dw_loc_next)
2504 {
2505 enum dwarf_location_atom opc = loc->dw_loc_opc;
2506 /* Output the opcode. */
2507 if (for_eh_or_skip >= 0
2508 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2509 {
2510 unsigned r = (opc - DW_OP_breg0);
2511 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2512 gcc_assert (r <= 31);
2513 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2514 }
2515 else if (for_eh_or_skip >= 0
2516 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2517 {
2518 unsigned r = (opc - DW_OP_reg0);
2519 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2520 gcc_assert (r <= 31);
2521 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2522 }
2523
2524 dw2_asm_output_data (1, opc,
2525 "%s", dwarf_stack_op_name (opc));
2526
2527 /* Output the operand(s) (if any). */
2528 output_loc_operands (loc, for_eh_or_skip);
2529 }
2530 }
2531
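/* Illustrative sketch, not part of the implementation: for a descriptor
   list holding just DW_OP_fbreg -16, output_loc_sequence emits one byte
   for the opcode followed by its SLEB128 operand, roughly

     .byte    0x91    # DW_OP_fbreg
     .sleb128 -16

   with the exact assembler syntax depending on the dw2_asm_output_*
   routines and the target.  */
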
2532 /* Output location description stack opcode's operands (if any).
2533 The output is single bytes on a line, suitable for .cfi_escape. */
2534
2535 static void
2536 output_loc_operands_raw (dw_loc_descr_ref loc)
2537 {
2538 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2539 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2540
2541 switch (loc->dw_loc_opc)
2542 {
2543 case DW_OP_addr:
2544 case DW_OP_GNU_addr_index:
2545 case DW_OP_addrx:
2546 case DW_OP_GNU_const_index:
2547 case DW_OP_constx:
2548 case DW_OP_implicit_value:
2549 /* We cannot output addresses in .cfi_escape, only bytes. */
2550 gcc_unreachable ();
2551
2552 case DW_OP_const1u:
2553 case DW_OP_const1s:
2554 case DW_OP_pick:
2555 case DW_OP_deref_size:
2556 case DW_OP_xderef_size:
2557 fputc (',', asm_out_file);
2558 dw2_asm_output_data_raw (1, val1->v.val_int);
2559 break;
2560
2561 case DW_OP_const2u:
2562 case DW_OP_const2s:
2563 fputc (',', asm_out_file);
2564 dw2_asm_output_data_raw (2, val1->v.val_int);
2565 break;
2566
2567 case DW_OP_const4u:
2568 case DW_OP_const4s:
2569 fputc (',', asm_out_file);
2570 dw2_asm_output_data_raw (4, val1->v.val_int);
2571 break;
2572
2573 case DW_OP_const8u:
2574 case DW_OP_const8s:
2575 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2576 fputc (',', asm_out_file);
2577 dw2_asm_output_data_raw (8, val1->v.val_int);
2578 break;
2579
2580 case DW_OP_skip:
2581 case DW_OP_bra:
2582 {
2583 int offset;
2584
2585 gcc_assert (val1->val_class == dw_val_class_loc);
2586 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2587
2588 fputc (',', asm_out_file);
2589 dw2_asm_output_data_raw (2, offset);
2590 }
2591 break;
2592
2593 case DW_OP_regx:
2594 {
2595 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2596 gcc_assert (size_of_uleb128 (r)
2597 == size_of_uleb128 (val1->v.val_unsigned));
2598 fputc (',', asm_out_file);
2599 dw2_asm_output_data_uleb128_raw (r);
2600 }
2601 break;
2602
2603 case DW_OP_constu:
2604 case DW_OP_plus_uconst:
2605 case DW_OP_piece:
2606 fputc (',', asm_out_file);
2607 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2608 break;
2609
2610 case DW_OP_bit_piece:
2611 fputc (',', asm_out_file);
2612 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2613 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2614 break;
2615
2616 case DW_OP_consts:
2617 case DW_OP_breg0:
2618 case DW_OP_breg1:
2619 case DW_OP_breg2:
2620 case DW_OP_breg3:
2621 case DW_OP_breg4:
2622 case DW_OP_breg5:
2623 case DW_OP_breg6:
2624 case DW_OP_breg7:
2625 case DW_OP_breg8:
2626 case DW_OP_breg9:
2627 case DW_OP_breg10:
2628 case DW_OP_breg11:
2629 case DW_OP_breg12:
2630 case DW_OP_breg13:
2631 case DW_OP_breg14:
2632 case DW_OP_breg15:
2633 case DW_OP_breg16:
2634 case DW_OP_breg17:
2635 case DW_OP_breg18:
2636 case DW_OP_breg19:
2637 case DW_OP_breg20:
2638 case DW_OP_breg21:
2639 case DW_OP_breg22:
2640 case DW_OP_breg23:
2641 case DW_OP_breg24:
2642 case DW_OP_breg25:
2643 case DW_OP_breg26:
2644 case DW_OP_breg27:
2645 case DW_OP_breg28:
2646 case DW_OP_breg29:
2647 case DW_OP_breg30:
2648 case DW_OP_breg31:
2649 case DW_OP_fbreg:
2650 fputc (',', asm_out_file);
2651 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2652 break;
2653
2654 case DW_OP_bregx:
2655 {
2656 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2657 gcc_assert (size_of_uleb128 (r)
2658 == size_of_uleb128 (val1->v.val_unsigned));
2659 fputc (',', asm_out_file);
2660 dw2_asm_output_data_uleb128_raw (r);
2661 fputc (',', asm_out_file);
2662 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2663 }
2664 break;
2665
2666 case DW_OP_implicit_pointer:
2667 case DW_OP_entry_value:
2668 case DW_OP_const_type:
2669 case DW_OP_regval_type:
2670 case DW_OP_deref_type:
2671 case DW_OP_convert:
2672 case DW_OP_reinterpret:
2673 case DW_OP_GNU_implicit_pointer:
2674 case DW_OP_GNU_entry_value:
2675 case DW_OP_GNU_const_type:
2676 case DW_OP_GNU_regval_type:
2677 case DW_OP_GNU_deref_type:
2678 case DW_OP_GNU_convert:
2679 case DW_OP_GNU_reinterpret:
2680 case DW_OP_GNU_parameter_ref:
2681 gcc_unreachable ();
2682 break;
2683
2684 default:
2685 /* Other codes have no operands. */
2686 break;
2687 }
2688 }
2689
2690 void
2691 output_loc_sequence_raw (dw_loc_descr_ref loc)
2692 {
2693 while (1)
2694 {
2695 enum dwarf_location_atom opc = loc->dw_loc_opc;
2696 /* Output the opcode. */
2697 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2698 {
2699 unsigned r = (opc - DW_OP_breg0);
2700 r = DWARF2_FRAME_REG_OUT (r, 1);
2701 gcc_assert (r <= 31);
2702 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2703 }
2704 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2705 {
2706 unsigned r = (opc - DW_OP_reg0);
2707 r = DWARF2_FRAME_REG_OUT (r, 1);
2708 gcc_assert (r <= 31);
2709 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2710 }
2711 /* Output the opcode. */
2712 fprintf (asm_out_file, "%#x", opc);
2713 output_loc_operands_raw (loc);
2714
2715 if (!loc->dw_loc_next)
2716 break;
2717 loc = loc->dw_loc_next;
2718
2719 fputc (',', asm_out_file);
2720 }
2721 }
2722
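/* Illustrative sketch, not part of the implementation: the raw variant
   prints comma-separated bytes so the result can be appended to a
   .cfi_escape directive.  Assuming the frame-register mapping is the
   identity for the register involved, DW_OP_bregx 36, 8 would come out
   as something like

     0x92,0x24,0x8

   (0x92 = DW_OP_bregx, then the ULEB128 register and SLEB128 offset).  */
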
2723 /* This function builds a dwarf location descriptor sequence from a
2724 dw_cfa_location, adding the given OFFSET to the result of the
2725 expression. */
2726
2727 struct dw_loc_descr_node *
2728 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2729 {
2730 struct dw_loc_descr_node *head, *tmp;
2731
2732 offset += cfa->offset;
2733
2734 if (cfa->indirect)
2735 {
2736 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2737 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2738 head->dw_loc_oprnd1.val_entry = NULL;
2739 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2740 add_loc_descr (&head, tmp);
2741 loc_descr_plus_const (&head, offset);
2742 }
2743 else
2744 head = new_reg_loc_descr (cfa->reg, offset);
2745
2746 return head;
2747 }
2748
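/* Illustrative sketch, not part of the implementation: with a
   non-indirect CFA of { reg = FP, offset = 16 } and OFFSET = 8 the
   result is a single DW_OP_bregFP 24 descriptor (DW_OP_bregx for
   register numbers above 31).  For an indirect CFA the sequence is
   roughly

     DW_OP_bregFP <base_offset>; DW_OP_deref; DW_OP_plus_uconst 24

   where FP stands for whatever DWARF number the frame register has on
   the target, and negative offsets use a different final encoding.  */
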
2749 /* This function builds a dwarf location descriptor sequence for
2750    the address at OFFSET from the CFA when the stack is aligned to
2751    ALIGNMENT bytes.  */
2752
2753 struct dw_loc_descr_node *
2754 build_cfa_aligned_loc (dw_cfa_location *cfa,
2755 poly_int64 offset, HOST_WIDE_INT alignment)
2756 {
2757 struct dw_loc_descr_node *head;
2758 unsigned int dwarf_fp
2759 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2760
2761 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2762 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2763 {
2764 head = new_reg_loc_descr (dwarf_fp, 0);
2765 add_loc_descr (&head, int_loc_descriptor (alignment));
2766 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2767 loc_descr_plus_const (&head, offset);
2768 }
2769 else
2770 head = new_reg_loc_descr (dwarf_fp, offset);
2771 return head;
2772 }
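
/* Illustrative sketch, not part of the implementation: when the CFA is
   the hard frame pointer itself, the descriptor built above is,
   schematically,

     DW_OP_bregFP 0; <literal ALIGNMENT>; DW_OP_and; DW_OP_plus_uconst OFFSET

   i.e. the frame pointer masked by whatever alignment value the caller
   passes, plus OFFSET; otherwise a plain DW_OP_bregFP OFFSET is used.  */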
2773 \f
2774 /* And now, the support for symbolic debugging information. */
2775
2776 /* .debug_str support. */
2777
2778 static void dwarf2out_init (const char *);
2779 static void dwarf2out_finish (const char *);
2780 static void dwarf2out_early_finish (const char *);
2781 static void dwarf2out_assembly_start (void);
2782 static void dwarf2out_define (unsigned int, const char *);
2783 static void dwarf2out_undef (unsigned int, const char *);
2784 static void dwarf2out_start_source_file (unsigned, const char *);
2785 static void dwarf2out_end_source_file (unsigned);
2786 static void dwarf2out_function_decl (tree);
2787 static void dwarf2out_begin_block (unsigned, unsigned);
2788 static void dwarf2out_end_block (unsigned, unsigned);
2789 static bool dwarf2out_ignore_block (const_tree);
2790 static void dwarf2out_early_global_decl (tree);
2791 static void dwarf2out_late_global_decl (tree);
2792 static void dwarf2out_type_decl (tree, int);
2793 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2794 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2795 dw_die_ref);
2796 static void dwarf2out_abstract_function (tree);
2797 static void dwarf2out_var_location (rtx_insn *);
2798 static void dwarf2out_inline_entry (tree);
2799 static void dwarf2out_size_function (tree);
2800 static void dwarf2out_begin_function (tree);
2801 static void dwarf2out_end_function (unsigned int);
2802 static void dwarf2out_register_main_translation_unit (tree unit);
2803 static void dwarf2out_set_name (tree, tree);
2804 static void dwarf2out_register_external_die (tree decl, const char *sym,
2805 unsigned HOST_WIDE_INT off);
2806 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2807 unsigned HOST_WIDE_INT *off);
2808
2809 /* The debug hooks structure. */
2810
2811 const struct gcc_debug_hooks dwarf2_debug_hooks =
2812 {
2813 dwarf2out_init,
2814 dwarf2out_finish,
2815 dwarf2out_early_finish,
2816 dwarf2out_assembly_start,
2817 dwarf2out_define,
2818 dwarf2out_undef,
2819 dwarf2out_start_source_file,
2820 dwarf2out_end_source_file,
2821 dwarf2out_begin_block,
2822 dwarf2out_end_block,
2823 dwarf2out_ignore_block,
2824 dwarf2out_source_line,
2825 dwarf2out_begin_prologue,
2826 #if VMS_DEBUGGING_INFO
2827 dwarf2out_vms_end_prologue,
2828 dwarf2out_vms_begin_epilogue,
2829 #else
2830 debug_nothing_int_charstar,
2831 debug_nothing_int_charstar,
2832 #endif
2833 dwarf2out_end_epilogue,
2834 dwarf2out_begin_function,
2835 dwarf2out_end_function, /* end_function */
2836 dwarf2out_register_main_translation_unit,
2837 dwarf2out_function_decl, /* function_decl */
2838 dwarf2out_early_global_decl,
2839 dwarf2out_late_global_decl,
2840 dwarf2out_type_decl, /* type_decl */
2841 dwarf2out_imported_module_or_decl,
2842 dwarf2out_die_ref_for_decl,
2843 dwarf2out_register_external_die,
2844 debug_nothing_tree, /* deferred_inline_function */
2845 /* The DWARF 2 backend tries to reduce debugging bloat by not
2846 emitting the abstract description of inline functions until
2847 something tries to reference them. */
2848 dwarf2out_abstract_function, /* outlining_inline_function */
2849 debug_nothing_rtx_code_label, /* label */
2850 debug_nothing_int, /* handle_pch */
2851 dwarf2out_var_location,
2852 dwarf2out_inline_entry, /* inline_entry */
2853 dwarf2out_size_function, /* size_function */
2854 dwarf2out_switch_text_section,
2855 dwarf2out_set_name,
2856 1, /* start_end_main_source_file */
2857 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2858 };
2859
2860 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2861 {
2862 dwarf2out_init,
2863 debug_nothing_charstar,
2864 debug_nothing_charstar,
2865 dwarf2out_assembly_start,
2866 debug_nothing_int_charstar,
2867 debug_nothing_int_charstar,
2868 debug_nothing_int_charstar,
2869 debug_nothing_int,
2870 debug_nothing_int_int, /* begin_block */
2871 debug_nothing_int_int, /* end_block */
2872 debug_true_const_tree, /* ignore_block */
2873 dwarf2out_source_line, /* source_line */
2874 debug_nothing_int_int_charstar, /* begin_prologue */
2875 debug_nothing_int_charstar, /* end_prologue */
2876 debug_nothing_int_charstar, /* begin_epilogue */
2877 debug_nothing_int_charstar, /* end_epilogue */
2878 debug_nothing_tree, /* begin_function */
2879 debug_nothing_int, /* end_function */
2880 debug_nothing_tree, /* register_main_translation_unit */
2881 debug_nothing_tree, /* function_decl */
2882 debug_nothing_tree, /* early_global_decl */
2883 debug_nothing_tree, /* late_global_decl */
2884 debug_nothing_tree_int, /* type_decl */
2885 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2886 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2887 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2888 debug_nothing_tree, /* deferred_inline_function */
2889 debug_nothing_tree, /* outlining_inline_function */
2890 debug_nothing_rtx_code_label, /* label */
2891 debug_nothing_int, /* handle_pch */
2892 debug_nothing_rtx_insn, /* var_location */
2893 debug_nothing_tree, /* inline_entry */
2894 debug_nothing_tree, /* size_function */
2895 debug_nothing_void, /* switch_text_section */
2896 debug_nothing_tree_tree, /* set_name */
2897 0, /* start_end_main_source_file */
2898 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2899 };
2900 \f
2901 /* NOTE: In the comments in this file, many references are made to
2902 "Debugging Information Entries". This term is abbreviated as `DIE'
2903 throughout the remainder of this file. */
2904
2905 /* An internal representation of the DWARF output is built, and then
2906 walked to generate the DWARF debugging info. The walk of the internal
2907 representation is done after the entire program has been compiled.
2908 The types below are used to describe the internal representation. */
2909
2910 /* Whether to put type DIEs into their own section .debug_types instead
2911    of making them part of the .debug_info section.  Only supported for
2912    Dwarf V4 or higher, and only if the user didn't disable it through
2913    -fno-debug-types-section.  It is more efficient to put them in
2914    separate comdat sections since the linker will then be able to
2915    remove duplicates.  But not all tools support .debug_types sections
2916    yet.  For Dwarf V5 or higher .debug_types doesn't exist any more;
2917    type units use the DW_UT_type unit type in the .debug_info section.  */
2918
2919 #define use_debug_types (dwarf_version >= 4 && flag_debug_types_section)
2920
2921 /* Various DIE's use offsets relative to the beginning of the
2922 .debug_info section to refer to each other. */
2923
2924 typedef long int dw_offset;
2925
2926 struct comdat_type_node;
2927
2928 /* The entries in the line_info table more-or-less mirror the opcodes
2929 that are used in the real dwarf line table. Arrays of these entries
2930 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2931 supported. */
2932
2933 enum dw_line_info_opcode {
2934 /* Emit DW_LNE_set_address; the operand is the label index. */
2935 LI_set_address,
2936
2937 /* Emit a row to the matrix with the given line. This may be done
2938 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2939 special opcodes. */
2940 LI_set_line,
2941
2942 /* Emit a DW_LNS_set_file. */
2943 LI_set_file,
2944
2945 /* Emit a DW_LNS_set_column. */
2946 LI_set_column,
2947
2948 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2949 LI_negate_stmt,
2950
2951 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2952 LI_set_prologue_end,
2953 LI_set_epilogue_begin,
2954
2955 /* Emit a DW_LNE_set_discriminator. */
2956 LI_set_discriminator,
2957
2958 /* Output a Fixed Advance PC; the target PC is the label index; the
2959 base PC is the previous LI_adv_address or LI_set_address entry.
2960 We only use this when emitting debug views without assembler
2961 support, at explicit user request. Ideally, we should only use
2962 it when the offset might be zero but we can't tell: it's the only
2963 way to maybe change the PC without resetting the view number. */
2964 LI_adv_address
2965 };
2966
2967 typedef struct GTY(()) dw_line_info_struct {
2968 enum dw_line_info_opcode opcode;
2969 unsigned int val;
2970 } dw_line_info_entry;
2971
2972
2973 struct GTY(()) dw_line_info_table {
2974 /* The label that marks the end of this section. */
2975 const char *end_label;
2976
2977 /* The values for the last row of the matrix, as collected in the table.
2978 These are used to minimize the changes to the next row. */
2979 unsigned int file_num;
2980 unsigned int line_num;
2981 unsigned int column_num;
2982 int discrim_num;
2983 bool is_stmt;
2984 bool in_use;
2985
2986 /* This denotes the NEXT view number.
2987
2988 If it is 0, it is known that the NEXT view will be the first view
2989 at the given PC.
2990
2991 If it is -1, we're forcing the view number to be reset, e.g. at a
2992 function entry.
2993
2994 The meaning of other nonzero values depends on whether we're
2995 computing views internally or leaving it for the assembler to do
2996 so. If we're emitting them internally, view denotes the view
2997 number since the last known advance of PC. If we're leaving it
2998 for the assembler, it denotes the LVU label number that we're
2999 going to ask the assembler to assign. */
3000 var_loc_view view;
3001
3002 /* This counts the number of symbolic views emitted in this table
3003 since the latest view reset. Its max value, over all tables,
3004 sets symview_upper_bound. */
3005 var_loc_view symviews_since_reset;
3006
3007 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
3008 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
3009 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
3010 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
3011
3012 vec<dw_line_info_entry, va_gc> *entries;
3013 };
3014
3015 /* This is an upper bound for view numbers that the assembler may
3016    assign to symbolic views output in this translation unit.  It is used to
3017 decide how big a field to use to represent view numbers in
3018 symview-classed attributes. */
3019
3020 static var_loc_view symview_upper_bound;
3021
3022 /* If we're keeping track of location views and their reset points, and
3023 INSN is a reset point (i.e., it necessarily advances the PC), mark
3024 the next view in TABLE as reset. */
3025
3026 static void
3027 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
3028 {
3029 if (!debug_internal_reset_location_views)
3030 return;
3031
3032 /* Maybe turn (part of?) this test into a default target hook. */
3033 int reset = 0;
3034
3035 if (targetm.reset_location_view)
3036 reset = targetm.reset_location_view (insn);
3037
3038 if (reset)
3039 ;
3040 else if (JUMP_TABLE_DATA_P (insn))
3041 reset = 1;
3042 else if (GET_CODE (insn) == USE
3043 || GET_CODE (insn) == CLOBBER
3044 || GET_CODE (insn) == ASM_INPUT
3045 || asm_noperands (insn) >= 0)
3046 ;
3047 else if (get_attr_min_length (insn) > 0)
3048 reset = 1;
3049
3050 if (reset > 0 && !RESETTING_VIEW_P (table->view))
3051 RESET_NEXT_VIEW (table->view);
3052 }
3053
3054 /* Each DIE attribute has a field specifying the attribute kind
3055    and an attribute value.  Attributes are collected in a vector
3056    below the DIE they modify.  */
3057
3058 typedef struct GTY(()) dw_attr_struct {
3059 enum dwarf_attribute dw_attr;
3060 dw_val_node dw_attr_val;
3061 }
3062 dw_attr_node;
3063
3064
3065 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3066 The children of each node form a circular list linked by
3067 die_sib. die_child points to the node *before* the "first" child node. */
3068
3069 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3070 union die_symbol_or_type_node
3071 {
3072 const char * GTY ((tag ("0"))) die_symbol;
3073 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3074 }
3075 GTY ((desc ("%0.comdat_type_p"))) die_id;
3076 vec<dw_attr_node, va_gc> *die_attr;
3077 dw_die_ref die_parent;
3078 dw_die_ref die_child;
3079 dw_die_ref die_sib;
3080 dw_die_ref die_definition; /* ref from a specification to its definition */
3081 dw_offset die_offset;
3082 unsigned long die_abbrev;
3083 int die_mark;
3084 unsigned int decl_id;
3085 enum dwarf_tag die_tag;
3086 /* Die is used and must not be pruned as unused. */
3087 BOOL_BITFIELD die_perennial_p : 1;
3088 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3089   /* For an external ref to die_symbol, set if die_offset contains an extra
3090      offset to be added to that symbol.  */
3091 BOOL_BITFIELD with_offset : 1;
3092 /* Whether this DIE was removed from the DIE tree, for example via
3093 prune_unused_types. We don't consider those present from the
3094 DIE lookup routines. */
3095 BOOL_BITFIELD removed : 1;
3096 /* Lots of spare bits. */
3097 }
3098 die_node;
3099
3100 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3101 static bool early_dwarf;
3102 static bool early_dwarf_finished;
3103 struct set_early_dwarf {
3104 bool saved;
3105 set_early_dwarf () : saved(early_dwarf)
3106 {
3107 gcc_assert (! early_dwarf_finished);
3108 early_dwarf = true;
3109 }
3110 ~set_early_dwarf () { early_dwarf = saved; }
3111 };
3112
3113 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3114 #define FOR_EACH_CHILD(die, c, expr) do { \
3115 c = die->die_child; \
3116 if (c) do { \
3117 c = c->die_sib; \
3118 expr; \
3119 } while (c != die->die_child); \
3120 } while (0)
3121
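/* Illustrative sketch, not part of the implementation: a hypothetical
   helper counting the children of DIE with the macro above.  Because
   the child list is circular and die_child points at the node *before*
   the "first" child, the children are visited in order.

     static unsigned
     count_children (dw_die_ref die)
     {
       unsigned n = 0;
       dw_die_ref c;
       FOR_EACH_CHILD (die, c, n++);
       return n;
     }  */
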
3122 /* The pubname structure */
3123
3124 typedef struct GTY(()) pubname_struct {
3125 dw_die_ref die;
3126 const char *name;
3127 }
3128 pubname_entry;
3129
3130
3131 struct GTY(()) dw_ranges {
3132 const char *label;
3133 /* If this is positive, it's a block number, otherwise it's a
3134 bitwise-negated index into dw_ranges_by_label. */
3135 int num;
3136 /* Index for the range list for DW_FORM_rnglistx. */
3137 unsigned int idx : 31;
3138   /* True if this range might be in a different section
3139      from the previous entry.  */
3140 unsigned int maybe_new_sec : 1;
3141 };
3142
3143 /* A structure to hold a macinfo entry. */
3144
3145 typedef struct GTY(()) macinfo_struct {
3146 unsigned char code;
3147 unsigned HOST_WIDE_INT lineno;
3148 const char *info;
3149 }
3150 macinfo_entry;
3151
3152
3153 struct GTY(()) dw_ranges_by_label {
3154 const char *begin;
3155 const char *end;
3156 };
3157
3158 /* The comdat type node structure. */
3159 struct GTY(()) comdat_type_node
3160 {
3161 dw_die_ref root_die;
3162 dw_die_ref type_die;
3163 dw_die_ref skeleton_die;
3164 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3165 comdat_type_node *next;
3166 };
3167
3168 /* A list of DIEs for which we can't determine ancestry (parent_die
3169 field) just yet. Later in dwarf2out_finish we will fill in the
3170 missing bits. */
3171 typedef struct GTY(()) limbo_die_struct {
3172 dw_die_ref die;
3173 /* The tree for which this DIE was created. We use this to
3174 determine ancestry later. */
3175 tree created_for;
3176 struct limbo_die_struct *next;
3177 }
3178 limbo_die_node;
3179
3180 typedef struct skeleton_chain_struct
3181 {
3182 dw_die_ref old_die;
3183 dw_die_ref new_die;
3184 struct skeleton_chain_struct *parent;
3185 }
3186 skeleton_chain_node;
3187
3188 /* Define a macro which returns nonzero for a TYPE_DECL which was
3189 implicitly generated for a type.
3190
3191 Note that, unlike the C front-end (which generates a NULL named
3192 TYPE_DECL node for each complete tagged type, each array type,
3193 and each function type node created) the C++ front-end generates
3194 a _named_ TYPE_DECL node for each tagged type node created.
3195 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3196 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3197 front-end, but for each type, tagged or not. */
3198
3199 #define TYPE_DECL_IS_STUB(decl) \
3200 (DECL_NAME (decl) == NULL_TREE \
3201 || (DECL_ARTIFICIAL (decl) \
3202 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3203 /* This is necessary for stub decls that \
3204 appear in nested inline functions. */ \
3205 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3206 && (decl_ultimate_origin (decl) \
3207 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3208
3209 /* Information concerning the compilation unit's programming
3210 language, and compiler version. */
3211
3212 /* Fixed size portion of the DWARF compilation unit header. */
3213 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3214 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3215 + (dwarf_version >= 5 ? 4 : 3))
3216
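/* Illustrative worked example, assuming the 32-bit DWARF format
   (DWARF_INITIAL_LENGTH_SIZE == 4, DWARF_OFFSET_SIZE == 4):
   DWARF_COMPILE_UNIT_HEADER_SIZE is 4 + 4 + 3 = 11 bytes for DWARF 4
   (length, version, abbrev offset, address size) and 4 + 4 + 4 = 12
   bytes for DWARF 5, which adds a unit type byte.  */
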
3217 /* Fixed size portion of the DWARF comdat type unit header. */
3218 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3219 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3220 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3221
3222 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3223 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3224 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3225
3226 /* Fixed size portion of public names info. */
3227 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3228
3229 /* Fixed size portion of the address range info. */
3230 #define DWARF_ARANGES_HEADER_SIZE \
3231 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3232 DWARF2_ADDR_SIZE * 2) \
3233 - DWARF_INITIAL_LENGTH_SIZE)
3234
3235 /* Size of padding portion in the address range info. It must be
3236 aligned to twice the pointer size. */
3237 #define DWARF_ARANGES_PAD_SIZE \
3238 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3239 DWARF2_ADDR_SIZE * 2) \
3240 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
3241
3242 /* Use assembler line directives if available. */
3243 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3244 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3245 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3246 #else
3247 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3248 #endif
3249 #endif
3250
3251 /* Use assembler views in line directives if available. */
3252 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3253 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3254 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3255 #else
3256 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3257 #endif
3258 #endif
3259
3260 /* Return true if GCC configure detected assembler support for .loc. */
3261
3262 bool
3263 dwarf2out_default_as_loc_support (void)
3264 {
3265 return DWARF2_ASM_LINE_DEBUG_INFO;
3266 #if (GCC_VERSION >= 3000)
3267 # undef DWARF2_ASM_LINE_DEBUG_INFO
3268 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3269 #endif
3270 }
3271
3272 /* Return true if GCC configure detected assembler support for views
3273 in .loc directives. */
3274
3275 bool
3276 dwarf2out_default_as_locview_support (void)
3277 {
3278 return DWARF2_ASM_VIEW_DEBUG_INFO;
3279 #if (GCC_VERSION >= 3000)
3280 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3281 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3282 #endif
3283 }
3284
3285 /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
3286 view computation, and it refers to a view identifier for which we
3287 will not emit a label because it is known to map to a view number
3288 zero. We won't allocate the bitmap if we're not using assembler
3289 support for location views, but we have to make the variable
3290 visible for GGC and for code that will be optimized out for lack of
3291 support but that's still parsed and compiled. We could abstract it
3292 out with macros, but it's not worth it. */
3293 static GTY(()) bitmap zero_view_p;
3294
3295 /* Evaluate to TRUE iff N is known to identify the first location view
3296 at its PC. When not using assembler location view computation,
3297 that must be view number zero. Otherwise, ZERO_VIEW_P is allocated
3298    and the view label numbers recorded in it are the ones known to be
3299 zero. */
3300 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3301 || (N) == (var_loc_view)-1 \
3302 || (zero_view_p \
3303 && bitmap_bit_p (zero_view_p, (N))))
3304
3305 /* Return true iff we're to emit .loc directives for the assembler to
3306 generate line number sections.
3307
3308 When we're not emitting views, all we need from the assembler is
3309 support for .loc directives.
3310
3311 If we are emitting views, we can only use the assembler's .loc
3312 support if it also supports views.
3313
3314 When the compiler is emitting the line number programs and
3315 computing view numbers itself, it resets view numbers at known PC
3316 changes and counts from that, and then it emits view numbers as
3317 literal constants in locviewlists. There are cases in which the
3318 compiler is not sure about PC changes, e.g. when extra alignment is
3319 requested for a label. In these cases, the compiler may not reset
3320 the view counter, and the potential PC advance in the line number
3321 program will use an opcode that does not reset the view counter
3322 even if the PC actually changes, so that compiler and debug info
3323 consumer can keep view numbers in sync.
3324
3325 When the compiler defers view computation to the assembler, it
3326 emits symbolic view numbers in locviewlists, with the exception of
3327 views known to be zero (forced resets, or reset after
3328 compiler-visible PC changes): instead of emitting symbols for
3329 these, we emit literal zero and assert the assembler agrees with
3330 the compiler's assessment. We could use symbolic views everywhere,
3331 instead of special-casing zero views, but then we'd be unable to
3332 optimize out locviewlists that contain only zeros. */
3333
3334 static bool
3335 output_asm_line_debug_info (void)
3336 {
3337 return (dwarf2out_as_loc_support
3338 && (dwarf2out_as_locview_support
3339 || !debug_variable_location_views));
3340 }
3341
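/* Illustrative sketch, not part of the implementation: when this
   returns true and location views are enabled, the line table is built
   by the assembler from directives of roughly the form

     .loc 1 25 12 view .LVU3

   i.e. file 1, line 25, column 12, with the assembler asked to bind the
   symbolic view number .LVU3 to that row; the exact option spelling is
   the assembler's, not something fixed here.  */
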
3342 /* Minimum line offset in a special line info. opcode.
3343 This value was chosen to give a reasonable range of values. */
3344 #define DWARF_LINE_BASE -10
3345
3346 /* First special line opcode - leave room for the standard opcodes. */
3347 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3348
3349 /* Range of line offsets in a special line info. opcode. */
3350 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
3351
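/* Illustrative worked example, not part of the implementation: with
   DW_LNS_set_isa == 12 these definitions give DWARF_LINE_OPCODE_BASE == 13
   and DWARF_LINE_RANGE == 242, so a special opcode encoding a line
   advance of LD and an (operation) address advance of AD is

     opcode = (LD - DWARF_LINE_BASE) + DWARF_LINE_RANGE * AD
              + DWARF_LINE_OPCODE_BASE

   e.g. LD = 2, AD = 0 gives opcode 25; the encoding is only usable
   while the result fits in a single byte (<= 255).  */
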
3352 /* Flag that indicates the initial value of the is_stmt_start flag.
3353 In the present implementation, we do not mark any lines as
3354 the beginning of a source statement, because that information
3355 is not made available by the GCC front-end. */
3356 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3357
3358 /* Maximum number of operations per instruction bundle. */
3359 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3360 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3361 #endif
3362
3363 /* This location is used by calc_die_sizes() to keep track of
3364    the offset of each DIE within the .debug_info section.  */
3365 static unsigned long next_die_offset;
3366
3367 /* Record the root of the DIE's built for the current compilation unit. */
3368 static GTY(()) dw_die_ref single_comp_unit_die;
3369
3370 /* A list of type DIEs that have been separated into comdat sections. */
3371 static GTY(()) comdat_type_node *comdat_type_list;
3372
3373 /* A list of CU DIEs that have been separated. */
3374 static GTY(()) limbo_die_node *cu_die_list;
3375
3376 /* A list of DIEs with a NULL parent waiting to be relocated. */
3377 static GTY(()) limbo_die_node *limbo_die_list;
3378
3379 /* A list of DIEs for which we may have to generate
3380 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3381 static GTY(()) limbo_die_node *deferred_asm_name;
3382
3383 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3384 {
3385 typedef const char *compare_type;
3386
3387 static hashval_t hash (dwarf_file_data *);
3388 static bool equal (dwarf_file_data *, const char *);
3389 };
3390
3391 /* Filenames referenced by this compilation unit. */
3392 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3393
3394 struct decl_die_hasher : ggc_ptr_hash<die_node>
3395 {
3396 typedef tree compare_type;
3397
3398 static hashval_t hash (die_node *);
3399 static bool equal (die_node *, tree);
3400 };
3401 /* A hash table of references to DIE's that describe declarations.
3402 The key is a DECL_UID() which is a unique number identifying each decl. */
3403 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3404
3405 struct GTY ((for_user)) variable_value_struct {
3406 unsigned int decl_id;
3407 vec<dw_die_ref, va_gc> *dies;
3408 };
3409
3410 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3411 {
3412 typedef tree compare_type;
3413
3414 static hashval_t hash (variable_value_struct *);
3415 static bool equal (variable_value_struct *, tree);
3416 };
3417 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3418    dw_val_class_decl_ref class, indexed by FUNCTION_DECLs which are the
3419    DECL_CONTEXT of the referenced VAR_DECLs.  */
3420 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3421
3422 struct block_die_hasher : ggc_ptr_hash<die_struct>
3423 {
3424 static hashval_t hash (die_struct *);
3425 static bool equal (die_struct *, die_struct *);
3426 };
3427
3428 /* A hash table of references to DIE's that describe COMMON blocks.
3429 The key is DECL_UID() ^ die_parent. */
3430 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3431
3432 typedef struct GTY(()) die_arg_entry_struct {
3433 dw_die_ref die;
3434 tree arg;
3435 } die_arg_entry;
3436
3437
3438 /* Node of the variable location list. */
3439 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3440 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3441 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3442 in mode of the EXPR_LIST node and first EXPR_LIST operand
3443 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3444 location or NULL for padding. For larger bitsizes,
3445 mode is 0 and first operand is a CONCAT with bitsize
3446 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3447 NULL as second operand. */
3448 rtx GTY (()) loc;
3449 const char * GTY (()) label;
3450 struct var_loc_node * GTY (()) next;
3451 var_loc_view view;
3452 };
3453
3454 /* Variable location list. */
3455 struct GTY ((for_user)) var_loc_list_def {
3456 struct var_loc_node * GTY (()) first;
3457
3458   /* Pointer to the last or last-but-one element of the
3459      chained list.  If the list is empty, both first and
3460      last are NULL.  If the list contains just one node,
3461      or the last node is certainly not redundant, it points
3462      to the last node; otherwise it points to the last but one.
3463      Do not mark it for GC because it is marked through the chain.  */
3464 struct var_loc_node * GTY ((skip ("%h"))) last;
3465
3466   /* Pointer to the last element before a section switch;
3467      if NULL, either sections weren't switched or first
3468      is after the section switch.  */
3469 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3470
3471 /* DECL_UID of the variable decl. */
3472 unsigned int decl_id;
3473 };
3474 typedef struct var_loc_list_def var_loc_list;
3475
3476 /* Call argument location list. */
3477 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3478 rtx GTY (()) call_arg_loc_note;
3479 const char * GTY (()) label;
3480 tree GTY (()) block;
3481 bool tail_call_p;
3482 rtx GTY (()) symbol_ref;
3483 struct call_arg_loc_node * GTY (()) next;
3484 };
3485
3486
3487 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3488 {
3489 typedef const_tree compare_type;
3490
3491 static hashval_t hash (var_loc_list *);
3492 static bool equal (var_loc_list *, const_tree);
3493 };
3494
3495 /* Table of decl location linked lists. */
3496 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3497
3498 /* Head and tail of call_arg_loc chain. */
3499 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3500 static struct call_arg_loc_node *call_arg_loc_last;
3501
3502 /* Number of call sites in the current function. */
3503 static int call_site_count = -1;
3504 /* Number of tail call sites in the current function. */
3505 static int tail_call_site_count = -1;
3506
3507 /* A cached location list. */
3508 struct GTY ((for_user)) cached_dw_loc_list_def {
3509 /* The DECL_UID of the decl that this entry describes. */
3510 unsigned int decl_id;
3511
3512 /* The cached location list. */
3513 dw_loc_list_ref loc_list;
3514 };
3515 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3516
3517 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3518 {
3519
3520 typedef const_tree compare_type;
3521
3522 static hashval_t hash (cached_dw_loc_list *);
3523 static bool equal (cached_dw_loc_list *, const_tree);
3524 };
3525
3526 /* Table of cached location lists. */
3527 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3528
3529 /* A vector of references to DIE's that are uniquely identified by their tag,
3530 presence/absence of children DIE's, and list of attribute/value pairs. */
3531 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3532
3533 /* A hash map to remember the stack usage for DWARF procedures. The value
3534    stored is the stack size difference between just before the DWARF procedure
3535    is invoked and just after it returns.  In other words, for a DWARF procedure
3536 that consumes N stack slots and that pushes M ones, this stores M - N. */
3537 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
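/* For instance, a DWARF procedure that pops its two arguments and pushes a
   single result is recorded in this map with the value 1 - 2 = -1.  */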
3538
3539 /* A global counter for generating labels for line number data. */
3540 static unsigned int line_info_label_num;
3541
3542 /* The current table to which we should emit line number information
3543 for the current function. This will be set up at the beginning of
3544 assembly for the function. */
3545 static GTY(()) dw_line_info_table *cur_line_info_table;
3546
3547 /* The two default tables of line number info. */
3548 static GTY(()) dw_line_info_table *text_section_line_info;
3549 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3550
3551 /* The set of all non-default tables of line number info. */
3552 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3553
3554 /* A flag to tell the pubnames/pubtypes export whether there is an info
3555 section to refer to. */
3556 static bool info_section_emitted;
3557
3558 /* A pointer to the base of a table that contains a list of publicly
3559 accessible names. */
3560 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3561
3562 /* A pointer to the base of a table that contains a list of publicly
3563 accessible types. */
3564 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3565
3566 /* A pointer to the base of a table that contains a list of macro
3567 defines/undefines (and file start/end markers). */
3568 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3569
3570 /* True if the .debug_macinfo or .debug_macro section is going to be
3571 emitted. */
3572 #define have_macinfo \
3573 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3574 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3575 && !macinfo_table->is_empty ())
3576
3577 /* Vector of dies for which we should generate .debug_ranges info. */
3578 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3579
3580 /* Vector of pairs of labels referenced in ranges_table. */
3581 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3582
3583 /* Whether we have location lists that need outputting. */
3584 static GTY(()) bool have_location_lists;
3585
3586 /* Unique label counter. */
3587 static GTY(()) unsigned int loclabel_num;
3588
3589 /* Unique label counter for point-of-call tables. */
3590 static GTY(()) unsigned int poc_label_num;
3591
3592 /* The last file entry emitted by maybe_emit_file(). */
3593 static GTY(()) struct dwarf_file_data * last_emitted_file;
3594
3595 /* Number of internal labels generated by gen_internal_sym(). */
3596 static GTY(()) int label_num;
3597
3598 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3599
3600 /* Instances of generic types for which we need to generate debug
3601 info that describes their generic parameters and arguments. That
3602 generation needs to happen once all types are properly laid out, so
3603 we do it at the end of compilation. */
3604 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3605
3606 /* Offset from the "steady-state frame pointer" to the frame base,
3607 within the current function. */
3608 static poly_int64 frame_pointer_fb_offset;
3609 static bool frame_pointer_fb_offset_valid;
3610
3611 static vec<dw_die_ref> base_types;
3612
3613 /* Flags to represent a set of attribute classes for attributes that represent
3614 a scalar value (bounds, pointers, ...). */
3615 enum dw_scalar_form
3616 {
3617 dw_scalar_form_constant = 0x01,
3618 dw_scalar_form_exprloc = 0x02,
3619 dw_scalar_form_reference = 0x04
3620 };
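/* These flags are combined as a bitmask; for example, a caller willing to
   accept either a constant or a DWARF expression would pass
   dw_scalar_form_constant | dw_scalar_form_exprloc.  */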
3621
3622 /* Forward declarations for functions defined in this file. */
3623
3624 static int is_pseudo_reg (const_rtx);
3625 static tree type_main_variant (tree);
3626 static int is_tagged_type (const_tree);
3627 static const char *dwarf_tag_name (unsigned);
3628 static const char *dwarf_attr_name (unsigned);
3629 static const char *dwarf_form_name (unsigned);
3630 static tree decl_ultimate_origin (const_tree);
3631 static tree decl_class_context (tree);
3632 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3633 static inline enum dw_val_class AT_class (dw_attr_node *);
3634 static inline unsigned int AT_index (dw_attr_node *);
3635 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3636 static inline unsigned AT_flag (dw_attr_node *);
3637 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3638 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3639 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3640 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3641 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3642 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3643 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3644 unsigned int, unsigned char *);
3645 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3646 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3647 static inline const char *AT_string (dw_attr_node *);
3648 static enum dwarf_form AT_string_form (dw_attr_node *);
3649 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3650 static void add_AT_specification (dw_die_ref, dw_die_ref);
3651 static inline dw_die_ref AT_ref (dw_attr_node *);
3652 static inline int AT_ref_external (dw_attr_node *);
3653 static inline void set_AT_ref_external (dw_attr_node *, int);
3654 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3655 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3656 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3657 dw_loc_list_ref);
3658 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3659 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3660 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3661 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3662 static void remove_addr_table_entry (addr_table_entry *);
3663 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3664 static inline rtx AT_addr (dw_attr_node *);
3665 static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *);
3666 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3667 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3668 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3669 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3670 unsigned long, bool);
3671 static inline const char *AT_lbl (dw_attr_node *);
3672 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3673 static const char *get_AT_low_pc (dw_die_ref);
3674 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3675 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3676 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3677 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3678 static bool is_c (void);
3679 static bool is_cxx (void);
3680 static bool is_cxx (const_tree);
3681 static bool is_fortran (void);
3682 static bool is_ada (void);
3683 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3684 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3685 static void add_child_die (dw_die_ref, dw_die_ref);
3686 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3687 static dw_die_ref lookup_type_die (tree);
3688 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3689 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3690 static void equate_type_number_to_die (tree, dw_die_ref);
3691 static dw_die_ref lookup_decl_die (tree);
3692 static var_loc_list *lookup_decl_loc (const_tree);
3693 static void equate_decl_number_to_die (tree, dw_die_ref);
3694 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3695 static void print_spaces (FILE *);
3696 static void print_die (dw_die_ref, FILE *);
3697 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3698 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3699 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3700 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3701 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3702 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3703 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3704 struct md5_ctx *, int *);
3705 struct checksum_attributes;
3706 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3707 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3708 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3709 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3710 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3711 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3712 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3713 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3714 static int is_type_die (dw_die_ref);
3715 static inline bool is_template_instantiation (dw_die_ref);
3716 static int is_declaration_die (dw_die_ref);
3717 static int should_move_die_to_comdat (dw_die_ref);
3718 static dw_die_ref clone_as_declaration (dw_die_ref);
3719 static dw_die_ref clone_die (dw_die_ref);
3720 static dw_die_ref clone_tree (dw_die_ref);
3721 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3722 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3723 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3724 static dw_die_ref generate_skeleton (dw_die_ref);
3725 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3726 dw_die_ref,
3727 dw_die_ref);
3728 static void break_out_comdat_types (dw_die_ref);
3729 static void copy_decls_for_unworthy_types (dw_die_ref);
3730
3731 static void add_sibling_attributes (dw_die_ref);
3732 static void output_location_lists (dw_die_ref);
3733 static int constant_size (unsigned HOST_WIDE_INT);
3734 static unsigned long size_of_die (dw_die_ref);
3735 static void calc_die_sizes (dw_die_ref);
3736 static void calc_base_type_die_sizes (void);
3737 static void mark_dies (dw_die_ref);
3738 static void unmark_dies (dw_die_ref);
3739 static void unmark_all_dies (dw_die_ref);
3740 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3741 static unsigned long size_of_aranges (void);
3742 static enum dwarf_form value_format (dw_attr_node *);
3743 static void output_value_format (dw_attr_node *);
3744 static void output_abbrev_section (void);
3745 static void output_die_abbrevs (unsigned long, dw_die_ref);
3746 static void output_die (dw_die_ref);
3747 static void output_compilation_unit_header (enum dwarf_unit_type);
3748 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3749 static void output_comdat_type_unit (comdat_type_node *);
3750 static const char *dwarf2_name (tree, int);
3751 static void add_pubname (tree, dw_die_ref);
3752 static void add_enumerator_pubname (const char *, dw_die_ref);
3753 static void add_pubname_string (const char *, dw_die_ref);
3754 static void add_pubtype (tree, dw_die_ref);
3755 static void output_pubnames (vec<pubname_entry, va_gc> *);
3756 static void output_aranges (void);
3757 static unsigned int add_ranges (const_tree, bool = false);
3758 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3759 bool *, bool);
3760 static void output_ranges (void);
3761 static dw_line_info_table *new_line_info_table (void);
3762 static void output_line_info (bool);
3763 static void output_file_names (void);
3764 static dw_die_ref base_type_die (tree, bool);
3765 static int is_base_type (tree);
3766 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3767 static int decl_quals (const_tree);
3768 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3769 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3770 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3771 static unsigned int dbx_reg_number (const_rtx);
3772 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3773 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3774 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3775 enum var_init_status);
3776 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3777 enum var_init_status);
3778 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3779 enum var_init_status);
3780 static int is_based_loc (const_rtx);
3781 static bool resolve_one_addr (rtx *);
3782 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3783 enum var_init_status);
3784 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3785 enum var_init_status);
3786 struct loc_descr_context;
3787 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3788 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3789 static dw_loc_list_ref loc_list_from_tree (tree, int,
3790 struct loc_descr_context *);
3791 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3792 struct loc_descr_context *);
3793 static tree field_type (const_tree);
3794 static unsigned int simple_type_align_in_bits (const_tree);
3795 static unsigned int simple_decl_align_in_bits (const_tree);
3796 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3797 struct vlr_context;
3798 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3799 HOST_WIDE_INT *);
3800 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3801 dw_loc_list_ref);
3802 static void add_data_member_location_attribute (dw_die_ref, tree,
3803 struct vlr_context *);
3804 static bool add_const_value_attribute (dw_die_ref, rtx);
3805 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3806 static void insert_wide_int (const wide_int &, unsigned char *, int);
3807 static void insert_float (const_rtx, unsigned char *);
3808 static rtx rtl_for_decl_location (tree);
3809 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3810 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3811 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3812 static void add_name_attribute (dw_die_ref, const char *);
3813 static void add_desc_attribute (dw_die_ref, tree);
3814 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3815 static void add_comp_dir_attribute (dw_die_ref);
3816 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3817 struct loc_descr_context *);
3818 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3819 struct loc_descr_context *);
3820 static void add_subscript_info (dw_die_ref, tree, bool);
3821 static void add_byte_size_attribute (dw_die_ref, tree);
3822 static void add_alignment_attribute (dw_die_ref, tree);
3823 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3824 struct vlr_context *);
3825 static void add_bit_size_attribute (dw_die_ref, tree);
3826 static void add_prototyped_attribute (dw_die_ref, tree);
3827 static void add_abstract_origin_attribute (dw_die_ref, tree);
3828 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3829 static void add_src_coords_attributes (dw_die_ref, tree);
3830 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3831 static void add_discr_value (dw_die_ref, dw_discr_value *);
3832 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3833 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3834 static dw_die_ref scope_die_for (tree, dw_die_ref);
3835 static inline int local_scope_p (dw_die_ref);
3836 static inline int class_scope_p (dw_die_ref);
3837 static inline int class_or_namespace_scope_p (dw_die_ref);
3838 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3839 static void add_calling_convention_attribute (dw_die_ref, tree);
3840 static const char *type_tag (const_tree);
3841 static tree member_declared_type (const_tree);
3842 #if 0
3843 static const char *decl_start_label (tree);
3844 #endif
3845 static void gen_array_type_die (tree, dw_die_ref);
3846 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3847 #if 0
3848 static void gen_entry_point_die (tree, dw_die_ref);
3849 #endif
3850 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3851 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3852 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3853 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3854 static void gen_formal_types_die (tree, dw_die_ref);
3855 static void gen_subprogram_die (tree, dw_die_ref);
3856 static void gen_variable_die (tree, tree, dw_die_ref);
3857 static void gen_const_die (tree, dw_die_ref);
3858 static void gen_label_die (tree, dw_die_ref);
3859 static void gen_lexical_block_die (tree, dw_die_ref);
3860 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3861 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3862 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3863 static dw_die_ref gen_compile_unit_die (const char *);
3864 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3865 static void gen_member_die (tree, dw_die_ref);
3866 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3867 enum debug_info_usage);
3868 static void gen_subroutine_type_die (tree, dw_die_ref);
3869 static void gen_typedef_die (tree, dw_die_ref);
3870 static void gen_type_die (tree, dw_die_ref);
3871 static void gen_block_die (tree, dw_die_ref);
3872 static void decls_for_scope (tree, dw_die_ref, bool = true);
3873 static bool is_naming_typedef_decl (const_tree);
3874 static inline dw_die_ref get_context_die (tree);
3875 static void gen_namespace_die (tree, dw_die_ref);
3876 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3877 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3878 static dw_die_ref force_decl_die (tree);
3879 static dw_die_ref force_type_die (tree);
3880 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3881 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3882 static struct dwarf_file_data * lookup_filename (const char *);
3883 static void retry_incomplete_types (void);
3884 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3885 static void gen_generic_params_dies (tree);
3886 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3887 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3888 static void splice_child_die (dw_die_ref, dw_die_ref);
3889 static int file_info_cmp (const void *, const void *);
3890 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3891 const char *, var_loc_view, const char *);
3892 static void output_loc_list (dw_loc_list_ref);
3893 static char *gen_internal_sym (const char *);
3894 static bool want_pubnames (void);
3895
3896 static void prune_unmark_dies (dw_die_ref);
3897 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3898 static void prune_unused_types_mark (dw_die_ref, int);
3899 static void prune_unused_types_walk (dw_die_ref);
3900 static void prune_unused_types_walk_attribs (dw_die_ref);
3901 static void prune_unused_types_prune (dw_die_ref);
3902 static void prune_unused_types (void);
3903 static int maybe_emit_file (struct dwarf_file_data *fd);
3904 static inline const char *AT_vms_delta1 (dw_attr_node *);
3905 static inline const char *AT_vms_delta2 (dw_attr_node *);
3906 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3907 static void gen_remaining_tmpl_value_param_die_attribute (void);
3908 static bool generic_type_p (tree);
3909 static void schedule_generic_params_dies_gen (tree t);
3910 static void gen_scheduled_generic_parms_dies (void);
3911 static void resolve_variable_values (void);
3912
3913 static const char *comp_dir_string (void);
3914
3915 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3916
3917 /* enum for tracking thread-local variables whose address is really an offset
3918 relative to the TLS pointer, which will need link-time relocation, but will
3919 not need relocation by the DWARF consumer. */
3920
3921 enum dtprel_bool
3922 {
3923 dtprel_false = 0,
3924 dtprel_true = 1
3925 };
3926
3927 /* Return the operator to use for an address of a variable. For dtprel_true, we
3928 use DW_OP_const*. For regular variables, which need both link-time
3929 relocation and consumer-level relocation (e.g., to account for shared objects
3930 loaded at a random address), we use DW_OP_addr*. */
3931
3932 static inline enum dwarf_location_atom
3933 dw_addr_op (enum dtprel_bool dtprel)
3934 {
3935 if (dtprel == dtprel_true)
3936 return (dwarf_split_debug_info ? dwarf_OP (DW_OP_constx)
3937 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3938 else
3939 return dwarf_split_debug_info ? dwarf_OP (DW_OP_addrx) : DW_OP_addr;
3940 }
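/* For instance, on a target with 8-byte addresses and without
   -gsplit-dwarf, a dtprel (TLS offset) address is emitted with
   DW_OP_const8u and a regular variable address with DW_OP_addr; with
   -gsplit-dwarf both become the indexed forms DW_OP_constx and
   DW_OP_addrx (or their GNU equivalents before DWARF 5).  */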
3941
3942 /* Return a pointer to a newly allocated address location description. If
3943 dwarf_split_debug_info is true, then record the address with the appropriate
3944 relocation. */
3945 static inline dw_loc_descr_ref
3946 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3947 {
3948 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3949
3950 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3951 ref->dw_loc_oprnd1.v.val_addr = addr;
3952 ref->dtprel = dtprel;
3953 if (dwarf_split_debug_info)
3954 ref->dw_loc_oprnd1.val_entry
3955 = add_addr_table_entry (addr,
3956 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3957 else
3958 ref->dw_loc_oprnd1.val_entry = NULL;
3959
3960 return ref;
3961 }
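/* As a minimal sketch of its use, a caller describing the address of a
   hypothetical symbol "foo" might do

     rtx sym = gen_rtx_SYMBOL_REF (Pmode, "foo");
     dw_loc_descr_ref ref = new_addr_loc_descr (sym, dtprel_false);

   which yields a DW_OP_addr operation (or DW_OP_addrx plus a .debug_addr
   entry when dwarf_split_debug_info is set) whose operand is the address
   of "foo".  */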
3962
3963 /* Section names used to hold DWARF debugging information. */
3964
3965 #ifndef DEBUG_INFO_SECTION
3966 #define DEBUG_INFO_SECTION ".debug_info"
3967 #endif
3968 #ifndef DEBUG_DWO_INFO_SECTION
3969 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3970 #endif
3971 #ifndef DEBUG_LTO_INFO_SECTION
3972 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3973 #endif
3974 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3975 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3976 #endif
3977 #ifndef DEBUG_ABBREV_SECTION
3978 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3979 #endif
3980 #ifndef DEBUG_LTO_ABBREV_SECTION
3981 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3982 #endif
3983 #ifndef DEBUG_DWO_ABBREV_SECTION
3984 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3985 #endif
3986 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3987 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3988 #endif
3989 #ifndef DEBUG_ARANGES_SECTION
3990 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3991 #endif
3992 #ifndef DEBUG_ADDR_SECTION
3993 #define DEBUG_ADDR_SECTION ".debug_addr"
3994 #endif
3995 #ifndef DEBUG_MACINFO_SECTION
3996 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
3997 #endif
3998 #ifndef DEBUG_LTO_MACINFO_SECTION
3999 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
4000 #endif
4001 #ifndef DEBUG_DWO_MACINFO_SECTION
4002 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
4003 #endif
4004 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
4005 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
4006 #endif
4007 #ifndef DEBUG_MACRO_SECTION
4008 #define DEBUG_MACRO_SECTION ".debug_macro"
4009 #endif
4010 #ifndef DEBUG_LTO_MACRO_SECTION
4011 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
4012 #endif
4013 #ifndef DEBUG_DWO_MACRO_SECTION
4014 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
4015 #endif
4016 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
4017 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
4018 #endif
4019 #ifndef DEBUG_LINE_SECTION
4020 #define DEBUG_LINE_SECTION ".debug_line"
4021 #endif
4022 #ifndef DEBUG_LTO_LINE_SECTION
4023 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
4024 #endif
4025 #ifndef DEBUG_DWO_LINE_SECTION
4026 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
4027 #endif
4028 #ifndef DEBUG_LTO_DWO_LINE_SECTION
4029 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
4030 #endif
4031 #ifndef DEBUG_LOC_SECTION
4032 #define DEBUG_LOC_SECTION ".debug_loc"
4033 #endif
4034 #ifndef DEBUG_DWO_LOC_SECTION
4035 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
4036 #endif
4037 #ifndef DEBUG_LOCLISTS_SECTION
4038 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
4039 #endif
4040 #ifndef DEBUG_DWO_LOCLISTS_SECTION
4041 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
4042 #endif
4043 #ifndef DEBUG_PUBNAMES_SECTION
4044 #define DEBUG_PUBNAMES_SECTION \
4045 ((debug_generate_pub_sections == 2) \
4046 ? ".debug_gnu_pubnames" : ".debug_pubnames")
4047 #endif
4048 #ifndef DEBUG_PUBTYPES_SECTION
4049 #define DEBUG_PUBTYPES_SECTION \
4050 ((debug_generate_pub_sections == 2) \
4051 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4052 #endif
4053 #ifndef DEBUG_STR_OFFSETS_SECTION
4054 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4055 #endif
4056 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4057 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4058 #endif
4059 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4060 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4061 #endif
4062 #ifndef DEBUG_STR_SECTION
4063 #define DEBUG_STR_SECTION ".debug_str"
4064 #endif
4065 #ifndef DEBUG_LTO_STR_SECTION
4066 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4067 #endif
4068 #ifndef DEBUG_STR_DWO_SECTION
4069 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4070 #endif
4071 #ifndef DEBUG_LTO_STR_DWO_SECTION
4072 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4073 #endif
4074 #ifndef DEBUG_RANGES_SECTION
4075 #define DEBUG_RANGES_SECTION ".debug_ranges"
4076 #endif
4077 #ifndef DEBUG_RNGLISTS_SECTION
4078 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4079 #endif
4080 #ifndef DEBUG_LINE_STR_SECTION
4081 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4082 #endif
4083 #ifndef DEBUG_LTO_LINE_STR_SECTION
4084 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4085 #endif
4086
4087 /* Standard ELF section names for compiled code and data. */
4088 #ifndef TEXT_SECTION_NAME
4089 #define TEXT_SECTION_NAME ".text"
4090 #endif
4091
4092 /* Section flags for .debug_str section. */
4093 #define DEBUG_STR_SECTION_FLAGS \
4094 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4095 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4096 : SECTION_DEBUG)
4097
4098 /* Section flags for .debug_str.dwo section. */
4099 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4100
4101 /* Attribute used to refer to the macro section. */
4102 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4103 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
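/* In other words: DWARF 5 or later uses the standard DW_AT_macros; with
   -gstrict-dwarf on an earlier version we fall back to DW_AT_macro_info;
   otherwise the GNU extension DW_AT_GNU_macros is used.  */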
4104
4105 /* Labels we insert at the beginning of sections so we can reference
4106 them instead of the section names themselves. */
4107
4108 #ifndef TEXT_SECTION_LABEL
4109 #define TEXT_SECTION_LABEL "Ltext"
4110 #endif
4111 #ifndef COLD_TEXT_SECTION_LABEL
4112 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4113 #endif
4114 #ifndef DEBUG_LINE_SECTION_LABEL
4115 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4116 #endif
4117 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4118 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4119 #endif
4120 #ifndef DEBUG_INFO_SECTION_LABEL
4121 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4122 #endif
4123 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4124 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4125 #endif
4126 #ifndef DEBUG_ABBREV_SECTION_LABEL
4127 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4128 #endif
4129 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4130 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4131 #endif
4132 #ifndef DEBUG_ADDR_SECTION_LABEL
4133 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4134 #endif
4135 #ifndef DEBUG_LOC_SECTION_LABEL
4136 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4137 #endif
4138 #ifndef DEBUG_RANGES_SECTION_LABEL
4139 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4140 #endif
4141 #ifndef DEBUG_MACINFO_SECTION_LABEL
4142 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4143 #endif
4144 #ifndef DEBUG_MACRO_SECTION_LABEL
4145 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4146 #endif
4147 #define SKELETON_COMP_DIE_ABBREV 1
4148 #define SKELETON_TYPE_DIE_ABBREV 2
4149
4150 /* Definitions of defaults for formats and names of various special
4151 (artificial) labels which may be generated within this file (when the -g
4152 option is used and DWARF2_DEBUGGING_INFO is in effect).
4153 If necessary, these may be overridden from within the tm.h file, but
4154 typically, overriding these defaults is unnecessary. */
4155
4156 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4157 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4158 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4159 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4160 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4161 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4162 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4163 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4164 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4165 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4166 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4167 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4168 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4169 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4170 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4171
4172 #ifndef TEXT_END_LABEL
4173 #define TEXT_END_LABEL "Letext"
4174 #endif
4175 #ifndef COLD_END_LABEL
4176 #define COLD_END_LABEL "Letext_cold"
4177 #endif
4178 #ifndef BLOCK_BEGIN_LABEL
4179 #define BLOCK_BEGIN_LABEL "LBB"
4180 #endif
4181 #ifndef BLOCK_INLINE_ENTRY_LABEL
4182 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4183 #endif
4184 #ifndef BLOCK_END_LABEL
4185 #define BLOCK_END_LABEL "LBE"
4186 #endif
4187 #ifndef LINE_CODE_LABEL
4188 #define LINE_CODE_LABEL "LM"
4189 #endif
4190
4191 \f
4192 /* Return the root of the DIE's built for the current compilation unit. */
4193 static dw_die_ref
4194 comp_unit_die (void)
4195 {
4196 if (!single_comp_unit_die)
4197 single_comp_unit_die = gen_compile_unit_die (NULL);
4198 return single_comp_unit_die;
4199 }
4200
4201 /* We allow a language front-end to designate a function that is to be
4202 called to "demangle" any name before it is put into a DIE. */
4203
4204 static const char *(*demangle_name_func) (const char *);
4205
4206 void
4207 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4208 {
4209 demangle_name_func = func;
4210 }
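/* A language front end can install its own hook, typically from its
   debug-hooks initialization code.  A minimal sketch, where
   my_lang_demangle is a hypothetical front-end routine that returns a
   prettified form of NAME (here it simply returns NAME unchanged):

     static const char *
     my_lang_demangle (const char *name)
     {
       return name;
     }

     dwarf2out_set_demangle_name_func (my_lang_demangle);
*/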
4211
4212 /* Test if rtl node points to a pseudo register. */
4213
4214 static inline int
4215 is_pseudo_reg (const_rtx rtl)
4216 {
4217 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4218 || (GET_CODE (rtl) == SUBREG
4219 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4220 }
4221
4222 /* Return a reference to a type, with its const and volatile qualifiers
4223 removed. */
4224
4225 static inline tree
4226 type_main_variant (tree type)
4227 {
4228 type = TYPE_MAIN_VARIANT (type);
4229
4230 /* ??? There really should be only one main variant among any group of
4231 variants of a given type (and all of the MAIN_VARIANT values for all
4232 members of the group should point to that one type) but sometimes the C
4233 front-end messes this up for array types, so we work around that bug
4234 here. */
4235 if (TREE_CODE (type) == ARRAY_TYPE)
4236 while (type != TYPE_MAIN_VARIANT (type))
4237 type = TYPE_MAIN_VARIANT (type);
4238
4239 return type;
4240 }
4241
4242 /* Return nonzero if the given type node represents a tagged type. */
4243
4244 static inline int
4245 is_tagged_type (const_tree type)
4246 {
4247 enum tree_code code = TREE_CODE (type);
4248
4249 return (code == RECORD_TYPE || code == UNION_TYPE
4250 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4251 }
4252
4253 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4254
4255 static void
4256 get_ref_die_offset_label (char *label, dw_die_ref ref)
4257 {
4258 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4259 }
4260
4261 /* Return die_offset of a DIE reference to a base type. */
4262
4263 static unsigned long int
4264 get_base_type_offset (dw_die_ref ref)
4265 {
4266 if (ref->die_offset)
4267 return ref->die_offset;
4268 if (comp_unit_die ()->die_abbrev)
4269 {
4270 calc_base_type_die_sizes ();
4271 gcc_assert (ref->die_offset);
4272 }
4273 return ref->die_offset;
4274 }
4275
4276 /* Return die_offset of a DIE reference other than base type. */
4277
4278 static unsigned long int
4279 get_ref_die_offset (dw_die_ref ref)
4280 {
4281 gcc_assert (ref->die_offset);
4282 return ref->die_offset;
4283 }
4284
4285 /* Convert a DIE tag into its string name. */
4286
4287 static const char *
4288 dwarf_tag_name (unsigned int tag)
4289 {
4290 const char *name = get_DW_TAG_name (tag);
4291
4292 if (name != NULL)
4293 return name;
4294
4295 return "DW_TAG_<unknown>";
4296 }
4297
4298 /* Convert a DWARF attribute code into its string name. */
4299
4300 static const char *
4301 dwarf_attr_name (unsigned int attr)
4302 {
4303 const char *name;
4304
4305 switch (attr)
4306 {
4307 #if VMS_DEBUGGING_INFO
4308 case DW_AT_HP_prologue:
4309 return "DW_AT_HP_prologue";
4310 #else
4311 case DW_AT_MIPS_loop_unroll_factor:
4312 return "DW_AT_MIPS_loop_unroll_factor";
4313 #endif
4314
4315 #if VMS_DEBUGGING_INFO
4316 case DW_AT_HP_epilogue:
4317 return "DW_AT_HP_epilogue";
4318 #else
4319 case DW_AT_MIPS_stride:
4320 return "DW_AT_MIPS_stride";
4321 #endif
4322 }
4323
4324 name = get_DW_AT_name (attr);
4325
4326 if (name != NULL)
4327 return name;
4328
4329 return "DW_AT_<unknown>";
4330 }
4331
4332 /* Convert a DWARF value form code into its string name. */
4333
4334 static const char *
4335 dwarf_form_name (unsigned int form)
4336 {
4337 const char *name = get_DW_FORM_name (form);
4338
4339 if (name != NULL)
4340 return name;
4341
4342 return "DW_FORM_<unknown>";
4343 }
4344 \f
4345 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4346 instance of an inlined instance of a decl which is local to an inline
4347 function, so we have to trace all of the way back through the origin chain
4348 to find out what sort of node actually served as the original seed for the
4349 given block. */
4350
4351 static tree
4352 decl_ultimate_origin (const_tree decl)
4353 {
4354 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4355 return NULL_TREE;
4356
4357 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4358 we're trying to output the abstract instance of this function. */
4359 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4360 return NULL_TREE;
4361
4362 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4363 most distant ancestor, this should never happen. */
4364 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4365
4366 return DECL_ABSTRACT_ORIGIN (decl);
4367 }
4368
4369 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4370 of a virtual function may refer to a base class, so we check the 'this'
4371 parameter. */
4372
4373 static tree
4374 decl_class_context (tree decl)
4375 {
4376 tree context = NULL_TREE;
4377
4378 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4379 context = DECL_CONTEXT (decl);
4380 else
4381 context = TYPE_MAIN_VARIANT
4382 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4383
4384 if (context && !TYPE_P (context))
4385 context = NULL_TREE;
4386
4387 return context;
4388 }
4389 \f
4390 /* Add an attribute/value pair to a DIE. */
4391
4392 static inline void
4393 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4394 {
4395 /* Maybe this should be an assert? */
4396 if (die == NULL)
4397 return;
4398
4399 if (flag_checking)
4400 {
4401 /* Check we do not add duplicate attrs. Can't use get_AT here
4402 because that recurses to the specification/abstract origin DIE. */
4403 dw_attr_node *a;
4404 unsigned ix;
4405 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4406 gcc_assert (a->dw_attr != attr->dw_attr);
4407 }
4408
4409 vec_safe_reserve (die->die_attr, 1);
4410 vec_safe_push (die->die_attr, *attr);
4411 }
4412
4413 static inline enum dw_val_class
4414 AT_class (dw_attr_node *a)
4415 {
4416 return a->dw_attr_val.val_class;
4417 }
4418
4419 /* Return the index for any attribute that will be referenced with a
4420 DW_FORM_addrx/GNU_addr_index or DW_FORM_strx/GNU_str_index. String
4421 indices are stored in dw_attr_val.v.val_str for reference counting
4422 pruning. */
4423
4424 static inline unsigned int
4425 AT_index (dw_attr_node *a)
4426 {
4427 if (AT_class (a) == dw_val_class_str)
4428 return a->dw_attr_val.v.val_str->index;
4429 else if (a->dw_attr_val.val_entry != NULL)
4430 return a->dw_attr_val.val_entry->index;
4431 return NOT_INDEXED;
4432 }
4433
4434 /* Add a flag value attribute to a DIE. */
4435
4436 static inline void
4437 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4438 {
4439 dw_attr_node attr;
4440
4441 attr.dw_attr = attr_kind;
4442 attr.dw_attr_val.val_class = dw_val_class_flag;
4443 attr.dw_attr_val.val_entry = NULL;
4444 attr.dw_attr_val.v.val_flag = flag;
4445 add_dwarf_attr (die, &attr);
4446 }
4447
4448 static inline unsigned
4449 AT_flag (dw_attr_node *a)
4450 {
4451 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4452 return a->dw_attr_val.v.val_flag;
4453 }
4454
4455 /* Add a signed integer attribute value to a DIE. */
4456
4457 static inline void
4458 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4459 {
4460 dw_attr_node attr;
4461
4462 attr.dw_attr = attr_kind;
4463 attr.dw_attr_val.val_class = dw_val_class_const;
4464 attr.dw_attr_val.val_entry = NULL;
4465 attr.dw_attr_val.v.val_int = int_val;
4466 add_dwarf_attr (die, &attr);
4467 }
4468
4469 static inline HOST_WIDE_INT
4470 AT_int (dw_attr_node *a)
4471 {
4472 gcc_assert (a && (AT_class (a) == dw_val_class_const
4473 || AT_class (a) == dw_val_class_const_implicit));
4474 return a->dw_attr_val.v.val_int;
4475 }
4476
4477 /* Add an unsigned integer attribute value to a DIE. */
4478
4479 static inline void
4480 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4481 unsigned HOST_WIDE_INT unsigned_val)
4482 {
4483 dw_attr_node attr;
4484
4485 attr.dw_attr = attr_kind;
4486 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4487 attr.dw_attr_val.val_entry = NULL;
4488 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4489 add_dwarf_attr (die, &attr);
4490 }
4491
4492 static inline unsigned HOST_WIDE_INT
4493 AT_unsigned (dw_attr_node *a)
4494 {
4495 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4496 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4497 return a->dw_attr_val.v.val_unsigned;
4498 }
4499
4500 /* Add an unsigned wide integer attribute value to a DIE. */
4501
4502 static inline void
4503 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4504 const wide_int& w)
4505 {
4506 dw_attr_node attr;
4507
4508 attr.dw_attr = attr_kind;
4509 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4510 attr.dw_attr_val.val_entry = NULL;
4511 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4512 *attr.dw_attr_val.v.val_wide = w;
4513 add_dwarf_attr (die, &attr);
4514 }
4515
4516 /* Add an unsigned double integer attribute value to a DIE. */
4517
4518 static inline void
4519 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4520 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4521 {
4522 dw_attr_node attr;
4523
4524 attr.dw_attr = attr_kind;
4525 attr.dw_attr_val.val_class = dw_val_class_const_double;
4526 attr.dw_attr_val.val_entry = NULL;
4527 attr.dw_attr_val.v.val_double.high = high;
4528 attr.dw_attr_val.v.val_double.low = low;
4529 add_dwarf_attr (die, &attr);
4530 }
4531
4532 /* Add a vector of data (for instance a floating point constant) as an attribute value to a DIE. */
4533
4534 static inline void
4535 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4536 unsigned int length, unsigned int elt_size, unsigned char *array)
4537 {
4538 dw_attr_node attr;
4539
4540 attr.dw_attr = attr_kind;
4541 attr.dw_attr_val.val_class = dw_val_class_vec;
4542 attr.dw_attr_val.val_entry = NULL;
4543 attr.dw_attr_val.v.val_vec.length = length;
4544 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4545 attr.dw_attr_val.v.val_vec.array = array;
4546 add_dwarf_attr (die, &attr);
4547 }
4548
4549 /* Add an 8-byte data attribute value to a DIE. */
4550
4551 static inline void
4552 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4553 unsigned char data8[8])
4554 {
4555 dw_attr_node attr;
4556
4557 attr.dw_attr = attr_kind;
4558 attr.dw_attr_val.val_class = dw_val_class_data8;
4559 attr.dw_attr_val.val_entry = NULL;
4560 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4561 add_dwarf_attr (die, &attr);
4562 }
4563
4564 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4565 dwarf_split_debug_info, address attributes in dies destined for the
4566 final executable have force_direct set to avoid using indexed
4567 references. */
4568
4569 static inline void
4570 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4571 bool force_direct)
4572 {
4573 dw_attr_node attr;
4574 char * lbl_id;
4575
4576 lbl_id = xstrdup (lbl_low);
4577 attr.dw_attr = DW_AT_low_pc;
4578 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4579 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4580 if (dwarf_split_debug_info && !force_direct)
4581 attr.dw_attr_val.val_entry
4582 = add_addr_table_entry (lbl_id, ate_kind_label);
4583 else
4584 attr.dw_attr_val.val_entry = NULL;
4585 add_dwarf_attr (die, &attr);
4586
4587 attr.dw_attr = DW_AT_high_pc;
4588 if (dwarf_version < 4)
4589 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4590 else
4591 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4592 lbl_id = xstrdup (lbl_high);
4593 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4594 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4595 && dwarf_split_debug_info && !force_direct)
4596 attr.dw_attr_val.val_entry
4597 = add_addr_table_entry (lbl_id, ate_kind_label);
4598 else
4599 attr.dw_attr_val.val_entry = NULL;
4600 add_dwarf_attr (die, &attr);
4601 }
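/* For example, dwarf2out_finish gives the compilation unit DIE its code
   range with roughly

     add_AT_low_high_pc (comp_unit_die (), text_section_label,
                         text_end_label, true);

   passing force_direct as true so that, under -gsplit-dwarf, these
   addresses stay out of the .debug_addr index, as required for DIEs that
   end up in the final executable.  */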
4602
4603 /* Hash and equality functions for debug_str_hash. */
4604
4605 hashval_t
4606 indirect_string_hasher::hash (indirect_string_node *x)
4607 {
4608 return htab_hash_string (x->str);
4609 }
4610
4611 bool
4612 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4613 {
4614 return strcmp (x1->str, x2) == 0;
4615 }
4616
4617 /* Add STR to the given string hash table. */
4618
4619 static struct indirect_string_node *
4620 find_AT_string_in_table (const char *str,
4621 hash_table<indirect_string_hasher> *table,
4622 enum insert_option insert = INSERT)
4623 {
4624 struct indirect_string_node *node;
4625
4626 indirect_string_node **slot
4627 = table->find_slot_with_hash (str, htab_hash_string (str), insert);
4628 if (*slot == NULL)
4629 {
4630 node = ggc_cleared_alloc<indirect_string_node> ();
4631 node->str = ggc_strdup (str);
4632 *slot = node;
4633 }
4634 else
4635 node = *slot;
4636
4637 node->refcount++;
4638 return node;
4639 }
4640
4641 /* Add STR to the indirect string hash table. */
4642
4643 static struct indirect_string_node *
4644 find_AT_string (const char *str, enum insert_option insert = INSERT)
4645 {
4646 if (! debug_str_hash)
4647 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4648
4649 return find_AT_string_in_table (str, debug_str_hash, insert);
4650 }
4651
4652 /* Add a string attribute value to a DIE. */
4653
4654 static inline void
4655 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4656 {
4657 dw_attr_node attr;
4658 struct indirect_string_node *node;
4659
4660 node = find_AT_string (str);
4661
4662 attr.dw_attr = attr_kind;
4663 attr.dw_attr_val.val_class = dw_val_class_str;
4664 attr.dw_attr_val.val_entry = NULL;
4665 attr.dw_attr_val.v.val_str = node;
4666 add_dwarf_attr (die, &attr);
4667 }
4668
4669 static inline const char *
4670 AT_string (dw_attr_node *a)
4671 {
4672 gcc_assert (a && AT_class (a) == dw_val_class_str);
4673 return a->dw_attr_val.v.val_str->str;
4674 }
4675
4676 /* Call this function directly to bypass AT_string_form's logic to put
4677 the string inline in the die. */
4678
4679 static void
4680 set_indirect_string (struct indirect_string_node *node)
4681 {
4682 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4683 /* If the string is already indirect, this is a no-op. */
4684 if (node->form == DW_FORM_strp
4685 || node->form == DW_FORM_line_strp
4686 || node->form == dwarf_FORM (DW_FORM_strx))
4687 {
4688 gcc_assert (node->label);
4689 return;
4690 }
4691 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4692 ++dw2_string_counter;
4693 node->label = xstrdup (label);
4694
4695 if (!dwarf_split_debug_info)
4696 {
4697 node->form = DW_FORM_strp;
4698 node->index = NOT_INDEXED;
4699 }
4700 else
4701 {
4702 node->form = dwarf_FORM (DW_FORM_strx);
4703 node->index = NO_INDEX_ASSIGNED;
4704 }
4705 }
4706
4707 /* A helper function for dwarf2out_finish, called to reset indirect
4708 string decisions done for early LTO dwarf output before fat object
4709 dwarf output. */
4710
4711 int
4712 reset_indirect_string (indirect_string_node **h, void *)
4713 {
4714 struct indirect_string_node *node = *h;
4715 if (node->form == DW_FORM_strp || node->form == dwarf_FORM (DW_FORM_strx))
4716 {
4717 free (node->label);
4718 node->label = NULL;
4719 node->form = (dwarf_form) 0;
4720 node->index = 0;
4721 }
4722 return 1;
4723 }
4724
4725 /* Find out whether a string should be output inline in DIE
4726 or out-of-line in .debug_str section. */
4727
4728 static enum dwarf_form
4729 find_string_form (struct indirect_string_node *node)
4730 {
4731 unsigned int len;
4732
4733 if (node->form)
4734 return node->form;
4735
4736 len = strlen (node->str) + 1;
4737
4738 /* If the string is no longer than the size of the reference, it is
4739 always better to put it inline. */
4740 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4741 return node->form = DW_FORM_string;
4742
4743 /* If we cannot expect the linker to merge strings in the .debug_str
4744 section, only put it into .debug_str if doing so pays off even within
4745 this single module. */
4746 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4747 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4748 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4749 return node->form = DW_FORM_string;
4750
4751 set_indirect_string (node);
4752
4753 return node->form;
4754 }
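/* For instance, with a 4-byte DWARF_OFFSET_SIZE the string "abc" takes 4
   bytes including its terminating NUL, so it is emitted inline as
   DW_FORM_string: a DW_FORM_strp reference alone would already cost those
   same 4 bytes.  */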
4755
4756 /* Find out whether the string referenced from the attribute should be
4757 output inline in DIE or out-of-line in .debug_str section. */
4758
4759 static enum dwarf_form
4760 AT_string_form (dw_attr_node *a)
4761 {
4762 gcc_assert (a && AT_class (a) == dw_val_class_str);
4763 return find_string_form (a->dw_attr_val.v.val_str);
4764 }
4765
4766 /* Add a DIE reference attribute value to a DIE. */
4767
4768 static inline void
4769 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4770 {
4771 dw_attr_node attr;
4772 gcc_checking_assert (targ_die != NULL);
4773
4774 /* With LTO we can end up trying to reference something we didn't create
4775 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4776 if (targ_die == NULL)
4777 return;
4778
4779 attr.dw_attr = attr_kind;
4780 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4781 attr.dw_attr_val.val_entry = NULL;
4782 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4783 attr.dw_attr_val.v.val_die_ref.external = 0;
4784 add_dwarf_attr (die, &attr);
4785 }
4786
4787 /* Change DIE reference REF to point to NEW_DIE instead. */
4788
4789 static inline void
4790 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4791 {
4792 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4793 ref->dw_attr_val.v.val_die_ref.die = new_die;
4794 ref->dw_attr_val.v.val_die_ref.external = 0;
4795 }
4796
4797 /* Add an AT_specification attribute to a DIE, and also make the back
4798 pointer from the specification to the definition. */
4799
4800 static inline void
4801 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4802 {
4803 add_AT_die_ref (die, DW_AT_specification, targ_die);
4804 gcc_assert (!targ_die->die_definition);
4805 targ_die->die_definition = die;
4806 }
4807
4808 static inline dw_die_ref
4809 AT_ref (dw_attr_node *a)
4810 {
4811 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4812 return a->dw_attr_val.v.val_die_ref.die;
4813 }
4814
4815 static inline int
4816 AT_ref_external (dw_attr_node *a)
4817 {
4818 if (a && AT_class (a) == dw_val_class_die_ref)
4819 return a->dw_attr_val.v.val_die_ref.external;
4820
4821 return 0;
4822 }
4823
4824 static inline void
4825 set_AT_ref_external (dw_attr_node *a, int i)
4826 {
4827 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4828 a->dw_attr_val.v.val_die_ref.external = i;
4829 }
4830
4831 /* Add a location description attribute value to a DIE. */
4832
4833 static inline void
4834 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4835 {
4836 dw_attr_node attr;
4837
4838 attr.dw_attr = attr_kind;
4839 attr.dw_attr_val.val_class = dw_val_class_loc;
4840 attr.dw_attr_val.val_entry = NULL;
4841 attr.dw_attr_val.v.val_loc = loc;
4842 add_dwarf_attr (die, &attr);
4843 }
4844
4845 static inline dw_loc_descr_ref
4846 AT_loc (dw_attr_node *a)
4847 {
4848 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4849 return a->dw_attr_val.v.val_loc;
4850 }
4851
4852 static inline void
4853 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4854 {
4855 dw_attr_node attr;
4856
4857 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4858 return;
4859
4860 attr.dw_attr = attr_kind;
4861 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4862 attr.dw_attr_val.val_entry = NULL;
4863 attr.dw_attr_val.v.val_loc_list = loc_list;
4864 add_dwarf_attr (die, &attr);
4865 have_location_lists = true;
4866 }
4867
4868 static inline dw_loc_list_ref
4869 AT_loc_list (dw_attr_node *a)
4870 {
4871 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4872 return a->dw_attr_val.v.val_loc_list;
4873 }
4874
4875 /* Add a view list attribute to DIE. It must have a DW_AT_location
4876 attribute, because the view list complements the location list. */
4877
4878 static inline void
4879 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4880 {
4881 dw_attr_node attr;
4882
4883 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4884 return;
4885
4886 attr.dw_attr = attr_kind;
4887 attr.dw_attr_val.val_class = dw_val_class_view_list;
4888 attr.dw_attr_val.val_entry = NULL;
4889 attr.dw_attr_val.v.val_view_list = die;
4890 add_dwarf_attr (die, &attr);
4891 gcc_checking_assert (get_AT (die, DW_AT_location));
4892 gcc_assert (have_location_lists);
4893 }
4894
4895 /* Return a pointer to the location list referenced by the attribute.
4896 If the named attribute is a view list, look up the corresponding
4897 DW_AT_location attribute and return its location list. */
4898
4899 static inline dw_loc_list_ref *
4900 AT_loc_list_ptr (dw_attr_node *a)
4901 {
4902 gcc_assert (a);
4903 switch (AT_class (a))
4904 {
4905 case dw_val_class_loc_list:
4906 return &a->dw_attr_val.v.val_loc_list;
4907 case dw_val_class_view_list:
4908 {
4909 dw_attr_node *l;
4910 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4911 if (!l)
4912 return NULL;
4913 gcc_checking_assert (l + 1 == a);
4914 return AT_loc_list_ptr (l);
4915 }
4916 default:
4917 gcc_unreachable ();
4918 }
4919 }
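/* The l + 1 == a check above works because the view list attribute is
   expected to be added immediately after the DIE's DW_AT_location
   attribute, making the two dw_attr_nodes adjacent in the die_attr
   vector.  */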
4920
4921 /* Return the location attribute value associated with a view list
4922 attribute value. */
4923
4924 static inline dw_val_node *
4925 view_list_to_loc_list_val_node (dw_val_node *val)
4926 {
4927 gcc_assert (val->val_class == dw_val_class_view_list);
4928 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4929 if (!loc)
4930 return NULL;
4931 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4932 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4933 return &loc->dw_attr_val;
4934 }
4935
4936 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4937 {
4938 static hashval_t hash (addr_table_entry *);
4939 static bool equal (addr_table_entry *, addr_table_entry *);
4940 };
4941
4942 /* Table of entries into the .debug_addr section. */
4943
4944 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4945
4946 /* Hash an address_table_entry. */
4947
4948 hashval_t
4949 addr_hasher::hash (addr_table_entry *a)
4950 {
4951 inchash::hash hstate;
4952 switch (a->kind)
4953 {
4954 case ate_kind_rtx:
4955 hstate.add_int (0);
4956 break;
4957 case ate_kind_rtx_dtprel:
4958 hstate.add_int (1);
4959 break;
4960 case ate_kind_label:
4961 return htab_hash_string (a->addr.label);
4962 default:
4963 gcc_unreachable ();
4964 }
4965 inchash::add_rtx (a->addr.rtl, hstate);
4966 return hstate.end ();
4967 }
4968
4969 /* Determine equality for two address_table_entries. */
4970
4971 bool
4972 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4973 {
4974 if (a1->kind != a2->kind)
4975 return 0;
4976 switch (a1->kind)
4977 {
4978 case ate_kind_rtx:
4979 case ate_kind_rtx_dtprel:
4980 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4981 case ate_kind_label:
4982 return strcmp (a1->addr.label, a2->addr.label) == 0;
4983 default:
4984 gcc_unreachable ();
4985 }
4986 }
4987
4988 /* Initialize an addr_table_entry. */
4989
4990 void
4991 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
4992 {
4993 e->kind = kind;
4994 switch (kind)
4995 {
4996 case ate_kind_rtx:
4997 case ate_kind_rtx_dtprel:
4998 e->addr.rtl = (rtx) addr;
4999 break;
5000 case ate_kind_label:
5001 e->addr.label = (char *) addr;
5002 break;
5003 }
5004 e->refcount = 0;
5005 e->index = NO_INDEX_ASSIGNED;
5006 }
5007
5008 /* Add an entry for ADDR of kind KIND to the address table, creating it
5009 or bumping its refcount as needed. Defer setting an index until output time. */
5010
5011 static addr_table_entry *
5012 add_addr_table_entry (void *addr, enum ate_kind kind)
5013 {
5014 addr_table_entry *node;
5015 addr_table_entry finder;
5016
5017 gcc_assert (dwarf_split_debug_info);
5018 if (! addr_index_table)
5019 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
5020 init_addr_table_entry (&finder, kind, addr);
5021 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
5022
5023 if (*slot == HTAB_EMPTY_ENTRY)
5024 {
5025 node = ggc_cleared_alloc<addr_table_entry> ();
5026 init_addr_table_entry (node, kind, addr);
5027 *slot = node;
5028 }
5029 else
5030 node = *slot;
5031
5032 node->refcount++;
5033 return node;
5034 }
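/* Note that calling this twice with the same address or label returns the
   same node with a refcount of 2; index_addr_table_entry later assigns it
   a single index, provided its refcount is still positive at output
   time.  */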
5035
5036 /* Remove an entry from the addr table by decrementing its refcount.
5037 Strictly, decrementing the refcount would be enough, but the
5038 assertion that the entry is actually in the table has found
5039 bugs. */
5040
5041 static void
5042 remove_addr_table_entry (addr_table_entry *entry)
5043 {
5044 gcc_assert (dwarf_split_debug_info && addr_index_table);
5045 /* After an index is assigned, the table is frozen. */
5046 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5047 entry->refcount--;
5048 }
5049
5050 /* Given a location list, remove all addresses it refers to from the
5051 address_table. */
5052
5053 static void
5054 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5055 {
5056 for (; descr; descr = descr->dw_loc_next)
5057 if (descr->dw_loc_oprnd1.val_entry != NULL)
5058 {
5059 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5060 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5061 }
5062 }
5063
5064 /* A helper function for dwarf2out_finish called through
5065 htab_traverse. Assign an addr_table_entry its index. All entries
5066 must be collected into the table when this function is called,
5067 because the indexing code relies on htab_traverse to traverse nodes
5068 in the same order for each run. */
5069
5070 int
5071 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5072 {
5073 addr_table_entry *node = *h;
5074
5075 /* Don't index unreferenced nodes. */
5076 if (node->refcount == 0)
5077 return 1;
5078
5079 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5080 node->index = *index;
5081 *index += 1;
5082
5083 return 1;
5084 }
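/* Illustrative sketch of how the pieces above fit together under
   -gsplit-dwarf: attribute construction calls add_addr_table_entry to create
   an entry (or bump its refcount), pruning calls remove_addr_table_entry to
   drop references again, and at output time something along the lines of

     unsigned int index = 0;
     addr_index_table
       ->traverse_noresize<unsigned int *, index_addr_table_entry> (&index);

   walks the table so that only entries still referenced receive an index.
   The traversal call shown here is a sketch; see dwarf2out_finish for the
   authoritative sequence.  */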
5085
5086 /* Add an address constant attribute value to a DIE. When using
5087 dwarf_split_debug_info, address attributes in dies destined for the
5088 final executable should be direct references--setting the parameter
5089 force_direct ensures this behavior. */
5090
5091 static inline void
5092 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5093 bool force_direct)
5094 {
5095 dw_attr_node attr;
5096
5097 attr.dw_attr = attr_kind;
5098 attr.dw_attr_val.val_class = dw_val_class_addr;
5099 attr.dw_attr_val.v.val_addr = addr;
5100 if (dwarf_split_debug_info && !force_direct)
5101 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5102 else
5103 attr.dw_attr_val.val_entry = NULL;
5104 add_dwarf_attr (die, &attr);
5105 }
5106
5107 /* Get the RTX from an address DIE attribute. */
5108
5109 static inline rtx
5110 AT_addr (dw_attr_node *a)
5111 {
5112 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5113 return a->dw_attr_val.v.val_addr;
5114 }
5115
5116 /* Add a file attribute value to a DIE. */
5117
5118 static inline void
5119 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5120 struct dwarf_file_data *fd)
5121 {
5122 dw_attr_node attr;
5123
5124 attr.dw_attr = attr_kind;
5125 attr.dw_attr_val.val_class = dw_val_class_file;
5126 attr.dw_attr_val.val_entry = NULL;
5127 attr.dw_attr_val.v.val_file = fd;
5128 add_dwarf_attr (die, &attr);
5129 }
5130
5131 /* Get the dwarf_file_data from a file DIE attribute. */
5132
5133 static inline struct dwarf_file_data *
5134 AT_file (dw_attr_node *a)
5135 {
5136 gcc_assert (a && (AT_class (a) == dw_val_class_file
5137 || AT_class (a) == dw_val_class_file_implicit));
5138 return a->dw_attr_val.v.val_file;
5139 }
5140
5141 /* Add a symbolic view identifier attribute value to a DIE. */
5142
5143 static inline void
5144 add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
5145 const char *view_label)
5146 {
5147 dw_attr_node attr;
5148
5149 attr.dw_attr = attr_kind;
5150 attr.dw_attr_val.val_class = dw_val_class_symview;
5151 attr.dw_attr_val.val_entry = NULL;
5152 attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
5153 add_dwarf_attr (die, &attr);
5154 }
5155
5156 /* Add a label identifier attribute value to a DIE. */
5157
5158 static inline void
5159 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5160 const char *lbl_id)
5161 {
5162 dw_attr_node attr;
5163
5164 attr.dw_attr = attr_kind;
5165 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5166 attr.dw_attr_val.val_entry = NULL;
5167 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
5168 if (dwarf_split_debug_info)
5169 attr.dw_attr_val.val_entry
5170 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5171 ate_kind_label);
5172 add_dwarf_attr (die, &attr);
5173 }
5174
5175 /* Add a section offset attribute value to a DIE, an offset into the
5176 debug_line section. */
5177
5178 static inline void
5179 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5180 const char *label)
5181 {
5182 dw_attr_node attr;
5183
5184 attr.dw_attr = attr_kind;
5185 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5186 attr.dw_attr_val.val_entry = NULL;
5187 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5188 add_dwarf_attr (die, &attr);
5189 }
5190
5191 /* Add a section offset attribute value to a DIE, an offset into the
5192 debug_macinfo section. */
5193
5194 static inline void
5195 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5196 const char *label)
5197 {
5198 dw_attr_node attr;
5199
5200 attr.dw_attr = attr_kind;
5201 attr.dw_attr_val.val_class = dw_val_class_macptr;
5202 attr.dw_attr_val.val_entry = NULL;
5203 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5204 add_dwarf_attr (die, &attr);
5205 }
5206
5207 /* Add a range_list attribute value to a DIE. When using
5208 dwarf_split_debug_info, address attributes in dies destined for the
5209 final executable should be direct references--setting the parameter
5210 force_direct ensures this behavior. */
5211
5212 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5213 #define RELOCATED_OFFSET (NULL)
5214
5215 static void
5216 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5217 long unsigned int offset, bool force_direct)
5218 {
5219 dw_attr_node attr;
5220
5221 attr.dw_attr = attr_kind;
5222 attr.dw_attr_val.val_class = dw_val_class_range_list;
5223 /* For the range_list attribute, use val_entry to store whether the
5224 offset should follow split-debug-info or normal semantics. This
5225 value is read in output_range_list_offset. */
5226 if (dwarf_split_debug_info && !force_direct)
5227 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5228 else
5229 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5230 attr.dw_attr_val.v.val_offset = offset;
5231 add_dwarf_attr (die, &attr);
5232 }
5233
5234 /* Return the start label of a delta attribute. */
5235
5236 static inline const char *
5237 AT_vms_delta1 (dw_attr_node *a)
5238 {
5239 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5240 return a->dw_attr_val.v.val_vms_delta.lbl1;
5241 }
5242
5243 /* Return the end label of a delta attribute. */
5244
5245 static inline const char *
5246 AT_vms_delta2 (dw_attr_node *a)
5247 {
5248 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5249 return a->dw_attr_val.v.val_vms_delta.lbl2;
5250 }
5251
5252 static inline const char *
5253 AT_lbl (dw_attr_node *a)
5254 {
5255 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5256 || AT_class (a) == dw_val_class_lineptr
5257 || AT_class (a) == dw_val_class_macptr
5258 || AT_class (a) == dw_val_class_loclistsptr
5259 || AT_class (a) == dw_val_class_high_pc));
5260 return a->dw_attr_val.v.val_lbl_id;
5261 }
5262
5263 /* Get the attribute of type attr_kind. */
5264
5265 static dw_attr_node *
5266 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5267 {
5268 dw_attr_node *a;
5269 unsigned ix;
5270 dw_die_ref spec = NULL;
5271
5272 if (! die)
5273 return NULL;
5274
5275 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5276 if (a->dw_attr == attr_kind)
5277 return a;
5278 else if (a->dw_attr == DW_AT_specification
5279 || a->dw_attr == DW_AT_abstract_origin)
5280 spec = AT_ref (a);
5281
5282 if (spec)
5283 return get_AT (spec, attr_kind);
5284
5285 return NULL;
5286 }
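/* Usage sketch: because the loop above records DW_AT_specification and
   DW_AT_abstract_origin references and recurses through them, a call such as

     const char *name = get_AT_string (die, DW_AT_name);

   on a DIE that only carries an abstract-origin reference transparently
   fetches DW_AT_name from the referenced DIE.  */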
5287
5288 /* Returns the parent of the declaration of DIE. */
5289
5290 static dw_die_ref
5291 get_die_parent (dw_die_ref die)
5292 {
5293 dw_die_ref t;
5294
5295 if (!die)
5296 return NULL;
5297
5298 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5299 || (t = get_AT_ref (die, DW_AT_specification)))
5300 die = t;
5301
5302 return die->die_parent;
5303 }
5304
5305 /* Return the "low pc" attribute value, typically associated with a subprogram
5306 DIE.  Return null if the "low pc" attribute is either not present or cannot
5307 be represented as an assembler label identifier. */
5308
5309 static inline const char *
5310 get_AT_low_pc (dw_die_ref die)
5311 {
5312 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5313
5314 return a ? AT_lbl (a) : NULL;
5315 }
5316
5317 /* Return the value of the string attribute designated by ATTR_KIND, or
5318 NULL if it is not present. */
5319
5320 static inline const char *
5321 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5322 {
5323 dw_attr_node *a = get_AT (die, attr_kind);
5324
5325 return a ? AT_string (a) : NULL;
5326 }
5327
5328 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5329 if it is not present. */
5330
5331 static inline int
5332 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5333 {
5334 dw_attr_node *a = get_AT (die, attr_kind);
5335
5336 return a ? AT_flag (a) : 0;
5337 }
5338
5339 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5340 if it is not present. */
5341
5342 static inline unsigned
5343 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5344 {
5345 dw_attr_node *a = get_AT (die, attr_kind);
5346
5347 return a ? AT_unsigned (a) : 0;
5348 }
5349
5350 static inline dw_die_ref
5351 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5352 {
5353 dw_attr_node *a = get_AT (die, attr_kind);
5354
5355 return a ? AT_ref (a) : NULL;
5356 }
5357
5358 static inline struct dwarf_file_data *
5359 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5360 {
5361 dw_attr_node *a = get_AT (die, attr_kind);
5362
5363 return a ? AT_file (a) : NULL;
5364 }
5365
5366 /* Return TRUE if the language is C. */
5367
5368 static inline bool
5369 is_c (void)
5370 {
5371 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5372
5373 return (lang == DW_LANG_C || lang == DW_LANG_C89 || lang == DW_LANG_C99
5374 || lang == DW_LANG_C11 || lang == DW_LANG_ObjC);
5377 }
5378
5379 /* Return TRUE if the language is C++. */
5380
5381 static inline bool
5382 is_cxx (void)
5383 {
5384 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5385
5386 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5387 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5388 }
5389
5390 /* Return TRUE if DECL was created by the C++ frontend. */
5391
5392 static bool
5393 is_cxx (const_tree decl)
5394 {
5395 if (in_lto_p)
5396 {
5397 const_tree context = get_ultimate_context (decl);
5398 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5399 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5400 }
5401 return is_cxx ();
5402 }
5403
5404 /* Return TRUE if the language is Fortran. */
5405
5406 static inline bool
5407 is_fortran (void)
5408 {
5409 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5410
5411 return (lang == DW_LANG_Fortran77
5412 || lang == DW_LANG_Fortran90
5413 || lang == DW_LANG_Fortran95
5414 || lang == DW_LANG_Fortran03
5415 || lang == DW_LANG_Fortran08);
5416 }
5417
5418 static inline bool
5419 is_fortran (const_tree decl)
5420 {
5421 if (in_lto_p)
5422 {
5423 const_tree context = get_ultimate_context (decl);
5424 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5425 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5426 "GNU Fortran", 11) == 0
5427 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5428 "GNU F77") == 0);
5429 }
5430 return is_fortran ();
5431 }
5432
5433 /* Return TRUE if the language is Ada. */
5434
5435 static inline bool
5436 is_ada (void)
5437 {
5438 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5439
5440 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5441 }
5442
5443 /* Return TRUE if the language is D. */
5444
5445 static inline bool
5446 is_dlang (void)
5447 {
5448 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5449
5450 return lang == DW_LANG_D;
5451 }
5452
5453 /* Remove the specified attribute if present. Return TRUE if removal
5454 was successful. */
5455
5456 static bool
5457 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5458 {
5459 dw_attr_node *a;
5460 unsigned ix;
5461
5462 if (! die)
5463 return false;
5464
5465 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5466 if (a->dw_attr == attr_kind)
5467 {
5468 if (AT_class (a) == dw_val_class_str)
5469 if (a->dw_attr_val.v.val_str->refcount)
5470 a->dw_attr_val.v.val_str->refcount--;
5471
5472 /* vec::ordered_remove should help reduce the number of abbrevs
5473 that are needed. */
5474 die->die_attr->ordered_remove (ix);
5475 return true;
5476 }
5477 return false;
5478 }
5479
5480 /* Remove CHILD from its parent. PREV must have the property that
5481 PREV->DIE_SIB == CHILD. Does not alter CHILD. */
5482
5483 static void
5484 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5485 {
5486 gcc_assert (child->die_parent == prev->die_parent);
5487 gcc_assert (prev->die_sib == child);
5488 if (prev == child)
5489 {
5490 gcc_assert (child->die_parent->die_child == child);
5491 prev = NULL;
5492 }
5493 else
5494 prev->die_sib = child->die_sib;
5495 if (child->die_parent->die_child == child)
5496 child->die_parent->die_child = prev;
5497 child->die_sib = NULL;
5498 }
5499
5500 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5501 PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */
5502
5503 static void
5504 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5505 {
5506 dw_die_ref parent = old_child->die_parent;
5507
5508 gcc_assert (parent == prev->die_parent);
5509 gcc_assert (prev->die_sib == old_child);
5510
5511 new_child->die_parent = parent;
5512 if (prev == old_child)
5513 {
5514 gcc_assert (parent->die_child == old_child);
5515 new_child->die_sib = new_child;
5516 }
5517 else
5518 {
5519 prev->die_sib = new_child;
5520 new_child->die_sib = old_child->die_sib;
5521 }
5522 if (old_child->die_parent->die_child == old_child)
5523 old_child->die_parent->die_child = new_child;
5524 old_child->die_sib = NULL;
5525 }
5526
5527 /* Move all children from OLD_PARENT to NEW_PARENT. */
5528
5529 static void
5530 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5531 {
5532 dw_die_ref c;
5533 new_parent->die_child = old_parent->die_child;
5534 old_parent->die_child = NULL;
5535 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5536 }
5537
5538 /* Remove each child DIE whose die_tag is TAG.  Do nothing if no child
5539 matches TAG. */
5540
5541 static void
5542 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5543 {
5544 dw_die_ref c;
5545
5546 c = die->die_child;
5547 if (c) do {
5548 dw_die_ref prev = c;
5549 c = c->die_sib;
5550 while (c->die_tag == tag)
5551 {
5552 remove_child_with_prev (c, prev);
5553 c->die_parent = NULL;
5554 /* Might have removed every child. */
5555 if (die->die_child == NULL)
5556 return;
5557 c = prev->die_sib;
5558 }
5559 } while (c != die->die_child);
5560 }
5561
5562 /* Add a CHILD_DIE as the last child of DIE. */
5563
5564 static void
5565 add_child_die (dw_die_ref die, dw_die_ref child_die)
5566 {
5567 /* FIXME this should probably be an assert. */
5568 if (! die || ! child_die)
5569 return;
5570 gcc_assert (die != child_die);
5571
5572 child_die->die_parent = die;
5573 if (die->die_child)
5574 {
5575 child_die->die_sib = die->die_child->die_sib;
5576 die->die_child->die_sib = child_die;
5577 }
5578 else
5579 child_die->die_sib = child_die;
5580 die->die_child = child_die;
5581 }
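/* Illustrative note on the child list shape built above: the children of a
   DIE form a circular singly-linked list threaded through die_sib, with
   die_child pointing at the most recently added (last) child.  After adding
   A, B and C to parent P the links are

     P->die_child = C,  C->die_sib = A,  A->die_sib = B,  B->die_sib = C

   so the first child is always P->die_child->die_sib, which is where
   FOR_EACH_CHILD starts its walk.  */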
5582
5583 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5584
5585 static void
5586 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5587 dw_die_ref after_die)
5588 {
5589 gcc_assert (die
5590 && child_die
5591 && after_die
5592 && die->die_child
5593 && die != child_die);
5594
5595 child_die->die_parent = die;
5596 child_die->die_sib = after_die->die_sib;
5597 after_die->die_sib = child_die;
5598 if (die->die_child == after_die)
5599 die->die_child = child_die;
5600 }
5601
5602 /* Unassociate CHILD from its parent, and make its parent be
5603 NEW_PARENT. */
5604
5605 static void
5606 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5607 {
5608 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5609 if (p->die_sib == child)
5610 {
5611 remove_child_with_prev (child, p);
5612 break;
5613 }
5614 add_child_die (new_parent, child);
5615 }
5616
5617 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5618 is the specification, to the end of PARENT's list of children.
5619 This is done by removing and re-adding it. */
5620
5621 static void
5622 splice_child_die (dw_die_ref parent, dw_die_ref child)
5623 {
5624 /* We want the declaration DIE from inside the class, not the
5625 specification DIE at toplevel. */
5626 if (child->die_parent != parent)
5627 {
5628 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5629
5630 if (tmp)
5631 child = tmp;
5632 }
5633
5634 gcc_assert (child->die_parent == parent
5635 || (child->die_parent
5636 == get_AT_ref (parent, DW_AT_specification)));
5637
5638 reparent_child (child, parent);
5639 }
5640
5641 /* Create and return a new die with TAG_VALUE as tag. */
5642
5643 static inline dw_die_ref
5644 new_die_raw (enum dwarf_tag tag_value)
5645 {
5646 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5647 die->die_tag = tag_value;
5648 return die;
5649 }
5650
5651 /* Create and return a new die with a parent of PARENT_DIE. If
5652 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5653 associated tree T must be supplied to determine parenthood
5654 later. */
5655
5656 static inline dw_die_ref
5657 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5658 {
5659 dw_die_ref die = new_die_raw (tag_value);
5660
5661 if (parent_die != NULL)
5662 add_child_die (parent_die, die);
5663 else
5664 {
5665 limbo_die_node *limbo_node;
5666
5667 /* No DIEs created after early dwarf should end up in limbo,
5668 because the limbo list should not persist past LTO
5669 streaming. */
5670 if (tag_value != DW_TAG_compile_unit
5671 /* These are allowed because they're generated while
5672 breaking out COMDAT units late. */
5673 && tag_value != DW_TAG_type_unit
5674 && tag_value != DW_TAG_skeleton_unit
5675 && !early_dwarf
5676 /* Allow nested functions to live in limbo because they will
5677 only temporarily live there, as decls_for_scope will fix
5678 them up. */
5679 && (TREE_CODE (t) != FUNCTION_DECL
5680 || !decl_function_context (t))
5681 /* Same as nested functions above but for types. Types that
5682 are local to a function will be fixed in
5683 decls_for_scope. */
5684 && (!RECORD_OR_UNION_TYPE_P (t)
5685 || !TYPE_CONTEXT (t)
5686 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5687 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5688 especially in the ltrans stage, but once we implement LTO
5689 dwarf streaming, we should remove this exception. */
5690 && !in_lto_p)
5691 {
5692 fprintf (stderr, "symbol ended up in limbo too late:");
5693 debug_generic_stmt (t);
5694 gcc_unreachable ();
5695 }
5696
5697 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5698 limbo_node->die = die;
5699 limbo_node->created_for = t;
5700 limbo_node->next = limbo_die_list;
5701 limbo_die_list = limbo_node;
5702 }
5703
5704 return die;
5705 }
5706
5707 /* Return the DIE associated with the given type specifier. */
5708
5709 static inline dw_die_ref
5710 lookup_type_die (tree type)
5711 {
5712 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5713 if (die && die->removed)
5714 {
5715 TYPE_SYMTAB_DIE (type) = NULL;
5716 return NULL;
5717 }
5718 return die;
5719 }
5720
5721 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5722 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5723 anonymous type instead of the one of the naming typedef. */
5724
5725 static inline dw_die_ref
5726 strip_naming_typedef (tree type, dw_die_ref type_die)
5727 {
5728 if (type
5729 && TREE_CODE (type) == RECORD_TYPE
5730 && type_die
5731 && type_die->die_tag == DW_TAG_typedef
5732 && is_naming_typedef_decl (TYPE_NAME (type)))
5733 type_die = get_AT_ref (type_die, DW_AT_type);
5734 return type_die;
5735 }
5736
5737 /* Like lookup_type_die, but if type is an anonymous type named by a
5738 typedef[1], return the DIE of the anonymous type instead of the one of
5739 the naming typedef.  This is because in gen_typedef_die, we did
5740 equate the anonymous struct named by the typedef with the DIE of
5741 the naming typedef. So by default, lookup_type_die on an anonymous
5742 struct yields the DIE of the naming typedef.
5743
5744 [1]: Read the comment of is_naming_typedef_decl to learn about what
5745 a naming typedef is. */
5746
5747 static inline dw_die_ref
5748 lookup_type_die_strip_naming_typedef (tree type)
5749 {
5750 dw_die_ref die = lookup_type_die (type);
5751 return strip_naming_typedef (type, die);
5752 }
5753
5754 /* Equate a DIE to a given type specifier. */
5755
5756 static inline void
5757 equate_type_number_to_die (tree type, dw_die_ref type_die)
5758 {
5759 TYPE_SYMTAB_DIE (type) = type_die;
5760 }
5761
5762 static dw_die_ref maybe_create_die_with_external_ref (tree);
5763 struct GTY(()) sym_off_pair
5764 {
5765 const char * GTY((skip)) sym;
5766 unsigned HOST_WIDE_INT off;
5767 };
5768 static GTY(()) hash_map<tree, sym_off_pair> *external_die_map;
5769
5770 /* Returns a hash value for X (which really is a die_struct). */
5771
5772 inline hashval_t
5773 decl_die_hasher::hash (die_node *x)
5774 {
5775 return (hashval_t) x->decl_id;
5776 }
5777
5778 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5779
5780 inline bool
5781 decl_die_hasher::equal (die_node *x, tree y)
5782 {
5783 return (x->decl_id == DECL_UID (y));
5784 }
5785
5786 /* Return the DIE associated with a given declaration. */
5787
5788 static inline dw_die_ref
5789 lookup_decl_die (tree decl)
5790 {
5791 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5792 NO_INSERT);
5793 if (!die)
5794 {
5795 if (in_lto_p)
5796 return maybe_create_die_with_external_ref (decl);
5797 return NULL;
5798 }
5799 if ((*die)->removed)
5800 {
5801 decl_die_table->clear_slot (die);
5802 return NULL;
5803 }
5804 return *die;
5805 }
5806
5807
5808 /* Return the DIE associated with BLOCK. */
5809
5810 static inline dw_die_ref
5811 lookup_block_die (tree block)
5812 {
5813 dw_die_ref die = BLOCK_DIE (block);
5814 if (!die && in_lto_p)
5815 return maybe_create_die_with_external_ref (block);
5816 return die;
5817 }
5818
5819 /* Associate DIE with BLOCK. */
5820
5821 static inline void
5822 equate_block_to_die (tree block, dw_die_ref die)
5823 {
5824 BLOCK_DIE (block) = die;
5825 }
5826 #undef BLOCK_DIE
5827
5828
5829 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5830 style reference.  Return true if we found one referring to a DIE for
5831 DECL, otherwise return false. */
5832
5833 static bool
5834 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5835 unsigned HOST_WIDE_INT *off)
5836 {
5837 dw_die_ref die;
5838
5839 if (in_lto_p)
5840 {
5841 /* During WPA stage and incremental linking we use a hash-map
5842 to store the decl <-> label + offset map. */
5843 if (!external_die_map)
5844 return false;
5845 sym_off_pair *desc = external_die_map->get (decl);
5846 if (!desc)
5847 return false;
5848 *sym = desc->sym;
5849 *off = desc->off;
5850 return true;
5851 }
5852
5853 if (TREE_CODE (decl) == BLOCK)
5854 die = lookup_block_die (decl);
5855 else
5856 die = lookup_decl_die (decl);
5857 if (!die)
5858 return false;
5859
5860 /* Similar to get_ref_die_offset_label, but using the "correct"
5861 label. */
5862 *off = die->die_offset;
5863 while (die->die_parent)
5864 die = die->die_parent;
5865 /* For the containing CU DIE we compute a die_symbol in
5866 compute_comp_unit_symbol. */
5867 gcc_assert (die->die_tag == DW_TAG_compile_unit
5868 && die->die_id.die_symbol != NULL);
5869 *sym = die->die_id.die_symbol;
5870 return true;
5871 }
5872
5873 /* Add a reference of kind ATTR_KIND to a DIE at SYMBOL + OFFSET to DIE. */
5874
5875 static void
5876 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5877 const char *symbol, HOST_WIDE_INT offset)
5878 {
5879 /* Create a fake DIE that contains the reference. Don't use
5880 new_die because we don't want to end up in the limbo list. */
5881 /* ??? We probably want to share these, thus put a ref to the DIE
5882 we create here to the external_die_map entry. */
5883 dw_die_ref ref = new_die_raw (die->die_tag);
5884 ref->die_id.die_symbol = symbol;
5885 ref->die_offset = offset;
5886 ref->with_offset = 1;
5887 add_AT_die_ref (die, attr_kind, ref);
5888 }
5889
5890 /* Create a DIE for DECL if required and add a reference to a DIE
5891 at SYMBOL + OFFSET which contains attributes dumped early. */
5892
5893 static void
5894 dwarf2out_register_external_die (tree decl, const char *sym,
5895 unsigned HOST_WIDE_INT off)
5896 {
5897 if (debug_info_level == DINFO_LEVEL_NONE)
5898 return;
5899
5900 if (!external_die_map)
5901 external_die_map = hash_map<tree, sym_off_pair>::create_ggc (1000);
5902 gcc_checking_assert (!external_die_map->get (decl));
5903 sym_off_pair p = { IDENTIFIER_POINTER (get_identifier (sym)), off };
5904 external_die_map->put (decl, p);
5905 }
5906
5907 /* If we have a registered external DIE for DECL return a new DIE for
5908 the concrete instance with an appropriate abstract origin. */
5909
5910 static dw_die_ref
5911 maybe_create_die_with_external_ref (tree decl)
5912 {
5913 if (!external_die_map)
5914 return NULL;
5915 sym_off_pair *desc = external_die_map->get (decl);
5916 if (!desc)
5917 return NULL;
5918
5919 const char *sym = desc->sym;
5920 unsigned HOST_WIDE_INT off = desc->off;
5921
5922 in_lto_p = false;
5923 dw_die_ref die = (TREE_CODE (decl) == BLOCK
5924 ? lookup_block_die (decl) : lookup_decl_die (decl));
5925 gcc_assert (!die);
5926 in_lto_p = true;
5927
5928 tree ctx;
5929 dw_die_ref parent = NULL;
5930 /* Need to look up a DIE for the decl's context - the containing
5931 function or translation unit. */
5932 if (TREE_CODE (decl) == BLOCK)
5933 {
5934 ctx = BLOCK_SUPERCONTEXT (decl);
5935 /* ??? We do not output DIEs for all scopes thus skip as
5936 many DIEs as needed. */
5937 while (TREE_CODE (ctx) == BLOCK
5938 && !lookup_block_die (ctx))
5939 ctx = BLOCK_SUPERCONTEXT (ctx);
5940 }
5941 else
5942 ctx = DECL_CONTEXT (decl);
5943 /* Peel types in the context stack. */
5944 while (ctx && TYPE_P (ctx))
5945 ctx = TYPE_CONTEXT (ctx);
5946 /* Likewise namespaces in case we do not want to emit DIEs for them. */
5947 if (debug_info_level <= DINFO_LEVEL_TERSE)
5948 while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
5949 ctx = DECL_CONTEXT (ctx);
5950 if (ctx)
5951 {
5952 if (TREE_CODE (ctx) == BLOCK)
5953 parent = lookup_block_die (ctx);
5954 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5955 /* Keep the 1:1 association during WPA. */
5956 && !flag_wpa
5957 && flag_incremental_link != INCREMENTAL_LINK_LTO)
5958 /* Otherwise all late annotations go to the main CU which
5959 imports the original CUs. */
5960 parent = comp_unit_die ();
5961 else if (TREE_CODE (ctx) == FUNCTION_DECL
5962 && TREE_CODE (decl) != FUNCTION_DECL
5963 && TREE_CODE (decl) != PARM_DECL
5964 && TREE_CODE (decl) != RESULT_DECL
5965 && TREE_CODE (decl) != BLOCK)
5966 /* Leave parent determination for function-local entities to when
5967 we process scope variables. */
5968 ;
5969 else
5970 parent = lookup_decl_die (ctx);
5971 }
5972 else
5973 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5974 Handle this case gracefully by globalizing stuff. */
5975 parent = comp_unit_die ();
5976 /* Create a DIE "stub". */
5977 switch (TREE_CODE (decl))
5978 {
5979 case TRANSLATION_UNIT_DECL:
5980 {
5981 die = comp_unit_die ();
5982 /* We re-target all CU decls to the LTRANS CU DIE, so no need
5983 to create a DIE for the original CUs. */
5984 return die;
5985 }
5986 case NAMESPACE_DECL:
5987 if (is_fortran (decl))
5988 die = new_die (DW_TAG_module, parent, decl);
5989 else
5990 die = new_die (DW_TAG_namespace, parent, decl);
5991 break;
5992 case FUNCTION_DECL:
5993 die = new_die (DW_TAG_subprogram, parent, decl);
5994 break;
5995 case VAR_DECL:
5996 die = new_die (DW_TAG_variable, parent, decl);
5997 break;
5998 case RESULT_DECL:
5999 die = new_die (DW_TAG_variable, parent, decl);
6000 break;
6001 case PARM_DECL:
6002 die = new_die (DW_TAG_formal_parameter, parent, decl);
6003 break;
6004 case CONST_DECL:
6005 die = new_die (DW_TAG_constant, parent, decl);
6006 break;
6007 case LABEL_DECL:
6008 die = new_die (DW_TAG_label, parent, decl);
6009 break;
6010 case BLOCK:
6011 die = new_die (DW_TAG_lexical_block, parent, decl);
6012 break;
6013 default:
6014 gcc_unreachable ();
6015 }
6016 if (TREE_CODE (decl) == BLOCK)
6017 equate_block_to_die (decl, die);
6018 else
6019 equate_decl_number_to_die (decl, die);
6020
6021 add_desc_attribute (die, decl);
6022
6023 /* Add a reference to the DIE providing early debug at $sym + off. */
6024 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
6025
6026 return die;
6027 }
6028
6029 /* Returns a hash value for X (which really is a var_loc_list). */
6030
6031 inline hashval_t
6032 decl_loc_hasher::hash (var_loc_list *x)
6033 {
6034 return (hashval_t) x->decl_id;
6035 }
6036
6037 /* Return nonzero if decl_id of var_loc_list X is the same as
6038 UID of decl *Y. */
6039
6040 inline bool
6041 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
6042 {
6043 return (x->decl_id == DECL_UID (y));
6044 }
6045
6046 /* Return the var_loc list associated with a given declaration. */
6047
6048 static inline var_loc_list *
6049 lookup_decl_loc (const_tree decl)
6050 {
6051 if (!decl_loc_table)
6052 return NULL;
6053 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
6054 }
6055
6056 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
6057
6058 inline hashval_t
6059 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
6060 {
6061 return (hashval_t) x->decl_id;
6062 }
6063
6064 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
6065 UID of decl *Y. */
6066
6067 inline bool
6068 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
6069 {
6070 return (x->decl_id == DECL_UID (y));
6071 }
6072
6073 /* Equate a DIE to a particular declaration. */
6074
6075 static void
6076 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6077 {
6078 unsigned int decl_id = DECL_UID (decl);
6079
6080 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6081 decl_die->decl_id = decl_id;
6082 }
6083
6084 /* Return how many bits the PIECE EXPR_LIST covers. */
6085
6086 static HOST_WIDE_INT
6087 decl_piece_bitsize (rtx piece)
6088 {
6089 int ret = (int) GET_MODE (piece);
6090 if (ret)
6091 return ret;
6092 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6093 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6094 return INTVAL (XEXP (XEXP (piece, 0), 0));
6095 }
6096
6097 /* Return a pointer to the location note stored in the PIECE EXPR_LIST. */
6098
6099 static rtx *
6100 decl_piece_varloc_ptr (rtx piece)
6101 {
6102 if ((int) GET_MODE (piece))
6103 return &XEXP (piece, 0);
6104 else
6105 return &XEXP (XEXP (piece, 0), 1);
6106 }
6107
6108 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6109 NEXT is the chain of following piece nodes. */
6110
6111 static rtx_expr_list *
6112 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6113 {
6114 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6115 return alloc_EXPR_LIST (bitsize, loc_note, next);
6116 else
6117 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6118 GEN_INT (bitsize),
6119 loc_note), next);
6120 }
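/* Illustrative example of the encoding used above, assuming the small bitsize
   fits in the machine-mode range: a small piece stores its size directly in
   the EXPR_LIST mode field, a larger one falls back to a CONCAT wrapper.

     decl_piece_node (note, 16, next)
       -> EXPR_LIST, mode field 16, XEXP (x, 0) == note

     decl_piece_node (note, 100000, next)
       -> EXPR_LIST, mode field 0,
          XEXP (x, 0) == CONCAT (GEN_INT (100000), note)

   decl_piece_bitsize and decl_piece_varloc_ptr undo exactly this encoding.  */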
6121
6122 /* Return rtx that should be stored into loc field for
6123 LOC_NOTE and BITPOS/BITSIZE. */
6124
6125 static rtx
6126 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6127 HOST_WIDE_INT bitsize)
6128 {
6129 if (bitsize != -1)
6130 {
6131 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6132 if (bitpos != 0)
6133 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6134 }
6135 return loc_note;
6136 }
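/* Worked example: for a fragment at bitpos 8 with bitsize 16,
   construct_piece_list builds a two-node piece list, an 8-bit padding piece
   with no location followed by the 16-bit piece carrying LOC_NOTE:

     construct_piece_list (note, 8, 16)
       -> [pad, 8 bits, NULL loc] -> [piece, 16 bits, note]

   With bitsize == -1 (the whole variable) LOC_NOTE is returned unchanged.  */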
6137
6138 /* This function either modifies location piece list *DEST in
6139 place (if SRC and INNER is NULL), or copies location piece list
6140 *SRC to *DEST while modifying it. Location BITPOS is modified
6141 to contain LOC_NOTE, any pieces overlapping it are removed (or, when
6142 copying, not copied) and if needed some padding around it is added.
6143 When modifying in place, DEST should point to EXPR_LIST where
6144 earlier pieces cover PIECE_BITPOS bits, when copying SRC points
6145 to the start of the whole list and INNER points to the EXPR_LIST
6146 where earlier pieces cover PIECE_BITPOS bits. */
6147
6148 static void
6149 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6150 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6151 HOST_WIDE_INT bitsize, rtx loc_note)
6152 {
6153 HOST_WIDE_INT diff;
6154 bool copy = inner != NULL;
6155
6156 if (copy)
6157 {
6158 /* First copy all nodes preceding the current bitpos. */
6159 while (src != inner)
6160 {
6161 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6162 decl_piece_bitsize (*src), NULL_RTX);
6163 dest = &XEXP (*dest, 1);
6164 src = &XEXP (*src, 1);
6165 }
6166 }
6167 /* Add padding if needed. */
6168 if (bitpos != piece_bitpos)
6169 {
6170 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6171 copy ? NULL_RTX : *dest);
6172 dest = &XEXP (*dest, 1);
6173 }
6174 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6175 {
6176 gcc_assert (!copy);
6177 /* A piece with the correct bitpos and bitsize already exists;
6178 just update the location for it and return. */
6179 *decl_piece_varloc_ptr (*dest) = loc_note;
6180 return;
6181 }
6182 /* Add the piece that changed. */
6183 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6184 dest = &XEXP (*dest, 1);
6185 /* Skip over pieces that overlap it. */
6186 diff = bitpos - piece_bitpos + bitsize;
6187 if (!copy)
6188 src = dest;
6189 while (diff > 0 && *src)
6190 {
6191 rtx piece = *src;
6192 diff -= decl_piece_bitsize (piece);
6193 if (copy)
6194 src = &XEXP (piece, 1);
6195 else
6196 {
6197 *src = XEXP (piece, 1);
6198 free_EXPR_LIST_node (piece);
6199 }
6200 }
6201 /* Add padding if needed. */
6202 if (diff < 0 && *src)
6203 {
6204 if (!copy)
6205 dest = src;
6206 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6207 dest = &XEXP (*dest, 1);
6208 }
6209 if (!copy)
6210 return;
6211 /* Finally copy all nodes following it. */
6212 while (*src)
6213 {
6214 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6215 decl_piece_bitsize (*src), NULL_RTX);
6216 dest = &XEXP (*dest, 1);
6217 src = &XEXP (*src, 1);
6218 }
6219 }
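/* Small in-place example for the function above: if *DEST currently holds two
   32-bit pieces covering [0,32) and [32,64), and a new note arrives for
   bitpos 32 / bitsize 32 with DEST already advanced past the first piece
   (PIECE_BITPOS == 32), the "already exists" branch simply overwrites the
   location stored in the second piece and returns, with no EXPR_LIST nodes
   allocated or freed.  */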
6220
6221 /* Add a variable location node to the linked list for DECL. */
6222
6223 static struct var_loc_node *
6224 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6225 {
6226 unsigned int decl_id;
6227 var_loc_list *temp;
6228 struct var_loc_node *loc = NULL;
6229 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6230
6231 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6232 {
6233 tree realdecl = DECL_DEBUG_EXPR (decl);
6234 if (handled_component_p (realdecl)
6235 || (TREE_CODE (realdecl) == MEM_REF
6236 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6237 {
6238 bool reverse;
6239 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6240 &bitsize, &reverse);
6241 if (!innerdecl
6242 || !DECL_P (innerdecl)
6243 || DECL_IGNORED_P (innerdecl)
6244 || TREE_STATIC (innerdecl)
6245 || bitsize == 0
6246 || bitpos + bitsize > 256)
6247 return NULL;
6248 decl = innerdecl;
6249 }
6250 }
6251
6252 decl_id = DECL_UID (decl);
6253 var_loc_list **slot
6254 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6255 if (*slot == NULL)
6256 {
6257 temp = ggc_cleared_alloc<var_loc_list> ();
6258 temp->decl_id = decl_id;
6259 *slot = temp;
6260 }
6261 else
6262 temp = *slot;
6263
6264 /* For PARM_DECLs try to keep around the original incoming value,
6265 even if that means we'll emit a zero-range .debug_loc entry. */
6266 if (temp->last
6267 && temp->first == temp->last
6268 && TREE_CODE (decl) == PARM_DECL
6269 && NOTE_P (temp->first->loc)
6270 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6271 && DECL_INCOMING_RTL (decl)
6272 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6273 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6274 == GET_CODE (DECL_INCOMING_RTL (decl))
6275 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6276 && (bitsize != -1
6277 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6278 NOTE_VAR_LOCATION_LOC (loc_note))
6279 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6280 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6281 {
6282 loc = ggc_cleared_alloc<var_loc_node> ();
6283 temp->first->next = loc;
6284 temp->last = loc;
6285 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6286 }
6287 else if (temp->last)
6288 {
6289 struct var_loc_node *last = temp->last, *unused = NULL;
6290 rtx *piece_loc = NULL, last_loc_note;
6291 HOST_WIDE_INT piece_bitpos = 0;
6292 if (last->next)
6293 {
6294 last = last->next;
6295 gcc_assert (last->next == NULL);
6296 }
6297 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6298 {
6299 piece_loc = &last->loc;
6300 do
6301 {
6302 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6303 if (piece_bitpos + cur_bitsize > bitpos)
6304 break;
6305 piece_bitpos += cur_bitsize;
6306 piece_loc = &XEXP (*piece_loc, 1);
6307 }
6308 while (*piece_loc);
6309 }
6310 /* TEMP->LAST here is either a pointer to the last-but-one or the
6311 last element in the chained list; LAST is a pointer to the
6312 last element. */
6313 if (label && strcmp (last->label, label) == 0 && last->view == view)
6314 {
6315 /* For SRA optimized variables if there weren't any real
6316 insns since last note, just modify the last node. */
6317 if (piece_loc != NULL)
6318 {
6319 adjust_piece_list (piece_loc, NULL, NULL,
6320 bitpos, piece_bitpos, bitsize, loc_note);
6321 return NULL;
6322 }
6323 /* If the last note doesn't cover any instructions, remove it. */
6324 if (temp->last != last)
6325 {
6326 temp->last->next = NULL;
6327 unused = last;
6328 last = temp->last;
6329 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6330 }
6331 else
6332 {
6333 gcc_assert (temp->first == temp->last
6334 || (temp->first->next == temp->last
6335 && TREE_CODE (decl) == PARM_DECL));
6336 memset (temp->last, '\0', sizeof (*temp->last));
6337 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6338 return temp->last;
6339 }
6340 }
6341 if (bitsize == -1 && NOTE_P (last->loc))
6342 last_loc_note = last->loc;
6343 else if (piece_loc != NULL
6344 && *piece_loc != NULL_RTX
6345 && piece_bitpos == bitpos
6346 && decl_piece_bitsize (*piece_loc) == bitsize)
6347 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6348 else
6349 last_loc_note = NULL_RTX;
6350 /* If the current location is the same as the end of the list,
6351 and either both or neither of the locations is uninitialized,
6352 we have nothing to do. */
6353 if (last_loc_note == NULL_RTX
6354 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6355 NOTE_VAR_LOCATION_LOC (loc_note)))
6356 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6357 != NOTE_VAR_LOCATION_STATUS (loc_note))
6358 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6359 == VAR_INIT_STATUS_UNINITIALIZED)
6360 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6361 == VAR_INIT_STATUS_UNINITIALIZED))))
6362 {
6363 /* Add LOC to the end of list and update LAST. If the last
6364 element of the list has been removed above, reuse its
6365 memory for the new node, otherwise allocate a new one. */
6366 if (unused)
6367 {
6368 loc = unused;
6369 memset (loc, '\0', sizeof (*loc));
6370 }
6371 else
6372 loc = ggc_cleared_alloc<var_loc_node> ();
6373 if (bitsize == -1 || piece_loc == NULL)
6374 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6375 else
6376 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6377 bitpos, piece_bitpos, bitsize, loc_note);
6378 last->next = loc;
6379 /* Ensure TEMP->LAST will point either to the new last but one
6380 element of the chain, or to the last element in it. */
6381 if (last != temp->last)
6382 temp->last = last;
6383 }
6384 else if (unused)
6385 ggc_free (unused);
6386 }
6387 else
6388 {
6389 loc = ggc_cleared_alloc<var_loc_node> ();
6390 temp->first = loc;
6391 temp->last = loc;
6392 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6393 }
6394 return loc;
6395 }
6396 \f
6397 /* Keep track of the number of spaces used to indent the
6398 output of the debugging routines that print the structure of
6399 the DIE internal representation. */
6400 static int print_indent;
6401
6402 /* Indent the line the number of spaces given by print_indent. */
6403
6404 static inline void
6405 print_spaces (FILE *outfile)
6406 {
6407 fprintf (outfile, "%*s", print_indent, "");
6408 }
6409
6410 /* Print a type signature in hex. */
6411
6412 static inline void
6413 print_signature (FILE *outfile, char *sig)
6414 {
6415 int i;
6416
6417 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6418 fprintf (outfile, "%02x", sig[i] & 0xff);
6419 }
6420
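/* Print a discriminant value, as unsigned or signed per its POS flag.  */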
6421 static inline void
6422 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6423 {
6424 if (discr_value->pos)
6425 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6426 else
6427 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6428 }
6429
6430 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6431
6432 /* Print the value associated to the VAL DWARF value node to OUTFILE. If
6433 RECURSE, output location descriptor operations. */
6434
6435 static void
6436 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6437 {
6438 switch (val->val_class)
6439 {
6440 case dw_val_class_addr:
6441 fprintf (outfile, "address");
6442 break;
6443 case dw_val_class_offset:
6444 fprintf (outfile, "offset");
6445 break;
6446 case dw_val_class_loc:
6447 fprintf (outfile, "location descriptor");
6448 if (val->v.val_loc == NULL)
6449 fprintf (outfile, " -> <null>\n");
6450 else if (recurse)
6451 {
6452 fprintf (outfile, ":\n");
6453 print_indent += 4;
6454 print_loc_descr (val->v.val_loc, outfile);
6455 print_indent -= 4;
6456 }
6457 else
6458 {
6459 if (flag_dump_noaddr || flag_dump_unnumbered)
6460 fprintf (outfile, " #\n");
6461 else
6462 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6463 }
6464 break;
6465 case dw_val_class_loc_list:
6466 fprintf (outfile, "location list -> label:%s",
6467 val->v.val_loc_list->ll_symbol);
6468 break;
6469 case dw_val_class_view_list:
6470 val = view_list_to_loc_list_val_node (val);
6471 fprintf (outfile, "location list with views -> labels:%s and %s",
6472 val->v.val_loc_list->ll_symbol,
6473 val->v.val_loc_list->vl_symbol);
6474 break;
6475 case dw_val_class_range_list:
6476 fprintf (outfile, "range list");
6477 break;
6478 case dw_val_class_const:
6479 case dw_val_class_const_implicit:
6480 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6481 break;
6482 case dw_val_class_unsigned_const:
6483 case dw_val_class_unsigned_const_implicit:
6484 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6485 break;
6486 case dw_val_class_const_double:
6487 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6488 HOST_WIDE_INT_PRINT_UNSIGNED")",
6489 val->v.val_double.high,
6490 val->v.val_double.low);
6491 break;
6492 case dw_val_class_wide_int:
6493 {
6494 int i = val->v.val_wide->get_len ();
6495 fprintf (outfile, "constant (");
6496 gcc_assert (i > 0);
6497 if (val->v.val_wide->elt (i - 1) == 0)
6498 fprintf (outfile, "0x");
6499 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6500 val->v.val_wide->elt (--i));
6501 while (--i >= 0)
6502 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6503 val->v.val_wide->elt (i));
6504 fprintf (outfile, ")");
6505 break;
6506 }
6507 case dw_val_class_vec:
6508 fprintf (outfile, "floating-point or vector constant");
6509 break;
6510 case dw_val_class_flag:
6511 fprintf (outfile, "%u", val->v.val_flag);
6512 break;
6513 case dw_val_class_die_ref:
6514 if (val->v.val_die_ref.die != NULL)
6515 {
6516 dw_die_ref die = val->v.val_die_ref.die;
6517
6518 if (die->comdat_type_p)
6519 {
6520 fprintf (outfile, "die -> signature: ");
6521 print_signature (outfile,
6522 die->die_id.die_type_node->signature);
6523 }
6524 else if (die->die_id.die_symbol)
6525 {
6526 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6527 if (die->with_offset)
6528 fprintf (outfile, " + %ld", die->die_offset);
6529 }
6530 else
6531 fprintf (outfile, "die -> %ld", die->die_offset);
6532 if (flag_dump_noaddr || flag_dump_unnumbered)
6533 fprintf (outfile, " #");
6534 else
6535 fprintf (outfile, " (%p)", (void *) die);
6536 }
6537 else
6538 fprintf (outfile, "die -> <null>");
6539 break;
6540 case dw_val_class_vms_delta:
6541 fprintf (outfile, "delta: @slotcount(%s-%s)",
6542 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6543 break;
6544 case dw_val_class_symview:
6545 fprintf (outfile, "view: %s", val->v.val_symbolic_view);
6546 break;
6547 case dw_val_class_lbl_id:
6548 case dw_val_class_lineptr:
6549 case dw_val_class_macptr:
6550 case dw_val_class_loclistsptr:
6551 case dw_val_class_high_pc:
6552 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6553 break;
6554 case dw_val_class_str:
6555 if (val->v.val_str->str != NULL)
6556 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6557 else
6558 fprintf (outfile, "<null>");
6559 break;
6560 case dw_val_class_file:
6561 case dw_val_class_file_implicit:
6562 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6563 val->v.val_file->emitted_number);
6564 break;
6565 case dw_val_class_data8:
6566 {
6567 int i;
6568
6569 for (i = 0; i < 8; i++)
6570 fprintf (outfile, "%02x", val->v.val_data8[i]);
6571 break;
6572 }
6573 case dw_val_class_discr_value:
6574 print_discr_value (outfile, &val->v.val_discr_value);
6575 break;
6576 case dw_val_class_discr_list:
6577 for (dw_discr_list_ref node = val->v.val_discr_list;
6578 node != NULL;
6579 node = node->dw_discr_next)
6580 {
6581 if (node->dw_discr_range)
6582 {
6583 fprintf (outfile, " .. ");
6584 print_discr_value (outfile, &node->dw_discr_lower_bound);
6585 print_discr_value (outfile, &node->dw_discr_upper_bound);
6586 }
6587 else
6588 print_discr_value (outfile, &node->dw_discr_lower_bound);
6589
6590 if (node->dw_discr_next != NULL)
6591 fprintf (outfile, " | ");
6592 }
6593 default:
6594 break;
6595 }
6596 }
6597
6598 /* Likewise, for a DIE attribute. */
6599
6600 static void
6601 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6602 {
6603 print_dw_val (&a->dw_attr_val, recurse, outfile);
6604 }
6605
6606
6607 /* Print the list of operands in the LOC location description to OUTFILE. This
6608 routine is a debugging aid only. */
6609
6610 static void
6611 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6612 {
6613 dw_loc_descr_ref l = loc;
6614
6615 if (loc == NULL)
6616 {
6617 print_spaces (outfile);
6618 fprintf (outfile, "<null>\n");
6619 return;
6620 }
6621
6622 for (l = loc; l != NULL; l = l->dw_loc_next)
6623 {
6624 print_spaces (outfile);
6625 if (flag_dump_noaddr || flag_dump_unnumbered)
6626 fprintf (outfile, "#");
6627 else
6628 fprintf (outfile, "(%p)", (void *) l);
6629 fprintf (outfile, " %s",
6630 dwarf_stack_op_name (l->dw_loc_opc));
6631 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6632 {
6633 fprintf (outfile, " ");
6634 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6635 }
6636 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6637 {
6638 fprintf (outfile, ", ");
6639 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6640 }
6641 fprintf (outfile, "\n");
6642 }
6643 }
6644
6645 /* Print the information associated with a given DIE, and its children.
6646 This routine is a debugging aid only. */
6647
6648 static void
6649 print_die (dw_die_ref die, FILE *outfile)
6650 {
6651 dw_attr_node *a;
6652 dw_die_ref c;
6653 unsigned ix;
6654
6655 print_spaces (outfile);
6656 fprintf (outfile, "DIE %4ld: %s ",
6657 die->die_offset, dwarf_tag_name (die->die_tag));
6658 if (flag_dump_noaddr || flag_dump_unnumbered)
6659 fprintf (outfile, "#\n");
6660 else
6661 fprintf (outfile, "(%p)\n", (void*) die);
6662 print_spaces (outfile);
6663 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6664 fprintf (outfile, " offset: %ld", die->die_offset);
6665 fprintf (outfile, " mark: %d\n", die->die_mark);
6666
6667 if (die->comdat_type_p)
6668 {
6669 print_spaces (outfile);
6670 fprintf (outfile, " signature: ");
6671 print_signature (outfile, die->die_id.die_type_node->signature);
6672 fprintf (outfile, "\n");
6673 }
6674
6675 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6676 {
6677 print_spaces (outfile);
6678 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6679
6680 print_attribute (a, true, outfile);
6681 fprintf (outfile, "\n");
6682 }
6683
6684 if (die->die_child != NULL)
6685 {
6686 print_indent += 4;
6687 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6688 print_indent -= 4;
6689 }
6690 if (print_indent == 0)
6691 fprintf (outfile, "\n");
6692 }
6693
6694 /* Print the list of operations in the LOC location description. */
6695
6696 DEBUG_FUNCTION void
6697 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6698 {
6699 print_loc_descr (loc, stderr);
6700 }
6701
6702 /* Print the information collected for a given DIE. */
6703
6704 DEBUG_FUNCTION void
6705 debug_dwarf_die (dw_die_ref die)
6706 {
6707 print_die (die, stderr);
6708 }
6709
6710 DEBUG_FUNCTION void
6711 debug (die_struct &ref)
6712 {
6713 print_die (&ref, stderr);
6714 }
6715
6716 DEBUG_FUNCTION void
6717 debug (die_struct *ptr)
6718 {
6719 if (ptr)
6720 debug (*ptr);
6721 else
6722 fprintf (stderr, "<nil>\n");
6723 }
6724
6725
6726 /* Print all DWARF information collected for the compilation unit.
6727 This routine is a debugging aid only. */
6728
6729 DEBUG_FUNCTION void
6730 debug_dwarf (void)
6731 {
6732 print_indent = 0;
6733 print_die (comp_unit_die (), stderr);
6734 }
6735
6736 /* Verify the DIE tree structure. */
6737
6738 DEBUG_FUNCTION void
6739 verify_die (dw_die_ref die)
6740 {
6741 gcc_assert (!die->die_mark);
6742 if (die->die_parent == NULL
6743 && die->die_sib == NULL)
6744 return;
6745 /* Verify the die_sib list is cyclic. */
6746 dw_die_ref x = die;
6747 do
6748 {
6749 x->die_mark = 1;
6750 x = x->die_sib;
6751 }
6752 while (x && !x->die_mark);
6753 gcc_assert (x == die);
6754 x = die;
6755 do
6756 {
6757 /* Verify all dies have the same parent. */
6758 gcc_assert (x->die_parent == die->die_parent);
6759 if (x->die_child)
6760 {
6761 /* Verify the child has the proper parent and recurse. */
6762 gcc_assert (x->die_child->die_parent == x);
6763 verify_die (x->die_child);
6764 }
6765 x->die_mark = 0;
6766 x = x->die_sib;
6767 }
6768 while (x && x->die_mark);
6769 }
6770
6771 /* Sanity checks on DIEs. */
6772
6773 static void
6774 check_die (dw_die_ref die)
6775 {
6776 unsigned ix;
6777 dw_attr_node *a;
6778 bool inline_found = false;
6779 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6780 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6781 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6782 {
6783 switch (a->dw_attr)
6784 {
6785 case DW_AT_inline:
6786 if (a->dw_attr_val.v.val_unsigned)
6787 inline_found = true;
6788 break;
6789 case DW_AT_location:
6790 ++n_location;
6791 break;
6792 case DW_AT_low_pc:
6793 ++n_low_pc;
6794 break;
6795 case DW_AT_high_pc:
6796 ++n_high_pc;
6797 break;
6798 case DW_AT_artificial:
6799 ++n_artificial;
6800 break;
6801 case DW_AT_decl_column:
6802 ++n_decl_column;
6803 break;
6804 case DW_AT_decl_line:
6805 ++n_decl_line;
6806 break;
6807 case DW_AT_decl_file:
6808 ++n_decl_file;
6809 break;
6810 default:
6811 break;
6812 }
6813 }
6814 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6815 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6816 {
6817 fprintf (stderr, "Duplicate attributes in DIE:\n");
6818 debug_dwarf_die (die);
6819 gcc_unreachable ();
6820 }
6821 if (inline_found)
6822 {
6823 /* A debugging information entry that is a member of an abstract
6824 instance tree [that has DW_AT_inline] should not contain any
6825 attributes which describe aspects of the subroutine which vary
6826 between distinct inlined expansions or distinct out-of-line
6827 expansions. */
6828 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6829 gcc_assert (a->dw_attr != DW_AT_low_pc
6830 && a->dw_attr != DW_AT_high_pc
6831 && a->dw_attr != DW_AT_location
6832 && a->dw_attr != DW_AT_frame_base
6833 && a->dw_attr != DW_AT_call_all_calls
6834 && a->dw_attr != DW_AT_GNU_all_call_sites);
6835 }
6836 }
6837 \f
6838 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6839 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6840 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6841
6842 /* Calculate the checksum of a location expression. */
6843
6844 static inline void
6845 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6846 {
6847 int tem;
6848 inchash::hash hstate;
6849 hashval_t hash;
6850
6851 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6852 CHECKSUM (tem);
6853 hash_loc_operands (loc, hstate);
6854 hash = hstate.end();
6855 CHECKSUM (hash);
6856 }
6857
6858 /* Calculate the checksum of an attribute. */
6859
6860 static void
6861 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6862 {
6863 dw_loc_descr_ref loc;
6864 rtx r;
6865
6866 CHECKSUM (at->dw_attr);
6867
6868 /* We don't care that this was compiled with a different compiler
6869 snapshot; if the output is the same, that's what matters. */
6870 if (at->dw_attr == DW_AT_producer)
6871 return;
6872
6873 switch (AT_class (at))
6874 {
6875 case dw_val_class_const:
6876 case dw_val_class_const_implicit:
6877 CHECKSUM (at->dw_attr_val.v.val_int);
6878 break;
6879 case dw_val_class_unsigned_const:
6880 case dw_val_class_unsigned_const_implicit:
6881 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6882 break;
6883 case dw_val_class_const_double:
6884 CHECKSUM (at->dw_attr_val.v.val_double);
6885 break;
6886 case dw_val_class_wide_int:
6887 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6888 get_full_len (*at->dw_attr_val.v.val_wide)
6889 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6890 break;
6891 case dw_val_class_vec:
6892 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6893 (at->dw_attr_val.v.val_vec.length
6894 * at->dw_attr_val.v.val_vec.elt_size));
6895 break;
6896 case dw_val_class_flag:
6897 CHECKSUM (at->dw_attr_val.v.val_flag);
6898 break;
6899 case dw_val_class_str:
6900 CHECKSUM_STRING (AT_string (at));
6901 break;
6902
6903 case dw_val_class_addr:
6904 r = AT_addr (at);
6905 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6906 CHECKSUM_STRING (XSTR (r, 0));
6907 break;
6908
6909 case dw_val_class_offset:
6910 CHECKSUM (at->dw_attr_val.v.val_offset);
6911 break;
6912
6913 case dw_val_class_loc:
6914 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6915 loc_checksum (loc, ctx);
6916 break;
6917
6918 case dw_val_class_die_ref:
6919 die_checksum (AT_ref (at), ctx, mark);
6920 break;
6921
6922 case dw_val_class_fde_ref:
6923 case dw_val_class_vms_delta:
6924 case dw_val_class_symview:
6925 case dw_val_class_lbl_id:
6926 case dw_val_class_lineptr:
6927 case dw_val_class_macptr:
6928 case dw_val_class_loclistsptr:
6929 case dw_val_class_high_pc:
6930 break;
6931
6932 case dw_val_class_file:
6933 case dw_val_class_file_implicit:
6934 CHECKSUM_STRING (AT_file (at)->filename);
6935 break;
6936
6937 case dw_val_class_data8:
6938 CHECKSUM (at->dw_attr_val.v.val_data8);
6939 break;
6940
6941 default:
6942 break;
6943 }
6944 }
6945
6946 /* Calculate the checksum of a DIE. */
6947
6948 static void
6949 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6950 {
6951 dw_die_ref c;
6952 dw_attr_node *a;
6953 unsigned ix;
6954
6955 /* To avoid infinite recursion. */
6956 if (die->die_mark)
6957 {
6958 CHECKSUM (die->die_mark);
6959 return;
6960 }
6961 die->die_mark = ++(*mark);
6962
6963 CHECKSUM (die->die_tag);
6964
6965 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6966 attr_checksum (a, ctx, mark);
6967
6968 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6969 }
6970
6971 #undef CHECKSUM
6972 #undef CHECKSUM_BLOCK
6973 #undef CHECKSUM_STRING
6974
6975 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6976 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6977 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6978 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
6979 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
6980 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
6981 #define CHECKSUM_ATTR(FOO) \
6982 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
6983
6984 /* Calculate the checksum of a number in signed LEB128 format. */
6985
6986 static void
6987 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
6988 {
6989 unsigned char byte;
6990 bool more;
6991
6992 while (1)
6993 {
6994 byte = (value & 0x7f);
6995 value >>= 7;
6996 more = !((value == 0 && (byte & 0x40) == 0)
6997 || (value == -1 && (byte & 0x40) != 0));
6998 if (more)
6999 byte |= 0x80;
7000 CHECKSUM (byte);
7001 if (!more)
7002 break;
7003 }
7004 }
7005
7006 /* Calculate the checksum of a number in unsigned LEB128 format. */
7007
7008 static void
7009 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
7010 {
7011 while (1)
7012 {
7013 unsigned char byte = (value & 0x7f);
7014 value >>= 7;
7015 if (value != 0)
7016 /* More bytes to follow. */
7017 byte |= 0x80;
7018 CHECKSUM (byte);
7019 if (value == 0)
7020 break;
7021 }
7022 }
7023
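/* As a worked example of the LEB128 encodings hashed above (purely
   illustrative): the unsigned value 624485 is processed as the ULEB128
   byte sequence 0xe5 0x8e 0x26, and the signed value -2 as the single
   SLEB128 byte 0x7e; each byte is fed to the MD5 context via CHECKSUM. */
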
7024 /* Checksum the context of the DIE. This adds the names of any
7025 surrounding namespaces or structures to the checksum. */
7026
7027 static void
7028 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
7029 {
7030 const char *name;
7031 dw_die_ref spec;
7032 int tag = die->die_tag;
7033
7034 if (tag != DW_TAG_namespace
7035 && tag != DW_TAG_structure_type
7036 && tag != DW_TAG_class_type)
7037 return;
7038
7039 name = get_AT_string (die, DW_AT_name);
7040
7041 spec = get_AT_ref (die, DW_AT_specification);
7042 if (spec != NULL)
7043 die = spec;
7044
7045 if (die->die_parent != NULL)
7046 checksum_die_context (die->die_parent, ctx);
7047
7048 CHECKSUM_ULEB128 ('C');
7049 CHECKSUM_ULEB128 (tag);
7050 if (name != NULL)
7051 CHECKSUM_STRING (name);
7052 }
7053
7054 /* Calculate the checksum of a location expression. */
7055
7056 static inline void
7057 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
7058 {
7059 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
7060 were emitted as a DW_FORM_sdata instead of a location expression. */
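/* (DW_AT_data_member_location, for instance, may be given either as a
   plain constant or as a one-element DW_OP_plus_uconst expression;
   hashing the lone opcode as if it were sdata keeps the type signature
   independent of that choice.) */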
7061 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
7062 {
7063 CHECKSUM_ULEB128 (DW_FORM_sdata);
7064 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
7065 return;
7066 }
7067
7068 /* Otherwise, just checksum the raw location expression. */
7069 while (loc != NULL)
7070 {
7071 inchash::hash hstate;
7072 hashval_t hash;
7073
7074 CHECKSUM_ULEB128 (loc->dtprel);
7075 CHECKSUM_ULEB128 (loc->dw_loc_opc);
7076 hash_loc_operands (loc, hstate);
7077 hash = hstate.end ();
7078 CHECKSUM (hash);
7079 loc = loc->dw_loc_next;
7080 }
7081 }
7082
7083 /* Calculate the checksum of an attribute. */
7084
7085 static void
7086 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
7087 struct md5_ctx *ctx, int *mark)
7088 {
7089 dw_loc_descr_ref loc;
7090 rtx r;
7091
7092 if (AT_class (at) == dw_val_class_die_ref)
7093 {
7094 dw_die_ref target_die = AT_ref (at);
7095
7096 /* For pointer and reference types, we checksum only the (qualified)
7097 name of the target type (if there is a name). For friend entries,
7098 we checksum only the (qualified) name of the target type or function.
7099 This allows the checksum to remain the same whether the target type
7100 is complete or not. */
7101 if ((at->dw_attr == DW_AT_type
7102 && (tag == DW_TAG_pointer_type
7103 || tag == DW_TAG_reference_type
7104 || tag == DW_TAG_rvalue_reference_type
7105 || tag == DW_TAG_ptr_to_member_type))
7106 || (at->dw_attr == DW_AT_friend
7107 && tag == DW_TAG_friend))
7108 {
7109 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7110
7111 if (name_attr != NULL)
7112 {
7113 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7114
7115 if (decl == NULL)
7116 decl = target_die;
7117 CHECKSUM_ULEB128 ('N');
7118 CHECKSUM_ULEB128 (at->dw_attr);
7119 if (decl->die_parent != NULL)
7120 checksum_die_context (decl->die_parent, ctx);
7121 CHECKSUM_ULEB128 ('E');
7122 CHECKSUM_STRING (AT_string (name_attr));
7123 return;
7124 }
7125 }
7126
7127 /* For all other references to another DIE, we check to see if the
7128 target DIE has already been visited. If it has, we emit a
7129 backward reference; if not, we descend recursively. */
7130 if (target_die->die_mark > 0)
7131 {
7132 CHECKSUM_ULEB128 ('R');
7133 CHECKSUM_ULEB128 (at->dw_attr);
7134 CHECKSUM_ULEB128 (target_die->die_mark);
7135 }
7136 else
7137 {
7138 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7139
7140 if (decl == NULL)
7141 decl = target_die;
7142 target_die->die_mark = ++(*mark);
7143 CHECKSUM_ULEB128 ('T');
7144 CHECKSUM_ULEB128 (at->dw_attr);
7145 if (decl->die_parent != NULL)
7146 checksum_die_context (decl->die_parent, ctx);
7147 die_checksum_ordered (target_die, ctx, mark);
7148 }
7149 return;
7150 }
7151
7152 CHECKSUM_ULEB128 ('A');
7153 CHECKSUM_ULEB128 (at->dw_attr);
7154
7155 switch (AT_class (at))
7156 {
7157 case dw_val_class_const:
7158 case dw_val_class_const_implicit:
7159 CHECKSUM_ULEB128 (DW_FORM_sdata);
7160 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7161 break;
7162
7163 case dw_val_class_unsigned_const:
7164 case dw_val_class_unsigned_const_implicit:
7165 CHECKSUM_ULEB128 (DW_FORM_sdata);
7166 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7167 break;
7168
7169 case dw_val_class_const_double:
7170 CHECKSUM_ULEB128 (DW_FORM_block);
7171 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7172 CHECKSUM (at->dw_attr_val.v.val_double);
7173 break;
7174
7175 case dw_val_class_wide_int:
7176 CHECKSUM_ULEB128 (DW_FORM_block);
7177 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7178 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
7179 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7180 get_full_len (*at->dw_attr_val.v.val_wide)
7181 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7182 break;
7183
7184 case dw_val_class_vec:
7185 CHECKSUM_ULEB128 (DW_FORM_block);
7186 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7187 * at->dw_attr_val.v.val_vec.elt_size);
7188 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7189 (at->dw_attr_val.v.val_vec.length
7190 * at->dw_attr_val.v.val_vec.elt_size));
7191 break;
7192
7193 case dw_val_class_flag:
7194 CHECKSUM_ULEB128 (DW_FORM_flag);
7195 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7196 break;
7197
7198 case dw_val_class_str:
7199 CHECKSUM_ULEB128 (DW_FORM_string);
7200 CHECKSUM_STRING (AT_string (at));
7201 break;
7202
7203 case dw_val_class_addr:
7204 r = AT_addr (at);
7205 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7206 CHECKSUM_ULEB128 (DW_FORM_string);
7207 CHECKSUM_STRING (XSTR (r, 0));
7208 break;
7209
7210 case dw_val_class_offset:
7211 CHECKSUM_ULEB128 (DW_FORM_sdata);
7212 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7213 break;
7214
7215 case dw_val_class_loc:
7216 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7217 loc_checksum_ordered (loc, ctx);
7218 break;
7219
7220 case dw_val_class_fde_ref:
7221 case dw_val_class_symview:
7222 case dw_val_class_lbl_id:
7223 case dw_val_class_lineptr:
7224 case dw_val_class_macptr:
7225 case dw_val_class_loclistsptr:
7226 case dw_val_class_high_pc:
7227 break;
7228
7229 case dw_val_class_file:
7230 case dw_val_class_file_implicit:
7231 CHECKSUM_ULEB128 (DW_FORM_string);
7232 CHECKSUM_STRING (AT_file (at)->filename);
7233 break;
7234
7235 case dw_val_class_data8:
7236 CHECKSUM (at->dw_attr_val.v.val_data8);
7237 break;
7238
7239 default:
7240 break;
7241 }
7242 }
7243
7244 struct checksum_attributes
7245 {
7246 dw_attr_node *at_name;
7247 dw_attr_node *at_type;
7248 dw_attr_node *at_friend;
7249 dw_attr_node *at_accessibility;
7250 dw_attr_node *at_address_class;
7251 dw_attr_node *at_alignment;
7252 dw_attr_node *at_allocated;
7253 dw_attr_node *at_artificial;
7254 dw_attr_node *at_associated;
7255 dw_attr_node *at_binary_scale;
7256 dw_attr_node *at_bit_offset;
7257 dw_attr_node *at_bit_size;
7258 dw_attr_node *at_bit_stride;
7259 dw_attr_node *at_byte_size;
7260 dw_attr_node *at_byte_stride;
7261 dw_attr_node *at_const_value;
7262 dw_attr_node *at_containing_type;
7263 dw_attr_node *at_count;
7264 dw_attr_node *at_data_location;
7265 dw_attr_node *at_data_member_location;
7266 dw_attr_node *at_decimal_scale;
7267 dw_attr_node *at_decimal_sign;
7268 dw_attr_node *at_default_value;
7269 dw_attr_node *at_digit_count;
7270 dw_attr_node *at_discr;
7271 dw_attr_node *at_discr_list;
7272 dw_attr_node *at_discr_value;
7273 dw_attr_node *at_encoding;
7274 dw_attr_node *at_endianity;
7275 dw_attr_node *at_explicit;
7276 dw_attr_node *at_is_optional;
7277 dw_attr_node *at_location;
7278 dw_attr_node *at_lower_bound;
7279 dw_attr_node *at_mutable;
7280 dw_attr_node *at_ordering;
7281 dw_attr_node *at_picture_string;
7282 dw_attr_node *at_prototyped;
7283 dw_attr_node *at_small;
7284 dw_attr_node *at_segment;
7285 dw_attr_node *at_string_length;
7286 dw_attr_node *at_string_length_bit_size;
7287 dw_attr_node *at_string_length_byte_size;
7288 dw_attr_node *at_threads_scaled;
7289 dw_attr_node *at_upper_bound;
7290 dw_attr_node *at_use_location;
7291 dw_attr_node *at_use_UTF8;
7292 dw_attr_node *at_variable_parameter;
7293 dw_attr_node *at_virtuality;
7294 dw_attr_node *at_visibility;
7295 dw_attr_node *at_vtable_elem_location;
7296 };
7297
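/* The fields above are checksummed in a fixed order by die_checksum_ordered
   below, so the resulting signature does not depend on the order in which
   attributes happen to appear on the DIE; the ordering is intended to match
   the canonical attribute list of the DWARF 4 type-signature algorithm. */
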
7298 /* Collect the attributes that we will want to use for the checksum. */
7299
7300 static void
7301 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7302 {
7303 dw_attr_node *a;
7304 unsigned ix;
7305
7306 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7307 {
7308 switch (a->dw_attr)
7309 {
7310 case DW_AT_name:
7311 attrs->at_name = a;
7312 break;
7313 case DW_AT_type:
7314 attrs->at_type = a;
7315 break;
7316 case DW_AT_friend:
7317 attrs->at_friend = a;
7318 break;
7319 case DW_AT_accessibility:
7320 attrs->at_accessibility = a;
7321 break;
7322 case DW_AT_address_class:
7323 attrs->at_address_class = a;
7324 break;
7325 case DW_AT_alignment:
7326 attrs->at_alignment = a;
7327 break;
7328 case DW_AT_allocated:
7329 attrs->at_allocated = a;
7330 break;
7331 case DW_AT_artificial:
7332 attrs->at_artificial = a;
7333 break;
7334 case DW_AT_associated:
7335 attrs->at_associated = a;
7336 break;
7337 case DW_AT_binary_scale:
7338 attrs->at_binary_scale = a;
7339 break;
7340 case DW_AT_bit_offset:
7341 attrs->at_bit_offset = a;
7342 break;
7343 case DW_AT_bit_size:
7344 attrs->at_bit_size = a;
7345 break;
7346 case DW_AT_bit_stride:
7347 attrs->at_bit_stride = a;
7348 break;
7349 case DW_AT_byte_size:
7350 attrs->at_byte_size = a;
7351 break;
7352 case DW_AT_byte_stride:
7353 attrs->at_byte_stride = a;
7354 break;
7355 case DW_AT_const_value:
7356 attrs->at_const_value = a;
7357 break;
7358 case DW_AT_containing_type:
7359 attrs->at_containing_type = a;
7360 break;
7361 case DW_AT_count:
7362 attrs->at_count = a;
7363 break;
7364 case DW_AT_data_location:
7365 attrs->at_data_location = a;
7366 break;
7367 case DW_AT_data_member_location:
7368 attrs->at_data_member_location = a;
7369 break;
7370 case DW_AT_decimal_scale:
7371 attrs->at_decimal_scale = a;
7372 break;
7373 case DW_AT_decimal_sign:
7374 attrs->at_decimal_sign = a;
7375 break;
7376 case DW_AT_default_value:
7377 attrs->at_default_value = a;
7378 break;
7379 case DW_AT_digit_count:
7380 attrs->at_digit_count = a;
7381 break;
7382 case DW_AT_discr:
7383 attrs->at_discr = a;
7384 break;
7385 case DW_AT_discr_list:
7386 attrs->at_discr_list = a;
7387 break;
7388 case DW_AT_discr_value:
7389 attrs->at_discr_value = a;
7390 break;
7391 case DW_AT_encoding:
7392 attrs->at_encoding = a;
7393 break;
7394 case DW_AT_endianity:
7395 attrs->at_endianity = a;
7396 break;
7397 case DW_AT_explicit:
7398 attrs->at_explicit = a;
7399 break;
7400 case DW_AT_is_optional:
7401 attrs->at_is_optional = a;
7402 break;
7403 case DW_AT_location:
7404 attrs->at_location = a;
7405 break;
7406 case DW_AT_lower_bound:
7407 attrs->at_lower_bound = a;
7408 break;
7409 case DW_AT_mutable:
7410 attrs->at_mutable = a;
7411 break;
7412 case DW_AT_ordering:
7413 attrs->at_ordering = a;
7414 break;
7415 case DW_AT_picture_string:
7416 attrs->at_picture_string = a;
7417 break;
7418 case DW_AT_prototyped:
7419 attrs->at_prototyped = a;
7420 break;
7421 case DW_AT_small:
7422 attrs->at_small = a;
7423 break;
7424 case DW_AT_segment:
7425 attrs->at_segment = a;
7426 break;
7427 case DW_AT_string_length:
7428 attrs->at_string_length = a;
7429 break;
7430 case DW_AT_string_length_bit_size:
7431 attrs->at_string_length_bit_size = a;
7432 break;
7433 case DW_AT_string_length_byte_size:
7434 attrs->at_string_length_byte_size = a;
7435 break;
7436 case DW_AT_threads_scaled:
7437 attrs->at_threads_scaled = a;
7438 break;
7439 case DW_AT_upper_bound:
7440 attrs->at_upper_bound = a;
7441 break;
7442 case DW_AT_use_location:
7443 attrs->at_use_location = a;
7444 break;
7445 case DW_AT_use_UTF8:
7446 attrs->at_use_UTF8 = a;
7447 break;
7448 case DW_AT_variable_parameter:
7449 attrs->at_variable_parameter = a;
7450 break;
7451 case DW_AT_virtuality:
7452 attrs->at_virtuality = a;
7453 break;
7454 case DW_AT_visibility:
7455 attrs->at_visibility = a;
7456 break;
7457 case DW_AT_vtable_elem_location:
7458 attrs->at_vtable_elem_location = a;
7459 break;
7460 default:
7461 break;
7462 }
7463 }
7464 }
7465
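/* The single-letter codes mixed into the hash ('C' for a context entry,
   'D' for a DIE, 'A' for an attribute, 'N'/'E' around a reference by name,
   'R' for a back reference, 'T' for a first visit, 'S' for a shallow child)
   follow the type-signature computation described in the DWARF 4 standard
   (section 7.27), so that independent producers can arrive at the same
   eight-byte signature for the same type. */
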
7466 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7467
7468 static void
7469 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7470 {
7471 dw_die_ref c;
7472 dw_die_ref decl;
7473 struct checksum_attributes attrs;
7474
7475 CHECKSUM_ULEB128 ('D');
7476 CHECKSUM_ULEB128 (die->die_tag);
7477
7478 memset (&attrs, 0, sizeof (attrs));
7479
7480 decl = get_AT_ref (die, DW_AT_specification);
7481 if (decl != NULL)
7482 collect_checksum_attributes (&attrs, decl);
7483 collect_checksum_attributes (&attrs, die);
7484
7485 CHECKSUM_ATTR (attrs.at_name);
7486 CHECKSUM_ATTR (attrs.at_accessibility);
7487 CHECKSUM_ATTR (attrs.at_address_class);
7488 CHECKSUM_ATTR (attrs.at_allocated);
7489 CHECKSUM_ATTR (attrs.at_artificial);
7490 CHECKSUM_ATTR (attrs.at_associated);
7491 CHECKSUM_ATTR (attrs.at_binary_scale);
7492 CHECKSUM_ATTR (attrs.at_bit_offset);
7493 CHECKSUM_ATTR (attrs.at_bit_size);
7494 CHECKSUM_ATTR (attrs.at_bit_stride);
7495 CHECKSUM_ATTR (attrs.at_byte_size);
7496 CHECKSUM_ATTR (attrs.at_byte_stride);
7497 CHECKSUM_ATTR (attrs.at_const_value);
7498 CHECKSUM_ATTR (attrs.at_containing_type);
7499 CHECKSUM_ATTR (attrs.at_count);
7500 CHECKSUM_ATTR (attrs.at_data_location);
7501 CHECKSUM_ATTR (attrs.at_data_member_location);
7502 CHECKSUM_ATTR (attrs.at_decimal_scale);
7503 CHECKSUM_ATTR (attrs.at_decimal_sign);
7504 CHECKSUM_ATTR (attrs.at_default_value);
7505 CHECKSUM_ATTR (attrs.at_digit_count);
7506 CHECKSUM_ATTR (attrs.at_discr);
7507 CHECKSUM_ATTR (attrs.at_discr_list);
7508 CHECKSUM_ATTR (attrs.at_discr_value);
7509 CHECKSUM_ATTR (attrs.at_encoding);
7510 CHECKSUM_ATTR (attrs.at_endianity);
7511 CHECKSUM_ATTR (attrs.at_explicit);
7512 CHECKSUM_ATTR (attrs.at_is_optional);
7513 CHECKSUM_ATTR (attrs.at_location);
7514 CHECKSUM_ATTR (attrs.at_lower_bound);
7515 CHECKSUM_ATTR (attrs.at_mutable);
7516 CHECKSUM_ATTR (attrs.at_ordering);
7517 CHECKSUM_ATTR (attrs.at_picture_string);
7518 CHECKSUM_ATTR (attrs.at_prototyped);
7519 CHECKSUM_ATTR (attrs.at_small);
7520 CHECKSUM_ATTR (attrs.at_segment);
7521 CHECKSUM_ATTR (attrs.at_string_length);
7522 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7523 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7524 CHECKSUM_ATTR (attrs.at_threads_scaled);
7525 CHECKSUM_ATTR (attrs.at_upper_bound);
7526 CHECKSUM_ATTR (attrs.at_use_location);
7527 CHECKSUM_ATTR (attrs.at_use_UTF8);
7528 CHECKSUM_ATTR (attrs.at_variable_parameter);
7529 CHECKSUM_ATTR (attrs.at_virtuality);
7530 CHECKSUM_ATTR (attrs.at_visibility);
7531 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7532 CHECKSUM_ATTR (attrs.at_type);
7533 CHECKSUM_ATTR (attrs.at_friend);
7534 CHECKSUM_ATTR (attrs.at_alignment);
7535
7536 /* Checksum the child DIEs. */
7537 c = die->die_child;
7538 if (c) do {
7539 dw_attr_node *name_attr;
7540
7541 c = c->die_sib;
7542 name_attr = get_AT (c, DW_AT_name);
7543 if (is_template_instantiation (c))
7544 {
7545 /* Ignore instantiations of member type and function templates. */
7546 }
7547 else if (name_attr != NULL
7548 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7549 {
7550 /* Use a shallow checksum for named nested types and member
7551 functions. */
7552 CHECKSUM_ULEB128 ('S');
7553 CHECKSUM_ULEB128 (c->die_tag);
7554 CHECKSUM_STRING (AT_string (name_attr));
7555 }
7556 else
7557 {
7558 /* Use a deep checksum for other children. */
7559 /* Mark this DIE so it gets processed when unmarking. */
7560 if (c->die_mark == 0)
7561 c->die_mark = -1;
7562 die_checksum_ordered (c, ctx, mark);
7563 }
7564 } while (c != die->die_child);
7565
7566 CHECKSUM_ULEB128 (0);
7567 }
7568
7569 /* Add a type name and tag to a hash. */
7570 static void
7571 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7572 {
7573 CHECKSUM_ULEB128 (tag);
7574 CHECKSUM_STRING (name);
7575 }
7576
7577 #undef CHECKSUM
7578 #undef CHECKSUM_STRING
7579 #undef CHECKSUM_ATTR
7580 #undef CHECKSUM_SLEB128
7581 #undef CHECKSUM_ULEB128
7582
7583 /* Generate the type signature for DIE. This is computed by generating an
7584 MD5 checksum over the DIE's tag, its relevant attributes, and its
7585 children. Attributes that are references to other DIEs are processed
7586 by recursion, using the MARK field to prevent infinite recursion.
7587 If the DIE is nested inside a namespace or another type, we also
7588 need to include that context in the signature. The lower 64 bits
7589 of the resulting MD5 checksum comprise the signature. */
7590
7591 static void
7592 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7593 {
7594 int mark;
7595 const char *name;
7596 unsigned char checksum[16];
7597 struct md5_ctx ctx;
7598 dw_die_ref decl;
7599 dw_die_ref parent;
7600
7601 name = get_AT_string (die, DW_AT_name);
7602 decl = get_AT_ref (die, DW_AT_specification);
7603 parent = get_die_parent (die);
7604
7605 /* First, compute a signature for just the type name (and its surrounding
7606 context, if any). This is stored in the type unit DIE for link-time
7607 ODR (one-definition rule) checking. */
7608
7609 if (is_cxx () && name != NULL)
7610 {
7611 md5_init_ctx (&ctx);
7612
7613 /* Checksum the names of surrounding namespaces and structures. */
7614 if (parent != NULL)
7615 checksum_die_context (parent, &ctx);
7616
7617 /* Checksum the current DIE. */
7618 die_odr_checksum (die->die_tag, name, &ctx);
7619 md5_finish_ctx (&ctx, checksum);
7620
7621 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7622 }
7623
7624 /* Next, compute the complete type signature. */
7625
7626 md5_init_ctx (&ctx);
7627 mark = 1;
7628 die->die_mark = mark;
7629
7630 /* Checksum the names of surrounding namespaces and structures. */
7631 if (parent != NULL)
7632 checksum_die_context (parent, &ctx);
7633
7634 /* Checksum the DIE and its children. */
7635 die_checksum_ordered (die, &ctx, &mark);
7636 unmark_all_dies (die);
7637 md5_finish_ctx (&ctx, checksum);
7638
7639 /* Store the signature in the type node and link the type DIE and the
7640 type node together. */
7641 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7642 DWARF_TYPE_SIGNATURE_SIZE);
7643 die->comdat_type_p = true;
7644 die->die_id.die_type_node = type_node;
7645 type_node->type_die = die;
7646
7647 /* If the DIE is a specification, link its declaration to the type node
7648 as well. */
7649 if (decl != NULL)
7650 {
7651 decl->comdat_type_p = true;
7652 decl->die_id.die_type_node = type_node;
7653 }
7654 }
7655
7656 /* Do the location expressions look the same? */
7657 static inline int
7658 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7659 {
7660 return loc1->dw_loc_opc == loc2->dw_loc_opc
7661 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7662 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7663 }
7664
7665 /* Do the values look the same? */
7666 static int
7667 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7668 {
7669 dw_loc_descr_ref loc1, loc2;
7670 rtx r1, r2;
7671
7672 if (v1->val_class != v2->val_class)
7673 return 0;
7674
7675 switch (v1->val_class)
7676 {
7677 case dw_val_class_const:
7678 case dw_val_class_const_implicit:
7679 return v1->v.val_int == v2->v.val_int;
7680 case dw_val_class_unsigned_const:
7681 case dw_val_class_unsigned_const_implicit:
7682 return v1->v.val_unsigned == v2->v.val_unsigned;
7683 case dw_val_class_const_double:
7684 return v1->v.val_double.high == v2->v.val_double.high
7685 && v1->v.val_double.low == v2->v.val_double.low;
7686 case dw_val_class_wide_int:
7687 return *v1->v.val_wide == *v2->v.val_wide;
7688 case dw_val_class_vec:
7689 if (v1->v.val_vec.length != v2->v.val_vec.length
7690 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7691 return 0;
7692 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7693 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7694 return 0;
7695 return 1;
7696 case dw_val_class_flag:
7697 return v1->v.val_flag == v2->v.val_flag;
7698 case dw_val_class_str:
7699 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7700
7701 case dw_val_class_addr:
7702 r1 = v1->v.val_addr;
7703 r2 = v2->v.val_addr;
7704 if (GET_CODE (r1) != GET_CODE (r2))
7705 return 0;
7706 return rtx_equal_p (r1, r2);
7707
7708 case dw_val_class_offset:
7709 return v1->v.val_offset == v2->v.val_offset;
7710
7711 case dw_val_class_loc:
7712 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7713 loc1 && loc2;
7714 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7715 if (!same_loc_p (loc1, loc2, mark))
7716 return 0;
7717 return !loc1 && !loc2;
7718
7719 case dw_val_class_die_ref:
7720 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7721
7722 case dw_val_class_symview:
7723 return strcmp (v1->v.val_symbolic_view, v2->v.val_symbolic_view) == 0;
7724
7725 case dw_val_class_fde_ref:
7726 case dw_val_class_vms_delta:
7727 case dw_val_class_lbl_id:
7728 case dw_val_class_lineptr:
7729 case dw_val_class_macptr:
7730 case dw_val_class_loclistsptr:
7731 case dw_val_class_high_pc:
7732 return 1;
7733
7734 case dw_val_class_file:
7735 case dw_val_class_file_implicit:
7736 return v1->v.val_file == v2->v.val_file;
7737
7738 case dw_val_class_data8:
7739 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7740
7741 default:
7742 return 1;
7743 }
7744 }
7745
7746 /* Do the attributes look the same? */
7747
7748 static int
7749 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7750 {
7751 if (at1->dw_attr != at2->dw_attr)
7752 return 0;
7753
7754 /* We don't care that this was compiled with a different compiler
7755 snapshot; if the output is the same, that's what matters. */
7756 if (at1->dw_attr == DW_AT_producer)
7757 return 1;
7758
7759 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7760 }
7761
7762 /* Do the dies look the same? */
7763
7764 static int
7765 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7766 {
7767 dw_die_ref c1, c2;
7768 dw_attr_node *a1;
7769 unsigned ix;
7770
7771 /* To avoid infinite recursion. */
7772 if (die1->die_mark)
7773 return die1->die_mark == die2->die_mark;
7774 die1->die_mark = die2->die_mark = ++(*mark);
7775
7776 if (die1->die_tag != die2->die_tag)
7777 return 0;
7778
7779 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7780 return 0;
7781
7782 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7783 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7784 return 0;
7785
7786 c1 = die1->die_child;
7787 c2 = die2->die_child;
7788 if (! c1)
7789 {
7790 if (c2)
7791 return 0;
7792 }
7793 else
7794 for (;;)
7795 {
7796 if (!same_die_p (c1, c2, mark))
7797 return 0;
7798 c1 = c1->die_sib;
7799 c2 = c2->die_sib;
7800 if (c1 == die1->die_child)
7801 {
7802 if (c2 == die2->die_child)
7803 break;
7804 else
7805 return 0;
7806 }
7807 }
7808
7809 return 1;
7810 }
7811
7812 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7813 children, and set die_symbol. */
7814
7815 static void
7816 compute_comp_unit_symbol (dw_die_ref unit_die)
7817 {
7818 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7819 const char *base = die_name ? lbasename (die_name) : "anonymous";
7820 char *name = XALLOCAVEC (char, strlen (base) + 64);
7821 char *p;
7822 int i, mark;
7823 unsigned char checksum[16];
7824 struct md5_ctx ctx;
7825
7826 /* Compute the checksum of the DIE, then append part of it as hex digits to
7827 the base name of the unit's filename. */
7828
7829 md5_init_ctx (&ctx);
7830 mark = 0;
7831 die_checksum (unit_die, &ctx, &mark);
7832 unmark_all_dies (unit_die);
7833 md5_finish_ctx (&ctx, checksum);
7834
7835 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7836 not start with a letter but with anything valid for filenames and
7837 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7838 character is not a letter. */
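/* Illustrative shape of the result (the exact string depends on
   clean_symbol_name and on the MD5 digest): for a unit named "foo.c" the
   symbol is the sanitized base name followed by eight hex digits taken
   from the first four bytes of the checksum. */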
7839 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7840 clean_symbol_name (name);
7841
7842 p = name + strlen (name);
7843 for (i = 0; i < 4; i++)
7844 {
7845 sprintf (p, "%.2x", checksum[i]);
7846 p += 2;
7847 }
7848
7849 unit_die->die_id.die_symbol = xstrdup (name);
7850 }
7851
7852 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7853
7854 static int
7855 is_type_die (dw_die_ref die)
7856 {
7857 switch (die->die_tag)
7858 {
7859 case DW_TAG_array_type:
7860 case DW_TAG_class_type:
7861 case DW_TAG_interface_type:
7862 case DW_TAG_enumeration_type:
7863 case DW_TAG_pointer_type:
7864 case DW_TAG_reference_type:
7865 case DW_TAG_rvalue_reference_type:
7866 case DW_TAG_string_type:
7867 case DW_TAG_structure_type:
7868 case DW_TAG_subroutine_type:
7869 case DW_TAG_union_type:
7870 case DW_TAG_ptr_to_member_type:
7871 case DW_TAG_set_type:
7872 case DW_TAG_subrange_type:
7873 case DW_TAG_base_type:
7874 case DW_TAG_const_type:
7875 case DW_TAG_file_type:
7876 case DW_TAG_packed_type:
7877 case DW_TAG_volatile_type:
7878 case DW_TAG_typedef:
7879 return 1;
7880 default:
7881 return 0;
7882 }
7883 }
7884
7885 /* Returns true iff C is a compile-unit DIE. */
7886
7887 static inline bool
7888 is_cu_die (dw_die_ref c)
7889 {
7890 return c && (c->die_tag == DW_TAG_compile_unit
7891 || c->die_tag == DW_TAG_skeleton_unit);
7892 }
7893
7894 /* Returns true iff C is a unit DIE of some sort. */
7895
7896 static inline bool
7897 is_unit_die (dw_die_ref c)
7898 {
7899 return c && (c->die_tag == DW_TAG_compile_unit
7900 || c->die_tag == DW_TAG_partial_unit
7901 || c->die_tag == DW_TAG_type_unit
7902 || c->die_tag == DW_TAG_skeleton_unit);
7903 }
7904
7905 /* Returns true iff C is a namespace DIE. */
7906
7907 static inline bool
7908 is_namespace_die (dw_die_ref c)
7909 {
7910 return c && c->die_tag == DW_TAG_namespace;
7911 }
7912
7913 /* Return non-zero if this DIE is a template parameter. */
7914
7915 static inline bool
7916 is_template_parameter (dw_die_ref die)
7917 {
7918 switch (die->die_tag)
7919 {
7920 case DW_TAG_template_type_param:
7921 case DW_TAG_template_value_param:
7922 case DW_TAG_GNU_template_template_param:
7923 case DW_TAG_GNU_template_parameter_pack:
7924 return true;
7925 default:
7926 return false;
7927 }
7928 }
7929
7930 /* Return non-zero if this DIE represents a template instantiation. */
7931
7932 static inline bool
7933 is_template_instantiation (dw_die_ref die)
7934 {
7935 dw_die_ref c;
7936
7937 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7938 return false;
7939 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7940 return false;
7941 }
7942
7943 static char *
7944 gen_internal_sym (const char *prefix)
7945 {
7946 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7947
7948 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7949 return xstrdup (buf);
7950 }
7951
7952 /* Return non-zero if this DIE is a declaration. */
7953
7954 static int
7955 is_declaration_die (dw_die_ref die)
7956 {
7957 dw_attr_node *a;
7958 unsigned ix;
7959
7960 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7961 if (a->dw_attr == DW_AT_declaration)
7962 return 1;
7963
7964 return 0;
7965 }
7966
7967 /* Return non-zero if this DIE is nested inside a subprogram. */
7968
7969 static int
7970 is_nested_in_subprogram (dw_die_ref die)
7971 {
7972 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
7973
7974 if (decl == NULL)
7975 decl = die;
7976 return local_scope_p (decl);
7977 }
7978
7979 /* Return non-zero if this DIE contains a defining declaration of a
7980 subprogram. */
7981
7982 static int
7983 contains_subprogram_definition (dw_die_ref die)
7984 {
7985 dw_die_ref c;
7986
7987 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
7988 return 1;
7989 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
7990 return 0;
7991 }
7992
7993 /* Return non-zero if this is a type DIE that should be moved to a
7994 COMDAT .debug_types section or .debug_info section with DW_UT_*type
7995 unit type. */
7996
7997 static int
7998 should_move_die_to_comdat (dw_die_ref die)
7999 {
8000 switch (die->die_tag)
8001 {
8002 case DW_TAG_class_type:
8003 case DW_TAG_structure_type:
8004 case DW_TAG_enumeration_type:
8005 case DW_TAG_union_type:
8006 /* Don't move declarations, inlined instances, types nested in a
8007 subprogram, or types that contain subprogram definitions. */
8008 if (is_declaration_die (die)
8009 || get_AT (die, DW_AT_abstract_origin)
8010 || is_nested_in_subprogram (die)
8011 || contains_subprogram_definition (die))
8012 return 0;
8013 return 1;
8014 case DW_TAG_array_type:
8015 case DW_TAG_interface_type:
8016 case DW_TAG_pointer_type:
8017 case DW_TAG_reference_type:
8018 case DW_TAG_rvalue_reference_type:
8019 case DW_TAG_string_type:
8020 case DW_TAG_subroutine_type:
8021 case DW_TAG_ptr_to_member_type:
8022 case DW_TAG_set_type:
8023 case DW_TAG_subrange_type:
8024 case DW_TAG_base_type:
8025 case DW_TAG_const_type:
8026 case DW_TAG_file_type:
8027 case DW_TAG_packed_type:
8028 case DW_TAG_volatile_type:
8029 case DW_TAG_typedef:
8030 default:
8031 return 0;
8032 }
8033 }
8034
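/* For example (illustrative only): a complete class or enumeration defined
   at namespace scope is a candidate for its own type unit, whereas a mere
   declaration, a type nested inside a function, or a type whose DIE
   contains a subprogram definition stays in the main CU, per the checks
   above. */
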
8035 /* Make a clone of DIE. */
8036
8037 static dw_die_ref
8038 clone_die (dw_die_ref die)
8039 {
8040 dw_die_ref clone = new_die_raw (die->die_tag);
8041 dw_attr_node *a;
8042 unsigned ix;
8043
8044 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8045 add_dwarf_attr (clone, a);
8046
8047 return clone;
8048 }
8049
8050 /* Make a clone of the tree rooted at DIE. */
8051
8052 static dw_die_ref
8053 clone_tree (dw_die_ref die)
8054 {
8055 dw_die_ref c;
8056 dw_die_ref clone = clone_die (die);
8057
8058 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8059
8060 return clone;
8061 }
8062
8063 /* Make a clone of DIE as a declaration. */
8064
8065 static dw_die_ref
8066 clone_as_declaration (dw_die_ref die)
8067 {
8068 dw_die_ref clone;
8069 dw_die_ref decl;
8070 dw_attr_node *a;
8071 unsigned ix;
8072
8073 /* If the DIE is already a declaration, just clone it. */
8074 if (is_declaration_die (die))
8075 return clone_die (die);
8076
8077 /* If the DIE is a specification, just clone its declaration DIE. */
8078 decl = get_AT_ref (die, DW_AT_specification);
8079 if (decl != NULL)
8080 {
8081 clone = clone_die (decl);
8082 if (die->comdat_type_p)
8083 add_AT_die_ref (clone, DW_AT_signature, die);
8084 return clone;
8085 }
8086
8087 clone = new_die_raw (die->die_tag);
8088
8089 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8090 {
8091 /* We don't want to copy over all attributes.
8092 For example we don't want DW_AT_byte_size because otherwise we will no
8093 longer have a declaration and GDB will treat it as a definition. */
8094
8095 switch (a->dw_attr)
8096 {
8097 case DW_AT_abstract_origin:
8098 case DW_AT_artificial:
8099 case DW_AT_containing_type:
8100 case DW_AT_external:
8101 case DW_AT_name:
8102 case DW_AT_type:
8103 case DW_AT_virtuality:
8104 case DW_AT_linkage_name:
8105 case DW_AT_MIPS_linkage_name:
8106 add_dwarf_attr (clone, a);
8107 break;
8108 case DW_AT_byte_size:
8109 case DW_AT_alignment:
8110 default:
8111 break;
8112 }
8113 }
8114
8115 if (die->comdat_type_p)
8116 add_AT_die_ref (clone, DW_AT_signature, die);
8117
8118 add_AT_flag (clone, DW_AT_declaration, 1);
8119 return clone;
8120 }
8121
8122
8123 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8124
8125 struct decl_table_entry
8126 {
8127 dw_die_ref orig;
8128 dw_die_ref copy;
8129 };
8130
8131 /* Helpers to manipulate hash table of copied declarations. */
8132
8133 /* Hashtable helpers. */
8134
8135 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8136 {
8137 typedef die_struct *compare_type;
8138 static inline hashval_t hash (const decl_table_entry *);
8139 static inline bool equal (const decl_table_entry *, const die_struct *);
8140 };
8141
8142 inline hashval_t
8143 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8144 {
8145 return htab_hash_pointer (entry->orig);
8146 }
8147
8148 inline bool
8149 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8150 const die_struct *entry2)
8151 {
8152 return entry1->orig == entry2;
8153 }
8154
8155 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8156
8157 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8158 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8159 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8160 to check if the ancestor has already been copied into UNIT. */
8161
8162 static dw_die_ref
8163 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8164 decl_hash_type *decl_table)
8165 {
8166 dw_die_ref parent = die->die_parent;
8167 dw_die_ref new_parent = unit;
8168 dw_die_ref copy;
8169 decl_table_entry **slot = NULL;
8170 struct decl_table_entry *entry = NULL;
8171
8172 if (decl_table)
8173 {
8174 /* Check if the entry has already been copied to UNIT. */
8175 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8176 INSERT);
8177 if (*slot != HTAB_EMPTY_ENTRY)
8178 {
8179 entry = *slot;
8180 return entry->copy;
8181 }
8182
8183 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8184 entry = XCNEW (struct decl_table_entry);
8185 entry->orig = die;
8186 entry->copy = NULL;
8187 *slot = entry;
8188 }
8189
8190 if (parent != NULL)
8191 {
8192 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8193 if (spec != NULL)
8194 parent = spec;
8195 if (!is_unit_die (parent))
8196 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8197 }
8198
8199 copy = clone_as_declaration (die);
8200 add_child_die (new_parent, copy);
8201
8202 if (decl_table)
8203 {
8204 /* Record the pointer to the copy. */
8205 entry->copy = copy;
8206 }
8207
8208 return copy;
8209 }
8210 /* Copy the declaration context to the new type unit DIE. This includes
8211 any surrounding namespace or type declarations. If the DIE has an
8212 DW_AT_specification attribute, it also includes attributes and children
8213 attached to the specification, and returns a pointer to the original
8214 parent of the declaration DIE. Returns NULL otherwise. */
8215
8216 static dw_die_ref
8217 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8218 {
8219 dw_die_ref decl;
8220 dw_die_ref new_decl;
8221 dw_die_ref orig_parent = NULL;
8222
8223 decl = get_AT_ref (die, DW_AT_specification);
8224 if (decl == NULL)
8225 decl = die;
8226 else
8227 {
8228 unsigned ix;
8229 dw_die_ref c;
8230 dw_attr_node *a;
8231
8232 /* The original DIE will be changed to a declaration, and must
8233 be moved to be a child of the original declaration DIE. */
8234 orig_parent = decl->die_parent;
8235
8236 /* Copy the type node pointer from the new DIE to the original
8237 declaration DIE so we can forward references later. */
8238 decl->comdat_type_p = true;
8239 decl->die_id.die_type_node = die->die_id.die_type_node;
8240
8241 remove_AT (die, DW_AT_specification);
8242
8243 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8244 {
8245 if (a->dw_attr != DW_AT_name
8246 && a->dw_attr != DW_AT_declaration
8247 && a->dw_attr != DW_AT_external)
8248 add_dwarf_attr (die, a);
8249 }
8250
8251 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8252 }
8253
8254 if (decl->die_parent != NULL
8255 && !is_unit_die (decl->die_parent))
8256 {
8257 new_decl = copy_ancestor_tree (unit, decl, NULL);
8258 if (new_decl != NULL)
8259 {
8260 remove_AT (new_decl, DW_AT_signature);
8261 add_AT_specification (die, new_decl);
8262 }
8263 }
8264
8265 return orig_parent;
8266 }
8267
8268 /* Generate the skeleton ancestor tree for the given NODE, then clone
8269 the DIE and add the clone into the tree. */
8270
8271 static void
8272 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8273 {
8274 if (node->new_die != NULL)
8275 return;
8276
8277 node->new_die = clone_as_declaration (node->old_die);
8278
8279 if (node->parent != NULL)
8280 {
8281 generate_skeleton_ancestor_tree (node->parent);
8282 add_child_die (node->parent->new_die, node->new_die);
8283 }
8284 }
8285
8286 /* Generate a skeleton tree of DIEs containing any declarations that are
8287 found in the original tree. We traverse the tree looking for declaration
8288 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8289
8290 static void
8291 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8292 {
8293 skeleton_chain_node node;
8294 dw_die_ref c;
8295 dw_die_ref first;
8296 dw_die_ref prev = NULL;
8297 dw_die_ref next = NULL;
8298
8299 node.parent = parent;
8300
8301 first = c = parent->old_die->die_child;
8302 if (c)
8303 next = c->die_sib;
8304 if (c) do {
8305 if (prev == NULL || prev->die_sib == c)
8306 prev = c;
8307 c = next;
8308 next = (c == first ? NULL : c->die_sib);
8309 node.old_die = c;
8310 node.new_die = NULL;
8311 if (is_declaration_die (c))
8312 {
8313 if (is_template_instantiation (c))
8314 {
8315 /* Instantiated templates do not need to be cloned into the
8316 type unit. Just move the DIE and its children back to
8317 the skeleton tree (in the main CU). */
8318 remove_child_with_prev (c, prev);
8319 add_child_die (parent->new_die, c);
8320 c = prev;
8321 }
8322 else if (c->comdat_type_p)
8323 {
8324 /* This is the skeleton of a type broken out earlier by
8325 break_out_comdat_types. Clone the existing DIE, but keep the children
8326 under the original (which is in the main CU). */
8327 dw_die_ref clone = clone_die (c);
8328
8329 replace_child (c, clone, prev);
8330 generate_skeleton_ancestor_tree (parent);
8331 add_child_die (parent->new_die, c);
8332 c = clone;
8333 continue;
8334 }
8335 else
8336 {
8337 /* Clone the existing DIE, move the original to the skeleton
8338 tree (which is in the main CU), and put the clone, with
8339 all the original's children, where the original came from
8340 (which is about to be moved to the type unit). */
8341 dw_die_ref clone = clone_die (c);
8342 move_all_children (c, clone);
8343
8344 /* If the original has a DW_AT_object_pointer attribute,
8345 it would now point to a child DIE just moved to the
8346 cloned tree, so we need to remove that attribute from
8347 the original. */
8348 remove_AT (c, DW_AT_object_pointer);
8349
8350 replace_child (c, clone, prev);
8351 generate_skeleton_ancestor_tree (parent);
8352 add_child_die (parent->new_die, c);
8353 node.old_die = clone;
8354 node.new_die = c;
8355 c = clone;
8356 }
8357 }
8358 generate_skeleton_bottom_up (&node);
8359 } while (next != NULL);
8360 }
8361
8362 /* Wrapper function for generate_skeleton_bottom_up. */
8363
8364 static dw_die_ref
8365 generate_skeleton (dw_die_ref die)
8366 {
8367 skeleton_chain_node node;
8368
8369 node.old_die = die;
8370 node.new_die = NULL;
8371 node.parent = NULL;
8372
8373 /* If this type definition is nested inside another type,
8374 and is not an instantiation of a template, always leave
8375 at least a declaration in its place. */
8376 if (die->die_parent != NULL
8377 && is_type_die (die->die_parent)
8378 && !is_template_instantiation (die))
8379 node.new_die = clone_as_declaration (die);
8380
8381 generate_skeleton_bottom_up (&node);
8382 return node.new_die;
8383 }
8384
8385 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8386 declaration. The original DIE is moved to a new compile unit so that
8387 existing references to it follow it to the new location. If any of the
8388 original DIE's descendants is a declaration, we need to replace the
8389 original DIE with a skeleton tree and move the declarations back into the
8390 skeleton tree. */
8391
8392 static dw_die_ref
8393 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8394 dw_die_ref prev)
8395 {
8396 dw_die_ref skeleton, orig_parent;
8397
8398 /* Copy the declaration context to the type unit DIE. If the returned
8399 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8400 that DIE. */
8401 orig_parent = copy_declaration_context (unit, child);
8402
8403 skeleton = generate_skeleton (child);
8404 if (skeleton == NULL)
8405 remove_child_with_prev (child, prev);
8406 else
8407 {
8408 skeleton->comdat_type_p = true;
8409 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8410
8411 /* If the original DIE was a specification, we need to put
8412 the skeleton under the parent DIE of the declaration.
8413 This leaves the original declaration in the tree, but
8414 it will be pruned later since there are no longer any
8415 references to it. */
8416 if (orig_parent != NULL)
8417 {
8418 remove_child_with_prev (child, prev);
8419 add_child_die (orig_parent, skeleton);
8420 }
8421 else
8422 replace_child (child, skeleton, prev);
8423 }
8424
8425 return skeleton;
8426 }
8427
8428 static void
8429 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8430 comdat_type_node *type_node,
8431 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8432
8433 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8434 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8435 DWARF procedure references in the DW_AT_location attribute. */
8436
8437 static dw_die_ref
8438 copy_dwarf_procedure (dw_die_ref die,
8439 comdat_type_node *type_node,
8440 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8441 {
8442 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8443
8444 /* DWARF procedures are not supposed to have children... */
8445 gcc_assert (die->die_child == NULL);
8446
8447 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8448 gcc_assert (vec_safe_length (die->die_attr) == 1
8449 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8450
8451 /* Do not copy DWARF procedures more than once. */
8452 bool existed;
8453 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8454 if (existed)
8455 return die_copy;
8456
8457 die_copy = clone_die (die);
8458 add_child_die (type_node->root_die, die_copy);
8459 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8460 return die_copy;
8461 }
8462
8463 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8464 procedures in DIE's attributes. */
8465
8466 static void
8467 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8468 comdat_type_node *type_node,
8469 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8470 {
8471 dw_attr_node *a;
8472 unsigned i;
8473
8474 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8475 {
8476 dw_loc_descr_ref loc;
8477
8478 if (a->dw_attr_val.val_class != dw_val_class_loc)
8479 continue;
8480
8481 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8482 {
8483 switch (loc->dw_loc_opc)
8484 {
8485 case DW_OP_call2:
8486 case DW_OP_call4:
8487 case DW_OP_call_ref:
8488 gcc_assert (loc->dw_loc_oprnd1.val_class
8489 == dw_val_class_die_ref);
8490 loc->dw_loc_oprnd1.v.val_die_ref.die
8491 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8492 type_node,
8493 copied_dwarf_procs);
8494
8495 default:
8496 break;
8497 }
8498 }
8499 }
8500 }
8501
8502 /* Copy DWARF procedures that are referenced by the DIE tree to TREE_NODE and
8503 rewrite references to point to the copies.
8504
8505 References are looked for in DIE's attributes and recursively in all its
8506 children attributes that are location descriptions. COPIED_DWARF_PROCS is a
8507 mapping from old DWARF procedures to their copies. It is used to avoid
8508 copying the same DWARF procedure twice under TYPE_NODE. */
8509
8510 static void
8511 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8512 comdat_type_node *type_node,
8513 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8514 {
8515 dw_die_ref c;
8516
8517 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8518 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8519 type_node,
8520 copied_dwarf_procs));
8521 }
8522
8523 /* Traverse the DIE and set up additional .debug_types or .debug_info
8524 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8525 section. */
8526
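/* Roughly: for a qualifying type (e.g. a namespace-scope struct), the DIE
   is moved under a fresh DW_TAG_type_unit, an eight-byte signature is
   computed for it, and the main CU keeps at most a small skeleton or
   declaration, so references from the main CU can be emitted against the
   signature instead of the moved DIE. */
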
8527 static void
8528 break_out_comdat_types (dw_die_ref die)
8529 {
8530 dw_die_ref c;
8531 dw_die_ref first;
8532 dw_die_ref prev = NULL;
8533 dw_die_ref next = NULL;
8534 dw_die_ref unit = NULL;
8535
8536 first = c = die->die_child;
8537 if (c)
8538 next = c->die_sib;
8539 if (c) do {
8540 if (prev == NULL || prev->die_sib == c)
8541 prev = c;
8542 c = next;
8543 next = (c == first ? NULL : c->die_sib);
8544 if (should_move_die_to_comdat (c))
8545 {
8546 dw_die_ref replacement;
8547 comdat_type_node *type_node;
8548
8549 /* Break out nested types into their own type units. */
8550 break_out_comdat_types (c);
8551
8552 /* Create a new type unit DIE as the root for the new tree, and
8553 add it to the list of comdat types. */
8554 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8555 add_AT_unsigned (unit, DW_AT_language,
8556 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8557 type_node = ggc_cleared_alloc<comdat_type_node> ();
8558 type_node->root_die = unit;
8559 type_node->next = comdat_type_list;
8560 comdat_type_list = type_node;
8561
8562 /* Generate the type signature. */
8563 generate_type_signature (c, type_node);
8564
8565 /* Copy the declaration context, attributes, and children of the
8566 declaration into the new type unit DIE, then remove this DIE
8567 from the main CU (or replace it with a skeleton if necessary). */
8568 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8569 type_node->skeleton_die = replacement;
8570
8571 /* Add the DIE to the new compunit. */
8572 add_child_die (unit, c);
8573
8574 /* Types can reference DWARF procedures for type size or data location
8575 expressions. Calls in DWARF expressions cannot target procedures
8576 that are not in the same section. So we must copy DWARF procedures
8577 along with this type and then rewrite references to them. */
8578 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8579 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8580
8581 if (replacement != NULL)
8582 c = replacement;
8583 }
8584 else if (c->die_tag == DW_TAG_namespace
8585 || c->die_tag == DW_TAG_class_type
8586 || c->die_tag == DW_TAG_structure_type
8587 || c->die_tag == DW_TAG_union_type)
8588 {
8589 /* Look for nested types that can be broken out. */
8590 break_out_comdat_types (c);
8591 }
8592 } while (next != NULL);
8593 }
8594
8595 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8596 Enter all the cloned children into the hash table decl_table. */
8597
8598 static dw_die_ref
8599 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8600 {
8601 dw_die_ref c;
8602 dw_die_ref clone;
8603 struct decl_table_entry *entry;
8604 decl_table_entry **slot;
8605
8606 if (die->die_tag == DW_TAG_subprogram)
8607 clone = clone_as_declaration (die);
8608 else
8609 clone = clone_die (die);
8610
8611 slot = decl_table->find_slot_with_hash (die,
8612 htab_hash_pointer (die), INSERT);
8613
8614 /* Assert that DIE isn't in the hash table yet. If it were already
8615 there, its ancestors would necessarily be there as well, and
8616 clone_tree_partial wouldn't have been called. */
8617 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8618
8619 entry = XCNEW (struct decl_table_entry);
8620 entry->orig = die;
8621 entry->copy = clone;
8622 *slot = entry;
8623
8624 if (die->die_tag != DW_TAG_subprogram)
8625 FOR_EACH_CHILD (die, c,
8626 add_child_die (clone, clone_tree_partial (c, decl_table)));
8627
8628 return clone;
8629 }
8630
8631 /* Walk the DIE and its children, looking for references to incomplete
8632 or trivial types that are unmarked (i.e., that are not in the current
8633 type_unit). */
8634
8635 static void
8636 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8637 {
8638 dw_die_ref c;
8639 dw_attr_node *a;
8640 unsigned ix;
8641
8642 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8643 {
8644 if (AT_class (a) == dw_val_class_die_ref)
8645 {
8646 dw_die_ref targ = AT_ref (a);
8647 decl_table_entry **slot;
8648 struct decl_table_entry *entry;
8649
8650 if (targ->die_mark != 0 || targ->comdat_type_p)
8651 continue;
8652
8653 slot = decl_table->find_slot_with_hash (targ,
8654 htab_hash_pointer (targ),
8655 INSERT);
8656
8657 if (*slot != HTAB_EMPTY_ENTRY)
8658 {
8659 /* TARG has already been copied, so we just need to
8660 modify the reference to point to the copy. */
8661 entry = *slot;
8662 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8663 }
8664 else
8665 {
8666 dw_die_ref parent = unit;
8667 dw_die_ref copy = clone_die (targ);
8668
8669 /* Record in DECL_TABLE that TARG has been copied.
8670 Need to do this now, before the recursive call,
8671 because DECL_TABLE may be expanded and SLOT
8672 would no longer be a valid pointer. */
8673 entry = XCNEW (struct decl_table_entry);
8674 entry->orig = targ;
8675 entry->copy = copy;
8676 *slot = entry;
8677
8678 /* If TARG is not a declaration DIE, we need to copy its
8679 children. */
8680 if (!is_declaration_die (targ))
8681 {
8682 FOR_EACH_CHILD (
8683 targ, c,
8684 add_child_die (copy,
8685 clone_tree_partial (c, decl_table)));
8686 }
8687
8688 /* Make sure the cloned tree is marked as part of the
8689 type unit. */
8690 mark_dies (copy);
8691
8692 /* If TARG has surrounding context, copy its ancestor tree
8693 into the new type unit. */
8694 if (targ->die_parent != NULL
8695 && !is_unit_die (targ->die_parent))
8696 parent = copy_ancestor_tree (unit, targ->die_parent,
8697 decl_table);
8698
8699 add_child_die (parent, copy);
8700 a->dw_attr_val.v.val_die_ref.die = copy;
8701
8702 /* Make sure the newly-copied DIE is walked. If it was
8703 installed in a previously-added context, it won't
8704 get visited otherwise. */
8705 if (parent != unit)
8706 {
8707 /* Find the highest point of the newly-added tree,
8708 mark each node along the way, and walk from there. */
8709 parent->die_mark = 1;
8710 while (parent->die_parent
8711 && parent->die_parent->die_mark == 0)
8712 {
8713 parent = parent->die_parent;
8714 parent->die_mark = 1;
8715 }
8716 copy_decls_walk (unit, parent, decl_table);
8717 }
8718 }
8719 }
8720 }
8721
8722 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8723 }
8724
8725 /* Copy declarations for "unworthy" types into the new comdat section.
8726 Incomplete types, modified types, and certain other types aren't broken
8727 out into comdat sections of their own, so they don't have a signature,
8728 and we need to copy the declaration into the same section so that we
8729 don't have an external reference. */
8730
8731 static void
8732 copy_decls_for_unworthy_types (dw_die_ref unit)
8733 {
8734 mark_dies (unit);
8735 decl_hash_type decl_table (10);
8736 copy_decls_walk (unit, unit, &decl_table);
8737 unmark_dies (unit);
8738 }
8739
8740 /* Traverse the DIE and add a sibling attribute if it may have the
8741 effect of speeding up access to siblings. To save some space,
8742 avoid generating sibling attributes for DIEs without children. */
8743
8744 static void
8745 add_sibling_attributes (dw_die_ref die)
8746 {
8747 dw_die_ref c;
8748
8749 if (! die->die_child)
8750 return;
8751
8752 if (die->die_parent && die != die->die_parent->die_child)
8753 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8754
8755 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8756 }
8757
8758 /* Output all location lists for the DIE and its children. */
8759
8760 static void
8761 output_location_lists (dw_die_ref die)
8762 {
8763 dw_die_ref c;
8764 dw_attr_node *a;
8765 unsigned ix;
8766
8767 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8768 if (AT_class (a) == dw_val_class_loc_list)
8769 output_loc_list (AT_loc_list (a));
8770
8771 FOR_EACH_CHILD (die, c, output_location_lists (c));
8772 }
8773
8774 /* During assign_location_list_indexes and output_loclists_offsets this is
8775 the current index; afterwards it is the number of assigned indexes
8776 (i.e. how large the .debug_loclists* offset table should be). */
8777 static unsigned int loc_list_idx;
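/* The index assigned to each location list is what is later emitted for
   DW_FORM_loclistx attributes; consumers are expected to resolve it through
   the offset table at the start of the .debug_loclists section. */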
8778
8779 /* Output all location list offsets for the DIE and its children. */
8780
8781 static void
8782 output_loclists_offsets (dw_die_ref die)
8783 {
8784 dw_die_ref c;
8785 dw_attr_node *a;
8786 unsigned ix;
8787
8788 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8789 if (AT_class (a) == dw_val_class_loc_list)
8790 {
8791 dw_loc_list_ref l = AT_loc_list (a);
8792 if (l->offset_emitted)
8793 continue;
8794 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8795 loc_section_label, NULL);
8796 gcc_assert (l->hash == loc_list_idx);
8797 loc_list_idx++;
8798 l->offset_emitted = true;
8799 }
8800
8801 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8802 }
8803
8804 /* Recursively set indexes of location lists. */
8805
8806 static void
8807 assign_location_list_indexes (dw_die_ref die)
8808 {
8809 dw_die_ref c;
8810 dw_attr_node *a;
8811 unsigned ix;
8812
8813 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8814 if (AT_class (a) == dw_val_class_loc_list)
8815 {
8816 dw_loc_list_ref list = AT_loc_list (a);
8817 if (!list->num_assigned)
8818 {
8819 list->num_assigned = true;
8820 list->hash = loc_list_idx++;
8821 }
8822 }
8823
8824 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8825 }
8826
8827 /* We want to limit the number of external references, because they are
8828 larger than local references: a relocation takes multiple words, and
8829 even a sig8 reference is always eight bytes, whereas a local reference
8830 can be as small as one byte (though in GCC a local reference is usually the four-byte DW_FORM_ref4).
8831 So if we encounter multiple external references to the same type DIE, we
8832 make a local typedef stub for it and redirect all references there.
8833
8834 This is the element of the hash table for keeping track of these
8835 references. */
8836
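/* For instance, if a CU refers many times to a type that lives in another
   unit, a single unnamed stub (either a DIE carrying just DW_AT_signature
   or a bare DW_TAG_typedef pointing at the type) is created locally and the
   references are redirected to it, turning several large external
   references into small local ones. */
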
8837 struct external_ref
8838 {
8839 dw_die_ref type;
8840 dw_die_ref stub;
8841 unsigned n_refs;
8842 };
8843
8844 /* Hashtable helpers. */
8845
8846 struct external_ref_hasher : free_ptr_hash <external_ref>
8847 {
8848 static inline hashval_t hash (const external_ref *);
8849 static inline bool equal (const external_ref *, const external_ref *);
8850 };
8851
8852 inline hashval_t
8853 external_ref_hasher::hash (const external_ref *r)
8854 {
8855 dw_die_ref die = r->type;
8856 hashval_t h = 0;
8857
8858 /* We can't use the address of the DIE for hashing, because
8859 that will make the order of the stub DIEs non-deterministic. */
8860 if (! die->comdat_type_p)
8861 /* We have a symbol; use it to compute a hash. */
8862 h = htab_hash_string (die->die_id.die_symbol);
8863 else
8864 {
8865 /* We have a type signature; use a subset of the bits as the hash.
8866 The 8-byte signature is at least as large as hashval_t. */
8867 comdat_type_node *type_node = die->die_id.die_type_node;
8868 memcpy (&h, type_node->signature, sizeof (h));
8869 }
8870 return h;
8871 }
8872
8873 inline bool
8874 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8875 {
8876 return r1->type == r2->type;
8877 }
8878
8879 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8880
8881 /* Return a pointer to the external_ref for references to DIE. */
8882
8883 static struct external_ref *
8884 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8885 {
8886 struct external_ref ref, *ref_p;
8887 external_ref **slot;
8888
8889 ref.type = die;
8890 slot = map->find_slot (&ref, INSERT);
8891 if (*slot != HTAB_EMPTY_ENTRY)
8892 return *slot;
8893
8894 ref_p = XCNEW (struct external_ref);
8895 ref_p->type = die;
8896 *slot = ref_p;
8897 return ref_p;
8898 }
8899
8900 /* Subroutine of optimize_external_refs, below.
8901
8902 If we see a type skeleton, record it as our stub. If we see external
8903 references, remember how many we've seen. */
8904
8905 static void
8906 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8907 {
8908 dw_die_ref c;
8909 dw_attr_node *a;
8910 unsigned ix;
8911 struct external_ref *ref_p;
8912
8913 if (is_type_die (die)
8914 && (c = get_AT_ref (die, DW_AT_signature)))
8915 {
8916 /* This is a local skeleton; use it for local references. */
8917 ref_p = lookup_external_ref (map, c);
8918 ref_p->stub = die;
8919 }
8920
8921 /* Scan the DIE references, and remember any that refer to DIEs from
8922 other CUs (i.e. those which are not marked). */
8923 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8924 if (AT_class (a) == dw_val_class_die_ref
8925 && (c = AT_ref (a))->die_mark == 0
8926 && is_type_die (c))
8927 {
8928 ref_p = lookup_external_ref (map, c);
8929 ref_p->n_refs++;
8930 }
8931
8932 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8933 }
8934
8935 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8936 points to an external_ref, DATA is the CU we're processing. If we don't
8937 already have a local stub, and we have multiple refs, build a stub. */
8938
8939 int
8940 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8941 {
8942 struct external_ref *ref_p = *slot;
8943
8944 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
8945 {
8946 /* We have multiple references to this type, so build a small stub.
8947 Both of these forms are a bit dodgy from the perspective of the
8948 DWARF standard, since technically they should have names. */
8949 dw_die_ref cu = data;
8950 dw_die_ref type = ref_p->type;
8951 dw_die_ref stub = NULL;
8952
8953 if (type->comdat_type_p)
8954 {
8955 /* If we refer to this type via sig8, use AT_signature. */
8956 stub = new_die (type->die_tag, cu, NULL_TREE);
8957 add_AT_die_ref (stub, DW_AT_signature, type);
8958 }
8959 else
8960 {
8961 /* Otherwise, use a typedef with no name. */
8962 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
8963 add_AT_die_ref (stub, DW_AT_type, type);
8964 }
8965
8966 stub->die_mark++;
8967 ref_p->stub = stub;
8968 }
8969 return 1;
8970 }
8971
8972 /* DIE is a unit; look through all the DIE references to see if there are
8973 any external references to types, and if so, create local stubs for
8974 them which will be applied in build_abbrev_table. This is useful because
8975 references to local DIEs are smaller. */
8976
8977 static external_ref_hash_type *
8978 optimize_external_refs (dw_die_ref die)
8979 {
8980 external_ref_hash_type *map = new external_ref_hash_type (10);
8981 optimize_external_refs_1 (die, map);
8982 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
8983 return map;
8984 }
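/* Illustrative usage sketch (added for exposition; the exact caller code is
   an assumption, not a verbatim copy from elsewhere in this file): the map
   returned here is meant to be handed to build_abbrev_table, which redirects
   external type references to the stubs recorded above, and is freed once
   the abbreviations have been built.

     external_ref_hash_type *extern_map = optimize_external_refs (die);
     build_abbrev_table (die, extern_map);
     delete extern_map;
*/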
8985
8986 /* The following 3 variables are temporaries that are computed only during the
8987 build_abbrev_table call and used and released during the following
8988 optimize_abbrev_table call. */
8989
8990 /* First abbrev_id that can be optimized based on usage. */
8991 static unsigned int abbrev_opt_start;
8992
8993 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
8994 abbrev_id smaller than this, because they must be already sized
8995 during build_abbrev_table). */
8996 static unsigned int abbrev_opt_base_type_end;
8997
8998 /* Vector of usage counts during build_abbrev_table. Indexed by
8999 abbrev_id - abbrev_opt_start. */
9000 static vec<unsigned int> abbrev_usage_count;
9001
9002 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
9003 static vec<dw_die_ref> sorted_abbrev_dies;
9004
9005 /* The format of each DIE (and its attribute value pairs) is encoded in an
9006 abbreviation table. This routine builds the abbreviation table and assigns
9007 a unique abbreviation id for each abbreviation entry. The children of each
9008 die are visited recursively. */
9009
9010 static void
9011 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
9012 {
9013 unsigned int abbrev_id = 0;
9014 dw_die_ref c;
9015 dw_attr_node *a;
9016 unsigned ix;
9017 dw_die_ref abbrev;
9018
9019 /* Scan the DIE references, and replace any that refer to
9020 DIEs from other CUs (i.e. those which are not marked) with
9021 the local stubs we built in optimize_external_refs. */
9022 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9023 if (AT_class (a) == dw_val_class_die_ref
9024 && (c = AT_ref (a))->die_mark == 0)
9025 {
9026 struct external_ref *ref_p;
9027 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
9028
9029 if (is_type_die (c)
9030 && (ref_p = lookup_external_ref (extern_map, c))
9031 && ref_p->stub && ref_p->stub != die)
9032 change_AT_die_ref (a, ref_p->stub);
9033 else
9034 /* We aren't changing this reference, so mark it external. */
9035 set_AT_ref_external (a, 1);
9036 }
9037
9038 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9039 {
9040 dw_attr_node *die_a, *abbrev_a;
9041 unsigned ix;
9042 bool ok = true;
9043
9044 if (abbrev_id == 0)
9045 continue;
9046 if (abbrev->die_tag != die->die_tag)
9047 continue;
9048 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
9049 continue;
9050
9051 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
9052 continue;
9053
9054 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
9055 {
9056 abbrev_a = &(*abbrev->die_attr)[ix];
9057 if ((abbrev_a->dw_attr != die_a->dw_attr)
9058 || (value_format (abbrev_a) != value_format (die_a)))
9059 {
9060 ok = false;
9061 break;
9062 }
9063 }
9064 if (ok)
9065 break;
9066 }
9067
9068 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9069 {
9070 vec_safe_push (abbrev_die_table, die);
9071 if (abbrev_opt_start)
9072 abbrev_usage_count.safe_push (0);
9073 }
9074 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9075 {
9076 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9077 sorted_abbrev_dies.safe_push (die);
9078 }
9079
9080 die->die_abbrev = abbrev_id;
9081 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9082 }
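/* A hedged worked example of the matching loop above (illustration only):
   two DW_TAG_variable DIEs that both carry DW_AT_name (DW_FORM_strp) and
   DW_AT_type (a local DW_FORM_ref) compare equal attribute-by-attribute and
   so share one abbreviation id; a third DW_TAG_variable DIE that adds
   DW_AT_declaration, or whose DW_AT_name uses DW_FORM_string instead, fails
   the comparisons above (attribute count in the first case, value_format in
   the second) and is pushed onto abbrev_die_table as a new abbreviation.  */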
9083
9084 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9085 by die_abbrev's usage count, from the most commonly used
9086 abbreviation to the least. */
9087
9088 static int
9089 die_abbrev_cmp (const void *p1, const void *p2)
9090 {
9091 dw_die_ref die1 = *(const dw_die_ref *) p1;
9092 dw_die_ref die2 = *(const dw_die_ref *) p2;
9093
9094 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9095 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9096
9097 if (die1->die_abbrev >= abbrev_opt_base_type_end
9098 && die2->die_abbrev >= abbrev_opt_base_type_end)
9099 {
9100 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9101 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9102 return -1;
9103 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9104 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9105 return 1;
9106 }
9107
9108 /* Stabilize the sort. */
9109 if (die1->die_abbrev < die2->die_abbrev)
9110 return -1;
9111 if (die1->die_abbrev > die2->die_abbrev)
9112 return 1;
9113
9114 return 0;
9115 }
9116
9117 /* Convert dw_val_class_const, dw_val_class_unsigned_const and dw_val_class_file
9118 class attributes of DIEs between sorted_abbrev_dies[first_id] and
9119 sorted_abbrev_dies[end - 1] into dw_val_class_const_implicit,
9120 dw_val_class_unsigned_const_implicit or dw_val_class_file_implicit. */
9121
9122 static void
9123 optimize_implicit_const (unsigned int first_id, unsigned int end,
9124 vec<bool> &implicit_consts)
9125 {
9126 /* It never makes sense if there is just one DIE using the abbreviation. */
9127 if (end < first_id + 2)
9128 return;
9129
9130 dw_attr_node *a;
9131 unsigned ix, i;
9132 dw_die_ref die = sorted_abbrev_dies[first_id];
9133 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9134 if (implicit_consts[ix])
9135 {
9136 enum dw_val_class new_class = dw_val_class_none;
9137 switch (AT_class (a))
9138 {
9139 case dw_val_class_unsigned_const:
9140 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9141 continue;
9142
9143 /* The .debug_abbrev section will grow by
9144 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
9145 in all the DIEs using that abbreviation. */
9146 if (constant_size (AT_unsigned (a)) * (end - first_id)
9147 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9148 continue;
9149
9150 new_class = dw_val_class_unsigned_const_implicit;
9151 break;
9152
9153 case dw_val_class_const:
9154 new_class = dw_val_class_const_implicit;
9155 break;
9156
9157 case dw_val_class_file:
9158 new_class = dw_val_class_file_implicit;
9159 break;
9160
9161 default:
9162 continue;
9163 }
9164 for (i = first_id; i < end; i++)
9165 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9166 = new_class;
9167 }
9168 }
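/* A hedged worked example of the size test above (illustration only): for
   AT_unsigned (a) == 0x1234, constant_size gives 2 bytes per DIE and
   size_of_sleb128 gives 2 bytes of growth in .debug_abbrev, so with
   end - first_id == 3 DIEs sharing the abbreviation the comparison is
   2 * 3 > 2 and the attribute is switched to
   dw_val_class_unsigned_const_implicit.  */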
9169
9170 /* Attempt to optimize the abbreviation table for abbreviations with id
9171 abbrev_opt_start and above. */
9172
9173 static void
9174 optimize_abbrev_table (void)
9175 {
9176 if (abbrev_opt_start
9177 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9178 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9179 {
9180 auto_vec<bool, 32> implicit_consts;
9181 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9182
9183 unsigned int abbrev_id = abbrev_opt_start - 1;
9184 unsigned int first_id = ~0U;
9185 unsigned int last_abbrev_id = 0;
9186 unsigned int i;
9187 dw_die_ref die;
9188 if (abbrev_opt_base_type_end > abbrev_opt_start)
9189 abbrev_id = abbrev_opt_base_type_end - 1;
9190 /* Reassign abbreviation ids from abbrev_opt_start above, so that
9191 most commonly used abbreviations come first. */
9192 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9193 {
9194 dw_attr_node *a;
9195 unsigned ix;
9196
9197 /* If calc_base_type_die_sizes has been called, the CU and
9198 base types after it can't be optimized, because we've already
9199 calculated their DIE offsets. We've sorted them first. */
9200 if (die->die_abbrev < abbrev_opt_base_type_end)
9201 continue;
9202 if (die->die_abbrev != last_abbrev_id)
9203 {
9204 last_abbrev_id = die->die_abbrev;
9205 if (dwarf_version >= 5 && first_id != ~0U)
9206 optimize_implicit_const (first_id, i, implicit_consts);
9207 abbrev_id++;
9208 (*abbrev_die_table)[abbrev_id] = die;
9209 if (dwarf_version >= 5)
9210 {
9211 first_id = i;
9212 implicit_consts.truncate (0);
9213
9214 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9215 switch (AT_class (a))
9216 {
9217 case dw_val_class_const:
9218 case dw_val_class_unsigned_const:
9219 case dw_val_class_file:
9220 implicit_consts.safe_push (true);
9221 break;
9222 default:
9223 implicit_consts.safe_push (false);
9224 break;
9225 }
9226 }
9227 }
9228 else if (dwarf_version >= 5)
9229 {
9230 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9231 if (!implicit_consts[ix])
9232 continue;
9233 else
9234 {
9235 dw_attr_node *other_a
9236 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9237 if (!dw_val_equal_p (&a->dw_attr_val,
9238 &other_a->dw_attr_val))
9239 implicit_consts[ix] = false;
9240 }
9241 }
9242 die->die_abbrev = abbrev_id;
9243 }
9244 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9245 if (dwarf_version >= 5 && first_id != ~0U)
9246 optimize_implicit_const (first_id, i, implicit_consts);
9247 }
9248
9249 abbrev_opt_start = 0;
9250 abbrev_opt_base_type_end = 0;
9251 abbrev_usage_count.release ();
9252 sorted_abbrev_dies.release ();
9253 }
9254 \f
9255 /* Return the power-of-two number of bytes necessary to represent VALUE. */
9256
9257 static int
9258 constant_size (unsigned HOST_WIDE_INT value)
9259 {
9260 int log;
9261
9262 if (value == 0)
9263 log = 0;
9264 else
9265 log = floor_log2 (value);
9266
9267 log = log / 8;
9268 log = 1 << (floor_log2 (log) + 1);
9269
9270 return log;
9271 }
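/* Illustrative self-check sketch (not part of the original file; it assumes
   GCC's floor_log2 (0) == -1 convention): constant_size rounds the byte
   length of VALUE up to a power of two, matching the fixed-size
   DW_FORM_data1/2/4/8 forms used below.  */
#if 0 /* example only */
static void
constant_size_examples (void)
{
  gcc_assert (constant_size (0) == 1);
  gcc_assert (constant_size (0x7f) == 1);
  gcc_assert (constant_size (0x100) == 2);
  gcc_assert (constant_size (0x10000) == 4);
  gcc_assert (constant_size (HOST_WIDE_INT_1U << 32) == 8);
}
#endif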
9272
9273 /* Return the size of a DIE as it is represented in the
9274 .debug_info section. */
9275
9276 static unsigned long
9277 size_of_die (dw_die_ref die)
9278 {
9279 unsigned long size = 0;
9280 dw_attr_node *a;
9281 unsigned ix;
9282 enum dwarf_form form;
9283
9284 size += size_of_uleb128 (die->die_abbrev);
9285 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9286 {
9287 switch (AT_class (a))
9288 {
9289 case dw_val_class_addr:
9290 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9291 {
9292 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9293 size += size_of_uleb128 (AT_index (a));
9294 }
9295 else
9296 size += DWARF2_ADDR_SIZE;
9297 break;
9298 case dw_val_class_offset:
9299 size += DWARF_OFFSET_SIZE;
9300 break;
9301 case dw_val_class_loc:
9302 {
9303 unsigned long lsize = size_of_locs (AT_loc (a));
9304
9305 /* Block length. */
9306 if (dwarf_version >= 4)
9307 size += size_of_uleb128 (lsize);
9308 else
9309 size += constant_size (lsize);
9310 size += lsize;
9311 }
9312 break;
9313 case dw_val_class_loc_list:
9314 case dw_val_class_view_list:
9315 if (dwarf_split_debug_info && dwarf_version >= 5)
9316 {
9317 gcc_assert (AT_loc_list (a)->num_assigned);
9318 size += size_of_uleb128 (AT_loc_list (a)->hash);
9319 }
9320 else
9321 size += DWARF_OFFSET_SIZE;
9322 break;
9323 case dw_val_class_range_list:
9324 if (value_format (a) == DW_FORM_rnglistx)
9325 {
9326 gcc_assert (rnglist_idx);
9327 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9328 size += size_of_uleb128 (r->idx);
9329 }
9330 else
9331 size += DWARF_OFFSET_SIZE;
9332 break;
9333 case dw_val_class_const:
9334 size += size_of_sleb128 (AT_int (a));
9335 break;
9336 case dw_val_class_unsigned_const:
9337 {
9338 int csize = constant_size (AT_unsigned (a));
9339 if (dwarf_version == 3
9340 && a->dw_attr == DW_AT_data_member_location
9341 && csize >= 4)
9342 size += size_of_uleb128 (AT_unsigned (a));
9343 else
9344 size += csize;
9345 }
9346 break;
9347 case dw_val_class_symview:
9348 if (symview_upper_bound <= 0xff)
9349 size += 1;
9350 else if (symview_upper_bound <= 0xffff)
9351 size += 2;
9352 else if (symview_upper_bound <= 0xffffffff)
9353 size += 4;
9354 else
9355 size += 8;
9356 break;
9357 case dw_val_class_const_implicit:
9358 case dw_val_class_unsigned_const_implicit:
9359 case dw_val_class_file_implicit:
9360 /* These occupy no size in the DIE, just an extra sleb128 in
9361 .debug_abbrev. */
9362 break;
9363 case dw_val_class_const_double:
9364 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9365 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9366 size++; /* block */
9367 break;
9368 case dw_val_class_wide_int:
9369 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9370 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9371 if (get_full_len (*a->dw_attr_val.v.val_wide)
9372 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9373 size++; /* block */
9374 break;
9375 case dw_val_class_vec:
9376 size += constant_size (a->dw_attr_val.v.val_vec.length
9377 * a->dw_attr_val.v.val_vec.elt_size)
9378 + a->dw_attr_val.v.val_vec.length
9379 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9380 break;
9381 case dw_val_class_flag:
9382 if (dwarf_version >= 4)
9383 /* Currently all add_AT_flag calls pass in 1 as last argument,
9384 so DW_FORM_flag_present can be used. If that ever changes,
9385 we'll need to use DW_FORM_flag and have some optimization
9386 in build_abbrev_table that will change those to
9387 DW_FORM_flag_present if it is set to 1 in all DIEs using
9388 the same abbrev entry. */
9389 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9390 else
9391 size += 1;
9392 break;
9393 case dw_val_class_die_ref:
9394 if (AT_ref_external (a))
9395 {
9396 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9397 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9398 is sized by target address length, whereas in DWARF3
9399 it's always sized as an offset. */
9400 if (use_debug_types)
9401 size += DWARF_TYPE_SIGNATURE_SIZE;
9402 else if (dwarf_version == 2)
9403 size += DWARF2_ADDR_SIZE;
9404 else
9405 size += DWARF_OFFSET_SIZE;
9406 }
9407 else
9408 size += DWARF_OFFSET_SIZE;
9409 break;
9410 case dw_val_class_fde_ref:
9411 size += DWARF_OFFSET_SIZE;
9412 break;
9413 case dw_val_class_lbl_id:
9414 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9415 {
9416 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9417 size += size_of_uleb128 (AT_index (a));
9418 }
9419 else
9420 size += DWARF2_ADDR_SIZE;
9421 break;
9422 case dw_val_class_lineptr:
9423 case dw_val_class_macptr:
9424 case dw_val_class_loclistsptr:
9425 size += DWARF_OFFSET_SIZE;
9426 break;
9427 case dw_val_class_str:
9428 form = AT_string_form (a);
9429 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9430 size += DWARF_OFFSET_SIZE;
9431 else if (form == dwarf_FORM (DW_FORM_strx))
9432 size += size_of_uleb128 (AT_index (a));
9433 else
9434 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9435 break;
9436 case dw_val_class_file:
9437 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9438 break;
9439 case dw_val_class_data8:
9440 size += 8;
9441 break;
9442 case dw_val_class_vms_delta:
9443 size += DWARF_OFFSET_SIZE;
9444 break;
9445 case dw_val_class_high_pc:
9446 size += DWARF2_ADDR_SIZE;
9447 break;
9448 case dw_val_class_discr_value:
9449 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9450 break;
9451 case dw_val_class_discr_list:
9452 {
9453 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9454
9455 /* This is a block, so we have the block length and then its
9456 data. */
9457 size += constant_size (block_size) + block_size;
9458 }
9459 break;
9460 default:
9461 gcc_unreachable ();
9462 }
9463 }
9464
9465 return size;
9466 }
9467
9468 /* Size the debugging information associated with a given DIE. Visits the
9469 DIE's children recursively. Updates the global variable next_die_offset
9470 each time through. Uses the current value of next_die_offset to update the
9471 die_offset field in each DIE. */
9472
9473 static void
9474 calc_die_sizes (dw_die_ref die)
9475 {
9476 dw_die_ref c;
9477
9478 gcc_assert (die->die_offset == 0
9479 || (unsigned long int) die->die_offset == next_die_offset);
9480 die->die_offset = next_die_offset;
9481 next_die_offset += size_of_die (die);
9482
9483 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9484
9485 if (die->die_child != NULL)
9486 /* Count the null byte used to terminate sibling lists. */
9487 next_die_offset += 1;
9488 }
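/* A hedged worked example of the offset bookkeeping above (illustration
   only): if a DIE is visited when next_die_offset is, say, 11, it gets
   die_offset 11 and next_die_offset advances by 1 (its abbrev code) + 1
   (a one-byte unsigned constant attribute) + DWARF_OFFSET_SIZE (a
   DW_FORM_strp string attribute); after the last child of any DIE that has
   children, one more byte is counted for the terminating null entry.  */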
9489
9490 /* Size just the base type children at the start of the CU.
9491 This is needed because build_abbrev_table needs to size location
9492 expressions, and sizing of type-based stack ops needs to know the
9493 die_offset values for the base types. */
9494
9495 static void
9496 calc_base_type_die_sizes (void)
9497 {
9498 unsigned long die_offset = (dwarf_split_debug_info
9499 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9500 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9501 unsigned int i;
9502 dw_die_ref base_type;
9503 #if ENABLE_ASSERT_CHECKING
9504 dw_die_ref prev = comp_unit_die ()->die_child;
9505 #endif
9506
9507 die_offset += size_of_die (comp_unit_die ());
9508 for (i = 0; base_types.iterate (i, &base_type); i++)
9509 {
9510 #if ENABLE_ASSERT_CHECKING
9511 gcc_assert (base_type->die_offset == 0
9512 && prev->die_sib == base_type
9513 && base_type->die_child == NULL
9514 && base_type->die_abbrev);
9515 prev = base_type;
9516 #endif
9517 if (abbrev_opt_start
9518 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9519 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9520 base_type->die_offset = die_offset;
9521 die_offset += size_of_die (base_type);
9522 }
9523 }
9524
9525 /* Set the marks for a die and its children. We do this so
9526 that we know whether or not a reference needs to use DW_FORM_ref_addr; only
9527 DIEs in the same CU will be marked. We used to clear out the offset
9528 and use that as the flag, but ran into ordering problems. */
9529
9530 static void
9531 mark_dies (dw_die_ref die)
9532 {
9533 dw_die_ref c;
9534
9535 gcc_assert (!die->die_mark);
9536
9537 die->die_mark = 1;
9538 FOR_EACH_CHILD (die, c, mark_dies (c));
9539 }
9540
9541 /* Clear the marks for a die and its children. */
9542
9543 static void
9544 unmark_dies (dw_die_ref die)
9545 {
9546 dw_die_ref c;
9547
9548 if (! use_debug_types)
9549 gcc_assert (die->die_mark);
9550
9551 die->die_mark = 0;
9552 FOR_EACH_CHILD (die, c, unmark_dies (c));
9553 }
9554
9555 /* Clear the marks for a die, its children and referred dies. */
9556
9557 static void
9558 unmark_all_dies (dw_die_ref die)
9559 {
9560 dw_die_ref c;
9561 dw_attr_node *a;
9562 unsigned ix;
9563
9564 if (!die->die_mark)
9565 return;
9566 die->die_mark = 0;
9567
9568 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9569
9570 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9571 if (AT_class (a) == dw_val_class_die_ref)
9572 unmark_all_dies (AT_ref (a));
9573 }
9574
9575 /* Calculate whether the entry should appear in the final output file. It may be
9576 from a pruned type. */
9577
9578 static bool
9579 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9580 {
9581 /* By limiting gnu pubnames to definitions only, gold can generate a
9582 gdb index without entries for declarations, which don't include
9583 enough information to be useful. */
9584 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9585 return false;
9586
9587 if (table == pubname_table)
9588 {
9589 /* Enumerator names are part of the pubname table, but the
9590 parent DW_TAG_enumeration_type die may have been pruned.
9591 Don't output them if that is the case. */
9592 if (p->die->die_tag == DW_TAG_enumerator &&
9593 (p->die->die_parent == NULL
9594 || !p->die->die_parent->die_perennial_p))
9595 return false;
9596
9597 /* Everything else in the pubname table is included. */
9598 return true;
9599 }
9600
9601 /* The pubtypes table shouldn't include types that have been
9602 pruned. */
9603 return (p->die->die_offset != 0
9604 || !flag_eliminate_unused_debug_types);
9605 }
9606
9607 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9608 generated for the compilation unit. */
9609
9610 static unsigned long
9611 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9612 {
9613 unsigned long size;
9614 unsigned i;
9615 pubname_entry *p;
9616 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9617
9618 size = DWARF_PUBNAMES_HEADER_SIZE;
9619 FOR_EACH_VEC_ELT (*names, i, p)
9620 if (include_pubname_in_output (names, p))
9621 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9622
9623 size += DWARF_OFFSET_SIZE;
9624 return size;
9625 }
9626
9627 /* Return the size of the information in the .debug_aranges section. */
9628
9629 static unsigned long
9630 size_of_aranges (void)
9631 {
9632 unsigned long size;
9633
9634 size = DWARF_ARANGES_HEADER_SIZE;
9635
9636 /* Count the address/length pair for this compilation unit. */
9637 if (text_section_used)
9638 size += 2 * DWARF2_ADDR_SIZE;
9639 if (cold_text_section_used)
9640 size += 2 * DWARF2_ADDR_SIZE;
9641 if (have_multiple_function_sections)
9642 {
9643 unsigned fde_idx;
9644 dw_fde_ref fde;
9645
9646 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9647 {
9648 if (DECL_IGNORED_P (fde->decl))
9649 continue;
9650 if (!fde->in_std_section)
9651 size += 2 * DWARF2_ADDR_SIZE;
9652 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9653 size += 2 * DWARF2_ADDR_SIZE;
9654 }
9655 }
9656
9657 /* Count the two zero words used to terminate the address range table. */
9658 size += 2 * DWARF2_ADDR_SIZE;
9659 return size;
9660 }
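/* A hedged worked example (illustration only): with DWARF2_ADDR_SIZE == 4
   and all code in the standard .text section, the CU contributes
   DWARF_ARANGES_HEADER_SIZE plus 8 bytes for the single address/length pair
   plus 8 more for the terminating pair of zero words.  */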
9661 \f
9662 /* Select the encoding of an attribute value. */
9663
9664 static enum dwarf_form
9665 value_format (dw_attr_node *a)
9666 {
9667 switch (AT_class (a))
9668 {
9669 case dw_val_class_addr:
9670 /* Only very few attributes allow DW_FORM_addr. */
9671 switch (a->dw_attr)
9672 {
9673 case DW_AT_low_pc:
9674 case DW_AT_high_pc:
9675 case DW_AT_entry_pc:
9676 case DW_AT_trampoline:
9677 return (AT_index (a) == NOT_INDEXED
9678 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9679 default:
9680 break;
9681 }
9682 switch (DWARF2_ADDR_SIZE)
9683 {
9684 case 1:
9685 return DW_FORM_data1;
9686 case 2:
9687 return DW_FORM_data2;
9688 case 4:
9689 return DW_FORM_data4;
9690 case 8:
9691 return DW_FORM_data8;
9692 default:
9693 gcc_unreachable ();
9694 }
9695 case dw_val_class_loc_list:
9696 case dw_val_class_view_list:
9697 if (dwarf_split_debug_info
9698 && dwarf_version >= 5
9699 && AT_loc_list (a)->num_assigned)
9700 return DW_FORM_loclistx;
9701 /* FALLTHRU */
9702 case dw_val_class_range_list:
9703 /* For range lists in DWARF 5, use DW_FORM_rnglistx from .debug_info.dwo
9704 but in .debug_info use DW_FORM_sec_offset, which is shorter if we
9705 care about sizes of .debug* sections in shared libraries and
9706 executables and don't take into account relocations that affect just
9707 relocatable objects - for DW_FORM_rnglistx we'd have to emit an offset
9708 table in the .debug_rnglists section. */
9709 if (dwarf_split_debug_info
9710 && dwarf_version >= 5
9711 && AT_class (a) == dw_val_class_range_list
9712 && rnglist_idx
9713 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9714 return DW_FORM_rnglistx;
9715 if (dwarf_version >= 4)
9716 return DW_FORM_sec_offset;
9717 /* FALLTHRU */
9718 case dw_val_class_vms_delta:
9719 case dw_val_class_offset:
9720 switch (DWARF_OFFSET_SIZE)
9721 {
9722 case 4:
9723 return DW_FORM_data4;
9724 case 8:
9725 return DW_FORM_data8;
9726 default:
9727 gcc_unreachable ();
9728 }
9729 case dw_val_class_loc:
9730 if (dwarf_version >= 4)
9731 return DW_FORM_exprloc;
9732 switch (constant_size (size_of_locs (AT_loc (a))))
9733 {
9734 case 1:
9735 return DW_FORM_block1;
9736 case 2:
9737 return DW_FORM_block2;
9738 case 4:
9739 return DW_FORM_block4;
9740 default:
9741 gcc_unreachable ();
9742 }
9743 case dw_val_class_const:
9744 return DW_FORM_sdata;
9745 case dw_val_class_unsigned_const:
9746 switch (constant_size (AT_unsigned (a)))
9747 {
9748 case 1:
9749 return DW_FORM_data1;
9750 case 2:
9751 return DW_FORM_data2;
9752 case 4:
9753 /* In DWARF3 DW_AT_data_member_location with
9754 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not a
9755 constant, so we need to use DW_FORM_udata if we need
9756 a large constant. */
9757 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9758 return DW_FORM_udata;
9759 return DW_FORM_data4;
9760 case 8:
9761 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9762 return DW_FORM_udata;
9763 return DW_FORM_data8;
9764 default:
9765 gcc_unreachable ();
9766 }
9767 case dw_val_class_const_implicit:
9768 case dw_val_class_unsigned_const_implicit:
9769 case dw_val_class_file_implicit:
9770 return DW_FORM_implicit_const;
9771 case dw_val_class_const_double:
9772 switch (HOST_BITS_PER_WIDE_INT)
9773 {
9774 case 8:
9775 return DW_FORM_data2;
9776 case 16:
9777 return DW_FORM_data4;
9778 case 32:
9779 return DW_FORM_data8;
9780 case 64:
9781 if (dwarf_version >= 5)
9782 return DW_FORM_data16;
9783 /* FALLTHRU */
9784 default:
9785 return DW_FORM_block1;
9786 }
9787 case dw_val_class_wide_int:
9788 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9789 {
9790 case 8:
9791 return DW_FORM_data1;
9792 case 16:
9793 return DW_FORM_data2;
9794 case 32:
9795 return DW_FORM_data4;
9796 case 64:
9797 return DW_FORM_data8;
9798 case 128:
9799 if (dwarf_version >= 5)
9800 return DW_FORM_data16;
9801 /* FALLTHRU */
9802 default:
9803 return DW_FORM_block1;
9804 }
9805 case dw_val_class_symview:
9806 /* ??? We might use uleb128, but then we'd have to compute
9807 .debug_info offsets in the assembler. */
9808 if (symview_upper_bound <= 0xff)
9809 return DW_FORM_data1;
9810 else if (symview_upper_bound <= 0xffff)
9811 return DW_FORM_data2;
9812 else if (symview_upper_bound <= 0xffffffff)
9813 return DW_FORM_data4;
9814 else
9815 return DW_FORM_data8;
9816 case dw_val_class_vec:
9817 switch (constant_size (a->dw_attr_val.v.val_vec.length
9818 * a->dw_attr_val.v.val_vec.elt_size))
9819 {
9820 case 1:
9821 return DW_FORM_block1;
9822 case 2:
9823 return DW_FORM_block2;
9824 case 4:
9825 return DW_FORM_block4;
9826 default:
9827 gcc_unreachable ();
9828 }
9829 case dw_val_class_flag:
9830 if (dwarf_version >= 4)
9831 {
9832 /* Currently all add_AT_flag calls pass in 1 as last argument,
9833 so DW_FORM_flag_present can be used. If that ever changes,
9834 we'll need to use DW_FORM_flag and have some optimization
9835 in build_abbrev_table that will change those to
9836 DW_FORM_flag_present if it is set to 1 in all DIEs using
9837 the same abbrev entry. */
9838 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9839 return DW_FORM_flag_present;
9840 }
9841 return DW_FORM_flag;
9842 case dw_val_class_die_ref:
9843 if (AT_ref_external (a))
9844 return use_debug_types ? DW_FORM_ref_sig8 : DW_FORM_ref_addr;
9845 else
9846 return DW_FORM_ref;
9847 case dw_val_class_fde_ref:
9848 return DW_FORM_data;
9849 case dw_val_class_lbl_id:
9850 return (AT_index (a) == NOT_INDEXED
9851 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9852 case dw_val_class_lineptr:
9853 case dw_val_class_macptr:
9854 case dw_val_class_loclistsptr:
9855 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9856 case dw_val_class_str:
9857 return AT_string_form (a);
9858 case dw_val_class_file:
9859 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9860 {
9861 case 1:
9862 return DW_FORM_data1;
9863 case 2:
9864 return DW_FORM_data2;
9865 case 4:
9866 return DW_FORM_data4;
9867 default:
9868 gcc_unreachable ();
9869 }
9870
9871 case dw_val_class_data8:
9872 return DW_FORM_data8;
9873
9874 case dw_val_class_high_pc:
9875 switch (DWARF2_ADDR_SIZE)
9876 {
9877 case 1:
9878 return DW_FORM_data1;
9879 case 2:
9880 return DW_FORM_data2;
9881 case 4:
9882 return DW_FORM_data4;
9883 case 8:
9884 return DW_FORM_data8;
9885 default:
9886 gcc_unreachable ();
9887 }
9888
9889 case dw_val_class_discr_value:
9890 return (a->dw_attr_val.v.val_discr_value.pos
9891 ? DW_FORM_udata
9892 : DW_FORM_sdata);
9893 case dw_val_class_discr_list:
9894 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9895 {
9896 case 1:
9897 return DW_FORM_block1;
9898 case 2:
9899 return DW_FORM_block2;
9900 case 4:
9901 return DW_FORM_block4;
9902 default:
9903 gcc_unreachable ();
9904 }
9905
9906 default:
9907 gcc_unreachable ();
9908 }
9909 }
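/* A hedged worked example (illustration only): a dw_val_class_unsigned_const
   attribute with value 0x1f4 has constant_size == 2 and is therefore given
   DW_FORM_data2; the same value on DW_AT_data_member_location under
   -gdwarf-3 keeps that form, because the DW_FORM_udata special case above
   only triggers for constants that would need 4 or 8 bytes.  */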
9910
9911 /* Output the encoding of an attribute value. */
9912
9913 static void
9914 output_value_format (dw_attr_node *a)
9915 {
9916 enum dwarf_form form = value_format (a);
9917
9918 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9919 }
9920
9921 /* Given a die and id, produce the appropriate abbreviations. */
9922
9923 static void
9924 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9925 {
9926 unsigned ix;
9927 dw_attr_node *a_attr;
9928
9929 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9930 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9931 dwarf_tag_name (abbrev->die_tag));
9932
9933 if (abbrev->die_child != NULL)
9934 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
9935 else
9936 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
9937
9938 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
9939 {
9940 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
9941 dwarf_attr_name (a_attr->dw_attr));
9942 output_value_format (a_attr);
9943 if (value_format (a_attr) == DW_FORM_implicit_const)
9944 {
9945 if (AT_class (a_attr) == dw_val_class_file_implicit)
9946 {
9947 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
9948 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
9949 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
9950 }
9951 else
9952 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
9953 }
9954 }
9955
9956 dw2_asm_output_data (1, 0, NULL);
9957 dw2_asm_output_data (1, 0, NULL);
9958 }
9959
9960
9961 /* Output the .debug_abbrev section which defines the DIE abbreviation
9962 table. */
9963
9964 static void
9965 output_abbrev_section (void)
9966 {
9967 unsigned int abbrev_id;
9968 dw_die_ref abbrev;
9969
9970 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9971 if (abbrev_id != 0)
9972 output_die_abbrevs (abbrev_id, abbrev);
9973
9974 /* Terminate the table. */
9975 dw2_asm_output_data (1, 0, NULL);
9976 }
9977
9978 /* Return a new location list, given the begin and end range, and the
9979 expression. */
9980
9981 static inline dw_loc_list_ref
9982 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
9983 const char *end, var_loc_view vend,
9984 const char *section)
9985 {
9986 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
9987
9988 retlist->begin = begin;
9989 retlist->begin_entry = NULL;
9990 retlist->end = end;
9991 retlist->expr = expr;
9992 retlist->section = section;
9993 retlist->vbegin = vbegin;
9994 retlist->vend = vend;
9995
9996 return retlist;
9997 }
9998
9999 /* Return true iff there's any nonzero view number in the loc list.
10000
10001 ??? When views are not enabled, we'll often extend a single range
10002 to the entire function, so that we emit a single location
10003 expression rather than a location list. With views, even with a
10004 single range, we'll output a list if start or end have a nonzero
10005 view. If we change this, we may want to stop splitting a single
10006 range in dw_loc_list just because of a nonzero view, even if it
10007 straddles across hot/cold partitions. */
10008
10009 static bool
10010 loc_list_has_views (dw_loc_list_ref list)
10011 {
10012 if (!debug_variable_location_views)
10013 return false;
10014
10015 for (dw_loc_list_ref loc = list;
10016 loc != NULL; loc = loc->dw_loc_next)
10017 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
10018 return true;
10019
10020 return false;
10021 }
10022
10023 /* Generate a new internal symbol for this location list node, if it
10024 hasn't got one yet. */
10025
10026 static inline void
10027 gen_llsym (dw_loc_list_ref list)
10028 {
10029 gcc_assert (!list->ll_symbol);
10030 list->ll_symbol = gen_internal_sym ("LLST");
10031
10032 if (!loc_list_has_views (list))
10033 return;
10034
10035 if (dwarf2out_locviews_in_attribute ())
10036 {
10037 /* Use the same label_num for the view list. */
10038 label_num--;
10039 list->vl_symbol = gen_internal_sym ("LVUS");
10040 }
10041 else
10042 list->vl_symbol = list->ll_symbol;
10043 }
10044
10045 /* Generate a symbol for the list, but only if we really want to emit
10046 it as a list. */
10047
10048 static inline void
10049 maybe_gen_llsym (dw_loc_list_ref list)
10050 {
10051 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
10052 return;
10053
10054 gen_llsym (list);
10055 }
10056
10057 /* Determine whether or not to skip loc_list entry CURR. If SIZEP is
10058 NULL, don't consider size of the location expression. If we're not
10059 to skip it, and SIZEP is non-null, store the size of CURR->expr's
10060 representation in *SIZEP. */
10061
10062 static bool
10063 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL)
10064 {
10065 /* Don't output an entry that starts and ends at the same address. */
10066 if (strcmp (curr->begin, curr->end) == 0
10067 && curr->vbegin == curr->vend && !curr->force)
10068 return true;
10069
10070 if (!sizep)
10071 return false;
10072
10073 unsigned long size = size_of_locs (curr->expr);
10074
10075 /* If the expression is too large, drop it on the floor. We could
10076 perhaps put it into DW_TAG_dwarf_procedure and refer to that
10077 in the expression, but >= 64KB expressions for a single value
10078 in a single range are unlikely to be very useful. */
10079 if (dwarf_version < 5 && size > 0xffff)
10080 return true;
10081
10082 *sizep = size;
10083
10084 return false;
10085 }
10086
10087 /* Output a view pair loclist entry for CURR, if it requires one. */
10088
10089 static void
10090 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10091 {
10092 if (!dwarf2out_locviews_in_loclist ())
10093 return;
10094
10095 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10096 return;
10097
10098 #ifdef DW_LLE_view_pair
10099 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10100
10101 if (dwarf2out_as_locview_support)
10102 {
10103 if (ZERO_VIEW_P (curr->vbegin))
10104 dw2_asm_output_data_uleb128 (0, "Location view begin");
10105 else
10106 {
10107 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10108 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10109 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10110 }
10111
10112 if (ZERO_VIEW_P (curr->vend))
10113 dw2_asm_output_data_uleb128 (0, "Location view end");
10114 else
10115 {
10116 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10117 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10118 dw2_asm_output_symname_uleb128 (label, "Location view end");
10119 }
10120 }
10121 else
10122 {
10123 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10124 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10125 }
10126 #endif /* DW_LLE_view_pair */
10127
10128 return;
10129 }
10130
10131 /* Output the location list given to us. */
10132
10133 static void
10134 output_loc_list (dw_loc_list_ref list_head)
10135 {
10136 int vcount = 0, lcount = 0;
10137
10138 if (list_head->emitted)
10139 return;
10140 list_head->emitted = true;
10141
10142 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10143 {
10144 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10145
10146 for (dw_loc_list_ref curr = list_head; curr != NULL;
10147 curr = curr->dw_loc_next)
10148 {
10149 unsigned long size;
10150
10151 if (skip_loc_list_entry (curr, &size))
10152 continue;
10153
10154 vcount++;
10155
10156 /* ?? dwarf_split_debug_info? */
10157 if (dwarf2out_as_locview_support)
10158 {
10159 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10160
10161 if (!ZERO_VIEW_P (curr->vbegin))
10162 {
10163 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10164 dw2_asm_output_symname_uleb128 (label,
10165 "View list begin (%s)",
10166 list_head->vl_symbol);
10167 }
10168 else
10169 dw2_asm_output_data_uleb128 (0,
10170 "View list begin (%s)",
10171 list_head->vl_symbol);
10172
10173 if (!ZERO_VIEW_P (curr->vend))
10174 {
10175 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10176 dw2_asm_output_symname_uleb128 (label,
10177 "View list end (%s)",
10178 list_head->vl_symbol);
10179 }
10180 else
10181 dw2_asm_output_data_uleb128 (0,
10182 "View list end (%s)",
10183 list_head->vl_symbol);
10184 }
10185 else
10186 {
10187 dw2_asm_output_data_uleb128 (curr->vbegin,
10188 "View list begin (%s)",
10189 list_head->vl_symbol);
10190 dw2_asm_output_data_uleb128 (curr->vend,
10191 "View list end (%s)",
10192 list_head->vl_symbol);
10193 }
10194 }
10195 }
10196
10197 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10198
10199 const char *last_section = NULL;
10200 const char *base_label = NULL;
10201
10202 /* Walk the location list, and output each range + expression. */
10203 for (dw_loc_list_ref curr = list_head; curr != NULL;
10204 curr = curr->dw_loc_next)
10205 {
10206 unsigned long size;
10207
10208 /* Skip this entry? If we skip it here, we must skip it in the
10209 view list above as well. */
10210 if (skip_loc_list_entry (curr, &size))
10211 continue;
10212
10213 lcount++;
10214
10215 if (dwarf_version >= 5)
10216 {
10217 if (dwarf_split_debug_info)
10218 {
10219 dwarf2out_maybe_output_loclist_view_pair (curr);
10220 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has a
10221 uleb128 index into .debug_addr and a uleb128 length. */
10222 dw2_asm_output_data (1, DW_LLE_startx_length,
10223 "DW_LLE_startx_length (%s)",
10224 list_head->ll_symbol);
10225 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10226 "Location list range start index "
10227 "(%s)", curr->begin);
10228 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10229 For that case we probably need to emit DW_LLE_startx_endx,
10230 but we'd need 2 .debug_addr entries rather than just one. */
10231 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10232 "Location list length (%s)",
10233 list_head->ll_symbol);
10234 }
10235 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10236 {
10237 dwarf2out_maybe_output_loclist_view_pair (curr);
10238 /* If all code is in .text section, the base address is
10239 already provided by the CU attributes. Use
10240 DW_LLE_offset_pair where both addresses are uleb128 encoded
10241 offsets against that base. */
10242 dw2_asm_output_data (1, DW_LLE_offset_pair,
10243 "DW_LLE_offset_pair (%s)",
10244 list_head->ll_symbol);
10245 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10246 "Location list begin address (%s)",
10247 list_head->ll_symbol);
10248 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10249 "Location list end address (%s)",
10250 list_head->ll_symbol);
10251 }
10252 else if (HAVE_AS_LEB128)
10253 {
10254 /* Otherwise, find out how many consecutive entries could share
10255 the same base entry. If just one, emit DW_LLE_start_length,
10256 otherwise emit DW_LLE_base_address for the base address
10257 followed by a series of DW_LLE_offset_pair. */
10258 if (last_section == NULL || curr->section != last_section)
10259 {
10260 dw_loc_list_ref curr2;
10261 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10262 curr2 = curr2->dw_loc_next)
10263 {
10264 if (strcmp (curr2->begin, curr2->end) == 0
10265 && !curr2->force)
10266 continue;
10267 break;
10268 }
10269 if (curr2 == NULL || curr->section != curr2->section)
10270 last_section = NULL;
10271 else
10272 {
10273 last_section = curr->section;
10274 base_label = curr->begin;
10275 dw2_asm_output_data (1, DW_LLE_base_address,
10276 "DW_LLE_base_address (%s)",
10277 list_head->ll_symbol);
10278 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10279 "Base address (%s)",
10280 list_head->ll_symbol);
10281 }
10282 }
10283 /* Only one entry with the same base address. Use
10284 DW_LLE_start_length with absolute address and uleb128
10285 length. */
10286 if (last_section == NULL)
10287 {
10288 dwarf2out_maybe_output_loclist_view_pair (curr);
10289 dw2_asm_output_data (1, DW_LLE_start_length,
10290 "DW_LLE_start_length (%s)",
10291 list_head->ll_symbol);
10292 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10293 "Location list begin address (%s)",
10294 list_head->ll_symbol);
10295 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10296 "Location list length "
10297 "(%s)", list_head->ll_symbol);
10298 }
10299 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
10300 DW_LLE_base_address. */
10301 else
10302 {
10303 dwarf2out_maybe_output_loclist_view_pair (curr);
10304 dw2_asm_output_data (1, DW_LLE_offset_pair,
10305 "DW_LLE_offset_pair (%s)",
10306 list_head->ll_symbol);
10307 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10308 "Location list begin address "
10309 "(%s)", list_head->ll_symbol);
10310 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10311 "Location list end address "
10312 "(%s)", list_head->ll_symbol);
10313 }
10314 }
10315 /* The assembler does not support the .uleb128 directive. Emit
10316 DW_LLE_start_end with a pair of absolute addresses. */
10317 else
10318 {
10319 dwarf2out_maybe_output_loclist_view_pair (curr);
10320 dw2_asm_output_data (1, DW_LLE_start_end,
10321 "DW_LLE_start_end (%s)",
10322 list_head->ll_symbol);
10323 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10324 "Location list begin address (%s)",
10325 list_head->ll_symbol);
10326 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10327 "Location list end address (%s)",
10328 list_head->ll_symbol);
10329 }
10330 }
10331 else if (dwarf_split_debug_info)
10332 {
10333 /* For -gsplit-dwarf -gdwarf-{2,3,4}, emit an index into .debug_addr
10334 and a 4-byte length. */
10335 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10336 "Location list start/length entry (%s)",
10337 list_head->ll_symbol);
10338 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10339 "Location list range start index (%s)",
10340 curr->begin);
10341 /* The length field is 4 bytes. If we ever need to support
10342 an 8-byte length, we can add a new DW_LLE code or fall back
10343 to DW_LLE_GNU_start_end_entry. */
10344 dw2_asm_output_delta (4, curr->end, curr->begin,
10345 "Location list range length (%s)",
10346 list_head->ll_symbol);
10347 }
10348 else if (!have_multiple_function_sections)
10349 {
10350 /* Pair of relative addresses against start of text section. */
10351 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10352 "Location list begin address (%s)",
10353 list_head->ll_symbol);
10354 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10355 "Location list end address (%s)",
10356 list_head->ll_symbol);
10357 }
10358 else
10359 {
10360 /* Pair of absolute addresses. */
10361 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10362 "Location list begin address (%s)",
10363 list_head->ll_symbol);
10364 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10365 "Location list end address (%s)",
10366 list_head->ll_symbol);
10367 }
10368
10369 /* Output the block length for this list of location operations. */
10370 if (dwarf_version >= 5)
10371 dw2_asm_output_data_uleb128 (size, "Location expression size");
10372 else
10373 {
10374 gcc_assert (size <= 0xffff);
10375 dw2_asm_output_data (2, size, "Location expression size");
10376 }
10377
10378 output_loc_sequence (curr->expr, -1);
10379 }
10380
10381 /* And finally list termination. */
10382 if (dwarf_version >= 5)
10383 dw2_asm_output_data (1, DW_LLE_end_of_list,
10384 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10385 else if (dwarf_split_debug_info)
10386 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10387 "Location list terminator (%s)",
10388 list_head->ll_symbol);
10389 else
10390 {
10391 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10392 "Location list terminator begin (%s)",
10393 list_head->ll_symbol);
10394 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10395 "Location list terminator end (%s)",
10396 list_head->ll_symbol);
10397 }
10398
10399 gcc_assert (!list_head->vl_symbol
10400 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10401 }
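/* A hedged sketch of the output shape (illustration only, for the common
   DWARF 5, non-split, single .text section case handled above, assuming an
   assembler with .uleb128 support): each kept range becomes an optional
   DW_LLE_view_pair (when location views go in the list), then
   DW_LLE_offset_pair with uleb128 begin and end offsets against the base
   address already supplied by the CU attributes, then the uleb128 expression
   size and the expression bytes; the whole list is closed by a single
   DW_LLE_end_of_list byte.  */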
10402
10403 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10404 section. Emit a relocated reference if val_entry is NULL, otherwise,
10405 emit an indirect reference. */
10406
10407 static void
10408 output_range_list_offset (dw_attr_node *a)
10409 {
10410 const char *name = dwarf_attr_name (a->dw_attr);
10411
10412 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10413 {
10414 if (dwarf_version >= 5)
10415 {
10416 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10417 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
10418 debug_ranges_section, "%s", name);
10419 }
10420 else
10421 {
10422 char *p = strchr (ranges_section_label, '\0');
10423 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10424 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10425 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
10426 debug_ranges_section, "%s", name);
10427 *p = '\0';
10428 }
10429 }
10430 else if (dwarf_version >= 5)
10431 {
10432 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10433 gcc_assert (rnglist_idx);
10434 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10435 }
10436 else
10437 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10438 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10439 "%s (offset from %s)", name, ranges_section_label);
10440 }
10441
10442 /* Output the offset into the debug_loc section. */
10443
10444 static void
10445 output_loc_list_offset (dw_attr_node *a)
10446 {
10447 char *sym = AT_loc_list (a)->ll_symbol;
10448
10449 gcc_assert (sym);
10450 if (!dwarf_split_debug_info)
10451 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10452 "%s", dwarf_attr_name (a->dw_attr));
10453 else if (dwarf_version >= 5)
10454 {
10455 gcc_assert (AT_loc_list (a)->num_assigned);
10456 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10457 dwarf_attr_name (a->dw_attr),
10458 sym);
10459 }
10460 else
10461 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10462 "%s", dwarf_attr_name (a->dw_attr));
10463 }
10464
10465 /* Output the offset into the debug_loc section. */
10466
10467 static void
10468 output_view_list_offset (dw_attr_node *a)
10469 {
10470 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10471
10472 gcc_assert (sym);
10473 if (dwarf_split_debug_info)
10474 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10475 "%s", dwarf_attr_name (a->dw_attr));
10476 else
10477 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10478 "%s", dwarf_attr_name (a->dw_attr));
10479 }
10480
10481 /* Output an attribute's index or value appropriately. */
10482
10483 static void
10484 output_attr_index_or_value (dw_attr_node *a)
10485 {
10486 const char *name = dwarf_attr_name (a->dw_attr);
10487
10488 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10489 {
10490 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10491 return;
10492 }
10493 switch (AT_class (a))
10494 {
10495 case dw_val_class_addr:
10496 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10497 break;
10498 case dw_val_class_high_pc:
10499 case dw_val_class_lbl_id:
10500 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10501 break;
10502 default:
10503 gcc_unreachable ();
10504 }
10505 }
10506
10507 /* Output a type signature. */
10508
10509 static inline void
10510 output_signature (const char *sig, const char *name)
10511 {
10512 int i;
10513
10514 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10515 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10516 }
10517
10518 /* Output a discriminant value. */
10519
10520 static inline void
10521 output_discr_value (dw_discr_value *discr_value, const char *name)
10522 {
10523 if (discr_value->pos)
10524 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10525 else
10526 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10527 }
10528
10529 /* Output the DIE and its attributes. Called recursively to generate
10530 the definitions of each child DIE. */
10531
10532 static void
10533 output_die (dw_die_ref die)
10534 {
10535 dw_attr_node *a;
10536 dw_die_ref c;
10537 unsigned long size;
10538 unsigned ix;
10539
10540 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10541 (unsigned long)die->die_offset,
10542 dwarf_tag_name (die->die_tag));
10543
10544 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10545 {
10546 const char *name = dwarf_attr_name (a->dw_attr);
10547
10548 switch (AT_class (a))
10549 {
10550 case dw_val_class_addr:
10551 output_attr_index_or_value (a);
10552 break;
10553
10554 case dw_val_class_offset:
10555 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10556 "%s", name);
10557 break;
10558
10559 case dw_val_class_range_list:
10560 output_range_list_offset (a);
10561 break;
10562
10563 case dw_val_class_loc:
10564 size = size_of_locs (AT_loc (a));
10565
10566 /* Output the block length for this list of location operations. */
10567 if (dwarf_version >= 4)
10568 dw2_asm_output_data_uleb128 (size, "%s", name);
10569 else
10570 dw2_asm_output_data (constant_size (size), size, "%s", name);
10571
10572 output_loc_sequence (AT_loc (a), -1);
10573 break;
10574
10575 case dw_val_class_const:
10576 /* ??? It would be slightly more efficient to use a scheme like the one
10577 used for unsigned constants below, but gdb 4.x does not sign
10578 extend. Gdb 5.x does sign extend. */
10579 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10580 break;
10581
10582 case dw_val_class_unsigned_const:
10583 {
10584 int csize = constant_size (AT_unsigned (a));
10585 if (dwarf_version == 3
10586 && a->dw_attr == DW_AT_data_member_location
10587 && csize >= 4)
10588 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10589 else
10590 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10591 }
10592 break;
10593
10594 case dw_val_class_symview:
10595 {
10596 int vsize;
10597 if (symview_upper_bound <= 0xff)
10598 vsize = 1;
10599 else if (symview_upper_bound <= 0xffff)
10600 vsize = 2;
10601 else if (symview_upper_bound <= 0xffffffff)
10602 vsize = 4;
10603 else
10604 vsize = 8;
10605 dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
10606 "%s", name);
10607 }
10608 break;
10609
10610 case dw_val_class_const_implicit:
10611 if (flag_debug_asm)
10612 fprintf (asm_out_file, "\t\t\t%s %s ("
10613 HOST_WIDE_INT_PRINT_DEC ")\n",
10614 ASM_COMMENT_START, name, AT_int (a));
10615 break;
10616
10617 case dw_val_class_unsigned_const_implicit:
10618 if (flag_debug_asm)
10619 fprintf (asm_out_file, "\t\t\t%s %s ("
10620 HOST_WIDE_INT_PRINT_HEX ")\n",
10621 ASM_COMMENT_START, name, AT_unsigned (a));
10622 break;
10623
10624 case dw_val_class_const_double:
10625 {
10626 unsigned HOST_WIDE_INT first, second;
10627
10628 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10629 dw2_asm_output_data (1,
10630 HOST_BITS_PER_DOUBLE_INT
10631 / HOST_BITS_PER_CHAR,
10632 NULL);
10633
10634 if (WORDS_BIG_ENDIAN)
10635 {
10636 first = a->dw_attr_val.v.val_double.high;
10637 second = a->dw_attr_val.v.val_double.low;
10638 }
10639 else
10640 {
10641 first = a->dw_attr_val.v.val_double.low;
10642 second = a->dw_attr_val.v.val_double.high;
10643 }
10644
10645 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10646 first, "%s", name);
10647 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10648 second, NULL);
10649 }
10650 break;
10651
10652 case dw_val_class_wide_int:
10653 {
10654 int i;
10655 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10656 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10657 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10658 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10659 * l, NULL);
10660
10661 if (WORDS_BIG_ENDIAN)
10662 for (i = len - 1; i >= 0; --i)
10663 {
10664 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10665 "%s", name);
10666 name = "";
10667 }
10668 else
10669 for (i = 0; i < len; ++i)
10670 {
10671 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10672 "%s", name);
10673 name = "";
10674 }
10675 }
10676 break;
10677
10678 case dw_val_class_vec:
10679 {
10680 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10681 unsigned int len = a->dw_attr_val.v.val_vec.length;
10682 unsigned int i;
10683 unsigned char *p;
10684
10685 dw2_asm_output_data (constant_size (len * elt_size),
10686 len * elt_size, "%s", name);
10687 if (elt_size > sizeof (HOST_WIDE_INT))
10688 {
10689 elt_size /= 2;
10690 len *= 2;
10691 }
10692 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10693 i < len;
10694 i++, p += elt_size)
10695 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10696 "fp or vector constant word %u", i);
10697 break;
10698 }
10699
10700 case dw_val_class_flag:
10701 if (dwarf_version >= 4)
10702 {
10703 /* Currently all add_AT_flag calls pass in 1 as last argument,
10704 so DW_FORM_flag_present can be used. If that ever changes,
10705 we'll need to use DW_FORM_flag and have some optimization
10706 in build_abbrev_table that will change those to
10707 DW_FORM_flag_present if it is set to 1 in all DIEs using
10708 the same abbrev entry. */
10709 gcc_assert (AT_flag (a) == 1);
10710 if (flag_debug_asm)
10711 fprintf (asm_out_file, "\t\t\t%s %s\n",
10712 ASM_COMMENT_START, name);
10713 break;
10714 }
10715 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10716 break;
10717
10718 case dw_val_class_loc_list:
10719 output_loc_list_offset (a);
10720 break;
10721
10722 case dw_val_class_view_list:
10723 output_view_list_offset (a);
10724 break;
10725
10726 case dw_val_class_die_ref:
10727 if (AT_ref_external (a))
10728 {
10729 if (AT_ref (a)->comdat_type_p)
10730 {
10731 comdat_type_node *type_node
10732 = AT_ref (a)->die_id.die_type_node;
10733
10734 gcc_assert (type_node);
10735 output_signature (type_node->signature, name);
10736 }
10737 else
10738 {
10739 const char *sym = AT_ref (a)->die_id.die_symbol;
10740 int size;
10741
10742 gcc_assert (sym);
10743 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10744 length, whereas in DWARF3 it's always sized as an
10745 offset. */
10746 if (dwarf_version == 2)
10747 size = DWARF2_ADDR_SIZE;
10748 else
10749 size = DWARF_OFFSET_SIZE;
10750 /* ??? We cannot unconditionally output die_offset if
10751 non-zero - others might create references to those
10752 DIEs via symbols.
10753 And we do not clear the DIE offset after outputting it
10754 (and the label refers to the actual DIE, not to the
10755 DWARF CU unit header, which is the case where using label + offset
10756 would be the correct thing to do).
10757 ??? This is the reason for the with_offset flag. */
10758 if (AT_ref (a)->with_offset)
10759 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10760 debug_info_section, "%s", name);
10761 else
10762 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10763 name);
10764 }
10765 }
10766 else
10767 {
10768 gcc_assert (AT_ref (a)->die_offset);
10769 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10770 "%s", name);
10771 }
10772 break;
10773
10774 case dw_val_class_fde_ref:
10775 {
10776 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10777
10778 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10779 a->dw_attr_val.v.val_fde_index * 2);
10780 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10781 "%s", name);
10782 }
10783 break;
10784
10785 case dw_val_class_vms_delta:
10786 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10787 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10788 AT_vms_delta2 (a), AT_vms_delta1 (a),
10789 "%s", name);
10790 #else
10791 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10792 AT_vms_delta2 (a), AT_vms_delta1 (a),
10793 "%s", name);
10794 #endif
10795 break;
10796
10797 case dw_val_class_lbl_id:
10798 output_attr_index_or_value (a);
10799 break;
10800
10801 case dw_val_class_lineptr:
10802 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10803 debug_line_section, "%s", name);
10804 break;
10805
10806 case dw_val_class_macptr:
10807 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10808 debug_macinfo_section, "%s", name);
10809 break;
10810
10811 case dw_val_class_loclistsptr:
10812 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10813 debug_loc_section, "%s", name);
10814 break;
10815
10816 case dw_val_class_str:
10817 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10818 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10819 a->dw_attr_val.v.val_str->label,
10820 debug_str_section,
10821 "%s: \"%s\"", name, AT_string (a));
10822 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10823 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10824 a->dw_attr_val.v.val_str->label,
10825 debug_line_str_section,
10826 "%s: \"%s\"", name, AT_string (a));
10827 else if (a->dw_attr_val.v.val_str->form == dwarf_FORM (DW_FORM_strx))
10828 dw2_asm_output_data_uleb128 (AT_index (a),
10829 "%s: \"%s\"", name, AT_string (a));
10830 else
10831 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10832 break;
10833
10834 case dw_val_class_file:
10835 {
10836 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10837
10838 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10839 a->dw_attr_val.v.val_file->filename);
10840 break;
10841 }
10842
10843 case dw_val_class_file_implicit:
10844 if (flag_debug_asm)
10845 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10846 ASM_COMMENT_START, name,
10847 maybe_emit_file (a->dw_attr_val.v.val_file),
10848 a->dw_attr_val.v.val_file->filename);
10849 break;
10850
10851 case dw_val_class_data8:
10852 {
10853 int i;
10854
10855 for (i = 0; i < 8; i++)
10856 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10857 i == 0 ? "%s" : NULL, name);
10858 break;
10859 }
10860
10861 case dw_val_class_high_pc:
10862 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10863 get_AT_low_pc (die), "DW_AT_high_pc");
10864 break;
10865
10866 case dw_val_class_discr_value:
10867 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10868 break;
10869
10870 case dw_val_class_discr_list:
10871 {
10872 dw_discr_list_ref list = AT_discr_list (a);
10873 const int size = size_of_discr_list (list);
10874
10875 /* This is a block, so output its length first. */
10876 dw2_asm_output_data (constant_size (size), size,
10877 "%s: block size", name);
10878
10879 for (; list != NULL; list = list->dw_discr_next)
10880 {
10881 /* One byte for the discriminant value descriptor, and then as
10882 many LEB128 numbers as required. */
10883 if (list->dw_discr_range)
10884 dw2_asm_output_data (1, DW_DSC_range,
10885 "%s: DW_DSC_range", name);
10886 else
10887 dw2_asm_output_data (1, DW_DSC_label,
10888 "%s: DW_DSC_label", name);
10889
10890 output_discr_value (&list->dw_discr_lower_bound, name);
10891 if (list->dw_discr_range)
10892 output_discr_value (&list->dw_discr_upper_bound, name);
10893 }
10894 break;
10895 }
10896
10897 default:
10898 gcc_unreachable ();
10899 }
10900 }
10901
10902 FOR_EACH_CHILD (die, c, output_die (c));
10903
10904 /* Add null byte to terminate sibling list. */
10905 if (die->die_child != NULL)
10906 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10907 (unsigned long) die->die_offset);
10908 }
10909
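/* What follows is a standalone illustration, not GCC code: a minimal
   sketch of the wire format produced by output_die above.  Each DIE is
   emitted as its abbreviation code in ULEB128 form, followed by its
   attribute values, then its children, and a DIE that has children is
   terminated by a single zero byte.  All types and helpers below are
   hypothetical and exist only for this sketch.  */
#if 0
#include <stdio.h>
#include <stddef.h>

struct sketch_die
{
  unsigned long abbrev;         /* Abbreviation code (nonzero).  */
  const unsigned char *attrs;   /* Pre-encoded attribute values.  */
  size_t attrs_len;
  struct sketch_die *child;     /* First child, or NULL.  */
  struct sketch_die *sibling;   /* Next sibling, or NULL.  */
};

/* Emit VALUE as a ULEB128, the variable-length encoding used for
   abbreviation codes and many attribute forms.  */
static void
sketch_uleb128 (FILE *out, unsigned long value)
{
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value != 0)
        byte |= 0x80;           /* More bytes follow.  */
      fputc (byte, out);
    }
  while (value != 0);
}

/* Recursively emit DIE: abbrev code, attribute values, children, and
   the terminating zero byte when the child list is non-empty.  */
static void
sketch_output_die (FILE *out, const struct sketch_die *die)
{
  const struct sketch_die *c;

  sketch_uleb128 (out, die->abbrev);
  fwrite (die->attrs, 1, die->attrs_len, out);
  for (c = die->child; c != NULL; c = c->sibling)
    sketch_output_die (out, c);
  if (die->child != NULL)
    fputc (0, out);             /* End-of-children marker.  */
}
#endif
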
10910 /* Output the dwarf version number. */
10911
10912 static void
10913 output_dwarf_version ()
10914 {
10915 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
10916 views in loclist. That will change eventually. */
10917 if (dwarf_version == 6)
10918 {
10919 static bool once;
10920 if (!once)
10921 {
10922 warning (0,
10923 "-gdwarf-6 is output as version 5 with incompatibilities");
10924 once = true;
10925 }
10926 dw2_asm_output_data (2, 5, "DWARF version number");
10927 }
10928 else
10929 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10930 }
10931
10932 /* Output the compilation unit header that appears at the beginning of the
10933 .debug_info section and precedes the DIE descriptions. */
10934
10935 static void
10936 output_compilation_unit_header (enum dwarf_unit_type ut)
10937 {
10938 if (!XCOFF_DEBUGGING_INFO)
10939 {
10940 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10941 dw2_asm_output_data (4, 0xffffffff,
10942 "Initial length escape value indicating 64-bit DWARF extension");
10943 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10944 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
10945 "Length of Compilation Unit Info");
10946 }
10947
10948 output_dwarf_version ();
10949 if (dwarf_version >= 5)
10950 {
10951 const char *name;
10952 switch (ut)
10953 {
10954 case DW_UT_compile: name = "DW_UT_compile"; break;
10955 case DW_UT_type: name = "DW_UT_type"; break;
10956 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
10957 case DW_UT_split_type: name = "DW_UT_split_type"; break;
10958 default: gcc_unreachable ();
10959 }
10960 dw2_asm_output_data (1, ut, "%s", name);
10961 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10962 }
10963 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
10964 debug_abbrev_section,
10965 "Offset Into Abbrev. Section");
10966 if (dwarf_version < 5)
10967 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10968 }
10969
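/* Standalone illustration, not GCC code: a sketch of the field order
   written by output_compilation_unit_header above, limited to the
   32-bit DWARF format.  DWARF 5 places the unit type and address size
   between the version and the .debug_abbrev offset, while DWARF 2-4
   put the address size last.  Little-endian byte order and the names
   below are assumptions made only for this illustration.  */
#if 0
#include <stdint.h>
#include <string.h>

static size_t
sketch_cu_header (unsigned char *buf, uint32_t unit_length, uint16_t version,
                  uint8_t unit_type, uint8_t addr_size, uint32_t abbrev_off)
{
  unsigned char *p = buf;

  memcpy (p, &unit_length, 4);  /* Length of the unit that follows.  */
  p += 4;
  memcpy (p, &version, 2);      /* DWARF version number.  */
  p += 2;
  if (version >= 5)
    {
      *p++ = unit_type;         /* E.g. DW_UT_compile (0x01).  */
      *p++ = addr_size;         /* Pointer size in bytes.  */
      memcpy (p, &abbrev_off, 4);
      p += 4;
    }
  else
    {
      memcpy (p, &abbrev_off, 4);
      p += 4;
      *p++ = addr_size;
    }
  return p - buf;               /* Header size actually written.  */
}
#endif
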
10970 /* Output the compilation unit DIE and its children. */
10971
10972 static void
10973 output_comp_unit (dw_die_ref die, int output_if_empty,
10974 const unsigned char *dwo_id)
10975 {
10976 const char *secname, *oldsym;
10977 char *tmp;
10978
10979 /* Unless we are outputting the main CU, we may throw away empty ones. */
10980 if (!output_if_empty && die->die_child == NULL)
10981 return;
10982
10983 /* Even if there are no children of this DIE, we must output the information
10984 about the compilation unit. Otherwise, on an empty translation unit, we
10985 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
10986 will then complain when examining the file. First mark all the DIEs in
10987 this CU so we know which get local refs. */
10988 mark_dies (die);
10989
10990 external_ref_hash_type *extern_map = optimize_external_refs (die);
10991
10992 /* For now, optimize only the main CU, in order to optimize the rest
10993 we'd need to see all of them earlier. Leave the rest for post-linking
10994 tools like DWZ. */
10995 if (die == comp_unit_die ())
10996 abbrev_opt_start = vec_safe_length (abbrev_die_table);
10997
10998 build_abbrev_table (die, extern_map);
10999
11000 optimize_abbrev_table ();
11001
11002 delete extern_map;
11003
11004 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11005 next_die_offset = (dwo_id
11006 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11007 : DWARF_COMPILE_UNIT_HEADER_SIZE);
11008 calc_die_sizes (die);
11009
11010 oldsym = die->die_id.die_symbol;
11011 if (oldsym && die->comdat_type_p)
11012 {
11013 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
11014
11015 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
11016 secname = tmp;
11017 die->die_id.die_symbol = NULL;
11018 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11019 }
11020 else
11021 {
11022 switch_to_section (debug_info_section);
11023 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
11024 info_section_emitted = true;
11025 }
11026
11027 /* For LTO cross-unit DIE refs we want a symbol at the start of the
11028 debug info section, not on the CU DIE. */
11029 if ((flag_generate_lto || flag_generate_offload) && oldsym)
11030 {
11031 /* ??? No way to get visibility assembled without a decl. */
11032 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
11033 get_identifier (oldsym), char_type_node);
11034 TREE_PUBLIC (decl) = true;
11035 TREE_STATIC (decl) = true;
11036 DECL_ARTIFICIAL (decl) = true;
11037 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
11038 DECL_VISIBILITY_SPECIFIED (decl) = true;
11039 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
11040 #ifdef ASM_WEAKEN_LABEL
11041 /* We prefer a .weak because that handles duplicates from duplicate
11042 archive members in a graceful way. */
11043 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
11044 #else
11045 targetm.asm_out.globalize_label (asm_out_file, oldsym);
11046 #endif
11047 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
11048 }
11049
11050 /* Output debugging information. */
11051 output_compilation_unit_header (dwo_id
11052 ? DW_UT_split_compile : DW_UT_compile);
11053 if (dwarf_version >= 5)
11054 {
11055 if (dwo_id != NULL)
11056 for (int i = 0; i < 8; i++)
11057 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11058 }
11059 output_die (die);
11060
11061 /* Leave the marks on the main CU, so we can check them in
11062 output_pubnames. */
11063 if (oldsym)
11064 {
11065 unmark_dies (die);
11066 die->die_id.die_symbol = oldsym;
11067 }
11068 }
11069
11070 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
11071 and .debug_pubtypes. This is configured per-target, but can be
11072 overridden by the -gpubnames or -gno-pubnames options. */
11073
11074 static inline bool
11075 want_pubnames (void)
11076 {
11077 if (debug_info_level <= DINFO_LEVEL_TERSE)
11078 return false;
11079 if (debug_generate_pub_sections != -1)
11080 return debug_generate_pub_sections;
11081 return targetm.want_debug_pub_sections;
11082 }
11083
11084 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
11085
11086 static void
11087 add_AT_pubnames (dw_die_ref die)
11088 {
11089 if (want_pubnames ())
11090 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
11091 }
11092
11093 /* Add a string attribute value to a skeleton DIE. */
11094
11095 static inline void
11096 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
11097 const char *str)
11098 {
11099 dw_attr_node attr;
11100 struct indirect_string_node *node;
11101
11102 if (! skeleton_debug_str_hash)
11103 skeleton_debug_str_hash
11104 = hash_table<indirect_string_hasher>::create_ggc (10);
11105
11106 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11107 find_string_form (node);
11108 if (node->form == dwarf_FORM (DW_FORM_strx))
11109 node->form = DW_FORM_strp;
11110
11111 attr.dw_attr = attr_kind;
11112 attr.dw_attr_val.val_class = dw_val_class_str;
11113 attr.dw_attr_val.val_entry = NULL;
11114 attr.dw_attr_val.v.val_str = node;
11115 add_dwarf_attr (die, &attr);
11116 }
11117
11118 /* Helper function to generate top-level dies for skeleton debug_info and
11119 debug_types. */
11120
11121 static void
11122 add_top_level_skeleton_die_attrs (dw_die_ref die)
11123 {
11124 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11125 const char *comp_dir = comp_dir_string ();
11126
11127 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11128 if (comp_dir != NULL)
11129 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11130 add_AT_pubnames (die);
11131 add_AT_lineptr (die, dwarf_AT (DW_AT_addr_base), debug_addr_section_label);
11132 }
11133
11134 /* Output skeleton debug sections that point to the dwo file. */
11135
11136 static void
11137 output_skeleton_debug_sections (dw_die_ref comp_unit,
11138 const unsigned char *dwo_id)
11139 {
11140 /* These attributes will be found in the full debug_info section. */
11141 remove_AT (comp_unit, DW_AT_producer);
11142 remove_AT (comp_unit, DW_AT_language);
11143
11144 switch_to_section (debug_skeleton_info_section);
11145 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11146
11147 /* Produce the skeleton compilation-unit header.  This one differs enough
11148 from a normal CU header that it's better not to call
11149 output_compilation_unit_header. */
11150 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11151 dw2_asm_output_data (4, 0xffffffff,
11152 "Initial length escape value indicating 64-bit "
11153 "DWARF extension");
11154
11155 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11156 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11157 - DWARF_INITIAL_LENGTH_SIZE
11158 + size_of_die (comp_unit),
11159 "Length of Compilation Unit Info");
11160 output_dwarf_version ();
11161 if (dwarf_version >= 5)
11162 {
11163 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11164 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11165 }
11166 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
11167 debug_skeleton_abbrev_section,
11168 "Offset Into Abbrev. Section");
11169 if (dwarf_version < 5)
11170 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11171 else
11172 for (int i = 0; i < 8; i++)
11173 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11174
11175 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11176 output_die (comp_unit);
11177
11178 /* Build the skeleton debug_abbrev section. */
11179 switch_to_section (debug_skeleton_abbrev_section);
11180 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11181
11182 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11183
11184 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11185 }
11186
11187 /* Output a comdat type unit DIE and its children. */
11188
11189 static void
11190 output_comdat_type_unit (comdat_type_node *node)
11191 {
11192 const char *secname;
11193 char *tmp;
11194 int i;
11195 #if defined (OBJECT_FORMAT_ELF)
11196 tree comdat_key;
11197 #endif
11198
11199 /* First mark all the DIEs in this CU so we know which get local refs. */
11200 mark_dies (node->root_die);
11201
11202 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11203
11204 build_abbrev_table (node->root_die, extern_map);
11205
11206 delete extern_map;
11207 extern_map = NULL;
11208
11209 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11210 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11211 calc_die_sizes (node->root_die);
11212
11213 #if defined (OBJECT_FORMAT_ELF)
11214 if (dwarf_version >= 5)
11215 {
11216 if (!dwarf_split_debug_info)
11217 secname = ".debug_info";
11218 else
11219 secname = ".debug_info.dwo";
11220 }
11221 else if (!dwarf_split_debug_info)
11222 secname = ".debug_types";
11223 else
11224 secname = ".debug_types.dwo";
11225
11226 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11227 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11228 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11229 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
11230 comdat_key = get_identifier (tmp);
11231 targetm.asm_out.named_section (secname,
11232 SECTION_DEBUG | SECTION_LINKONCE,
11233 comdat_key);
11234 #else
11235 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11236 sprintf (tmp, (dwarf_version >= 5
11237 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11238 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11239 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11240 secname = tmp;
11241 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11242 #endif
11243
11244 /* Output debugging information. */
11245 output_compilation_unit_header (dwarf_split_debug_info
11246 ? DW_UT_split_type : DW_UT_type);
11247 output_signature (node->signature, "Type Signature");
11248 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
11249 "Offset to Type DIE");
11250 output_die (node->root_die);
11251
11252 unmark_dies (node->root_die);
11253 }
11254
11255 /* Return the DWARF2/3 pubname associated with a decl. */
11256
11257 static const char *
11258 dwarf2_name (tree decl, int scope)
11259 {
11260 if (DECL_NAMELESS (decl))
11261 return NULL;
11262 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11263 }
11264
11265 /* Add a new entry to .debug_pubnames if appropriate. */
11266
11267 static void
11268 add_pubname_string (const char *str, dw_die_ref die)
11269 {
11270 pubname_entry e;
11271
11272 e.die = die;
11273 e.name = xstrdup (str);
11274 vec_safe_push (pubname_table, e);
11275 }
11276
11277 static void
11278 add_pubname (tree decl, dw_die_ref die)
11279 {
11280 if (!want_pubnames ())
11281 return;
11282
11283 /* Don't add items to the table when we expect that the consumer will have
11284 just read the enclosing die. For example, if the consumer is looking at a
11285 class_member, it will either be inside the class already, or will have just
11286 looked up the class to find the member. Either way, searching the class is
11287 faster than searching the index. */
11288 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11289 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11290 {
11291 const char *name = dwarf2_name (decl, 1);
11292
11293 if (name)
11294 add_pubname_string (name, die);
11295 }
11296 }
11297
11298 /* Add an enumerator to the pubnames section. */
11299
11300 static void
11301 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11302 {
11303 pubname_entry e;
11304
11305 gcc_assert (scope_name);
11306 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11307 e.die = die;
11308 vec_safe_push (pubname_table, e);
11309 }
11310
11311 /* Add a new entry to .debug_pubtypes if appropriate. */
11312
11313 static void
11314 add_pubtype (tree decl, dw_die_ref die)
11315 {
11316 pubname_entry e;
11317
11318 if (!want_pubnames ())
11319 return;
11320
11321 if ((TREE_PUBLIC (decl)
11322 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11323 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11324 {
11325 tree scope = NULL;
11326 const char *scope_name = "";
11327 const char *sep = is_cxx () ? "::" : ".";
11328 const char *name;
11329
11330 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11331 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11332 {
11333 scope_name = lang_hooks.dwarf_name (scope, 1);
11334 if (scope_name != NULL && scope_name[0] != '\0')
11335 scope_name = concat (scope_name, sep, NULL);
11336 else
11337 scope_name = "";
11338 }
11339
11340 if (TYPE_P (decl))
11341 name = type_tag (decl);
11342 else
11343 name = lang_hooks.dwarf_name (decl, 1);
11344
11345 /* If we don't have a name for the type, there's no point in adding
11346 it to the table. */
11347 if (name != NULL && name[0] != '\0')
11348 {
11349 e.die = die;
11350 e.name = concat (scope_name, name, NULL);
11351 vec_safe_push (pubtype_table, e);
11352 }
11353
11354 /* Although it might be more consistent to add the pubinfo for the
11355 enumerators as their dies are created, they should only be added if the
11356 enum type meets the criteria above. So rather than re-check the parent
11357 enum type whenever an enumerator die is created, just output them all
11358 here. This isn't protected by the name conditional because anonymous
11359 enums don't have names. */
11360 if (die->die_tag == DW_TAG_enumeration_type)
11361 {
11362 dw_die_ref c;
11363
11364 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11365 }
11366 }
11367 }
11368
11369 /* Output a single entry in the pubnames table. */
11370
11371 static void
11372 output_pubname (dw_offset die_offset, pubname_entry *entry)
11373 {
11374 dw_die_ref die = entry->die;
11375 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11376
11377 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
11378
11379 if (debug_generate_pub_sections == 2)
11380 {
11381 /* This logic follows gdb's method for determining the value of the flag
11382 byte. */
11383 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11384 switch (die->die_tag)
11385 {
11386 case DW_TAG_typedef:
11387 case DW_TAG_base_type:
11388 case DW_TAG_subrange_type:
11389 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11390 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11391 break;
11392 case DW_TAG_enumerator:
11393 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11394 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11395 if (!is_cxx ())
11396 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11397 break;
11398 case DW_TAG_subprogram:
11399 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11400 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11401 if (!is_ada ())
11402 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11403 break;
11404 case DW_TAG_constant:
11405 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11406 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11407 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11408 break;
11409 case DW_TAG_variable:
11410 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11411 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11412 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11413 break;
11414 case DW_TAG_namespace:
11415 case DW_TAG_imported_declaration:
11416 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11417 break;
11418 case DW_TAG_class_type:
11419 case DW_TAG_interface_type:
11420 case DW_TAG_structure_type:
11421 case DW_TAG_union_type:
11422 case DW_TAG_enumeration_type:
11423 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11424 if (!is_cxx ())
11425 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11426 break;
11427 default:
11428 /* An unusual tag. Leave the flag-byte empty. */
11429 break;
11430 }
11431 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11432 "GDB-index flags");
11433 }
11434
11435 dw2_asm_output_nstring (entry->name, -1, "external name");
11436 }
11437
11438
11439 /* Output the public names table used to speed up access to externally
11440 visible names; or the public types table used to find type definitions. */
11441
11442 static void
11443 output_pubnames (vec<pubname_entry, va_gc> *names)
11444 {
11445 unsigned i;
11446 unsigned long pubnames_length = size_of_pubnames (names);
11447 pubname_entry *pub;
11448
11449 if (!XCOFF_DEBUGGING_INFO)
11450 {
11451 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11452 dw2_asm_output_data (4, 0xffffffff,
11453 "Initial length escape value indicating 64-bit DWARF extension");
11454 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
11455 "Pub Info Length");
11456 }
11457
11458 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11459 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11460
11461 if (dwarf_split_debug_info)
11462 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11463 debug_skeleton_info_section,
11464 "Offset of Compilation Unit Info");
11465 else
11466 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11467 debug_info_section,
11468 "Offset of Compilation Unit Info");
11469 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
11470 "Compilation Unit Length");
11471
11472 FOR_EACH_VEC_ELT (*names, i, pub)
11473 {
11474 if (include_pubname_in_output (names, pub))
11475 {
11476 dw_offset die_offset = pub->die->die_offset;
11477
11478 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11479 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11480 gcc_assert (pub->die->die_mark);
11481
11482 /* If we're putting types in their own .debug_types sections,
11483 the .debug_pubtypes table will still point to the compile
11484 unit (not the type unit), so we want to use the offset of
11485 the skeleton DIE (if there is one). */
11486 if (pub->die->comdat_type_p && names == pubtype_table)
11487 {
11488 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11489
11490 if (type_node != NULL)
11491 die_offset = (type_node->skeleton_die != NULL
11492 ? type_node->skeleton_die->die_offset
11493 : comp_unit_die ()->die_offset);
11494 }
11495
11496 output_pubname (die_offset, pub);
11497 }
11498 }
11499
11500 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
11501 }
11502
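/* Standalone illustration, not GCC code: the .debug_pubnames layout
   emitted by output_pubnames above, for 32-bit DWARF.  A header (unit
   length, version 2, offset and length of the described CU) is
   followed by (DIE offset, NUL-terminated name) pairs and a
   terminating zero offset.  Little-endian byte order and the helper
   names are assumptions for this sketch only.  */
#if 0
#include <stdio.h>
#include <stdint.h>
#include <string.h>

struct sketch_pubname
{
  uint32_t die_offset;          /* Offset of the named DIE in its CU.  */
  const char *name;
};

static void
sketch_u32 (FILE *out, uint32_t v)
{
  fwrite (&v, 4, 1, out);
}

static void
sketch_output_pubnames (FILE *out, uint32_t unit_length, uint32_t cu_offset,
                        uint32_t cu_length, const struct sketch_pubname *e,
                        size_t n)
{
  uint16_t version = 2;         /* Independent of the CU's DWARF version.  */
  size_t i;

  sketch_u32 (out, unit_length);
  fwrite (&version, 2, 1, out);
  sketch_u32 (out, cu_offset);  /* Where the CU starts in .debug_info.  */
  sketch_u32 (out, cu_length);
  for (i = 0; i < n; i++)
    {
      sketch_u32 (out, e[i].die_offset);
      fwrite (e[i].name, 1, strlen (e[i].name) + 1, out);
    }
  sketch_u32 (out, 0);          /* Terminating zero offset.  */
}
#endif
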
11503 /* Output public names and types tables if necessary. */
11504
11505 static void
11506 output_pubtables (void)
11507 {
11508 if (!want_pubnames () || !info_section_emitted)
11509 return;
11510
11511 switch_to_section (debug_pubnames_section);
11512 output_pubnames (pubname_table);
11513 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11514 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11515 simply won't look for the section. */
11516 switch_to_section (debug_pubtypes_section);
11517 output_pubnames (pubtype_table);
11518 }
11519
11520
11521 /* Output the information that goes into the .debug_aranges table.
11522 Namely, define the beginning and ending address range of the
11523 text section generated for this compilation unit. */
11524
11525 static void
11526 output_aranges (void)
11527 {
11528 unsigned i;
11529 unsigned long aranges_length = size_of_aranges ();
11530
11531 if (!XCOFF_DEBUGGING_INFO)
11532 {
11533 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11534 dw2_asm_output_data (4, 0xffffffff,
11535 "Initial length escape value indicating 64-bit DWARF extension");
11536 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
11537 "Length of Address Ranges Info");
11538 }
11539
11540 /* Version number for aranges is still 2, even up to DWARF5. */
11541 dw2_asm_output_data (2, 2, "DWARF aranges version");
11542 if (dwarf_split_debug_info)
11543 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11544 debug_skeleton_info_section,
11545 "Offset of Compilation Unit Info");
11546 else
11547 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11548 debug_info_section,
11549 "Offset of Compilation Unit Info");
11550 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11551 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11552
11553 /* We need to align to twice the pointer size here. */
11554 if (DWARF_ARANGES_PAD_SIZE)
11555 {
11556 /* Pad using 2-byte words so that the padding is correct for any
11557 pointer size. */
11558 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11559 2 * DWARF2_ADDR_SIZE);
11560 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11561 dw2_asm_output_data (2, 0, NULL);
11562 }
11563
11564 /* These entries must not be output if the sections were not
11565 used; in that case the length will be 0 and the address may
11566 end up as 0 if the section is discarded by ld --gc-sections,
11567 leaving an invalid (0, 0) entry that can be confused with
11568 the terminator.  */
11569 if (text_section_used)
11570 {
11571 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11572 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11573 text_section_label, "Length");
11574 }
11575 if (cold_text_section_used)
11576 {
11577 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11578 "Address");
11579 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11580 cold_text_section_label, "Length");
11581 }
11582
11583 if (have_multiple_function_sections)
11584 {
11585 unsigned fde_idx;
11586 dw_fde_ref fde;
11587
11588 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11589 {
11590 if (DECL_IGNORED_P (fde->decl))
11591 continue;
11592 if (!fde->in_std_section)
11593 {
11594 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11595 "Address");
11596 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11597 fde->dw_fde_begin, "Length");
11598 }
11599 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11600 {
11601 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11602 "Address");
11603 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11604 fde->dw_fde_second_begin, "Length");
11605 }
11606 }
11607 }
11608
11609 /* Output the terminator words. */
11610 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11611 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11612 }
11613
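/* Standalone illustration, not GCC code: the padding arithmetic behind
   DWARF_ARANGES_PAD_SIZE as used in output_aranges above.  The
   (address, length) tuples must start at an offset that is a multiple
   of twice the address size, so the 12-byte 32-bit DWARF header
   (length, version, CU offset, address size, segment size) is padded
   up to that boundary.  */
#if 0
#include <stddef.h>

static size_t
sketch_aranges_pad (size_t header_size, size_t addr_size)
{
  size_t align = 2 * addr_size; /* Tuple alignment required by DWARF.  */
  size_t over = header_size % align;

  return over ? align - over : 0;
}

/* For example, a 12-byte header with 8-byte addresses needs the tuples
   to start at offset 16, so 4 bytes of zero padding are emitted.  */
#endif
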
11614 /* Add a new entry to .debug_ranges. Return its index into
11615 the ranges_table vector. */
11616
11617 static unsigned int
11618 add_ranges_num (int num, bool maybe_new_sec)
11619 {
11620 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11621 vec_safe_push (ranges_table, r);
11622 return vec_safe_length (ranges_table) - 1;
11623 }
11624
11625 /* Add a new entry to .debug_ranges corresponding to a block, or a
11626 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11627 this entry might be in a different section from the previous range. */
11628
11629 static unsigned int
11630 add_ranges (const_tree block, bool maybe_new_sec)
11631 {
11632 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11633 }
11634
11635 /* Note that (*ranges_table)[offset] is either the head of a rnglist
11636 chain, or a middle entry of a chain that will be directly referred to. */
11637
11638 static void
11639 note_rnglist_head (unsigned int offset)
11640 {
11641 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11642 return;
11643 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11644 }
11645
11646 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11647 When using dwarf_split_debug_info, address attributes in dies destined
11648 for the final executable should be direct references--setting the
11649 parameter force_direct ensures this behavior. */
11650
11651 static void
11652 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11653 bool *added, bool force_direct)
11654 {
11655 unsigned int in_use = vec_safe_length (ranges_by_label);
11656 unsigned int offset;
11657 dw_ranges_by_label rbl = { begin, end };
11658 vec_safe_push (ranges_by_label, rbl);
11659 offset = add_ranges_num (-(int)in_use - 1, true);
11660 if (!*added)
11661 {
11662 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11663 *added = true;
11664 note_rnglist_head (offset);
11665 }
11666 }
11667
11668 /* Emit .debug_ranges section. */
11669
11670 static void
11671 output_ranges (void)
11672 {
11673 unsigned i;
11674 static const char *const start_fmt = "Offset %#x";
11675 const char *fmt = start_fmt;
11676 dw_ranges *r;
11677
11678 switch_to_section (debug_ranges_section);
11679 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11680 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11681 {
11682 int block_num = r->num;
11683
11684 if (block_num > 0)
11685 {
11686 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11687 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11688
11689 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11690 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11691
11692 /* If all code is in the text section, then the compilation
11693 unit base address defaults to DW_AT_low_pc, which is the
11694 base of the text section. */
11695 if (!have_multiple_function_sections)
11696 {
11697 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11698 text_section_label,
11699 fmt, i * 2 * DWARF2_ADDR_SIZE);
11700 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11701 text_section_label, NULL);
11702 }
11703
11704 /* Otherwise, the compilation unit base address is zero,
11705 which allows us to use absolute addresses, and not worry
11706 about whether the target supports cross-section
11707 arithmetic. */
11708 else
11709 {
11710 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11711 fmt, i * 2 * DWARF2_ADDR_SIZE);
11712 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11713 }
11714
11715 fmt = NULL;
11716 }
11717
11718 /* Negative block_num stands for an index into ranges_by_label. */
11719 else if (block_num < 0)
11720 {
11721 int lab_idx = - block_num - 1;
11722
11723 if (!have_multiple_function_sections)
11724 {
11725 gcc_unreachable ();
11726 #if 0
11727 /* If we ever use add_ranges_by_labels () for a single
11728 function section, all we have to do is to take out
11729 the #if 0 above. */
11730 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11731 (*ranges_by_label)[lab_idx].begin,
11732 text_section_label,
11733 fmt, i * 2 * DWARF2_ADDR_SIZE);
11734 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11735 (*ranges_by_label)[lab_idx].end,
11736 text_section_label, NULL);
11737 #endif
11738 }
11739 else
11740 {
11741 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11742 (*ranges_by_label)[lab_idx].begin,
11743 fmt, i * 2 * DWARF2_ADDR_SIZE);
11744 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11745 (*ranges_by_label)[lab_idx].end,
11746 NULL);
11747 }
11748 }
11749 else
11750 {
11751 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11752 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11753 fmt = start_fmt;
11754 }
11755 }
11756 }
11757
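/* Standalone illustration, not GCC code: the shape of one .debug_ranges
   list as emitted by output_ranges above.  Each entry is a pair of
   addresses relative to the CU base address (DW_AT_low_pc when all code
   is in a single text section, zero when absolute addresses are used),
   and a (0, 0) pair terminates the list.  8-byte addresses and raw host
   byte order are assumed for this sketch only.  */
#if 0
#include <stdio.h>
#include <stdint.h>

static void
sketch_ranges_pair (FILE *out, uint64_t begin, uint64_t end, uint64_t base)
{
  uint64_t pair[2] = { begin - base, end - base };

  fwrite (pair, sizeof (uint64_t), 2, out);
}

static void
sketch_ranges_end (FILE *out)
{
  uint64_t zero[2] = { 0, 0 };  /* End-of-list marker.  */

  fwrite (zero, sizeof (uint64_t), 2, out);
}
#endif
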
11758 /* Non-zero if .debug_line_str should be used for .debug_line section
11759 strings or strings that are likely shareable with those. */
11760 #define DWARF5_USE_DEBUG_LINE_STR \
11761 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11762 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11763 /* FIXME: there is no .debug_line_str.dwo section, \
11764 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11765 && !dwarf_split_debug_info)
11766
11767 /* Assign .debug_rnglists indexes. */
11768
11769 static void
11770 index_rnglists (void)
11771 {
11772 unsigned i;
11773 dw_ranges *r;
11774
11775 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11776 if (r->label)
11777 r->idx = rnglist_idx++;
11778 }
11779
11780 /* Emit .debug_rnglists section. */
11781
11782 static void
11783 output_rnglists (unsigned generation)
11784 {
11785 unsigned i;
11786 dw_ranges *r;
11787 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11788 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11789 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11790
11791 switch_to_section (debug_ranges_section);
11792 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11793 /* There are up to 4 unique ranges labels per generation.
11794 See also init_sections_and_labels. */
11795 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11796 2 + generation * 4);
11797 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11798 3 + generation * 4);
11799 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11800 dw2_asm_output_data (4, 0xffffffff,
11801 "Initial length escape value indicating "
11802 "64-bit DWARF extension");
11803 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11804 "Length of Range Lists");
11805 ASM_OUTPUT_LABEL (asm_out_file, l1);
11806 output_dwarf_version ();
11807 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11808 dw2_asm_output_data (1, 0, "Segment Size");
11809 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11810 about relocation sizes and primarily care about the size of .debug*
11811 sections in linked shared libraries and executables, then
11812 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11813 into it are usually larger than just DW_FORM_sec_offset offsets
11814 into the .debug_rnglists section. */
11815 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11816 "Offset Entry Count");
11817 if (dwarf_split_debug_info)
11818 {
11819 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11820 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11821 if (r->label)
11822 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11823 ranges_base_label, NULL);
11824 }
11825
11826 const char *lab = "";
11827 unsigned int len = vec_safe_length (ranges_table);
11828 const char *base = NULL;
11829 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11830 {
11831 int block_num = r->num;
11832
11833 if (r->label)
11834 {
11835 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11836 lab = r->label;
11837 }
11838 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11839 base = NULL;
11840 if (block_num > 0)
11841 {
11842 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11843 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11844
11845 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11846 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11847
11848 if (HAVE_AS_LEB128)
11849 {
11850 /* If all code is in the text section, then the compilation
11851 unit base address defaults to DW_AT_low_pc, which is the
11852 base of the text section. */
11853 if (!have_multiple_function_sections)
11854 {
11855 dw2_asm_output_data (1, DW_RLE_offset_pair,
11856 "DW_RLE_offset_pair (%s)", lab);
11857 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11858 "Range begin address (%s)", lab);
11859 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11860 "Range end address (%s)", lab);
11861 continue;
11862 }
11863 if (base == NULL)
11864 {
11865 dw_ranges *r2 = NULL;
11866 if (i < len - 1)
11867 r2 = &(*ranges_table)[i + 1];
11868 if (r2
11869 && r2->num != 0
11870 && r2->label == NULL
11871 && !r2->maybe_new_sec)
11872 {
11873 dw2_asm_output_data (1, DW_RLE_base_address,
11874 "DW_RLE_base_address (%s)", lab);
11875 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11876 "Base address (%s)", lab);
11877 strcpy (basebuf, blabel);
11878 base = basebuf;
11879 }
11880 }
11881 if (base)
11882 {
11883 dw2_asm_output_data (1, DW_RLE_offset_pair,
11884 "DW_RLE_offset_pair (%s)", lab);
11885 dw2_asm_output_delta_uleb128 (blabel, base,
11886 "Range begin address (%s)", lab);
11887 dw2_asm_output_delta_uleb128 (elabel, base,
11888 "Range end address (%s)", lab);
11889 continue;
11890 }
11891 dw2_asm_output_data (1, DW_RLE_start_length,
11892 "DW_RLE_start_length (%s)", lab);
11893 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11894 "Range begin address (%s)", lab);
11895 dw2_asm_output_delta_uleb128 (elabel, blabel,
11896 "Range length (%s)", lab);
11897 }
11898 else
11899 {
11900 dw2_asm_output_data (1, DW_RLE_start_end,
11901 "DW_RLE_start_end (%s)", lab);
11902 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11903 "Range begin address (%s)", lab);
11904 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11905 "Range end address (%s)", lab);
11906 }
11907 }
11908
11909 /* Negative block_num stands for an index into ranges_by_label. */
11910 else if (block_num < 0)
11911 {
11912 int lab_idx = - block_num - 1;
11913 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11914 const char *elabel = (*ranges_by_label)[lab_idx].end;
11915
11916 if (!have_multiple_function_sections)
11917 gcc_unreachable ();
11918 if (HAVE_AS_LEB128)
11919 {
11920 dw2_asm_output_data (1, DW_RLE_start_length,
11921 "DW_RLE_start_length (%s)", lab);
11922 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11923 "Range begin address (%s)", lab);
11924 dw2_asm_output_delta_uleb128 (elabel, blabel,
11925 "Range length (%s)", lab);
11926 }
11927 else
11928 {
11929 dw2_asm_output_data (1, DW_RLE_start_end,
11930 "DW_RLE_start_end (%s)", lab);
11931 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11932 "Range begin address (%s)", lab);
11933 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11934 "Range end address (%s)", lab);
11935 }
11936 }
11937 else
11938 dw2_asm_output_data (1, DW_RLE_end_of_list,
11939 "DW_RLE_end_of_list (%s)", lab);
11940 }
11941 ASM_OUTPUT_LABEL (asm_out_file, l2);
11942 }
11943
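/* Standalone illustration, not GCC code: the DWARF 5 .debug_rnglists
   entry kinds used by output_rnglists above.  The opcode values are
   taken from DWARF 5, section 7.25; 8-byte addresses and raw host byte
   order are assumptions made only for this sketch.  */
#if 0
#include <stdio.h>
#include <stdint.h>

#define SK_DW_RLE_end_of_list   0x00
#define SK_DW_RLE_offset_pair   0x04
#define SK_DW_RLE_base_address  0x05
#define SK_DW_RLE_start_length  0x07

static void
sk_uleb128 (FILE *out, uint64_t v)
{
  do
    {
      unsigned char b = v & 0x7f;
      v >>= 7;
      fputc (v ? (b | 0x80) : b, out);
    }
  while (v);
}

/* Establish a new base address for subsequent offset pairs.  */
static void
sk_rnglist_base (FILE *out, uint64_t base)
{
  fputc (SK_DW_RLE_base_address, out);
  fwrite (&base, 8, 1, out);
}

/* Emit one contiguous range.  When BASE is known an offset pair is the
   most compact encoding; otherwise fall back to start+length.  */
static void
sk_rnglist_range (FILE *out, uint64_t begin, uint64_t end,
                  const uint64_t *base)
{
  if (base)
    {
      fputc (SK_DW_RLE_offset_pair, out);
      sk_uleb128 (out, begin - *base);
      sk_uleb128 (out, end - *base);
    }
  else
    {
      fputc (SK_DW_RLE_start_length, out);
      fwrite (&begin, 8, 1, out);       /* Absolute start address.  */
      sk_uleb128 (out, end - begin);    /* Length.  */
    }
}

static void
sk_rnglist_end (FILE *out)
{
  fputc (SK_DW_RLE_end_of_list, out);
}
#endif
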
11944 /* Data structure containing information about input files. */
11945 struct file_info
11946 {
11947 const char *path; /* Complete file name. */
11948 const char *fname; /* File name part. */
11949 int length; /* Length of entire string. */
11950 struct dwarf_file_data * file_idx; /* Index in input file table. */
11951 int dir_idx; /* Index in directory table. */
11952 };
11953
11954 /* Data structure containing information about directories with source
11955 files. */
11956 struct dir_info
11957 {
11958 const char *path; /* Path including directory name. */
11959 int length; /* Path length. */
11960 int prefix; /* Index of directory entry which is a prefix. */
11961 int count; /* Number of files in this directory. */
11962 int dir_idx; /* Index of directory used as base. */
11963 };
11964
11965 /* Callback function for file_info comparison. We sort by looking at
11966 the directories in the path. */
11967
11968 static int
11969 file_info_cmp (const void *p1, const void *p2)
11970 {
11971 const struct file_info *const s1 = (const struct file_info *) p1;
11972 const struct file_info *const s2 = (const struct file_info *) p2;
11973 const unsigned char *cp1;
11974 const unsigned char *cp2;
11975
11976 /* Take care of file names without directories.  We must return
11977 consistent values to qsort: some implementations get confused if the
11978 comparison returns the same nonzero value for identical operands
11979 passed in opposite orders.  So if neither has a directory, return 0;
11980 otherwise return 1 or -1 depending on which one has the directory.
11981 We want the one with the directory to sort after the one without, so
11982 all no-directory files come first (normally only the compilation unit file). */
11983 if ((s1->path == s1->fname || s2->path == s2->fname))
11984 return (s2->path == s2->fname) - (s1->path == s1->fname);
11985
11986 cp1 = (const unsigned char *) s1->path;
11987 cp2 = (const unsigned char *) s2->path;
11988
11989 while (1)
11990 {
11991 ++cp1;
11992 ++cp2;
11993 /* Reached the end of the first path? If so, handle like above,
11994 but now we want longer directory prefixes before shorter ones. */
11995 if ((cp1 == (const unsigned char *) s1->fname)
11996 || (cp2 == (const unsigned char *) s2->fname))
11997 return ((cp1 == (const unsigned char *) s1->fname)
11998 - (cp2 == (const unsigned char *) s2->fname));
11999
12000 /* Character of current path component the same? */
12001 else if (*cp1 != *cp2)
12002 return *cp1 - *cp2;
12003 }
12004 }
12005
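/* Standalone illustration, not GCC code: a simplified version of the
   ordering file_info_cmp above establishes, operating on plain strings
   and ignoring the fname/prefix bookkeeping.  Paths without a directory
   component sort first; the comparator is antisymmetric, which is the
   consistency qsort requires.  */
#if 0
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

static int
sk_path_cmp (const void *p1, const void *p2)
{
  const char *a = *(const char *const *) p1;
  const char *b = *(const char *const *) p2;
  int a_has_dir = strchr (a, '/') != NULL;
  int b_has_dir = strchr (b, '/') != NULL;

  /* Directory-less names first; return 0 only when both lack one.  */
  if (!a_has_dir || !b_has_dir)
    return a_has_dir - b_has_dir;
  return strcmp (a, b);
}

static void
sk_demo (void)
{
  const char *paths[] = { "src/a.c", "t.c", "include/x.h", "src/b.c" };
  int i;

  qsort (paths, 4, sizeof (paths[0]), sk_path_cmp);
  /* Prints: t.c, include/x.h, src/a.c, src/b.c.  */
  for (i = 0; i < 4; i++)
    printf ("%s\n", paths[i]);
}
#endif
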
12006 struct file_name_acquire_data
12007 {
12008 struct file_info *files;
12009 int used_files;
12010 int max_files;
12011 };
12012
12013 /* Traversal function for the hash table. */
12014
12015 int
12016 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
12017 {
12018 struct dwarf_file_data *d = *slot;
12019 struct file_info *fi;
12020 const char *f;
12021
12022 gcc_assert (fnad->max_files >= d->emitted_number);
12023
12024 if (! d->emitted_number)
12025 return 1;
12026
12027 gcc_assert (fnad->max_files != fnad->used_files);
12028
12029 fi = fnad->files + fnad->used_files++;
12030
12031 /* Skip all leading "./". */
12032 f = d->filename;
12033 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
12034 f += 2;
12035
12036 /* Create a new array entry. */
12037 fi->path = f;
12038 fi->length = strlen (f);
12039 fi->file_idx = d;
12040
12041 /* Search for the file name part. */
12042 f = strrchr (f, DIR_SEPARATOR);
12043 #if defined (DIR_SEPARATOR_2)
12044 {
12045 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
12046
12047 if (g != NULL)
12048 {
12049 if (f == NULL || f < g)
12050 f = g;
12051 }
12052 }
12053 #endif
12054
12055 fi->fname = f == NULL ? fi->path : f + 1;
12056 return 1;
12057 }
12058
12059 /* Helper function for output_file_names.  Emit a FORM-encoded
12060 string STR, with assembly comment prefix ENTRY_KIND and
12061 index IDX. */
12062
12063 static void
12064 output_line_string (enum dwarf_form form, const char *str,
12065 const char *entry_kind, unsigned int idx)
12066 {
12067 switch (form)
12068 {
12069 case DW_FORM_string:
12070 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
12071 break;
12072 case DW_FORM_line_strp:
12073 if (!debug_line_str_hash)
12074 debug_line_str_hash
12075 = hash_table<indirect_string_hasher>::create_ggc (10);
12076
12077 struct indirect_string_node *node;
12078 node = find_AT_string_in_table (str, debug_line_str_hash);
12079 set_indirect_string (node);
12080 node->form = form;
12081 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
12082 debug_line_str_section, "%s: %#x: \"%s\"",
12083 entry_kind, 0, node->str);
12084 break;
12085 default:
12086 gcc_unreachable ();
12087 }
12088 }
12089
12090 /* Output the directory table and the file name table. We try to minimize
12091 the total amount of memory needed. A heuristic is used to avoid large
12092 slowdowns with many input files. */
12093
12094 static void
12095 output_file_names (void)
12096 {
12097 struct file_name_acquire_data fnad;
12098 int numfiles;
12099 struct file_info *files;
12100 struct dir_info *dirs;
12101 int *saved;
12102 int *savehere;
12103 int *backmap;
12104 int ndirs;
12105 int idx_offset;
12106 int i;
12107
12108 if (!last_emitted_file)
12109 {
12110 if (dwarf_version >= 5)
12111 {
12112 dw2_asm_output_data (1, 0, "Directory entry format count");
12113 dw2_asm_output_data_uleb128 (0, "Directories count");
12114 dw2_asm_output_data (1, 0, "File name entry format count");
12115 dw2_asm_output_data_uleb128 (0, "File names count");
12116 }
12117 else
12118 {
12119 dw2_asm_output_data (1, 0, "End directory table");
12120 dw2_asm_output_data (1, 0, "End file name table");
12121 }
12122 return;
12123 }
12124
12125 numfiles = last_emitted_file->emitted_number;
12126
12127 /* Allocate the various arrays we need. */
12128 files = XALLOCAVEC (struct file_info, numfiles);
12129 dirs = XALLOCAVEC (struct dir_info, numfiles);
12130
12131 fnad.files = files;
12132 fnad.used_files = 0;
12133 fnad.max_files = numfiles;
12134 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12135 gcc_assert (fnad.used_files == fnad.max_files);
12136
12137 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12138
12139 /* Find all the different directories used. */
12140 dirs[0].path = files[0].path;
12141 dirs[0].length = files[0].fname - files[0].path;
12142 dirs[0].prefix = -1;
12143 dirs[0].count = 1;
12144 dirs[0].dir_idx = 0;
12145 files[0].dir_idx = 0;
12146 ndirs = 1;
12147
12148 for (i = 1; i < numfiles; i++)
12149 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12150 && memcmp (dirs[ndirs - 1].path, files[i].path,
12151 dirs[ndirs - 1].length) == 0)
12152 {
12153 /* Same directory as last entry. */
12154 files[i].dir_idx = ndirs - 1;
12155 ++dirs[ndirs - 1].count;
12156 }
12157 else
12158 {
12159 int j;
12160
12161 /* This is a new directory. */
12162 dirs[ndirs].path = files[i].path;
12163 dirs[ndirs].length = files[i].fname - files[i].path;
12164 dirs[ndirs].count = 1;
12165 dirs[ndirs].dir_idx = ndirs;
12166 files[i].dir_idx = ndirs;
12167
12168 /* Search for a prefix. */
12169 dirs[ndirs].prefix = -1;
12170 for (j = 0; j < ndirs; j++)
12171 if (dirs[j].length < dirs[ndirs].length
12172 && dirs[j].length > 1
12173 && (dirs[ndirs].prefix == -1
12174 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12175 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12176 dirs[ndirs].prefix = j;
12177
12178 ++ndirs;
12179 }
12180
12181 /* Now to the actual work.  We have to find a subset of the directories
12182 which allows expressing the file names using references to the
12183 directory table with the fewest characters.  We do not do an
12184 exhaustive search over every combination of every possible prefix.
12185 Instead we use a heuristic which gives nearly optimal results in most
12186 cases and is never far off.  */
12187 saved = XALLOCAVEC (int, ndirs);
12188 savehere = XALLOCAVEC (int, ndirs);
12189
12190 memset (saved, '\0', ndirs * sizeof (saved[0]));
12191 for (i = 0; i < ndirs; i++)
12192 {
12193 int j;
12194 int total;
12195
12196 /* We can always save some space for the current directory. But this
12197 does not mean it will be enough to justify adding the directory. */
12198 savehere[i] = dirs[i].length;
12199 total = (savehere[i] - saved[i]) * dirs[i].count;
12200
12201 for (j = i + 1; j < ndirs; j++)
12202 {
12203 savehere[j] = 0;
12204 if (saved[j] < dirs[i].length)
12205 {
12206 /* Determine whether the dirs[i] path is a prefix of the
12207 dirs[j] path. */
12208 int k;
12209
12210 k = dirs[j].prefix;
12211 while (k != -1 && k != (int) i)
12212 k = dirs[k].prefix;
12213
12214 if (k == (int) i)
12215 {
12216 /* Yes it is. We can possibly save some memory by
12217 writing the filenames in dirs[j] relative to
12218 dirs[i]. */
12219 savehere[j] = dirs[i].length;
12220 total += (savehere[j] - saved[j]) * dirs[j].count;
12221 }
12222 }
12223 }
12224
12225 /* Check whether we can save enough to justify adding the dirs[i]
12226 directory. */
12227 if (total > dirs[i].length + 1)
12228 {
12229 /* It's worthwhile adding. */
12230 for (j = i; j < ndirs; j++)
12231 if (savehere[j] > 0)
12232 {
12233 /* Remember how much we saved for this directory so far. */
12234 saved[j] = savehere[j];
12235
12236 /* Remember the prefix directory. */
12237 dirs[j].dir_idx = i;
12238 }
12239 }
12240 }
12241
12242 /* Emit the directory name table. */
12243 idx_offset = dirs[0].length > 0 ? 1 : 0;
12244 enum dwarf_form str_form = DW_FORM_string;
12245 enum dwarf_form idx_form = DW_FORM_udata;
12246 if (dwarf_version >= 5)
12247 {
12248 const char *comp_dir = comp_dir_string ();
12249 if (comp_dir == NULL)
12250 comp_dir = "";
12251 dw2_asm_output_data (1, 1, "Directory entry format count");
12252 if (DWARF5_USE_DEBUG_LINE_STR)
12253 str_form = DW_FORM_line_strp;
12254 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12255 dw2_asm_output_data_uleb128 (str_form, "%s",
12256 get_DW_FORM_name (str_form));
12257 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12258 if (str_form == DW_FORM_string)
12259 {
12260 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12261 for (i = 1 - idx_offset; i < ndirs; i++)
12262 dw2_asm_output_nstring (dirs[i].path,
12263 dirs[i].length
12264 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12265 "Directory Entry: %#x", i + idx_offset);
12266 }
12267 else
12268 {
12269 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12270 for (i = 1 - idx_offset; i < ndirs; i++)
12271 {
12272 const char *str
12273 = ggc_alloc_string (dirs[i].path,
12274 dirs[i].length
12275 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12276 output_line_string (str_form, str, "Directory Entry",
12277 (unsigned) i + idx_offset);
12278 }
12279 }
12280 }
12281 else
12282 {
12283 for (i = 1 - idx_offset; i < ndirs; i++)
12284 dw2_asm_output_nstring (dirs[i].path,
12285 dirs[i].length
12286 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12287 "Directory Entry: %#x", i + idx_offset);
12288
12289 dw2_asm_output_data (1, 0, "End directory table");
12290 }
12291
12292 /* We have to emit them in the order of emitted_number since that's
12293 used in the debug info generation. To do this efficiently we
12294 generate a back-mapping of the indices first. */
12295 backmap = XALLOCAVEC (int, numfiles);
12296 for (i = 0; i < numfiles; i++)
12297 backmap[files[i].file_idx->emitted_number - 1] = i;
12298
12299 if (dwarf_version >= 5)
12300 {
12301 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12302 if (filename0 == NULL)
12303 filename0 = "";
12304 /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12305 DW_FORM_data2.  Choose one based on the number of directories
12306 and how much space they would occupy in each encoding.
12307 If we have at most 256 directories, all indexes fit into a
12308 single byte, so DW_FORM_data1 is most compact (with at most
12309 128 directories, DW_FORM_udata would be equally compact, but
12310 no shorter and slower to decode). */
12311 if (ndirs + idx_offset <= 256)
12312 idx_form = DW_FORM_data1;
12313 /* If there are more than 65536 directories, we have to use
12314 DW_FORM_udata, since DW_FORM_data2 can't refer to them.
12315 Otherwise, compute how much space all the indexes would occupy
12316 if they used DW_FORM_udata (sum), compare that to the size of the
12317 DW_FORM_data2 encoding, and pick the more efficient one; a standalone sketch of this comparison follows this function. */
12318 else if (ndirs + idx_offset <= 65536)
12319 {
12320 unsigned HOST_WIDE_INT sum = 1;
12321 for (i = 0; i < numfiles; i++)
12322 {
12323 int file_idx = backmap[i];
12324 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12325 sum += size_of_uleb128 (dir_idx);
12326 }
12327 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12328 idx_form = DW_FORM_data2;
12329 }
12330 #ifdef VMS_DEBUGGING_INFO
12331 dw2_asm_output_data (1, 4, "File name entry format count");
12332 #else
12333 dw2_asm_output_data (1, 2, "File name entry format count");
12334 #endif
12335 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12336 dw2_asm_output_data_uleb128 (str_form, "%s",
12337 get_DW_FORM_name (str_form));
12338 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12339 "DW_LNCT_directory_index");
12340 dw2_asm_output_data_uleb128 (idx_form, "%s",
12341 get_DW_FORM_name (idx_form));
12342 #ifdef VMS_DEBUGGING_INFO
12343 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12344 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12345 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12346 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12347 #endif
12348 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12349
12350 output_line_string (str_form, filename0, "File Entry", 0);
12351
12352 /* Include directory index. */
12353 if (idx_form != DW_FORM_udata)
12354 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12355 0, NULL);
12356 else
12357 dw2_asm_output_data_uleb128 (0, NULL);
12358
12359 #ifdef VMS_DEBUGGING_INFO
12360 dw2_asm_output_data_uleb128 (0, NULL);
12361 dw2_asm_output_data_uleb128 (0, NULL);
12362 #endif
12363 }
12364
12365 /* Now write all the file names. */
12366 for (i = 0; i < numfiles; i++)
12367 {
12368 int file_idx = backmap[i];
12369 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12370
12371 #ifdef VMS_DEBUGGING_INFO
12372 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12373
12374 /* Setting these fields can lead to debugger miscomparisons,
12375 but VMS Debug requires them to be set correctly. */
12376
12377 int ver;
12378 long long cdt;
12379 long siz;
12380 int maxfilelen = (strlen (files[file_idx].path)
12381 + dirs[dir_idx].length
12382 + MAX_VMS_VERSION_LEN + 1);
12383 char *filebuf = XALLOCAVEC (char, maxfilelen);
12384
12385 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12386 snprintf (filebuf, maxfilelen, "%s;%d",
12387 files[file_idx].path + dirs[dir_idx].length, ver);
12388
12389 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12390
12391 /* Include directory index. */
12392 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12393 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12394 dir_idx + idx_offset, NULL);
12395 else
12396 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12397
12398 /* Modification time. */
12399 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12400 &cdt, 0, 0, 0) == 0)
12401 ? cdt : 0, NULL);
12402
12403 /* File length in bytes. */
12404 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12405 0, &siz, 0, 0) == 0)
12406 ? siz : 0, NULL);
12407 #else
12408 output_line_string (str_form,
12409 files[file_idx].path + dirs[dir_idx].length,
12410 "File Entry", (unsigned) i + 1);
12411
12412 /* Include directory index. */
12413 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12414 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12415 dir_idx + idx_offset, NULL);
12416 else
12417 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12418
12419 if (dwarf_version >= 5)
12420 continue;
12421
12422 /* Modification time. */
12423 dw2_asm_output_data_uleb128 (0, NULL);
12424
12425 /* File length in bytes. */
12426 dw2_asm_output_data_uleb128 (0, NULL);
12427 #endif /* VMS_DEBUGGING_INFO */
12428 }
12429
12430 if (dwarf_version < 5)
12431 dw2_asm_output_data (1, 0, "End file name table");
12432 }
12433
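/* Standalone illustration, not GCC code: the directory-index form
   choice made in output_file_names above, with size_of_uleb128 modeled
   by sk_uleb128_size.  It is simplified in that it ignores the extra
   entry emitted for file 0; the function and parameter names are
   assumptions for this sketch only.  */
#if 0
#include <stddef.h>
#include <stdint.h>

static size_t
sk_uleb128_size (uint64_t v)
{
  size_t n = 1;

  while (v >>= 7)
    n++;
  return n;                     /* 1 byte for 0..127, 2 for 128..16383, ...  */
}

/* Return 1 for DW_FORM_data1, 2 for DW_FORM_data2 and 0 for
   DW_FORM_udata, given the number of directory entries and the
   per-file directory indexes.  */
static int
sk_pick_dir_index_form (size_t ndirs, const unsigned int *dir_idx,
                        size_t nfiles)
{
  size_t udata_total = 0;
  size_t i;

  if (ndirs <= 256)
    return 1;                   /* Every index fits in one byte.  */
  if (ndirs > 65536)
    return 0;                   /* DW_FORM_data2 cannot reach them all.  */

  for (i = 0; i < nfiles; i++)
    udata_total += sk_uleb128_size (dir_idx[i]);
  /* DW_FORM_data2 always costs two bytes per file entry.  */
  return udata_total >= 2 * nfiles ? 2 : 0;
}
#endif
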
12434
12435 /* Output one line number table into the .debug_line section. */
12436
12437 static void
12438 output_one_line_info_table (dw_line_info_table *table)
12439 {
12440 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12441 unsigned int current_line = 1;
12442 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12443 dw_line_info_entry *ent, *prev_addr;
12444 size_t i;
12445 unsigned int view;
12446
12447 view = 0;
12448
12449 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12450 {
12451 switch (ent->opcode)
12452 {
12453 case LI_set_address:
12454 /* ??? Unfortunately, we have little choice here currently, and
12455 must always use the most general form. GCC does not know the
12456 address delta itself, so we can't use DW_LNS_advance_pc. Many
12457 ports do have length attributes which will give an upper bound
12458 on the address range. We could perhaps use length attributes
12459 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
12460 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12461
12462 view = 0;
12463
12464 /* This can handle any delta. This takes
12465 4+DWARF2_ADDR_SIZE bytes. */
12466 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12467 debug_variable_location_views
12468 ? ", reset view to 0" : "");
12469 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12470 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12471 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
12472
12473 prev_addr = ent;
12474 break;
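/* Illustrative sketch: DW_LNE_set_address is an extended opcode, so the four
   dw2_asm_* calls above encode it as

       0x00                      # extended opcode introducer
       .uleb128 1 + addr_size    # length of the sub-opcode plus its operand
       0x02                      # DW_LNE_set_address
       <addr_size bytes>         # the address of the label

   i.e. 3 + DWARF2_ADDR_SIZE bytes in total, since the uleb128 length always
   fits in one byte here.  */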
12475
12476 case LI_adv_address:
12477 {
12478 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12479 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12480 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12481
12482 view++;
12483
12484 dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
12485 dw2_asm_output_delta (2, line_label, prev_label,
12486 "from %s to %s", prev_label, line_label);
12487
12488 prev_addr = ent;
12489 break;
12490 }
12491
12492 case LI_set_line:
12493 if (ent->val == current_line)
12494 {
12495 /* We still need to start a new row, so output a copy insn. */
12496 dw2_asm_output_data (1, DW_LNS_copy,
12497 "copy line %u", current_line);
12498 }
12499 else
12500 {
12501 int line_offset = ent->val - current_line;
12502 int line_delta = line_offset - DWARF_LINE_BASE;
12503
12504 current_line = ent->val;
12505 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12506 {
12507 /* This can handle deltas from -10 to 234, using the current
12508 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12509 This takes 1 byte. */
12510 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12511 "line %u", current_line);
12512 }
12513 else
12514 {
12515 /* This can handle any delta. This takes at least 4 bytes,
12516 depending on the value being encoded. */
12517 dw2_asm_output_data (1, DW_LNS_advance_line,
12518 "advance to line %u", current_line);
12519 dw2_asm_output_data_sleb128 (line_offset, NULL);
12520 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12521 }
12522 }
12523 break;
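/* Illustrative sketch: in general a DWARF special opcode encodes a combined
   line and address advance in a single byte,

       opcode = (line_delta - line_base)
                + (line_range * operation_advance)
                + opcode_base

   but this emitter always advances the address separately (DW_LNE_set_address
   or DW_LNS_fixed_advance_pc), so the operation advance baked into the byte
   emitted above is 0 and it reduces to
   DWARF_LINE_OPCODE_BASE + line_offset - DWARF_LINE_BASE,
   where line_offset is the signed delta from the previous line.  */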
12524
12525 case LI_set_file:
12526 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12527 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12528 break;
12529
12530 case LI_set_column:
12531 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12532 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12533 break;
12534
12535 case LI_negate_stmt:
12536 current_is_stmt = !current_is_stmt;
12537 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12538 "is_stmt %d", current_is_stmt);
12539 break;
12540
12541 case LI_set_prologue_end:
12542 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12543 "set prologue end");
12544 break;
12545
12546 case LI_set_epilogue_begin:
12547 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12548 "set epilogue begin");
12549 break;
12550
12551 case LI_set_discriminator:
12552 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12553 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12554 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12555 dw2_asm_output_data_uleb128 (ent->val, NULL);
12556 break;
12557 }
12558 }
12559
12560 /* Emit debug info for the address of the end of the table. */
12561 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12562 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12563 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12564 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12565
12566 dw2_asm_output_data (1, 0, "end sequence");
12567 dw2_asm_output_data_uleb128 (1, NULL);
12568 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
12569 }
12570
12571 /* Output the source line number correspondence information. This
12572 information goes into the .debug_line section. */
12573
12574 static void
12575 output_line_info (bool prologue_only)
12576 {
12577 static unsigned int generation;
12578 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12579 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12580 bool saw_one = false;
12581 int opc;
12582
12583 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
12584 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
12585 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
12586 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
12587
12588 if (!XCOFF_DEBUGGING_INFO)
12589 {
12590 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
12591 dw2_asm_output_data (4, 0xffffffff,
12592 "Initial length escape value indicating 64-bit DWARF extension");
12593 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
12594 "Length of Source Line Info");
12595 }
12596
12597 ASM_OUTPUT_LABEL (asm_out_file, l1);
12598
12599 output_dwarf_version ();
12600 if (dwarf_version >= 5)
12601 {
12602 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12603 dw2_asm_output_data (1, 0, "Segment Size");
12604 }
12605 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12606 ASM_OUTPUT_LABEL (asm_out_file, p1);
12607
12608 /* Define the architecture-dependent minimum instruction length (in bytes).
12609 In this implementation of DWARF, this field is used for information
12610 purposes only. Since GCC generates assembly language, we have no
12611 a priori knowledge of how many instruction bytes are generated for each
12612 source line, and therefore can use only the DW_LNE_set_address and
12613 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12614 this as '1', which is "correct enough" for all architectures,
12615 and don't let the target override. */
12616 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12617
12618 if (dwarf_version >= 4)
12619 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12620 "Maximum Operations Per Instruction");
12621 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12622 "Default is_stmt_start flag");
12623 dw2_asm_output_data (1, DWARF_LINE_BASE,
12624 "Line Base Value (Special Opcodes)");
12625 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12626 "Line Range Value (Special Opcodes)");
12627 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12628 "Special Opcode Base");
12629
12630 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12631 {
12632 int n_op_args;
12633 switch (opc)
12634 {
12635 case DW_LNS_advance_pc:
12636 case DW_LNS_advance_line:
12637 case DW_LNS_set_file:
12638 case DW_LNS_set_column:
12639 case DW_LNS_fixed_advance_pc:
12640 case DW_LNS_set_isa:
12641 n_op_args = 1;
12642 break;
12643 default:
12644 n_op_args = 0;
12645 break;
12646 }
12647
12648 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12649 opc, n_op_args);
12650 }
12651
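/* Illustrative sketch: assuming the default DWARF_LINE_OPCODE_BASE of
   DW_LNS_set_isa + 1 (13), the loop above emits the standard opcode
   argument counts

       copy:0  advance_pc:1  advance_line:1  set_file:1  set_column:1
       negate_stmt:0  set_basic_block:0  const_add_pc:0
       fixed_advance_pc:1  set_prologue_end:0  set_epilogue_begin:0
       set_isa:1

   which consumers read back as the standard_opcode_lengths array.  */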
12652 /* Write out the information about the files we use. */
12653 output_file_names ();
12654 ASM_OUTPUT_LABEL (asm_out_file, p2);
12655 if (prologue_only)
12656 {
12657 /* Output the marker for the end of the line number info. */
12658 ASM_OUTPUT_LABEL (asm_out_file, l2);
12659 return;
12660 }
12661
12662 if (separate_line_info)
12663 {
12664 dw_line_info_table *table;
12665 size_t i;
12666
12667 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12668 if (table->in_use)
12669 {
12670 output_one_line_info_table (table);
12671 saw_one = true;
12672 }
12673 }
12674 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12675 {
12676 output_one_line_info_table (cold_text_section_line_info);
12677 saw_one = true;
12678 }
12679
12680 /* ??? Some Darwin linkers crash on a .debug_line section with no
12681 sequences. Further, merely a DW_LNE_end_sequence entry is not
12682 sufficient -- the address column must also be initialized.
12683 Make sure to output at least one set_address/end_sequence pair,
12684 choosing .text since that section is always present. */
12685 if (text_section_line_info->in_use || !saw_one)
12686 output_one_line_info_table (text_section_line_info);
12687
12688 /* Output the marker for the end of the line number info. */
12689 ASM_OUTPUT_LABEL (asm_out_file, l2);
12690 }
12691 \f
12692 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12693
12694 static inline bool
12695 need_endianity_attribute_p (bool reverse)
12696 {
12697 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12698 }
12699
12700 /* Given a pointer to a tree node for some base type, return a pointer to
12701 a DIE that describes the given type. REVERSE is true if the type is
12702 to be interpreted in the reverse storage order wrt the target order.
12703
12704 This routine must only be called for GCC type nodes that correspond to
12705 Dwarf base (fundamental) types. */
12706
12707 static dw_die_ref
12708 base_type_die (tree type, bool reverse)
12709 {
12710 dw_die_ref base_type_result;
12711 enum dwarf_type encoding;
12712 bool fpt_used = false;
12713 struct fixed_point_type_info fpt_info;
12714 tree type_bias = NULL_TREE;
12715
12716 /* If this is a subtype that should not be emitted as a subrange type,
12717 use the base type. See subrange_type_for_debug_p. */
12718 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12719 type = TREE_TYPE (type);
12720
12721 switch (TREE_CODE (type))
12722 {
12723 case INTEGER_TYPE:
12724 if ((dwarf_version >= 4 || !dwarf_strict)
12725 && TYPE_NAME (type)
12726 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12727 && DECL_IS_BUILTIN (TYPE_NAME (type))
12728 && DECL_NAME (TYPE_NAME (type)))
12729 {
12730 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12731 if (strcmp (name, "char16_t") == 0
12732 || strcmp (name, "char32_t") == 0)
12733 {
12734 encoding = DW_ATE_UTF;
12735 break;
12736 }
12737 }
12738 if ((dwarf_version >= 3 || !dwarf_strict)
12739 && lang_hooks.types.get_fixed_point_type_info)
12740 {
12741 memset (&fpt_info, 0, sizeof (fpt_info));
12742 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12743 {
12744 fpt_used = true;
12745 encoding = ((TYPE_UNSIGNED (type))
12746 ? DW_ATE_unsigned_fixed
12747 : DW_ATE_signed_fixed);
12748 break;
12749 }
12750 }
12751 if (TYPE_STRING_FLAG (type))
12752 {
12753 if (TYPE_UNSIGNED (type))
12754 encoding = DW_ATE_unsigned_char;
12755 else
12756 encoding = DW_ATE_signed_char;
12757 }
12758 else if (TYPE_UNSIGNED (type))
12759 encoding = DW_ATE_unsigned;
12760 else
12761 encoding = DW_ATE_signed;
12762
12763 if (!dwarf_strict
12764 && lang_hooks.types.get_type_bias)
12765 type_bias = lang_hooks.types.get_type_bias (type);
12766 break;
12767
12768 case REAL_TYPE:
12769 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12770 {
12771 if (dwarf_version >= 3 || !dwarf_strict)
12772 encoding = DW_ATE_decimal_float;
12773 else
12774 encoding = DW_ATE_lo_user;
12775 }
12776 else
12777 encoding = DW_ATE_float;
12778 break;
12779
12780 case FIXED_POINT_TYPE:
12781 if (!(dwarf_version >= 3 || !dwarf_strict))
12782 encoding = DW_ATE_lo_user;
12783 else if (TYPE_UNSIGNED (type))
12784 encoding = DW_ATE_unsigned_fixed;
12785 else
12786 encoding = DW_ATE_signed_fixed;
12787 break;
12788
12789 /* Dwarf2 doesn't know anything about complex ints, so use
12790 a user-defined type for them. */
12791 case COMPLEX_TYPE:
12792 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12793 encoding = DW_ATE_complex_float;
12794 else
12795 encoding = DW_ATE_lo_user;
12796 break;
12797
12798 case BOOLEAN_TYPE:
12799 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12800 encoding = DW_ATE_boolean;
12801 break;
12802
12803 default:
12804 /* No other TREE_CODEs are Dwarf fundamental types. */
12805 gcc_unreachable ();
12806 }
12807
12808 base_type_result = new_die_raw (DW_TAG_base_type);
12809
12810 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12811 int_size_in_bytes (type));
12812 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12813
12814 if (need_endianity_attribute_p (reverse))
12815 add_AT_unsigned (base_type_result, DW_AT_endianity,
12816 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12817
12818 add_alignment_attribute (base_type_result, type);
12819
12820 if (fpt_used)
12821 {
12822 switch (fpt_info.scale_factor_kind)
12823 {
12824 case fixed_point_scale_factor_binary:
12825 add_AT_int (base_type_result, DW_AT_binary_scale,
12826 fpt_info.scale_factor.binary);
12827 break;
12828
12829 case fixed_point_scale_factor_decimal:
12830 add_AT_int (base_type_result, DW_AT_decimal_scale,
12831 fpt_info.scale_factor.decimal);
12832 break;
12833
12834 case fixed_point_scale_factor_arbitrary:
12835 /* Arbitrary scale factors cannot be described in standard DWARF,
12836 yet. */
12837 if (!dwarf_strict)
12838 {
12839 /* Describe the scale factor as a rational constant. */
12840 const dw_die_ref scale_factor
12841 = new_die (DW_TAG_constant, comp_unit_die (), type);
12842
12843 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12844 fpt_info.scale_factor.arbitrary.numerator);
12845 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12846 fpt_info.scale_factor.arbitrary.denominator);
12847
12848 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12849 }
12850 break;
12851
12852 default:
12853 gcc_unreachable ();
12854 }
12855 }
12856
12857 if (type_bias)
12858 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12859 dw_scalar_form_constant
12860 | dw_scalar_form_exprloc
12861 | dw_scalar_form_reference,
12862 NULL);
12863
12864 return base_type_result;
12865 }
12866
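/* Illustrative sketch: for a C "unsigned int" on a target with 32-bit int,
   the function above produces roughly

       DW_TAG_base_type
         DW_AT_byte_size : 4
         DW_AT_encoding  : DW_ATE_unsigned

   DW_AT_endianity is added only for reverse storage order, and the
   DW_AT_name attribute is attached later by modified_type_die.  */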
12867 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12868 named 'auto' in its type: return true for it, false otherwise. */
12869
12870 static inline bool
12871 is_cxx_auto (tree type)
12872 {
12873 if (is_cxx ())
12874 {
12875 tree name = TYPE_IDENTIFIER (type);
12876 if (name == get_identifier ("auto")
12877 || name == get_identifier ("decltype(auto)"))
12878 return true;
12879 }
12880 return false;
12881 }
12882
12883 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12884 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
12885
12886 static inline int
12887 is_base_type (tree type)
12888 {
12889 switch (TREE_CODE (type))
12890 {
12891 case INTEGER_TYPE:
12892 case REAL_TYPE:
12893 case FIXED_POINT_TYPE:
12894 case COMPLEX_TYPE:
12895 case BOOLEAN_TYPE:
12896 return 1;
12897
12898 case VOID_TYPE:
12899 case ARRAY_TYPE:
12900 case RECORD_TYPE:
12901 case UNION_TYPE:
12902 case QUAL_UNION_TYPE:
12903 case ENUMERAL_TYPE:
12904 case FUNCTION_TYPE:
12905 case METHOD_TYPE:
12906 case POINTER_TYPE:
12907 case REFERENCE_TYPE:
12908 case NULLPTR_TYPE:
12909 case OFFSET_TYPE:
12910 case LANG_TYPE:
12911 case VECTOR_TYPE:
12912 return 0;
12913
12914 default:
12915 if (is_cxx_auto (type))
12916 return 0;
12917 gcc_unreachable ();
12918 }
12919
12920 return 0;
12921 }
12922
12923 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12924 node, return the size in bits for the type if it is a constant, or else
12925 return the alignment for the type if the type's size is not constant, or
12926 else return BITS_PER_WORD if the type actually turns out to be an
12927 ERROR_MARK node. */
12928
12929 static inline unsigned HOST_WIDE_INT
12930 simple_type_size_in_bits (const_tree type)
12931 {
12932 if (TREE_CODE (type) == ERROR_MARK)
12933 return BITS_PER_WORD;
12934 else if (TYPE_SIZE (type) == NULL_TREE)
12935 return 0;
12936 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
12937 return tree_to_uhwi (TYPE_SIZE (type));
12938 else
12939 return TYPE_ALIGN (type);
12940 }
12941
12942 /* Similarly, but return an offset_int instead of UHWI. */
12943
12944 static inline offset_int
12945 offset_int_type_size_in_bits (const_tree type)
12946 {
12947 if (TREE_CODE (type) == ERROR_MARK)
12948 return BITS_PER_WORD;
12949 else if (TYPE_SIZE (type) == NULL_TREE)
12950 return 0;
12951 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
12952 return wi::to_offset (TYPE_SIZE (type));
12953 else
12954 return TYPE_ALIGN (type);
12955 }
12956
12957 /* Given a pointer to a tree node for a subrange type, return a pointer
12958 to a DIE that describes the given type. */
12959
12960 static dw_die_ref
12961 subrange_type_die (tree type, tree low, tree high, tree bias,
12962 dw_die_ref context_die)
12963 {
12964 dw_die_ref subrange_die;
12965 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
12966
12967 if (context_die == NULL)
12968 context_die = comp_unit_die ();
12969
12970 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
12971
12972 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
12973 {
12974 /* The size of the subrange type and its base type do not match,
12975 so we need to generate a size attribute for the subrange type. */
12976 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
12977 }
12978
12979 add_alignment_attribute (subrange_die, type);
12980
12981 if (low)
12982 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
12983 if (high)
12984 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
12985 if (bias && !dwarf_strict)
12986 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
12987 dw_scalar_form_constant
12988 | dw_scalar_form_exprloc
12989 | dw_scalar_form_reference,
12990 NULL);
12991
12992 return subrange_die;
12993 }
12994
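/* Illustrative sketch: for an Ada-like integer subtype with static range
   1 .. 10 the function above yields roughly

       DW_TAG_subrange_type
         DW_AT_lower_bound : 1
         DW_AT_upper_bound : 10

   DW_AT_byte_size is added only when the subrange and its base type differ
   in size, DW_AT_GNU_bias only for biased types when -gstrict-dwarf is not
   in effect, and the DW_AT_type link to the base type is added by the
   caller (modified_type_die).  */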
12995 /* Returns the (const and/or volatile) cv_qualifiers associated with
12996 the decl node. This will normally be augmented with the
12997 cv_qualifiers of the underlying type in add_type_attribute. */
12998
12999 static int
13000 decl_quals (const_tree decl)
13001 {
13002 return ((TREE_READONLY (decl)
13003 /* The C++ front-end correctly marks reference-typed
13004 variables as readonly, but from a language (and debug
13005 info) standpoint they are not const-qualified. */
13006 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
13007 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
13008 | (TREE_THIS_VOLATILE (decl)
13009 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
13010 }
13011
13012 /* Determine the variant of TYPE whose qualifiers form the largest strict
13013 subset of the given TYPE_QUALS, and return those qualifiers. Ignore all
13014 qualifiers outside QUAL_MASK. */
13015
13016 static int
13017 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
13018 {
13019 tree t;
13020 int best_rank = 0, best_qual = 0, max_rank;
13021
13022 type_quals &= qual_mask;
13023 max_rank = popcount_hwi (type_quals) - 1;
13024
13025 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
13026 t = TYPE_NEXT_VARIANT (t))
13027 {
13028 int q = TYPE_QUALS (t) & qual_mask;
13029
13030 if ((q & type_quals) == q && q != type_quals
13031 && check_base_type (t, type))
13032 {
13033 int rank = popcount_hwi (q);
13034
13035 if (rank > best_rank)
13036 {
13037 best_rank = rank;
13038 best_qual = q;
13039 }
13040 }
13041 }
13042
13043 return best_qual;
13044 }
13045
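/* Illustrative sketch: if TYPE_QUALS is const|volatile and TYPE's variant
   list contains a "const T" variant but no "volatile T" variant, the
   function above returns TYPE_QUAL_CONST (the largest strict subset of the
   requested qualifiers for which a variant exists); if only the unqualified
   and the fully qualified variants exist, it returns 0.  */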
13046 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
13047 static const dwarf_qual_info_t dwarf_qual_info[] =
13048 {
13049 { TYPE_QUAL_CONST, DW_TAG_const_type },
13050 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
13051 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
13052 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
13053 };
13054 static const unsigned int dwarf_qual_info_size
13055 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
13056
13057 /* If DIE is a qualified DIE of some base DIE with the same parent,
13058 return the base DIE, otherwise return NULL. Set MASK to the
13059 qualifiers added compared to the returned DIE. */
13060
13061 static dw_die_ref
13062 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
13063 {
13064 unsigned int i;
13065 for (i = 0; i < dwarf_qual_info_size; i++)
13066 if (die->die_tag == dwarf_qual_info[i].t)
13067 break;
13068 if (i == dwarf_qual_info_size)
13069 return NULL;
13070 if (vec_safe_length (die->die_attr) != 1)
13071 return NULL;
13072 dw_die_ref type = get_AT_ref (die, DW_AT_type);
13073 if (type == NULL || type->die_parent != die->die_parent)
13074 return NULL;
13075 *mask |= dwarf_qual_info[i].q;
13076 if (depth)
13077 {
13078 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
13079 if (ret)
13080 return ret;
13081 }
13082 return type;
13083 }
13084
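/* Illustrative sketch: in the simplest case the DIE chain built below for a
   "const volatile int" is

       DW_TAG_volatile_type --DW_AT_type--> DW_TAG_const_type
                                              --DW_AT_type--> base type "int"

   with all three DIEs sharing the same parent.  Calling qualified_die_p on
   the outer volatile DIE with a nonzero depth then returns the base type
   DIE and sets *mask to TYPE_QUAL_VOLATILE | TYPE_QUAL_CONST.  */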
13085 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
13086 entry that chains the modifiers specified by CV_QUALS in front of the
13087 given type. REVERSE is true if the type is to be interpreted in the
13088 reverse storage order wrt the target order. */
13089
13090 static dw_die_ref
13091 modified_type_die (tree type, int cv_quals, bool reverse,
13092 dw_die_ref context_die)
13093 {
13094 enum tree_code code = TREE_CODE (type);
13095 dw_die_ref mod_type_die;
13096 dw_die_ref sub_die = NULL;
13097 tree item_type = NULL;
13098 tree qualified_type;
13099 tree name, low, high;
13100 dw_die_ref mod_scope;
13101 /* Only these cv-qualifiers are currently handled. */
13102 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
13103 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
13104 ENCODE_QUAL_ADDR_SPACE(~0U));
13105 const bool reverse_base_type
13106 = need_endianity_attribute_p (reverse) && is_base_type (type);
13107
13108 if (code == ERROR_MARK)
13109 return NULL;
13110
13111 if (lang_hooks.types.get_debug_type)
13112 {
13113 tree debug_type = lang_hooks.types.get_debug_type (type);
13114
13115 if (debug_type != NULL_TREE && debug_type != type)
13116 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13117 }
13118
13119 cv_quals &= cv_qual_mask;
13120
13121 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
13122 tag modifier (and not an attribute) that old consumers won't be able
13123 to handle. */
13124 if (dwarf_version < 3)
13125 cv_quals &= ~TYPE_QUAL_RESTRICT;
13126
13127 /* Likewise, DW_TAG_atomic_type requires DWARFv5, so drop it for earlier versions. */
13128 if (dwarf_version < 5)
13129 cv_quals &= ~TYPE_QUAL_ATOMIC;
13130
13131 /* See if we already have the appropriately qualified variant of
13132 this type. */
13133 qualified_type = get_qualified_type (type, cv_quals);
13134
13135 if (qualified_type == sizetype)
13136 {
13137 /* Try not to expose the internal sizetype type's name. */
13138 if (TYPE_NAME (qualified_type)
13139 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13140 {
13141 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13142
13143 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13144 && (TYPE_PRECISION (t)
13145 == TYPE_PRECISION (qualified_type))
13146 && (TYPE_UNSIGNED (t)
13147 == TYPE_UNSIGNED (qualified_type)));
13148 qualified_type = t;
13149 }
13150 else if (qualified_type == sizetype
13151 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13152 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13153 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13154 qualified_type = size_type_node;
13155 }
13156
13157 /* If we do, then we can just use its DIE, if it exists. */
13158 if (qualified_type)
13159 {
13160 mod_type_die = lookup_type_die (qualified_type);
13161
13162 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13163 dealt with specially: the DIE with the attribute, if it exists, is
13164 placed immediately after the regular DIE for the same base type. */
13165 if (mod_type_die
13166 && (!reverse_base_type
13167 || ((mod_type_die = mod_type_die->die_sib) != NULL
13168 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13169 return mod_type_die;
13170 }
13171
13172 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13173
13174 /* Handle C typedef types. */
13175 if (name
13176 && TREE_CODE (name) == TYPE_DECL
13177 && DECL_ORIGINAL_TYPE (name)
13178 && !DECL_ARTIFICIAL (name))
13179 {
13180 tree dtype = TREE_TYPE (name);
13181
13182 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13183 if (qualified_type == dtype && !reverse_base_type)
13184 {
13185 tree origin = decl_ultimate_origin (name);
13186
13187 /* Typedef variants that have an abstract origin don't get their own
13188 type DIE (see gen_typedef_die), so fall back on the ultimate
13189 abstract origin instead. */
13190 if (origin != NULL && origin != name)
13191 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13192 context_die);
13193
13194 /* For a named type, use the typedef. */
13195 gen_type_die (qualified_type, context_die);
13196 return lookup_type_die (qualified_type);
13197 }
13198 else
13199 {
13200 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13201 dquals &= cv_qual_mask;
13202 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13203 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13204 /* cv-unqualified version of named type. Just use
13205 the unnamed type to which it refers. */
13206 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13207 reverse, context_die);
13208 /* Else cv-qualified version of named type; fall through. */
13209 }
13210 }
13211
13212 mod_scope = scope_die_for (type, context_die);
13213
13214 if (cv_quals)
13215 {
13216 int sub_quals = 0, first_quals = 0;
13217 unsigned i;
13218 dw_die_ref first = NULL, last = NULL;
13219
13220 /* Determine a lesser qualified type that most closely matches
13221 this one. Then generate DW_TAG_* entries for the remaining
13222 qualifiers. */
13223 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13224 cv_qual_mask);
13225 if (sub_quals && use_debug_types)
13226 {
13227 bool needed = false;
13228 /* If emitting type units, make sure the order of qualifiers
13229 is canonical. Thus, start from unqualified type if
13230 an earlier qualifier is missing in sub_quals, but some later
13231 one is present there. */
13232 for (i = 0; i < dwarf_qual_info_size; i++)
13233 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13234 needed = true;
13235 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13236 {
13237 sub_quals = 0;
13238 break;
13239 }
13240 }
13241 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13242 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13243 {
13244 /* As not all intermediate qualified DIEs have corresponding
13245 tree types, ensure that qualified DIEs in the same scope
13246 as their DW_AT_type are emitted after their DW_AT_type,
13247 only with other qualified DIEs for the same type possibly
13248 in between them. Determine the range of such qualified
13249 DIEs now (first being the base type, last being the corresponding
13250 last qualified DIE for it). */
13251 unsigned int count = 0;
13252 first = qualified_die_p (mod_type_die, &first_quals,
13253 dwarf_qual_info_size);
13254 if (first == NULL)
13255 first = mod_type_die;
13256 gcc_assert ((first_quals & ~sub_quals) == 0);
13257 for (count = 0, last = first;
13258 count < (1U << dwarf_qual_info_size);
13259 count++, last = last->die_sib)
13260 {
13261 int quals = 0;
13262 if (last == mod_scope->die_child)
13263 break;
13264 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13265 != first)
13266 break;
13267 }
13268 }
13269
13270 for (i = 0; i < dwarf_qual_info_size; i++)
13271 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13272 {
13273 dw_die_ref d;
13274 if (first && first != last)
13275 {
13276 for (d = first->die_sib; ; d = d->die_sib)
13277 {
13278 int quals = 0;
13279 qualified_die_p (d, &quals, dwarf_qual_info_size);
13280 if (quals == (first_quals | dwarf_qual_info[i].q))
13281 break;
13282 if (d == last)
13283 {
13284 d = NULL;
13285 break;
13286 }
13287 }
13288 if (d)
13289 {
13290 mod_type_die = d;
13291 continue;
13292 }
13293 }
13294 if (first)
13295 {
13296 d = new_die_raw (dwarf_qual_info[i].t);
13297 add_child_die_after (mod_scope, d, last);
13298 last = d;
13299 }
13300 else
13301 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13302 if (mod_type_die)
13303 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13304 mod_type_die = d;
13305 first_quals |= dwarf_qual_info[i].q;
13306 }
13307 }
13308 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13309 {
13310 dwarf_tag tag = DW_TAG_pointer_type;
13311 if (code == REFERENCE_TYPE)
13312 {
13313 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13314 tag = DW_TAG_rvalue_reference_type;
13315 else
13316 tag = DW_TAG_reference_type;
13317 }
13318 mod_type_die = new_die (tag, mod_scope, type);
13319
13320 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13321 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13322 add_alignment_attribute (mod_type_die, type);
13323 item_type = TREE_TYPE (type);
13324
13325 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13326 if (!ADDR_SPACE_GENERIC_P (as))
13327 {
13328 int action = targetm.addr_space.debug (as);
13329 if (action >= 0)
13330 {
13331 /* Positive values indicate an address_class. */
13332 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13333 }
13334 else
13335 {
13336 /* Negative values indicate an (inverted) segment base reg. */
13337 dw_loc_descr_ref d
13338 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13339 add_AT_loc (mod_type_die, DW_AT_segment, d);
13340 }
13341 }
13342 }
13343 else if (code == INTEGER_TYPE
13344 && TREE_TYPE (type) != NULL_TREE
13345 && subrange_type_for_debug_p (type, &low, &high))
13346 {
13347 tree bias = NULL_TREE;
13348 if (lang_hooks.types.get_type_bias)
13349 bias = lang_hooks.types.get_type_bias (type);
13350 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13351 item_type = TREE_TYPE (type);
13352 }
13353 else if (is_base_type (type))
13354 {
13355 mod_type_die = base_type_die (type, reverse);
13356
13357 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13358 if (reverse_base_type)
13359 {
13360 dw_die_ref after_die
13361 = modified_type_die (type, cv_quals, false, context_die);
13362 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13363 }
13364 else
13365 add_child_die (comp_unit_die (), mod_type_die);
13366
13367 add_pubtype (type, mod_type_die);
13368 }
13369 else
13370 {
13371 gen_type_die (type, context_die);
13372
13373 /* We have to get the type_main_variant here (and pass that to the
13374 `lookup_type_die' routine) because the ..._TYPE node we have
13375 might simply be a *copy* of some original type node (where the
13376 copy was created to help us keep track of typedef names) and
13377 that copy might have a different TYPE_UID from the original
13378 ..._TYPE node. */
13379 if (TREE_CODE (type) == FUNCTION_TYPE
13380 || TREE_CODE (type) == METHOD_TYPE)
13381 {
13382 /* For function/method types, can't just use type_main_variant here,
13383 because that can have different ref-qualifiers for C++,
13384 but try to canonicalize. */
13385 tree main = TYPE_MAIN_VARIANT (type);
13386 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13387 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13388 && check_base_type (t, main)
13389 && check_lang_type (t, type))
13390 return lookup_type_die (t);
13391 return lookup_type_die (type);
13392 }
13393 else if (TREE_CODE (type) != VECTOR_TYPE
13394 && TREE_CODE (type) != ARRAY_TYPE)
13395 return lookup_type_die (type_main_variant (type));
13396 else
13397 /* Vectors have the debugging information in the type,
13398 not the main variant. */
13399 return lookup_type_die (type);
13400 }
13401
13402 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13403 don't output a DW_TAG_typedef, since there isn't one in the
13404 user's program; just attach a DW_AT_name to the type.
13405 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13406 if the base type already has the same name. */
13407 if (name
13408 && ((TREE_CODE (name) != TYPE_DECL
13409 && (qualified_type == TYPE_MAIN_VARIANT (type)
13410 || (cv_quals == TYPE_UNQUALIFIED)))
13411 || (TREE_CODE (name) == TYPE_DECL
13412 && TREE_TYPE (name) == qualified_type
13413 && DECL_NAME (name))))
13414 {
13415 if (TREE_CODE (name) == TYPE_DECL)
13416 /* Could just call add_name_and_src_coords_attributes here,
13417 but since this is a builtin type it doesn't have any
13418 useful source coordinates anyway. */
13419 name = DECL_NAME (name);
13420 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13421 }
13422 /* This probably indicates a bug. */
13423 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13424 {
13425 name = TYPE_IDENTIFIER (type);
13426 add_name_attribute (mod_type_die,
13427 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13428 }
13429
13430 if (qualified_type && !reverse_base_type)
13431 equate_type_number_to_die (qualified_type, mod_type_die);
13432
13433 if (item_type)
13434 /* We must do this after the equate_type_number_to_die call, in case
13435 this is a recursive type. This ensures that the modified_type_die
13436 recursion will terminate even if the type is recursive. Recursive
13437 types are possible in Ada. */
13438 sub_die = modified_type_die (item_type,
13439 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13440 reverse,
13441 context_die);
13442
13443 if (sub_die != NULL)
13444 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13445
13446 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13447 if (TYPE_ARTIFICIAL (type))
13448 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13449
13450 return mod_type_die;
13451 }
13452
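/* Illustrative sketch: for the C type "const char *" (pointer to const char)
   a call with cv_quals == TYPE_UNQUALIFIED goes through the POINTER_TYPE
   branch above and produces roughly

       DW_TAG_pointer_type
         DW_AT_byte_size : 8          (assuming a 64-bit target)
         DW_AT_type      : -> DW_TAG_const_type
                              DW_AT_type : -> DW_TAG_base_type "char"

   where the const and base type DIEs come from the recursive
   modified_type_die call on the pointed-to type.  */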
13453 /* Generate DIEs for the generic parameters of T.
13454 T must be either a generic type or a generic function.
13455 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
13456
13457 static void
13458 gen_generic_params_dies (tree t)
13459 {
13460 tree parms, args;
13461 int parms_num, i;
13462 dw_die_ref die = NULL;
13463 int non_default;
13464
13465 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13466 return;
13467
13468 if (TYPE_P (t))
13469 die = lookup_type_die (t);
13470 else if (DECL_P (t))
13471 die = lookup_decl_die (t);
13472
13473 gcc_assert (die);
13474
13475 parms = lang_hooks.get_innermost_generic_parms (t);
13476 if (!parms)
13477 /* T has no generic parameter. This means T is neither a generic type
13478 nor a generic function. End of story. */
13479 return;
13480
13481 parms_num = TREE_VEC_LENGTH (parms);
13482 args = lang_hooks.get_innermost_generic_args (t);
13483 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13484 non_default = int_cst_value (TREE_CHAIN (args));
13485 else
13486 non_default = TREE_VEC_LENGTH (args);
13487 for (i = 0; i < parms_num; i++)
13488 {
13489 tree parm, arg, arg_pack_elems;
13490 dw_die_ref parm_die;
13491
13492 parm = TREE_VEC_ELT (parms, i);
13493 arg = TREE_VEC_ELT (args, i);
13494 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13495 gcc_assert (parm && TREE_VALUE (parm) && arg);
13496
13497 if (parm && TREE_VALUE (parm) && arg)
13498 {
13499 /* If PARM represents a template parameter pack,
13500 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13501 by DW_TAG_template_*_parameter DIEs for the argument
13502 pack elements of ARG. Note that ARG would then be
13503 an argument pack. */
13504 if (arg_pack_elems)
13505 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13506 arg_pack_elems,
13507 die);
13508 else
13509 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13510 true /* emit name */, die);
13511 if (i >= non_default)
13512 add_AT_flag (parm_die, DW_AT_default_value, 1);
13513 }
13514 }
13515 }
13516
13517 /* Create and return a DIE for PARM which should be
13518 the representation of a generic type parameter.
13519 For instance, in the C++ front end, PARM would be a template parameter.
13520 ARG is the argument to PARM.
13521 EMIT_NAME_P, if true, means the DIE will have a DW_AT_name attribute set to
13522 the name of PARM.
13523 PARENT_DIE is the parent DIE to which the newly created DIE should be added
13524 as a child node. */
13525
13526 static dw_die_ref
13527 generic_parameter_die (tree parm, tree arg,
13528 bool emit_name_p,
13529 dw_die_ref parent_die)
13530 {
13531 dw_die_ref tmpl_die = NULL;
13532 const char *name = NULL;
13533
13534 if (!parm || !DECL_NAME (parm) || !arg)
13535 return NULL;
13536
13537 /* We support non-type generic parameters and arguments,
13538 type generic parameters and arguments, as well as
13539 generic generic parameters (a.k.a. template template parameters in C++)
13540 and arguments. */
13541 if (TREE_CODE (parm) == PARM_DECL)
13542 /* PARM is a non-type generic parameter. */
13543 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13544 else if (TREE_CODE (parm) == TYPE_DECL)
13545 /* PARM is a type generic parameter. */
13546 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13547 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13548 /* PARM is a generic generic parameter.
13549 Its DIE is a GNU extension. It shall have a
13550 DW_AT_name attribute to represent the name of the template template
13551 parameter, and a DW_AT_GNU_template_name attribute to represent the
13552 name of the template template argument. */
13553 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13554 parent_die, parm);
13555 else
13556 gcc_unreachable ();
13557
13558 if (tmpl_die)
13559 {
13560 tree tmpl_type;
13561
13562 /* If PARM is a generic parameter pack, it means we are
13563 emitting debug info for a template argument pack element.
13564 In other words, ARG is a template argument pack element.
13565 In that case, we don't emit any DW_AT_name attribute for
13566 the DIE. */
13567 if (emit_name_p)
13568 {
13569 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13570 gcc_assert (name);
13571 add_AT_string (tmpl_die, DW_AT_name, name);
13572 }
13573
13574 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13575 {
13576 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
13577 TMPL_DIE should have a child DW_AT_type attribute that is set
13578 to the type of the argument to PARM, which is ARG.
13579 If PARM is a type generic parameter, TMPL_DIE should have a
13580 child DW_AT_type that is set to ARG. */
13581 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13582 add_type_attribute (tmpl_die, tmpl_type,
13583 (TREE_THIS_VOLATILE (tmpl_type)
13584 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13585 false, parent_die);
13586 }
13587 else
13588 {
13589 /* So TMPL_DIE is a DIE representing a generic generic parameter,
13590 a.k.a. a template template parameter in C++, and ARG is a
13591 template. */
13592
13593 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13594 to the name of the argument. */
13595 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13596 if (name)
13597 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13598 }
13599
13600 if (TREE_CODE (parm) == PARM_DECL)
13601 /* So PARM is a non-type generic parameter.
13602 DWARF3 5.6.8 says we must set a DW_AT_const_value child
13603 attribute of TMPL_DIE whose value represents the value
13604 of ARG.
13605 We must be careful here:
13606 the value of ARG might reference some function decls.
13607 We might currently be emitting debug info for a generic
13608 type, and since types are emitted before function decls, we don't
13609 know if the function decls referenced by ARG will actually be
13610 emitted after cgraph computations.
13611 So we must defer the generation of the DW_AT_const_value to
13612 after cgraph is ready. */
13613 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13614 }
13615
13616 return tmpl_die;
13617 }
13618
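/* Illustrative sketch: for a hypothetical C++ instantiation S<int, 3> of
   "template <typename T, int N> struct S", gen_generic_params_dies emits
   under the DIE of S roughly

       DW_TAG_template_type_param    DW_AT_name "T"  DW_AT_type -> int
       DW_TAG_template_value_param   DW_AT_name "N"  DW_AT_type -> int

   with the DW_AT_const_value 3 for N filled in later, once cgraph is ready
   (see append_entry_to_tmpl_value_parm_die_table above).  */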
13619 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
13620 PARM_PACK, which must be a template parameter pack. The returned DIE
13621 will be a child DIE of PARENT_DIE. */
13622
13623 static dw_die_ref
13624 template_parameter_pack_die (tree parm_pack,
13625 tree parm_pack_args,
13626 dw_die_ref parent_die)
13627 {
13628 dw_die_ref die;
13629 int j;
13630
13631 gcc_assert (parent_die && parm_pack);
13632
13633 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13634 add_name_and_src_coords_attributes (die, parm_pack);
13635 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13636 generic_parameter_die (parm_pack,
13637 TREE_VEC_ELT (parm_pack_args, j),
13638 false /* Don't emit DW_AT_name */,
13639 die);
13640 return die;
13641 }
13642
13643 /* Return the DBX register number described by a given RTL node. */
13644
13645 static unsigned int
13646 dbx_reg_number (const_rtx rtl)
13647 {
13648 unsigned regno = REGNO (rtl);
13649
13650 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13651
13652 #ifdef LEAF_REG_REMAP
13653 if (crtl->uses_only_leaf_regs)
13654 {
13655 int leaf_reg = LEAF_REG_REMAP (regno);
13656 if (leaf_reg != -1)
13657 regno = (unsigned) leaf_reg;
13658 }
13659 #endif
13660
13661 regno = DBX_REGISTER_NUMBER (regno);
13662 gcc_assert (regno != INVALID_REGNUM);
13663 return regno;
13664 }
13665
13666 /* Optionally add a DW_OP_piece term to a location description expression.
13667 DW_OP_piece is only added if the location description expression
13668 doesn't already end with DW_OP_piece. */
13669
13670 static void
13671 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13672 {
13673 dw_loc_descr_ref loc;
13674
13675 if (*list_head != NULL)
13676 {
13677 /* Find the end of the chain. */
13678 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13679 ;
13680
13681 if (loc->dw_loc_opc != DW_OP_piece)
13682 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13683 }
13684 }
13685
13686 /* Return a location descriptor that designates a machine register or
13687 zero if there is none. */
13688
13689 static dw_loc_descr_ref
13690 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13691 {
13692 rtx regs;
13693
13694 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13695 return 0;
13696
13697 /* We only use "frame base" when we're sure we're talking about the
13698 post-prologue local stack frame. We do this by *not* running
13699 register elimination until this point, and recognizing the special
13700 argument pointer and soft frame pointer rtx's.
13701 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13702 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13703 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13704 {
13705 dw_loc_descr_ref result = NULL;
13706
13707 if (dwarf_version >= 4 || !dwarf_strict)
13708 {
13709 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13710 initialized);
13711 if (result)
13712 add_loc_descr (&result,
13713 new_loc_descr (DW_OP_stack_value, 0, 0));
13714 }
13715 return result;
13716 }
13717
13718 regs = targetm.dwarf_register_span (rtl);
13719
13720 if (REG_NREGS (rtl) > 1 || regs)
13721 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13722 else
13723 {
13724 unsigned int dbx_regnum = dbx_reg_number (rtl);
13725 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13726 return 0;
13727 return one_reg_loc_descriptor (dbx_regnum, initialized);
13728 }
13729 }
13730
13731 /* Return a location descriptor that designates a machine register for
13732 a given hard register number. */
13733
13734 static dw_loc_descr_ref
13735 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13736 {
13737 dw_loc_descr_ref reg_loc_descr;
13738
13739 if (regno <= 31)
13740 reg_loc_descr
13741 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13742 else
13743 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13744
13745 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13746 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13747
13748 return reg_loc_descr;
13749 }
13750
13751 /* Given an RTL of a register, return a location descriptor that
13752 designates a value that spans more than one register. */
13753
13754 static dw_loc_descr_ref
13755 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13756 enum var_init_status initialized)
13757 {
13758 int size, i;
13759 dw_loc_descr_ref loc_result = NULL;
13760
13761 /* Simple, contiguous registers. */
13762 if (regs == NULL_RTX)
13763 {
13764 unsigned reg = REGNO (rtl);
13765 int nregs;
13766
13767 #ifdef LEAF_REG_REMAP
13768 if (crtl->uses_only_leaf_regs)
13769 {
13770 int leaf_reg = LEAF_REG_REMAP (reg);
13771 if (leaf_reg != -1)
13772 reg = (unsigned) leaf_reg;
13773 }
13774 #endif
13775
13776 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13777 nregs = REG_NREGS (rtl);
13778
13779 /* At present we only track constant-sized pieces. */
13780 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13781 return NULL;
13782 size /= nregs;
13783
13784 loc_result = NULL;
13785 while (nregs--)
13786 {
13787 dw_loc_descr_ref t;
13788
13789 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13790 VAR_INIT_STATUS_INITIALIZED);
13791 add_loc_descr (&loc_result, t);
13792 add_loc_descr_op_piece (&loc_result, size);
13793 ++reg;
13794 }
13795 return loc_result;
13796 }
13797
13798 /* Now onto stupid register sets in non-contiguous locations. */
13799
13800 gcc_assert (GET_CODE (regs) == PARALLEL);
13801
13802 /* At present we only track constant-sized pieces. */
13803 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13804 return NULL;
13805 loc_result = NULL;
13806
13807 for (i = 0; i < XVECLEN (regs, 0); ++i)
13808 {
13809 dw_loc_descr_ref t;
13810
13811 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13812 VAR_INIT_STATUS_INITIALIZED);
13813 add_loc_descr (&loc_result, t);
13814 add_loc_descr_op_piece (&loc_result, size);
13815 }
13816
13817 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13818 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13819 return loc_result;
13820 }
13821
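/* Illustrative sketch: for a 16-byte value living in two consecutive 8-byte
   hard registers with (hypothetical) DWARF register numbers 0 and 1, the
   function above builds

       DW_OP_reg0  DW_OP_piece 8  DW_OP_reg1  DW_OP_piece 8

   i.e. one register location per piece, each terminated by a DW_OP_piece
   giving that piece's size in bytes.  */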
13822 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13823
13824 /* Return a location descriptor that designates a constant i,
13825 as a compound operation from constant (i >> shift), constant shift
13826 and DW_OP_shl. */
13827
13828 static dw_loc_descr_ref
13829 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13830 {
13831 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13832 add_loc_descr (&ret, int_loc_descriptor (shift));
13833 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13834 return ret;
13835 }
13836
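/* Illustrative sketch: on a host with a 64-bit HOST_WIDE_INT,
   int_shift_loc_descriptor (0x30000000000, 40) builds

       DW_OP_lit3  DW_OP_const1u 40  DW_OP_shl

   which is 4 bytes, versus 7 bytes for the equivalent DW_OP_constu
   0x30000000000 (one opcode byte plus a 6-byte uleb128).  */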
13837 /* Return a location descriptor that designates constant POLY_I. */
13838
13839 static dw_loc_descr_ref
13840 int_loc_descriptor (poly_int64 poly_i)
13841 {
13842 enum dwarf_location_atom op;
13843
13844 HOST_WIDE_INT i;
13845 if (!poly_i.is_constant (&i))
13846 {
13847 /* Create location descriptions for the non-constant part and
13848 add any constant offset at the end. */
13849 dw_loc_descr_ref ret = NULL;
13850 HOST_WIDE_INT constant = poly_i.coeffs[0];
13851 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13852 {
13853 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13854 if (coeff != 0)
13855 {
13856 dw_loc_descr_ref start = ret;
13857 unsigned int factor;
13858 int bias;
13859 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13860 (j, &factor, &bias);
13861
13862 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13863 add COEFF * (REGNO / FACTOR) now and subtract
13864 COEFF * BIAS from the final constant part. */
13865 constant -= coeff * bias;
13866 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13867 if (coeff % factor == 0)
13868 coeff /= factor;
13869 else
13870 {
13871 int amount = exact_log2 (factor);
13872 gcc_assert (amount >= 0);
13873 add_loc_descr (&ret, int_loc_descriptor (amount));
13874 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13875 }
13876 if (coeff != 1)
13877 {
13878 add_loc_descr (&ret, int_loc_descriptor (coeff));
13879 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13880 }
13881 if (start)
13882 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13883 }
13884 }
13885 loc_descr_plus_const (&ret, constant);
13886 return ret;
13887 }
13888
13889 /* Pick the smallest representation of a constant, rather than just
13890 defaulting to the LEB encoding. */
13891 if (i >= 0)
13892 {
13893 int clz = clz_hwi (i);
13894 int ctz = ctz_hwi (i);
13895 if (i <= 31)
13896 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13897 else if (i <= 0xff)
13898 op = DW_OP_const1u;
13899 else if (i <= 0xffff)
13900 op = DW_OP_const2u;
13901 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13902 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13903 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13904 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13905 while DW_OP_const4u is 5 bytes. */
13906 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13907 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13908 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13909 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13910 while DW_OP_const4u is 5 bytes. */
13911 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13912
13913 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13914 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13915 <= 4)
13916 {
13917 /* As i >= 2**31, the double cast above will yield a negative number.
13918 Since wrapping is defined in DWARF expressions we can output big
13919 positive integers as small negative ones, regardless of the size
13920 of host wide ints.
13921
13922 Here, since the evaluator will handle 32-bit values and since i >=
13923 2**31, we know it's going to be interpreted as a negative literal:
13924 store it this way if we can do better than 5 bytes this way. */
13925 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13926 }
13927 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13928 op = DW_OP_const4u;
13929
13930 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
13931 least 6 bytes: see if we can do better before falling back to it. */
13932 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13933 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13934 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
13935 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13936 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13937 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
13938 >= HOST_BITS_PER_WIDE_INT)
13939 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
13940 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
13941 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
13942 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13943 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13944 && size_of_uleb128 (i) > 6)
13945 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
13946 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
13947 else
13948 op = DW_OP_constu;
13949 }
13950 else
13951 {
13952 if (i >= -0x80)
13953 op = DW_OP_const1s;
13954 else if (i >= -0x8000)
13955 op = DW_OP_const2s;
13956 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
13957 {
13958 if (size_of_int_loc_descriptor (i) < 5)
13959 {
13960 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13961 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13962 return ret;
13963 }
13964 op = DW_OP_const4s;
13965 }
13966 else
13967 {
13968 if (size_of_int_loc_descriptor (i)
13969 < (unsigned long) 1 + size_of_sleb128 (i))
13970 {
13971 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13972 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13973 return ret;
13974 }
13975 op = DW_OP_consts;
13976 }
13977 }
13978
13979 return new_loc_descr (op, i, 0);
13980 }
13981
13982 /* Likewise, for unsigned constants. */
13983
13984 static dw_loc_descr_ref
13985 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
13986 {
13987 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
13988 const unsigned HOST_WIDE_INT max_uint
13989 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
13990
13991 /* If possible, use the clever signed constants handling. */
13992 if (i <= max_int)
13993 return int_loc_descriptor ((HOST_WIDE_INT) i);
13994
13995 /* Here, we are left with positive numbers that cannot be represented as
13996 HOST_WIDE_INT, i.e.:
13997 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
13998
13999 Using a DW_OP_const4u/DW_OP_const8u operation to encode them consumes a
14000 lot of bytes, whereas it may be better to output a negative integer:
14001 thanks to integer wrapping, we know that:
14002 x = x - 2 ** (8 * DWARF2_ADDR_SIZE)
14003 = x - 2 * (max (HOST_WIDE_INT) + 1)
14004 So numbers close to max (unsigned HOST_WIDE_INT) can be represented as
14005 small negative integers. Let's try that in cases where it will clearly
14006 improve the encoding: there is no gain turning DW_OP_const4u into
14007 DW_OP_const4s.
14008 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
14009 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
14010 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
14011 {
14012 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
14013
14014 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
14015 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
14016 const HOST_WIDE_INT second_shift
14017 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
14018
14019 /* So we finally have:
14020 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
14021 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
14022 return int_loc_descriptor (second_shift);
14023 }
14024
14025 /* Last chance: fallback to a simple constant operation. */
14026 return new_loc_descr
14027 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14028 ? DW_OP_const4u
14029 : DW_OP_const8u,
14030 i, 0);
14031 }
14032
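/* Illustrative sketch: with DWARF2_ADDR_SIZE == 8 and a 64-bit
   HOST_WIDE_INT, the value 0xffffffffffffff00 falls into the wrapping case
   above and ends up encoded as DW_OP_const2s -256 (3 bytes) instead of
   DW_OP_const8u (9 bytes): since DWARF expression arithmetic wraps modulo
   2**64 here, -256 and 0xffffffffffffff00 denote the same stack value.  */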
14033 /* Generate and return a location description that computes the unsigned
14034 comparison of the two stack top entries (a OP b where b is the top-most
14035 entry and a is the second one). The KIND of comparison can be LT_EXPR,
14036 LE_EXPR, GT_EXPR or GE_EXPR. */
14037
14038 static dw_loc_descr_ref
14039 uint_comparison_loc_list (enum tree_code kind)
14040 {
14041 enum dwarf_location_atom op, flip_op;
14042 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
14043
14044 switch (kind)
14045 {
14046 case LT_EXPR:
14047 op = DW_OP_lt;
14048 break;
14049 case LE_EXPR:
14050 op = DW_OP_le;
14051 break;
14052 case GT_EXPR:
14053 op = DW_OP_gt;
14054 break;
14055 case GE_EXPR:
14056 op = DW_OP_ge;
14057 break;
14058 default:
14059 gcc_unreachable ();
14060 }
14061
14062 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14063 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
14064
14065 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
14066 possible to perform unsigned comparisons: we just have to distinguish
14067 two cases:
14068
14069 1. when a and b have the same sign (as signed integers); then we should
14070 return: a OP(signed) b;
14071
14072 2. when a is a negative signed integer while b is a positive one, then a
14073 is a greater unsigned integer than b; likewise when a and b's roles
14074 are flipped.
14075
14076 So first, compare the sign of the two operands. */
14077 ret = new_loc_descr (DW_OP_over, 0, 0);
14078 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14079 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
14080 /* If they have different signs (i.e. they have different sign bits), then
14081 the stack top value has now the sign bit set and thus it's smaller than
14082 zero. */
14083 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
14084 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
14085 add_loc_descr (&ret, bra_node);
14086
14087 /* We are in case 1. At this point, we know both operands have the same
14088 sign, so it is safe to use the built-in signed comparison.
14089 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14090 add_loc_descr (&ret, jmp_node);
14091
14092 /* We are in case 2. Here, we know both operands do not have the same sign,
14093 so we have to flip the signed comparison. */
14094 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
14095 tmp = new_loc_descr (flip_op, 0, 0);
14096 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14097 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
14098 add_loc_descr (&ret, tmp);
14099
14100 /* This dummy operation is necessary to make the two branches join. */
14101 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14102 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14103 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14104 add_loc_descr (&ret, tmp);
14105
14106 return ret;
14107 }
14108
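/* Illustrative sketch: for KIND == LT_EXPR the function above builds the
   DWARF expression

       DW_OP_over DW_OP_over DW_OP_xor DW_OP_lit0 DW_OP_lt   ; signs differ?
       DW_OP_bra  .Lflip                                     ; yes: flip
       DW_OP_lt   DW_OP_skip .Ljoin                          ; no: signed <
     .Lflip:
       DW_OP_gt                                              ; flipped compare
     .Ljoin:
       DW_OP_nop

   where .Lflip and .Ljoin stand for the byte offsets that the DW_OP_bra and
   DW_OP_skip operands resolve to; they are made-up names for the example.  */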
14109 /* Likewise, but takes the location description lists (might be destructive on
14110 them). Return NULL if either is NULL or if concatenation fails. */
14111
14112 static dw_loc_list_ref
14113 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14114 enum tree_code kind)
14115 {
14116 if (left == NULL || right == NULL)
14117 return NULL;
14118
14119 add_loc_list (&left, right);
14120 if (left == NULL)
14121 return NULL;
14122
14123 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14124 return left;
14125 }
14126
14127 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14128 without actually allocating it. */
14129
14130 static unsigned long
14131 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14132 {
14133 return size_of_int_loc_descriptor (i >> shift)
14134 + size_of_int_loc_descriptor (shift)
14135 + 1;
14136 }
14137
14138 /* Return size_of_locs (int_loc_descriptor (i)) without
14139 actually allocating it. */
14140
14141 static unsigned long
14142 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14143 {
14144 unsigned long s;
14145
14146 if (i >= 0)
14147 {
14148 int clz, ctz;
14149 if (i <= 31)
14150 return 1;
14151 else if (i <= 0xff)
14152 return 2;
14153 else if (i <= 0xffff)
14154 return 3;
14155 clz = clz_hwi (i);
14156 ctz = ctz_hwi (i);
14157 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14158 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14159 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14160 - clz - 5);
14161 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14162 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14163 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14164 - clz - 8);
14165 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14166 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14167 <= 4)
14168 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14169 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14170 return 5;
14171 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14172 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14173 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14174 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14175 - clz - 8);
14176 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14177 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14178 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14179 - clz - 16);
14180 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14181 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14182 && s > 6)
14183 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14184 - clz - 32);
14185 else
14186 return 1 + s;
14187 }
14188 else
14189 {
14190 if (i >= -0x80)
14191 return 2;
14192 else if (i >= -0x8000)
14193 return 3;
14194 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14195 {
14196 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14197 {
14198 s = size_of_int_loc_descriptor (-i) + 1;
14199 if (s < 5)
14200 return s;
14201 }
14202 return 5;
14203 }
14204 else
14205 {
14206 unsigned long r = 1 + size_of_sleb128 (i);
14207 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14208 {
14209 s = size_of_int_loc_descriptor (-i) + 1;
14210 if (s < r)
14211 return s;
14212 }
14213 return r;
14214 }
14215 }
14216 }
14217
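/* A few editorial examples of the sizes computed above, assuming a 64-bit
   HOST_WIDE_INT: 5 needs 1 byte (a DW_OP_lit<n> opcode), 100 needs 2 bytes
   (opcode plus one data byte), 0x12345678 needs 5 bytes (opcode plus four
   data bytes) and -100 needs 2 bytes.  */
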
14218 /* Return a location description representing the "address" of an integer
14219 value. This can appear only as a top-level expression. */
14220
14221 static dw_loc_descr_ref
14222 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14223 {
14224 int litsize;
14225 dw_loc_descr_ref loc_result = NULL;
14226
14227 if (!(dwarf_version >= 4 || !dwarf_strict))
14228 return NULL;
14229
14230 litsize = size_of_int_loc_descriptor (i);
14231 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14232 is more compact. For DW_OP_stack_value we need:
14233 litsize + 1 (DW_OP_stack_value)
14234 and for DW_OP_implicit_value:
14235 1 (DW_OP_implicit_value) + 1 (length) + size. */
14236 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14237 {
14238 loc_result = int_loc_descriptor (i);
14239 add_loc_descr (&loc_result,
14240 new_loc_descr (DW_OP_stack_value, 0, 0));
14241 return loc_result;
14242 }
14243
14244 loc_result = new_loc_descr (DW_OP_implicit_value,
14245 size, 0);
14246 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14247 loc_result->dw_loc_oprnd2.v.val_int = i;
14248 return loc_result;
14249 }
14250
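/* Editorial example of the size comparison above: for SIZE == 4 and I == 5
   the stack-value form costs litsize + 1 == 2 bytes (DW_OP_lit5
   DW_OP_stack_value) against 1 + 1 + 4 == 6 bytes for DW_OP_implicit_value,
   so the stack-value form wins; when SIZE exceeds DWARF2_ADDR_SIZE the first
   test fails and DW_OP_implicit_value is emitted instead.  */
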
14251 /* Return a location descriptor that designates a base+offset location. */
14252
14253 static dw_loc_descr_ref
14254 based_loc_descr (rtx reg, poly_int64 offset,
14255 enum var_init_status initialized)
14256 {
14257 unsigned int regno;
14258 dw_loc_descr_ref result;
14259 dw_fde_ref fde = cfun->fde;
14260
14261 /* We only use "frame base" when we're sure we're talking about the
14262 post-prologue local stack frame. We do this by *not* running
14263 register elimination until this point, and recognizing the special
14264 argument pointer and soft frame pointer rtx's. */
14265 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14266 {
14267 rtx elim = (ira_use_lra_p
14268 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14269 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14270
14271 if (elim != reg)
14272 {
14273 /* Allow hard frame pointer here even if frame pointer
14274 isn't used since hard frame pointer is encoded with
14275 DW_OP_fbreg which uses the DW_AT_frame_base attribute,
14276 not hard frame pointer directly. */
14277 elim = strip_offset_and_add (elim, &offset);
14278 gcc_assert (elim == hard_frame_pointer_rtx
14279 || elim == stack_pointer_rtx);
14280
14281 /* If drap register is used to align stack, use frame
14282 pointer + offset to access stack variables. If stack
14283 is aligned without drap, use stack pointer + offset to
14284 access stack variables. */
14285 if (crtl->stack_realign_tried
14286 && reg == frame_pointer_rtx)
14287 {
14288 int base_reg
14289 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14290 ? HARD_FRAME_POINTER_REGNUM
14291 : REGNO (elim));
14292 return new_reg_loc_descr (base_reg, offset);
14293 }
14294
14295 gcc_assert (frame_pointer_fb_offset_valid);
14296 offset += frame_pointer_fb_offset;
14297 HOST_WIDE_INT const_offset;
14298 if (offset.is_constant (&const_offset))
14299 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14300 else
14301 {
14302 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14303 loc_descr_plus_const (&ret, offset);
14304 return ret;
14305 }
14306 }
14307 }
14308
14309 regno = REGNO (reg);
14310 #ifdef LEAF_REG_REMAP
14311 if (crtl->uses_only_leaf_regs)
14312 {
14313 int leaf_reg = LEAF_REG_REMAP (regno);
14314 if (leaf_reg != -1)
14315 regno = (unsigned) leaf_reg;
14316 }
14317 #endif
14318 regno = DWARF_FRAME_REGNUM (regno);
14319
14320 HOST_WIDE_INT const_offset;
14321 if (!optimize && fde
14322 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14323 && offset.is_constant (&const_offset))
14324 {
14325 /* Use cfa+offset to represent the location of arguments passed
14326 on the stack when drap is used to align stack.
14327 Only do this when not optimizing, for optimized code var-tracking
14328 is supposed to track where the arguments live and the register
14329 used as vdrap or drap in some spot might be used for something
14330 else in other part of the routine. */
14331 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14332 }
14333
14334 result = new_reg_loc_descr (regno, offset);
14335
14336 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14337 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14338
14339 return result;
14340 }
14341
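/* Editorial example: a local at a known constant offset from the frame base
   comes out of the DW_OP_fbreg branch above (e.g. DW_OP_fbreg with operand
   -16 for an object 16 bytes below the frame base), while an address formed
   from some other hard register plus 8 reaches new_reg_loc_descr and becomes
   the corresponding DW_OP_breg-style base register descriptor with
   offset 8.  */
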
14342 /* Return true if this RTL expression describes a base+offset calculation. */
14343
14344 static inline int
14345 is_based_loc (const_rtx rtl)
14346 {
14347 return (GET_CODE (rtl) == PLUS
14348 && ((REG_P (XEXP (rtl, 0))
14349 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14350 && CONST_INT_P (XEXP (rtl, 1)))));
14351 }
14352
14353 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14354 failed. */
14355
14356 static dw_loc_descr_ref
14357 tls_mem_loc_descriptor (rtx mem)
14358 {
14359 tree base;
14360 dw_loc_descr_ref loc_result;
14361
14362 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14363 return NULL;
14364
14365 base = get_base_address (MEM_EXPR (mem));
14366 if (base == NULL
14367 || !VAR_P (base)
14368 || !DECL_THREAD_LOCAL_P (base))
14369 return NULL;
14370
14371 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14372 if (loc_result == NULL)
14373 return NULL;
14374
14375 if (maybe_ne (MEM_OFFSET (mem), 0))
14376 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14377
14378 return loc_result;
14379 }
14380
14381 /* Output debug info about the reason why we failed to expand the expression
14382 as a DWARF expression. */
14383
14384 static void
14385 expansion_failed (tree expr, rtx rtl, char const *reason)
14386 {
14387 if (dump_file && (dump_flags & TDF_DETAILS))
14388 {
14389 fprintf (dump_file, "Failed to expand as dwarf: ");
14390 if (expr)
14391 print_generic_expr (dump_file, expr, dump_flags);
14392 if (rtl)
14393 {
14394 fprintf (dump_file, "\n");
14395 print_rtl (dump_file, rtl);
14396 }
14397 fprintf (dump_file, "\nReason: %s\n", reason);
14398 }
14399 }
14400
14401 /* Helper function for const_ok_for_output. */
14402
14403 static bool
14404 const_ok_for_output_1 (rtx rtl)
14405 {
14406 if (targetm.const_not_ok_for_debug_p (rtl))
14407 {
14408 if (GET_CODE (rtl) != UNSPEC)
14409 {
14410 expansion_failed (NULL_TREE, rtl,
14411 "Expression rejected for debug by the backend.\n");
14412 return false;
14413 }
14414
14415 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14416 the target hook doesn't explicitly allow it in debug info, assume
14417 we can't express it in the debug info. */
14418 /* Don't complain about TLS UNSPECs, those are just too hard to
14419 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14420 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14421 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14422 if (flag_checking
14423 && (XVECLEN (rtl, 0) == 0
14424 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14425 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14426 inform (current_function_decl
14427 ? DECL_SOURCE_LOCATION (current_function_decl)
14428 : UNKNOWN_LOCATION,
14429 #if NUM_UNSPEC_VALUES > 0
14430 "non-delegitimized UNSPEC %s (%d) found in variable location",
14431 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14432 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14433 XINT (rtl, 1));
14434 #else
14435 "non-delegitimized UNSPEC %d found in variable location",
14436 XINT (rtl, 1));
14437 #endif
14438 expansion_failed (NULL_TREE, rtl,
14439 "UNSPEC hasn't been delegitimized.\n");
14440 return false;
14441 }
14442
14443 if (CONST_POLY_INT_P (rtl))
14444 return false;
14445
14446 if (targetm.const_not_ok_for_debug_p (rtl))
14447 {
14448 expansion_failed (NULL_TREE, rtl,
14449 "Expression rejected for debug by the backend.\n");
14450 return false;
14451 }
14452
14453 /* FIXME: Refer to PR60655. It is possible for simplification
14454 of rtl expressions in var tracking to produce such expressions.
14455 We should really identify / validate expressions
14456 enclosed in CONST that can be handled by assemblers on various
14457 targets and only handle legitimate cases here. */
14458 switch (GET_CODE (rtl))
14459 {
14460 case SYMBOL_REF:
14461 break;
14462 case NOT:
14463 case NEG:
14464 return false;
14465 default:
14466 return true;
14467 }
14468
14469 if (CONSTANT_POOL_ADDRESS_P (rtl))
14470 {
14471 bool marked;
14472 get_pool_constant_mark (rtl, &marked);
14473 /* If all references to this pool constant were optimized away,
14474 it was not output and thus we can't represent it. */
14475 if (!marked)
14476 {
14477 expansion_failed (NULL_TREE, rtl,
14478 "Constant was removed from constant pool.\n");
14479 return false;
14480 }
14481 }
14482
14483 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14484 return false;
14485
14486 /* Avoid references to external symbols in debug info, on several targets
14487 the linker might even refuse to link when linking a shared library,
14488 and in many other cases the relocations for .debug_info/.debug_loc are
14489 dropped, so the address becomes zero anyway. Hidden symbols, which are
14490 guaranteed to be defined within the same shared library or executable, are fine. */
14491 if (SYMBOL_REF_EXTERNAL_P (rtl))
14492 {
14493 tree decl = SYMBOL_REF_DECL (rtl);
14494
14495 if (decl == NULL || !targetm.binds_local_p (decl))
14496 {
14497 expansion_failed (NULL_TREE, rtl,
14498 "Symbol not defined in current TU.\n");
14499 return false;
14500 }
14501 }
14502
14503 return true;
14504 }
14505
14506 /* Return true if constant RTL can be emitted in DW_OP_addr or
14507 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14508 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
14509
14510 static bool
14511 const_ok_for_output (rtx rtl)
14512 {
14513 if (GET_CODE (rtl) == SYMBOL_REF)
14514 return const_ok_for_output_1 (rtl);
14515
14516 if (GET_CODE (rtl) == CONST)
14517 {
14518 subrtx_var_iterator::array_type array;
14519 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14520 if (!const_ok_for_output_1 (*iter))
14521 return false;
14522 return true;
14523 }
14524
14525 return true;
14526 }
14527
14528 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14529 if possible, NULL otherwise. */
14530
14531 static dw_die_ref
14532 base_type_for_mode (machine_mode mode, bool unsignedp)
14533 {
14534 dw_die_ref type_die;
14535 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14536
14537 if (type == NULL)
14538 return NULL;
14539 switch (TREE_CODE (type))
14540 {
14541 case INTEGER_TYPE:
14542 case REAL_TYPE:
14543 break;
14544 default:
14545 return NULL;
14546 }
14547 type_die = lookup_type_die (type);
14548 if (!type_die)
14549 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14550 comp_unit_die ());
14551 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14552 return NULL;
14553 return type_die;
14554 }
14555
14556 /* For OP descriptor assumed to be in unsigned MODE, convert it to an unsigned
14557 type matching MODE, or, if MODE is narrower than or as wide as
14558 DWARF2_ADDR_SIZE, untyped. Return NULL if the conversion is not
14559 possible. */
14560
14561 static dw_loc_descr_ref
14562 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14563 {
14564 machine_mode outer_mode = mode;
14565 dw_die_ref type_die;
14566 dw_loc_descr_ref cvt;
14567
14568 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14569 {
14570 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14571 return op;
14572 }
14573 type_die = base_type_for_mode (outer_mode, 1);
14574 if (type_die == NULL)
14575 return NULL;
14576 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14577 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14578 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14579 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14580 add_loc_descr (&op, cvt);
14581 return op;
14582 }
14583
14584 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14585
14586 static dw_loc_descr_ref
14587 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14588 dw_loc_descr_ref op1)
14589 {
14590 dw_loc_descr_ref ret = op0;
14591 add_loc_descr (&ret, op1);
14592 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14593 if (STORE_FLAG_VALUE != 1)
14594 {
14595 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14596 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14597 }
14598 return ret;
14599 }
14600
14601 /* Subroutine of scompare_loc_descriptor for the case in which we're
14602 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14603 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14604
14605 static dw_loc_descr_ref
14606 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14607 scalar_int_mode op_mode,
14608 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14609 {
14610 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14611 dw_loc_descr_ref cvt;
14612
14613 if (type_die == NULL)
14614 return NULL;
14615 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14616 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14617 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14618 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14619 add_loc_descr (&op0, cvt);
14620 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14621 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14622 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14623 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14624 add_loc_descr (&op1, cvt);
14625 return compare_loc_descriptor (op, op0, op1);
14626 }
14627
14628 /* Subroutine of scompare_loc_descriptor for the case in which we're
14629 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14630 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14631
14632 static dw_loc_descr_ref
14633 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14634 scalar_int_mode op_mode,
14635 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14636 {
14637 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14638 /* For eq/ne, if the operands are known to be zero-extended,
14639 there is no need to do the fancy shifting up. */
14640 if (op == DW_OP_eq || op == DW_OP_ne)
14641 {
14642 dw_loc_descr_ref last0, last1;
14643 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14644 ;
14645 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14646 ;
14647 /* deref_size zero extends, and for constants we can check
14648 whether they are zero extended or not. */
14649 if (((last0->dw_loc_opc == DW_OP_deref_size
14650 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14651 || (CONST_INT_P (XEXP (rtl, 0))
14652 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14653 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14654 && ((last1->dw_loc_opc == DW_OP_deref_size
14655 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14656 || (CONST_INT_P (XEXP (rtl, 1))
14657 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14658 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14659 return compare_loc_descriptor (op, op0, op1);
14660
14661 /* EQ/NE comparison against constant in narrower type than
14662 DWARF2_ADDR_SIZE can be performed either as
14663 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14664 DW_OP_{eq,ne}
14665 or
14666 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14667 DW_OP_{eq,ne}. Pick whatever is shorter. */
14668 if (CONST_INT_P (XEXP (rtl, 1))
14669 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14670 && (size_of_int_loc_descriptor (shift) + 1
14671 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14672 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14673 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14674 & GET_MODE_MASK (op_mode))))
14675 {
14676 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14677 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14678 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14679 & GET_MODE_MASK (op_mode));
14680 return compare_loc_descriptor (op, op0, op1);
14681 }
14682 }
14683 add_loc_descr (&op0, int_loc_descriptor (shift));
14684 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14685 if (CONST_INT_P (XEXP (rtl, 1)))
14686 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14687 else
14688 {
14689 add_loc_descr (&op1, int_loc_descriptor (shift));
14690 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14691 }
14692 return compare_loc_descriptor (op, op0, op1);
14693 }
14694
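/* Illustrative sketch (editorial, not part of dwarf2out.c) of the shift trick
   scompare_loc_descriptor_narrow relies on: moving both narrow operands into
   the high bits of a wider word preserves their signed order, so the
   full-width signed comparison can be reused.  The helper name is
   hypothetical; the multiplication stands in for DW_OP_shl because shifting
   a negative value left is undefined behavior in C.  */

static int
narrow_signed_less_sketch (signed char a, signed char b)
{
  /* Scaling by 2**56 places the sign bit of an 8-bit value into the sign
     bit of a 64-bit word; no overflow occurs for values in [-128, 127].  */
  const long long scale = 1LL << 56;
  return (long long) a * scale < (long long) b * scale;
}
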
14695 /* Return location descriptor for signed comparison OP RTL. */
14696
14697 static dw_loc_descr_ref
14698 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14699 machine_mode mem_mode)
14700 {
14701 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14702 dw_loc_descr_ref op0, op1;
14703
14704 if (op_mode == VOIDmode)
14705 op_mode = GET_MODE (XEXP (rtl, 1));
14706 if (op_mode == VOIDmode)
14707 return NULL;
14708
14709 scalar_int_mode int_op_mode;
14710 if (dwarf_strict
14711 && dwarf_version < 5
14712 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14713 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14714 return NULL;
14715
14716 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14717 VAR_INIT_STATUS_INITIALIZED);
14718 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14719 VAR_INIT_STATUS_INITIALIZED);
14720
14721 if (op0 == NULL || op1 == NULL)
14722 return NULL;
14723
14724 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14725 {
14726 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14727 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14728
14729 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14730 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14731 }
14732 return compare_loc_descriptor (op, op0, op1);
14733 }
14734
14735 /* Return location descriptor for unsigned comparison OP RTL. */
14736
14737 static dw_loc_descr_ref
14738 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14739 machine_mode mem_mode)
14740 {
14741 dw_loc_descr_ref op0, op1;
14742
14743 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14744 if (test_op_mode == VOIDmode)
14745 test_op_mode = GET_MODE (XEXP (rtl, 1));
14746
14747 scalar_int_mode op_mode;
14748 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14749 return NULL;
14750
14751 if (dwarf_strict
14752 && dwarf_version < 5
14753 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14754 return NULL;
14755
14756 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14757 VAR_INIT_STATUS_INITIALIZED);
14758 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14759 VAR_INIT_STATUS_INITIALIZED);
14760
14761 if (op0 == NULL || op1 == NULL)
14762 return NULL;
14763
14764 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14765 {
14766 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14767 dw_loc_descr_ref last0, last1;
14768 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14769 ;
14770 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14771 ;
14772 if (CONST_INT_P (XEXP (rtl, 0)))
14773 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14774 /* deref_size zero extends, so no need to mask it again. */
14775 else if (last0->dw_loc_opc != DW_OP_deref_size
14776 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14777 {
14778 add_loc_descr (&op0, int_loc_descriptor (mask));
14779 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14780 }
14781 if (CONST_INT_P (XEXP (rtl, 1)))
14782 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14783 /* deref_size zero extends, so no need to mask it again. */
14784 else if (last1->dw_loc_opc != DW_OP_deref_size
14785 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14786 {
14787 add_loc_descr (&op1, int_loc_descriptor (mask));
14788 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14789 }
14790 }
14791 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14792 {
14793 HOST_WIDE_INT bias = 1;
14794 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14795 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14796 if (CONST_INT_P (XEXP (rtl, 1)))
14797 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14798 + INTVAL (XEXP (rtl, 1)));
14799 else
14800 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14801 bias, 0));
14802 }
14803 return compare_loc_descriptor (op, op0, op1);
14804 }
14805
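/* Illustrative sketch (editorial, not part of dwarf2out.c) of the bias trick
   ucompare_loc_descriptor uses for operands of exactly DWARF2_ADDR_SIZE:
   adding 2**(N-1) with wrap-around exchanges signed and unsigned ordering.
   The sketch shows the direction plain C can express cleanly (signed order
   recovered through unsigned arithmetic); the DW_OP_plus_uconst <bias> above
   applies the same identity the other way round, because the DWARF stack
   arithmetic it relies on is signed.  The helper name is hypothetical and a
   32-bit int is assumed.  */

static int
signed_less_via_biased_unsigned (int a, int b)
{
  const unsigned int bias = 1u << 31;	/* 2**(N-1) for N == 32.  */
  return (unsigned int) a + bias < (unsigned int) b + bias;
}
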
14806 /* Return location descriptor for {U,S}{MIN,MAX}. */
14807
14808 static dw_loc_descr_ref
14809 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14810 machine_mode mem_mode)
14811 {
14812 enum dwarf_location_atom op;
14813 dw_loc_descr_ref op0, op1, ret;
14814 dw_loc_descr_ref bra_node, drop_node;
14815
14816 scalar_int_mode int_mode;
14817 if (dwarf_strict
14818 && dwarf_version < 5
14819 && (!is_a <scalar_int_mode> (mode, &int_mode)
14820 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14821 return NULL;
14822
14823 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14824 VAR_INIT_STATUS_INITIALIZED);
14825 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14826 VAR_INIT_STATUS_INITIALIZED);
14827
14828 if (op0 == NULL || op1 == NULL)
14829 return NULL;
14830
14831 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14832 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14833 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14834 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14835 {
14836 /* Checked by the caller. */
14837 int_mode = as_a <scalar_int_mode> (mode);
14838 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14839 {
14840 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14841 add_loc_descr (&op0, int_loc_descriptor (mask));
14842 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14843 add_loc_descr (&op1, int_loc_descriptor (mask));
14844 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14845 }
14846 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14847 {
14848 HOST_WIDE_INT bias = 1;
14849 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14850 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14851 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14852 }
14853 }
14854 else if (is_a <scalar_int_mode> (mode, &int_mode)
14855 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14856 {
14857 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14858 add_loc_descr (&op0, int_loc_descriptor (shift));
14859 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14860 add_loc_descr (&op1, int_loc_descriptor (shift));
14861 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14862 }
14863 else if (is_a <scalar_int_mode> (mode, &int_mode)
14864 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14865 {
14866 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14867 dw_loc_descr_ref cvt;
14868 if (type_die == NULL)
14869 return NULL;
14870 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14871 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14872 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14873 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14874 add_loc_descr (&op0, cvt);
14875 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14876 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14877 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14878 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14879 add_loc_descr (&op1, cvt);
14880 }
14881
14882 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14883 op = DW_OP_lt;
14884 else
14885 op = DW_OP_gt;
14886 ret = op0;
14887 add_loc_descr (&ret, op1);
14888 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14889 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14890 add_loc_descr (&ret, bra_node);
14891 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14892 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
14893 add_loc_descr (&ret, drop_node);
14894 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14895 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
14896 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
14897 && is_a <scalar_int_mode> (mode, &int_mode)
14898 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14899 ret = convert_descriptor_to_mode (int_mode, ret);
14900 return ret;
14901 }
14902
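/* Illustrative sketch (editorial, not part of dwarf2out.c) of the branch
   structure built above for MIN; MAX only replaces DW_OP_lt with DW_OP_gt.
   The helper name is hypothetical.  */

static unsigned int
umin_sketch (unsigned int a, unsigned int b)
{
  if (a < b)	/* DW_OP_lt; DW_OP_bra jumps over the DW_OP_swap.  */
    return a;	/* DW_OP_drop discards b and keeps the copy of a.  */
  return b;	/* DW_OP_swap then DW_OP_drop discards a and keeps b.  */
}
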
14903 /* Helper function for mem_loc_descriptor. Perform the binary operation
14904 OP after converting both arguments to TYPE_DIE, then convert the result
14905 back to an unsigned (or untyped) value of MODE. */
14906
14907 static dw_loc_descr_ref
14908 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
14909 scalar_int_mode mode, machine_mode mem_mode)
14910 {
14911 dw_loc_descr_ref cvt, op0, op1;
14912
14913 if (type_die == NULL)
14914 return NULL;
14915 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14916 VAR_INIT_STATUS_INITIALIZED);
14917 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14918 VAR_INIT_STATUS_INITIALIZED);
14919 if (op0 == NULL || op1 == NULL)
14920 return NULL;
14921 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14922 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14923 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14924 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14925 add_loc_descr (&op0, cvt);
14926 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14927 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14928 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14929 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14930 add_loc_descr (&op1, cvt);
14931 add_loc_descr (&op0, op1);
14932 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
14933 return convert_descriptor_to_mode (mode, op0);
14934 }
14935
14936 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
14937 const0 is DW_OP_lit0 or corresponding typed constant,
14938 const1 is DW_OP_lit1 or corresponding typed constant
14939 and constMSB is constant with just the MSB bit set
14940 for the mode):
14941 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14942 L1: const0 DW_OP_swap
14943 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
14944 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14945 L3: DW_OP_drop
14946 L4: DW_OP_nop
14947
14948 CTZ is similar:
14949 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14950 L1: const0 DW_OP_swap
14951 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14952 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14953 L3: DW_OP_drop
14954 L4: DW_OP_nop
14955
14956 FFS is similar:
14957 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
14958 L1: const1 DW_OP_swap
14959 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14960 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14961 L3: DW_OP_drop
14962 L4: DW_OP_nop */
14963
14964 static dw_loc_descr_ref
14965 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
14966 machine_mode mem_mode)
14967 {
14968 dw_loc_descr_ref op0, ret, tmp;
14969 HOST_WIDE_INT valv;
14970 dw_loc_descr_ref l1jump, l1label;
14971 dw_loc_descr_ref l2jump, l2label;
14972 dw_loc_descr_ref l3jump, l3label;
14973 dw_loc_descr_ref l4jump, l4label;
14974 rtx msb;
14975
14976 if (GET_MODE (XEXP (rtl, 0)) != mode)
14977 return NULL;
14978
14979 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14980 VAR_INIT_STATUS_INITIALIZED);
14981 if (op0 == NULL)
14982 return NULL;
14983 ret = op0;
14984 if (GET_CODE (rtl) == CLZ)
14985 {
14986 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14987 valv = GET_MODE_BITSIZE (mode);
14988 }
14989 else if (GET_CODE (rtl) == FFS)
14990 valv = 0;
14991 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14992 valv = GET_MODE_BITSIZE (mode);
14993 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
14994 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
14995 add_loc_descr (&ret, l1jump);
14996 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
14997 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
14998 VAR_INIT_STATUS_INITIALIZED);
14999 if (tmp == NULL)
15000 return NULL;
15001 add_loc_descr (&ret, tmp);
15002 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
15003 add_loc_descr (&ret, l4jump);
15004 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
15005 ? const1_rtx : const0_rtx,
15006 mode, mem_mode,
15007 VAR_INIT_STATUS_INITIALIZED);
15008 if (l1label == NULL)
15009 return NULL;
15010 add_loc_descr (&ret, l1label);
15011 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15012 l2label = new_loc_descr (DW_OP_dup, 0, 0);
15013 add_loc_descr (&ret, l2label);
15014 if (GET_CODE (rtl) != CLZ)
15015 msb = const1_rtx;
15016 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
15017 msb = GEN_INT (HOST_WIDE_INT_1U
15018 << (GET_MODE_BITSIZE (mode) - 1));
15019 else
15020 msb = immed_wide_int_const
15021 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
15022 GET_MODE_PRECISION (mode)), mode);
15023 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
15024 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15025 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
15026 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
15027 else
15028 tmp = mem_loc_descriptor (msb, mode, mem_mode,
15029 VAR_INIT_STATUS_INITIALIZED);
15030 if (tmp == NULL)
15031 return NULL;
15032 add_loc_descr (&ret, tmp);
15033 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15034 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
15035 add_loc_descr (&ret, l3jump);
15036 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15037 VAR_INIT_STATUS_INITIALIZED);
15038 if (tmp == NULL)
15039 return NULL;
15040 add_loc_descr (&ret, tmp);
15041 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
15042 ? DW_OP_shl : DW_OP_shr, 0, 0));
15043 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15044 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
15045 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15046 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
15047 add_loc_descr (&ret, l2jump);
15048 l3label = new_loc_descr (DW_OP_drop, 0, 0);
15049 add_loc_descr (&ret, l3label);
15050 l4label = new_loc_descr (DW_OP_nop, 0, 0);
15051 add_loc_descr (&ret, l4label);
15052 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15053 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15054 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15055 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15056 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15057 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
15058 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15059 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
15060 return ret;
15061 }
15062
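/* Illustrative sketch (editorial, not part of dwarf2out.c) of the loop the
   CLZ expression above encodes; CTZ and FFS only change the probed bit, the
   shift direction and the initial count.  The helper name is
   hypothetical.  */

static int
clz_sketch (unsigned long long x, int bits, int value_at_zero)
{
  int count = 0;
  unsigned long long msb = 1ULL << (bits - 1);

  if (x == 0)			/* DW_OP_dup DW_OP_bra <L1>: fall-through.  */
    return value_at_zero;	/* constV from CLZ_DEFINED_VALUE_AT_ZERO.  */
  while ((x & msb) == 0)	/* L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra  */
    {
      x <<= 1;			/* const1 DW_OP_shl  */
      count++;			/* DW_OP_plus_uconst <1>  */
    }
  return count;			/* L3: DW_OP_drop; L4: DW_OP_nop  */
}
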
15063 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
15064 const1 is DW_OP_lit1 or corresponding typed constant):
15065 const0 DW_OP_swap
15066 L1: DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1
15067 DW_OP_and DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15068 L2: DW_OP_drop
15069
15070 PARITY is similar:
15071 L1: DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1
15072 DW_OP_and DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15073 L2: DW_OP_drop */
15074
15075 static dw_loc_descr_ref
15076 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
15077 machine_mode mem_mode)
15078 {
15079 dw_loc_descr_ref op0, ret, tmp;
15080 dw_loc_descr_ref l1jump, l1label;
15081 dw_loc_descr_ref l2jump, l2label;
15082
15083 if (GET_MODE (XEXP (rtl, 0)) != mode)
15084 return NULL;
15085
15086 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15087 VAR_INIT_STATUS_INITIALIZED);
15088 if (op0 == NULL)
15089 return NULL;
15090 ret = op0;
15091 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15092 VAR_INIT_STATUS_INITIALIZED);
15093 if (tmp == NULL)
15094 return NULL;
15095 add_loc_descr (&ret, tmp);
15096 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15097 l1label = new_loc_descr (DW_OP_dup, 0, 0);
15098 add_loc_descr (&ret, l1label);
  /* DW_OP_bra branches when the popped value is non-zero, so test the
     remaining bits against const0 first; otherwise the loop would exit
     immediately for any non-zero input.  */
  tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
			    VAR_INIT_STATUS_INITIALIZED);
  if (tmp == NULL)
    return NULL;
  add_loc_descr (&ret, tmp);
  add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15099 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15100 add_loc_descr (&ret, l2jump);
15101 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15102 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15103 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15104 VAR_INIT_STATUS_INITIALIZED);
15105 if (tmp == NULL)
15106 return NULL;
15107 add_loc_descr (&ret, tmp);
15108 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15109 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15110 ? DW_OP_plus : DW_OP_xor, 0, 0));
15111 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15112 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15113 VAR_INIT_STATUS_INITIALIZED);
15114 add_loc_descr (&ret, tmp);
15115 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15116 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15117 add_loc_descr (&ret, l1jump);
15118 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15119 add_loc_descr (&ret, l2label);
15120 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15121 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15122 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15123 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15124 return ret;
15125 }
15126
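/* Illustrative sketch (editorial, not part of dwarf2out.c) of the loop the
   POPCOUNT expression above encodes; the helper name is hypothetical.  */

static unsigned int
popcount_sketch (unsigned int x)
{
  unsigned int count = 0;	/* const0 DW_OP_swap  */

  while (x != 0)		/* L1: DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2>  */
    {
      count += x & 1;		/* DW_OP_dup DW_OP_rot const1 DW_OP_and DW_OP_plus;
				   PARITY uses DW_OP_xor instead of DW_OP_plus.  */
      x >>= 1;			/* DW_OP_swap const1 DW_OP_shr  */
    }
  return count;			/* L2: DW_OP_drop  */
}
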
15127 /* BSWAP (constS is initial shift count, either 56 or 24):
15128 constS const0
15129 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15130 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15131 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15132 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15133 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
15134
15135 static dw_loc_descr_ref
15136 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15137 machine_mode mem_mode)
15138 {
15139 dw_loc_descr_ref op0, ret, tmp;
15140 dw_loc_descr_ref l1jump, l1label;
15141 dw_loc_descr_ref l2jump, l2label;
15142
15143 if (BITS_PER_UNIT != 8
15144 || (GET_MODE_BITSIZE (mode) != 32
15145 && GET_MODE_BITSIZE (mode) != 64))
15146 return NULL;
15147
15148 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15149 VAR_INIT_STATUS_INITIALIZED);
15150 if (op0 == NULL)
15151 return NULL;
15152
15153 ret = op0;
15154 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15155 mode, mem_mode,
15156 VAR_INIT_STATUS_INITIALIZED);
15157 if (tmp == NULL)
15158 return NULL;
15159 add_loc_descr (&ret, tmp);
15160 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15161 VAR_INIT_STATUS_INITIALIZED);
15162 if (tmp == NULL)
15163 return NULL;
15164 add_loc_descr (&ret, tmp);
15165 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15166 add_loc_descr (&ret, l1label);
15167 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15168 mode, mem_mode,
15169 VAR_INIT_STATUS_INITIALIZED);
15170 add_loc_descr (&ret, tmp);
15171 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15172 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15173 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15174 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15175 VAR_INIT_STATUS_INITIALIZED);
15176 if (tmp == NULL)
15177 return NULL;
15178 add_loc_descr (&ret, tmp);
15179 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15180 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15181 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15182 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15183 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15184 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15185 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15186 VAR_INIT_STATUS_INITIALIZED);
15187 add_loc_descr (&ret, tmp);
15188 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15189 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15190 add_loc_descr (&ret, l2jump);
15191 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15192 VAR_INIT_STATUS_INITIALIZED);
15193 add_loc_descr (&ret, tmp);
15194 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15195 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15196 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15197 add_loc_descr (&ret, l1jump);
15198 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15199 add_loc_descr (&ret, l2label);
15200 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15201 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15202 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15203 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15204 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15205 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15206 return ret;
15207 }
15208
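/* Illustrative sketch (editorial, not part of dwarf2out.c) of the BSWAP
   shift loop above for a 32-bit value; the helper name is hypothetical.  */

static unsigned int
bswap32_sketch (unsigned int x)
{
  unsigned int result = 0;	/* const0  */
  int shift = 32 - 8;		/* constS  */

  for (;;)
    {
      /* Extract the bytes from least to most significant and place each at
	 the mirrored position (DW_OP_pick, DW_OP_minus, DW_OP_shr, const255
	 DW_OP_and, DW_OP_shl, DW_OP_or above).  */
      result |= ((x >> (24 - shift)) & 0xff) << shift;
      if (shift == 0)		/* DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2>  */
	break;
      shift -= 8;		/* const8 DW_OP_minus  */
    }
  return result;		/* L2: DW_OP_drop DW_OP_swap DW_OP_drop  */
}
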
15209 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15210 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15211 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15212 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15213
15214 ROTATERT is similar:
15215 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15216 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15217 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
15218
15219 static dw_loc_descr_ref
15220 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15221 machine_mode mem_mode)
15222 {
15223 rtx rtlop1 = XEXP (rtl, 1);
15224 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15225 int i;
15226
15227 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15228 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15229 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15230 VAR_INIT_STATUS_INITIALIZED);
15231 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15232 VAR_INIT_STATUS_INITIALIZED);
15233 if (op0 == NULL || op1 == NULL)
15234 return NULL;
15235 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15236 for (i = 0; i < 2; i++)
15237 {
15238 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15239 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15240 mode, mem_mode,
15241 VAR_INIT_STATUS_INITIALIZED);
15242 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15243 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15244 ? DW_OP_const4u
15245 : HOST_BITS_PER_WIDE_INT == 64
15246 ? DW_OP_const8u : DW_OP_constu,
15247 GET_MODE_MASK (mode), 0);
15248 else
15249 mask[i] = NULL;
15250 if (mask[i] == NULL)
15251 return NULL;
15252 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15253 }
15254 ret = op0;
15255 add_loc_descr (&ret, op1);
15256 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15257 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15258 if (GET_CODE (rtl) == ROTATERT)
15259 {
15260 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15261 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15262 GET_MODE_BITSIZE (mode), 0));
15263 }
15264 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15265 if (mask[0] != NULL)
15266 add_loc_descr (&ret, mask[0]);
15267 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15268 if (mask[1] != NULL)
15269 {
15270 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15271 add_loc_descr (&ret, mask[1]);
15272 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15273 }
15274 if (GET_CODE (rtl) == ROTATE)
15275 {
15276 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15277 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15278 GET_MODE_BITSIZE (mode), 0));
15279 }
15280 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15281 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15282 return ret;
15283 }
15284
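/* Illustrative sketch (editorial, not part of dwarf2out.c) of the identity
   the ROTATE expression above encodes, rotl (x, n) = (x << n) | (x >> (bits
   - n)), including the masking needed for modes narrower than the DWARF
   address size.  The helper name is hypothetical; the n == 0 guard is only
   there because shifting by the full width is undefined in C.  */

static unsigned int
rotate_left_sketch (unsigned int x, unsigned int n, unsigned int bits)
{
  unsigned int mask = bits < 32 ? (1u << bits) - 1 : ~0u;	/* constMASK  */

  x &= mask;
  n %= bits;
  if (n == 0)
    return x;
  return (((x << n) & mask)	/* DW_OP_shl, constMASK DW_OP_and  */
	  | (x >> (bits - n)));	/* DW_OP_neg DW_OP_plus_uconst <BITSIZE> DW_OP_shr  */
}
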
15285 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15286 for DEBUG_PARAMETER_REF RTL. */
15287
15288 static dw_loc_descr_ref
15289 parameter_ref_descriptor (rtx rtl)
15290 {
15291 dw_loc_descr_ref ret;
15292 dw_die_ref ref;
15293
15294 if (dwarf_strict)
15295 return NULL;
15296 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15297 /* With LTO during LTRANS we get the late DIE that refers to the early
15298 DIE, thus we add another indirection here. This seems to confuse
15299 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15300 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15301 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15302 if (ref)
15303 {
15304 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15305 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15306 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15307 }
15308 else
15309 {
15310 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15311 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15312 }
15313 return ret;
15314 }
15315
15316 /* The following routine converts the RTL for a variable or parameter
15317 (resident in memory) into an equivalent Dwarf representation of a
15318 mechanism for getting the address of that same variable onto the top of a
15319 hypothetical "address evaluation" stack.
15320
15321 When creating memory location descriptors, we are effectively transforming
15322 the RTL for a memory-resident object into its Dwarf postfix expression
15323 equivalent. This routine recursively descends an RTL tree, turning
15324 it into Dwarf postfix code as it goes.
15325
15326 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15327
15328 MEM_MODE is the mode of the memory reference, needed to handle some
15329 autoincrement addressing modes.
15330
15331 Return 0 if we can't represent the location. */
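/* Editorial example: for the RTL (plus (reg R) (const_int 16)), where R is a
   hard register, the PLUS case below goes through based_loc_descr and yields
   a DW_OP_breg-style descriptor for R with offset 16; wrapping that address
   in a (mem ...) of address-sized mode appends a DW_OP_deref in the MEM
   case.  */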
15332
15333 dw_loc_descr_ref
15334 mem_loc_descriptor (rtx rtl, machine_mode mode,
15335 machine_mode mem_mode,
15336 enum var_init_status initialized)
15337 {
15338 dw_loc_descr_ref mem_loc_result = NULL;
15339 enum dwarf_location_atom op;
15340 dw_loc_descr_ref op0, op1;
15341 rtx inner = NULL_RTX;
15342 poly_int64 offset;
15343
15344 if (mode == VOIDmode)
15345 mode = GET_MODE (rtl);
15346
15347 /* Note that for a dynamically sized array, the location we will generate a
15348 description of here will be the lowest numbered location which is
15349 actually within the array. That's *not* necessarily the same as the
15350 zeroth element of the array. */
15351
15352 rtl = targetm.delegitimize_address (rtl);
15353
15354 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15355 return NULL;
15356
15357 scalar_int_mode int_mode, inner_mode, op1_mode;
15358 switch (GET_CODE (rtl))
15359 {
15360 case POST_INC:
15361 case POST_DEC:
15362 case POST_MODIFY:
15363 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15364
15365 case SUBREG:
15366 /* The case of a subreg may arise when we have a local (register)
15367 variable or a formal (register) parameter which doesn't quite fill
15368 up an entire register. For now, just assume that it is
15369 legitimate to make the Dwarf info refer to the whole register which
15370 contains the given subreg. */
15371 if (!subreg_lowpart_p (rtl))
15372 break;
15373 inner = SUBREG_REG (rtl);
15374 /* FALLTHRU */
15375 case TRUNCATE:
15376 if (inner == NULL_RTX)
15377 inner = XEXP (rtl, 0);
15378 if (is_a <scalar_int_mode> (mode, &int_mode)
15379 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15380 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15381 #ifdef POINTERS_EXTEND_UNSIGNED
15382 || (int_mode == Pmode && mem_mode != VOIDmode)
15383 #endif
15384 )
15385 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15386 {
15387 mem_loc_result = mem_loc_descriptor (inner,
15388 inner_mode,
15389 mem_mode, initialized);
15390 break;
15391 }
15392 if (dwarf_strict && dwarf_version < 5)
15393 break;
15394 if (is_a <scalar_int_mode> (mode, &int_mode)
15395 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15396 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15397 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15398 {
15399 dw_die_ref type_die;
15400 dw_loc_descr_ref cvt;
15401
15402 mem_loc_result = mem_loc_descriptor (inner,
15403 GET_MODE (inner),
15404 mem_mode, initialized);
15405 if (mem_loc_result == NULL)
15406 break;
15407 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15408 if (type_die == NULL)
15409 {
15410 mem_loc_result = NULL;
15411 break;
15412 }
15413 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15414 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15415 else
15416 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15417 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15418 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15419 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15420 add_loc_descr (&mem_loc_result, cvt);
15421 if (is_a <scalar_int_mode> (mode, &int_mode)
15422 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15423 {
15424 /* Convert it to untyped afterwards. */
15425 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15426 add_loc_descr (&mem_loc_result, cvt);
15427 }
15428 }
15429 break;
15430
15431 case REG:
15432 if (!is_a <scalar_int_mode> (mode, &int_mode)
15433 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15434 && rtl != arg_pointer_rtx
15435 && rtl != frame_pointer_rtx
15436 #ifdef POINTERS_EXTEND_UNSIGNED
15437 && (int_mode != Pmode || mem_mode == VOIDmode)
15438 #endif
15439 ))
15440 {
15441 dw_die_ref type_die;
15442 unsigned int dbx_regnum;
15443
15444 if (dwarf_strict && dwarf_version < 5)
15445 break;
15446 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
15447 break;
15448 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15449 if (type_die == NULL)
15450 break;
15451
15452 dbx_regnum = dbx_reg_number (rtl);
15453 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15454 break;
15455 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15456 dbx_regnum, 0);
15457 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15458 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15459 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15460 break;
15461 }
15462 /* Whenever a register number forms a part of the description of the
15463 method for calculating the (dynamic) address of a memory resident
15464 object, DWARF rules require the register number be referred to as
15465 a "base register". This distinction is not based in any way upon
15466 what category of register the hardware believes the given register
15467 belongs to. This is strictly DWARF terminology we're dealing with
15468 here. Note that in cases where the location of a memory-resident
15469 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15470 OP_CONST (0)) the actual DWARF location descriptor that we generate
15471 may just be OP_BASEREG (basereg). This may look deceptively like
15472 the object in question was allocated to a register (rather than in
15473 memory) so DWARF consumers need to be aware of the subtle
15474 distinction between OP_REG and OP_BASEREG. */
15475 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15476 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15477 else if (stack_realign_drap
15478 && crtl->drap_reg
15479 && crtl->args.internal_arg_pointer == rtl
15480 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15481 {
15482 /* If RTL is internal_arg_pointer, which has been optimized
15483 out, use DRAP instead. */
15484 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15485 VAR_INIT_STATUS_INITIALIZED);
15486 }
15487 break;
15488
15489 case SIGN_EXTEND:
15490 case ZERO_EXTEND:
15491 if (!is_a <scalar_int_mode> (mode, &int_mode)
15492 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15493 break;
15494 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15495 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15496 if (op0 == 0)
15497 break;
15498 else if (GET_CODE (rtl) == ZERO_EXTEND
15499 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15500 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15501 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15502 to expand zero extend as two shifts instead of
15503 masking. */
15504 && GET_MODE_SIZE (inner_mode) <= 4)
15505 {
15506 mem_loc_result = op0;
15507 add_loc_descr (&mem_loc_result,
15508 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15509 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15510 }
15511 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15512 {
15513 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15514 shift *= BITS_PER_UNIT;
15515 if (GET_CODE (rtl) == SIGN_EXTEND)
15516 op = DW_OP_shra;
15517 else
15518 op = DW_OP_shr;
15519 mem_loc_result = op0;
15520 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15521 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15522 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15523 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15524 }
15525 else if (!dwarf_strict || dwarf_version >= 5)
15526 {
15527 dw_die_ref type_die1, type_die2;
15528 dw_loc_descr_ref cvt;
15529
15530 type_die1 = base_type_for_mode (inner_mode,
15531 GET_CODE (rtl) == ZERO_EXTEND);
15532 if (type_die1 == NULL)
15533 break;
15534 type_die2 = base_type_for_mode (int_mode, 1);
15535 if (type_die2 == NULL)
15536 break;
15537 mem_loc_result = op0;
15538 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15539 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15540 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15541 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15542 add_loc_descr (&mem_loc_result, cvt);
15543 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15544 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15545 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15546 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15547 add_loc_descr (&mem_loc_result, cvt);
15548 }
15549 break;
15550
15551 case MEM:
15552 {
15553 rtx new_rtl = avoid_constant_pool_reference (rtl);
15554 if (new_rtl != rtl)
15555 {
15556 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15557 initialized);
15558 if (mem_loc_result != NULL)
15559 return mem_loc_result;
15560 }
15561 }
15562 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15563 get_address_mode (rtl), mode,
15564 VAR_INIT_STATUS_INITIALIZED);
15565 if (mem_loc_result == NULL)
15566 mem_loc_result = tls_mem_loc_descriptor (rtl);
15567 if (mem_loc_result != NULL)
15568 {
15569 if (!is_a <scalar_int_mode> (mode, &int_mode)
15570 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15571 {
15572 dw_die_ref type_die;
15573 dw_loc_descr_ref deref;
15574 HOST_WIDE_INT size;
15575
15576 if (dwarf_strict && dwarf_version < 5)
15577 return NULL;
15578 if (!GET_MODE_SIZE (mode).is_constant (&size))
15579 return NULL;
15580 type_die
15581 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15582 if (type_die == NULL)
15583 return NULL;
15584 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15585 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15586 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15587 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15588 add_loc_descr (&mem_loc_result, deref);
15589 }
15590 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15591 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15592 else
15593 add_loc_descr (&mem_loc_result,
15594 new_loc_descr (DW_OP_deref_size,
15595 GET_MODE_SIZE (int_mode), 0));
15596 }
15597 break;
15598
15599 case LO_SUM:
15600 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15601
15602 case LABEL_REF:
15603 /* Some ports can transform a symbol ref into a label ref, because
15604 the symbol ref is too far away and has to be dumped into a constant
15605 pool. */
15606 case CONST:
15607 case SYMBOL_REF:
15608 if (!is_a <scalar_int_mode> (mode, &int_mode)
15609 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15610 #ifdef POINTERS_EXTEND_UNSIGNED
15611 && (int_mode != Pmode || mem_mode == VOIDmode)
15612 #endif
15613 ))
15614 break;
15615 if (GET_CODE (rtl) == SYMBOL_REF
15616 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15617 {
15618 dw_loc_descr_ref temp;
15619
15620 /* If this is not defined, we have no way to emit the data. */
15621 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15622 break;
15623
15624 temp = new_addr_loc_descr (rtl, dtprel_true);
15625
15626 /* We check for DWARF 5 here because gdb did not implement
15627 DW_OP_form_tls_address until after 7.12. */
15628 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15629 ? DW_OP_form_tls_address
15630 : DW_OP_GNU_push_tls_address),
15631 0, 0);
15632 add_loc_descr (&mem_loc_result, temp);
15633
15634 break;
15635 }
15636
15637 if (!const_ok_for_output (rtl))
15638 {
15639 if (GET_CODE (rtl) == CONST)
15640 switch (GET_CODE (XEXP (rtl, 0)))
15641 {
15642 case NOT:
15643 op = DW_OP_not;
15644 goto try_const_unop;
15645 case NEG:
15646 op = DW_OP_neg;
15647 goto try_const_unop;
15648 try_const_unop:
15649 rtx arg;
15650 arg = XEXP (XEXP (rtl, 0), 0);
15651 if (!CONSTANT_P (arg))
15652 arg = gen_rtx_CONST (int_mode, arg);
15653 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15654 initialized);
15655 if (op0)
15656 {
15657 mem_loc_result = op0;
15658 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15659 }
15660 break;
15661 default:
15662 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15663 mem_mode, initialized);
15664 break;
15665 }
15666 break;
15667 }
15668
15669 symref:
15670 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15671 vec_safe_push (used_rtx_array, rtl);
15672 break;
15673
15674 case CONCAT:
15675 case CONCATN:
15676 case VAR_LOCATION:
15677 case DEBUG_IMPLICIT_PTR:
15678 expansion_failed (NULL_TREE, rtl,
15679 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15680 return 0;
15681
15682 case ENTRY_VALUE:
15683 if (dwarf_strict && dwarf_version < 5)
15684 return NULL;
15685 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15686 {
15687 if (!is_a <scalar_int_mode> (mode, &int_mode)
15688 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15689 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15690 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15691 else
15692 {
15693 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15694 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15695 return NULL;
15696 op0 = one_reg_loc_descriptor (dbx_regnum,
15697 VAR_INIT_STATUS_INITIALIZED);
15698 }
15699 }
15700 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15701 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15702 {
15703 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15704 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15705 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15706 return NULL;
15707 }
15708 else
15709 gcc_unreachable ();
15710 if (op0 == NULL)
15711 return NULL;
15712 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15713 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15714 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15715 break;
15716
15717 case DEBUG_PARAMETER_REF:
15718 mem_loc_result = parameter_ref_descriptor (rtl);
15719 break;
15720
15721 case PRE_MODIFY:
15722 /* Extract the PLUS expression nested inside and fall into
15723 PLUS code below. */
15724 rtl = XEXP (rtl, 1);
15725 goto plus;
15726
15727 case PRE_INC:
15728 case PRE_DEC:
15729 /* Turn these into a PLUS expression and fall into the PLUS code
15730 below. */
15731 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15732 gen_int_mode (GET_CODE (rtl) == PRE_INC
15733 ? GET_MODE_UNIT_SIZE (mem_mode)
15734 : -GET_MODE_UNIT_SIZE (mem_mode),
15735 mode));
15736
15737 /* fall through */
15738
15739 case PLUS:
15740 plus:
15741 if (is_based_loc (rtl)
15742 && is_a <scalar_int_mode> (mode, &int_mode)
15743 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15744 || XEXP (rtl, 0) == arg_pointer_rtx
15745 || XEXP (rtl, 0) == frame_pointer_rtx))
15746 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15747 INTVAL (XEXP (rtl, 1)),
15748 VAR_INIT_STATUS_INITIALIZED);
15749 else
15750 {
15751 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15752 VAR_INIT_STATUS_INITIALIZED);
15753 if (mem_loc_result == 0)
15754 break;
15755
15756 if (CONST_INT_P (XEXP (rtl, 1))
15757 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15758 <= DWARF2_ADDR_SIZE))
15759 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15760 else
15761 {
15762 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15763 VAR_INIT_STATUS_INITIALIZED);
15764 if (op1 == 0)
15765 return NULL;
15766 add_loc_descr (&mem_loc_result, op1);
15767 add_loc_descr (&mem_loc_result,
15768 new_loc_descr (DW_OP_plus, 0, 0));
15769 }
15770 }
15771 break;
15772
15773 /* If a pseudo-reg is optimized away, it is possible for it to
15774 be replaced with a MEM containing a multiply or shift. */
15775 case MINUS:
15776 op = DW_OP_minus;
15777 goto do_binop;
15778
15779 case MULT:
15780 op = DW_OP_mul;
15781 goto do_binop;
15782
15783 case DIV:
15784 if ((!dwarf_strict || dwarf_version >= 5)
15785 && is_a <scalar_int_mode> (mode, &int_mode)
15786 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15787 {
15788 mem_loc_result = typed_binop (DW_OP_div, rtl,
15789 base_type_for_mode (mode, 0),
15790 int_mode, mem_mode);
15791 break;
15792 }
15793 op = DW_OP_div;
15794 goto do_binop;
15795
15796 case UMOD:
15797 op = DW_OP_mod;
15798 goto do_binop;
15799
15800 case ASHIFT:
15801 op = DW_OP_shl;
15802 goto do_shift;
15803
15804 case ASHIFTRT:
15805 op = DW_OP_shra;
15806 goto do_shift;
15807
15808 case LSHIFTRT:
15809 op = DW_OP_shr;
15810 goto do_shift;
15811
15812 do_shift:
15813 if (!is_a <scalar_int_mode> (mode, &int_mode))
15814 break;
15815 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15816 VAR_INIT_STATUS_INITIALIZED);
15817 {
15818 rtx rtlop1 = XEXP (rtl, 1);
15819 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15820 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15821 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15822 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15823 VAR_INIT_STATUS_INITIALIZED);
15824 }
15825
15826 if (op0 == 0 || op1 == 0)
15827 break;
15828
15829 mem_loc_result = op0;
15830 add_loc_descr (&mem_loc_result, op1);
15831 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15832 break;
15833
15834 case AND:
15835 op = DW_OP_and;
15836 goto do_binop;
15837
15838 case IOR:
15839 op = DW_OP_or;
15840 goto do_binop;
15841
15842 case XOR:
15843 op = DW_OP_xor;
15844 goto do_binop;
15845
15846 do_binop:
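/* Generic binary case: describe both operands (the first operand is
pushed first), then append the operator; each DWARF binary operator
pops the two topmost stack entries and pushes the result. */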
15847 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15848 VAR_INIT_STATUS_INITIALIZED);
15849 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15850 VAR_INIT_STATUS_INITIALIZED);
15851
15852 if (op0 == 0 || op1 == 0)
15853 break;
15854
15855 mem_loc_result = op0;
15856 add_loc_descr (&mem_loc_result, op1);
15857 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15858 break;
15859
15860 case MOD:
15861 if ((!dwarf_strict || dwarf_version >= 5)
15862 && is_a <scalar_int_mode> (mode, &int_mode)
15863 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15864 {
15865 mem_loc_result = typed_binop (DW_OP_mod, rtl,
15866 base_type_for_mode (mode, 0),
15867 int_mode, mem_mode);
15868 break;
15869 }
15870
15871 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15872 VAR_INIT_STATUS_INITIALIZED);
15873 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15874 VAR_INIT_STATUS_INITIALIZED);
15875
15876 if (op0 == 0 || op1 == 0)
15877 break;
15878
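/* Emulate signed modulo on the stack: with op0 below op1,
DW_OP_over DW_OP_over duplicates both operands, DW_OP_div DW_OP_mul
computes (op0 / op1) * op1 using DWARF's signed division, and
DW_OP_minus subtracts that from op0, leaving op0 % op1. DWARF's
DW_OP_mod is unsigned, which is why UMOD above maps to it directly
while signed MOD needs this sequence. */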
15879 mem_loc_result = op0;
15880 add_loc_descr (&mem_loc_result, op1);
15881 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15882 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15883 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
15884 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
15885 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
15886 break;
15887
15888 case UDIV:
15889 if ((!dwarf_strict || dwarf_version >= 5)
15890 && is_a <scalar_int_mode> (mode, &int_mode))
15891 {
15892 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
15893 {
15894 op = DW_OP_div;
15895 goto do_binop;
15896 }
15897 mem_loc_result = typed_binop (DW_OP_div, rtl,
15898 base_type_for_mode (int_mode, 1),
15899 int_mode, mem_mode);
15900 }
15901 break;
15902
15903 case NOT:
15904 op = DW_OP_not;
15905 goto do_unop;
15906
15907 case ABS:
15908 op = DW_OP_abs;
15909 goto do_unop;
15910
15911 case NEG:
15912 op = DW_OP_neg;
15913 goto do_unop;
15914
15915 do_unop:
15916 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15917 VAR_INIT_STATUS_INITIALIZED);
15918
15919 if (op0 == 0)
15920 break;
15921
15922 mem_loc_result = op0;
15923 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15924 break;
15925
15926 case CONST_INT:
15927 if (!is_a <scalar_int_mode> (mode, &int_mode)
15928 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15929 #ifdef POINTERS_EXTEND_UNSIGNED
15930 || (int_mode == Pmode
15931 && mem_mode != VOIDmode
15932 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
15933 #endif
15934 )
15935 {
15936 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15937 break;
15938 }
15939 if ((!dwarf_strict || dwarf_version >= 5)
15940 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
15941 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
15942 {
15943 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
15944 scalar_int_mode amode;
15945 if (type_die == NULL)
15946 return NULL;
15947 if (INTVAL (rtl) >= 0
15948 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
15949 .exists (&amode))
15950 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
15951 /* const DW_OP_convert <XXX> vs.
15952 DW_OP_const_type <XXX, 1, const>. */
15953 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
15954 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
15955 {
15956 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15957 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15958 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15959 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15960 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
15961 add_loc_descr (&mem_loc_result, op0);
15962 return mem_loc_result;
15963 }
15964 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
15965 INTVAL (rtl));
15966 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15967 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15968 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15969 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
15970 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
15971 else
15972 {
15973 mem_loc_result->dw_loc_oprnd2.val_class
15974 = dw_val_class_const_double;
15975 mem_loc_result->dw_loc_oprnd2.v.val_double
15976 = double_int::from_shwi (INTVAL (rtl));
15977 }
15978 }
15979 break;
15980
15981 case CONST_DOUBLE:
15982 if (!dwarf_strict || dwarf_version >= 5)
15983 {
15984 dw_die_ref type_die;
15985
15986 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
15987 CONST_DOUBLE rtx could represent either a large integer
15988 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
15989 the value is always a floating point constant.
15990
15991 When it is an integer, a CONST_DOUBLE is used whenever
15992 the constant requires 2 HWIs to be adequately represented.
15993 We output CONST_DOUBLEs as blocks. */
15994 if (mode == VOIDmode
15995 || (GET_MODE (rtl) == VOIDmode
15996 && maybe_ne (GET_MODE_BITSIZE (mode),
15997 HOST_BITS_PER_DOUBLE_INT)))
15998 break;
15999 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16000 if (type_die == NULL)
16001 return NULL;
16002 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16003 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16004 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16005 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16006 #if TARGET_SUPPORTS_WIDE_INT == 0
16007 if (!SCALAR_FLOAT_MODE_P (mode))
16008 {
16009 mem_loc_result->dw_loc_oprnd2.val_class
16010 = dw_val_class_const_double;
16011 mem_loc_result->dw_loc_oprnd2.v.val_double
16012 = rtx_to_double_int (rtl);
16013 }
16014 else
16015 #endif
16016 {
16017 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
16018 unsigned int length = GET_MODE_SIZE (float_mode);
16019 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16020
16021 insert_float (rtl, array);
16022 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16023 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16024 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16025 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16026 }
16027 }
16028 break;
16029
16030 case CONST_WIDE_INT:
16031 if (!dwarf_strict || dwarf_version >= 5)
16032 {
16033 dw_die_ref type_die;
16034
16035 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16036 if (type_die == NULL)
16037 return NULL;
16038 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16039 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16040 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16041 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16042 mem_loc_result->dw_loc_oprnd2.val_class
16043 = dw_val_class_wide_int;
16044 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16045 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
16046 }
16047 break;
16048
16049 case CONST_POLY_INT:
16050 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
16051 break;
16052
16053 case EQ:
16054 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
16055 break;
16056
16057 case GE:
16058 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16059 break;
16060
16061 case GT:
16062 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16063 break;
16064
16065 case LE:
16066 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16067 break;
16068
16069 case LT:
16070 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16071 break;
16072
16073 case NE:
16074 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
16075 break;
16076
16077 case GEU:
16078 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16079 break;
16080
16081 case GTU:
16082 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16083 break;
16084
16085 case LEU:
16086 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16087 break;
16088
16089 case LTU:
16090 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16091 break;
16092
16093 case UMIN:
16094 case UMAX:
16095 if (!SCALAR_INT_MODE_P (mode))
16096 break;
16097 /* FALLTHRU */
16098 case SMIN:
16099 case SMAX:
16100 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16101 break;
16102
16103 case ZERO_EXTRACT:
16104 case SIGN_EXTRACT:
16105 if (CONST_INT_P (XEXP (rtl, 1))
16106 && CONST_INT_P (XEXP (rtl, 2))
16107 && is_a <scalar_int_mode> (mode, &int_mode)
16108 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16109 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16110 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16111 && ((unsigned) INTVAL (XEXP (rtl, 1))
16112 + (unsigned) INTVAL (XEXP (rtl, 2))
16113 <= GET_MODE_BITSIZE (int_mode)))
16114 {
16115 int shift, size;
16116 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16117 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16118 if (op0 == 0)
16119 break;
16120 if (GET_CODE (rtl) == SIGN_EXTRACT)
16121 op = DW_OP_shra;
16122 else
16123 op = DW_OP_shr;
16124 mem_loc_result = op0;
16125 size = INTVAL (XEXP (rtl, 1));
16126 shift = INTVAL (XEXP (rtl, 2));
16127 if (BITS_BIG_ENDIAN)
16128 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
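/* Extract the field from the address-sized stack value by shifting it
up to the most significant bits and back down: DW_OP_shl by
(address size in bits - shift - size), then DW_OP_shr (or DW_OP_shra
for a sign extract) by (address size in bits - size). */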
16129 if (shift + size != (int) DWARF2_ADDR_SIZE * BITS_PER_UNIT)
16130 {
16131 add_loc_descr (&mem_loc_result,
16132 int_loc_descriptor (DWARF2_ADDR_SIZE * BITS_PER_UNIT
16133 - shift - size));
16134 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16135 }
16136 if (size != (int) DWARF2_ADDR_SIZE * BITS_PER_UNIT)
16137 {
16138 add_loc_descr (&mem_loc_result,
16139 int_loc_descriptor (DWARF2_ADDR_SIZE * BITS_PER_UNIT - size));
16140 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16141 }
16142 }
16143 break;
16144
16145 case IF_THEN_ELSE:
16146 {
16147 dw_loc_descr_ref op2, bra_node, drop_node;
16148 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16149 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16150 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16151 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16152 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16153 VAR_INIT_STATUS_INITIALIZED);
16154 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16155 VAR_INIT_STATUS_INITIALIZED);
16156 if (op0 == NULL || op1 == NULL || op2 == NULL)
16157 break;
16158
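/* Push the two arms and then the condition; DW_OP_bra pops the
condition and, if it is nonzero, jumps straight to DW_OP_drop, which
discards the else-value and leaves XEXP (rtl, 1). If the condition
is zero, DW_OP_swap followed by DW_OP_drop discards the then-value
instead, leaving XEXP (rtl, 2). */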
16159 mem_loc_result = op1;
16160 add_loc_descr (&mem_loc_result, op2);
16161 add_loc_descr (&mem_loc_result, op0);
16162 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16163 add_loc_descr (&mem_loc_result, bra_node);
16164 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16165 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16166 add_loc_descr (&mem_loc_result, drop_node);
16167 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16168 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
16169 }
16170 break;
16171
16172 case FLOAT_EXTEND:
16173 case FLOAT_TRUNCATE:
16174 case FLOAT:
16175 case UNSIGNED_FLOAT:
16176 case FIX:
16177 case UNSIGNED_FIX:
16178 if (!dwarf_strict || dwarf_version >= 5)
16179 {
16180 dw_die_ref type_die;
16181 dw_loc_descr_ref cvt;
16182
16183 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16184 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16185 if (op0 == NULL)
16186 break;
16187 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16188 && (GET_CODE (rtl) == FLOAT
16189 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16190 {
16191 type_die = base_type_for_mode (int_mode,
16192 GET_CODE (rtl) == UNSIGNED_FLOAT);
16193 if (type_die == NULL)
16194 break;
16195 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16196 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16197 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16198 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16199 add_loc_descr (&op0, cvt);
16200 }
16201 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16202 if (type_die == NULL)
16203 break;
16204 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16205 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16206 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16207 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16208 add_loc_descr (&op0, cvt);
16209 if (is_a <scalar_int_mode> (mode, &int_mode)
16210 && (GET_CODE (rtl) == FIX
16211 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16212 {
16213 op0 = convert_descriptor_to_mode (int_mode, op0);
16214 if (op0 == NULL)
16215 break;
16216 }
16217 mem_loc_result = op0;
16218 }
16219 break;
16220
16221 case CLZ:
16222 case CTZ:
16223 case FFS:
16224 if (is_a <scalar_int_mode> (mode, &int_mode))
16225 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16226 break;
16227
16228 case POPCOUNT:
16229 case PARITY:
16230 if (is_a <scalar_int_mode> (mode, &int_mode))
16231 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16232 break;
16233
16234 case BSWAP:
16235 if (is_a <scalar_int_mode> (mode, &int_mode))
16236 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16237 break;
16238
16239 case ROTATE:
16240 case ROTATERT:
16241 if (is_a <scalar_int_mode> (mode, &int_mode))
16242 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16243 break;
16244
16245 case COMPARE:
16246 /* In theory, we could implement the above. */
16247 /* DWARF cannot represent the unsigned compare operations
16248 natively. */
16249 case SS_MULT:
16250 case US_MULT:
16251 case SS_DIV:
16252 case US_DIV:
16253 case SS_PLUS:
16254 case US_PLUS:
16255 case SS_MINUS:
16256 case US_MINUS:
16257 case SS_NEG:
16258 case US_NEG:
16259 case SS_ABS:
16260 case SS_ASHIFT:
16261 case US_ASHIFT:
16262 case SS_TRUNCATE:
16263 case US_TRUNCATE:
16264 case UNORDERED:
16265 case ORDERED:
16266 case UNEQ:
16267 case UNGE:
16268 case UNGT:
16269 case UNLE:
16270 case UNLT:
16271 case LTGT:
16272 case FRACT_CONVERT:
16273 case UNSIGNED_FRACT_CONVERT:
16274 case SAT_FRACT:
16275 case UNSIGNED_SAT_FRACT:
16276 case SQRT:
16277 case ASM_OPERANDS:
16278 case VEC_MERGE:
16279 case VEC_SELECT:
16280 case VEC_CONCAT:
16281 case VEC_DUPLICATE:
16282 case VEC_SERIES:
16283 case UNSPEC:
16284 case HIGH:
16285 case FMA:
16286 case STRICT_LOW_PART:
16287 case CONST_VECTOR:
16288 case CONST_FIXED:
16289 case CLRSB:
16290 case CLOBBER:
16291 case CLOBBER_HIGH:
16292 /* If delegitimize_address couldn't do anything with the UNSPEC, we
16293 can't express it in the debug info. This can happen e.g. with some
16294 TLS UNSPECs. */
16295 break;
16296
16297 case CONST_STRING:
16298 resolve_one_addr (&rtl);
16299 goto symref;
16300
16301 /* RTL sequences inside a PARALLEL record a series of DWARF operations for
16302 the expression. An UNSPEC rtx represents a raw DWARF operation;
16303 new_loc_descr is called for it to build the operation directly.
16304 Otherwise mem_loc_descriptor is called recursively. */
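/* As an illustrative example only: an element such as
(unspec [(const_int 8) (const_int 0)] DW_OP_plus_uconst) inside the
PARALLEL would be turned directly into a DW_OP_plus_uconst 8
operation, since the unspec number is interpreted as the DWARF opcode
and the two vector elements as its operands. */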
16305 case PARALLEL:
16306 {
16307 int index = 0;
16308 dw_loc_descr_ref exp_result = NULL;
16309
16310 for (; index < XVECLEN (rtl, 0); index++)
16311 {
16312 rtx elem = XVECEXP (rtl, 0, index);
16313 if (GET_CODE (elem) == UNSPEC)
16314 {
16315 /* Each DWARF operation UNSPEC contains two operands; if
16316 an operand is not used for the operation, const0_rtx is
16317 passed. */
16318 gcc_assert (XVECLEN (elem, 0) == 2);
16319
16320 HOST_WIDE_INT dw_op = XINT (elem, 1);
16321 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16322 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16323 exp_result
16324 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16325 oprnd2);
16326 }
16327 else
16328 exp_result
16329 = mem_loc_descriptor (elem, mode, mem_mode,
16330 VAR_INIT_STATUS_INITIALIZED);
16331
16332 if (!mem_loc_result)
16333 mem_loc_result = exp_result;
16334 else
16335 add_loc_descr (&mem_loc_result, exp_result);
16336 }
16337
16338 break;
16339 }
16340
16341 default:
16342 if (flag_checking)
16343 {
16344 print_rtl (stderr, rtl);
16345 gcc_unreachable ();
16346 }
16347 break;
16348 }
16349
16350 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16351 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16352
16353 return mem_loc_result;
16354 }
16355
16356 /* Return a descriptor that describes the concatenation of two locations.
16357 This is typically a complex variable. */
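/* For example, a complex value whose real and imaginary parts live in
two separate locations is described as
<location of X0> DW_OP_piece <size of X0>
<location of X1> DW_OP_piece <size of X1>. */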
16358
16359 static dw_loc_descr_ref
16360 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16361 {
16362 /* At present we only track constant-sized pieces. */
16363 unsigned int size0, size1;
16364 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16365 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16366 return 0;
16367
16368 dw_loc_descr_ref cc_loc_result = NULL;
16369 dw_loc_descr_ref x0_ref
16370 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16371 dw_loc_descr_ref x1_ref
16372 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16373
16374 if (x0_ref == 0 || x1_ref == 0)
16375 return 0;
16376
16377 cc_loc_result = x0_ref;
16378 add_loc_descr_op_piece (&cc_loc_result, size0);
16379
16380 add_loc_descr (&cc_loc_result, x1_ref);
16381 add_loc_descr_op_piece (&cc_loc_result, size1);
16382
16383 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16384 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16385
16386 return cc_loc_result;
16387 }
16388
16389 /* Return a descriptor that describes the concatenation of N
16390 locations. */
16391
16392 static dw_loc_descr_ref
16393 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16394 {
16395 unsigned int i;
16396 dw_loc_descr_ref cc_loc_result = NULL;
16397 unsigned int n = XVECLEN (concatn, 0);
16398 unsigned int size;
16399
16400 for (i = 0; i < n; ++i)
16401 {
16402 dw_loc_descr_ref ref;
16403 rtx x = XVECEXP (concatn, 0, i);
16404
16405 /* At present we only track constant-sized pieces. */
16406 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16407 return NULL;
16408
16409 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16410 if (ref == NULL)
16411 return NULL;
16412
16413 add_loc_descr (&cc_loc_result, ref);
16414 add_loc_descr_op_piece (&cc_loc_result, size);
16415 }
16416
16417 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16418 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16419
16420 return cc_loc_result;
16421 }
16422
16423 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16424 for DEBUG_IMPLICIT_PTR RTL. */
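/* The first operand of the resulting operation refers to the DIE of the
pointed-to variable (or, if that DIE does not exist yet, to the decl
itself, to be resolved later); the second operand is the byte OFFSET
into that variable. */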
16425
16426 static dw_loc_descr_ref
16427 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16428 {
16429 dw_loc_descr_ref ret;
16430 dw_die_ref ref;
16431
16432 if (dwarf_strict && dwarf_version < 5)
16433 return NULL;
16434 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16435 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16436 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16437 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16438 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16439 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16440 if (ref)
16441 {
16442 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16443 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16444 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16445 }
16446 else
16447 {
16448 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16449 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16450 }
16451 return ret;
16452 }
16453
16454 /* Output a proper Dwarf location descriptor for a variable or parameter
16455 which is either allocated in a register or in a memory location. For a
16456 register, we just generate an OP_REG and the register number. For a
16457 memory location we provide a Dwarf postfix expression describing how to
16458 generate the (dynamic) address of the object onto the address stack.
16459
16460 MODE is mode of the decl if this loc_descriptor is going to be used in
16461 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
16462 allowed, VOIDmode otherwise.
16463
16464 If we don't know how to describe it, return 0. */
16465
16466 static dw_loc_descr_ref
16467 loc_descriptor (rtx rtl, machine_mode mode,
16468 enum var_init_status initialized)
16469 {
16470 dw_loc_descr_ref loc_result = NULL;
16471 scalar_int_mode int_mode;
16472
16473 switch (GET_CODE (rtl))
16474 {
16475 case SUBREG:
16476 /* The case of a subreg may arise when we have a local (register)
16477 variable or a formal (register) parameter which doesn't quite fill
16478 up an entire register. For now, just assume that it is
16479 legitimate to make the Dwarf info refer to the whole register which
16480 contains the given subreg. */
16481 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16482 loc_result = loc_descriptor (SUBREG_REG (rtl),
16483 GET_MODE (SUBREG_REG (rtl)), initialized);
16484 else
16485 goto do_default;
16486 break;
16487
16488 case REG:
16489 loc_result = reg_loc_descriptor (rtl, initialized);
16490 break;
16491
16492 case MEM:
16493 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16494 GET_MODE (rtl), initialized);
16495 if (loc_result == NULL)
16496 loc_result = tls_mem_loc_descriptor (rtl);
16497 if (loc_result == NULL)
16498 {
16499 rtx new_rtl = avoid_constant_pool_reference (rtl);
16500 if (new_rtl != rtl)
16501 loc_result = loc_descriptor (new_rtl, mode, initialized);
16502 }
16503 break;
16504
16505 case CONCAT:
16506 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16507 initialized);
16508 break;
16509
16510 case CONCATN:
16511 loc_result = concatn_loc_descriptor (rtl, initialized);
16512 break;
16513
16514 case VAR_LOCATION:
16515 /* Single part. */
16516 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16517 {
16518 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16519 if (GET_CODE (loc) == EXPR_LIST)
16520 loc = XEXP (loc, 0);
16521 loc_result = loc_descriptor (loc, mode, initialized);
16522 break;
16523 }
16524
16525 rtl = XEXP (rtl, 1);
16526 /* FALLTHRU */
16527
16528 case PARALLEL:
16529 {
16530 rtvec par_elems = XVEC (rtl, 0);
16531 int num_elem = GET_NUM_ELEM (par_elems);
16532 machine_mode mode;
16533 int i, size;
16534
16535 /* Create the first one, so we have something to add to. */
16536 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16537 VOIDmode, initialized);
16538 if (loc_result == NULL)
16539 return NULL;
16540 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16541 /* At present we only track constant-sized pieces. */
16542 if (!GET_MODE_SIZE (mode).is_constant (&size))
16543 return NULL;
16544 add_loc_descr_op_piece (&loc_result, size);
16545 for (i = 1; i < num_elem; i++)
16546 {
16547 dw_loc_descr_ref temp;
16548
16549 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16550 VOIDmode, initialized);
16551 if (temp == NULL)
16552 return NULL;
16553 add_loc_descr (&loc_result, temp);
16554 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16555 /* At present we only track constant-sized pieces. */
16556 if (!GET_MODE_SIZE (mode).is_constant (&size))
16557 return NULL;
16558 add_loc_descr_op_piece (&loc_result, size);
16559 }
16560 }
16561 break;
16562
16563 case CONST_INT:
16564 if (mode != VOIDmode && mode != BLKmode)
16565 {
16566 int_mode = as_a <scalar_int_mode> (mode);
16567 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16568 INTVAL (rtl));
16569 }
16570 break;
16571
16572 case CONST_DOUBLE:
16573 if (mode == VOIDmode)
16574 mode = GET_MODE (rtl);
16575
16576 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16577 {
16578 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16579
16580 /* Note that a CONST_DOUBLE rtx could represent either an integer
16581 or a floating-point constant. A CONST_DOUBLE is used whenever
16582 the constant requires more than one word in order to be
16583 adequately represented. We output CONST_DOUBLEs as blocks. */
16584 scalar_mode smode = as_a <scalar_mode> (mode);
16585 loc_result = new_loc_descr (DW_OP_implicit_value,
16586 GET_MODE_SIZE (smode), 0);
16587 #if TARGET_SUPPORTS_WIDE_INT == 0
16588 if (!SCALAR_FLOAT_MODE_P (smode))
16589 {
16590 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16591 loc_result->dw_loc_oprnd2.v.val_double
16592 = rtx_to_double_int (rtl);
16593 }
16594 else
16595 #endif
16596 {
16597 unsigned int length = GET_MODE_SIZE (smode);
16598 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16599
16600 insert_float (rtl, array);
16601 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16602 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16603 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16604 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16605 }
16606 }
16607 break;
16608
16609 case CONST_WIDE_INT:
16610 if (mode == VOIDmode)
16611 mode = GET_MODE (rtl);
16612
16613 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16614 {
16615 int_mode = as_a <scalar_int_mode> (mode);
16616 loc_result = new_loc_descr (DW_OP_implicit_value,
16617 GET_MODE_SIZE (int_mode), 0);
16618 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16619 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16620 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16621 }
16622 break;
16623
16624 case CONST_VECTOR:
16625 if (mode == VOIDmode)
16626 mode = GET_MODE (rtl);
16627
16628 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16629 {
16630 unsigned int length;
16631 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16632 return NULL;
16633
16634 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16635 unsigned char *array
16636 = ggc_vec_alloc<unsigned char> (length * elt_size);
16637 unsigned int i;
16638 unsigned char *p;
16639 machine_mode imode = GET_MODE_INNER (mode);
16640
16641 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16642 switch (GET_MODE_CLASS (mode))
16643 {
16644 case MODE_VECTOR_INT:
16645 for (i = 0, p = array; i < length; i++, p += elt_size)
16646 {
16647 rtx elt = CONST_VECTOR_ELT (rtl, i);
16648 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16649 }
16650 break;
16651
16652 case MODE_VECTOR_FLOAT:
16653 for (i = 0, p = array; i < length; i++, p += elt_size)
16654 {
16655 rtx elt = CONST_VECTOR_ELT (rtl, i);
16656 insert_float (elt, p);
16657 }
16658 break;
16659
16660 default:
16661 gcc_unreachable ();
16662 }
16663
16664 loc_result = new_loc_descr (DW_OP_implicit_value,
16665 length * elt_size, 0);
16666 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16667 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16668 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16669 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16670 }
16671 break;
16672
16673 case CONST:
16674 if (mode == VOIDmode
16675 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16676 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16677 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16678 {
16679 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16680 break;
16681 }
16682 /* FALLTHROUGH */
16683 case SYMBOL_REF:
16684 if (!const_ok_for_output (rtl))
16685 break;
16686 /* FALLTHROUGH */
16687 case LABEL_REF:
16688 if (is_a <scalar_int_mode> (mode, &int_mode)
16689 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16690 && (dwarf_version >= 4 || !dwarf_strict))
16691 {
16692 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16693 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16694 vec_safe_push (used_rtx_array, rtl);
16695 }
16696 break;
16697
16698 case DEBUG_IMPLICIT_PTR:
16699 loc_result = implicit_ptr_descriptor (rtl, 0);
16700 break;
16701
16702 case PLUS:
16703 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16704 && CONST_INT_P (XEXP (rtl, 1)))
16705 {
16706 loc_result
16707 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16708 break;
16709 }
16710 /* FALLTHRU */
16711 do_default:
16712 default:
16713 if ((is_a <scalar_int_mode> (mode, &int_mode)
16714 && GET_MODE (rtl) == int_mode
16715 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16716 && dwarf_version >= 4)
16717 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16718 {
16719 /* Value expression. */
16720 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16721 if (loc_result)
16722 add_loc_descr (&loc_result,
16723 new_loc_descr (DW_OP_stack_value, 0, 0));
16724 }
16725 break;
16726 }
16727
16728 return loc_result;
16729 }
16730
16731 /* We need to figure out what section we should use as the base for the
16732 address ranges where a given location is valid.
16733 1. If this particular DECL has a section associated with it, use that.
16734 2. If this function has a section associated with it, use that.
16735 3. Otherwise, use the text section.
16736 XXX: If you split a variable across multiple sections, we won't notice. */
16737
16738 static const char *
16739 secname_for_decl (const_tree decl)
16740 {
16741 const char *secname;
16742
16743 if (VAR_OR_FUNCTION_DECL_P (decl)
16744 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16745 && DECL_SECTION_NAME (decl))
16746 secname = DECL_SECTION_NAME (decl);
16747 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16748 {
16749 if (in_cold_section_p)
16750 {
16751 section *sec = current_function_section ();
16752 if (sec->common.flags & SECTION_NAMED)
16753 return sec->named.name;
16754 }
16755 secname = DECL_SECTION_NAME (current_function_decl);
16756 }
16757 else if (cfun && in_cold_section_p)
16758 secname = crtl->subsections.cold_section_label;
16759 else
16760 secname = text_section_label;
16761
16762 return secname;
16763 }
16764
16765 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16766
16767 static bool
16768 decl_by_reference_p (tree decl)
16769 {
16770 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16771 || VAR_P (decl))
16772 && DECL_BY_REFERENCE (decl));
16773 }
16774
16775 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16776 for VARLOC. */
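/* WANT_ADDRESS is 0 when the value of LOC is wanted, 1 when its address
is required, and 2 when its address is preferred but a value expression
terminated by DW_OP_stack_value is acceptable. */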
16777
16778 static dw_loc_descr_ref
16779 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16780 enum var_init_status initialized)
16781 {
16782 int have_address = 0;
16783 dw_loc_descr_ref descr;
16784 machine_mode mode;
16785
16786 if (want_address != 2)
16787 {
16788 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16789 /* Single part. */
16790 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16791 {
16792 varloc = PAT_VAR_LOCATION_LOC (varloc);
16793 if (GET_CODE (varloc) == EXPR_LIST)
16794 varloc = XEXP (varloc, 0);
16795 mode = GET_MODE (varloc);
16796 if (MEM_P (varloc))
16797 {
16798 rtx addr = XEXP (varloc, 0);
16799 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16800 mode, initialized);
16801 if (descr)
16802 have_address = 1;
16803 else
16804 {
16805 rtx x = avoid_constant_pool_reference (varloc);
16806 if (x != varloc)
16807 descr = mem_loc_descriptor (x, mode, VOIDmode,
16808 initialized);
16809 }
16810 }
16811 else
16812 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16813 }
16814 else
16815 return 0;
16816 }
16817 else
16818 {
16819 if (GET_CODE (varloc) == VAR_LOCATION)
16820 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16821 else
16822 mode = DECL_MODE (loc);
16823 descr = loc_descriptor (varloc, mode, initialized);
16824 have_address = 1;
16825 }
16826
16827 if (!descr)
16828 return 0;
16829
16830 if (want_address == 2 && !have_address
16831 && (dwarf_version >= 4 || !dwarf_strict))
16832 {
16833 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16834 {
16835 expansion_failed (loc, NULL_RTX,
16836 "DWARF address size mismatch");
16837 return 0;
16838 }
16839 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16840 have_address = 1;
16841 }
16842 /* Show if we can't fill the request for an address. */
16843 if (want_address && !have_address)
16844 {
16845 expansion_failed (loc, NULL_RTX,
16846 "Want address and only have value");
16847 return 0;
16848 }
16849
16850 /* If we've got an address and don't want one, dereference. */
16851 if (!want_address && have_address)
16852 {
16853 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16854 enum dwarf_location_atom op;
16855
16856 if (size > DWARF2_ADDR_SIZE || size == -1)
16857 {
16858 expansion_failed (loc, NULL_RTX,
16859 "DWARF address size mismatch");
16860 return 0;
16861 }
16862 else if (size == DWARF2_ADDR_SIZE)
16863 op = DW_OP_deref;
16864 else
16865 op = DW_OP_deref_size;
16866
16867 add_loc_descr (&descr, new_loc_descr (op, size, 0));
16868 }
16869
16870 return descr;
16871 }
16872
16873 /* Create a DW_OP_piece or DW_OP_bit_piece for the given BITSIZE and bit
16874 OFFSET, or return NULL if it is not possible. */
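/* For example, a byte-aligned 16-bit piece yields DW_OP_piece 2, while a
3-bit piece at bit offset 5 yields DW_OP_bit_piece 3, 5 (only when the
DWARF version is at least 3 or strict DWARF is not requested). */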
16875
16876 static dw_loc_descr_ref
16877 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
16878 {
16879 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
16880 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
16881 else if (dwarf_version >= 3 || !dwarf_strict)
16882 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
16883 else
16884 return NULL;
16885 }
16886
16887 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16888 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
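/* The result is a sequence of per-fragment location expressions, each
terminated by DW_OP_piece or DW_OP_bit_piece; fragments whose location
could not be expressed contribute an empty expression followed by a
piece, marking those bits as optimized out. */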
16889
16890 static dw_loc_descr_ref
16891 dw_sra_loc_expr (tree decl, rtx loc)
16892 {
16893 rtx p;
16894 unsigned HOST_WIDE_INT padsize = 0;
16895 dw_loc_descr_ref descr, *descr_tail;
16896 unsigned HOST_WIDE_INT decl_size;
16897 rtx varloc;
16898 enum var_init_status initialized;
16899
16900 if (DECL_SIZE (decl) == NULL
16901 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
16902 return NULL;
16903
16904 decl_size = tree_to_uhwi (DECL_SIZE (decl));
16905 descr = NULL;
16906 descr_tail = &descr;
16907
16908 for (p = loc; p; p = XEXP (p, 1))
16909 {
16910 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
16911 rtx loc_note = *decl_piece_varloc_ptr (p);
16912 dw_loc_descr_ref cur_descr;
16913 dw_loc_descr_ref *tail, last = NULL;
16914 unsigned HOST_WIDE_INT opsize = 0;
16915
16916 if (loc_note == NULL_RTX
16917 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
16918 {
16919 padsize += bitsize;
16920 continue;
16921 }
16922 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
16923 varloc = NOTE_VAR_LOCATION (loc_note);
16924 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
16925 if (cur_descr == NULL)
16926 {
16927 padsize += bitsize;
16928 continue;
16929 }
16930
16931 /* Check that cur_descr either doesn't use
16932 DW_OP_*piece operations, or their sum is equal
16933 to bitsize. Otherwise we can't embed it. */
16934 for (tail = &cur_descr; *tail != NULL;
16935 tail = &(*tail)->dw_loc_next)
16936 if ((*tail)->dw_loc_opc == DW_OP_piece)
16937 {
16938 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
16939 * BITS_PER_UNIT;
16940 last = *tail;
16941 }
16942 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
16943 {
16944 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
16945 last = *tail;
16946 }
16947
16948 if (last != NULL && opsize != bitsize)
16949 {
16950 padsize += bitsize;
16951 /* Discard the current piece of the descriptor and release any
16952 addr_table entries it uses. */
16953 remove_loc_list_addr_table_entries (cur_descr);
16954 continue;
16955 }
16956
16957 /* If there is a hole, add a DW_OP_*piece after an empty DWARF
16958 expression, which means that those bits are optimized out. */
16959 if (padsize)
16960 {
16961 if (padsize > decl_size)
16962 {
16963 remove_loc_list_addr_table_entries (cur_descr);
16964 goto discard_descr;
16965 }
16966 decl_size -= padsize;
16967 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
16968 if (*descr_tail == NULL)
16969 {
16970 remove_loc_list_addr_table_entries (cur_descr);
16971 goto discard_descr;
16972 }
16973 descr_tail = &(*descr_tail)->dw_loc_next;
16974 padsize = 0;
16975 }
16976 *descr_tail = cur_descr;
16977 descr_tail = tail;
16978 if (bitsize > decl_size)
16979 goto discard_descr;
16980 decl_size -= bitsize;
16981 if (last == NULL)
16982 {
16983 HOST_WIDE_INT offset = 0;
16984 if (GET_CODE (varloc) == VAR_LOCATION
16985 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16986 {
16987 varloc = PAT_VAR_LOCATION_LOC (varloc);
16988 if (GET_CODE (varloc) == EXPR_LIST)
16989 varloc = XEXP (varloc, 0);
16990 }
16991 do
16992 {
16993 if (GET_CODE (varloc) == CONST
16994 || GET_CODE (varloc) == SIGN_EXTEND
16995 || GET_CODE (varloc) == ZERO_EXTEND)
16996 varloc = XEXP (varloc, 0);
16997 else if (GET_CODE (varloc) == SUBREG)
16998 varloc = SUBREG_REG (varloc);
16999 else
17000 break;
17001 }
17002 while (1);
17003 /* The DW_OP_bit_piece offset should be zero for register
17004 or implicit location descriptions and empty location
17005 descriptions, but for memory addresses it needs a big-endian
17006 adjustment. */
17007 if (MEM_P (varloc))
17008 {
17009 unsigned HOST_WIDE_INT memsize;
17010 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
17011 goto discard_descr;
17012 memsize *= BITS_PER_UNIT;
17013 if (memsize != bitsize)
17014 {
17015 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
17016 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
17017 goto discard_descr;
17018 if (memsize < bitsize)
17019 goto discard_descr;
17020 if (BITS_BIG_ENDIAN)
17021 offset = memsize - bitsize;
17022 }
17023 }
17024
17025 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
17026 if (*descr_tail == NULL)
17027 goto discard_descr;
17028 descr_tail = &(*descr_tail)->dw_loc_next;
17029 }
17030 }
17031
17032 /* If there were any non-empty expressions, add padding till the end of
17033 the decl. */
17034 if (descr != NULL && decl_size != 0)
17035 {
17036 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
17037 if (*descr_tail == NULL)
17038 goto discard_descr;
17039 }
17040 return descr;
17041
17042 discard_descr:
17043 /* Discard the descriptor and release any addr_table entries it uses. */
17044 remove_loc_list_addr_table_entries (descr);
17045 return NULL;
17046 }
17047
17048 /* Return the dwarf representation of the location list LOC_LIST of
17049 DECL. WANT_ADDRESS has the same meaning as in loc_list_from_tree
17050 function. */
17051
17052 static dw_loc_list_ref
17053 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
17054 {
17055 const char *endname, *secname;
17056 var_loc_view endview;
17057 rtx varloc;
17058 enum var_init_status initialized;
17059 struct var_loc_node *node;
17060 dw_loc_descr_ref descr;
17061 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
17062 dw_loc_list_ref list = NULL;
17063 dw_loc_list_ref *listp = &list;
17064
17065 /* Now that we know what section we are using for a base,
17066 actually construct the list of locations.
17067 The first location information is what is passed to the
17068 function that creates the location list, and the remaining
17069 locations just get added on to that list.
17070 Note that we only know the start address for a location
17071 (i.e. location changes), so to build the range, we use
17072 the range [current location start, next location start].
17073 This means we have to special case the last node, and generate
17074 a range of [last location start, end of function label]. */
17075
17076 if (cfun && crtl->has_bb_partition)
17077 {
17078 bool save_in_cold_section_p = in_cold_section_p;
17079 in_cold_section_p = first_function_block_is_cold;
17080 if (loc_list->last_before_switch == NULL)
17081 in_cold_section_p = !in_cold_section_p;
17082 secname = secname_for_decl (decl);
17083 in_cold_section_p = save_in_cold_section_p;
17084 }
17085 else
17086 secname = secname_for_decl (decl);
17087
17088 for (node = loc_list->first; node; node = node->next)
17089 {
17090 bool range_across_switch = false;
17091 if (GET_CODE (node->loc) == EXPR_LIST
17092 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
17093 {
17094 if (GET_CODE (node->loc) == EXPR_LIST)
17095 {
17096 descr = NULL;
17097 /* This requires DW_OP_{,bit_}piece, which is not usable
17098 inside DWARF expressions. */
17099 if (want_address == 2)
17100 descr = dw_sra_loc_expr (decl, node->loc);
17101 }
17102 else
17103 {
17104 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17105 varloc = NOTE_VAR_LOCATION (node->loc);
17106 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
17107 }
17108 if (descr)
17109 {
17110 /* If section switch happens in between node->label
17111 and node->next->label (or end of function) and
17112 we can't emit it as a single entry list,
17113 emit two ranges, first one ending at the end
17114 of first partition and second one starting at the
17115 beginning of second partition. */
17116 if (node == loc_list->last_before_switch
17117 && (node != loc_list->first || loc_list->first->next
17118 /* If we are to emit a view number, we will emit
17119 a loclist rather than a single location
17120 expression for the entire function (see
17121 loc_list_has_views), so we have to split the
17122 range that straddles across partitions. */
17123 || !ZERO_VIEW_P (node->view))
17124 && current_function_decl)
17125 {
17126 endname = cfun->fde->dw_fde_end;
17127 endview = 0;
17128 range_across_switch = true;
17129 }
17130 /* The variable has a location between NODE->LABEL and
17131 NODE->NEXT->LABEL. */
17132 else if (node->next)
17133 endname = node->next->label, endview = node->next->view;
17134 /* If the variable has a location at the last label
17135 it keeps its location until the end of function. */
17136 else if (!current_function_decl)
17137 endname = text_end_label, endview = 0;
17138 else
17139 {
17140 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17141 current_function_funcdef_no);
17142 endname = ggc_strdup (label_id);
17143 endview = 0;
17144 }
17145
17146 *listp = new_loc_list (descr, node->label, node->view,
17147 endname, endview, secname);
17148 if (TREE_CODE (decl) == PARM_DECL
17149 && node == loc_list->first
17150 && NOTE_P (node->loc)
17151 && strcmp (node->label, endname) == 0)
17152 (*listp)->force = true;
17153 listp = &(*listp)->dw_loc_next;
17154 }
17155 }
17156
17157 if (cfun
17158 && crtl->has_bb_partition
17159 && node == loc_list->last_before_switch)
17160 {
17161 bool save_in_cold_section_p = in_cold_section_p;
17162 in_cold_section_p = !first_function_block_is_cold;
17163 secname = secname_for_decl (decl);
17164 in_cold_section_p = save_in_cold_section_p;
17165 }
17166
17167 if (range_across_switch)
17168 {
17169 if (GET_CODE (node->loc) == EXPR_LIST)
17170 descr = dw_sra_loc_expr (decl, node->loc);
17171 else
17172 {
17173 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17174 varloc = NOTE_VAR_LOCATION (node->loc);
17175 descr = dw_loc_list_1 (decl, varloc, want_address,
17176 initialized);
17177 }
17178 gcc_assert (descr);
17179 /* The variable has a location between NODE->LABEL and
17180 NODE->NEXT->LABEL. */
17181 if (node->next)
17182 endname = node->next->label, endview = node->next->view;
17183 else
17184 endname = cfun->fde->dw_fde_second_end, endview = 0;
17185 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17186 endname, endview, secname);
17187 listp = &(*listp)->dw_loc_next;
17188 }
17189 }
17190
17191 /* Try to avoid the overhead of a location list by emitting a location
17192 expression instead, but only if we didn't have more than one
17193 location entry in the first place. If some entries were not
17194 representable, we don't want to pretend that the single entry that
17195 was representable applies to the entire scope in which the variable
17196 is available. */
17197 if (list && loc_list->first->next)
17198 gen_llsym (list);
17199 else
17200 maybe_gen_llsym (list);
17201
17202 return list;
17203 }
17204
17205 /* Return true if the loc_list has only a single element and thus can be
17206 represented as a location description. */
17207
17208 static bool
17209 single_element_loc_list_p (dw_loc_list_ref list)
17210 {
17211 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17212 return !list->ll_symbol;
17213 }
17214
17215 /* Duplicate a single element of location list. */
17216
17217 static inline dw_loc_descr_ref
17218 copy_loc_descr (dw_loc_descr_ref ref)
17219 {
17220 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17221 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17222 return copy;
17223 }
17224
17225 /* To each location in list LIST append loc descr REF. */
17226
17227 static void
17228 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17229 {
17230 dw_loc_descr_ref copy;
17231 add_loc_descr (&list->expr, ref);
17232 list = list->dw_loc_next;
17233 while (list)
17234 {
17235 copy = copy_loc_descr (ref);
17236 add_loc_descr (&list->expr, copy);
17237 while (copy->dw_loc_next)
17238 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17239 list = list->dw_loc_next;
17240 }
17241 }
17242
17243 /* To each location in list LIST prepend loc descr REF. */
17244
17245 static void
17246 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17247 {
17248 dw_loc_descr_ref copy;
17249 dw_loc_descr_ref ref_end = list->expr;
17250 add_loc_descr (&ref, list->expr);
17251 list->expr = ref;
17252 list = list->dw_loc_next;
17253 while (list)
17254 {
17255 dw_loc_descr_ref end = list->expr;
17256 list->expr = copy = copy_loc_descr (ref);
17257 while (copy->dw_loc_next != ref_end)
17258 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17259 copy->dw_loc_next = end;
17260 list = list->dw_loc_next;
17261 }
17262 }
17263
17264 /* Given two lists RET and LIST, produce a location list that is the result
17265 of adding the expression in LIST to the expression in RET at each position
17266 in the program.
17267 This might be destructive on both RET and LIST.
17268
17269 TODO: We handle only simple cases of RET or LIST having at most one
17270 element. General case would involve sorting the lists in program order
17271 and merging them that will need some additional work.
17272 Adding that will improve quality of debug info especially for SRA-ed
17273 structures. */
17274
17275 static void
17276 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17277 {
17278 if (!list)
17279 return;
17280 if (!*ret)
17281 {
17282 *ret = list;
17283 return;
17284 }
17285 if (!list->dw_loc_next)
17286 {
17287 add_loc_descr_to_each (*ret, list->expr);
17288 return;
17289 }
17290 if (!(*ret)->dw_loc_next)
17291 {
17292 prepend_loc_descr_to_each (list, (*ret)->expr);
17293 *ret = list;
17294 return;
17295 }
17296 expansion_failed (NULL_TREE, NULL_RTX,
17297 "Don't know how to merge two non-trivial"
17298 " location lists.\n");
17299 *ret = NULL;
17300 return;
17301 }
17302
17303 /* LOC is a constant expression. Try our luck: look it up in the constant
17304 pool and return a loc_descr for its address. */
17305
17306 static dw_loc_descr_ref
17307 cst_pool_loc_descr (tree loc)
17308 {
17309 /* Get an RTL for this, if something has been emitted. */
17310 rtx rtl = lookup_constant_def (loc);
17311
17312 if (!rtl || !MEM_P (rtl))
17313 {
17314 gcc_assert (!rtl);
17315 return 0;
17316 }
17317 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17318
17319 /* TODO: We might get more coverage if we were actually delaying expansion
17320 of all expressions till the end of compilation, when constant pools are
17321 fully populated. */
17322 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17323 {
17324 expansion_failed (loc, NULL_RTX,
17325 "CST value in constant pool but not marked.");
17326 return 0;
17327 }
17328 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17329 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17330 }
17331
17332 /* Return a dw_loc_list representing the address of the ADDR_EXPR LOC
17333 by looking for an inner INDIRECT_REF expression and turning
17334 it into simple arithmetic.
17335
17336 See loc_list_from_tree for the meaning of CONTEXT. */
17337
17338 static dw_loc_list_ref
17339 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17340 loc_descr_context *context)
17341 {
17342 tree obj, offset;
17343 poly_int64 bitsize, bitpos, bytepos;
17344 machine_mode mode;
17345 int unsignedp, reversep, volatilep = 0;
17346 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17347
17348 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17349 &bitsize, &bitpos, &offset, &mode,
17350 &unsignedp, &reversep, &volatilep);
17351 STRIP_NOPS (obj);
17352 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17353 {
17354 expansion_failed (loc, NULL_RTX, "bitfield access");
17355 return 0;
17356 }
17357 if (!INDIRECT_REF_P (obj))
17358 {
17359 expansion_failed (obj,
17360 NULL_RTX, "no indirect ref in inner reference");
17361 return 0;
17362 }
17363 if (!offset && known_eq (bitpos, 0))
17364 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17365 context);
17366 else if (toplev
17367 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17368 && (dwarf_version >= 4 || !dwarf_strict))
17369 {
17370 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17371 if (!list_ret)
17372 return 0;
17373 if (offset)
17374 {
17375 /* Variable offset. */
17376 list_ret1 = loc_list_from_tree (offset, 0, context);
17377 if (list_ret1 == 0)
17378 return 0;
17379 add_loc_list (&list_ret, list_ret1);
17380 if (!list_ret)
17381 return 0;
17382 add_loc_descr_to_each (list_ret,
17383 new_loc_descr (DW_OP_plus, 0, 0));
17384 }
17385 HOST_WIDE_INT value;
17386 if (bytepos.is_constant (&value) && value > 0)
17387 add_loc_descr_to_each (list_ret,
17388 new_loc_descr (DW_OP_plus_uconst, value, 0));
17389 else if (maybe_ne (bytepos, 0))
17390 loc_list_plus_const (list_ret, bytepos);
17391 add_loc_descr_to_each (list_ret,
17392 new_loc_descr (DW_OP_stack_value, 0, 0));
17393 }
17394 return list_ret;
17395 }
17396
17397 /* Set LOC to the next operation that is not a DW_OP_nop operation. If all
17398 operations from LOC are nops, move to the last one. Insert in NOPS all
17399 operations that are skipped. */
17400
17401 static void
17402 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17403 hash_set<dw_loc_descr_ref> &nops)
17404 {
17405 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17406 {
17407 nops.add (loc);
17408 loc = loc->dw_loc_next;
17409 }
17410 }
17411
17412 /* Helper for loc_descr_without_nops: free the location description operation
17413 P. */
17414
17415 bool
17416 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17417 {
17418 ggc_free (loc);
17419 return true;
17420 }
17421
17422 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17423 finishes LOC. */
17424
17425 static void
17426 loc_descr_without_nops (dw_loc_descr_ref &loc)
17427 {
17428 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17429 return;
17430
17431 /* Set of all DW_OP_nop operations we remove. */
17432 hash_set<dw_loc_descr_ref> nops;
17433
17434 /* First, strip all prefix NOP operations in order to keep the head of the
17435 operations list. */
17436 loc_descr_to_next_no_nop (loc, nops);
17437
17438 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17439 {
17440 /* For control flow operations: strip "prefix" nops in destination
17441 labels. */
17442 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17443 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17444 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17445 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17446
17447 /* Do the same for the operations that follow, then move to the next
17448 iteration. */
17449 if (cur->dw_loc_next != NULL)
17450 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17451 cur = cur->dw_loc_next;
17452 }
17453
17454 nops.traverse<void *, free_loc_descr> (NULL);
17455 }
17456
17457
17458 struct dwarf_procedure_info;
17459
17460 /* Helper structure for location descriptions generation. */
17461 struct loc_descr_context
17462 {
17463 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17464 NULL_TREE if DW_OP_push_object_address is invalid for this location
17465 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17466 tree context_type;
17467 /* The ..._DECL node that should be translated as a
17468 DW_OP_push_object_address operation. */
17469 tree base_decl;
17470 /* Information about the DWARF procedure we are currently generating. NULL if
17471 we are not generating a DWARF procedure. */
17472 struct dwarf_procedure_info *dpi;
17473 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
17474 by the consumer. Used for DW_TAG_generic_subrange attributes. */
17475 bool placeholder_arg;
17476 /* True if PLACEHOLDER_EXPR has been seen. */
17477 bool placeholder_seen;
17478 };
17479
17480 /* DWARF procedures generation
17481
17482 DWARF expressions (a.k.a. location descriptions) are used to encode variable
17483 quantities such as sizes or offsets. Such computations can have redundant parts
17484 that can be factorized in order to reduce the size of the output debug
17485 information. This is the whole point of DWARF procedures.
17486
17487 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17488 already factorized into functions ("size functions") in order to handle very
17489 big and complex types. Such functions are quite simple: they have integral
17490 arguments, they return an integral result and their body contains only a
17491 return statement with arithmetic expressions. This is the only kind of
17492 function we are interested in translating into DWARF procedures, here.
17493
17494 DWARF expressions and DWARF procedures are executed using a stack, so we have
17495 to define some calling convention for them to interact. Let's say that:
17496
17497 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17498 all arguments in reverse order (right-to-left) so that when the DWARF
17499 procedure execution starts, the first argument is the top of the stack.
17500
17501 - Then, when returning, the DWARF procedure must have consumed all arguments
17502 on the stack, must have pushed the result and touched nothing else.
17503
17504 - Each argument and the result have integral types and can each be held in a
17505 single stack slot.
17506
17507 - We call "frame offset" the number of stack slots that are "under DWARF
17508 procedure control": it includes the argument slots, the temporaries and
17509 the result slot. Thus, it is equal to the number of arguments when the
17510 procedure execution starts and must be equal to one (the result) when it
17511 returns. */
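
/* As an illustration of this convention (a sketch, not opcodes the compiler
   necessarily emits verbatim): to evaluate P (x, y) once P has been turned
   into a DWARF procedure, a DWARF expression pushes y, then x, and then
   executes DW_OP_call4 (or DW_OP_call2/DW_OP_call_ref) referencing P's
   DW_TAG_dwarf_procedure DIE. When the call returns, both argument slots
   have been consumed and only the result remains on the stack. */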
17512
17513 /* Helper structure used when generating operations for a DWARF procedure. */
17514 struct dwarf_procedure_info
17515 {
17516 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17517 currently translated. */
17518 tree fndecl;
17519 /* The number of arguments FNDECL takes. */
17520 unsigned args_count;
17521 };
17522
17523 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17524 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17525 equate it to this DIE. */
17526
17527 static dw_die_ref
17528 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17529 dw_die_ref parent_die)
17530 {
17531 dw_die_ref dwarf_proc_die;
17532
17533 if ((dwarf_version < 3 && dwarf_strict)
17534 || location == NULL)
17535 return NULL;
17536
17537 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17538 if (fndecl)
17539 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17540 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17541 return dwarf_proc_die;
17542 }
17543
17544 /* Return whether TYPE is a supported type as a DWARF procedure argument
17545 type or return type (we handle only scalar types and pointer types that
17546 aren't wider than the DWARF expression evaluation stack). */
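
/* For instance, on a typical 64-bit target (DWARF2_ADDR_SIZE == 8) this
   accepts "int", "long" and pointer types but rejects a 16-byte integer such
   as __int128 as well as any aggregate type (illustrative examples only). */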
17547
17548 static bool
17549 is_handled_procedure_type (tree type)
17550 {
17551 return ((INTEGRAL_TYPE_P (type)
17552 || TREE_CODE (type) == OFFSET_TYPE
17553 || TREE_CODE (type) == POINTER_TYPE)
17554 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17555 }
17556
17557 /* Helper for resolve_args_picking: do the same but stop when coming across
17558 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17559 offset *before* evaluating the corresponding operation. */
17560
17561 static bool
17562 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17563 struct dwarf_procedure_info *dpi,
17564 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17565 {
17566 /* The "frame_offset" identifier is already used to name a macro... */
17567 unsigned frame_offset_ = initial_frame_offset;
17568 dw_loc_descr_ref l;
17569
17570 for (l = loc; l != NULL;)
17571 {
17572 bool existed;
17573 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17574
17575 /* If we already met this node, there is nothing to compute anymore. */
17576 if (existed)
17577 {
17578 /* Make sure that the stack size is consistent wherever the execution
17579 flow comes from. */
17580 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17581 break;
17582 }
17583 l_frame_offset = frame_offset_;
17584
17585 /* If needed, relocate the picking offset with respect to the frame
17586 offset. */
17587 if (l->frame_offset_rel)
17588 {
17589 unsigned HOST_WIDE_INT off;
17590 switch (l->dw_loc_opc)
17591 {
17592 case DW_OP_pick:
17593 off = l->dw_loc_oprnd1.v.val_unsigned;
17594 break;
17595 case DW_OP_dup:
17596 off = 0;
17597 break;
17598 case DW_OP_over:
17599 off = 1;
17600 break;
17601 default:
17602 gcc_unreachable ();
17603 }
17604 /* frame_offset_ is the size of the current stack frame, including
17605 incoming arguments. Besides, the arguments are pushed
17606 right-to-left. Thus, in order to access the Nth argument from
17607 this operation node, the picking has to skip temporaries *plus*
17608 one stack slot per argument (0 for the first one, 1 for the second
17609 one, etc.).
17610
17611 The targeted argument number (N) is already set as the operand,
17612 and the number of temporaries can be computed with:
17613 frame_offset_ - dpi->args_count */
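/* Worked example (illustration only): with dpi->args_count == 2 and one
   temporary already pushed, frame_offset_ == 3, so accessing argument
   N == 1 requires DW_OP_pick with offset 1 + (3 - 2) == 2: the temporary
   occupies the top slot, followed by the two argument slots. */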
17614 off += frame_offset_ - dpi->args_count;
17615
17616 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17617 if (off > 255)
17618 return false;
17619
17620 if (off == 0)
17621 {
17622 l->dw_loc_opc = DW_OP_dup;
17623 l->dw_loc_oprnd1.v.val_unsigned = 0;
17624 }
17625 else if (off == 1)
17626 {
17627 l->dw_loc_opc = DW_OP_over;
17628 l->dw_loc_oprnd1.v.val_unsigned = 0;
17629 }
17630 else
17631 {
17632 l->dw_loc_opc = DW_OP_pick;
17633 l->dw_loc_oprnd1.v.val_unsigned = off;
17634 }
17635 }
17636
17637 /* Update frame_offset according to the effect the current operation has
17638 on the stack. */
17639 switch (l->dw_loc_opc)
17640 {
17641 case DW_OP_deref:
17642 case DW_OP_swap:
17643 case DW_OP_rot:
17644 case DW_OP_abs:
17645 case DW_OP_neg:
17646 case DW_OP_not:
17647 case DW_OP_plus_uconst:
17648 case DW_OP_skip:
17649 case DW_OP_reg0:
17650 case DW_OP_reg1:
17651 case DW_OP_reg2:
17652 case DW_OP_reg3:
17653 case DW_OP_reg4:
17654 case DW_OP_reg5:
17655 case DW_OP_reg6:
17656 case DW_OP_reg7:
17657 case DW_OP_reg8:
17658 case DW_OP_reg9:
17659 case DW_OP_reg10:
17660 case DW_OP_reg11:
17661 case DW_OP_reg12:
17662 case DW_OP_reg13:
17663 case DW_OP_reg14:
17664 case DW_OP_reg15:
17665 case DW_OP_reg16:
17666 case DW_OP_reg17:
17667 case DW_OP_reg18:
17668 case DW_OP_reg19:
17669 case DW_OP_reg20:
17670 case DW_OP_reg21:
17671 case DW_OP_reg22:
17672 case DW_OP_reg23:
17673 case DW_OP_reg24:
17674 case DW_OP_reg25:
17675 case DW_OP_reg26:
17676 case DW_OP_reg27:
17677 case DW_OP_reg28:
17678 case DW_OP_reg29:
17679 case DW_OP_reg30:
17680 case DW_OP_reg31:
17681 case DW_OP_bregx:
17682 case DW_OP_piece:
17683 case DW_OP_deref_size:
17684 case DW_OP_nop:
17685 case DW_OP_bit_piece:
17686 case DW_OP_implicit_value:
17687 case DW_OP_stack_value:
17688 break;
17689
17690 case DW_OP_addr:
17691 case DW_OP_const1u:
17692 case DW_OP_const1s:
17693 case DW_OP_const2u:
17694 case DW_OP_const2s:
17695 case DW_OP_const4u:
17696 case DW_OP_const4s:
17697 case DW_OP_const8u:
17698 case DW_OP_const8s:
17699 case DW_OP_constu:
17700 case DW_OP_consts:
17701 case DW_OP_dup:
17702 case DW_OP_over:
17703 case DW_OP_pick:
17704 case DW_OP_lit0:
17705 case DW_OP_lit1:
17706 case DW_OP_lit2:
17707 case DW_OP_lit3:
17708 case DW_OP_lit4:
17709 case DW_OP_lit5:
17710 case DW_OP_lit6:
17711 case DW_OP_lit7:
17712 case DW_OP_lit8:
17713 case DW_OP_lit9:
17714 case DW_OP_lit10:
17715 case DW_OP_lit11:
17716 case DW_OP_lit12:
17717 case DW_OP_lit13:
17718 case DW_OP_lit14:
17719 case DW_OP_lit15:
17720 case DW_OP_lit16:
17721 case DW_OP_lit17:
17722 case DW_OP_lit18:
17723 case DW_OP_lit19:
17724 case DW_OP_lit20:
17725 case DW_OP_lit21:
17726 case DW_OP_lit22:
17727 case DW_OP_lit23:
17728 case DW_OP_lit24:
17729 case DW_OP_lit25:
17730 case DW_OP_lit26:
17731 case DW_OP_lit27:
17732 case DW_OP_lit28:
17733 case DW_OP_lit29:
17734 case DW_OP_lit30:
17735 case DW_OP_lit31:
17736 case DW_OP_breg0:
17737 case DW_OP_breg1:
17738 case DW_OP_breg2:
17739 case DW_OP_breg3:
17740 case DW_OP_breg4:
17741 case DW_OP_breg5:
17742 case DW_OP_breg6:
17743 case DW_OP_breg7:
17744 case DW_OP_breg8:
17745 case DW_OP_breg9:
17746 case DW_OP_breg10:
17747 case DW_OP_breg11:
17748 case DW_OP_breg12:
17749 case DW_OP_breg13:
17750 case DW_OP_breg14:
17751 case DW_OP_breg15:
17752 case DW_OP_breg16:
17753 case DW_OP_breg17:
17754 case DW_OP_breg18:
17755 case DW_OP_breg19:
17756 case DW_OP_breg20:
17757 case DW_OP_breg21:
17758 case DW_OP_breg22:
17759 case DW_OP_breg23:
17760 case DW_OP_breg24:
17761 case DW_OP_breg25:
17762 case DW_OP_breg26:
17763 case DW_OP_breg27:
17764 case DW_OP_breg28:
17765 case DW_OP_breg29:
17766 case DW_OP_breg30:
17767 case DW_OP_breg31:
17768 case DW_OP_fbreg:
17769 case DW_OP_push_object_address:
17770 case DW_OP_call_frame_cfa:
17771 case DW_OP_GNU_variable_value:
17772 ++frame_offset_;
17773 break;
17774
17775 case DW_OP_drop:
17776 case DW_OP_xderef:
17777 case DW_OP_and:
17778 case DW_OP_div:
17779 case DW_OP_minus:
17780 case DW_OP_mod:
17781 case DW_OP_mul:
17782 case DW_OP_or:
17783 case DW_OP_plus:
17784 case DW_OP_shl:
17785 case DW_OP_shr:
17786 case DW_OP_shra:
17787 case DW_OP_xor:
17788 case DW_OP_bra:
17789 case DW_OP_eq:
17790 case DW_OP_ge:
17791 case DW_OP_gt:
17792 case DW_OP_le:
17793 case DW_OP_lt:
17794 case DW_OP_ne:
17795 case DW_OP_regx:
17796 case DW_OP_xderef_size:
17797 --frame_offset_;
17798 break;
17799
17800 case DW_OP_call2:
17801 case DW_OP_call4:
17802 case DW_OP_call_ref:
17803 {
17804 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17805 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17806
17807 if (stack_usage == NULL)
17808 return false;
17809 frame_offset_ += *stack_usage;
17810 break;
17811 }
17812
17813 case DW_OP_implicit_pointer:
17814 case DW_OP_entry_value:
17815 case DW_OP_const_type:
17816 case DW_OP_regval_type:
17817 case DW_OP_deref_type:
17818 case DW_OP_convert:
17819 case DW_OP_reinterpret:
17820 case DW_OP_form_tls_address:
17821 case DW_OP_GNU_push_tls_address:
17822 case DW_OP_GNU_uninit:
17823 case DW_OP_GNU_encoded_addr:
17824 case DW_OP_GNU_implicit_pointer:
17825 case DW_OP_GNU_entry_value:
17826 case DW_OP_GNU_const_type:
17827 case DW_OP_GNU_regval_type:
17828 case DW_OP_GNU_deref_type:
17829 case DW_OP_GNU_convert:
17830 case DW_OP_GNU_reinterpret:
17831 case DW_OP_GNU_parameter_ref:
17832 /* loc_list_from_tree will probably not output these operations for
17833 size functions, so assume they will not appear here. */
17834 /* Fall through... */
17835
17836 default:
17837 gcc_unreachable ();
17838 }
17839
17840 /* Now, follow the control flow (except subroutine calls). */
17841 switch (l->dw_loc_opc)
17842 {
17843 case DW_OP_bra:
17844 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17845 frame_offsets))
17846 return false;
17847 /* Fall through. */
17848
17849 case DW_OP_skip:
17850 l = l->dw_loc_oprnd1.v.val_loc;
17851 break;
17852
17853 case DW_OP_stack_value:
17854 return true;
17855
17856 default:
17857 l = l->dw_loc_next;
17858 break;
17859 }
17860 }
17861
17862 return true;
17863 }
17864
17865 /* Make a DFS over operations reachable through LOC (i.e. follow branch
17866 operations) in order to resolve the operand of DW_OP_pick operations that
17867 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
17868 offset *before* LOC is executed. Return whether all relocations were
17869 successful. */
17870
17871 static bool
17872 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17873 struct dwarf_procedure_info *dpi)
17874 {
17875 /* Associate to all visited operations the frame offset *before* evaluating
17876 this operation. */
17877 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
17878
17879 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
17880 frame_offsets);
17881 }
17882
17883 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
17884 Return NULL if it is not possible. */
17885
17886 static dw_die_ref
17887 function_to_dwarf_procedure (tree fndecl)
17888 {
17889 struct loc_descr_context ctx;
17890 struct dwarf_procedure_info dpi;
17891 dw_die_ref dwarf_proc_die;
17892 tree tree_body = DECL_SAVED_TREE (fndecl);
17893 dw_loc_descr_ref loc_body, epilogue;
17894
17895 tree cursor;
17896 unsigned i;
17897
17898 /* Do not generate multiple DWARF procedures for the same function
17899 declaration. */
17900 dwarf_proc_die = lookup_decl_die (fndecl);
17901 if (dwarf_proc_die != NULL)
17902 return dwarf_proc_die;
17903
17904 /* DWARF procedures are available starting with the DWARFv3 standard. */
17905 if (dwarf_version < 3 && dwarf_strict)
17906 return NULL;
17907
17908 /* We handle only functions for which we still have a body, that return a
17909 supported type and that take arguments with supported types. Note that
17910 there is no point translating functions that return nothing. */
17911 if (tree_body == NULL_TREE
17912 || DECL_RESULT (fndecl) == NULL_TREE
17913 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
17914 return NULL;
17915
17916 for (cursor = DECL_ARGUMENTS (fndecl);
17917 cursor != NULL_TREE;
17918 cursor = TREE_CHAIN (cursor))
17919 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
17920 return NULL;
17921
17922 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
17923 if (TREE_CODE (tree_body) != RETURN_EXPR)
17924 return NULL;
17925 tree_body = TREE_OPERAND (tree_body, 0);
17926 if (TREE_CODE (tree_body) != MODIFY_EXPR
17927 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
17928 return NULL;
17929 tree_body = TREE_OPERAND (tree_body, 1);
17930
17931 /* Try to translate the body expression itself. Note that this will probably
17932 cause an infinite recursion if its call graph has a cycle. This is very
17933 unlikely for size functions, however, so don't bother with such things at
17934 the moment. */
17935 ctx.context_type = NULL_TREE;
17936 ctx.base_decl = NULL_TREE;
17937 ctx.dpi = &dpi;
17938 ctx.placeholder_arg = false;
17939 ctx.placeholder_seen = false;
17940 dpi.fndecl = fndecl;
17941 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
17942 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
17943 if (!loc_body)
17944 return NULL;
17945
17946 /* After evaluating all operands in "loc_body", we should still have on the
17947 stack all arguments plus the desired function result (top of the stack).
17948 Generate code in order to keep only the result in our stack frame. */
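/* For instance (sketch for a two-argument procedure): at this point the
   stack is, from the top, <result> <arg0> <arg1>, and the epilogue built
   below is DW_OP_swap, DW_OP_drop, DW_OP_swap, DW_OP_drop, which leaves
   only <result>. */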
17949 epilogue = NULL;
17950 for (i = 0; i < dpi.args_count; ++i)
17951 {
17952 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
17953 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
17954 op_couple->dw_loc_next->dw_loc_next = epilogue;
17955 epilogue = op_couple;
17956 }
17957 add_loc_descr (&loc_body, epilogue);
17958 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
17959 return NULL;
17960
17961 /* Trailing nops from loc_descriptor_from_tree (if any) cannot be removed
17962 because they are considered useful. Now that there is an epilogue, they
17963 no longer are, so give it another try. */
17964 loc_descr_without_nops (loc_body);
17965
17966 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
17967 a DW_TAG_dwarf_procedure, so we may have a conflict here. It's unlikely,
17968 though, given that size functions do not come from source, so they should
17969 not have a dedicated DW_TAG_subprogram DIE. */
17970 dwarf_proc_die
17971 = new_dwarf_proc_die (loc_body, fndecl,
17972 get_context_die (DECL_CONTEXT (fndecl)));
17973
17974 /* The called DWARF procedure consumes one stack slot per argument and
17975 returns one stack slot. */
17976 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
17977
17978 return dwarf_proc_die;
17979 }
17980
17981
17982 /* Generate Dwarf location list representing LOC.
17983 If WANT_ADDRESS is false, an expression computing the value of LOC will be
17984 returned. If WANT_ADDRESS is 1, an expression computing the address of LOC
17985 will be returned. If WANT_ADDRESS is 2, an expression computing an address
17986 usable in a location description will be returned (i.e. DW_OP_reg can be
17987 used to refer to register values).
17988
17989 CONTEXT provides information to customize the location descriptions
17990 generation. Its context_type field specifies what type is implicitly
17991 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
17992 will not be generated.
17993
17994 Its DPI field determines whether we are generating a DWARF expression for a
17995 DWARF procedure, so PARM_DECL references are processed specifically.
17996
17997 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
17998 and dpi fields were null. */
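
/* Illustration (not necessarily the exact opcodes emitted): for a local
   variable that lives in memory at frame offset -8, WANT_ADDRESS == 1 could
   yield something like DW_OP_fbreg -8, whereas WANT_ADDRESS == 0 would
   additionally append DW_OP_deref (or DW_OP_deref_size), as done at the end
   of this function, so that the value itself is computed. */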
17999
18000 static dw_loc_list_ref
18001 loc_list_from_tree_1 (tree loc, int want_address,
18002 struct loc_descr_context *context)
18003 {
18004 dw_loc_descr_ref ret = NULL, ret1 = NULL;
18005 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
18006 int have_address = 0;
18007 enum dwarf_location_atom op;
18008
18009 /* ??? Most of the time we do not take proper care of sign/zero
18010 extending the values. Hopefully this won't be a real
18011 problem... */
18012
18013 if (context != NULL
18014 && context->base_decl == loc
18015 && want_address == 0)
18016 {
18017 if (dwarf_version >= 3 || !dwarf_strict)
18018 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
18019 NULL, 0, NULL, 0, NULL);
18020 else
18021 return NULL;
18022 }
18023
18024 switch (TREE_CODE (loc))
18025 {
18026 case ERROR_MARK:
18027 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
18028 return 0;
18029
18030 case PLACEHOLDER_EXPR:
18031 /* This case involves extracting fields from an object to determine the
18032 position of other fields. It is supposed to appear only as the first
18033 operand of COMPONENT_REF nodes and to reference precisely the type
18034 that the context allows. */
18035 if (context != NULL
18036 && TREE_TYPE (loc) == context->context_type
18037 && want_address >= 1)
18038 {
18039 if (dwarf_version >= 3 || !dwarf_strict)
18040 {
18041 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
18042 have_address = 1;
18043 break;
18044 }
18045 else
18046 return NULL;
18047 }
18048 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
18049 the single argument passed by the consumer. */
18050 else if (context != NULL
18051 && context->placeholder_arg
18052 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
18053 && want_address == 0)
18054 {
18055 ret = new_loc_descr (DW_OP_pick, 0, 0);
18056 ret->frame_offset_rel = 1;
18057 context->placeholder_seen = true;
18058 break;
18059 }
18060 else
18061 expansion_failed (loc, NULL_RTX,
18062 "PLACEHOLDER_EXPR for an unexpected type");
18063 break;
18064
18065 case CALL_EXPR:
18066 {
18067 const int nargs = call_expr_nargs (loc);
18068 tree callee = get_callee_fndecl (loc);
18069 int i;
18070 dw_die_ref dwarf_proc;
18071
18072 if (callee == NULL_TREE)
18073 goto call_expansion_failed;
18074
18075 /* We handle only functions that return an integer. */
18076 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
18077 goto call_expansion_failed;
18078
18079 dwarf_proc = function_to_dwarf_procedure (callee);
18080 if (dwarf_proc == NULL)
18081 goto call_expansion_failed;
18082
18083 /* Evaluate arguments right-to-left so that the first argument will
18084 be the top-most one on the stack. */
18085 for (i = nargs - 1; i >= 0; --i)
18086 {
18087 dw_loc_descr_ref loc_descr
18088 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
18089 context);
18090
18091 if (loc_descr == NULL)
18092 goto call_expansion_failed;
18093
18094 add_loc_descr (&ret, loc_descr);
18095 }
18096
18097 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
18098 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18099 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
18100 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
18101 add_loc_descr (&ret, ret1);
18102 break;
18103
18104 call_expansion_failed:
18105 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
18106 /* We could not translate this call into a DWARF expression. */
18107 return 0;
18108 }
18109
18110 case PREINCREMENT_EXPR:
18111 case PREDECREMENT_EXPR:
18112 case POSTINCREMENT_EXPR:
18113 case POSTDECREMENT_EXPR:
18114 expansion_failed (loc, NULL_RTX, "PRE/POST INCREMENT/DECREMENT");
18115 /* There are no opcodes for these operations. */
18116 return 0;
18117
18118 case ADDR_EXPR:
18119 /* If we already want an address, see if there is INDIRECT_REF inside
18120 e.g. for &this->field. */
18121 if (want_address)
18122 {
18123 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18124 (loc, want_address == 2, context);
18125 if (list_ret)
18126 have_address = 1;
18127 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18128 && (ret = cst_pool_loc_descr (loc)))
18129 have_address = 1;
18130 }
18131 /* Otherwise, process the argument and look for the address. */
18132 if (!list_ret && !ret)
18133 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18134 else
18135 {
18136 if (want_address)
18137 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18138 return NULL;
18139 }
18140 break;
18141
18142 case VAR_DECL:
18143 if (DECL_THREAD_LOCAL_P (loc))
18144 {
18145 rtx rtl;
18146 enum dwarf_location_atom tls_op;
18147 enum dtprel_bool dtprel = dtprel_false;
18148
18149 if (targetm.have_tls)
18150 {
18151 /* If this is not defined, we have no way to emit the
18152 data. */
18153 if (!targetm.asm_out.output_dwarf_dtprel)
18154 return 0;
18155
18156 /* The way DW_OP_GNU_push_tls_address is specified, we
18157 can only look up addresses of objects in the current
18158 module. We used DW_OP_addr as first op, but that's
18159 wrong, because DW_OP_addr is relocated by the debug
18160 info consumer, while DW_OP_GNU_push_tls_address
18161 operand shouldn't be. */
18162 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18163 return 0;
18164 dtprel = dtprel_true;
18165 /* We check for DWARF 5 here because gdb did not implement
18166 DW_OP_form_tls_address until after 7.12. */
18167 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18168 : DW_OP_GNU_push_tls_address);
18169 }
18170 else
18171 {
18172 if (!targetm.emutls.debug_form_tls_address
18173 || !(dwarf_version >= 3 || !dwarf_strict))
18174 return 0;
18175 /* We stuffed the control variable into the DECL_VALUE_EXPR
18176 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18177 no longer appear in gimple code. We used the control
18178 variable in specific so that we could pick it up here. */
18179 loc = DECL_VALUE_EXPR (loc);
18180 tls_op = DW_OP_form_tls_address;
18181 }
18182
18183 rtl = rtl_for_decl_location (loc);
18184 if (rtl == NULL_RTX)
18185 return 0;
18186
18187 if (!MEM_P (rtl))
18188 return 0;
18189 rtl = XEXP (rtl, 0);
18190 if (! CONSTANT_P (rtl))
18191 return 0;
18192
18193 ret = new_addr_loc_descr (rtl, dtprel);
18194 ret1 = new_loc_descr (tls_op, 0, 0);
18195 add_loc_descr (&ret, ret1);
18196
18197 have_address = 1;
18198 break;
18199 }
18200 /* FALLTHRU */
18201
18202 case PARM_DECL:
18203 if (context != NULL && context->dpi != NULL
18204 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18205 {
18206 /* We are generating code for a DWARF procedure and we want to access
18207 one of its arguments: find the appropriate argument offset and let
18208 the resolve_args_picking pass compute the offset that complies
18209 with the stack frame size. */
18210 unsigned i = 0;
18211 tree cursor;
18212
18213 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18214 cursor != NULL_TREE && cursor != loc;
18215 cursor = TREE_CHAIN (cursor), ++i)
18216 ;
18217 /* If we are translating a DWARF procedure, all referenced parameters
18218 must belong to the current function. */
18219 gcc_assert (cursor != NULL_TREE);
18220
18221 ret = new_loc_descr (DW_OP_pick, i, 0);
18222 ret->frame_offset_rel = 1;
18223 break;
18224 }
18225 /* FALLTHRU */
18226
18227 case RESULT_DECL:
18228 if (DECL_HAS_VALUE_EXPR_P (loc))
18229 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18230 want_address, context);
18231 /* FALLTHRU */
18232
18233 case FUNCTION_DECL:
18234 {
18235 rtx rtl;
18236 var_loc_list *loc_list = lookup_decl_loc (loc);
18237
18238 if (loc_list && loc_list->first)
18239 {
18240 list_ret = dw_loc_list (loc_list, loc, want_address);
18241 have_address = want_address != 0;
18242 break;
18243 }
18244 rtl = rtl_for_decl_location (loc);
18245 if (rtl == NULL_RTX)
18246 {
18247 if (TREE_CODE (loc) != FUNCTION_DECL
18248 && early_dwarf
18249 && current_function_decl
18250 && want_address != 1
18251 && ! DECL_IGNORED_P (loc)
18252 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18253 || POINTER_TYPE_P (TREE_TYPE (loc)))
18254 && DECL_CONTEXT (loc) == current_function_decl
18255 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18256 <= DWARF2_ADDR_SIZE))
18257 {
18258 dw_die_ref ref = lookup_decl_die (loc);
18259 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18260 if (ref)
18261 {
18262 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18263 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18264 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18265 }
18266 else
18267 {
18268 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18269 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18270 }
18271 break;
18272 }
18273 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18274 return 0;
18275 }
18276 else if (CONST_INT_P (rtl))
18277 {
18278 HOST_WIDE_INT val = INTVAL (rtl);
18279 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18280 val &= GET_MODE_MASK (DECL_MODE (loc));
18281 ret = int_loc_descriptor (val);
18282 }
18283 else if (GET_CODE (rtl) == CONST_STRING)
18284 {
18285 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18286 return 0;
18287 }
18288 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18289 ret = new_addr_loc_descr (rtl, dtprel_false);
18290 else
18291 {
18292 machine_mode mode, mem_mode;
18293
18294 /* Certain constructs can only be represented at top-level. */
18295 if (want_address == 2)
18296 {
18297 ret = loc_descriptor (rtl, VOIDmode,
18298 VAR_INIT_STATUS_INITIALIZED);
18299 have_address = 1;
18300 }
18301 else
18302 {
18303 mode = GET_MODE (rtl);
18304 mem_mode = VOIDmode;
18305 if (MEM_P (rtl))
18306 {
18307 mem_mode = mode;
18308 mode = get_address_mode (rtl);
18309 rtl = XEXP (rtl, 0);
18310 have_address = 1;
18311 }
18312 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18313 VAR_INIT_STATUS_INITIALIZED);
18314 }
18315 if (!ret)
18316 expansion_failed (loc, rtl,
18317 "failed to produce loc descriptor for rtl");
18318 }
18319 }
18320 break;
18321
18322 case MEM_REF:
18323 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18324 {
18325 have_address = 1;
18326 goto do_plus;
18327 }
18328 /* Fallthru. */
18329 case INDIRECT_REF:
18330 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18331 have_address = 1;
18332 break;
18333
18334 case TARGET_MEM_REF:
18335 case SSA_NAME:
18336 case DEBUG_EXPR_DECL:
18337 return NULL;
18338
18339 case COMPOUND_EXPR:
18340 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18341 context);
18342
18343 CASE_CONVERT:
18344 case VIEW_CONVERT_EXPR:
18345 case SAVE_EXPR:
18346 case MODIFY_EXPR:
18347 case NON_LVALUE_EXPR:
18348 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18349 context);
18350
18351 case COMPONENT_REF:
18352 case BIT_FIELD_REF:
18353 case ARRAY_REF:
18354 case ARRAY_RANGE_REF:
18355 case REALPART_EXPR:
18356 case IMAGPART_EXPR:
18357 {
18358 tree obj, offset;
18359 poly_int64 bitsize, bitpos, bytepos;
18360 machine_mode mode;
18361 int unsignedp, reversep, volatilep = 0;
18362
18363 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18364 &unsignedp, &reversep, &volatilep);
18365
18366 gcc_assert (obj != loc);
18367
18368 list_ret = loc_list_from_tree_1 (obj,
18369 want_address == 2
18370 && known_eq (bitpos, 0)
18371 && !offset ? 2 : 1,
18372 context);
18373 /* TODO: We can extract the value of the small expression via shifting even
18374 for nonzero bitpos. */
18375 if (list_ret == 0)
18376 return 0;
18377 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18378 || !multiple_p (bitsize, BITS_PER_UNIT))
18379 {
18380 expansion_failed (loc, NULL_RTX,
18381 "bitfield access");
18382 return 0;
18383 }
18384
18385 if (offset != NULL_TREE)
18386 {
18387 /* Variable offset. */
18388 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18389 if (list_ret1 == 0)
18390 return 0;
18391 add_loc_list (&list_ret, list_ret1);
18392 if (!list_ret)
18393 return 0;
18394 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18395 }
18396
18397 HOST_WIDE_INT value;
18398 if (bytepos.is_constant (&value) && value > 0)
18399 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18400 value, 0));
18401 else if (maybe_ne (bytepos, 0))
18402 loc_list_plus_const (list_ret, bytepos);
18403
18404 have_address = 1;
18405 break;
18406 }
18407
18408 case INTEGER_CST:
18409 if ((want_address || !tree_fits_shwi_p (loc))
18410 && (ret = cst_pool_loc_descr (loc)))
18411 have_address = 1;
18412 else if (want_address == 2
18413 && tree_fits_shwi_p (loc)
18414 && (ret = address_of_int_loc_descriptor
18415 (int_size_in_bytes (TREE_TYPE (loc)),
18416 tree_to_shwi (loc))))
18417 have_address = 1;
18418 else if (tree_fits_shwi_p (loc))
18419 ret = int_loc_descriptor (tree_to_shwi (loc));
18420 else if (tree_fits_uhwi_p (loc))
18421 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18422 else
18423 {
18424 expansion_failed (loc, NULL_RTX,
18425 "Integer operand is not host integer");
18426 return 0;
18427 }
18428 break;
18429
18430 case CONSTRUCTOR:
18431 case REAL_CST:
18432 case STRING_CST:
18433 case COMPLEX_CST:
18434 if ((ret = cst_pool_loc_descr (loc)))
18435 have_address = 1;
18436 else if (TREE_CODE (loc) == CONSTRUCTOR)
18437 {
18438 tree type = TREE_TYPE (loc);
18439 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18440 unsigned HOST_WIDE_INT offset = 0;
18441 unsigned HOST_WIDE_INT cnt;
18442 constructor_elt *ce;
18443
18444 if (TREE_CODE (type) == RECORD_TYPE)
18445 {
18446 /* This is very limited, but it's enough to output
18447 pointers to member functions, as long as the
18448 referenced function is defined in the current
18449 translation unit. */
18450 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18451 {
18452 tree val = ce->value;
18453
18454 tree field = ce->index;
18455
18456 if (val)
18457 STRIP_NOPS (val);
18458
18459 if (!field || DECL_BIT_FIELD (field))
18460 {
18461 expansion_failed (loc, NULL_RTX,
18462 "bitfield in record type constructor");
18463 size = offset = (unsigned HOST_WIDE_INT)-1;
18464 ret = NULL;
18465 break;
18466 }
18467
18468 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18469 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18470 gcc_assert (pos + fieldsize <= size);
18471 if (pos < offset)
18472 {
18473 expansion_failed (loc, NULL_RTX,
18474 "out-of-order fields in record constructor");
18475 size = offset = (unsigned HOST_WIDE_INT)-1;
18476 ret = NULL;
18477 break;
18478 }
18479 if (pos > offset)
18480 {
18481 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18482 add_loc_descr (&ret, ret1);
18483 offset = pos;
18484 }
18485 if (val && fieldsize != 0)
18486 {
18487 ret1 = loc_descriptor_from_tree (val, want_address, context);
18488 if (!ret1)
18489 {
18490 expansion_failed (loc, NULL_RTX,
18491 "unsupported expression in field");
18492 size = offset = (unsigned HOST_WIDE_INT)-1;
18493 ret = NULL;
18494 break;
18495 }
18496 add_loc_descr (&ret, ret1);
18497 }
18498 if (fieldsize)
18499 {
18500 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18501 add_loc_descr (&ret, ret1);
18502 offset = pos + fieldsize;
18503 }
18504 }
18505
18506 if (offset != size)
18507 {
18508 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18509 add_loc_descr (&ret, ret1);
18510 offset = size;
18511 }
18512
18513 have_address = !!want_address;
18514 }
18515 else
18516 expansion_failed (loc, NULL_RTX,
18517 "constructor of non-record type");
18518 }
18519 else
18520 /* We can construct small constants here using int_loc_descriptor. */
18521 expansion_failed (loc, NULL_RTX,
18522 "constructor or constant not in constant pool");
18523 break;
18524
18525 case TRUTH_AND_EXPR:
18526 case TRUTH_ANDIF_EXPR:
18527 case BIT_AND_EXPR:
18528 op = DW_OP_and;
18529 goto do_binop;
18530
18531 case TRUTH_XOR_EXPR:
18532 case BIT_XOR_EXPR:
18533 op = DW_OP_xor;
18534 goto do_binop;
18535
18536 case TRUTH_OR_EXPR:
18537 case TRUTH_ORIF_EXPR:
18538 case BIT_IOR_EXPR:
18539 op = DW_OP_or;
18540 goto do_binop;
18541
18542 case FLOOR_DIV_EXPR:
18543 case CEIL_DIV_EXPR:
18544 case ROUND_DIV_EXPR:
18545 case TRUNC_DIV_EXPR:
18546 case EXACT_DIV_EXPR:
18547 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18548 return 0;
18549 op = DW_OP_div;
18550 goto do_binop;
18551
18552 case MINUS_EXPR:
18553 op = DW_OP_minus;
18554 goto do_binop;
18555
18556 case FLOOR_MOD_EXPR:
18557 case CEIL_MOD_EXPR:
18558 case ROUND_MOD_EXPR:
18559 case TRUNC_MOD_EXPR:
18560 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18561 {
18562 op = DW_OP_mod;
18563 goto do_binop;
18564 }
18565 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18566 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18567 if (list_ret == 0 || list_ret1 == 0)
18568 return 0;
18569
18570 add_loc_list (&list_ret, list_ret1);
18571 if (list_ret == 0)
18572 return 0;
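/* With the two operands on the stack (the first operand below the second),
   compute the signed remainder as first - (first / second) * second:
   duplicate both operands with two DW_OP_over, divide, multiply the
   quotient by the second operand and subtract the product from the
   first operand. */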
18573 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18574 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18575 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18576 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18577 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
18578 break;
18579
18580 case MULT_EXPR:
18581 op = DW_OP_mul;
18582 goto do_binop;
18583
18584 case LSHIFT_EXPR:
18585 op = DW_OP_shl;
18586 goto do_binop;
18587
18588 case RSHIFT_EXPR:
18589 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18590 goto do_binop;
18591
18592 case POINTER_PLUS_EXPR:
18593 case PLUS_EXPR:
18594 do_plus:
18595 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18596 {
18597 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18598 smarter to encode their opposite. The DW_OP_plus_uconst operation
18599 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18600 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18601 bytes, Y being the size of the operation that pushes the opposite
18602 of the addend. So let's choose the smallest representation. */
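/* For example (sizes are illustrative and assume a 64-bit address size):
   for an addend of -1, DW_OP_plus_uconst would take 1 + 10 bytes (the
   ULEB128 encoding of the sign-extended addend), whereas DW_OP_lit1
   followed by DW_OP_minus takes only 2 bytes. */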
18603 const tree tree_addend = TREE_OPERAND (loc, 1);
18604 offset_int wi_addend;
18605 HOST_WIDE_INT shwi_addend;
18606 dw_loc_descr_ref loc_naddend;
18607
18608 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18609 if (list_ret == 0)
18610 return 0;
18611
18612 /* Try to get the literal to push. It is the opposite of the addend,
18613 so as we rely on wrapping during DWARF evaluation, first decode
18614 the literal as a "DWARF-sized" signed number. */
18615 wi_addend = wi::to_offset (tree_addend);
18616 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18617 shwi_addend = wi_addend.to_shwi ();
18618 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18619 ? int_loc_descriptor (-shwi_addend)
18620 : NULL;
18621
18622 if (loc_naddend != NULL
18623 && ((unsigned) size_of_uleb128 (shwi_addend)
18624 > size_of_loc_descr (loc_naddend)))
18625 {
18626 add_loc_descr_to_each (list_ret, loc_naddend);
18627 add_loc_descr_to_each (list_ret,
18628 new_loc_descr (DW_OP_minus, 0, 0));
18629 }
18630 else
18631 {
18632 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18633 {
18634 loc_naddend = loc_cur;
18635 loc_cur = loc_cur->dw_loc_next;
18636 ggc_free (loc_naddend);
18637 }
18638 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18639 }
18640 break;
18641 }
18642
18643 op = DW_OP_plus;
18644 goto do_binop;
18645
18646 case LE_EXPR:
18647 op = DW_OP_le;
18648 goto do_comp_binop;
18649
18650 case GE_EXPR:
18651 op = DW_OP_ge;
18652 goto do_comp_binop;
18653
18654 case LT_EXPR:
18655 op = DW_OP_lt;
18656 goto do_comp_binop;
18657
18658 case GT_EXPR:
18659 op = DW_OP_gt;
18660 goto do_comp_binop;
18661
18662 do_comp_binop:
18663 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18664 {
18665 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18666 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18667 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18668 TREE_CODE (loc));
18669 break;
18670 }
18671 else
18672 goto do_binop;
18673
18674 case EQ_EXPR:
18675 op = DW_OP_eq;
18676 goto do_binop;
18677
18678 case NE_EXPR:
18679 op = DW_OP_ne;
18680 goto do_binop;
18681
18682 do_binop:
18683 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18684 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18685 if (list_ret == 0 || list_ret1 == 0)
18686 return 0;
18687
18688 add_loc_list (&list_ret, list_ret1);
18689 if (list_ret == 0)
18690 return 0;
18691 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18692 break;
18693
18694 case TRUTH_NOT_EXPR:
18695 case BIT_NOT_EXPR:
18696 op = DW_OP_not;
18697 goto do_unop;
18698
18699 case ABS_EXPR:
18700 op = DW_OP_abs;
18701 goto do_unop;
18702
18703 case NEGATE_EXPR:
18704 op = DW_OP_neg;
18705 goto do_unop;
18706
18707 do_unop:
18708 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18709 if (list_ret == 0)
18710 return 0;
18711
18712 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18713 break;
18714
18715 case MIN_EXPR:
18716 case MAX_EXPR:
18717 {
18718 const enum tree_code code =
18719 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18720
18721 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18722 build2 (code, integer_type_node,
18723 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18724 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18725 }
18726
18727 /* fall through */
18728
18729 case COND_EXPR:
18730 {
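/* The generated layout is: <condition>, DW_OP_bra (branching to the THEN
   value), then the ELSE value followed by DW_OP_skip (jumping to a
   trailing DW_OP_nop), then the THEN value, then the DW_OP_nop the skip
   targets. DW_OP_bra branches when the popped condition is nonzero. */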
18731 dw_loc_descr_ref lhs
18732 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18733 dw_loc_list_ref rhs
18734 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18735 dw_loc_descr_ref bra_node, jump_node, tmp;
18736
18737 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18738 if (list_ret == 0 || lhs == 0 || rhs == 0)
18739 return 0;
18740
18741 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18742 add_loc_descr_to_each (list_ret, bra_node);
18743
18744 add_loc_list (&list_ret, rhs);
18745 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18746 add_loc_descr_to_each (list_ret, jump_node);
18747
18748 add_loc_descr_to_each (list_ret, lhs);
18749 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18750 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18751
18752 /* ??? Need a node to point the skip at. Use a nop. */
18753 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18754 add_loc_descr_to_each (list_ret, tmp);
18755 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18756 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
18757 }
18758 break;
18759
18760 case FIX_TRUNC_EXPR:
18761 return 0;
18762
18763 default:
18764 /* Leave front-end specific codes as simply unknown. This comes
18765 up, for instance, with the C STMT_EXPR. */
18766 if ((unsigned int) TREE_CODE (loc)
18767 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18768 {
18769 expansion_failed (loc, NULL_RTX,
18770 "language specific tree node");
18771 return 0;
18772 }
18773
18774 /* Otherwise this is a generic code; we should just list all of
18775 these explicitly. We forgot one. */
18776 if (flag_checking)
18777 gcc_unreachable ();
18778
18779 /* In a release build, we want to degrade gracefully: better to
18780 generate incomplete debugging information than to crash. */
18781 return NULL;
18782 }
18783
18784 if (!ret && !list_ret)
18785 return 0;
18786
18787 if (want_address == 2 && !have_address
18788 && (dwarf_version >= 4 || !dwarf_strict))
18789 {
18790 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18791 {
18792 expansion_failed (loc, NULL_RTX,
18793 "DWARF address size mismatch");
18794 return 0;
18795 }
18796 if (ret)
18797 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18798 else
18799 add_loc_descr_to_each (list_ret,
18800 new_loc_descr (DW_OP_stack_value, 0, 0));
18801 have_address = 1;
18802 }
18803 /* Show if we can't fill the request for an address. */
18804 if (want_address && !have_address)
18805 {
18806 expansion_failed (loc, NULL_RTX,
18807 "Want address and only have value");
18808 return 0;
18809 }
18810
18811 gcc_assert (!ret || !list_ret);
18812
18813 /* If we've got an address and don't want one, dereference. */
18814 if (!want_address && have_address)
18815 {
18816 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18817
18818 if (size > DWARF2_ADDR_SIZE || size == -1)
18819 {
18820 expansion_failed (loc, NULL_RTX,
18821 "DWARF address size mismatch");
18822 return 0;
18823 }
18824 else if (size == DWARF2_ADDR_SIZE)
18825 op = DW_OP_deref;
18826 else
18827 op = DW_OP_deref_size;
18828
18829 if (ret)
18830 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18831 else
18832 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18833 }
18834 if (ret)
18835 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
18836
18837 return list_ret;
18838 }
18839
18840 /* Likewise, but strip useless DW_OP_nop operations in the resulting
18841 expressions. */
18842
18843 static dw_loc_list_ref
18844 loc_list_from_tree (tree loc, int want_address,
18845 struct loc_descr_context *context)
18846 {
18847 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
18848
18849 for (dw_loc_list_ref loc_cur = result;
18850 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
18851 loc_descr_without_nops (loc_cur->expr);
18852 return result;
18853 }
18854
18855 /* Same as above but return only a single location expression. */
18856 static dw_loc_descr_ref
18857 loc_descriptor_from_tree (tree loc, int want_address,
18858 struct loc_descr_context *context)
18859 {
18860 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
18861 if (!ret)
18862 return NULL;
18863 if (ret->dw_loc_next)
18864 {
18865 expansion_failed (loc, NULL_RTX,
18866 "Location list where only loc descriptor needed");
18867 return NULL;
18868 }
18869 return ret->expr;
18870 }
18871
18872 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
18873 pointer to the declared type for the relevant field variable, or return
18874 `integer_type_node' if the given node turns out to be an
18875 ERROR_MARK node. */
18876
18877 static inline tree
18878 field_type (const_tree decl)
18879 {
18880 tree type;
18881
18882 if (TREE_CODE (decl) == ERROR_MARK)
18883 return integer_type_node;
18884
18885 type = DECL_BIT_FIELD_TYPE (decl);
18886 if (type == NULL_TREE)
18887 type = TREE_TYPE (decl);
18888
18889 return type;
18890 }
18891
18892 /* Given a pointer to a tree node, return the alignment in bits for
18893 it, or else return BITS_PER_WORD if the node actually turns out to
18894 be an ERROR_MARK node. */
18895
18896 static inline unsigned
18897 simple_type_align_in_bits (const_tree type)
18898 {
18899 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
18900 }
18901
18902 static inline unsigned
18903 simple_decl_align_in_bits (const_tree decl)
18904 {
18905 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
18906 }
18907
18908 /* Return the result of rounding T up to ALIGN. */
18909
18910 static inline offset_int
18911 round_up_to_align (const offset_int &t, unsigned int align)
18912 {
18913 return wi::udiv_trunc (t + align - 1, align) * align;
18914 }
18915
18916 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
18917 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
18918 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
18919 if we fail to return the size in one of these two forms. */
18920
18921 static dw_loc_descr_ref
18922 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
18923 {
18924 tree tree_size;
18925 struct loc_descr_context ctx;
18926
18927 /* Return a constant integer in priority, if possible. */
18928 *cst_size = int_size_in_bytes (type);
18929 if (*cst_size != -1)
18930 return NULL;
18931
18932 ctx.context_type = const_cast<tree> (type);
18933 ctx.base_decl = NULL_TREE;
18934 ctx.dpi = NULL;
18935 ctx.placeholder_arg = false;
18936 ctx.placeholder_seen = false;
18937
18938 type = TYPE_MAIN_VARIANT (type);
18939 tree_size = TYPE_SIZE_UNIT (type);
18940 return ((tree_size != NULL_TREE)
18941 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
18942 : NULL);
18943 }
18944
18945 /* Helper structure for RECORD_TYPE processing. */
18946 struct vlr_context
18947 {
18948 /* Root RECORD_TYPE. It is needed to generate data member location
18949 descriptions in variable-length records (VLR), but also to cope with
18950 variants, which are composed of nested structures multiplexed with
18951 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
18952 function processing a FIELD_DECL, it is required to be non-null. */
18953 tree struct_type;
18954 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
18955 QUAL_UNION_TYPE), this holds an expression that computes the offset for
18956 this variant part as part of the root record (in storage units). For
18957 regular records, it must be NULL_TREE. */
18958 tree variant_part_offset;
18959 };
18960
18961 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
18962 addressed byte of the "containing object" for the given FIELD_DECL. If
18963 possible, return a native constant through CST_OFFSET (in which case NULL is
18964 returned); otherwise return a DWARF expression that computes the offset.
18965
18966 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
18967 that offset is, either because the argument turns out to be a pointer to an
18968 ERROR_MARK node, or because the offset expression is too complex for us.
18969
18970 CTX is required: see the comment for VLR_CONTEXT. */
18971
18972 static dw_loc_descr_ref
18973 field_byte_offset (const_tree decl, struct vlr_context *ctx,
18974 HOST_WIDE_INT *cst_offset)
18975 {
18976 tree tree_result;
18977 dw_loc_list_ref loc_result;
18978
18979 *cst_offset = 0;
18980
18981 if (TREE_CODE (decl) == ERROR_MARK)
18982 return NULL;
18983 else
18984 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
18985
18986 /* We cannot handle variable bit offsets at the moment, so abort if it's the
18987 case. */
18988 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
18989 return NULL;
18990
18991 /* We used to handle only constant offsets in all cases. Now, we handle
18992 dynamic byte offsets properly only when the PCC bitfield type layout
18993 doesn't matter. */
18994 if (PCC_BITFIELD_TYPE_MATTERS
18995 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
18996 {
18997 offset_int object_offset_in_bits;
18998 offset_int object_offset_in_bytes;
18999 offset_int bitpos_int;
19000 tree type;
19001 tree field_size_tree;
19002 offset_int deepest_bitpos;
19003 offset_int field_size_in_bits;
19004 unsigned int type_align_in_bits;
19005 unsigned int decl_align_in_bits;
19006 offset_int type_size_in_bits;
19007
19008 bitpos_int = wi::to_offset (bit_position (decl));
19009 type = field_type (decl);
19010 type_size_in_bits = offset_int_type_size_in_bits (type);
19011 type_align_in_bits = simple_type_align_in_bits (type);
19012
19013 field_size_tree = DECL_SIZE (decl);
19014
19015 /* The size could be unspecified if there was an error, or for
19016 a flexible array member. */
19017 if (!field_size_tree)
19018 field_size_tree = bitsize_zero_node;
19019
19020 /* If the size of the field is not constant, use the type size. */
19021 if (TREE_CODE (field_size_tree) == INTEGER_CST)
19022 field_size_in_bits = wi::to_offset (field_size_tree);
19023 else
19024 field_size_in_bits = type_size_in_bits;
19025
19026 decl_align_in_bits = simple_decl_align_in_bits (decl);
19027
19028 /* The GCC front-end doesn't make any attempt to keep track of the
19029 starting bit offset (relative to the start of the containing
19030 structure type) of the hypothetical "containing object" for a
19031 bit-field. Thus, when computing the byte offset value for the
19032 start of the "containing object" of a bit-field, we must deduce
19033 this information on our own. This can be rather tricky to do in
19034 some cases. For example, handling the following structure type
19035 definition when compiling for an i386/i486 target (which only
19036 aligns long long's to 32-bit boundaries) can be very tricky:
19037
19038 struct S { int field1; long long field2:31; };
19039
19040 Fortunately, there is a simple rule-of-thumb which can be used
19041 in such cases. When compiling for an i386/i486, GCC will
19042 allocate 8 bytes for the structure shown above. It decides to
19043 do this based upon one simple rule for bit-field allocation.
19044 GCC allocates each "containing object" for each bit-field at
19045 the first (i.e. lowest addressed) legitimate alignment boundary
19046 (based upon the required minimum alignment for the declared
19047 type of the field) which it can possibly use, subject to the
19048 condition that there is still enough available space remaining
19049 in the containing object (when allocated at the selected point)
19050 to fully accommodate all of the bits of the bit-field itself.
19051
19052 This simple rule makes it obvious why GCC allocates 8 bytes for
19053 each object of the structure type shown above. When looking
19054 for a place to allocate the "containing object" for `field2',
19055 the compiler simply tries to allocate a 64-bit "containing
19056 object" at each successive 32-bit boundary (starting at zero)
19057 until it finds a place to allocate that 64-bit field such that
19058 at least 31 contiguous (and previously unallocated) bits remain
19059 within that selected 64 bit field. (As it turns out, for the
19060 example above, the compiler finds it is OK to allocate the
19061 "containing object" 64-bit field at bit-offset zero within the
19062 structure type.)
19063
19064 Here we attempt to work backwards from the limited set of facts
19065 we're given, and we try to deduce from those facts, where GCC
19066 must have believed that the containing object started (within
19067 the structure type). The value we deduce is then used (by the
19068 callers of this routine) to generate DW_AT_location and
19069 DW_AT_bit_offset attributes for fields (both bit-fields and, in
19070 the case of DW_AT_location, regular fields as well). */
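/* Worked example for the struct S above on such an i386/i486 target
   (numbers for illustration): bit_position (field2) is 32 and its size is
   31 bits, so deepest_bitpos is 63; subtracting the 64-bit type size gives
   -1, and rounding that up to the 32-bit alignment of the type yields an
   object offset of 0 bits, i.e. the containing 64-bit object starts at
   byte 0 of the structure, as claimed above. */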
19071
19072 /* Figure out the bit-distance from the start of the structure to
19073 the "deepest" bit of the bit-field. */
19074 deepest_bitpos = bitpos_int + field_size_in_bits;
19075
19076 /* This is the tricky part. Use some fancy footwork to deduce
19077 where the lowest addressed bit of the containing object must
19078 be. */
19079 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19080
19081 /* Round up to type_align by default. This works best for
19082 bitfields. */
19083 object_offset_in_bits
19084 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
19085
19086 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
19087 {
19088 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19089
19090 /* Round up to decl_align instead. */
19091 object_offset_in_bits
19092 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
19093 }
19094
19095 object_offset_in_bytes
19096 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
19097 if (ctx->variant_part_offset == NULL_TREE)
19098 {
19099 *cst_offset = object_offset_in_bytes.to_shwi ();
19100 return NULL;
19101 }
19102 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
19103 }
19104 else
19105 tree_result = byte_position (decl);
19106
19107 if (ctx->variant_part_offset != NULL_TREE)
19108 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19109 ctx->variant_part_offset, tree_result);
19110
19111 /* If the byte offset is a constant, it's simpler to handle a native
19112 constant rather than a DWARF expression. */
19113 if (TREE_CODE (tree_result) == INTEGER_CST)
19114 {
19115 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19116 return NULL;
19117 }
19118 struct loc_descr_context loc_ctx = {
19119 ctx->struct_type, /* context_type */
19120 NULL_TREE, /* base_decl */
19121 NULL, /* dpi */
19122 false, /* placeholder_arg */
19123 false /* placeholder_seen */
19124 };
19125 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19126
19127 /* We want a DWARF expression: abort if we only have a location list with
19128 multiple elements. */
19129 if (!loc_result || !single_element_loc_list_p (loc_result))
19130 return NULL;
19131 else
19132 return loc_result->expr;
19133 }
19134 \f
19135 /* The following routines define various Dwarf attributes and any data
19136 associated with them. */
19137
19138 /* Add a location description attribute value to a DIE.
19139
19140 This emits location attributes suitable for whole variables and
19141 whole parameters. Note that the location attributes for struct fields are
19142 generated by the routine `data_member_location_attribute' below. */
19143
19144 static inline void
19145 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19146 dw_loc_list_ref descr)
19147 {
19148 bool check_no_locviews = true;
19149 if (descr == 0)
19150 return;
19151 if (single_element_loc_list_p (descr))
19152 add_AT_loc (die, attr_kind, descr->expr);
19153 else
19154 {
19155 add_AT_loc_list (die, attr_kind, descr);
19156 gcc_assert (descr->ll_symbol);
19157 if (attr_kind == DW_AT_location && descr->vl_symbol
19158 && dwarf2out_locviews_in_attribute ())
19159 {
19160 add_AT_view_list (die, DW_AT_GNU_locviews);
19161 check_no_locviews = false;
19162 }
19163 }
19164
19165 if (check_no_locviews)
19166 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19167 }
19168
19169 /* Add DW_AT_accessibility attribute to DIE if needed. */
19170
19171 static void
19172 add_accessibility_attribute (dw_die_ref die, tree decl)
19173 {
19174 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19175 children, otherwise the default is DW_ACCESS_public. In DWARF2
19176 the default has always been DW_ACCESS_public. */
19177 if (TREE_PROTECTED (decl))
19178 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19179 else if (TREE_PRIVATE (decl))
19180 {
19181 if (dwarf_version == 2
19182 || die->die_parent == NULL
19183 || die->die_parent->die_tag != DW_TAG_class_type)
19184 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19185 }
19186 else if (dwarf_version > 2
19187 && die->die_parent
19188 && die->die_parent->die_tag == DW_TAG_class_type)
19189 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19190 }
19191
19192 /* Attach the specialized form of location attribute used for data members of
19193 struct and union types. In the special case of a FIELD_DECL node which
19194 represents a bit-field, the "offset" part of this special location
19195 descriptor must indicate the distance in bytes from the lowest-addressed
19196 byte of the containing struct or union type to the lowest-addressed byte of
19197 the "containing object" for the bit-field. (See the `field_byte_offset'
19198 function above).
19199
19200 For any given bit-field, the "containing object" is a hypothetical object
19201 (of some integral or enum type) within which the given bit-field lives. The
19202 type of this hypothetical "containing object" is always the same as the
19203 declared type of the individual bit-field itself (for GCC anyway... the
19204 DWARF spec doesn't actually mandate this). Note that it is the size (in
19205 bytes) of the hypothetical "containing object" which will be given in the
19206 DW_AT_byte_size attribute for this bit-field. (See the
19207 `byte_size_attribute' function below.) It is also used when calculating the
19208 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
19209 function below.)
19210
19211 CTX is required: see the comment for VLR_CONTEXT. */
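 /* A small illustration (assuming a typical layout, not something computed
    here): given

      struct s { unsigned int a : 3; unsigned int b : 5; };

    both A and B live in one hypothetical `unsigned int' containing object.
    DW_AT_data_member_location gives that object's byte offset within S
    (0 here), DW_AT_byte_size describes the unsigned int, and
    DW_AT_bit_offset/DW_AT_data_bit_offset place each field's bits within
    it. */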
19212
19213 static void
19214 add_data_member_location_attribute (dw_die_ref die,
19215 tree decl,
19216 struct vlr_context *ctx)
19217 {
19218 HOST_WIDE_INT offset;
19219 dw_loc_descr_ref loc_descr = 0;
19220
19221 if (TREE_CODE (decl) == TREE_BINFO)
19222 {
19223 /* We're working on the TAG_inheritance for a base class. */
19224 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19225 {
19226 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19227 aren't at a fixed offset from all (sub)objects of the same
19228 type. We need to extract the appropriate offset from our
19229 vtable. The following dwarf expression means
19230
19231 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19232
19233 This is specific to the V3 ABI, of course. */
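 /* Spelled out as DWARF operators (an approximation; the exact constant
    operator is whatever int_loc_descriptor picks), the code below builds

      DW_OP_dup, DW_OP_deref, <push -OFFSET>, DW_OP_minus,
      DW_OP_deref, DW_OP_plus

    with the object address assumed to be on top of the DWARF stack on
    entry. */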
19234
19235 dw_loc_descr_ref tmp;
19236
19237 /* Make a copy of the object address. */
19238 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19239 add_loc_descr (&loc_descr, tmp);
19240
19241 /* Extract the vtable address. */
19242 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19243 add_loc_descr (&loc_descr, tmp);
19244
19245 /* Calculate the address of the offset. */
19246 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19247 gcc_assert (offset < 0);
19248
19249 tmp = int_loc_descriptor (-offset);
19250 add_loc_descr (&loc_descr, tmp);
19251 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19252 add_loc_descr (&loc_descr, tmp);
19253
19254 /* Extract the offset. */
19255 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19256 add_loc_descr (&loc_descr, tmp);
19257
19258 /* Add it to the object address. */
19259 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19260 add_loc_descr (&loc_descr, tmp);
19261 }
19262 else
19263 offset = tree_to_shwi (BINFO_OFFSET (decl));
19264 }
19265 else
19266 {
19267 loc_descr = field_byte_offset (decl, ctx, &offset);
19268
19269 /* If loc_descr is available then we know the field offset is dynamic.
19270 However, GDB does not handle dynamic field offsets very well at the
19271 moment. */
19272 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19273 {
19274 loc_descr = NULL;
19275 offset = 0;
19276 }
19277
19278 /* Data member location evaluation starts with the base address on the
19279 stack. Compute the field offset and add it to this base address. */
19280 else if (loc_descr != NULL)
19281 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19282 }
19283
19284 if (! loc_descr)
19285 {
19286 /* While DW_AT_data_bit_offset was already added in DWARF4, GDB, for
19287 example, only added support for it in November 2016. For DWARF5
19288 we need newer debug info consumers anyway. We might change this
19289 to dwarf_version >= 4 once most consumers have caught up. */
19290 if (dwarf_version >= 5
19291 && TREE_CODE (decl) == FIELD_DECL
19292 && DECL_BIT_FIELD_TYPE (decl))
19293 {
19294 tree off = bit_position (decl);
19295 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19296 {
19297 remove_AT (die, DW_AT_byte_size);
19298 remove_AT (die, DW_AT_bit_offset);
19299 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19300 return;
19301 }
19302 }
19303 if (dwarf_version > 2)
19304 {
19305 /* Don't need to output a location expression, just the constant. */
19306 if (offset < 0)
19307 add_AT_int (die, DW_AT_data_member_location, offset);
19308 else
19309 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19310 return;
19311 }
19312 else
19313 {
19314 enum dwarf_location_atom op;
19315
19316 /* The DWARF2 standard says that we should assume that the structure
19317 address is already on the stack, so we can specify a structure
19318 field address by using DW_OP_plus_uconst. */
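 /* E.g. a member at byte offset 8 would get the single-operator
    expression DW_OP_plus_uconst 8 from the code below. */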
19319 op = DW_OP_plus_uconst;
19320 loc_descr = new_loc_descr (op, offset, 0);
19321 }
19322 }
19323
19324 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19325 }
19326
19327 /* Writes integer values to dw_vec_const array. */
19328
19329 static void
19330 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19331 {
19332 while (size != 0)
19333 {
19334 *dest++ = val & 0xff;
19335 val >>= 8;
19336 --size;
19337 }
19338 }
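 /* Example (illustrative only): insert_int (0x1234, 2, buf) stores
    buf[0] = 0x34 and buf[1] = 0x12; the least significant byte always goes
    first, and extract_int below reverses the operation. */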
19339
19340 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19341
19342 static HOST_WIDE_INT
19343 extract_int (const unsigned char *src, unsigned int size)
19344 {
19345 HOST_WIDE_INT val = 0;
19346
19347 src += size;
19348 while (size != 0)
19349 {
19350 val <<= 8;
19351 val |= *--src & 0xff;
19352 --size;
19353 }
19354 return val;
19355 }
19356
19357 /* Writes wide_int values to dw_vec_const array. */
19358
19359 static void
19360 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19361 {
19362 int i;
19363
19364 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
19365 {
19366 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19367 return;
19368 }
19369
19370 /* We'd have to extend this code to support odd sizes. */
19371 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19372
19373 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19374
19375 if (WORDS_BIG_ENDIAN)
19376 for (i = n - 1; i >= 0; i--)
19377 {
19378 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19379 dest += sizeof (HOST_WIDE_INT);
19380 }
19381 else
19382 for (i = 0; i < n; i++)
19383 {
19384 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19385 dest += sizeof (HOST_WIDE_INT);
19386 }
19387 }
19388
19389 /* Writes floating point values to dw_vec_const array. */
19390
19391 static void
19392 insert_float (const_rtx rtl, unsigned char *array)
19393 {
19394 long val[4];
19395 int i;
19396 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19397
19398 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19399
19400 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19401 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19402 {
19403 insert_int (val[i], 4, array);
19404 array += 4;
19405 }
19406 }
19407
19408 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19409 does not have a "location" either in memory or in a register. These
19410 things can arise in GNU C when a constant is passed as an actual parameter
19411 to an inlined function. They can also arise in C++ where declared
19412 constants do not necessarily get memory "homes". */
19413
19414 static bool
19415 add_const_value_attribute (dw_die_ref die, rtx rtl)
19416 {
19417 switch (GET_CODE (rtl))
19418 {
19419 case CONST_INT:
19420 {
19421 HOST_WIDE_INT val = INTVAL (rtl);
19422
19423 if (val < 0)
19424 add_AT_int (die, DW_AT_const_value, val);
19425 else
19426 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19427 }
19428 return true;
19429
19430 case CONST_WIDE_INT:
19431 {
19432 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19433 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19434 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19435 wide_int w = wi::zext (w1, prec);
19436 add_AT_wide (die, DW_AT_const_value, w);
19437 }
19438 return true;
19439
19440 case CONST_DOUBLE:
19441 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19442 floating-point constant. A CONST_DOUBLE is used whenever the
19443 constant requires more than one word in order to be adequately
19444 represented. */
19445 if (TARGET_SUPPORTS_WIDE_INT == 0
19446 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19447 add_AT_double (die, DW_AT_const_value,
19448 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19449 else
19450 {
19451 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19452 unsigned int length = GET_MODE_SIZE (mode);
19453 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19454
19455 insert_float (rtl, array);
19456 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19457 }
19458 return true;
19459
19460 case CONST_VECTOR:
19461 {
19462 unsigned int length;
19463 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19464 return false;
19465
19466 machine_mode mode = GET_MODE (rtl);
19467 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19468 unsigned char *array
19469 = ggc_vec_alloc<unsigned char> (length * elt_size);
19470 unsigned int i;
19471 unsigned char *p;
19472 machine_mode imode = GET_MODE_INNER (mode);
19473
19474 switch (GET_MODE_CLASS (mode))
19475 {
19476 case MODE_VECTOR_INT:
19477 for (i = 0, p = array; i < length; i++, p += elt_size)
19478 {
19479 rtx elt = CONST_VECTOR_ELT (rtl, i);
19480 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19481 }
19482 break;
19483
19484 case MODE_VECTOR_FLOAT:
19485 for (i = 0, p = array; i < length; i++, p += elt_size)
19486 {
19487 rtx elt = CONST_VECTOR_ELT (rtl, i);
19488 insert_float (elt, p);
19489 }
19490 break;
19491
19492 default:
19493 gcc_unreachable ();
19494 }
19495
19496 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19497 }
19498 return true;
19499
19500 case CONST_STRING:
19501 if (dwarf_version >= 4 || !dwarf_strict)
19502 {
19503 dw_loc_descr_ref loc_result;
19504 resolve_one_addr (&rtl);
19505 rtl_addr:
19506 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19507 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19508 add_AT_loc (die, DW_AT_location, loc_result);
19509 vec_safe_push (used_rtx_array, rtl);
19510 return true;
19511 }
19512 return false;
19513
19514 case CONST:
19515 if (CONSTANT_P (XEXP (rtl, 0)))
19516 return add_const_value_attribute (die, XEXP (rtl, 0));
19517 /* FALLTHROUGH */
19518 case SYMBOL_REF:
19519 if (!const_ok_for_output (rtl))
19520 return false;
19521 /* FALLTHROUGH */
19522 case LABEL_REF:
19523 if (dwarf_version >= 4 || !dwarf_strict)
19524 goto rtl_addr;
19525 return false;
19526
19527 case PLUS:
19528 /* In cases where an inlined instance of an inline function is passed
19529 the address of an `auto' variable (which is local to the caller) we
19530 can get a situation where the DECL_RTL of the artificial local
19531 variable (for the inlining) which acts as a stand-in for the
19532 corresponding formal parameter (of the inline function) will look
19533 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19534 exactly a compile-time constant expression, but it isn't the address
19535 of the (artificial) local variable either. Rather, it represents the
19536 *value* which the artificial local variable always has during its
19537 lifetime. We currently have no way to represent such quasi-constant
19538 values in Dwarf, so for now we just punt and generate nothing. */
19539 return false;
19540
19541 case HIGH:
19542 case CONST_FIXED:
19543 return false;
19544
19545 case MEM:
19546 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19547 && MEM_READONLY_P (rtl)
19548 && GET_MODE (rtl) == BLKmode)
19549 {
19550 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19551 return true;
19552 }
19553 return false;
19554
19555 default:
19556 /* No other kinds of rtx should be possible here. */
19557 gcc_unreachable ();
19558 }
19559 return false;
19560 }
19561
19562 /* Determine whether the evaluation of EXPR references any variables
19563 or functions which aren't otherwise used (and therefore may not be
19564 output). */
19565 static tree
19566 reference_to_unused (tree * tp, int * walk_subtrees,
19567 void * data ATTRIBUTE_UNUSED)
19568 {
19569 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19570 *walk_subtrees = 0;
19571
19572 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19573 && ! TREE_ASM_WRITTEN (*tp))
19574 return *tp;
19575 /* ??? The C++ FE emits debug information for using decls, so
19576 putting gcc_unreachable here falls over. See PR31899. For now
19577 be conservative. */
19578 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
19579 return *tp;
19580 else if (VAR_P (*tp))
19581 {
19582 varpool_node *node = varpool_node::get (*tp);
19583 if (!node || !node->definition)
19584 return *tp;
19585 }
19586 else if (TREE_CODE (*tp) == FUNCTION_DECL
19587 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19588 {
19589 /* The call graph machinery must have finished analyzing,
19590 optimizing and gimplifying the CU by now.
19591 So if *TP has no call graph node associated
19592 with it, it means *TP will not be emitted. */
19593 if (!cgraph_node::get (*tp))
19594 return *tp;
19595 }
19596 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19597 return *tp;
19598
19599 return NULL_TREE;
19600 }
19601
19602 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19603 for use in a later add_const_value_attribute call. */
19604
19605 static rtx
19606 rtl_for_decl_init (tree init, tree type)
19607 {
19608 rtx rtl = NULL_RTX;
19609
19610 STRIP_NOPS (init);
19611
19612 /* If a variable is initialized with a string constant without embedded
19613 zeros, build CONST_STRING. */
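 /* For instance (illustrative only), for

      static const char greeting[] = "hi";

    the checks below match and we hand back a read-only BLKmode MEM wrapping
    a CONST_STRING, which add_const_value_attribute can later turn into a
    DW_AT_const_value string. */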
19614 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19615 {
19616 tree enttype = TREE_TYPE (type);
19617 tree domain = TYPE_DOMAIN (type);
19618 scalar_int_mode mode;
19619
19620 if (is_int_mode (TYPE_MODE (enttype), &mode)
19621 && GET_MODE_SIZE (mode) == 1
19622 && domain
19623 && TYPE_MAX_VALUE (domain)
19624 && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
19625 && integer_zerop (TYPE_MIN_VALUE (domain))
19626 && compare_tree_int (TYPE_MAX_VALUE (domain),
19627 TREE_STRING_LENGTH (init) - 1) == 0
19628 && ((size_t) TREE_STRING_LENGTH (init)
19629 == strlen (TREE_STRING_POINTER (init)) + 1))
19630 {
19631 rtl = gen_rtx_CONST_STRING (VOIDmode,
19632 ggc_strdup (TREE_STRING_POINTER (init)));
19633 rtl = gen_rtx_MEM (BLKmode, rtl);
19634 MEM_READONLY_P (rtl) = 1;
19635 }
19636 }
19637 /* Other aggregates, and complex values, could be represented using
19638 CONCAT: FIXME! */
19639 else if (AGGREGATE_TYPE_P (type)
19640 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19641 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19642 || TREE_CODE (type) == COMPLEX_TYPE)
19643 ;
19644 /* Vectors only work if their mode is supported by the target.
19645 FIXME: generic vectors ought to work too. */
19646 else if (TREE_CODE (type) == VECTOR_TYPE
19647 && !VECTOR_MODE_P (TYPE_MODE (type)))
19648 ;
19649 /* If the initializer is something that we know will expand into an
19650 immediate RTL constant, expand it now. We must be careful not to
19651 reference variables which won't be output. */
19652 else if (initializer_constant_valid_p (init, type)
19653 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19654 {
19655 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19656 possible. */
19657 if (TREE_CODE (type) == VECTOR_TYPE)
19658 switch (TREE_CODE (init))
19659 {
19660 case VECTOR_CST:
19661 break;
19662 case CONSTRUCTOR:
19663 if (TREE_CONSTANT (init))
19664 {
19665 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19666 bool constant_p = true;
19667 tree value;
19668 unsigned HOST_WIDE_INT ix;
19669
19670 /* Even when ctor is constant, it might contain non-*_CST
19671 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19672 belong in VECTOR_CST nodes. */
19673 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19674 if (!CONSTANT_CLASS_P (value))
19675 {
19676 constant_p = false;
19677 break;
19678 }
19679
19680 if (constant_p)
19681 {
19682 init = build_vector_from_ctor (type, elts);
19683 break;
19684 }
19685 }
19686 /* FALLTHRU */
19687
19688 default:
19689 return NULL;
19690 }
19691
19692 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19693
19694 /* If expand_expr returns a MEM, it wasn't immediate. */
19695 gcc_assert (!rtl || !MEM_P (rtl));
19696 }
19697
19698 return rtl;
19699 }
19700
19701 /* Generate RTL for the variable DECL to represent its location. */
19702
19703 static rtx
19704 rtl_for_decl_location (tree decl)
19705 {
19706 rtx rtl;
19707
19708 /* Here we have to decide where we are going to say the parameter "lives"
19709 (as far as the debugger is concerned). We only have a couple of
19710 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19711
19712 DECL_RTL normally indicates where the parameter lives during most of the
19713 activation of the function. If optimization is enabled however, this
19714 could be either NULL or else a pseudo-reg. Both of those cases indicate
19715 that the parameter doesn't really live anywhere (as far as the code
19716 generation parts of GCC are concerned) during most of the function's
19717 activation. That will happen (for example) if the parameter is never
19718 referenced within the function.
19719
19720 We could just generate a location descriptor here for all non-NULL
19721 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19722 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19723 where DECL_RTL is NULL or is a pseudo-reg.
19724
19725 Note however that we can only get away with using DECL_INCOMING_RTL as
19726 a backup substitute for DECL_RTL in certain limited cases. In cases
19727 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19728 we can be sure that the parameter was passed using the same type as it is
19729 declared to have within the function, and that its DECL_INCOMING_RTL
19730 points us to a place where a value of that type is passed.
19731
19732 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19733 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19734 because in these cases DECL_INCOMING_RTL points us to a value of some
19735 type which is *different* from the type of the parameter itself. Thus,
19736 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19737 such cases, the debugger would end up (for example) trying to fetch a
19738 `float' from a place which actually contains the first part of a
19739 `double'. That would lead to really incorrect and confusing
19740 output at debug-time.
19741
19742 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19743 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19744 are a couple of exceptions however. On little-endian machines we can
19745 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19746 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19747 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19748 when (on a little-endian machine) a non-prototyped function has a
19749 parameter declared to be of type `short' or `char'. In such cases,
19750 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19751 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19752 passed `int' value. If the debugger then uses that address to fetch
19753 a `short' or a `char' (on a little-endian machine) the result will be
19754 the correct data, so we allow for such exceptional cases below.
19755
19756 Note that our goal here is to describe the place where the given formal
19757 parameter lives during most of the function's activation (i.e. between the
19758 end of the prologue and the start of the epilogue). We'll do that as best
19759 as we can. Note however that if the given formal parameter is modified
19760 sometime during the execution of the function, then a stack backtrace (at
19761 debug-time) will show the function as having been called with the *new*
19762 value rather than the value which was originally passed in. This happens
19763 rarely enough that it is not a major problem, but it *is* a problem, and
19764 I'd like to fix it.
19765
19766 A future version of dwarf2out.c may generate two additional attributes for
19767 any given DW_TAG_formal_parameter DIE which will describe the "passed
19768 type" and the "passed location" for the given formal parameter in addition
19769 to the attributes we now generate to indicate the "declared type" and the
19770 "active location" for each parameter. This additional set of attributes
19771 could be used by debuggers for stack backtraces. Separately, note that
19772 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19773 This happens (for example) for inlined instances of inline function
19774 parameters which are never referenced. This really shouldn't be
19775 happening. All PARM_DECL nodes should get valid non-NULL
19776 DECL_INCOMING_RTL values. FIXME. */
19777
19778 /* Use DECL_RTL as the "location" unless we find something better. */
19779 rtl = DECL_RTL_IF_SET (decl);
19780
19781 /* When generating abstract instances, ignore everything except
19782 constants, symbols living in memory, and symbols living in
19783 fixed registers. */
19784 if (! reload_completed)
19785 {
19786 if (rtl
19787 && (CONSTANT_P (rtl)
19788 || (MEM_P (rtl)
19789 && CONSTANT_P (XEXP (rtl, 0)))
19790 || (REG_P (rtl)
19791 && VAR_P (decl)
19792 && TREE_STATIC (decl))))
19793 {
19794 rtl = targetm.delegitimize_address (rtl);
19795 return rtl;
19796 }
19797 rtl = NULL_RTX;
19798 }
19799 else if (TREE_CODE (decl) == PARM_DECL)
19800 {
19801 if (rtl == NULL_RTX
19802 || is_pseudo_reg (rtl)
19803 || (MEM_P (rtl)
19804 && is_pseudo_reg (XEXP (rtl, 0))
19805 && DECL_INCOMING_RTL (decl)
19806 && MEM_P (DECL_INCOMING_RTL (decl))
19807 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19808 {
19809 tree declared_type = TREE_TYPE (decl);
19810 tree passed_type = DECL_ARG_TYPE (decl);
19811 machine_mode dmode = TYPE_MODE (declared_type);
19812 machine_mode pmode = TYPE_MODE (passed_type);
19813
19814 /* This decl represents a formal parameter which was optimized out.
19815 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19816 all cases where (rtl == NULL_RTX) just below. */
19817 if (dmode == pmode)
19818 rtl = DECL_INCOMING_RTL (decl);
19819 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19820 && SCALAR_INT_MODE_P (dmode)
19821 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
19822 && DECL_INCOMING_RTL (decl))
19823 {
19824 rtx inc = DECL_INCOMING_RTL (decl);
19825 if (REG_P (inc))
19826 rtl = inc;
19827 else if (MEM_P (inc))
19828 {
19829 if (BYTES_BIG_ENDIAN)
19830 rtl = adjust_address_nv (inc, dmode,
19831 GET_MODE_SIZE (pmode)
19832 - GET_MODE_SIZE (dmode));
19833 else
19834 rtl = inc;
19835 }
19836 }
19837 }
19838
19839 /* If the parm was passed in registers, but lives on the stack, then
19840 make a big endian correction if the mode of the type of the
19841 parameter is not the same as the mode of the rtl. */
19842 /* ??? This is the same series of checks that are made in dbxout.c before
19843 we reach the big endian correction code there. It isn't clear if all
19844 of these checks are necessary here, but keeping them all is the safe
19845 thing to do. */
19846 else if (MEM_P (rtl)
19847 && XEXP (rtl, 0) != const0_rtx
19848 && ! CONSTANT_P (XEXP (rtl, 0))
19849 /* Not passed in memory. */
19850 && !MEM_P (DECL_INCOMING_RTL (decl))
19851 /* Not passed by invisible reference. */
19852 && (!REG_P (XEXP (rtl, 0))
19853 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
19854 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
19855 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
19856 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
19857 #endif
19858 )
19859 /* Big endian correction check. */
19860 && BYTES_BIG_ENDIAN
19861 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
19862 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
19863 UNITS_PER_WORD))
19864 {
19865 machine_mode addr_mode = get_address_mode (rtl);
19866 poly_int64 offset = (UNITS_PER_WORD
19867 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
19868
19869 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19870 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19871 }
19872 }
19873 else if (VAR_P (decl)
19874 && rtl
19875 && MEM_P (rtl)
19876 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
19877 {
19878 machine_mode addr_mode = get_address_mode (rtl);
19879 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
19880 GET_MODE (rtl));
19881
19882 /* If a variable is declared "register" yet is smaller than
19883 a register, then if we store the variable to memory, it
19884 looks like we're storing a register-sized value, when in
19885 fact we are not. We need to adjust the offset of the
19886 storage location to reflect the actual value's bytes,
19887 else gdb will not be able to display it. */
19888 if (maybe_ne (offset, 0))
19889 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19890 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19891 }
19892
19893 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
19894 and will have been substituted directly into all expressions that use it.
19895 C does not have such a concept, but C++ and other languages do. */
19896 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
19897 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
19898
19899 if (rtl)
19900 rtl = targetm.delegitimize_address (rtl);
19901
19902 /* If we don't look past the constant pool, we risk emitting a
19903 reference to a constant pool entry that isn't referenced from
19904 code, and thus is not emitted. */
19905 if (rtl)
19906 rtl = avoid_constant_pool_reference (rtl);
19907
19908 /* Try harder to get a rtl. If this symbol ends up not being emitted
19909 in the current CU, resolve_addr will remove the expression referencing
19910 it. */
19911 if (rtl == NULL_RTX
19912 && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
19913 && VAR_P (decl)
19914 && !DECL_EXTERNAL (decl)
19915 && TREE_STATIC (decl)
19916 && DECL_NAME (decl)
19917 && !DECL_HARD_REGISTER (decl)
19918 && DECL_MODE (decl) != VOIDmode)
19919 {
19920 rtl = make_decl_rtl_for_debug (decl);
19921 if (!MEM_P (rtl)
19922 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
19923 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
19924 rtl = NULL_RTX;
19925 }
19926
19927 return rtl;
19928 }
19929
19930 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
19931 returned. If so, the decl for the COMMON block is returned, and the
19932 value is the offset into the common block for the symbol. */
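 /* Illustration (hedged): for a symbol Y coming from a Fortran
    "COMMON /blk/ x, y" declaration, this returns the decl for the BLK
    common block and sets *VALUE to Y's byte offset within that block. */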
19933
19934 static tree
19935 fortran_common (tree decl, HOST_WIDE_INT *value)
19936 {
19937 tree val_expr, cvar;
19938 machine_mode mode;
19939 poly_int64 bitsize, bitpos;
19940 tree offset;
19941 HOST_WIDE_INT cbitpos;
19942 int unsignedp, reversep, volatilep = 0;
19943
19944 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
19945 it does not have a value (the offset into the common area), or if it
19946 is thread local (as opposed to global) then it isn't common, and shouldn't
19947 be handled as such. */
19948 if (!VAR_P (decl)
19949 || !TREE_STATIC (decl)
19950 || !DECL_HAS_VALUE_EXPR_P (decl)
19951 || !is_fortran ())
19952 return NULL_TREE;
19953
19954 val_expr = DECL_VALUE_EXPR (decl);
19955 if (TREE_CODE (val_expr) != COMPONENT_REF)
19956 return NULL_TREE;
19957
19958 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
19959 &unsignedp, &reversep, &volatilep);
19960
19961 if (cvar == NULL_TREE
19962 || !VAR_P (cvar)
19963 || DECL_ARTIFICIAL (cvar)
19964 || !TREE_PUBLIC (cvar)
19965 /* We don't expect to have to cope with variable offsets,
19966 since at present all static data must have a constant size. */
19967 || !bitpos.is_constant (&cbitpos))
19968 return NULL_TREE;
19969
19970 *value = 0;
19971 if (offset != NULL)
19972 {
19973 if (!tree_fits_shwi_p (offset))
19974 return NULL_TREE;
19975 *value = tree_to_shwi (offset);
19976 }
19977 if (cbitpos != 0)
19978 *value += cbitpos / BITS_PER_UNIT;
19979
19980 return cvar;
19981 }
19982
19983 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
19984 data attribute for a variable or a parameter. We generate the
19985 DW_AT_const_value attribute only in those cases where the given variable
19986 or parameter does not have a true "location" either in memory or in a
19987 register. This can happen (for example) when a constant is passed as an
19988 actual argument in a call to an inline function. (It's possible that
19989 these things can crop up in other ways also.) Note that one type of
19990 constant value which can be passed into an inlined function is a constant
19991 pointer. This can happen for example if an actual argument in an inlined
19992 function call evaluates to a compile-time constant address.
19993
19994 CACHE_P is true if it is worth caching the location list for DECL,
19995 so that future calls can reuse it rather than regenerate it from scratch.
19996 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
19997 since we will need to refer to them each time the function is inlined. */
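 /* Illustration (hedged): when a call like f (42) is inlined and the
    parameter ends up with no real runtime location, its
    DW_TAG_formal_parameter DIE can still carry DW_AT_const_value 42 via
    add_const_value_attribute below. */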
19998
19999 static bool
20000 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
20001 {
20002 rtx rtl;
20003 dw_loc_list_ref list;
20004 var_loc_list *loc_list;
20005 cached_dw_loc_list *cache;
20006
20007 if (early_dwarf)
20008 return false;
20009
20010 if (TREE_CODE (decl) == ERROR_MARK)
20011 return false;
20012
20013 if (get_AT (die, DW_AT_location)
20014 || get_AT (die, DW_AT_const_value))
20015 return true;
20016
20017 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
20018 || TREE_CODE (decl) == RESULT_DECL);
20019
20020 /* Try to get some constant RTL for this decl, and use that as the value of
20021 the location. */
20022
20023 rtl = rtl_for_decl_location (decl);
20024 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20025 && add_const_value_attribute (die, rtl))
20026 return true;
20027
20028 /* See if we have a single-element location list that is equivalent to
20029 a constant value. In that case it is better to use add_const_value_attribute
20030 rather than expanding the constant value equivalent. */
20031 loc_list = lookup_decl_loc (decl);
20032 if (loc_list
20033 && loc_list->first
20034 && loc_list->first->next == NULL
20035 && NOTE_P (loc_list->first->loc)
20036 && NOTE_VAR_LOCATION (loc_list->first->loc)
20037 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
20038 {
20039 struct var_loc_node *node;
20040
20041 node = loc_list->first;
20042 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
20043 if (GET_CODE (rtl) == EXPR_LIST)
20044 rtl = XEXP (rtl, 0);
20045 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20046 && add_const_value_attribute (die, rtl))
20047 return true;
20048 }
20049 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
20050 list several times. See if we've already cached the contents. */
20051 list = NULL;
20052 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
20053 cache_p = false;
20054 if (cache_p)
20055 {
20056 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
20057 if (cache)
20058 list = cache->loc_list;
20059 }
20060 if (list == NULL)
20061 {
20062 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
20063 NULL);
20064 /* It is usually worth caching this result if the decl is from
20065 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
20066 if (cache_p && list && list->dw_loc_next)
20067 {
20068 cached_dw_loc_list **slot
20069 = cached_dw_loc_list_table->find_slot_with_hash (decl,
20070 DECL_UID (decl),
20071 INSERT);
20072 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
20073 cache->decl_id = DECL_UID (decl);
20074 cache->loc_list = list;
20075 *slot = cache;
20076 }
20077 }
20078 if (list)
20079 {
20080 add_AT_location_description (die, DW_AT_location, list);
20081 return true;
20082 }
20083 /* None of that worked, so it must not really have a location;
20084 try adding a constant value attribute from the DECL_INITIAL. */
20085 return tree_add_const_value_attribute_for_decl (die, decl);
20086 }
20087
20088 /* Helper function for tree_add_const_value_attribute. Natively encode
20089 initializer INIT into an array. Return true if successful. */
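 /* A sketch of the intent (assuming a 4-byte int, target byte order):
    for

      int a[3] = { 1, 2, 3 };

    with SIZE == 12, the routine fills the array with the three 4-byte
    encodings of 1, 2 and 3 at offsets 0, 4 and 8. */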
20090
20091 static bool
20092 native_encode_initializer (tree init, unsigned char *array, int size)
20093 {
20094 tree type;
20095
20096 if (init == NULL_TREE)
20097 return false;
20098
20099 STRIP_NOPS (init);
20100 switch (TREE_CODE (init))
20101 {
20102 case STRING_CST:
20103 type = TREE_TYPE (init);
20104 if (TREE_CODE (type) == ARRAY_TYPE)
20105 {
20106 tree enttype = TREE_TYPE (type);
20107 scalar_int_mode mode;
20108
20109 if (!is_int_mode (TYPE_MODE (enttype), &mode)
20110 || GET_MODE_SIZE (mode) != 1)
20111 return false;
20112 if (int_size_in_bytes (type) != size)
20113 return false;
20114 if (size > TREE_STRING_LENGTH (init))
20115 {
20116 memcpy (array, TREE_STRING_POINTER (init),
20117 TREE_STRING_LENGTH (init));
20118 memset (array + TREE_STRING_LENGTH (init),
20119 '\0', size - TREE_STRING_LENGTH (init));
20120 }
20121 else
20122 memcpy (array, TREE_STRING_POINTER (init), size);
20123 return true;
20124 }
20125 return false;
20126 case CONSTRUCTOR:
20127 type = TREE_TYPE (init);
20128 if (int_size_in_bytes (type) != size)
20129 return false;
20130 if (TREE_CODE (type) == ARRAY_TYPE)
20131 {
20132 HOST_WIDE_INT min_index;
20133 unsigned HOST_WIDE_INT cnt;
20134 int curpos = 0, fieldsize;
20135 constructor_elt *ce;
20136
20137 if (TYPE_DOMAIN (type) == NULL_TREE
20138 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
20139 return false;
20140
20141 fieldsize = int_size_in_bytes (TREE_TYPE (type));
20142 if (fieldsize <= 0)
20143 return false;
20144
20145 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
20146 memset (array, '\0', size);
20147 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20148 {
20149 tree val = ce->value;
20150 tree index = ce->index;
20151 int pos = curpos;
20152 if (index && TREE_CODE (index) == RANGE_EXPR)
20153 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
20154 * fieldsize;
20155 else if (index)
20156 pos = (tree_to_shwi (index) - min_index) * fieldsize;
20157
20158 if (val)
20159 {
20160 STRIP_NOPS (val);
20161 if (!native_encode_initializer (val, array + pos, fieldsize))
20162 return false;
20163 }
20164 curpos = pos + fieldsize;
20165 if (index && TREE_CODE (index) == RANGE_EXPR)
20166 {
20167 int count = tree_to_shwi (TREE_OPERAND (index, 1))
20168 - tree_to_shwi (TREE_OPERAND (index, 0));
20169 while (count-- > 0)
20170 {
20171 if (val)
20172 memcpy (array + curpos, array + pos, fieldsize);
20173 curpos += fieldsize;
20174 }
20175 }
20176 gcc_assert (curpos <= size);
20177 }
20178 return true;
20179 }
20180 else if (TREE_CODE (type) == RECORD_TYPE
20181 || TREE_CODE (type) == UNION_TYPE)
20182 {
20183 tree field = NULL_TREE;
20184 unsigned HOST_WIDE_INT cnt;
20185 constructor_elt *ce;
20186
20187 if (int_size_in_bytes (type) != size)
20188 return false;
20189
20190 if (TREE_CODE (type) == RECORD_TYPE)
20191 field = TYPE_FIELDS (type);
20192
20193 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20194 {
20195 tree val = ce->value;
20196 int pos, fieldsize;
20197
20198 if (ce->index != 0)
20199 field = ce->index;
20200
20201 if (val)
20202 STRIP_NOPS (val);
20203
20204 if (field == NULL_TREE || DECL_BIT_FIELD (field))
20205 return false;
20206
20207 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
20208 && TYPE_DOMAIN (TREE_TYPE (field))
20209 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
20210 return false;
20211 else if (DECL_SIZE_UNIT (field) == NULL_TREE
20212 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
20213 return false;
20214 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
20215 pos = int_byte_position (field);
20216 gcc_assert (pos + fieldsize <= size);
20217 if (val && fieldsize != 0
20218 && !native_encode_initializer (val, array + pos, fieldsize))
20219 return false;
20220 }
20221 return true;
20222 }
20223 return false;
20224 case VIEW_CONVERT_EXPR:
20225 case NON_LVALUE_EXPR:
20226 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
20227 default:
20228 return native_encode_expr (init, array, size) == size;
20229 }
20230 }
20231
20232 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20233 attribute is the const value T. */
20234
20235 static bool
20236 tree_add_const_value_attribute (dw_die_ref die, tree t)
20237 {
20238 tree init;
20239 tree type = TREE_TYPE (t);
20240 rtx rtl;
20241
20242 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20243 return false;
20244
20245 init = t;
20246 gcc_assert (!DECL_P (init));
20247
20248 if (TREE_CODE (init) == INTEGER_CST)
20249 {
20250 if (tree_fits_uhwi_p (init))
20251 {
20252 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20253 return true;
20254 }
20255 if (tree_fits_shwi_p (init))
20256 {
20257 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20258 return true;
20259 }
20260 }
20261 if (! early_dwarf)
20262 {
20263 rtl = rtl_for_decl_init (init, type);
20264 if (rtl)
20265 return add_const_value_attribute (die, rtl);
20266 }
20267 /* If the host and target are sane, try harder. */
20268 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20269 && initializer_constant_valid_p (init, type))
20270 {
20271 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20272 if (size > 0 && (int) size == size)
20273 {
20274 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20275
20276 if (native_encode_initializer (init, array, size))
20277 {
20278 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20279 return true;
20280 }
20281 ggc_free (array);
20282 }
20283 }
20284 return false;
20285 }
20286
20287 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20288 attribute is the const value of T, where T is an integral constant
20289 variable with static storage duration
20290 (so it can't be a PARM_DECL or a RESULT_DECL). */
20291
20292 static bool
20293 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20294 {
20295
20296 if (!decl
20297 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20298 || (VAR_P (decl) && !TREE_STATIC (decl)))
20299 return false;
20300
20301 if (TREE_READONLY (decl)
20302 && ! TREE_THIS_VOLATILE (decl)
20303 && DECL_INITIAL (decl))
20304 /* OK */;
20305 else
20306 return false;
20307
20308 /* Don't add DW_AT_const_value if abstract origin already has one. */
20309 if (get_AT (var_die, DW_AT_const_value))
20310 return false;
20311
20312 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20313 }
20314
20315 /* Convert the CFI instructions for the current function into a
20316 location list. This is used for DW_AT_frame_base when we are targeting
20317 a dwarf2 consumer that does not support the dwarf3
20318 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20319 expressions. */
20320
20321 static dw_loc_list_ref
20322 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20323 {
20324 int ix;
20325 dw_fde_ref fde;
20326 dw_loc_list_ref list, *list_tail;
20327 dw_cfi_ref cfi;
20328 dw_cfa_location last_cfa, next_cfa;
20329 const char *start_label, *last_label, *section;
20330 dw_cfa_location remember;
20331
20332 fde = cfun->fde;
20333 gcc_assert (fde != NULL);
20334
20335 section = secname_for_decl (current_function_decl);
20336 list_tail = &list;
20337 list = NULL;
20338
20339 memset (&next_cfa, 0, sizeof (next_cfa));
20340 next_cfa.reg = INVALID_REGNUM;
20341 remember = next_cfa;
20342
20343 start_label = fde->dw_fde_begin;
20344
20345 /* ??? Bald assumption that the CIE opcode list does not contain
20346 advance opcodes. */
20347 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20348 lookup_cfa_1 (cfi, &next_cfa, &remember);
20349
20350 last_cfa = next_cfa;
20351 last_label = start_label;
20352
20353 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20354 {
20355 /* If the first partition contained no CFI adjustments, the
20356 CIE opcodes apply to the whole first partition. */
20357 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20358 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20359 list_tail = &(*list_tail)->dw_loc_next;
20360 start_label = last_label = fde->dw_fde_second_begin;
20361 }
20362
20363 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20364 {
20365 switch (cfi->dw_cfi_opc)
20366 {
20367 case DW_CFA_set_loc:
20368 case DW_CFA_advance_loc1:
20369 case DW_CFA_advance_loc2:
20370 case DW_CFA_advance_loc4:
20371 if (!cfa_equal_p (&last_cfa, &next_cfa))
20372 {
20373 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20374 start_label, 0, last_label, 0, section);
20375
20376 list_tail = &(*list_tail)->dw_loc_next;
20377 last_cfa = next_cfa;
20378 start_label = last_label;
20379 }
20380 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20381 break;
20382
20383 case DW_CFA_advance_loc:
20384 /* The encoding is complex enough that we should never emit this. */
20385 gcc_unreachable ();
20386
20387 default:
20388 lookup_cfa_1 (cfi, &next_cfa, &remember);
20389 break;
20390 }
20391 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20392 {
20393 if (!cfa_equal_p (&last_cfa, &next_cfa))
20394 {
20395 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20396 start_label, 0, last_label, 0, section);
20397
20398 list_tail = &(*list_tail)->dw_loc_next;
20399 last_cfa = next_cfa;
20400 start_label = last_label;
20401 }
20402 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20403 start_label, 0, fde->dw_fde_end, 0, section);
20404 list_tail = &(*list_tail)->dw_loc_next;
20405 start_label = last_label = fde->dw_fde_second_begin;
20406 }
20407 }
20408
20409 if (!cfa_equal_p (&last_cfa, &next_cfa))
20410 {
20411 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20412 start_label, 0, last_label, 0, section);
20413 list_tail = &(*list_tail)->dw_loc_next;
20414 start_label = last_label;
20415 }
20416
20417 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20418 start_label, 0,
20419 fde->dw_fde_second_begin
20420 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20421 section);
20422
20423 maybe_gen_llsym (list);
20424
20425 return list;
20426 }
20427
20428 /* Compute a displacement from the "steady-state frame pointer" to the
20429 frame base (often the same as the CFA), and store it in
20430 frame_pointer_fb_offset. OFFSET is added to the displacement
20431 before the latter is negated. */
20432
20433 static void
20434 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20435 {
20436 rtx reg, elim;
20437
20438 #ifdef FRAME_POINTER_CFA_OFFSET
20439 reg = frame_pointer_rtx;
20440 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20441 #else
20442 reg = arg_pointer_rtx;
20443 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20444 #endif
20445
20446 elim = (ira_use_lra_p
20447 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20448 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20449 elim = strip_offset_and_add (elim, &offset);
20450
20451 frame_pointer_fb_offset = -offset;
20452
20453 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20454 in which to eliminate. This is because its stack pointer isn't
20455 directly accessible as a register within the ISA. To work around
20456 this, assume that while we cannot provide a proper value for
20457 frame_pointer_fb_offset, we won't need one either. We can use
20458 the hard frame pointer in debug info even if the frame pointer isn't used,
20459 since the hard frame pointer in debug info is encoded with DW_OP_fbreg,
20460 which uses the DW_AT_frame_base attribute rather than the hard frame
20461 pointer directly. */
20462 frame_pointer_fb_offset_valid
20463 = (elim == hard_frame_pointer_rtx || elim == stack_pointer_rtx);
20464 }
20465
20466 /* Generate a DW_AT_name attribute given some string value to be included as
20467 the value of the attribute. */
20468
20469 static void
20470 add_name_attribute (dw_die_ref die, const char *name_string)
20471 {
20472 if (name_string != NULL && *name_string != 0)
20473 {
20474 if (demangle_name_func)
20475 name_string = (*demangle_name_func) (name_string);
20476
20477 add_AT_string (die, DW_AT_name, name_string);
20478 }
20479 }
20480
20481 /* Generate a DW_AT_description attribute given some string value to be included
20482 as the value of the attribute. */
20483
20484 static void
20485 add_desc_attribute (dw_die_ref die, const char *name_string)
20486 {
20487 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20488 return;
20489
20490 if (name_string == NULL || *name_string == 0)
20491 return;
20492
20493 if (demangle_name_func)
20494 name_string = (*demangle_name_func) (name_string);
20495
20496 add_AT_string (die, DW_AT_description, name_string);
20497 }
20498
20499 /* Generate a DW_AT_description attribute given some decl to be included
20500 as the value of the attribute. */
20501
20502 static void
20503 add_desc_attribute (dw_die_ref die, tree decl)
20504 {
20505 tree decl_name;
20506
20507 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20508 return;
20509
20510 if (decl == NULL_TREE || !DECL_P (decl))
20511 return;
20512 decl_name = DECL_NAME (decl);
20513
20514 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
20515 {
20516 const char *name = dwarf2_name (decl, 0);
20517 add_desc_attribute (die, name ? name : IDENTIFIER_POINTER (decl_name));
20518 }
20519 else
20520 {
20521 char *desc = print_generic_expr_to_str (decl);
20522 add_desc_attribute (die, desc);
20523 free (desc);
20524 }
20525 }
20526
20527 /* Retrieve the descriptive type of TYPE, if any; make sure it has a
20528 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20529 of TYPE accordingly.
20530
20531 ??? This is a temporary measure until we're able to generate
20532 regular DWARF for the complex Ada type system. */
20533
20534 static void
20535 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20536 dw_die_ref context_die)
20537 {
20538 tree dtype;
20539 dw_die_ref dtype_die;
20540
20541 if (!lang_hooks.types.descriptive_type)
20542 return;
20543
20544 dtype = lang_hooks.types.descriptive_type (type);
20545 if (!dtype)
20546 return;
20547
20548 dtype_die = lookup_type_die (dtype);
20549 if (!dtype_die)
20550 {
20551 gen_type_die (dtype, context_die);
20552 dtype_die = lookup_type_die (dtype);
20553 gcc_assert (dtype_die);
20554 }
20555
20556 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20557 }
20558
20559 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
20560
20561 static const char *
20562 comp_dir_string (void)
20563 {
20564 const char *wd;
20565 char *wd1;
20566 static const char *cached_wd = NULL;
20567
20568 if (cached_wd != NULL)
20569 return cached_wd;
20570
20571 wd = get_src_pwd ();
20572 if (wd == NULL)
20573 return NULL;
20574
20575 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20576 {
20577 int wdlen;
20578
20579 wdlen = strlen (wd);
20580 wd1 = ggc_vec_alloc<char> (wdlen + 2);
20581 strcpy (wd1, wd);
20582 wd1 [wdlen] = DIR_SEPARATOR;
20583 wd1 [wdlen + 1] = 0;
20584 wd = wd1;
20585 }
20586
20587 cached_wd = remap_debug_filename (wd);
20588 return cached_wd;
20589 }
20590
20591 /* Generate a DW_AT_comp_dir attribute for DIE. */
20592
20593 static void
20594 add_comp_dir_attribute (dw_die_ref die)
20595 {
20596 const char * wd = comp_dir_string ();
20597 if (wd != NULL)
20598 add_AT_string (die, DW_AT_comp_dir, wd);
20599 }
20600
20601 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20602 pointer computation, ...), output a representation for that value according
20603 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20604 loc_list_from_tree for the meaning of CONTEXT. */
20605
20606 static void
20607 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20608 int forms, struct loc_descr_context *context)
20609 {
20610 dw_die_ref context_die, decl_die = NULL;
20611 dw_loc_list_ref list;
20612 bool strip_conversions = true;
20613 bool placeholder_seen = false;
20614
20615 while (strip_conversions)
20616 switch (TREE_CODE (value))
20617 {
20618 case ERROR_MARK:
20619 case SAVE_EXPR:
20620 return;
20621
20622 CASE_CONVERT:
20623 case VIEW_CONVERT_EXPR:
20624 value = TREE_OPERAND (value, 0);
20625 break;
20626
20627 default:
20628 strip_conversions = false;
20629 break;
20630 }
20631
20632 /* If possible and permitted, output the attribute as a constant. */
20633 if ((forms & dw_scalar_form_constant) != 0
20634 && TREE_CODE (value) == INTEGER_CST)
20635 {
20636 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20637
20638 /* If HOST_WIDE_INT is big enough then represent the bound as
20639 a constant value. We need to choose a form based on
20640 whether the type is signed or unsigned. We cannot just
20641 call add_AT_unsigned if the value itself is positive
20642 (add_AT_unsigned might add the unsigned value encoded as
20643 DW_FORM_data[1248]). Some DWARF consumers will look up the
20644 bounds type and then sign extend any unsigned values found
20645 for signed types. This is needed only for
20646 DW_AT_{lower,upper}_bound, since for most other attributes,
20647 consumers will treat DW_FORM_data[1248] as unsigned values,
20648 regardless of the underlying type. */
20649 if (prec <= HOST_BITS_PER_WIDE_INT
20650 || tree_fits_uhwi_p (value))
20651 {
20652 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20653 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20654 else
20655 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20656 }
20657 else
20658 /* Otherwise represent the bound as an unsigned value with
20659 the precision of its type. The precision and signedness
20660 of the type will be necessary to re-interpret it
20661 unambiguously. */
20662 add_AT_wide (die, attr, wi::to_wide (value));
20663 return;
20664 }
20665
20666 /* Otherwise, if it's possible and permitted too, output a reference to
20667 another DIE. */
20668 if ((forms & dw_scalar_form_reference) != 0)
20669 {
20670 tree decl = NULL_TREE;
20671
20672 /* Some type attributes reference an outer type. For instance, the upper
20673 bound of an array may reference an embedding record (this happens in
20674 Ada). */
20675 if (TREE_CODE (value) == COMPONENT_REF
20676 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20677 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20678 decl = TREE_OPERAND (value, 1);
20679
20680 else if (VAR_P (value)
20681 || TREE_CODE (value) == PARM_DECL
20682 || TREE_CODE (value) == RESULT_DECL)
20683 decl = value;
20684
20685 if (decl != NULL_TREE)
20686 {
20687 decl_die = lookup_decl_die (decl);
20688
20689 /* ??? Can this happen, or should the variable have been bound
20690 first? Probably it can, since I imagine that we try to create
20691 the types of parameters in the order in which they exist in
20692 the list, and won't have created a forward reference to a
20693 later parameter. */
20694 if (decl_die != NULL)
20695 {
20696 if (get_AT (decl_die, DW_AT_location)
20697 || get_AT (decl_die, DW_AT_const_value))
20698 {
20699 add_AT_die_ref (die, attr, decl_die);
20700 return;
20701 }
20702 }
20703 }
20704 }
20705
20706 /* Last chance: try to create a stack operation procedure to evaluate the
20707 value. Do nothing if even that is not possible or permitted. */
20708 if ((forms & dw_scalar_form_exprloc) == 0)
20709 return;
20710
20711 list = loc_list_from_tree (value, 2, context);
20712 if (context && context->placeholder_arg)
20713 {
20714 placeholder_seen = context->placeholder_seen;
20715 context->placeholder_seen = false;
20716 }
20717 if (list == NULL || single_element_loc_list_p (list))
20718 {
20719 /* If this attribute is neither a reference nor a constant, it is
20720 a DWARF expression rather than a location description. For that,
20721 loc_list_from_tree (value, 0, &context) is needed. */
20722 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20723 if (list2 && single_element_loc_list_p (list2))
20724 {
20725 if (placeholder_seen)
20726 {
20727 struct dwarf_procedure_info dpi;
20728 dpi.fndecl = NULL_TREE;
20729 dpi.args_count = 1;
20730 if (!resolve_args_picking (list2->expr, 1, &dpi))
20731 return;
20732 }
20733 add_AT_loc (die, attr, list2->expr);
20734 return;
20735 }
20736 }
20737
20738 /* If that failed to give a single element location list, fall back to
20739 outputting this as a reference... still if permitted. */
20740 if (list == NULL
20741 || (forms & dw_scalar_form_reference) == 0
20742 || placeholder_seen)
20743 return;
20744
20745 if (!decl_die)
20746 {
20747 if (current_function_decl == 0)
20748 context_die = comp_unit_die ();
20749 else
20750 context_die = lookup_decl_die (current_function_decl);
20751
20752 decl_die = new_die (DW_TAG_variable, context_die, value);
20753 add_AT_flag (decl_die, DW_AT_artificial, 1);
20754 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20755 context_die);
20756 }
20757
20758 add_AT_location_description (decl_die, DW_AT_location, list);
20759 add_AT_die_ref (die, attr, decl_die);
20760 }
20761
20762 /* Return the default for DW_AT_lower_bound, or -1 if there is no
20763 default. */
20764
20765 static int
20766 lower_bound_default (void)
20767 {
20768 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20769 {
20770 case DW_LANG_C:
20771 case DW_LANG_C89:
20772 case DW_LANG_C99:
20773 case DW_LANG_C11:
20774 case DW_LANG_C_plus_plus:
20775 case DW_LANG_C_plus_plus_11:
20776 case DW_LANG_C_plus_plus_14:
20777 case DW_LANG_ObjC:
20778 case DW_LANG_ObjC_plus_plus:
20779 return 0;
20780 case DW_LANG_Fortran77:
20781 case DW_LANG_Fortran90:
20782 case DW_LANG_Fortran95:
20783 case DW_LANG_Fortran03:
20784 case DW_LANG_Fortran08:
20785 return 1;
20786 case DW_LANG_UPC:
20787 case DW_LANG_D:
20788 case DW_LANG_Python:
20789 return dwarf_version >= 4 ? 0 : -1;
20790 case DW_LANG_Ada95:
20791 case DW_LANG_Ada83:
20792 case DW_LANG_Cobol74:
20793 case DW_LANG_Cobol85:
20794 case DW_LANG_Modula2:
20795 case DW_LANG_PLI:
20796 return dwarf_version >= 4 ? 1 : -1;
20797 default:
20798 return -1;
20799 }
20800 }
20801
20802 /* Given a tree node describing an array bound (either lower or upper) output
20803 a representation for that bound. */
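 /* For example (illustrative), for the C array "int a[10]" the lower
    bound is 0; since that matches the language default reported by
    lower_bound_default, DW_AT_lower_bound is simply omitted and only the
    upper bound (9) is emitted by the caller. */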
20804
20805 static void
20806 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20807 tree bound, struct loc_descr_context *context)
20808 {
20809 int dflt;
20810
20811 while (1)
20812 switch (TREE_CODE (bound))
20813 {
20814 /* Strip all conversions. */
20815 CASE_CONVERT:
20816 case VIEW_CONVERT_EXPR:
20817 bound = TREE_OPERAND (bound, 0);
20818 break;
20819
20820 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
20821 are even omitted when they are the default. */
20822 case INTEGER_CST:
20823 /* If the value for this bound is the default one, we can even omit the
20824 attribute. */
20825 if (bound_attr == DW_AT_lower_bound
20826 && tree_fits_shwi_p (bound)
20827 && (dflt = lower_bound_default ()) != -1
20828 && tree_to_shwi (bound) == dflt)
20829 return;
20830
20831 /* FALLTHRU */
20832
20833 default:
20834 /* Because of the complex interactions there can be with other GNAT
20835 encodings, GDB isn't yet ready to handle a proper DWARF description
20836 for self-referential subrange bounds: let the GNAT encodings do the
20837 magic in such a case. */
20838 if (is_ada ()
20839 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20840 && contains_placeholder_p (bound))
20841 return;
20842
20843 add_scalar_info (subrange_die, bound_attr, bound,
20844 dw_scalar_form_constant
20845 | dw_scalar_form_exprloc
20846 | dw_scalar_form_reference,
20847 context);
20848 return;
20849 }
20850 }
20851
20852 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20853 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
20854 Note that the block of subscript information for an array type also
20855 includes information about the element type of the given array type.
20856
20857 This function reuses previously set type and bound information if
20858 available. */
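 /* For example (illustrative), with COLLAPSE_P true a C declaration like

      int m[2][3];

    yields a single DW_TAG_array_type DIE with two DW_TAG_subrange_type
    children (upper bounds 1 and 2) rather than an array of arrays. */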
20859
20860 static void
20861 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20862 {
20863 unsigned dimension_number;
20864 tree lower, upper;
20865 dw_die_ref child = type_die->die_child;
20866
20867 for (dimension_number = 0;
20868 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
20869 type = TREE_TYPE (type), dimension_number++)
20870 {
20871 tree domain = TYPE_DOMAIN (type);
20872
20873 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
20874 break;
20875
20876 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
20877 and (in GNU C only) variable bounds. Handle all three forms
20878 here. */
20879
20880 /* Find and reuse a previously generated DW_TAG_subrange_type if
20881 available.
20882
20883 For multi-dimensional arrays, as we iterate through the
20884 various dimensions in the enclosing for loop above, we also
20885 iterate through the DIE children and pick up each
20886 DW_TAG_subrange_type previously generated (if available).
20887 Each child DW_TAG_subrange_type DIE describes the range of
20888 the current dimension. At this point we should have as many
20889 DW_TAG_subrange_type's as we have dimensions in the
20890 array. */
20891 dw_die_ref subrange_die = NULL;
20892 if (child)
20893 while (1)
20894 {
20895 child = child->die_sib;
20896 if (child->die_tag == DW_TAG_subrange_type)
20897 subrange_die = child;
20898 if (child == type_die->die_child)
20899 {
20900 /* If we wrapped around, stop looking next time. */
20901 child = NULL;
20902 break;
20903 }
20904 if (child->die_tag == DW_TAG_subrange_type)
20905 break;
20906 }
20907 if (!subrange_die)
20908 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
20909
20910 if (domain)
20911 {
20912 /* We have an array type with specified bounds. */
20913 lower = TYPE_MIN_VALUE (domain);
20914 upper = TYPE_MAX_VALUE (domain);
20915
20916 /* Define the index type. */
20917 if (TREE_TYPE (domain)
20918 && !get_AT (subrange_die, DW_AT_type))
20919 {
20920 /* ??? This is probably an Ada unnamed subrange type. Ignore the
20921 TREE_TYPE field. We can't emit debug info for this
20922 because it is an unnamed integral type. */
20923 if (TREE_CODE (domain) == INTEGER_TYPE
20924 && TYPE_NAME (domain) == NULL_TREE
20925 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
20926 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
20927 ;
20928 else
20929 add_type_attribute (subrange_die, TREE_TYPE (domain),
20930 TYPE_UNQUALIFIED, false, type_die);
20931 }
20932
20933 /* ??? If upper is NULL, the array has unspecified length,
20934 but it does have a lower bound. This happens with Fortran
20935 dimension arr(N:*)
20936 Since the debugger is definitely going to need to know N
20937 to produce useful results, go ahead and output the lower
20938 bound solo, and hope the debugger can cope. */
20939
20940 if (!get_AT (subrange_die, DW_AT_lower_bound))
20941 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
20942 if (!get_AT (subrange_die, DW_AT_upper_bound)
20943 && !get_AT (subrange_die, DW_AT_count))
20944 {
20945 if (upper)
20946 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
20947 else if ((is_c () || is_cxx ()) && COMPLETE_TYPE_P (type))
20948 /* Zero-length array. */
20949 add_bound_info (subrange_die, DW_AT_count,
20950 build_int_cst (TREE_TYPE (lower), 0), NULL);
20951 }
20952 }
20953
20954 /* Otherwise we have an array type with an unspecified length. The
20955 DWARF-2 spec does not say how to handle this; let's just leave out the
20956 bounds. */
20957 }
20958 }
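
/* For example, with COLLAPSE_P set (the C/C++ case), a declaration like

       int m[3][5];

   is described by a single DW_TAG_array_type for `int' carrying two
   DW_TAG_subrange_type children, roughly:

       DW_TAG_subrange_type: DW_AT_upper_bound 2
       DW_TAG_subrange_type: DW_AT_upper_bound 4

   DW_AT_lower_bound is omitted because 0 is the C default (see
   lower_bound_default above).  Without COLLAPSE_P, as for Ada, each
   dimension keeps its own nested array type with a single subscript.  */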
20959
20960 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
20961
20962 static void
20963 add_byte_size_attribute (dw_die_ref die, tree tree_node)
20964 {
20965 dw_die_ref decl_die;
20966 HOST_WIDE_INT size;
20967 dw_loc_descr_ref size_expr = NULL;
20968
20969 switch (TREE_CODE (tree_node))
20970 {
20971 case ERROR_MARK:
20972 size = 0;
20973 break;
20974 case ENUMERAL_TYPE:
20975 case RECORD_TYPE:
20976 case UNION_TYPE:
20977 case QUAL_UNION_TYPE:
20978 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
20979 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
20980 {
20981 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
20982 return;
20983 }
20984 size_expr = type_byte_size (tree_node, &size);
20985 break;
20986 case FIELD_DECL:
20987 /* For a data member of a struct or union, the DW_AT_byte_size is
20988 generally given as the number of bytes normally allocated for an
20989 object of the *declared* type of the member itself. This is true
20990 even for bit-fields. */
20991 size = int_size_in_bytes (field_type (tree_node));
20992 break;
20993 default:
20994 gcc_unreachable ();
20995 }
20996
20997 /* Support for dynamically-sized objects was introduced by DWARFv3.
20998 At the moment, GDB does not handle variable byte sizes very well,
20999 though. */
21000 if ((dwarf_version >= 3 || !dwarf_strict)
21001 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
21002 && size_expr != NULL)
21003 add_AT_loc (die, DW_AT_byte_size, size_expr);
21004
21005 /* Note that `size' might be -1 when we get to this point. If it is, that
21006 indicates that the byte size of the entity in question is variable and
21007 that we could not generate a DWARF expression that computes it. */
21008 if (size >= 0)
21009 add_AT_unsigned (die, DW_AT_byte_size, size);
21010 }
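
/* For instance, on a typical target with 4-byte int and default structure
   layout, a type such as

       struct s { int i; char c; };

   would be given DW_AT_byte_size 8 (tail padding included), while a
   FIELD_DECL for a bit-field like `int f : 3' gets the size of its
   declared type, i.e. DW_AT_byte_size 4, as described above.  */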
21011
21012 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
21013 alignment. */
21014
21015 static void
21016 add_alignment_attribute (dw_die_ref die, tree tree_node)
21017 {
21018 if (dwarf_version < 5 && dwarf_strict)
21019 return;
21020
21021 unsigned align;
21022
21023 if (DECL_P (tree_node))
21024 {
21025 if (!DECL_USER_ALIGN (tree_node))
21026 return;
21027
21028 align = DECL_ALIGN_UNIT (tree_node);
21029 }
21030 else if (TYPE_P (tree_node))
21031 {
21032 if (!TYPE_USER_ALIGN (tree_node))
21033 return;
21034
21035 align = TYPE_ALIGN_UNIT (tree_node);
21036 }
21037 else
21038 gcc_unreachable ();
21039
21040 add_AT_unsigned (die, DW_AT_alignment, align);
21041 }
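
/* A short example: only user-specified alignment is described here, so

       _Alignas (16) int buf;

   (or the equivalent `__attribute__ ((aligned (16)))') would get
   DW_AT_alignment 16, whereas a plain `int' with its natural alignment
   gets no attribute at all; strict pre-DWARF 5 output skips the
   attribute entirely, as checked above.  */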
21042
21043 /* For a FIELD_DECL node which represents a bit-field, output an attribute
21044 which specifies the distance in bits from the highest order bit of the
21045 "containing object" for the bit-field to the highest order bit of the
21046 bit-field itself.
21047
21048 For any given bit-field, the "containing object" is a hypothetical object
21049 (of some integral or enum type) within which the given bit-field lives. The
21050 type of this hypothetical "containing object" is always the same as the
21051 declared type of the individual bit-field itself. The determination of the
21052 exact location of the "containing object" for a bit-field is rather
21053 complicated. It's handled by the `field_byte_offset' function (above).
21054
21055 CTX is required: see the comment for VLR_CONTEXT.
21056
21057 Note that it is the size (in bytes) of the hypothetical "containing object"
21058 which will be given in the DW_AT_byte_size attribute for this bit-field.
21059 (See `add_byte_size_attribute' above). */
21060
21061 static inline void
21062 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
21063 {
21064 HOST_WIDE_INT object_offset_in_bytes;
21065 tree original_type = DECL_BIT_FIELD_TYPE (decl);
21066 HOST_WIDE_INT bitpos_int;
21067 HOST_WIDE_INT highest_order_object_bit_offset;
21068 HOST_WIDE_INT highest_order_field_bit_offset;
21069 HOST_WIDE_INT bit_offset;
21070
21071 field_byte_offset (decl, ctx, &object_offset_in_bytes);
21072
21073 /* Must be a field and a bit field. */
21074 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
21075
21076 /* We can't yet handle bit-fields whose offsets are variable, so if we
21077 encounter such things, just return without generating any attribute
21078 whatsoever. Likewise for variable or too large size. */
21079 if (! tree_fits_shwi_p (bit_position (decl))
21080 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
21081 return;
21082
21083 bitpos_int = int_bit_position (decl);
21084
21085 /* Note that the bit offset is always the distance (in bits) from the
21086 highest-order bit of the "containing object" to the highest-order bit of
21087 the bit-field itself. Since the "high-order end" of any object or field
21088 is different on big-endian and little-endian machines, the computation
21089 below must take account of these differences. */
21090 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
21091 highest_order_field_bit_offset = bitpos_int;
21092
21093 if (! BYTES_BIG_ENDIAN)
21094 {
21095 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
21096 highest_order_object_bit_offset +=
21097 simple_type_size_in_bits (original_type);
21098 }
21099
21100 bit_offset
21101 = (! BYTES_BIG_ENDIAN
21102 ? highest_order_object_bit_offset - highest_order_field_bit_offset
21103 : highest_order_field_bit_offset - highest_order_object_bit_offset);
21104
21105 if (bit_offset < 0)
21106 add_AT_int (die, DW_AT_bit_offset, bit_offset);
21107 else
21108 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
21109 }
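
/* Worked example, assuming a 32-bit `unsigned int' container:

       struct s { unsigned a : 3; unsigned b : 5; };

   On a little-endian target the members would be described roughly as

       a: DW_AT_byte_size 4, DW_AT_bit_size 3, DW_AT_bit_offset 29
       b: DW_AT_byte_size 4, DW_AT_bit_size 5, DW_AT_bit_offset 24

   i.e. 32 minus (bit position plus bit size), while on a big-endian
   target the offsets would simply be 0 and 3, the distances from the
   high-order bit of the containing `unsigned int'.  */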
21110
21111 /* For a FIELD_DECL node which represents a bit field, output an attribute
21112 which specifies the length in bits of the given field. */
21113
21114 static inline void
21115 add_bit_size_attribute (dw_die_ref die, tree decl)
21116 {
21117 /* Must be a field and a bit field. */
21118 gcc_assert (TREE_CODE (decl) == FIELD_DECL
21119 && DECL_BIT_FIELD_TYPE (decl));
21120
21121 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
21122 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
21123 }
21124
21125 /* If the compiled language is ANSI C, then add a 'prototyped'
21126 attribute if argument types are given for the parameters of a function. */
21127
21128 static inline void
21129 add_prototyped_attribute (dw_die_ref die, tree func_type)
21130 {
21131 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
21132 {
21133 case DW_LANG_C:
21134 case DW_LANG_C89:
21135 case DW_LANG_C99:
21136 case DW_LANG_C11:
21137 case DW_LANG_ObjC:
21138 if (prototype_p (func_type))
21139 add_AT_flag (die, DW_AT_prototyped, 1);
21140 break;
21141 default:
21142 break;
21143 }
21144 }
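
/* For example, in C99 the prototyped declaration

       int f (int x);

   gets DW_AT_prototyped on its subprogram (or subroutine type) DIE,
   whereas an old-style, unprototyped declaration such as

       int g ();

   does not; for languages outside the list above the attribute is never
   emitted.  */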
21145
21146 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
21147 by looking in the type declaration, the object declaration equate table or
21148 the block mapping. */
21149
21150 static inline void
21151 add_abstract_origin_attribute (dw_die_ref die, tree origin)
21152 {
21153 dw_die_ref origin_die = NULL;
21154
21155 /* For late LTO debug output we want to refer directly to the abstract
21156 DIE in the early debug rather than to the possibly existing concrete
21157 instance, and avoid creating one just for this purpose. */
21158 sym_off_pair *desc;
21159 if (in_lto_p
21160 && external_die_map
21161 && (desc = external_die_map->get (origin)))
21162 {
21163 add_AT_external_die_ref (die, DW_AT_abstract_origin,
21164 desc->sym, desc->off);
21165 return;
21166 }
21167
21168 if (DECL_P (origin))
21169 origin_die = lookup_decl_die (origin);
21170 else if (TYPE_P (origin))
21171 origin_die = lookup_type_die (origin);
21172 else if (TREE_CODE (origin) == BLOCK)
21173 origin_die = lookup_block_die (origin);
21174
21175 /* XXX: Functions that are never lowered don't always have correct block
21176 trees (in the case of Java they simply have no block tree; likewise in some
21177 other languages). For these functions, there is nothing we can really do to
21178 output correct debug info for inlined functions in all cases. Rather
21179 than die, we'll just produce deficient debug info now, in that we will
21180 have variables without a proper abstract origin. In the future, when all
21181 functions are lowered, we should re-add a gcc_assert (origin_die)
21182 here. */
21183
21184 if (origin_die)
21185 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21186 }
21187
21188 /* We do not currently support the pure_virtual attribute. */
21189
21190 static inline void
21191 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21192 {
21193 if (DECL_VINDEX (func_decl))
21194 {
21195 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21196
21197 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21198 add_AT_loc (die, DW_AT_vtable_elem_location,
21199 new_loc_descr (DW_OP_constu,
21200 tree_to_shwi (DECL_VINDEX (func_decl)),
21201 0));
21202
21203 /* GNU extension: Record what type this method came from originally. */
21204 if (debug_info_level > DINFO_LEVEL_TERSE
21205 && DECL_CONTEXT (func_decl))
21206 add_AT_die_ref (die, DW_AT_containing_type,
21207 lookup_type_die (DECL_CONTEXT (func_decl)));
21208 }
21209 }
21210 \f
21211 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21212 given decl. This used to be a vendor extension until DWARF 4
21213 standardized it. */
21214
21215 static void
21216 add_linkage_attr (dw_die_ref die, tree decl)
21217 {
21218 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21219
21220 /* Mimic what assemble_name_raw does with a leading '*'. */
21221 if (name[0] == '*')
21222 name = &name[1];
21223
21224 if (dwarf_version >= 4)
21225 add_AT_string (die, DW_AT_linkage_name, name);
21226 else
21227 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21228 }
21229
21230 /* Add source coordinate attributes for the given decl. */
21231
21232 static void
21233 add_src_coords_attributes (dw_die_ref die, tree decl)
21234 {
21235 expanded_location s;
21236
21237 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21238 return;
21239 s = expand_location (DECL_SOURCE_LOCATION (decl));
21240 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21241 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21242 if (debug_column_info && s.column)
21243 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21244 }
21245
21246 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21247
21248 static void
21249 add_linkage_name_raw (dw_die_ref die, tree decl)
21250 {
21251 /* Defer until we have an assembler name set. */
21252 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21253 {
21254 limbo_die_node *asm_name;
21255
21256 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21257 asm_name->die = die;
21258 asm_name->created_for = decl;
21259 asm_name->next = deferred_asm_name;
21260 deferred_asm_name = asm_name;
21261 }
21262 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21263 add_linkage_attr (die, decl);
21264 }
21265
21266 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21267
21268 static void
21269 add_linkage_name (dw_die_ref die, tree decl)
21270 {
21271 if (debug_info_level > DINFO_LEVEL_NONE
21272 && VAR_OR_FUNCTION_DECL_P (decl)
21273 && TREE_PUBLIC (decl)
21274 && !(VAR_P (decl) && DECL_REGISTER (decl))
21275 && die->die_tag != DW_TAG_member)
21276 add_linkage_name_raw (die, decl);
21277 }
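
/* As an illustration, a public C++ function such as

       namespace N { void f (int) { } }

   has an assembler name (the mangled "_ZN1N1fEi", for instance) that
   differs from its DECL_NAME, so its DIE would carry DW_AT_linkage_name
   with that mangled string (DW_AT_MIPS_linkage_name before DWARF 4),
   while a plain C function whose assembler name matches its source name
   gets no linkage name attribute.  */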
21278
21279 /* Add a DW_AT_name attribute and source coordinate attribute for the
21280 given decl, but only if it actually has a name. */
21281
21282 static void
21283 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21284 bool no_linkage_name)
21285 {
21286 tree decl_name;
21287
21288 decl_name = DECL_NAME (decl);
21289 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21290 {
21291 const char *name = dwarf2_name (decl, 0);
21292 if (name)
21293 add_name_attribute (die, name);
21294 else
21295 add_desc_attribute (die, decl);
21296
21297 if (! DECL_ARTIFICIAL (decl))
21298 add_src_coords_attributes (die, decl);
21299
21300 if (!no_linkage_name)
21301 add_linkage_name (die, decl);
21302 }
21303 else
21304 add_desc_attribute (die, decl);
21305
21306 #ifdef VMS_DEBUGGING_INFO
21307 /* Get the function's name, as described by its RTL. This may be different
21308 from the DECL_NAME name used in the source file. */
21309 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21310 {
21311 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21312 XEXP (DECL_RTL (decl), 0), false);
21313 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21314 }
21315 #endif /* VMS_DEBUGGING_INFO */
21316 }
21317
21318 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21319
21320 static void
21321 add_discr_value (dw_die_ref die, dw_discr_value *value)
21322 {
21323 dw_attr_node attr;
21324
21325 attr.dw_attr = DW_AT_discr_value;
21326 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21327 attr.dw_attr_val.val_entry = NULL;
21328 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21329 if (value->pos)
21330 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21331 else
21332 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21333 add_dwarf_attr (die, &attr);
21334 }
21335
21336 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21337
21338 static void
21339 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21340 {
21341 dw_attr_node attr;
21342
21343 attr.dw_attr = DW_AT_discr_list;
21344 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21345 attr.dw_attr_val.val_entry = NULL;
21346 attr.dw_attr_val.v.val_discr_list = discr_list;
21347 add_dwarf_attr (die, &attr);
21348 }
21349
21350 static inline dw_discr_list_ref
21351 AT_discr_list (dw_attr_node *attr)
21352 {
21353 return attr->dw_attr_val.v.val_discr_list;
21354 }
21355
21356 #ifdef VMS_DEBUGGING_INFO
21357 /* Output the debug main pointer die for VMS. */
21358
21359 void
21360 dwarf2out_vms_debug_main_pointer (void)
21361 {
21362 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21363 dw_die_ref die;
21364
21365 /* Allocate the VMS debug main subprogram die. */
21366 die = new_die_raw (DW_TAG_subprogram);
21367 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21368 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21369 current_function_funcdef_no);
21370 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21371
21372 /* Make it the first child of comp_unit_die (). */
21373 die->die_parent = comp_unit_die ();
21374 if (comp_unit_die ()->die_child)
21375 {
21376 die->die_sib = comp_unit_die ()->die_child->die_sib;
21377 comp_unit_die ()->die_child->die_sib = die;
21378 }
21379 else
21380 {
21381 die->die_sib = die;
21382 comp_unit_die ()->die_child = die;
21383 }
21384 }
21385 #endif /* VMS_DEBUGGING_INFO */
21386
21387 /* walk_tree helper function for uses_local_type, below. */
21388
21389 static tree
21390 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21391 {
21392 if (!TYPE_P (*tp))
21393 *walk_subtrees = 0;
21394 else
21395 {
21396 tree name = TYPE_NAME (*tp);
21397 if (name && DECL_P (name) && decl_function_context (name))
21398 return *tp;
21399 }
21400 return NULL_TREE;
21401 }
21402
21403 /* If TYPE involves a function-local type (including a local typedef to a
21404 non-local type), returns that type; otherwise returns NULL_TREE. */
21405
21406 static tree
21407 uses_local_type (tree type)
21408 {
21409 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21410 return used;
21411 }
21412
21413 /* Return the DIE for the scope that immediately contains this type.
21414 Non-named types that do not involve a function-local type get global
21415 scope. Named types nested in namespaces or other types get their
21416 containing scope. All other types (i.e. function-local named types) get
21417 the current active scope. */
21418
21419 static dw_die_ref
21420 scope_die_for (tree t, dw_die_ref context_die)
21421 {
21422 dw_die_ref scope_die = NULL;
21423 tree containing_scope;
21424
21425 /* Non-types always go in the current scope. */
21426 gcc_assert (TYPE_P (t));
21427
21428 /* Use the scope of the typedef, rather than the scope of the type
21429 it refers to. */
21430 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21431 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21432 else
21433 containing_scope = TYPE_CONTEXT (t);
21434
21435 /* Use the containing namespace if there is one. */
21436 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21437 {
21438 if (context_die == lookup_decl_die (containing_scope))
21439 /* OK */;
21440 else if (debug_info_level > DINFO_LEVEL_TERSE)
21441 context_die = get_context_die (containing_scope);
21442 else
21443 containing_scope = NULL_TREE;
21444 }
21445
21446 /* Ignore function type "scopes" from the C frontend. They mean that
21447 a tagged type is local to a parmlist of a function declarator, but
21448 that isn't useful to DWARF. */
21449 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21450 containing_scope = NULL_TREE;
21451
21452 if (SCOPE_FILE_SCOPE_P (containing_scope))
21453 {
21454 /* If T uses a local type keep it local as well, to avoid references
21455 to function-local DIEs from outside the function. */
21456 if (current_function_decl && uses_local_type (t))
21457 scope_die = context_die;
21458 else
21459 scope_die = comp_unit_die ();
21460 }
21461 else if (TYPE_P (containing_scope))
21462 {
21463 /* For types, we can just look up the appropriate DIE. */
21464 if (debug_info_level > DINFO_LEVEL_TERSE)
21465 scope_die = get_context_die (containing_scope);
21466 else
21467 {
21468 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21469 if (scope_die == NULL)
21470 scope_die = comp_unit_die ();
21471 }
21472 }
21473 else
21474 scope_die = context_die;
21475
21476 return scope_die;
21477 }
21478
21479 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21480
21481 static inline int
21482 local_scope_p (dw_die_ref context_die)
21483 {
21484 for (; context_die; context_die = context_die->die_parent)
21485 if (context_die->die_tag == DW_TAG_inlined_subroutine
21486 || context_die->die_tag == DW_TAG_subprogram)
21487 return 1;
21488
21489 return 0;
21490 }
21491
21492 /* Returns nonzero if CONTEXT_DIE is a class. */
21493
21494 static inline int
21495 class_scope_p (dw_die_ref context_die)
21496 {
21497 return (context_die
21498 && (context_die->die_tag == DW_TAG_structure_type
21499 || context_die->die_tag == DW_TAG_class_type
21500 || context_die->die_tag == DW_TAG_interface_type
21501 || context_die->die_tag == DW_TAG_union_type));
21502 }
21503
21504 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21505 whether or not to treat a DIE in this context as a declaration. */
21506
21507 static inline int
21508 class_or_namespace_scope_p (dw_die_ref context_die)
21509 {
21510 return (class_scope_p (context_die)
21511 || (context_die && context_die->die_tag == DW_TAG_namespace));
21512 }
21513
21514 /* Many forms of DIEs require a "type description" attribute. This
21515 routine locates the proper "type descriptor" die for the type given
21516 by 'type' plus any additional qualifiers given by 'cv_quals', and
21517 adds a DW_AT_type attribute below the given die. */
21518
21519 static void
21520 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21521 bool reverse, dw_die_ref context_die)
21522 {
21523 enum tree_code code = TREE_CODE (type);
21524 dw_die_ref type_die = NULL;
21525
21526 /* ??? If this type is an unnamed subrange type of an integral, floating-point
21527 or fixed-point type, use the inner type. This is because we have no
21528 support for unnamed types in base_type_die. This can happen if this is
21529 an Ada subrange type. The correct solution is to emit a subrange type DIE. */
21530 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21531 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21532 type = TREE_TYPE (type), code = TREE_CODE (type);
21533
21534 if (code == ERROR_MARK
21535 /* Handle a special case. For functions whose return type is void, we
21536 generate *no* type attribute. (Note that no object may have type
21537 `void', so this only applies to function return types). */
21538 || code == VOID_TYPE)
21539 return;
21540
21541 type_die = modified_type_die (type,
21542 cv_quals | TYPE_QUALS (type),
21543 reverse,
21544 context_die);
21545
21546 if (type_die != NULL)
21547 add_AT_die_ref (object_die, DW_AT_type, type_die);
21548 }
21549
21550 /* Given an object die, add the calling convention attribute for the
21551 function call type. */
21552 static void
21553 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21554 {
21555 enum dwarf_calling_convention value = DW_CC_normal;
21556
21557 value = ((enum dwarf_calling_convention)
21558 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21559
21560 if (is_fortran ()
21561 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21562 {
21563 /* DWARF 2 doesn't provide a way to identify a program's source-level
21564 entry point. DW_AT_calling_convention attributes are only meant
21565 to describe functions' calling conventions. However, lacking a
21566 better way to signal the Fortran main program, we used this for
21567 a long time, following existing custom. Now, DWARF 4 has
21568 DW_AT_main_subprogram, which we add below, but some tools still
21569 rely on the old way, which we thus keep. */
21570 value = DW_CC_program;
21571
21572 if (dwarf_version >= 4 || !dwarf_strict)
21573 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21574 }
21575
21576 /* Only add the attribute if the backend requests it, and it
21577 is not DW_CC_normal. */
21578 if (value && (value != DW_CC_normal))
21579 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21580 }
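
/* Concretely, for a Fortran main program whose assembler name is MAIN__,
   the subprogram DIE would get DW_AT_calling_convention DW_CC_program
   and, with DWARF 4 or non-strict output, DW_AT_main_subprogram as well;
   an ordinary function only gets DW_AT_calling_convention when the
   target hook reports something other than DW_CC_normal.  */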
21581
21582 /* Given a tree pointer to a struct, class, union, or enum type node, return
21583 a pointer to the (string) tag name for the given type, or zero if the type
21584 was declared without a tag. */
21585
21586 static const char *
21587 type_tag (const_tree type)
21588 {
21589 const char *name = 0;
21590
21591 if (TYPE_NAME (type) != 0)
21592 {
21593 tree t = 0;
21594
21595 /* Find the IDENTIFIER_NODE for the type name. */
21596 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21597 && !TYPE_NAMELESS (type))
21598 t = TYPE_NAME (type);
21599
21600 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21601 a TYPE_DECL node, regardless of whether or not a `typedef' was
21602 involved. */
21603 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21604 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21605 {
21606 /* We want to be extra verbose. Don't call dwarf_name if
21607 DECL_NAME isn't set. The default hook for decl_printable_name
21608 doesn't like that, and in this context it's correct to return
21609 0, instead of "<anonymous>" or the like. */
21610 if (DECL_NAME (TYPE_NAME (type))
21611 && !DECL_NAMELESS (TYPE_NAME (type)))
21612 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21613 }
21614
21615 /* Now get the name as a string, or invent one. */
21616 if (!name && t != 0)
21617 name = IDENTIFIER_POINTER (t);
21618 }
21619
21620 return (name == 0 || *name == '\0') ? 0 : name;
21621 }
21622
21623 /* Return the type associated with a data member, making a special check
21624 for bit-field types. */
21625
21626 static inline tree
21627 member_declared_type (const_tree member)
21628 {
21629 return (DECL_BIT_FIELD_TYPE (member)
21630 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21631 }
21632
21633 /* Get the decl's label, as described by its RTL. This may be different
21634 from the DECL_NAME name used in the source file. */
21635
21636 #if 0
21637 static const char *
21638 decl_start_label (tree decl)
21639 {
21640 rtx x;
21641 const char *fnname;
21642
21643 x = DECL_RTL (decl);
21644 gcc_assert (MEM_P (x));
21645
21646 x = XEXP (x, 0);
21647 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21648
21649 fnname = XSTR (x, 0);
21650 return fnname;
21651 }
21652 #endif
21653 \f
21654 /* For variable-length arrays that have been previously generated, but
21655 may be incomplete due to missing subscript info, fill the subscript
21656 info. Return TRUE if this is one of those cases. */
21657 static bool
21658 fill_variable_array_bounds (tree type)
21659 {
21660 if (TREE_ASM_WRITTEN (type)
21661 && TREE_CODE (type) == ARRAY_TYPE
21662 && variably_modified_type_p (type, NULL))
21663 {
21664 dw_die_ref array_die = lookup_type_die (type);
21665 if (!array_die)
21666 return false;
21667 add_subscript_info (array_die, type, !is_ada ());
21668 return true;
21669 }
21670 return false;
21671 }
21672
21673 /* These routines generate the internal representation of the DIE's for
21674 the compilation unit. Debugging information is collected by walking
21675 the declaration trees passed in from dwarf2out_decl(). */
21676
21677 static void
21678 gen_array_type_die (tree type, dw_die_ref context_die)
21679 {
21680 dw_die_ref array_die;
21681
21682 /* GNU compilers represent multidimensional array types as sequences of one
21683 dimensional array types whose element types are themselves array types.
21684 We sometimes squish that down to a single array_type DIE with multiple
21685 subscripts in the Dwarf debugging info. The draft Dwarf specification
21686 says that we are allowed to do this kind of compression in C, because
21687 there is no difference between an array of arrays and a multidimensional
21688 array. We don't do this for Ada to remain as close as possible to the
21689 actual representation, which is especially important given the language's
21690 flexibility with respect to arrays of variable size. */
21691
21692 bool collapse_nested_arrays = !is_ada ();
21693
21694 if (fill_variable_array_bounds (type))
21695 return;
21696
21697 dw_die_ref scope_die = scope_die_for (type, context_die);
21698 tree element_type;
21699
21700 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21701 DW_TAG_string_type doesn't have DW_AT_type attribute). */
21702 if (TYPE_STRING_FLAG (type)
21703 && TREE_CODE (type) == ARRAY_TYPE
21704 && is_fortran ()
21705 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21706 {
21707 HOST_WIDE_INT size;
21708
21709 array_die = new_die (DW_TAG_string_type, scope_die, type);
21710 add_name_attribute (array_die, type_tag (type));
21711 equate_type_number_to_die (type, array_die);
21712 size = int_size_in_bytes (type);
21713 if (size >= 0)
21714 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21715 /* ??? We can't annotate types late, but for LTO we may not
21716 generate a location early either (gfortran.dg/save_6.f90). */
21717 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21718 && TYPE_DOMAIN (type) != NULL_TREE
21719 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21720 {
21721 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21722 tree rszdecl = szdecl;
21723
21724 size = int_size_in_bytes (TREE_TYPE (szdecl));
21725 if (!DECL_P (szdecl))
21726 {
21727 if (TREE_CODE (szdecl) == INDIRECT_REF
21728 && DECL_P (TREE_OPERAND (szdecl, 0)))
21729 {
21730 rszdecl = TREE_OPERAND (szdecl, 0);
21731 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21732 != DWARF2_ADDR_SIZE)
21733 size = 0;
21734 }
21735 else
21736 size = 0;
21737 }
21738 if (size > 0)
21739 {
21740 dw_loc_list_ref loc
21741 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21742 NULL);
21743 if (loc)
21744 {
21745 add_AT_location_description (array_die, DW_AT_string_length,
21746 loc);
21747 if (size != DWARF2_ADDR_SIZE)
21748 add_AT_unsigned (array_die, dwarf_version >= 5
21749 ? DW_AT_string_length_byte_size
21750 : DW_AT_byte_size, size);
21751 }
21752 }
21753 }
21754 return;
21755 }
21756
21757 array_die = new_die (DW_TAG_array_type, scope_die, type);
21758 add_name_attribute (array_die, type_tag (type));
21759 equate_type_number_to_die (type, array_die);
21760
21761 if (TREE_CODE (type) == VECTOR_TYPE)
21762 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21763
21764 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21765 if (is_fortran ()
21766 && TREE_CODE (type) == ARRAY_TYPE
21767 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21768 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21769 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21770
21771 #if 0
21772 /* We default the array ordering. Debuggers will probably do the right
21773 things even if DW_AT_ordering is not present. It's not even an issue
21774 until we start to get into multidimensional arrays anyway. If a debugger
21775 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21776 then we'll have to put the DW_AT_ordering attribute back in. (But if
21777 and when we find out that we need to put these in, we will only do so
21778 for multidimensional arrays.) */
21779 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21780 #endif
21781
21782 if (TREE_CODE (type) == VECTOR_TYPE)
21783 {
21784 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21785 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21786 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21787 add_bound_info (subrange_die, DW_AT_upper_bound,
21788 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21789 }
21790 else
21791 add_subscript_info (array_die, type, collapse_nested_arrays);
21792
21793 /* Add representation of the type of the elements of this array type and
21794 emit the corresponding DIE if we haven't done it already. */
21795 element_type = TREE_TYPE (type);
21796 if (collapse_nested_arrays)
21797 while (TREE_CODE (element_type) == ARRAY_TYPE)
21798 {
21799 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21800 break;
21801 element_type = TREE_TYPE (element_type);
21802 }
21803
21804 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21805 TREE_CODE (type) == ARRAY_TYPE
21806 && TYPE_REVERSE_STORAGE_ORDER (type),
21807 context_die);
21808
21809 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21810 if (TYPE_ARTIFICIAL (type))
21811 add_AT_flag (array_die, DW_AT_artificial, 1);
21812
21813 if (get_AT (array_die, DW_AT_name))
21814 add_pubtype (type, array_die);
21815
21816 add_alignment_attribute (array_die, type);
21817 }
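
/* Two quick examples of the above: a GNU vector type such as

       typedef int v4si __attribute__ ((vector_size (16)));

   becomes a DW_TAG_array_type flagged with DW_AT_GNU_vector and a single
   subrange with bounds 0 .. 3, while a fixed-length Fortran character
   variable of kind 1 and length 10 is emitted as a DW_TAG_string_type
   with DW_AT_byte_size 10 rather than as an array of characters.  */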
21818
21819 /* This routine generates a DIE for an array with a hidden descriptor; details
21820 are filled into *info by a langhook. */
21821
21822 static void
21823 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21824 dw_die_ref context_die)
21825 {
21826 const dw_die_ref scope_die = scope_die_for (type, context_die);
21827 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21828 struct loc_descr_context context = { type, info->base_decl, NULL,
21829 false, false };
21830 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21831 int dim;
21832
21833 add_name_attribute (array_die, type_tag (type));
21834 equate_type_number_to_die (type, array_die);
21835
21836 if (info->ndimensions > 1)
21837 switch (info->ordering)
21838 {
21839 case array_descr_ordering_row_major:
21840 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21841 break;
21842 case array_descr_ordering_column_major:
21843 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21844 break;
21845 default:
21846 break;
21847 }
21848
21849 if (dwarf_version >= 3 || !dwarf_strict)
21850 {
21851 if (info->data_location)
21852 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
21853 dw_scalar_form_exprloc, &context);
21854 if (info->associated)
21855 add_scalar_info (array_die, DW_AT_associated, info->associated,
21856 dw_scalar_form_constant
21857 | dw_scalar_form_exprloc
21858 | dw_scalar_form_reference, &context);
21859 if (info->allocated)
21860 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
21861 dw_scalar_form_constant
21862 | dw_scalar_form_exprloc
21863 | dw_scalar_form_reference, &context);
21864 if (info->stride)
21865 {
21866 const enum dwarf_attribute attr
21867 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
21868 const int forms
21869 = (info->stride_in_bits)
21870 ? dw_scalar_form_constant
21871 : (dw_scalar_form_constant
21872 | dw_scalar_form_exprloc
21873 | dw_scalar_form_reference);
21874
21875 add_scalar_info (array_die, attr, info->stride, forms, &context);
21876 }
21877 }
21878 if (dwarf_version >= 5)
21879 {
21880 if (info->rank)
21881 {
21882 add_scalar_info (array_die, DW_AT_rank, info->rank,
21883 dw_scalar_form_constant
21884 | dw_scalar_form_exprloc, &context);
21885 subrange_tag = DW_TAG_generic_subrange;
21886 context.placeholder_arg = true;
21887 }
21888 }
21889
21890 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21891
21892 for (dim = 0; dim < info->ndimensions; dim++)
21893 {
21894 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
21895
21896 if (info->dimen[dim].bounds_type)
21897 add_type_attribute (subrange_die,
21898 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
21899 false, context_die);
21900 if (info->dimen[dim].lower_bound)
21901 add_bound_info (subrange_die, DW_AT_lower_bound,
21902 info->dimen[dim].lower_bound, &context);
21903 if (info->dimen[dim].upper_bound)
21904 add_bound_info (subrange_die, DW_AT_upper_bound,
21905 info->dimen[dim].upper_bound, &context);
21906 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
21907 add_scalar_info (subrange_die, DW_AT_byte_stride,
21908 info->dimen[dim].stride,
21909 dw_scalar_form_constant
21910 | dw_scalar_form_exprloc
21911 | dw_scalar_form_reference,
21912 &context);
21913 }
21914
21915 gen_type_die (info->element_type, context_die);
21916 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
21917 TREE_CODE (type) == ARRAY_TYPE
21918 && TYPE_REVERSE_STORAGE_ORDER (type),
21919 context_die);
21920
21921 if (get_AT (array_die, DW_AT_name))
21922 add_pubtype (type, array_die);
21923
21924 add_alignment_attribute (array_die, type);
21925 }
21926
21927 #if 0
21928 static void
21929 gen_entry_point_die (tree decl, dw_die_ref context_die)
21930 {
21931 tree origin = decl_ultimate_origin (decl);
21932 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
21933
21934 if (origin != NULL)
21935 add_abstract_origin_attribute (decl_die, origin);
21936 else
21937 {
21938 add_name_and_src_coords_attributes (decl_die, decl);
21939 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
21940 TYPE_UNQUALIFIED, false, context_die);
21941 }
21942
21943 if (DECL_ABSTRACT_P (decl))
21944 equate_decl_number_to_die (decl, decl_die);
21945 else
21946 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
21947 }
21948 #endif
21949
21950 /* Walk through the list of incomplete types again, trying once more to
21951 emit full debugging info for them. */
21952
21953 static void
21954 retry_incomplete_types (void)
21955 {
21956 set_early_dwarf s;
21957 int i;
21958
21959 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
21960 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
21961 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
21962 vec_safe_truncate (incomplete_types, 0);
21963 }
21964
21965 /* Determine what tag to use for a record type. */
21966
21967 static enum dwarf_tag
21968 record_type_tag (tree type)
21969 {
21970 if (! lang_hooks.types.classify_record)
21971 return DW_TAG_structure_type;
21972
21973 switch (lang_hooks.types.classify_record (type))
21974 {
21975 case RECORD_IS_STRUCT:
21976 return DW_TAG_structure_type;
21977
21978 case RECORD_IS_CLASS:
21979 return DW_TAG_class_type;
21980
21981 case RECORD_IS_INTERFACE:
21982 if (dwarf_version >= 3 || !dwarf_strict)
21983 return DW_TAG_interface_type;
21984 return DW_TAG_structure_type;
21985
21986 default:
21987 gcc_unreachable ();
21988 }
21989 }
21990
21991 /* Generate a DIE to represent an enumeration type. Note that these DIEs
21992 include all of the information about the enumeration values also. Each
21993 enumerated type name/value is listed as a child of the enumerated type
21994 DIE. */
21995
21996 static dw_die_ref
21997 gen_enumeration_type_die (tree type, dw_die_ref context_die)
21998 {
21999 dw_die_ref type_die = lookup_type_die (type);
22000 dw_die_ref orig_type_die = type_die;
22001
22002 if (type_die == NULL)
22003 {
22004 type_die = new_die (DW_TAG_enumeration_type,
22005 scope_die_for (type, context_die), type);
22006 equate_type_number_to_die (type, type_die);
22007 add_name_attribute (type_die, type_tag (type));
22008 if ((dwarf_version >= 4 || !dwarf_strict)
22009 && ENUM_IS_SCOPED (type))
22010 add_AT_flag (type_die, DW_AT_enum_class, 1);
22011 if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
22012 add_AT_flag (type_die, DW_AT_declaration, 1);
22013 if (!dwarf_strict)
22014 add_AT_unsigned (type_die, DW_AT_encoding,
22015 TYPE_UNSIGNED (type)
22016 ? DW_ATE_unsigned
22017 : DW_ATE_signed);
22018 }
22019 else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
22020 return type_die;
22021 else
22022 remove_AT (type_die, DW_AT_declaration);
22023
22024 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
22025 given enum type is incomplete, do not generate the DW_AT_byte_size
22026 attribute or the DW_AT_element_list attribute. */
22027 if (TYPE_SIZE (type))
22028 {
22029 tree link;
22030
22031 if (!ENUM_IS_OPAQUE (type))
22032 TREE_ASM_WRITTEN (type) = 1;
22033 if (!orig_type_die || !get_AT (type_die, DW_AT_byte_size))
22034 add_byte_size_attribute (type_die, type);
22035 if (!orig_type_die || !get_AT (type_die, DW_AT_alignment))
22036 add_alignment_attribute (type_die, type);
22037 if ((dwarf_version >= 3 || !dwarf_strict)
22038 && (!orig_type_die || !get_AT (type_die, DW_AT_type)))
22039 {
22040 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
22041 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
22042 context_die);
22043 }
22044 if (TYPE_STUB_DECL (type) != NULL_TREE)
22045 {
22046 if (!orig_type_die || !get_AT (type_die, DW_AT_decl_file))
22047 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
22048 if (!orig_type_die || !get_AT (type_die, DW_AT_accessibility))
22049 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
22050 }
22051
22052 /* If the first reference to this type was as the return type of an
22053 inline function, then it may not have a parent. Fix this now. */
22054 if (type_die->die_parent == NULL)
22055 add_child_die (scope_die_for (type, context_die), type_die);
22056
22057 for (link = TYPE_VALUES (type);
22058 link != NULL; link = TREE_CHAIN (link))
22059 {
22060 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
22061 tree value = TREE_VALUE (link);
22062
22063 gcc_assert (!ENUM_IS_OPAQUE (type));
22064 add_name_attribute (enum_die,
22065 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
22066
22067 if (TREE_CODE (value) == CONST_DECL)
22068 value = DECL_INITIAL (value);
22069
22070 if (simple_type_size_in_bits (TREE_TYPE (value))
22071 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
22072 {
22073 /* For constant forms created by add_AT_unsigned, DWARF
22074 consumers (GDB, elfutils, etc.) always zero-extend
22075 the value. Only when the actual value is negative
22076 do we need to use add_AT_int to generate a constant
22077 form that can represent negative values. */
22078 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
22079 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
22080 add_AT_unsigned (enum_die, DW_AT_const_value,
22081 (unsigned HOST_WIDE_INT) val);
22082 else
22083 add_AT_int (enum_die, DW_AT_const_value, val);
22084 }
22085 else
22086 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
22087 that here. TODO: This should be re-worked to use correct
22088 signed/unsigned double tags for all cases. */
22089 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
22090 }
22091
22092 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22093 if (TYPE_ARTIFICIAL (type)
22094 && (!orig_type_die || !get_AT (type_die, DW_AT_artificial)))
22095 add_AT_flag (type_die, DW_AT_artificial, 1);
22096 }
22097 else
22098 add_AT_flag (type_die, DW_AT_declaration, 1);
22099
22100 add_pubtype (type, type_die);
22101
22102 return type_die;
22103 }
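
/* A small example of the DIEs produced here: the C declaration

       enum color { RED = 1, GREEN = 2, BLUE = 4 };

   yields roughly

       DW_TAG_enumeration_type: DW_AT_name "color", DW_AT_byte_size 4
         DW_TAG_enumerator: DW_AT_name "RED",   DW_AT_const_value 1
         DW_TAG_enumerator: DW_AT_name "GREEN", DW_AT_const_value 2
         DW_TAG_enumerator: DW_AT_name "BLUE",  DW_AT_const_value 4

   with DW_AT_type pointing at the underlying integer type for DWARF 3
   and later, and DW_AT_declaration instead of the enumerators when only
   an incomplete enum is known.  */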
22104
22105 /* Generate a DIE to represent either a real live formal parameter decl or to
22106 represent just the type of some formal parameter position in some function
22107 type.
22108
22109 Note that this routine is a bit unusual because its argument may be a
22110 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
22111 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
22112 node. If it's the former then this function is being called to output a
22113 DIE to represent a formal parameter object (or some inlining thereof). If
22114 it's the latter, then this function is only being called to output a
22115 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
22116 argument type of some subprogram type.
22117 If EMIT_NAME_P is true, name and source coordinate attributes
22118 are emitted. */
22119
22120 static dw_die_ref
22121 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
22122 dw_die_ref context_die)
22123 {
22124 tree node_or_origin = node ? node : origin;
22125 tree ultimate_origin;
22126 dw_die_ref parm_die = NULL;
22127
22128 if (DECL_P (node_or_origin))
22129 {
22130 parm_die = lookup_decl_die (node);
22131
22132 /* If the contexts differ, we may not be talking about the same
22133 thing.
22134 ??? In LTO the DIE parent is the "abstract" copy and the
22135 context_die is the specification "copy". But this whole block
22136 should eventually no longer be needed. */
22137 if (parm_die && parm_die->die_parent != context_die && !in_lto_p)
22138 {
22139 if (!DECL_ABSTRACT_P (node))
22140 {
22141 /* This can happen when creating an inlined instance, in
22142 which case we need to create a new DIE that will get
22143 annotated with DW_AT_abstract_origin. */
22144 parm_die = NULL;
22145 }
22146 else
22147 gcc_unreachable ();
22148 }
22149
22150 if (parm_die && parm_die->die_parent == NULL)
22151 {
22152 /* Check that parm_die already has the right attributes that
22153 we would have added below. If any attributes are
22154 missing, fall through to add them. */
22155 if (! DECL_ABSTRACT_P (node_or_origin)
22156 && !get_AT (parm_die, DW_AT_location)
22157 && !get_AT (parm_die, DW_AT_const_value))
22158 /* We are missing location info, and are about to add it. */
22159 ;
22160 else
22161 {
22162 add_child_die (context_die, parm_die);
22163 return parm_die;
22164 }
22165 }
22166 }
22167
22168 /* If we have a previously generated DIE, use it, unless this is a
22169 concrete instance (origin != NULL), in which case we need a new
22170 DIE with a corresponding DW_AT_abstract_origin. */
22171 bool reusing_die;
22172 if (parm_die && origin == NULL)
22173 reusing_die = true;
22174 else
22175 {
22176 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22177 reusing_die = false;
22178 }
22179
22180 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22181 {
22182 case tcc_declaration:
22183 ultimate_origin = decl_ultimate_origin (node_or_origin);
22184 if (node || ultimate_origin)
22185 origin = ultimate_origin;
22186
22187 if (reusing_die)
22188 goto add_location;
22189
22190 if (origin != NULL)
22191 add_abstract_origin_attribute (parm_die, origin);
22192 else if (emit_name_p)
22193 add_name_and_src_coords_attributes (parm_die, node);
22194 if (origin == NULL
22195 || (! DECL_ABSTRACT_P (node_or_origin)
22196 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22197 decl_function_context
22198 (node_or_origin))))
22199 {
22200 tree type = TREE_TYPE (node_or_origin);
22201 if (decl_by_reference_p (node_or_origin))
22202 add_type_attribute (parm_die, TREE_TYPE (type),
22203 TYPE_UNQUALIFIED,
22204 false, context_die);
22205 else
22206 add_type_attribute (parm_die, type,
22207 decl_quals (node_or_origin),
22208 false, context_die);
22209 }
22210 if (origin == NULL && DECL_ARTIFICIAL (node))
22211 add_AT_flag (parm_die, DW_AT_artificial, 1);
22212 add_location:
22213 if (node && node != origin)
22214 equate_decl_number_to_die (node, parm_die);
22215 if (! DECL_ABSTRACT_P (node_or_origin))
22216 add_location_or_const_value_attribute (parm_die, node_or_origin,
22217 node == NULL);
22218
22219 break;
22220
22221 case tcc_type:
22222 /* We were called with some kind of a ..._TYPE node. */
22223 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22224 context_die);
22225 break;
22226
22227 default:
22228 gcc_unreachable ();
22229 }
22230
22231 return parm_die;
22232 }
22233
22234 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22235 children DW_TAG_formal_parameter DIEs representing the arguments of the
22236 parameter pack.
22237
22238 PARM_PACK must be a function parameter pack.
22239 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22240 must point to the subsequent arguments of the function PACK_ARG belongs to.
22241 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22242 If NEXT_ARG is non-NULL, *NEXT_ARG is set to the function argument
22243 following the last one for which a DIE was generated. */
22244
22245 static dw_die_ref
22246 gen_formal_parameter_pack_die (tree parm_pack,
22247 tree pack_arg,
22248 dw_die_ref subr_die,
22249 tree *next_arg)
22250 {
22251 tree arg;
22252 dw_die_ref parm_pack_die;
22253
22254 gcc_assert (parm_pack
22255 && lang_hooks.function_parameter_pack_p (parm_pack)
22256 && subr_die);
22257
22258 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22259 add_src_coords_attributes (parm_pack_die, parm_pack);
22260
22261 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22262 {
22263 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22264 parm_pack))
22265 break;
22266 gen_formal_parameter_die (arg, NULL,
22267 false /* Don't emit name attribute. */,
22268 parm_pack_die);
22269 }
22270 if (next_arg)
22271 *next_arg = arg;
22272 return parm_pack_die;
22273 }
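
/* For instance, an instantiation of a C++11 variadic function template

       template <typename... T> void f (T... args);

   called as `f (1, 2.0)' would be described with a
   DW_TAG_GNU_formal_parameter_pack DIE under the subprogram, containing
   one DW_TAG_formal_parameter child per expanded argument, here one of
   type int and one of type double.  */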
22274
22275 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22276 at the end of an (ANSI prototyped) formal parameters list. */
22277
22278 static void
22279 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22280 {
22281 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22282 }
22283
22284 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22285 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22286 parameters as specified in some function type specification (except for
22287 those which appear as part of a function *definition*). */
22288
22289 static void
22290 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22291 {
22292 tree link;
22293 tree formal_type = NULL;
22294 tree first_parm_type;
22295 tree arg;
22296
22297 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22298 {
22299 arg = DECL_ARGUMENTS (function_or_method_type);
22300 function_or_method_type = TREE_TYPE (function_or_method_type);
22301 }
22302 else
22303 arg = NULL_TREE;
22304
22305 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22306
22307 /* Make our first pass over the list of formal parameter types and output a
22308 DW_TAG_formal_parameter DIE for each one. */
22309 for (link = first_parm_type; link; )
22310 {
22311 dw_die_ref parm_die;
22312
22313 formal_type = TREE_VALUE (link);
22314 if (formal_type == void_type_node)
22315 break;
22316
22317 /* Output a (nameless) DIE to represent the formal parameter itself. */
22318 parm_die = gen_formal_parameter_die (formal_type, NULL,
22319 true /* Emit name attribute. */,
22320 context_die);
22321 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22322 && link == first_parm_type)
22323 {
22324 add_AT_flag (parm_die, DW_AT_artificial, 1);
22325 if (dwarf_version >= 3 || !dwarf_strict)
22326 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22327 }
22328 else if (arg && DECL_ARTIFICIAL (arg))
22329 add_AT_flag (parm_die, DW_AT_artificial, 1);
22330
22331 link = TREE_CHAIN (link);
22332 if (arg)
22333 arg = DECL_CHAIN (arg);
22334 }
22335
22336 /* If this function type has an ellipsis, add a
22337 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22338 if (formal_type != void_type_node)
22339 gen_unspecified_parameters_die (function_or_method_type, context_die);
22340
22341 /* Make our second (and final) pass over the list of formal parameter types
22342 and output DIEs to represent those types (as necessary). */
22343 for (link = TYPE_ARG_TYPES (function_or_method_type);
22344 link && TREE_VALUE (link);
22345 link = TREE_CHAIN (link))
22346 gen_type_die (TREE_VALUE (link), context_die);
22347 }
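
/* As an example of the two passes above, a pointer-to-member-function
   type such as `void (A::*) (int)' gets, under its DW_TAG_subroutine_type
   DIE, a first artificial DW_TAG_formal_parameter for the implicit `this'
   pointer (also referenced by DW_AT_object_pointer for DWARF 3 or
   non-strict output), followed by a parameter DIE for `int'; a trailing
   ellipsis would additionally append a DW_TAG_unspecified_parameters
   DIE.  */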
22348
22349 /* We want to generate the DIE for TYPE so that we can generate the
22350 die for MEMBER, which has been defined; we will need to refer back
22351 to the member declaration nested within TYPE. If we're trying to
22352 generate minimal debug info for TYPE, processing TYPE won't do the
22353 trick; we need to attach the member declaration by hand. */
22354
22355 static void
22356 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22357 {
22358 gen_type_die (type, context_die);
22359
22360 /* If we're trying to avoid duplicate debug info, we may not have
22361 emitted the member decl for this function. Emit it now. */
22362 if (TYPE_STUB_DECL (type)
22363 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22364 && ! lookup_decl_die (member))
22365 {
22366 dw_die_ref type_die;
22367 gcc_assert (!decl_ultimate_origin (member));
22368
22369 type_die = lookup_type_die_strip_naming_typedef (type);
22370 if (TREE_CODE (member) == FUNCTION_DECL)
22371 gen_subprogram_die (member, type_die);
22372 else if (TREE_CODE (member) == FIELD_DECL)
22373 {
22374 /* Ignore the nameless fields that are used to skip bits but handle
22375 C++ anonymous unions and structs. */
22376 if (DECL_NAME (member) != NULL_TREE
22377 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22378 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22379 {
22380 struct vlr_context vlr_ctx = {
22381 DECL_CONTEXT (member), /* struct_type */
22382 NULL_TREE /* variant_part_offset */
22383 };
22384 gen_type_die (member_declared_type (member), type_die);
22385 gen_field_die (member, &vlr_ctx, type_die);
22386 }
22387 }
22388 else
22389 gen_variable_die (member, NULL_TREE, type_die);
22390 }
22391 }
22392 \f
22393 /* Forward declare these functions, because they are mutually recursive
22394 with their set_block_* pairing functions. */
22395 static void set_decl_origin_self (tree);
22396
22397 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22398 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22399 that it points to the node itself, thus indicating that the node is its
22400 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22401 the given node is NULL, recursively descend the decl/block tree which
22402 it is the root of, and for each other ..._DECL or BLOCK node contained
22403 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22404 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22405 values to point to themselves. */
22406
22407 static void
22408 set_block_origin_self (tree stmt)
22409 {
22410 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22411 {
22412 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22413
22414 {
22415 tree local_decl;
22416
22417 for (local_decl = BLOCK_VARS (stmt);
22418 local_decl != NULL_TREE;
22419 local_decl = DECL_CHAIN (local_decl))
22420 /* Do not recurse on nested functions since the inlining status
22421 of parent and child can be different as per the DWARF spec. */
22422 if (TREE_CODE (local_decl) != FUNCTION_DECL
22423 && !DECL_EXTERNAL (local_decl))
22424 set_decl_origin_self (local_decl);
22425 }
22426
22427 {
22428 tree subblock;
22429
22430 for (subblock = BLOCK_SUBBLOCKS (stmt);
22431 subblock != NULL_TREE;
22432 subblock = BLOCK_CHAIN (subblock))
22433 set_block_origin_self (subblock); /* Recurse. */
22434 }
22435 }
22436 }
22437
22438 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22439 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22440 node so that it points to the node itself, thus indicating that the
22441 node represents its own (abstract) origin. Additionally, if the
22442 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22443 the decl/block tree of which the given node is the root, and for
22444 each other ..._DECL or BLOCK node contained therein whose
22445 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22446 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22447 point to themselves. */
22448
22449 static void
22450 set_decl_origin_self (tree decl)
22451 {
22452 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22453 {
22454 DECL_ABSTRACT_ORIGIN (decl) = decl;
22455 if (TREE_CODE (decl) == FUNCTION_DECL)
22456 {
22457 tree arg;
22458
22459 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22460 DECL_ABSTRACT_ORIGIN (arg) = arg;
22461 if (DECL_INITIAL (decl) != NULL_TREE
22462 && DECL_INITIAL (decl) != error_mark_node)
22463 set_block_origin_self (DECL_INITIAL (decl));
22464 }
22465 }
22466 }
22467 \f
22468 /* Mark the early DIE for DECL as the abstract instance. */
22469
22470 static void
22471 dwarf2out_abstract_function (tree decl)
22472 {
22473 dw_die_ref old_die;
22474
22475 /* Make sure we have the actual abstract inline, not a clone. */
22476 decl = DECL_ORIGIN (decl);
22477
22478 if (DECL_IGNORED_P (decl))
22479 return;
22480
22481 /* In LTO we're all set. We already created abstract instances
22482 early and we want to avoid creating a concrete instance of that
22483 if we don't output it. */
22484 if (in_lto_p)
22485 return;
22486
22487 old_die = lookup_decl_die (decl);
22488 gcc_assert (old_die != NULL);
22489 if (get_AT (old_die, DW_AT_inline))
22490 /* We've already generated the abstract instance. */
22491 return;
22492
22493 /* Go ahead and put DW_AT_inline on the DIE. */
22494 if (DECL_DECLARED_INLINE_P (decl))
22495 {
22496 if (cgraph_function_possibly_inlined_p (decl))
22497 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22498 else
22499 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22500 }
22501 else
22502 {
22503 if (cgraph_function_possibly_inlined_p (decl))
22504 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22505 else
22506 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22507 }
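/* To summarize the choice above: DECL_DECLARED_INLINE_P selects between the
   DW_INL_declared_* and the plain DW_INL_* values, while
   cgraph_function_possibly_inlined_p selects the *_inlined rather than the
   *_not_inlined variant.  */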
22508
22509 if (DECL_DECLARED_INLINE_P (decl)
22510 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22511 add_AT_flag (old_die, DW_AT_artificial, 1);
22512
22513 set_decl_origin_self (decl);
22514 }
22515
22516 /* Helper function of premark_used_types() which gets called through
22517 htab_traverse.
22518
22519 Marks the DIE of the given TYPE as perennial, so it never gets
22520 marked as unused by prune_unused_types. */
22521
22522 bool
22523 premark_used_types_helper (tree const &type, void *)
22524 {
22525 dw_die_ref die;
22526
22527 die = lookup_type_die (type);
22528 if (die != NULL)
22529 die->die_perennial_p = 1;
22530 return true;
22531 }
22532
22533 /* Helper function of premark_types_used_by_global_vars which gets called
22534 through htab_traverse.
22535
22536 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22537 marked as unused by prune_unused_types. The DIE of the type is marked
22538 only if the global variable using the type will actually be emitted. */
22539
22540 int
22541 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22542 void *)
22543 {
22544 struct types_used_by_vars_entry *entry;
22545 dw_die_ref die;
22546
22547 entry = (struct types_used_by_vars_entry *) *slot;
22548 gcc_assert (entry->type != NULL
22549 && entry->var_decl != NULL);
22550 die = lookup_type_die (entry->type);
22551 if (die)
22552 {
22553 /* Ask cgraph if the global variable really is to be emitted.
22554 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22555 varpool_node *node = varpool_node::get (entry->var_decl);
22556 if (node && node->definition)
22557 {
22558 die->die_perennial_p = 1;
22559 /* Keep the parent DIEs as well. */
22560 while ((die = die->die_parent) && die->die_perennial_p == 0)
22561 die->die_perennial_p = 1;
22562 }
22563 }
22564 return 1;
22565 }
22566
22567 /* Mark all members of used_types_hash as perennial. */
22568
22569 static void
22570 premark_used_types (struct function *fun)
22571 {
22572 if (fun && fun->used_types_hash)
22573 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22574 }
22575
22576 /* Mark all members of types_used_by_vars_entry as perennial. */
22577
22578 static void
22579 premark_types_used_by_global_vars (void)
22580 {
22581 if (types_used_by_vars_hash)
22582 types_used_by_vars_hash
22583 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22584 }
22585
22586 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22587 for CA_LOC call arg loc node. */
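
/* The new DIE is attached to the DIE of the innermost enclosing BLOCK that
   already has one (falling back to SUBR_DIE), and carries the call's return
   address label as DW_AT_call_return_pc, a DW_AT_call_tail_call flag for
   tail calls, and DW_AT_call_origin as either a reference to the callee's
   DIE or, failing that, the callee's address.  */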
22588
22589 static dw_die_ref
22590 gen_call_site_die (tree decl, dw_die_ref subr_die,
22591 struct call_arg_loc_node *ca_loc)
22592 {
22593 dw_die_ref stmt_die = NULL, die;
22594 tree block = ca_loc->block;
22595
22596 while (block
22597 && block != DECL_INITIAL (decl)
22598 && TREE_CODE (block) == BLOCK)
22599 {
22600 stmt_die = lookup_block_die (block);
22601 if (stmt_die)
22602 break;
22603 block = BLOCK_SUPERCONTEXT (block);
22604 }
22605 if (stmt_die == NULL)
22606 stmt_die = subr_die;
22607 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22608 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22609 if (ca_loc->tail_call_p)
22610 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22611 if (ca_loc->symbol_ref)
22612 {
22613 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22614 if (tdie)
22615 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22616 else
22617 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22618 false);
22619 }
22620 return die;
22621 }
22622
22623 /* Generate a DIE to represent a declared function (either file-scope or
22624 block-local). */
22625
22626 static void
22627 gen_subprogram_die (tree decl, dw_die_ref context_die)
22628 {
22629 tree origin = decl_ultimate_origin (decl);
22630 dw_die_ref subr_die;
22631 dw_die_ref old_die = lookup_decl_die (decl);
22632
22633 /* This function gets called multiple times for different stages of
22634 the debug process. For example, for func() in this code:
22635
22636 namespace S
22637 {
22638 void func() { ... }
22639 }
22640
22641 ...we get called 4 times. Twice in early debug and twice in
22642 late debug:
22643
22644 Early debug
22645 -----------
22646
22647 1. Once while generating func() within the namespace. This is
22648 the declaration. The declaration bit below is set, as the
22649 context is the namespace.
22650
22651 A new DIE will be generated with DW_AT_declaration set.
22652
22653 2. Once for func() itself. This is the specification. The
22654 declaration bit below is clear as the context is the CU.
22655
22656 We will use the cached DIE from (1) to create a new DIE with
22657 DW_AT_specification pointing to the declaration in (1).
22658
22659 Late debug via rest_of_handle_final()
22660 -------------------------------------
22661
22662 3. Once generating func() within the namespace. This is also the
22663 declaration, as in (1), but this time we will early exit below
22664 as we have a cached DIE and a declaration needs no additional
22665 annotations (no locations), as the source declaration line
22666 info is enough.
22667
22668 4. Once for func() itself. As in (2), this is the specification,
22669 but this time we will re-use the cached DIE, and just annotate
22670 it with the location information that should now be available.
22671
22672 For something without namespaces, but with abstract instances, we
22673 are also called multiple times:
22674
22675 class Base
22676 {
22677 public:
22678 Base (); // constructor declaration (1)
22679 };
22680
22681 Base::Base () { } // constructor specification (2)
22682
22683 Early debug
22684 -----------
22685
22686 1. Once for the Base() constructor by virtue of it being a
22687 member of the Base class. This is done via
22688 rest_of_type_compilation.
22689
22690 This is a declaration, so a new DIE will be created with
22691 DW_AT_declaration.
22692
22693 2. Once for the Base() constructor definition, but this time
22694 while generating the abstract instance of the base
22695 constructor (__base_ctor) which is being generated via early
22696 debug of reachable functions.
22697
22698 Even though we have a cached version of the declaration (1),
22699 we will create a DW_AT_specification of the declaration DIE
22700 in (1).
22701
22702 3. Once for the __base_ctor itself, but this time, we generate
22703 a DW_AT_abstract_origin version of the DW_AT_specification in
22704 (2).
22705
22706 Late debug via rest_of_handle_final
22707 -----------------------------------
22708
22709 4. One final time for the __base_ctor (which will have a cached
22710 DIE with DW_AT_abstract_origin created in (3)). This time,
22711 we will just annotate the location information now
22712 available.
22713 */
22714 int declaration = (current_function_decl != decl
22715 || class_or_namespace_scope_p (context_die));
22716
22717 /* A declaration that has been previously dumped needs no
22718 additional information. */
22719 if (old_die && declaration)
22720 return;
22721
22722 /* Now that the C++ front end lazily declares artificial member fns, we
22723 might need to retrofit the declaration into its class. */
22724 if (!declaration && !origin && !old_die
22725 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22726 && !class_or_namespace_scope_p (context_die)
22727 && debug_info_level > DINFO_LEVEL_TERSE)
22728 old_die = force_decl_die (decl);
22729
22730 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22731 if (origin != NULL)
22732 {
22733 gcc_assert (!declaration || local_scope_p (context_die));
22734
22735 /* Fixup die_parent for the abstract instance of a nested
22736 inline function. */
22737 if (old_die && old_die->die_parent == NULL)
22738 add_child_die (context_die, old_die);
22739
22740 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22741 {
22742 /* If we have a DW_AT_abstract_origin we have a working
22743 cached version. */
22744 subr_die = old_die;
22745 }
22746 else
22747 {
22748 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22749 add_abstract_origin_attribute (subr_die, origin);
22750 /* This is where the actual code for a cloned function is.
22751 Let's emit linkage name attribute for it. This helps
22752 debuggers to, e.g., set breakpoints into
22753 constructors/destructors when the user asks "break
22754 K::K". */
22755 add_linkage_name (subr_die, decl);
22756 }
22757 }
22758 /* A cached copy, possibly from early dwarf generation. Reuse as
22759 much as possible. */
22760 else if (old_die)
22761 {
22762 if (!get_AT_flag (old_die, DW_AT_declaration)
22763 /* We can have a normal definition following an inline one in the
22764 case of redefinition of GNU C extern inlines.
22765 It seems reasonable to use AT_specification in this case. */
22766 && !get_AT (old_die, DW_AT_inline))
22767 {
22768 /* Detect and ignore this case, where we are trying to output
22769 something we have already output. */
22770 if (get_AT (old_die, DW_AT_low_pc)
22771 || get_AT (old_die, DW_AT_ranges))
22772 return;
22773
22774 /* If we have no location information, this must be a
22775 partially generated DIE from early dwarf generation.
22776 Fall through and generate it. */
22777 }
22778
22779 /* If the definition comes from the same place as the declaration,
22780 maybe use the old DIE. We always want the DIE for this function
22781 that has the *_pc attributes to be under comp_unit_die so the
22782 debugger can find it. We also need to do this for abstract
22783 instances of inlines, since the spec requires the out-of-line copy
22784 to have the same parent. For local class methods, this doesn't
22785 apply; we just use the old DIE. */
22786 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22787 struct dwarf_file_data * file_index = lookup_filename (s.file);
22788 if (((is_unit_die (old_die->die_parent)
22789 /* This condition fixes the inconsistency/ICE with the
22790 following Fortran test (or some derivative thereof) while
22791 building libgfortran:
22792
22793 module some_m
22794 contains
22795 logical function funky (FLAG)
22796 funky = .true.
22797 end function
22798 end module
22799 */
22800 || (old_die->die_parent
22801 && old_die->die_parent->die_tag == DW_TAG_module)
22802 || local_scope_p (old_die->die_parent)
22803 || context_die == NULL)
22804 && (DECL_ARTIFICIAL (decl)
22805 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22806 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22807 == (unsigned) s.line)
22808 && (!debug_column_info
22809 || s.column == 0
22810 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22811 == (unsigned) s.column)))))
22812 /* With LTO if there's an abstract instance for
22813 the old DIE, this is a concrete instance and
22814 thus re-use the DIE. */
22815 || get_AT (old_die, DW_AT_abstract_origin))
22816 {
22817 subr_die = old_die;
22818
22819 /* Clear out the declaration attribute, but leave the
22820 parameters so they can be augmented with location
22821 information later. Unless this was a declaration, in
22822 which case, wipe out the nameless parameters and recreate
22823 them further down. */
22824 if (remove_AT (subr_die, DW_AT_declaration))
22825 {
22826
22827 remove_AT (subr_die, DW_AT_object_pointer);
22828 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22829 }
22830 }
22831 /* Make a specification pointing to the previously built
22832 declaration. */
22833 else
22834 {
22835 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22836 add_AT_specification (subr_die, old_die);
22837 add_pubname (decl, subr_die);
22838 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22839 add_AT_file (subr_die, DW_AT_decl_file, file_index);
22840 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22841 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
22842 if (debug_column_info
22843 && s.column
22844 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22845 != (unsigned) s.column))
22846 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
22847
22848 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
22849 emit the real type on the definition die. */
22850 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
22851 {
22852 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
22853 if (die == auto_die || die == decltype_auto_die)
22854 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22855 TYPE_UNQUALIFIED, false, context_die);
22856 }
22857
22858 /* When we process the method declaration, we haven't seen
22859 the out-of-class defaulted definition yet, so we have to
22860 recheck now. */
22861 if ((dwarf_version >= 5 || ! dwarf_strict)
22862 && !get_AT (subr_die, DW_AT_defaulted))
22863 {
22864 int defaulted
22865 = lang_hooks.decls.decl_dwarf_attribute (decl,
22866 DW_AT_defaulted);
22867 if (defaulted != -1)
22868 {
22869 /* Other values must have been handled before. */
22870 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
22871 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22872 }
22873 }
22874 }
22875 }
22876 /* Create a fresh DIE for anything else. */
22877 else
22878 {
22879 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22880
22881 if (TREE_PUBLIC (decl))
22882 add_AT_flag (subr_die, DW_AT_external, 1);
22883
22884 add_name_and_src_coords_attributes (subr_die, decl);
22885 add_pubname (decl, subr_die);
22886 if (debug_info_level > DINFO_LEVEL_TERSE)
22887 {
22888 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
22889 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22890 TYPE_UNQUALIFIED, false, context_die);
22891 }
22892
22893 add_pure_or_virtual_attribute (subr_die, decl);
22894 if (DECL_ARTIFICIAL (decl))
22895 add_AT_flag (subr_die, DW_AT_artificial, 1);
22896
22897 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
22898 add_AT_flag (subr_die, DW_AT_noreturn, 1);
22899
22900 add_alignment_attribute (subr_die, decl);
22901
22902 add_accessibility_attribute (subr_die, decl);
22903 }
22904
22905 /* Unless we have an existing non-declaration DIE, equate the new
22906 DIE. */
22907 if (!old_die || is_declaration_die (old_die))
22908 equate_decl_number_to_die (decl, subr_die);
22909
22910 if (declaration)
22911 {
22912 if (!old_die || !get_AT (old_die, DW_AT_inline))
22913 {
22914 add_AT_flag (subr_die, DW_AT_declaration, 1);
22915
22916 /* If this is an explicit function declaration then generate
22917 a DW_AT_explicit attribute. */
22918 if ((dwarf_version >= 3 || !dwarf_strict)
22919 && lang_hooks.decls.decl_dwarf_attribute (decl,
22920 DW_AT_explicit) == 1)
22921 add_AT_flag (subr_die, DW_AT_explicit, 1);
22922
22923 /* If this is a C++11 deleted special function member then generate
22924 a DW_AT_deleted attribute. */
22925 if ((dwarf_version >= 5 || !dwarf_strict)
22926 && lang_hooks.decls.decl_dwarf_attribute (decl,
22927 DW_AT_deleted) == 1)
22928 add_AT_flag (subr_die, DW_AT_deleted, 1);
22929
22930 /* If this is a C++11 defaulted special function member then
22931 generate a DW_AT_defaulted attribute. */
22932 if (dwarf_version >= 5 || !dwarf_strict)
22933 {
22934 int defaulted
22935 = lang_hooks.decls.decl_dwarf_attribute (decl,
22936 DW_AT_defaulted);
22937 if (defaulted != -1)
22938 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22939 }
22940
22941 /* If this is a C++11 non-static member function with & ref-qualifier
22942 then generate a DW_AT_reference attribute. */
22943 if ((dwarf_version >= 5 || !dwarf_strict)
22944 && lang_hooks.decls.decl_dwarf_attribute (decl,
22945 DW_AT_reference) == 1)
22946 add_AT_flag (subr_die, DW_AT_reference, 1);
22947
22948 /* If this is a C++11 non-static member function with &&
22949 ref-qualifier then generate a DW_AT_reference attribute. */
22950 if ((dwarf_version >= 5 || !dwarf_strict)
22951 && lang_hooks.decls.decl_dwarf_attribute (decl,
22952 DW_AT_rvalue_reference)
22953 == 1)
22954 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
22955 }
22956 }
22957 /* For non DECL_EXTERNALs, if range information is available, fill
22958 the DIE with it. */
22959 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
22960 {
22961 HOST_WIDE_INT cfa_fb_offset;
22962
22963 struct function *fun = DECL_STRUCT_FUNCTION (decl);
22964
22965 if (!crtl->has_bb_partition)
22966 {
22967 dw_fde_ref fde = fun->fde;
22968 if (fde->dw_fde_begin)
22969 {
22970 /* We have already generated the labels. */
22971 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22972 fde->dw_fde_end, false);
22973 }
22974 else
22975 {
22976 /* Create start/end labels and add the range. */
22977 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
22978 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
22979 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
22980 current_function_funcdef_no);
22981 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
22982 current_function_funcdef_no);
22983 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
22984 false);
22985 }
22986
22987 #if VMS_DEBUGGING_INFO
22988 /* HP OpenVMS Industry Standard 64: DWARF Extensions
22989 Section 2.3 Prologue and Epilogue Attributes:
22990 When a breakpoint is set on entry to a function, it is generally
22991 desirable for execution to be suspended, not on the very first
22992 instruction of the function, but rather at a point after the
22993 function's frame has been set up, after any language defined local
22994 declaration processing has been completed, and before execution of
22995 the first statement of the function begins. Debuggers generally
22996 cannot properly determine where this point is. Similarly for a
22997 breakpoint set on exit from a function. The prologue and epilogue
22998 attributes allow a compiler to communicate the location(s) to use. */
22999
23000 {
23001 if (fde->dw_fde_vms_end_prologue)
23002 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
23003 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
23004
23005 if (fde->dw_fde_vms_begin_epilogue)
23006 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
23007 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
23008 }
23009 #endif
23010
23011 }
23012 else
23013 {
23014 /* Generate pubnames entries for the split function code ranges. */
23015 dw_fde_ref fde = fun->fde;
23016
23017 if (fde->dw_fde_second_begin)
23018 {
23019 if (dwarf_version >= 3 || !dwarf_strict)
23020 {
23021 /* We should use ranges for non-contiguous code section
23022 addresses. Use the actual code range for the initial
23023 section, since the HOT/COLD labels might precede an
23024 alignment offset. */
23025 bool range_list_added = false;
23026 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
23027 fde->dw_fde_end, &range_list_added,
23028 false);
23029 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
23030 fde->dw_fde_second_end,
23031 &range_list_added, false);
23032 if (range_list_added)
23033 add_ranges (NULL);
23034 }
23035 else
23036 {
23037 /* There is no real support in DWARF 2 for this, so we make
23038 a work-around. First, emit the pub name for the segment
23039 containing the function label. Then make and emit a
23040 simplified subprogram DIE for the second segment with the
23041 name prefixed by __second_sect_of_. We use the same
23042 linkage name for the second DIE so that gdb will find both
23043 sections when given "b foo". */
23044 const char *name = NULL;
23045 tree decl_name = DECL_NAME (decl);
23046 dw_die_ref seg_die;
23047
23048 /* Do the 'primary' section. */
23049 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23050 fde->dw_fde_end, false);
23051
23052 /* Build a minimal DIE for the secondary section. */
23053 seg_die = new_die (DW_TAG_subprogram,
23054 subr_die->die_parent, decl);
23055
23056 if (TREE_PUBLIC (decl))
23057 add_AT_flag (seg_die, DW_AT_external, 1);
23058
23059 if (decl_name != NULL
23060 && IDENTIFIER_POINTER (decl_name) != NULL)
23061 {
23062 name = dwarf2_name (decl, 1);
23063 if (! DECL_ARTIFICIAL (decl))
23064 add_src_coords_attributes (seg_die, decl);
23065
23066 add_linkage_name (seg_die, decl);
23067 }
23068 gcc_assert (name != NULL);
23069 add_pure_or_virtual_attribute (seg_die, decl);
23070 if (DECL_ARTIFICIAL (decl))
23071 add_AT_flag (seg_die, DW_AT_artificial, 1);
23072
23073 name = concat ("__second_sect_of_", name, NULL);
23074 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
23075 fde->dw_fde_second_end, false);
23076 add_name_attribute (seg_die, name);
23077 if (want_pubnames ())
23078 add_pubname_string (name, seg_die);
23079 }
23080 }
23081 else
23082 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
23083 false);
23084 }
23085
23086 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
23087
23088 /* We define the "frame base" as the function's CFA. This is more
23089 convenient for several reasons: (1) It's stable across the prologue
23090 and epilogue, which makes it better than just a frame pointer,
23091 (2) With dwarf3, there exists a one-byte encoding that allows us
23092 to reference the .debug_frame data by proxy, but failing that,
23093 (3) We can at least reuse the code inspection and interpretation
23094 code that determines the CFA position at various points in the
23095 function. */
23096 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
23097 {
23098 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
23099 add_AT_loc (subr_die, DW_AT_frame_base, op);
23100 }
23101 else
23102 {
23103 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
23104 if (list->dw_loc_next)
23105 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
23106 else
23107 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
23108 }
23109
23110 /* Compute a displacement from the "steady-state frame pointer" to
23111 the CFA. The former is what all stack slots and argument slots
23112 will reference in the rtl; the latter is what we've told the
23113 debugger about. We'll need to adjust all frame_base references
23114 by this displacement. */
23115 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
23116
23117 if (fun->static_chain_decl)
23118 {
23119 /* DWARF requires here a location expression that computes the
23120 address of the enclosing subprogram's frame base. The machinery
23121 in tree-nested.c is supposed to store this specific address in the
23122 last field of the FRAME record. */
23123 const tree frame_type
23124 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
23125 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
23126
23127 tree fb_expr
23128 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
23129 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
23130 fb_expr, fb_decl, NULL_TREE);
23131
23132 add_AT_location_description (subr_die, DW_AT_static_link,
23133 loc_list_from_tree (fb_expr, 0, NULL));
23134 }
23135
23136 resolve_variable_values ();
23137 }
23138
23139 /* Generate child DIEs for template parameters. */
23140 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
23141 gen_generic_params_dies (decl);
23142
23143 /* Now output descriptions of the arguments for this function. This gets
23144 (unnecessarily?) complex because the DECL_ARGUMENTS list
23145 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
23146 `...' at the end of the formal parameter list. In order to find out if
23147 there was a trailing ellipsis or not, we must instead look at the type
23148 associated with the FUNCTION_DECL. This will be a node of type
23149 FUNCTION_TYPE. If the chain of type nodes hanging off of this
23150 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
23151 an ellipsis at the end. */
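
/* For example, given
     int f (int a, ...);
   the FUNCTION_TYPE's argument chain does not end in void_type_node, so a
   DW_TAG_unspecified_parameters child is normally emitted further below,
   whereas
     int g (int a);
   does end in void_type_node and gets no such child.  */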
23152
23153 /* In the case where we are describing a mere function declaration, all we
23154 need to do here (and all we *can* do here) is to describe the *types* of
23155 its formal parameters. */
23156 if (debug_info_level <= DINFO_LEVEL_TERSE)
23157 ;
23158 else if (declaration)
23159 gen_formal_types_die (decl, subr_die);
23160 else
23161 {
23162 /* Generate DIEs to represent all known formal parameters. */
23163 tree parm = DECL_ARGUMENTS (decl);
23164 tree generic_decl = early_dwarf
23165 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23166 tree generic_decl_parm = generic_decl
23167 ? DECL_ARGUMENTS (generic_decl)
23168 : NULL;
23169
23170 /* Now we want to walk the list of parameters of the function and
23171 emit their relevant DIEs.
23172
23173 We consider the case of DECL being an instance of a generic function
23174 as well as it being a normal function.
23175
23176 If DECL is an instance of a generic function we walk the
23177 parameters of the generic function declaration _and_ the parameters of
23178 DECL itself. This is useful because we want to emit specific DIEs for
23179 function parameter packs and those are declared as part of the
23180 generic function declaration. In that particular case,
23181 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23182 That DIE has children DIEs representing the set of arguments
23183 of the pack. Note that the set of pack arguments can be empty.
23184 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
23185 child DIEs.
23186
23187 Otherwise, we just consider the parameters of DECL. */
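
/* For instance, for a generic declaration roughly like
     template <typename... Args> void foo (Args... args);
   an instantiation such as foo<int, long> is expected to get a
   DW_TAG_GNU_formal_parameter_pack DIE whose children describe the int and
   long arguments, while an empty pack yields the same DIE with no
   children.  */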
23188 while (generic_decl_parm || parm)
23189 {
23190 if (generic_decl_parm
23191 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23192 gen_formal_parameter_pack_die (generic_decl_parm,
23193 parm, subr_die,
23194 &parm);
23195 else if (parm)
23196 {
23197 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23198
23199 if (early_dwarf
23200 && parm == DECL_ARGUMENTS (decl)
23201 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23202 && parm_die
23203 && (dwarf_version >= 3 || !dwarf_strict))
23204 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23205
23206 parm = DECL_CHAIN (parm);
23207 }
23208 else if (parm)
23209 parm = DECL_CHAIN (parm);
23210
23211 if (generic_decl_parm)
23212 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23213 }
23214
23215 /* Decide whether we need an unspecified_parameters DIE at the end.
23216 There are 2 cases to do this for: 1) the ANSI `...' declaration -
23217 this is detectable when the end of the arg list is not a
23218 void_type_node; 2) an unprototyped function declaration (not a
23219 definition). This just means that we have no info about the
23220 parameters at all. */
23221 if (early_dwarf)
23222 {
23223 if (prototype_p (TREE_TYPE (decl)))
23224 {
23225 /* This is the prototyped case, check for a trailing ellipsis. */
23226 if (stdarg_p (TREE_TYPE (decl)))
23227 gen_unspecified_parameters_die (decl, subr_die);
23228 }
23229 else if (DECL_INITIAL (decl) == NULL_TREE)
23230 gen_unspecified_parameters_die (decl, subr_die);
23231 }
23232 }
23233
23234 if (subr_die != old_die)
23235 /* Add the calling convention attribute if requested. */
23236 add_calling_convention_attribute (subr_die, decl);
23237
23238 /* Output Dwarf info for all of the stuff within the body of the function
23239 (if it has one - it may be just a declaration).
23240
23241 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23242 a function. This BLOCK actually represents the outermost binding contour
23243 for the function, i.e. the contour in which the function's formal
23244 parameters and labels get declared. Curiously, it appears that the front
23245 end doesn't actually put the PARM_DECL nodes for the current function onto
23246 the BLOCK_VARS list for this outer scope, but are strung off of the
23247 DECL_ARGUMENTS list for the function instead.
23248
23249 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
23250 the LABEL_DECL nodes for the function however, and we output DWARF info
23251 for those in decls_for_scope. Just within the `outer_scope' there will be
23252 a BLOCK node representing the function's outermost pair of curly braces,
23253 and any blocks used for the base and member initializers of a C++
23254 constructor function. */
23255 tree outer_scope = DECL_INITIAL (decl);
23256 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23257 {
23258 int call_site_note_count = 0;
23259 int tail_call_site_note_count = 0;
23260
23261 /* Emit a DW_TAG_variable DIE for a named return value. */
23262 if (DECL_NAME (DECL_RESULT (decl)))
23263 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23264
23265 /* The first time through decls_for_scope we will generate the
23266 DIEs for the locals. The second time, we fill in the
23267 location info. */
23268 decls_for_scope (outer_scope, subr_die);
23269
23270 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23271 {
23272 struct call_arg_loc_node *ca_loc;
23273 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23274 {
23275 dw_die_ref die = NULL;
23276 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23277 rtx arg, next_arg;
23278 tree arg_decl = NULL_TREE;
23279
23280 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23281 ? XEXP (ca_loc->call_arg_loc_note, 0)
23282 : NULL_RTX);
23283 arg; arg = next_arg)
23284 {
23285 dw_loc_descr_ref reg, val;
23286 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23287 dw_die_ref cdie, tdie = NULL;
23288
23289 next_arg = XEXP (arg, 1);
23290 if (REG_P (XEXP (XEXP (arg, 0), 0))
23291 && next_arg
23292 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23293 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23294 && REGNO (XEXP (XEXP (arg, 0), 0))
23295 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23296 next_arg = XEXP (next_arg, 1);
23297 if (mode == VOIDmode)
23298 {
23299 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23300 if (mode == VOIDmode)
23301 mode = GET_MODE (XEXP (arg, 0));
23302 }
23303 if (mode == VOIDmode || mode == BLKmode)
23304 continue;
23305 /* Get dynamic information about call target only if we
23306 have no static information: we cannot generate both
23307 DW_AT_call_origin and DW_AT_call_target
23308 attributes. */
23309 if (ca_loc->symbol_ref == NULL_RTX)
23310 {
23311 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23312 {
23313 tloc = XEXP (XEXP (arg, 0), 1);
23314 continue;
23315 }
23316 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23317 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23318 {
23319 tlocc = XEXP (XEXP (arg, 0), 1);
23320 continue;
23321 }
23322 }
23323 reg = NULL;
23324 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23325 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23326 VAR_INIT_STATUS_INITIALIZED);
23327 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23328 {
23329 rtx mem = XEXP (XEXP (arg, 0), 0);
23330 reg = mem_loc_descriptor (XEXP (mem, 0),
23331 get_address_mode (mem),
23332 GET_MODE (mem),
23333 VAR_INIT_STATUS_INITIALIZED);
23334 }
23335 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23336 == DEBUG_PARAMETER_REF)
23337 {
23338 tree tdecl
23339 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23340 tdie = lookup_decl_die (tdecl);
23341 if (tdie == NULL)
23342 continue;
23343 arg_decl = tdecl;
23344 }
23345 else
23346 continue;
23347 if (reg == NULL
23348 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23349 != DEBUG_PARAMETER_REF)
23350 continue;
23351 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23352 VOIDmode,
23353 VAR_INIT_STATUS_INITIALIZED);
23354 if (val == NULL)
23355 continue;
23356 if (die == NULL)
23357 die = gen_call_site_die (decl, subr_die, ca_loc);
23358 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23359 NULL_TREE);
23360 add_desc_attribute (cdie, arg_decl);
23361 if (reg != NULL)
23362 add_AT_loc (cdie, DW_AT_location, reg);
23363 else if (tdie != NULL)
23364 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23365 tdie);
23366 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23367 if (next_arg != XEXP (arg, 1))
23368 {
23369 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23370 if (mode == VOIDmode)
23371 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23372 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23373 0), 1),
23374 mode, VOIDmode,
23375 VAR_INIT_STATUS_INITIALIZED);
23376 if (val != NULL)
23377 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23378 val);
23379 }
23380 }
23381 if (die == NULL
23382 && (ca_loc->symbol_ref || tloc))
23383 die = gen_call_site_die (decl, subr_die, ca_loc);
23384 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23385 {
23386 dw_loc_descr_ref tval = NULL;
23387
23388 if (tloc != NULL_RTX)
23389 tval = mem_loc_descriptor (tloc,
23390 GET_MODE (tloc) == VOIDmode
23391 ? Pmode : GET_MODE (tloc),
23392 VOIDmode,
23393 VAR_INIT_STATUS_INITIALIZED);
23394 if (tval)
23395 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23396 else if (tlocc != NULL_RTX)
23397 {
23398 tval = mem_loc_descriptor (tlocc,
23399 GET_MODE (tlocc) == VOIDmode
23400 ? Pmode : GET_MODE (tlocc),
23401 VOIDmode,
23402 VAR_INIT_STATUS_INITIALIZED);
23403 if (tval)
23404 add_AT_loc (die,
23405 dwarf_AT (DW_AT_call_target_clobbered),
23406 tval);
23407 }
23408 }
23409 if (die != NULL)
23410 {
23411 call_site_note_count++;
23412 if (ca_loc->tail_call_p)
23413 tail_call_site_note_count++;
23414 }
23415 }
23416 }
23417 call_arg_locations = NULL;
23418 call_arg_loc_last = NULL;
23419 if (tail_call_site_count >= 0
23420 && tail_call_site_count == tail_call_site_note_count
23421 && (!dwarf_strict || dwarf_version >= 5))
23422 {
23423 if (call_site_count >= 0
23424 && call_site_count == call_site_note_count)
23425 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23426 else
23427 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23428 }
23429 call_site_count = -1;
23430 tail_call_site_count = -1;
23431 }
23432
23433 /* Mark used types after we have created DIEs for the functions scopes. */
23434 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23435 }
23436
23437 /* Returns a hash value for X (which really is a die_struct). */
23438
23439 hashval_t
23440 block_die_hasher::hash (die_struct *d)
23441 {
23442 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23443 }
23444
23445 /* Return nonzero if decl_id and die_parent of die_struct X is the same
23446 as decl_id and die_parent of die_struct Y. */
23447
23448 bool
23449 block_die_hasher::equal (die_struct *x, die_struct *y)
23450 {
23451 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23452 }
23453
23454 /* Hold information about markers for inlined entry points. */
23455 struct GTY ((for_user)) inline_entry_data
23456 {
23457 /* The block that's the inlined_function_outer_scope for an inlined
23458 function. */
23459 tree block;
23460
23461 /* The label at the inlined entry point. */
23462 const char *label_pfx;
23463 unsigned int label_num;
23464
23465 /* The view number to be used as the inlined entry point. */
23466 var_loc_view view;
23467 };
23468
23469 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23470 {
23471 typedef tree compare_type;
23472 static inline hashval_t hash (const inline_entry_data *);
23473 static inline bool equal (const inline_entry_data *, const_tree);
23474 };
23475
23476 /* Hash table routines for inline_entry_data. */
23477
23478 inline hashval_t
23479 inline_entry_data_hasher::hash (const inline_entry_data *data)
23480 {
23481 return htab_hash_pointer (data->block);
23482 }
23483
23484 inline bool
23485 inline_entry_data_hasher::equal (const inline_entry_data *data,
23486 const_tree block)
23487 {
23488 return data->block == block;
23489 }
23490
23491 /* Inlined entry points pending DIE creation in this compilation unit. */
23492
23493 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
23494
23495
23496 /* Return TRUE if DECL, which may have been previously generated as
23497 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23498 true if decl (or its origin) is either an extern declaration or a
23499 class/namespace scoped declaration.
23500
23501 The declare_in_namespace support causes us to get two DIEs for one
23502 variable, both of which are declarations. We want to avoid
23503 considering one to be a specification, so we must test for
23504 DECLARATION and DW_AT_declaration. */
23505 static inline bool
23506 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23507 {
23508 return (old_die && TREE_STATIC (decl) && !declaration
23509 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23510 }
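
/* For a C++ class-level static such as
     struct S { static int i; };
     int S::i;
   the in-class declaration produces a DIE carrying DW_AT_declaration, and
   this predicate then lets the namespace-scope definition reference it via
   DW_AT_specification (see gen_variable_die below).  */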
23511
23512 /* Return true if DECL is a local static. */
23513
23514 static inline bool
23515 local_function_static (tree decl)
23516 {
23517 gcc_assert (VAR_P (decl));
23518 return TREE_STATIC (decl)
23519 && DECL_CONTEXT (decl)
23520 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23521 }
23522
23523 /* Generate a DIE to represent a declared data object.
23524 Either DECL or ORIGIN must be non-null. */
23525
23526 static void
23527 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23528 {
23529 HOST_WIDE_INT off = 0;
23530 tree com_decl;
23531 tree decl_or_origin = decl ? decl : origin;
23532 tree ultimate_origin;
23533 dw_die_ref var_die;
23534 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23535 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23536 || class_or_namespace_scope_p (context_die));
23537 bool specialization_p = false;
23538 bool no_linkage_name = false;
23539
23540 /* While C++ inline static data members have definitions inside the
23541 class, force the first DIE to be a declaration, then let gen_member_die
23542 reparent it to the class context and call gen_variable_die again
23543 to create the out-of-class DIE for the definition. */
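
/* For example, for
     struct S { static inline int i = 0; };
   the DIE generated here for i is forced to be a declaration;
   gen_member_die later re-parents it into S's DIE and this function is
   called again to emit the defining DIE, which refers back to the
   declaration via DW_AT_specification.  */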
23544 if (!declaration
23545 && old_die == NULL
23546 && decl
23547 && DECL_CONTEXT (decl)
23548 && TYPE_P (DECL_CONTEXT (decl))
23549 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23550 {
23551 declaration = true;
23552 if (dwarf_version < 5)
23553 no_linkage_name = true;
23554 }
23555
23556 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23557 if (decl || ultimate_origin)
23558 origin = ultimate_origin;
23559 com_decl = fortran_common (decl_or_origin, &off);
23560
23561 /* Symbol in common gets emitted as a child of the common block, in the form
23562 of a data member. */
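
/* For example, Fortran code along the lines of
     INTEGER I
     COMMON /BLK/ I
   is expected to yield a DW_TAG_common_block DIE for BLK with a
   DW_TAG_variable child for I, whose location is the block's address plus
   I's offset within the block.  */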
23563 if (com_decl)
23564 {
23565 dw_die_ref com_die;
23566 dw_loc_list_ref loc = NULL;
23567 die_node com_die_arg;
23568
23569 var_die = lookup_decl_die (decl_or_origin);
23570 if (var_die)
23571 {
23572 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23573 {
23574 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23575 if (loc)
23576 {
23577 if (off)
23578 {
23579 /* Optimize the common case. */
23580 if (single_element_loc_list_p (loc)
23581 && loc->expr->dw_loc_opc == DW_OP_addr
23582 && loc->expr->dw_loc_next == NULL
23583 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23584 == SYMBOL_REF)
23585 {
23586 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23587 loc->expr->dw_loc_oprnd1.v.val_addr
23588 = plus_constant (GET_MODE (x), x , off);
23589 }
23590 else
23591 loc_list_plus_const (loc, off);
23592 }
23593 add_AT_location_description (var_die, DW_AT_location, loc);
23594 remove_AT (var_die, DW_AT_declaration);
23595 }
23596 }
23597 return;
23598 }
23599
23600 if (common_block_die_table == NULL)
23601 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23602
23603 com_die_arg.decl_id = DECL_UID (com_decl);
23604 com_die_arg.die_parent = context_die;
23605 com_die = common_block_die_table->find (&com_die_arg);
23606 if (! early_dwarf)
23607 loc = loc_list_from_tree (com_decl, 2, NULL);
23608 if (com_die == NULL)
23609 {
23610 const char *cnam
23611 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23612 die_node **slot;
23613
23614 com_die = new_die (DW_TAG_common_block, context_die, decl);
23615 add_name_and_src_coords_attributes (com_die, com_decl);
23616 if (loc)
23617 {
23618 add_AT_location_description (com_die, DW_AT_location, loc);
23619 /* Avoid sharing the same loc descriptor between
23620 DW_TAG_common_block and DW_TAG_variable. */
23621 loc = loc_list_from_tree (com_decl, 2, NULL);
23622 }
23623 else if (DECL_EXTERNAL (decl_or_origin))
23624 add_AT_flag (com_die, DW_AT_declaration, 1);
23625 if (want_pubnames ())
23626 add_pubname_string (cnam, com_die); /* ??? needed? */
23627 com_die->decl_id = DECL_UID (com_decl);
23628 slot = common_block_die_table->find_slot (com_die, INSERT);
23629 *slot = com_die;
23630 }
23631 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23632 {
23633 add_AT_location_description (com_die, DW_AT_location, loc);
23634 loc = loc_list_from_tree (com_decl, 2, NULL);
23635 remove_AT (com_die, DW_AT_declaration);
23636 }
23637 var_die = new_die (DW_TAG_variable, com_die, decl);
23638 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23639 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23640 decl_quals (decl_or_origin), false,
23641 context_die);
23642 add_alignment_attribute (var_die, decl);
23643 add_AT_flag (var_die, DW_AT_external, 1);
23644 if (loc)
23645 {
23646 if (off)
23647 {
23648 /* Optimize the common case. */
23649 if (single_element_loc_list_p (loc)
23650 && loc->expr->dw_loc_opc == DW_OP_addr
23651 && loc->expr->dw_loc_next == NULL
23652 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23653 {
23654 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23655 loc->expr->dw_loc_oprnd1.v.val_addr
23656 = plus_constant (GET_MODE (x), x, off);
23657 }
23658 else
23659 loc_list_plus_const (loc, off);
23660 }
23661 add_AT_location_description (var_die, DW_AT_location, loc);
23662 }
23663 else if (DECL_EXTERNAL (decl_or_origin))
23664 add_AT_flag (var_die, DW_AT_declaration, 1);
23665 if (decl)
23666 equate_decl_number_to_die (decl, var_die);
23667 return;
23668 }
23669
23670 if (old_die)
23671 {
23672 if (declaration)
23673 {
23674 /* A declaration that has been previously dumped needs no
23675 further annotations, since it doesn't need location on
23676 the second pass. */
23677 return;
23678 }
23679 else if (decl_will_get_specification_p (old_die, decl, declaration)
23680 && !get_AT (old_die, DW_AT_specification))
23681 {
23682 /* Fall-thru so we can make a new variable die along with a
23683 DW_AT_specification. */
23684 }
23685 else if (origin && old_die->die_parent != context_die)
23686 {
23687 /* If we will be creating an inlined instance, we need a
23688 new DIE that will get annotated with
23689 DW_AT_abstract_origin. */
23690 gcc_assert (!DECL_ABSTRACT_P (decl));
23691 }
23692 else
23693 {
23694 /* If a DIE was dumped early, it still needs location info.
23695 Skip to where we fill the location bits. */
23696 var_die = old_die;
23697
23698 /* ??? In LTRANS we cannot annotate early created variably
23699 modified type DIEs without copying them and adjusting all
23700 references to them. Thus we dumped them again. Also add a
23701 reference to them but beware of -g0 compile and -g link
23702 in which case the reference will be already present. */
23703 tree type = TREE_TYPE (decl_or_origin);
23704 if (in_lto_p
23705 && ! get_AT (var_die, DW_AT_type)
23706 && variably_modified_type_p
23707 (type, decl_function_context (decl_or_origin)))
23708 {
23709 if (decl_by_reference_p (decl_or_origin))
23710 add_type_attribute (var_die, TREE_TYPE (type),
23711 TYPE_UNQUALIFIED, false, context_die);
23712 else
23713 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23714 false, context_die);
23715 }
23716
23717 goto gen_variable_die_location;
23718 }
23719 }
23720
23721 /* For static data members, the declaration in the class is supposed
23722 to have DW_TAG_member tag in DWARF{3,4} and we emit it for compatibility
23723 also in DWARF2; the specification should still be DW_TAG_variable
23724 referencing the DW_TAG_member DIE. */
23725 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23726 var_die = new_die (DW_TAG_member, context_die, decl);
23727 else
23728 var_die = new_die (DW_TAG_variable, context_die, decl);
23729
23730 if (origin != NULL)
23731 add_abstract_origin_attribute (var_die, origin);
23732
23733 /* Loop unrolling can create multiple blocks that refer to the same
23734 static variable, so we must test for the DW_AT_declaration flag.
23735
23736 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23737 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23738 sharing them.
23739
23740 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23741 else if (decl_will_get_specification_p (old_die, decl, declaration))
23742 {
23743 /* This is a definition of a C++ class level static. */
23744 add_AT_specification (var_die, old_die);
23745 specialization_p = true;
23746 if (DECL_NAME (decl))
23747 {
23748 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23749 struct dwarf_file_data * file_index = lookup_filename (s.file);
23750
23751 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23752 add_AT_file (var_die, DW_AT_decl_file, file_index);
23753
23754 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23755 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23756
23757 if (debug_column_info
23758 && s.column
23759 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23760 != (unsigned) s.column))
23761 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23762
23763 if (old_die->die_tag == DW_TAG_member)
23764 add_linkage_name (var_die, decl);
23765 }
23766 }
23767 else
23768 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23769
23770 if ((origin == NULL && !specialization_p)
23771 || (origin != NULL
23772 && !DECL_ABSTRACT_P (decl_or_origin)
23773 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23774 decl_function_context
23775 (decl_or_origin))))
23776 {
23777 tree type = TREE_TYPE (decl_or_origin);
23778
23779 if (decl_by_reference_p (decl_or_origin))
23780 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23781 context_die);
23782 else
23783 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23784 context_die);
23785 }
23786
23787 if (origin == NULL && !specialization_p)
23788 {
23789 if (TREE_PUBLIC (decl))
23790 add_AT_flag (var_die, DW_AT_external, 1);
23791
23792 if (DECL_ARTIFICIAL (decl))
23793 add_AT_flag (var_die, DW_AT_artificial, 1);
23794
23795 add_alignment_attribute (var_die, decl);
23796
23797 add_accessibility_attribute (var_die, decl);
23798 }
23799
23800 if (declaration)
23801 add_AT_flag (var_die, DW_AT_declaration, 1);
23802
23803 if (decl && (DECL_ABSTRACT_P (decl)
23804 || !old_die || is_declaration_die (old_die)))
23805 equate_decl_number_to_die (decl, var_die);
23806
23807 gen_variable_die_location:
23808 if (! declaration
23809 && (! DECL_ABSTRACT_P (decl_or_origin)
23810 /* Local static vars are shared between all clones/inlines,
23811 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23812 already set. */
23813 || (VAR_P (decl_or_origin)
23814 && TREE_STATIC (decl_or_origin)
23815 && DECL_RTL_SET_P (decl_or_origin))))
23816 {
23817 if (early_dwarf)
23818 add_pubname (decl_or_origin, var_die);
23819 else
23820 add_location_or_const_value_attribute (var_die, decl_or_origin,
23821 decl == NULL);
23822 }
23823 else
23824 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23825
23826 if ((dwarf_version >= 4 || !dwarf_strict)
23827 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23828 DW_AT_const_expr) == 1
23829 && !get_AT (var_die, DW_AT_const_expr)
23830 && !specialization_p)
23831 add_AT_flag (var_die, DW_AT_const_expr, 1);
23832
23833 if (!dwarf_strict)
23834 {
23835 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23836 DW_AT_inline);
23837 if (inl != -1
23838 && !get_AT (var_die, DW_AT_inline)
23839 && !specialization_p)
23840 add_AT_unsigned (var_die, DW_AT_inline, inl);
23841 }
23842 }
23843
23844 /* Generate a DIE to represent a named constant. */
23845
23846 static void
23847 gen_const_die (tree decl, dw_die_ref context_die)
23848 {
23849 dw_die_ref const_die;
23850 tree type = TREE_TYPE (decl);
23851
23852 const_die = lookup_decl_die (decl);
23853 if (const_die)
23854 return;
23855
23856 const_die = new_die (DW_TAG_constant, context_die, decl);
23857 equate_decl_number_to_die (decl, const_die);
23858 add_name_and_src_coords_attributes (const_die, decl);
23859 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
23860 if (TREE_PUBLIC (decl))
23861 add_AT_flag (const_die, DW_AT_external, 1);
23862 if (DECL_ARTIFICIAL (decl))
23863 add_AT_flag (const_die, DW_AT_artificial, 1);
23864 tree_add_const_value_attribute_for_decl (const_die, decl);
23865 }
23866
23867 /* Generate a DIE to represent a label identifier. */
23868
23869 static void
23870 gen_label_die (tree decl, dw_die_ref context_die)
23871 {
23872 tree origin = decl_ultimate_origin (decl);
23873 dw_die_ref lbl_die = lookup_decl_die (decl);
23874 rtx insn;
23875 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23876
23877 if (!lbl_die)
23878 {
23879 lbl_die = new_die (DW_TAG_label, context_die, decl);
23880 equate_decl_number_to_die (decl, lbl_die);
23881
23882 if (origin != NULL)
23883 add_abstract_origin_attribute (lbl_die, origin);
23884 else
23885 add_name_and_src_coords_attributes (lbl_die, decl);
23886 }
23887
23888 if (DECL_ABSTRACT_P (decl))
23889 equate_decl_number_to_die (decl, lbl_die);
23890 else if (! early_dwarf)
23891 {
23892 insn = DECL_RTL_IF_SET (decl);
23893
23894 /* Deleted labels are programmer specified labels which have been
23895 eliminated because of various optimizations. We still emit them
23896 here so that it is possible to put breakpoints on them. */
23897 if (insn
23898 && (LABEL_P (insn)
23899 || ((NOTE_P (insn)
23900 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
23901 {
23902 /* When optimization is enabled (via -O) some parts of the compiler
23903 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
23904 represent source-level labels which were explicitly declared by
23905 the user. This really shouldn't be happening though, so catch
23906 it if it ever does happen. */
23907 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
23908
23909 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
23910 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23911 }
23912 else if (insn
23913 && NOTE_P (insn)
23914 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
23915 && CODE_LABEL_NUMBER (insn) != -1)
23916 {
23917 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
23918 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23919 }
23920 }
23921 }
23922
23923 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
23924 attributes to the DIE for a block STMT, to describe where the inlined
23925 function was called from. This is similar to add_src_coords_attributes. */
23926
23927 static inline void
23928 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
23929 {
23930 /* We can end up with BUILTINS_LOCATION here. */
23931 if (RESERVED_LOCATION_P (BLOCK_SOURCE_LOCATION (stmt)))
23932 return;
23933
23934 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
23935
23936 if (dwarf_version >= 3 || !dwarf_strict)
23937 {
23938 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
23939 add_AT_unsigned (die, DW_AT_call_line, s.line);
23940 if (debug_column_info && s.column)
23941 add_AT_unsigned (die, DW_AT_call_column, s.column);
23942 }
23943 }
23944
23945
23946 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
23947 Add low_pc and high_pc attributes to the DIE for a block STMT. */
23948
23949 static inline void
23950 add_high_low_attributes (tree stmt, dw_die_ref die)
23951 {
23952 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23953
23954 if (inline_entry_data **iedp
23955 = !inline_entry_data_table ? NULL
23956 : inline_entry_data_table->find_slot_with_hash (stmt,
23957 htab_hash_pointer (stmt),
23958 NO_INSERT))
23959 {
23960 inline_entry_data *ied = *iedp;
23961 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
23962 gcc_assert (debug_inline_points);
23963 gcc_assert (inlined_function_outer_scope_p (stmt));
23964
23965 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
23966 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23967
23968 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
23969 && !dwarf_strict)
23970 {
23971 if (!output_asm_line_debug_info ())
23972 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
23973 else
23974 {
23975 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
23976 /* FIXME: this will resolve to a small number. Could we
23977 possibly emit smaller data? Ideally we'd emit a
23978 uleb128, but that would make the size of DIEs
23979 impossible for the compiler to compute, since it's
23980 the assembler that computes the value of the view
23981 label in this case. Ideally, we'd have a single form
23982 encompassing both the address and the view, and
23983 indirecting them through a table might make things
23984 easier, but even that would be more wasteful,
23985 space-wise, than what we have now. */
23986 add_AT_symview (die, DW_AT_GNU_entry_view, label);
23987 }
23988 }
23989
23990 inline_entry_data_table->clear_slot (iedp);
23991 }
23992
23993 if (BLOCK_FRAGMENT_CHAIN (stmt)
23994 && (dwarf_version >= 3 || !dwarf_strict))
23995 {
23996 tree chain, superblock = NULL_TREE;
23997 dw_die_ref pdie;
23998 dw_attr_node *attr = NULL;
23999
24000 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
24001 {
24002 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24003 BLOCK_NUMBER (stmt));
24004 add_AT_lbl_id (die, DW_AT_entry_pc, label);
24005 }
24006
24007 /* Optimize duplicate .debug_ranges lists or even tails of
24008 lists. If this BLOCK has the same ranges as its supercontext,
24009 look up the DW_AT_ranges attribute in the supercontext (and
24010 recursively so), verify that the ranges_table contains the
24011 right values and use it instead of adding a new .debug_ranges entry. */
24012 for (chain = stmt, pdie = die;
24013 BLOCK_SAME_RANGE (chain);
24014 chain = BLOCK_SUPERCONTEXT (chain))
24015 {
24016 dw_attr_node *new_attr;
24017
24018 pdie = pdie->die_parent;
24019 if (pdie == NULL)
24020 break;
24021 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
24022 break;
24023 new_attr = get_AT (pdie, DW_AT_ranges);
24024 if (new_attr == NULL
24025 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
24026 break;
24027 attr = new_attr;
24028 superblock = BLOCK_SUPERCONTEXT (chain);
24029 }
24030 if (attr != NULL
24031 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
24032 == (int)BLOCK_NUMBER (superblock))
24033 && BLOCK_FRAGMENT_CHAIN (superblock))
24034 {
24035 unsigned long off = attr->dw_attr_val.v.val_offset;
24036 unsigned long supercnt = 0, thiscnt = 0;
24037 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
24038 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24039 {
24040 ++supercnt;
24041 gcc_checking_assert ((*ranges_table)[off + supercnt].num
24042 == (int)BLOCK_NUMBER (chain));
24043 }
24044 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
24045 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
24046 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24047 ++thiscnt;
24048 gcc_assert (supercnt >= thiscnt);
24049 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
24050 false);
24051 note_rnglist_head (off + supercnt - thiscnt);
24052 return;
24053 }
24054
24055 unsigned int offset = add_ranges (stmt, true);
24056 add_AT_range_list (die, DW_AT_ranges, offset, false);
24057 note_rnglist_head (offset);
24058
24059 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
24060 chain = BLOCK_FRAGMENT_CHAIN (stmt);
24061 do
24062 {
24063 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
24064 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
24065 chain = BLOCK_FRAGMENT_CHAIN (chain);
24066 }
24067 while (chain);
24068 add_ranges (NULL);
24069 }
24070 else
24071 {
24072 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
24073 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24074 BLOCK_NUMBER (stmt));
24075 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
24076 BLOCK_NUMBER (stmt));
24077 add_AT_low_high_pc (die, label, label_high, false);
24078 }
24079 }
24080
24081 /* Generate a DIE for a lexical block. */
24082
24083 static void
24084 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
24085 {
24086 dw_die_ref old_die = lookup_block_die (stmt);
24087 dw_die_ref stmt_die = NULL;
24088 if (!old_die)
24089 {
24090 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24091 equate_block_to_die (stmt, stmt_die);
24092 }
24093
24094 if (BLOCK_ABSTRACT_ORIGIN (stmt))
24095 {
24096 /* If this is an inlined or concrete instance, create a new lexical
24097 block DIE for anything below to attach DW_AT_abstract_origin to. */
24098 if (old_die)
24099 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24100
24101 tree origin = block_ultimate_origin (stmt);
24102 if (origin != NULL_TREE && (origin != stmt || old_die))
24103 add_abstract_origin_attribute (stmt_die, origin);
24104
24105 old_die = NULL;
24106 }
24107
24108 if (old_die)
24109 stmt_die = old_die;
24110
24111 /* A non-abstract block whose blocks have already been reordered
24112 should have the instruction range for this block. If so, set the
24113 high/low attributes. */
24114 if (!early_dwarf && TREE_ASM_WRITTEN (stmt))
24115 {
24116 gcc_assert (stmt_die);
24117 add_high_low_attributes (stmt, stmt_die);
24118 }
24119
24120 decls_for_scope (stmt, stmt_die);
24121 }
24122
24123 /* Generate a DIE for an inlined subprogram. */
24124
24125 static void
24126 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
24127 {
24128 tree decl = block_ultimate_origin (stmt);
24129
24130 /* Make sure any inlined functions are known to be inlineable. */
24131 gcc_checking_assert (DECL_ABSTRACT_P (decl)
24132 || cgraph_function_possibly_inlined_p (decl));
24133
24134 dw_die_ref subr_die = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
24135
24136 if (call_arg_locations || debug_inline_points)
24137 equate_block_to_die (stmt, subr_die);
24138 add_abstract_origin_attribute (subr_die, decl);
24139 if (TREE_ASM_WRITTEN (stmt))
24140 add_high_low_attributes (stmt, subr_die);
24141 add_call_src_coords_attributes (stmt, subr_die);
24142
24143 /* The inliner creates an extra BLOCK for the parameter setup;
24144 we want to merge that with the actual outermost BLOCK of the
24145 inlined function to avoid duplicate locals in consumers.
24146 Do that by performing the recursion into subblocks directly on the
24147 single subblock of STMT.  See the sketch below. */
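   /* Editor's sketch of the BLOCK shape handled here (purely illustrative):

        stmt:  BLOCK A    the inliner's extra parameter-setup block;
                          block_ultimate_origin (A) is the callee's FUNCTION_DECL
          \__  BLOCK B    sole subblock; its ultimate origin is the callee's
                          outermost BLOCK, whose BLOCK_SUPERCONTEXT is that
                          FUNCTION_DECL

      When unwrap_one is set below, the decls of A and the decls and
      subblocks of B are all emitted directly under the single
      DW_TAG_inlined_subroutine DIE, so consumers do not see a duplicated
      outer lexical block.  */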
24148 bool unwrap_one = false;
24149 if (BLOCK_SUBBLOCKS (stmt) && !BLOCK_CHAIN (BLOCK_SUBBLOCKS (stmt)))
24150 {
24151 tree origin = block_ultimate_origin (BLOCK_SUBBLOCKS (stmt));
24152 if (origin
24153 && TREE_CODE (origin) == BLOCK
24154 && BLOCK_SUPERCONTEXT (origin) == decl)
24155 unwrap_one = true;
24156 }
24157 decls_for_scope (stmt, subr_die, !unwrap_one);
24158 if (unwrap_one)
24159 decls_for_scope (BLOCK_SUBBLOCKS (stmt), subr_die);
24160 }
24161
24162 /* Generate a DIE for a field in a record, or structure. CTX is required: see
24163 the comment for VLR_CONTEXT. */
24164
24165 static void
24166 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24167 {
24168 dw_die_ref decl_die;
24169
24170 if (TREE_TYPE (decl) == error_mark_node)
24171 return;
24172
24173 decl_die = new_die (DW_TAG_member, context_die, decl);
24174 add_name_and_src_coords_attributes (decl_die, decl);
24175 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24176 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24177 context_die);
24178
24179 if (DECL_BIT_FIELD_TYPE (decl))
24180 {
24181 add_byte_size_attribute (decl_die, decl);
24182 add_bit_size_attribute (decl_die, decl);
24183 add_bit_offset_attribute (decl_die, decl, ctx);
24184 }
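   /* Illustration only (hypothetical member): for

        struct s { unsigned int f : 3; };

      F has a DECL_BIT_FIELD_TYPE, so in addition to the name and type
      attributes its DIE also carries byte size, bit size and bit offset
      information for the 3-bit field via the three calls above.  */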
24185
24186 add_alignment_attribute (decl_die, decl);
24187
24188 /* If we have a variant part offset, then we are supposed to process a member
24189 of a QUAL_UNION_TYPE, which is how we represent variant parts in
24190 trees. */
24191 gcc_assert (ctx->variant_part_offset == NULL_TREE
24192 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
24193 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24194 add_data_member_location_attribute (decl_die, decl, ctx);
24195
24196 if (DECL_ARTIFICIAL (decl))
24197 add_AT_flag (decl_die, DW_AT_artificial, 1);
24198
24199 add_accessibility_attribute (decl_die, decl);
24200
24201 /* Equate decl number to die, so that we can look up this decl later on. */
24202 equate_decl_number_to_die (decl, decl_die);
24203 }
24204
24205 /* Generate a DIE for a pointer to a member type. TYPE can be an
24206 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24207 pointer to member function. */
24208
24209 static void
24210 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24211 {
24212 if (lookup_type_die (type))
24213 return;
24214
24215 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24216 scope_die_for (type, context_die), type);
24217
24218 equate_type_number_to_die (type, ptr_die);
24219 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24220 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24221 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24222 context_die);
24223 add_alignment_attribute (ptr_die, type);
24224
24225 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24226 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24227 {
24228 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24229 add_AT_loc (ptr_die, DW_AT_use_location, op);
24230 }
24231 }
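/* Editor's illustration (hypothetical C++ input) for the function above:

     struct S { int i; };
     int S::*pdm = &S::i;   // pointer to data member; TYPE is an OFFSET_TYPE

   The resulting DIE is a DW_TAG_ptr_to_member_type whose
   DW_AT_containing_type refers to the DIE for S and whose DW_AT_type is the
   member type (int here); since that type is not a function or method type,
   a simple DW_AT_use_location (DW_OP_plus) is attached as well.  */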
24232
24233 static char *producer_string;
24234
24235 /* Return a heap-allocated producer string, including command-line options
24236 if -grecord-gcc-switches is in effect. */
24237
24238 static char *
24239 gen_producer_string (void)
24240 {
24241 size_t j;
24242 auto_vec<const char *> switches;
24243 const char *language_string = lang_hooks.name;
24244 char *producer, *tail;
24245 const char *p;
24246 size_t len = dwarf_record_gcc_switches ? 0 : 3;
24247 size_t plen = strlen (language_string) + 1 + strlen (version_string);
24248
24249 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
24250 switch (save_decoded_options[j].opt_index)
24251 {
24252 case OPT_o:
24253 case OPT_d:
24254 case OPT_dumpbase:
24255 case OPT_dumpdir:
24256 case OPT_auxbase:
24257 case OPT_auxbase_strip:
24258 case OPT_quiet:
24259 case OPT_version:
24260 case OPT_v:
24261 case OPT_w:
24262 case OPT_L:
24263 case OPT_D:
24264 case OPT_I:
24265 case OPT_U:
24266 case OPT_SPECIAL_unknown:
24267 case OPT_SPECIAL_ignore:
24268 case OPT_SPECIAL_deprecated:
24269 case OPT_SPECIAL_program_name:
24270 case OPT_SPECIAL_input_file:
24271 case OPT_grecord_gcc_switches:
24272 case OPT__output_pch_:
24273 case OPT_fdiagnostics_show_location_:
24274 case OPT_fdiagnostics_show_option:
24275 case OPT_fdiagnostics_show_caret:
24276 case OPT_fdiagnostics_show_labels:
24277 case OPT_fdiagnostics_show_line_numbers:
24278 case OPT_fdiagnostics_color_:
24279 case OPT_fdiagnostics_format_:
24280 case OPT_fverbose_asm:
24281 case OPT____:
24282 case OPT__sysroot_:
24283 case OPT_nostdinc:
24284 case OPT_nostdinc__:
24285 case OPT_fpreprocessed:
24286 case OPT_fltrans_output_list_:
24287 case OPT_fresolution_:
24288 case OPT_fdebug_prefix_map_:
24289 case OPT_fmacro_prefix_map_:
24290 case OPT_ffile_prefix_map_:
24291 case OPT_fcompare_debug:
24292 case OPT_fchecking:
24293 case OPT_fchecking_:
24294 /* Ignore these. */
24295 continue;
24296 default:
24297 if (cl_options[save_decoded_options[j].opt_index].flags
24298 & CL_NO_DWARF_RECORD)
24299 continue;
24300 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
24301 == '-');
24302 switch (save_decoded_options[j].canonical_option[0][1])
24303 {
24304 case 'M':
24305 case 'i':
24306 case 'W':
24307 continue;
24308 case 'f':
24309 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
24310 "dump", 4) == 0)
24311 continue;
24312 break;
24313 default:
24314 break;
24315 }
24316 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
24317 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
24318 break;
24319 }
24320
24321 producer = XNEWVEC (char, plen + 1 + len + 1);
24322 tail = producer;
24323 sprintf (tail, "%s %s", language_string, version_string);
24324 tail += plen;
24325
24326 FOR_EACH_VEC_ELT (switches, j, p)
24327 {
24328 len = strlen (p);
24329 *tail = ' ';
24330 memcpy (tail + 1, p, len);
24331 tail += len + 1;
24332 }
24333
24334 *tail = '\0';
24335 return producer;
24336 }
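/* Editor's illustration (version number and flags are made up): with
   -g -O2 and switch recording enabled, the string built above might read

     "GNU C17 9.1.0 -mtune=generic -march=x86-64 -g -O2"

   i.e. "<language> <version>" followed by the recorded options, with
   driver, dump and diagnostic options filtered out by the switch statement
   above.  */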
24337
24338 /* Given a C and/or C++ language/version string, return the "highest".
24339 C++ is assumed to be "higher" than C in this case. Used for merging
24340 LTO translation unit languages. */
24341 static const char *
24342 highest_c_language (const char *lang1, const char *lang2)
24343 {
24344 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24345 return "GNU C++17";
24346 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24347 return "GNU C++14";
24348 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24349 return "GNU C++11";
24350 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24351 return "GNU C++98";
24352
24353 if (strcmp ("GNU C2X", lang1) == 0 || strcmp ("GNU C2X", lang2) == 0)
24354 return "GNU C2X";
24355 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24356 return "GNU C17";
24357 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24358 return "GNU C11";
24359 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24360 return "GNU C99";
24361 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24362 return "GNU C89";
24363
24364 gcc_unreachable ();
24365 }
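/* For instance, highest_c_language ("GNU C99", "GNU C++14") returns
   "GNU C++14" and highest_c_language ("GNU C89", "GNU C11") returns
   "GNU C11", so a mixed C/C++ LTO link advertises the C++ dialect.  */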
24366
24367
24368 /* Generate the DIE for the compilation unit. */
24369
24370 static dw_die_ref
24371 gen_compile_unit_die (const char *filename)
24372 {
24373 dw_die_ref die;
24374 const char *language_string = lang_hooks.name;
24375 int language;
24376
24377 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24378
24379 if (filename)
24380 {
24381 add_name_attribute (die, filename);
24382 /* Don't add cwd for <built-in>. */
24383 if (filename[0] != '<')
24384 add_comp_dir_attribute (die);
24385 }
24386
24387 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24388
24389 /* If our producer is LTO try to figure out a common language to use
24390 from the global list of translation units. */
24391 if (strcmp (language_string, "GNU GIMPLE") == 0)
24392 {
24393 unsigned i;
24394 tree t;
24395 const char *common_lang = NULL;
24396
24397 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24398 {
24399 if (!TRANSLATION_UNIT_LANGUAGE (t))
24400 continue;
24401 if (!common_lang)
24402 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24403 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24404 ;
24405 else if (strncmp (common_lang, "GNU C", 5) == 0
24406 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24407 /* Mixing C and C++ is ok, use C++ in that case. */
24408 common_lang = highest_c_language (common_lang,
24409 TRANSLATION_UNIT_LANGUAGE (t));
24410 else
24411 {
24412 /* Fall back to C. */
24413 common_lang = NULL;
24414 break;
24415 }
24416 }
24417
24418 if (common_lang)
24419 language_string = common_lang;
24420 }
24421
24422 language = DW_LANG_C;
24423 if (strncmp (language_string, "GNU C", 5) == 0
24424 && ISDIGIT (language_string[5]))
24425 {
24426 language = DW_LANG_C89;
24427 if (dwarf_version >= 3 || !dwarf_strict)
24428 {
24429 if (strcmp (language_string, "GNU C89") != 0)
24430 language = DW_LANG_C99;
24431
24432 if (dwarf_version >= 5 /* || !dwarf_strict */)
24433 if (strcmp (language_string, "GNU C11") == 0
24434 || strcmp (language_string, "GNU C17") == 0
24435 || strcmp (language_string, "GNU C2X") == 0)
24436 language = DW_LANG_C11;
24437 }
24438 }
24439 else if (strncmp (language_string, "GNU C++", 7) == 0)
24440 {
24441 language = DW_LANG_C_plus_plus;
24442 if (dwarf_version >= 5 /* || !dwarf_strict */)
24443 {
24444 if (strcmp (language_string, "GNU C++11") == 0)
24445 language = DW_LANG_C_plus_plus_11;
24446 else if (strcmp (language_string, "GNU C++14") == 0)
24447 language = DW_LANG_C_plus_plus_14;
24448 else if (strcmp (language_string, "GNU C++17") == 0)
24449 /* For now. */
24450 language = DW_LANG_C_plus_plus_14;
24451 }
24452 }
24453 else if (strcmp (language_string, "GNU F77") == 0)
24454 language = DW_LANG_Fortran77;
24455 else if (dwarf_version >= 3 || !dwarf_strict)
24456 {
24457 if (strcmp (language_string, "GNU Ada") == 0)
24458 language = DW_LANG_Ada95;
24459 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24460 {
24461 language = DW_LANG_Fortran95;
24462 if (dwarf_version >= 5 /* || !dwarf_strict */)
24463 {
24464 if (strcmp (language_string, "GNU Fortran2003") == 0)
24465 language = DW_LANG_Fortran03;
24466 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24467 language = DW_LANG_Fortran08;
24468 }
24469 }
24470 else if (strcmp (language_string, "GNU Objective-C") == 0)
24471 language = DW_LANG_ObjC;
24472 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24473 language = DW_LANG_ObjC_plus_plus;
24474 else if (strcmp (language_string, "GNU D") == 0)
24475 language = DW_LANG_D;
24476 else if (dwarf_version >= 5 || !dwarf_strict)
24477 {
24478 if (strcmp (language_string, "GNU Go") == 0)
24479 language = DW_LANG_Go;
24480 }
24481 }
24482 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24483 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24484 language = DW_LANG_Fortran90;
24485 /* Likewise for Ada. */
24486 else if (strcmp (language_string, "GNU Ada") == 0)
24487 language = DW_LANG_Ada83;
24488
24489 add_AT_unsigned (die, DW_AT_language, language);
24490
24491 switch (language)
24492 {
24493 case DW_LANG_Fortran77:
24494 case DW_LANG_Fortran90:
24495 case DW_LANG_Fortran95:
24496 case DW_LANG_Fortran03:
24497 case DW_LANG_Fortran08:
24498 /* Fortran has case insensitive identifiers and the front-end
24499 lowercases everything. */
24500 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24501 break;
24502 default:
24503 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24504 break;
24505 }
24506 return die;
24507 }
24508
24509 /* Generate the DIE for a base class. */
24510
24511 static void
24512 gen_inheritance_die (tree binfo, tree access, tree type,
24513 dw_die_ref context_die)
24514 {
24515 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24516 struct vlr_context ctx = { type, NULL };
24517
24518 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24519 context_die);
24520 add_data_member_location_attribute (die, binfo, &ctx);
24521
24522 if (BINFO_VIRTUAL_P (binfo))
24523 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24524
24525 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24526 children, otherwise the default is DW_ACCESS_public. In DWARF2
24527 the default has always been DW_ACCESS_private. */
24528 if (access == access_public_node)
24529 {
24530 if (dwarf_version == 2
24531 || context_die->die_tag == DW_TAG_class_type)
24532 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24533 }
24534 else if (access == access_protected_node)
24535 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24536 else if (dwarf_version > 2
24537 && context_die->die_tag != DW_TAG_class_type)
24538 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24539 }
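/* Editor's illustration (hypothetical C++ input, assuming D is emitted as a
   DW_TAG_structure_type):

     class B {};
     struct D : private B {};

   Under DWARF 3+, children of a non-class context default to public access,
   so the private base above gets an explicit DW_AT_accessibility; the same
   base inside a DW_TAG_class_type context, or under plain DWARF 2 where the
   default was private, is left implicit.  */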
24540
24541 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24542 structure. */
24543
24544 static bool
24545 is_variant_part (tree decl)
24546 {
24547 return (TREE_CODE (decl) == FIELD_DECL
24548 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24549 }
24550
24551 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24552 return the FIELD_DECL. Return NULL_TREE otherwise. */
24553
24554 static tree
24555 analyze_discr_in_predicate (tree operand, tree struct_type)
24556 {
24557 while (CONVERT_EXPR_P (operand))
24558 operand = TREE_OPERAND (operand, 0);
24559
24560 /* Match field access to members of struct_type only. */
24561 if (TREE_CODE (operand) == COMPONENT_REF
24562 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24563 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24564 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24565 return TREE_OPERAND (operand, 1);
24566 else
24567 return NULL_TREE;
24568 }
24569
24570 /* Check that SRC is a constant integer that can be represented as a native
24571 integer constant (either signed or unsigned). If so, store it into DEST and
24572 return true. Return false otherwise. */
24573
24574 static bool
24575 get_discr_value (tree src, dw_discr_value *dest)
24576 {
24577 tree discr_type = TREE_TYPE (src);
24578
24579 if (lang_hooks.types.get_debug_type)
24580 {
24581 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24582 if (debug_type != NULL)
24583 discr_type = debug_type;
24584 }
24585
24586 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24587 return false;
24588
24589 /* Signedness can vary between the original type and the debug type. This
24590 can happen for character types in Ada for instance: the character type
24591 used for code generation can be signed, to be compatible with the C one,
24592 but from a debugger point of view, it must be unsigned. */
24593 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24594 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24595
24596 if (is_orig_unsigned != is_debug_unsigned)
24597 src = fold_convert (discr_type, src);
24598
24599 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24600 return false;
24601
24602 dest->pos = is_debug_unsigned;
24603 if (is_debug_unsigned)
24604 dest->v.uval = tree_to_uhwi (src);
24605 else
24606 dest->v.sval = tree_to_shwi (src);
24607
24608 return true;
24609 }
24610
24611 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24612 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24613 store NULL_TREE in DISCR_DECL. Otherwise:
24614
24615 - store the discriminant field in STRUCT_TYPE that controls the variant
24616 part to *DISCR_DECL
24617
24618 - put in *DISCR_LISTS_P an array where for each variant, the item
24619 represents the corresponding matching list of discriminant values.
24620
24621 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24622 the above array.
24623
24624 Note that when the array is allocated (i.e. when the analysis is
24625 successful), it is up to the caller to free the array. */
24626
24627 static void
24628 analyze_variants_discr (tree variant_part_decl,
24629 tree struct_type,
24630 tree *discr_decl,
24631 dw_discr_list_ref **discr_lists_p,
24632 unsigned *discr_lists_length)
24633 {
24634 tree variant_part_type = TREE_TYPE (variant_part_decl);
24635 tree variant;
24636 dw_discr_list_ref *discr_lists;
24637 unsigned i;
24638
24639 /* Compute how many variants there are in this variant part. */
24640 *discr_lists_length = 0;
24641 for (variant = TYPE_FIELDS (variant_part_type);
24642 variant != NULL_TREE;
24643 variant = DECL_CHAIN (variant))
24644 ++*discr_lists_length;
24645
24646 *discr_decl = NULL_TREE;
24647 *discr_lists_p
24648 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24649 sizeof (**discr_lists_p));
24650 discr_lists = *discr_lists_p;
24651
24652 /* And then analyze all variants to extract discriminant information for all
24653 of them. This analysis is conservative: as soon as we detect something we
24654 do not support, abort everything and pretend we found nothing. */
24655 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24656 variant != NULL_TREE;
24657 variant = DECL_CHAIN (variant), ++i)
24658 {
24659 tree match_expr = DECL_QUALIFIER (variant);
24660
24661 /* Now, try to analyze the predicate and deduce a discriminant for
24662 it. */
24663 if (match_expr == boolean_true_node)
24664 /* Typically happens for the default variant: it matches all cases that
24665 previous variants rejected. Don't output any matching value for
24666 this one. */
24667 continue;
24668
24669 /* The following loop tries to iterate over each discriminant
24670 possibility: single values or ranges. */
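	  /* Shapes of predicates handled below, schematically (with <d>
	     standing for a COMPONENT_REF of the discriminant field on a
	     PLACEHOLDER_EXPR):

	       <d> == CST                       single matching value
	       <d> >= CST1 && <d> <= CST2       range (GT_EXPR/LT_EXPR bounds
	                                         are adjusted by one below)
	       <d>                              boolean discriminant, matches
	                                         the "true" value
	       pred || pred || ...              several possibilities chained
	                                         with TRUTH_ORIF_EXPR  */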
24671 while (match_expr != NULL_TREE)
24672 {
24673 tree next_round_match_expr;
24674 tree candidate_discr = NULL_TREE;
24675 dw_discr_list_ref new_node = NULL;
24676
24677 /* Possibilities are matched one after the other by nested
24678 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24679 continue with the rest at next iteration. */
24680 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24681 {
24682 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24683 match_expr = TREE_OPERAND (match_expr, 1);
24684 }
24685 else
24686 next_round_match_expr = NULL_TREE;
24687
24688 if (match_expr == boolean_false_node)
24689 /* This sub-expression matches nothing: just wait for the next
24690 one. */
24691 ;
24692
24693 else if (TREE_CODE (match_expr) == EQ_EXPR)
24694 {
24695 /* We are matching: <discr_field> == <integer_cst>
24696 This sub-expression matches a single value. */
24697 tree integer_cst = TREE_OPERAND (match_expr, 1);
24698
24699 candidate_discr
24700 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24701 struct_type);
24702
24703 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24704 if (!get_discr_value (integer_cst,
24705 &new_node->dw_discr_lower_bound))
24706 goto abort;
24707 new_node->dw_discr_range = false;
24708 }
24709
24710 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24711 {
24712 /* We are matching:
24713 <discr_field> > <integer_cst>
24714 && <discr_field> < <integer_cst>.
24715 This sub-expression matches the range of values between the
24716 two matched integer constants. Note that comparisons can be
24717 inclusive or exclusive. */
24718 tree candidate_discr_1, candidate_discr_2;
24719 tree lower_cst, upper_cst;
24720 bool lower_cst_included, upper_cst_included;
24721 tree lower_op = TREE_OPERAND (match_expr, 0);
24722 tree upper_op = TREE_OPERAND (match_expr, 1);
24723
24724 /* When the comparison is exclusive, the integer constant is not
24725 the discriminant range bound we are looking for: we will have
24726 to increment or decrement it. */
24727 if (TREE_CODE (lower_op) == GE_EXPR)
24728 lower_cst_included = true;
24729 else if (TREE_CODE (lower_op) == GT_EXPR)
24730 lower_cst_included = false;
24731 else
24732 goto abort;
24733
24734 if (TREE_CODE (upper_op) == LE_EXPR)
24735 upper_cst_included = true;
24736 else if (TREE_CODE (upper_op) == LT_EXPR)
24737 upper_cst_included = false;
24738 else
24739 goto abort;
24740
24741 /* Extract the discriminant from the first operand and check it
24742 is consistent with the same analysis in the second
24743 operand. */
24744 candidate_discr_1
24745 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24746 struct_type);
24747 candidate_discr_2
24748 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24749 struct_type);
24750 if (candidate_discr_1 == candidate_discr_2)
24751 candidate_discr = candidate_discr_1;
24752 else
24753 goto abort;
24754
24755 /* Extract bounds from both. */
24756 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24757 lower_cst = TREE_OPERAND (lower_op, 1);
24758 upper_cst = TREE_OPERAND (upper_op, 1);
24759
24760 if (!lower_cst_included)
24761 lower_cst
24762 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24763 build_int_cst (TREE_TYPE (lower_cst), 1));
24764 if (!upper_cst_included)
24765 upper_cst
24766 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24767 build_int_cst (TREE_TYPE (upper_cst), 1));
24768
24769 if (!get_discr_value (lower_cst,
24770 &new_node->dw_discr_lower_bound)
24771 || !get_discr_value (upper_cst,
24772 &new_node->dw_discr_upper_bound))
24773 goto abort;
24774
24775 new_node->dw_discr_range = true;
24776 }
24777
24778 else if ((candidate_discr
24779 = analyze_discr_in_predicate (match_expr, struct_type))
24780 && TREE_TYPE (candidate_discr) == boolean_type_node)
24781 {
24782 /* We are matching: <discr_field> for a boolean discriminant.
24783 This sub-expression matches boolean_true_node. */
24784 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24785 if (!get_discr_value (boolean_true_node,
24786 &new_node->dw_discr_lower_bound))
24787 goto abort;
24788 new_node->dw_discr_range = false;
24789 }
24790
24791 else
24792 /* Unsupported sub-expression: we cannot determine the set of
24793 matching discriminant values. Abort everything. */
24794 goto abort;
24795
24796 /* If the discriminant info is not consistent with what we saw so
24797 far, consider the analysis failed and abort everything. */
24798 if (candidate_discr == NULL_TREE
24799 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24800 goto abort;
24801 else
24802 *discr_decl = candidate_discr;
24803
24804 if (new_node != NULL)
24805 {
24806 new_node->dw_discr_next = discr_lists[i];
24807 discr_lists[i] = new_node;
24808 }
24809 match_expr = next_round_match_expr;
24810 }
24811 }
24812
24813 /* If we reach this point, we could match everything we were interested
24814 in. */
24815 return;
24816
24817 abort:
24818 /* Clean up all data structures and return no result. */
24819 free (*discr_lists_p);
24820 *discr_lists_p = NULL;
24821 *discr_decl = NULL_TREE;
24822 }
24823
24824 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24825 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24826 under CONTEXT_DIE.
24827
24828 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24829 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
24830 this type, which are record types, represent the available variants and each
24831 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24832 values are inferred from these attributes.
24833
24834 In trees, the offsets for the fields inside these sub-records are relative
24835 to the variant part itself, whereas the corresponding DIEs should have
24836 offset attributes that are relative to the embedding record base address.
24837 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
24838 must be an expression that computes the offset of the variant part to
24839 describe in DWARF. */
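/* Editor's illustration (Ada-like pseudo-source, not literal syntax):

     type Rec (Kind : Integer) is record
        case Kind is
           when 1 .. 4 => I : Integer;
           when others => F : Float;
        end case;
     end record;

   shows up in trees as a RECORD_TYPE holding the Kind field plus a
   FIELD_DECL whose type is a QUAL_UNION_TYPE with two RECORD_TYPE members;
   the first member's DECL_QUALIFIER is roughly "Kind >= 1 && Kind <= 4" and
   the second's is boolean_true_node.  The DWARF produced below is then,
   schematically:

     DW_TAG_variant_part
       DW_AT_discr -> DIE for "kind"
       DW_TAG_variant  (DW_AT_discr_list: range 1..4)
         DW_TAG_member "i"
       DW_TAG_variant  (no discriminant value: the default variant)
         DW_TAG_member "f"  */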
24840
24841 static void
24842 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
24843 dw_die_ref context_die)
24844 {
24845 const tree variant_part_type = TREE_TYPE (variant_part_decl);
24846 tree variant_part_offset = vlr_ctx->variant_part_offset;
24847 struct loc_descr_context ctx = {
24848 vlr_ctx->struct_type, /* context_type */
24849 NULL_TREE, /* base_decl */
24850 NULL, /* dpi */
24851 false, /* placeholder_arg */
24852 false /* placeholder_seen */
24853 };
24854
24855 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
24856 NULL_TREE if there is no such field. */
24857 tree discr_decl = NULL_TREE;
24858 dw_discr_list_ref *discr_lists;
24859 unsigned discr_lists_length = 0;
24860 unsigned i;
24861
24862 dw_die_ref dwarf_proc_die = NULL;
24863 dw_die_ref variant_part_die
24864 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
24865
24866 equate_decl_number_to_die (variant_part_decl, variant_part_die);
24867
24868 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
24869 &discr_decl, &discr_lists, &discr_lists_length);
24870
24871 if (discr_decl != NULL_TREE)
24872 {
24873 dw_die_ref discr_die = lookup_decl_die (discr_decl);
24874
24875 if (discr_die)
24876 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
24877 else
24878 /* We have no DIE for the discriminant, so just discard all
24879 discriminant information in the output. */
24880 discr_decl = NULL_TREE;
24881 }
24882
24883 /* If the offset for this variant part is more complex than a constant,
24884 create a DWARF procedure for it so that we will not have to generate DWARF
24885 expressions for it for each member. */
24886 if (TREE_CODE (variant_part_offset) != INTEGER_CST
24887 && (dwarf_version >= 3 || !dwarf_strict))
24888 {
24889 const tree dwarf_proc_fndecl
24890 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
24891 build_function_type (TREE_TYPE (variant_part_offset),
24892 NULL_TREE));
24893 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
24894 const dw_loc_descr_ref dwarf_proc_body
24895 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
24896
24897 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
24898 dwarf_proc_fndecl, context_die);
24899 if (dwarf_proc_die != NULL)
24900 variant_part_offset = dwarf_proc_call;
24901 }
24902
24903 /* Output DIEs for all variants. */
24904 i = 0;
24905 for (tree variant = TYPE_FIELDS (variant_part_type);
24906 variant != NULL_TREE;
24907 variant = DECL_CHAIN (variant), ++i)
24908 {
24909 tree variant_type = TREE_TYPE (variant);
24910 dw_die_ref variant_die;
24911
24912 /* All variants (i.e. members of a variant part) are supposed to be
24913 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
24914 under these records. */
24915 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
24916
24917 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
24918 equate_decl_number_to_die (variant, variant_die);
24919
24920 /* Output discriminant values this variant matches, if any. */
24921 if (discr_decl == NULL || discr_lists[i] == NULL)
24922 /* In case we have no discriminant information at all, this is
24923 probably the default variant: as the standard says, don't
24924 output any discriminant value/list attribute. */
24925 ;
24926 else if (discr_lists[i]->dw_discr_next == NULL
24927 && !discr_lists[i]->dw_discr_range)
24928 /* If there is only one accepted value, don't bother outputting a
24929 list. */
24930 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
24931 else
24932 add_discr_list (variant_die, discr_lists[i]);
24933
24934 for (tree member = TYPE_FIELDS (variant_type);
24935 member != NULL_TREE;
24936 member = DECL_CHAIN (member))
24937 {
24938 struct vlr_context vlr_sub_ctx = {
24939 vlr_ctx->struct_type, /* struct_type */
24940 NULL /* variant_part_offset */
24941 };
24942 if (is_variant_part (member))
24943 {
24944 /* All offsets for fields inside variant parts are relative to
24945 the top-level embedding RECORD_TYPE's base address. On the
24946 other hand, offsets in GCC's types are relative to the
24947 nested-most variant part. So we have to sum offsets each time
24948 we recurse. */
24949
24950 vlr_sub_ctx.variant_part_offset
24951 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
24952 variant_part_offset, byte_position (member));
24953 gen_variant_part (member, &vlr_sub_ctx, variant_die);
24954 }
24955 else
24956 {
24957 vlr_sub_ctx.variant_part_offset = variant_part_offset;
24958 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
24959 }
24960 }
24961 }
24962
24963 free (discr_lists);
24964 }
24965
24966 /* Generate a DIE for a class member. */
24967
24968 static void
24969 gen_member_die (tree type, dw_die_ref context_die)
24970 {
24971 tree member;
24972 tree binfo = TYPE_BINFO (type);
24973
24974 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
24975
24976 /* If this is not an incomplete type, output descriptions of each of its
24977 members. Note that as we output the DIEs necessary to represent the
24978 members of this record or union type, we will also be trying to output
24979 DIEs to represent the *types* of those members. However the `type'
24980 function (above) will specifically avoid generating type DIEs for member
24981 types *within* the list of member DIEs for this (containing) type except
24982 for those types (of members) which are explicitly marked as also being
24983 members of this (containing) type themselves. The g++ front end can
24984 force any given type to be treated as a member of some other (containing)
24985 type by setting the TYPE_CONTEXT of the given (member) type to point to
24986 the TREE node representing the appropriate (containing) type. */
24987
24988 /* First output info about the base classes. */
24989 if (binfo)
24990 {
24991 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
24992 int i;
24993 tree base;
24994
24995 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
24996 gen_inheritance_die (base,
24997 (accesses ? (*accesses)[i] : access_public_node),
24998 type,
24999 context_die);
25000 }
25001
25002 /* Now output info about the data members and type members. */
25003 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
25004 {
25005 struct vlr_context vlr_ctx = { type, NULL_TREE };
25006 bool static_inline_p
25007 = (TREE_STATIC (member)
25008 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
25009 != -1));
25010
25011 /* Ignore clones. */
25012 if (DECL_ABSTRACT_ORIGIN (member))
25013 continue;
25014
25015 /* If we thought we were generating minimal debug info for TYPE
25016 and then changed our minds, some of the member declarations
25017 may have already been defined. Don't define them again, but
25018 do put them in the right order. */
25019
25020 if (dw_die_ref child = lookup_decl_die (member))
25021 {
25022 /* Handle inline static data members, which only have in-class
25023 declarations. */
25024 dw_die_ref ref = NULL;
25025 if (child->die_tag == DW_TAG_variable
25026 && child->die_parent == comp_unit_die ())
25027 {
25028 ref = get_AT_ref (child, DW_AT_specification);
25029 /* For C++17 inline static data members followed by a redundant
25030 out-of-class redeclaration, we might get here with
25031 child being the DIE created for the out-of-class
25032 redeclaration and with its DW_AT_specification being
25033 the DIE created for the in-class definition. We want to
25034 reparent the latter, and don't want to create another
25035 DIE with DW_AT_specification in that case, because
25036 we already have one. */
25037 if (ref
25038 && static_inline_p
25039 && ref->die_tag == DW_TAG_variable
25040 && ref->die_parent == comp_unit_die ()
25041 && get_AT (ref, DW_AT_specification) == NULL)
25042 {
25043 child = ref;
25044 ref = NULL;
25045 static_inline_p = false;
25046 }
25047 }
25048
25049 if (child->die_tag == DW_TAG_variable
25050 && child->die_parent == comp_unit_die ()
25051 && ref == NULL)
25052 {
25053 reparent_child (child, context_die);
25054 if (dwarf_version < 5)
25055 child->die_tag = DW_TAG_member;
25056 }
25057 else
25058 splice_child_die (context_die, child);
25059 }
25060
25061 /* Do not generate standard DWARF for variant parts if we are generating
25062 the corresponding GNAT encodings: DIEs generated for both would
25063 conflict in our mappings. */
25064 else if (is_variant_part (member)
25065 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
25066 {
25067 vlr_ctx.variant_part_offset = byte_position (member);
25068 gen_variant_part (member, &vlr_ctx, context_die);
25069 }
25070 else
25071 {
25072 vlr_ctx.variant_part_offset = NULL_TREE;
25073 gen_decl_die (member, NULL, &vlr_ctx, context_die);
25074 }
25075
25076 /* For C++ inline static data members emit immediately a DW_TAG_variable
25077 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
25078 DW_AT_specification. */
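      /* Illustration (hypothetical C++17 input):

	   struct S { static inline int x = 1; };

	 The in-class declaration of S::x was emitted as a child DIE of S
	 above; the block below then temporarily clears DECL_EXTERNAL and
	 emits a DW_TAG_variable at the compile-unit level that points back
	 to that member DIE through DW_AT_specification and carries the
	 actual definition.  */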
25079 if (static_inline_p)
25080 {
25081 int old_extern = DECL_EXTERNAL (member);
25082 DECL_EXTERNAL (member) = 0;
25083 gen_decl_die (member, NULL, NULL, comp_unit_die ());
25084 DECL_EXTERNAL (member) = old_extern;
25085 }
25086 }
25087 }
25088
25089 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
25090 is set, we pretend that the type was never defined, so we only get the
25091 member DIEs needed by later specification DIEs. */
25092
25093 static void
25094 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
25095 enum debug_info_usage usage)
25096 {
25097 if (TREE_ASM_WRITTEN (type))
25098 {
25099 /* Fill in the bounds of variable-length fields in late DWARF if
25100 still incomplete. */
25101 if (!early_dwarf && variably_modified_type_p (type, NULL))
25102 for (tree member = TYPE_FIELDS (type);
25103 member;
25104 member = DECL_CHAIN (member))
25105 fill_variable_array_bounds (TREE_TYPE (member));
25106 return;
25107 }
25108
25109 dw_die_ref type_die = lookup_type_die (type);
25110 dw_die_ref scope_die = 0;
25111 int nested = 0;
25112 int complete = (TYPE_SIZE (type)
25113 && (! TYPE_STUB_DECL (type)
25114 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
25115 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
25116 complete = complete && should_emit_struct_debug (type, usage);
25117
25118 if (type_die && ! complete)
25119 return;
25120
25121 if (TYPE_CONTEXT (type) != NULL_TREE
25122 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25123 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
25124 nested = 1;
25125
25126 scope_die = scope_die_for (type, context_die);
25127
25128 /* Generate child DIEs for template parameters. */
25129 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
25130 schedule_generic_params_dies_gen (type);
25131
25132 if (! type_die || (nested && is_cu_die (scope_die)))
25133 /* First occurrence of type or toplevel definition of nested class. */
25134 {
25135 dw_die_ref old_die = type_die;
25136
25137 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
25138 ? record_type_tag (type) : DW_TAG_union_type,
25139 scope_die, type);
25140 equate_type_number_to_die (type, type_die);
25141 if (old_die)
25142 add_AT_specification (type_die, old_die);
25143 else
25144 add_name_attribute (type_die, type_tag (type));
25145 }
25146 else
25147 remove_AT (type_die, DW_AT_declaration);
25148
25149 /* If this type has been completed, then give it a byte_size attribute
25150 and a list of members. */
25151 if (complete && !ns_decl)
25152 {
25153 /* Prevent infinite recursion in cases where the type of some member of
25154 this type is expressed in terms of this type itself. */
25155 TREE_ASM_WRITTEN (type) = 1;
25156 add_byte_size_attribute (type_die, type);
25157 add_alignment_attribute (type_die, type);
25158 if (TYPE_STUB_DECL (type) != NULL_TREE)
25159 {
25160 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
25161 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
25162 }
25163
25164 /* If the first reference to this type was as the return type of an
25165 inline function, then it may not have a parent. Fix this now. */
25166 if (type_die->die_parent == NULL)
25167 add_child_die (scope_die, type_die);
25168
25169 gen_member_die (type, type_die);
25170
25171 add_gnat_descriptive_type_attribute (type_die, type, context_die);
25172 if (TYPE_ARTIFICIAL (type))
25173 add_AT_flag (type_die, DW_AT_artificial, 1);
25174
25175 /* GNU extension: Record what type our vtable lives in. */
25176 if (TYPE_VFIELD (type))
25177 {
25178 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25179
25180 gen_type_die (vtype, context_die);
25181 add_AT_die_ref (type_die, DW_AT_containing_type,
25182 lookup_type_die (vtype));
25183 }
25184 }
25185 else
25186 {
25187 add_AT_flag (type_die, DW_AT_declaration, 1);
25188
25189 /* We don't need to do this for function-local types. */
25190 if (TYPE_STUB_DECL (type)
25191 && ! decl_function_context (TYPE_STUB_DECL (type)))
25192 vec_safe_push (incomplete_types, type);
25193 }
25194
25195 if (get_AT (type_die, DW_AT_name))
25196 add_pubtype (type, type_die);
25197 }
25198
25199 /* Generate a DIE for a subroutine _type_. */
25200
25201 static void
25202 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25203 {
25204 tree return_type = TREE_TYPE (type);
25205 dw_die_ref subr_die
25206 = new_die (DW_TAG_subroutine_type,
25207 scope_die_for (type, context_die), type);
25208
25209 equate_type_number_to_die (type, subr_die);
25210 add_prototyped_attribute (subr_die, type);
25211 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25212 context_die);
25213 add_alignment_attribute (subr_die, type);
25214 gen_formal_types_die (type, subr_die);
25215
25216 if (get_AT (subr_die, DW_AT_name))
25217 add_pubtype (type, subr_die);
25218 if ((dwarf_version >= 5 || !dwarf_strict)
25219 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25220 add_AT_flag (subr_die, DW_AT_reference, 1);
25221 if ((dwarf_version >= 5 || !dwarf_strict)
25222 && lang_hooks.types.type_dwarf_attribute (type,
25223 DW_AT_rvalue_reference) != -1)
25224 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25225 }
25226
25227 /* Generate a DIE for a type definition. */
25228
25229 static void
25230 gen_typedef_die (tree decl, dw_die_ref context_die)
25231 {
25232 dw_die_ref type_die;
25233 tree type;
25234
25235 if (TREE_ASM_WRITTEN (decl))
25236 {
25237 if (DECL_ORIGINAL_TYPE (decl))
25238 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25239 return;
25240 }
25241
25242 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25243 checks in process_scope_var and modified_type_die), this should be called
25244 only for original types. */
25245 gcc_assert (decl_ultimate_origin (decl) == NULL
25246 || decl_ultimate_origin (decl) == decl);
25247
25248 TREE_ASM_WRITTEN (decl) = 1;
25249 type_die = new_die (DW_TAG_typedef, context_die, decl);
25250
25251 add_name_and_src_coords_attributes (type_die, decl);
25252 if (DECL_ORIGINAL_TYPE (decl))
25253 {
25254 type = DECL_ORIGINAL_TYPE (decl);
25255 if (type == error_mark_node)
25256 return;
25257
25258 gcc_assert (type != TREE_TYPE (decl));
25259 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25260 }
25261 else
25262 {
25263 type = TREE_TYPE (decl);
25264 if (type == error_mark_node)
25265 return;
25266
25267 if (is_naming_typedef_decl (TYPE_NAME (type)))
25268 {
25269 /* Here, we are in the case of decl being a typedef naming
25270 an anonymous type, e.g.:
25271 typedef struct {...} foo;
25272 In that case TREE_TYPE (decl) is not a typedef variant
25273 type and TYPE_NAME of the anonymous type is set to the
25274 TYPE_DECL of the typedef. This construct is emitted by
25275 the C++ FE.
25276
25277 TYPE is the anonymous struct named by the typedef
25278 DECL. As we need the DW_AT_type attribute of the
25279 DW_TAG_typedef to point to the DIE of TYPE, let's
25280 generate that DIE right away. add_type_attribute
25281 called below will then pick (via lookup_type_die) that
25282 anonymous struct DIE. */
25283 if (!TREE_ASM_WRITTEN (type))
25284 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25285
25286 /* This is a GNU Extension. We are adding a
25287 DW_AT_linkage_name attribute to the DIE of the
25288 anonymous struct TYPE. The value of that attribute
25289 is the name of the typedef decl naming the anonymous
25290 struct. This greatly eases the work of consumers of
25291 this debug info. */
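	  /* Editor's sketch of the resulting DIEs for
	     "typedef struct {...} foo;":

	       DW_TAG_structure_type		<- the anonymous struct
		 DW_AT_linkage_name "foo"	(added just below)
		 ... member DIEs ...
	       DW_TAG_typedef "foo"
		 DW_AT_type -> the structure DIE above  */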
25292 add_linkage_name_raw (lookup_type_die (type), decl);
25293 }
25294 }
25295
25296 add_type_attribute (type_die, type, decl_quals (decl), false,
25297 context_die);
25298
25299 if (is_naming_typedef_decl (decl))
25300 /* We want all subsequent calls to lookup_type_die with
25301 TYPE as argument to yield the DW_TAG_typedef we have just
25302 created. */
25303 equate_type_number_to_die (type, type_die);
25304
25305 add_alignment_attribute (type_die, TREE_TYPE (decl));
25306
25307 add_accessibility_attribute (type_die, decl);
25308
25309 if (DECL_ABSTRACT_P (decl))
25310 equate_decl_number_to_die (decl, type_die);
25311
25312 if (get_AT (type_die, DW_AT_name))
25313 add_pubtype (decl, type_die);
25314 }
25315
25316 /* Generate a DIE for a struct, class, enum or union type. */
25317
25318 static void
25319 gen_tagged_type_die (tree type,
25320 dw_die_ref context_die,
25321 enum debug_info_usage usage)
25322 {
25323 if (type == NULL_TREE
25324 || !is_tagged_type (type))
25325 return;
25326
25327 if (TREE_ASM_WRITTEN (type))
25328 ;
25329 /* If this is a nested type whose containing class hasn't been written
25330 out yet, writing it out will cover this one, too. This does not apply
25331 to instantiations of member class templates; they need to be added to
25332 the containing class as they are generated. FIXME: This hurts the
25333 idea of combining type decls from multiple TUs, since we can't predict
25334 what set of template instantiations we'll get. */
25335 else if (TYPE_CONTEXT (type)
25336 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25337 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25338 {
25339 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25340
25341 if (TREE_ASM_WRITTEN (type))
25342 return;
25343
25344 /* If that failed, attach ourselves to the stub. */
25345 context_die = lookup_type_die (TYPE_CONTEXT (type));
25346 }
25347 else if (TYPE_CONTEXT (type) != NULL_TREE
25348 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25349 {
25350 /* If this type is local to a function that hasn't been written
25351 out yet, use a NULL context for now; it will be fixed up in
25352 decls_for_scope. */
25353 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25354 /* A declaration DIE doesn't count; nested types need to go in the
25355 specification. */
25356 if (context_die && is_declaration_die (context_die))
25357 context_die = NULL;
25358 }
25359 else
25360 context_die = declare_in_namespace (type, context_die);
25361
25362 if (TREE_CODE (type) == ENUMERAL_TYPE)
25363 {
25364 /* This might have been written out by the call to
25365 declare_in_namespace. */
25366 if (!TREE_ASM_WRITTEN (type))
25367 gen_enumeration_type_die (type, context_die);
25368 }
25369 else
25370 gen_struct_or_union_type_die (type, context_die, usage);
25371
25372 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25373 it up if it is ever completed. gen_*_type_die will set it for us
25374 when appropriate. */
25375 }
25376
25377 /* Generate a type description DIE. */
25378
25379 static void
25380 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25381 enum debug_info_usage usage)
25382 {
25383 struct array_descr_info info;
25384
25385 if (type == NULL_TREE || type == error_mark_node)
25386 return;
25387
25388 if (flag_checking && type)
25389 verify_type (type);
25390
25391 if (TYPE_NAME (type) != NULL_TREE
25392 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25393 && is_redundant_typedef (TYPE_NAME (type))
25394 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25395 /* The DECL of this type is a typedef we don't want to emit debug
25396 info for, but we do want debug info for its underlying type.
25397 This can happen for, e.g., the injected-class-name of a C++
25398 type. */
25399 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25400
25401 /* If TYPE is a typedef type variant, let's generate debug info
25402 for the parent typedef which TYPE is a type of. */
25403 if (typedef_variant_p (type))
25404 {
25405 if (TREE_ASM_WRITTEN (type))
25406 return;
25407
25408 tree name = TYPE_NAME (type);
25409 tree origin = decl_ultimate_origin (name);
25410 if (origin != NULL && origin != name)
25411 {
25412 gen_decl_die (origin, NULL, NULL, context_die);
25413 return;
25414 }
25415
25416 /* Prevent broken recursion; we can't hand off to the same type. */
25417 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25418
25419 /* Give typedefs the right scope. */
25420 context_die = scope_die_for (type, context_die);
25421
25422 TREE_ASM_WRITTEN (type) = 1;
25423
25424 gen_decl_die (name, NULL, NULL, context_die);
25425 return;
25426 }
25427
25428 /* If type is an anonymous tagged type named by a typedef, let's
25429 generate debug info for the typedef. */
25430 if (is_naming_typedef_decl (TYPE_NAME (type)))
25431 {
25432 /* Give typedefs the right scope. */
25433 context_die = scope_die_for (type, context_die);
25434
25435 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25436 return;
25437 }
25438
25439 if (lang_hooks.types.get_debug_type)
25440 {
25441 tree debug_type = lang_hooks.types.get_debug_type (type);
25442
25443 if (debug_type != NULL_TREE && debug_type != type)
25444 {
25445 gen_type_die_with_usage (debug_type, context_die, usage);
25446 return;
25447 }
25448 }
25449
25450 /* We are going to output a DIE to represent the unqualified version
25451 of this type (i.e. without any const or volatile qualifiers) so
25452 get the main variant (i.e. the unqualified version) of this type
25453 now. (Vectors and arrays are special because the debugging info is in the
25454 cloned type itself. Similarly function/method types can contain extra
25455 ref-qualification). */
25456 if (TREE_CODE (type) == FUNCTION_TYPE
25457 || TREE_CODE (type) == METHOD_TYPE)
25458 {
25459 /* For function/method types, can't use type_main_variant here,
25460 because that can have different ref-qualifiers for C++,
25461 but try to canonicalize. */
25462 tree main = TYPE_MAIN_VARIANT (type);
25463 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25464 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25465 && check_base_type (t, main)
25466 && check_lang_type (t, type))
25467 {
25468 type = t;
25469 break;
25470 }
25471 }
25472 else if (TREE_CODE (type) != VECTOR_TYPE
25473 && TREE_CODE (type) != ARRAY_TYPE)
25474 type = type_main_variant (type);
25475
25476 /* If this is an array type with hidden descriptor, handle it first. */
25477 if (!TREE_ASM_WRITTEN (type)
25478 && lang_hooks.types.get_array_descr_info)
25479 {
25480 memset (&info, 0, sizeof (info));
25481 if (lang_hooks.types.get_array_descr_info (type, &info))
25482 {
25483 /* Fortran sometimes emits array types with no dimension. */
25484 gcc_assert (info.ndimensions >= 0
25485 && (info.ndimensions
25486 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25487 gen_descr_array_type_die (type, &info, context_die);
25488 TREE_ASM_WRITTEN (type) = 1;
25489 return;
25490 }
25491 }
25492
25493 if (TREE_ASM_WRITTEN (type))
25494 {
25495 /* Variable-length types may be incomplete even if
25496 TREE_ASM_WRITTEN. For such types, fall through to
25497 gen_array_type_die() and possibly fill in
25498 DW_AT_{upper,lower}_bound attributes. */
25499 if ((TREE_CODE (type) != ARRAY_TYPE
25500 && TREE_CODE (type) != RECORD_TYPE
25501 && TREE_CODE (type) != UNION_TYPE
25502 && TREE_CODE (type) != QUAL_UNION_TYPE)
25503 || !variably_modified_type_p (type, NULL))
25504 return;
25505 }
25506
25507 switch (TREE_CODE (type))
25508 {
25509 case ERROR_MARK:
25510 break;
25511
25512 case POINTER_TYPE:
25513 case REFERENCE_TYPE:
25514 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25515 ensures that the gen_type_die recursion will terminate even if the
25516 type is recursive. Recursive types are possible in Ada. */
25517 /* ??? We could perhaps do this for all types before the switch
25518 statement. */
25519 TREE_ASM_WRITTEN (type) = 1;
25520
25521 /* For these types, all that is required is that we output a DIE (or a
25522 set of DIEs) to represent the "basis" type. */
25523 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25524 DINFO_USAGE_IND_USE);
25525 break;
25526
25527 case OFFSET_TYPE:
25528 /* This code is used for C++ pointer-to-data-member types.
25529 Output a description of the relevant class type. */
25530 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25531 DINFO_USAGE_IND_USE);
25532
25533 /* Output a description of the type of the object pointed to. */
25534 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25535 DINFO_USAGE_IND_USE);
25536
25537 /* Now output a DIE to represent this pointer-to-data-member type
25538 itself. */
25539 gen_ptr_to_mbr_type_die (type, context_die);
25540 break;
25541
25542 case FUNCTION_TYPE:
25543 /* Force out return type (in case it wasn't forced out already). */
25544 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25545 DINFO_USAGE_DIR_USE);
25546 gen_subroutine_type_die (type, context_die);
25547 break;
25548
25549 case METHOD_TYPE:
25550 /* Force out return type (in case it wasn't forced out already). */
25551 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25552 DINFO_USAGE_DIR_USE);
25553 gen_subroutine_type_die (type, context_die);
25554 break;
25555
25556 case ARRAY_TYPE:
25557 case VECTOR_TYPE:
25558 gen_array_type_die (type, context_die);
25559 break;
25560
25561 case ENUMERAL_TYPE:
25562 case RECORD_TYPE:
25563 case UNION_TYPE:
25564 case QUAL_UNION_TYPE:
25565 gen_tagged_type_die (type, context_die, usage);
25566 return;
25567
25568 case VOID_TYPE:
25569 case INTEGER_TYPE:
25570 case REAL_TYPE:
25571 case FIXED_POINT_TYPE:
25572 case COMPLEX_TYPE:
25573 case BOOLEAN_TYPE:
25574 /* No DIEs needed for fundamental types. */
25575 break;
25576
25577 case NULLPTR_TYPE:
25578 case LANG_TYPE:
25579 /* Just use DW_TAG_unspecified_type. */
25580 {
25581 dw_die_ref type_die = lookup_type_die (type);
25582 if (type_die == NULL)
25583 {
25584 tree name = TYPE_IDENTIFIER (type);
25585 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25586 type);
25587 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25588 equate_type_number_to_die (type, type_die);
25589 }
25590 }
25591 break;
25592
25593 default:
25594 if (is_cxx_auto (type))
25595 {
25596 tree name = TYPE_IDENTIFIER (type);
25597 dw_die_ref *die = (name == get_identifier ("auto")
25598 ? &auto_die : &decltype_auto_die);
25599 if (!*die)
25600 {
25601 *die = new_die (DW_TAG_unspecified_type,
25602 comp_unit_die (), NULL_TREE);
25603 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25604 }
25605 equate_type_number_to_die (type, *die);
25606 break;
25607 }
25608 gcc_unreachable ();
25609 }
25610
25611 TREE_ASM_WRITTEN (type) = 1;
25612 }
25613
25614 static void
25615 gen_type_die (tree type, dw_die_ref context_die)
25616 {
25617 if (type != error_mark_node)
25618 {
25619 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25620 if (flag_checking)
25621 {
25622 dw_die_ref die = lookup_type_die (type);
25623 if (die)
25624 check_die (die);
25625 }
25626 }
25627 }
25628
25629 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25630 things which are local to the given block. */
25631
25632 static void
25633 gen_block_die (tree stmt, dw_die_ref context_die)
25634 {
25635 int must_output_die = 0;
25636 bool inlined_func;
25637
25638 /* Ignore blocks that are NULL. */
25639 if (stmt == NULL_TREE)
25640 return;
25641
25642 inlined_func = inlined_function_outer_scope_p (stmt);
25643
25644 /* If the block is one fragment of a non-contiguous block, do not
25645 process the variables, since they will have been done by the
25646 origin block. Do process subblocks. */
25647 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25648 {
25649 tree sub;
25650
25651 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25652 gen_block_die (sub, context_die);
25653
25654 return;
25655 }
25656
25657 /* Determine if we need to output any Dwarf DIEs at all to represent this
25658 block. */
25659 if (inlined_func)
25660 /* The outer scopes for inlinings *must* always be represented. We
25661 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25662 must_output_die = 1;
25663 else if (lookup_block_die (stmt))
25664 /* If we already have a DIE then it was filled early. Meanwhile
25665 we might have pruned all BLOCK_VARS as optimized out, but we
25666 still want to generate high/low PC attributes, so output it. */
25667 must_output_die = 1;
25668 else if (TREE_USED (stmt)
25669 || TREE_ASM_WRITTEN (stmt))
25670 {
25671 /* Determine if this block directly contains any "significant"
25672 local declarations which we will need to output DIEs for. */
25673 if (debug_info_level > DINFO_LEVEL_TERSE)
25674 {
25675 /* We are not in terse mode so any local declaration that
25676 is not ignored for debug purposes counts as being a
25677 "significant" one. */
25678 if (BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25679 must_output_die = 1;
25680 else
25681 for (tree var = BLOCK_VARS (stmt); var; var = DECL_CHAIN (var))
25682 if (!DECL_IGNORED_P (var))
25683 {
25684 must_output_die = 1;
25685 break;
25686 }
25687 }
25688 else if (!dwarf2out_ignore_block (stmt))
25689 must_output_die = 1;
25690 }
25691
25692 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25693 DIE for any block which contains no significant local declarations at
25694 all. Rather, in such cases we just call `decls_for_scope' so that any
25695 needed Dwarf info for any sub-blocks will get properly generated. Note
25696 that in terse mode, our definition of what constitutes a "significant"
25697 local declaration gets restricted to include only inlined function
25698 instances and local (nested) function definitions. */
25699 if (must_output_die)
25700 {
25701 if (inlined_func)
25702 gen_inlined_subroutine_die (stmt, context_die);
25703 else
25704 gen_lexical_block_die (stmt, context_die);
25705 }
25706 else
25707 decls_for_scope (stmt, context_die);
25708 }
25709
25710 /* Process variable DECL (or variable with origin ORIGIN) within
25711 block STMT and add it to CONTEXT_DIE. */
25712 static void
25713 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25714 {
25715 dw_die_ref die;
25716 tree decl_or_origin = decl ? decl : origin;
25717
25718 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25719 die = lookup_decl_die (decl_or_origin);
25720 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25721 {
25722 if (TYPE_DECL_IS_STUB (decl_or_origin))
25723 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25724 else
25725 die = lookup_decl_die (decl_or_origin);
25726 /* Avoid re-creating the DIE late if it was optimized as unused early. */
25727 if (! die && ! early_dwarf)
25728 return;
25729 }
25730 else
25731 die = NULL;
25732
25733 /* Avoid creating DIEs for local typedefs and concrete static variables that
25734 will only be pruned later. */
25735 if ((origin || decl_ultimate_origin (decl))
25736 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25737 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25738 {
25739 origin = decl_ultimate_origin (decl_or_origin);
25740 if (decl && VAR_P (decl) && die != NULL)
25741 {
25742 die = lookup_decl_die (origin);
25743 if (die != NULL)
25744 equate_decl_number_to_die (decl, die);
25745 }
25746 return;
25747 }
25748
25749 if (die != NULL && die->die_parent == NULL)
25750 add_child_die (context_die, die);
25751 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25752 {
25753 if (early_dwarf)
25754 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25755 stmt, context_die);
25756 }
25757 else
25758 {
25759 if (decl && DECL_P (decl))
25760 {
25761 die = lookup_decl_die (decl);
25762
25763 /* Early created DIEs do not have a parent as the decls refer
25764 to the function as DECL_CONTEXT rather than the BLOCK. */
25765 if (die && die->die_parent == NULL)
25766 {
25767 gcc_assert (in_lto_p);
25768 add_child_die (context_die, die);
25769 }
25770 }
25771
25772 gen_decl_die (decl, origin, NULL, context_die);
25773 }
25774 }
25775
25776 /* Generate all of the decls declared within a given scope and (recursively)
25777 all of its sub-blocks. */
25778
25779 static void
25780 decls_for_scope (tree stmt, dw_die_ref context_die, bool recurse)
25781 {
25782 tree decl;
25783 unsigned int i;
25784 tree subblocks;
25785
25786 /* Ignore NULL blocks. */
25787 if (stmt == NULL_TREE)
25788 return;
25789
25790 /* Output the DIEs to represent all of the data objects and typedefs
25791 declared directly within this block but not within any nested
25792 sub-blocks. Also, nested function and tag DIEs have been
25793 generated with a parent of NULL; fix that up now. We don't
25794 have to do this if we're at -g1. */
25795 if (debug_info_level > DINFO_LEVEL_TERSE)
25796 {
25797 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25798 process_scope_var (stmt, decl, NULL_TREE, context_die);
25799 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
25800 origin - avoid doing this twice as we have no good way to see
25801 if we've done it once already. */
25802 if (! early_dwarf)
25803 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25804 {
25805 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25806 if (decl == current_function_decl)
25807 /* Ignore declarations of the current function: although they
25808 are declarations, gen_subprogram_die would treat them
25809 as definitions again, because they are equal to
25810 current_function_decl, and endlessly recurse. */;
25811 else if (TREE_CODE (decl) == FUNCTION_DECL)
25812 process_scope_var (stmt, decl, NULL_TREE, context_die);
25813 else
25814 process_scope_var (stmt, NULL_TREE, decl, context_die);
25815 }
25816 }
25817
25818 /* Even if we're at -g1, we need to process the subblocks in order to get
25819 inlined call information. */
25820
25821 /* Output the DIEs to represent all sub-blocks (and the items declared
25822 therein) of this block. */
25823 if (recurse)
25824 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25825 subblocks != NULL;
25826 subblocks = BLOCK_CHAIN (subblocks))
25827 gen_block_die (subblocks, context_die);
25828 }
25829
25830 /* Is this a typedef we can avoid emitting? */
25831
25832 static bool
25833 is_redundant_typedef (const_tree decl)
25834 {
25835 if (TYPE_DECL_IS_STUB (decl))
25836 return true;
25837
25838 if (DECL_ARTIFICIAL (decl)
25839 && DECL_CONTEXT (decl)
25840 && is_tagged_type (DECL_CONTEXT (decl))
25841 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
25842 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
25843 /* Also ignore the artificial member typedef for the class name. */
25844 return true;
25845
25846 return false;
25847 }
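/* Illustration of the second case above (assumed example, not from the
   original source): inside "struct S { ... };" the C++ front end creates an
   artificial member TYPE_DECL also named "S" (the injected-class-name);
   emitting a typedef DIE for it would merely duplicate the class DIE.  */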
25848
25849 /* Return TRUE if TYPE is a typedef that names a type for linkage
25850 purposes. This kind of typedef is produced by the C++ FE for
25851 constructs like:
25852
25853 typedef struct {...} foo;
25854
25855 In that case, there is no typedef variant type produced for foo.
25856 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
25857 struct type. */
25858
25859 static bool
25860 is_naming_typedef_decl (const_tree decl)
25861 {
25862 if (decl == NULL_TREE
25863 || TREE_CODE (decl) != TYPE_DECL
25864 || DECL_NAMELESS (decl)
25865 || !is_tagged_type (TREE_TYPE (decl))
25866 || DECL_IS_BUILTIN (decl)
25867 || is_redundant_typedef (decl)
25868 /* It looks like Ada produces TYPE_DECLs that are very similar
25869 to C++ naming typedefs but that have different
25870 semantics. Let's be specific to C++ for now. */
25871 || !is_cxx (decl))
25872 return FALSE;
25873
25874 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
25875 && TYPE_NAME (TREE_TYPE (decl)) == decl
25876 && (TYPE_STUB_DECL (TREE_TYPE (decl))
25877 != TYPE_NAME (TREE_TYPE (decl))));
25878 }
25879
25880 /* Looks up the DIE for a context. */
25881
25882 static inline dw_die_ref
25883 lookup_context_die (tree context)
25884 {
25885 if (context)
25886 {
25887 /* Find die that represents this context. */
25888 if (TYPE_P (context))
25889 {
25890 context = TYPE_MAIN_VARIANT (context);
25891 dw_die_ref ctx = lookup_type_die (context);
25892 if (!ctx)
25893 return NULL;
25894 return strip_naming_typedef (context, ctx);
25895 }
25896 else
25897 return lookup_decl_die (context);
25898 }
25899 return comp_unit_die ();
25900 }
25901
25902 /* Returns the DIE for a context. */
25903
25904 static inline dw_die_ref
25905 get_context_die (tree context)
25906 {
25907 if (context)
25908 {
25909 /* Find die that represents this context. */
25910 if (TYPE_P (context))
25911 {
25912 context = TYPE_MAIN_VARIANT (context);
25913 return strip_naming_typedef (context, force_type_die (context));
25914 }
25915 else
25916 return force_decl_die (context);
25917 }
25918 return comp_unit_die ();
25919 }
25920
25921 /* Returns the DIE for decl. A DIE will always be returned. */
25922
25923 static dw_die_ref
25924 force_decl_die (tree decl)
25925 {
25926 dw_die_ref decl_die;
25927 unsigned saved_external_flag;
25928 tree save_fn = NULL_TREE;
25929 decl_die = lookup_decl_die (decl);
25930 if (!decl_die)
25931 {
25932 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
25933
25934 decl_die = lookup_decl_die (decl);
25935 if (decl_die)
25936 return decl_die;
25937
25938 switch (TREE_CODE (decl))
25939 {
25940 case FUNCTION_DECL:
25941 /* Clear current_function_decl, so that gen_subprogram_die thinks
25942 that this is a declaration. At this point, we just want to force
25943 a declaration DIE. */
25944 save_fn = current_function_decl;
25945 current_function_decl = NULL_TREE;
25946 gen_subprogram_die (decl, context_die);
25947 current_function_decl = save_fn;
25948 break;
25949
25950 case VAR_DECL:
25951 /* Set the external flag to force a declaration DIE. Restore it
25952 after the gen_decl_die() call. */
25953 saved_external_flag = DECL_EXTERNAL (decl);
25954 DECL_EXTERNAL (decl) = 1;
25955 gen_decl_die (decl, NULL, NULL, context_die);
25956 DECL_EXTERNAL (decl) = saved_external_flag;
25957 break;
25958
25959 case NAMESPACE_DECL:
25960 if (dwarf_version >= 3 || !dwarf_strict)
25961 dwarf2out_decl (decl);
25962 else
25963 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
25964 decl_die = comp_unit_die ();
25965 break;
25966
25967 case TRANSLATION_UNIT_DECL:
25968 decl_die = comp_unit_die ();
25969 break;
25970
25971 default:
25972 gcc_unreachable ();
25973 }
25974
25975 /* We should be able to find the DIE now. */
25976 if (!decl_die)
25977 decl_die = lookup_decl_die (decl);
25978 gcc_assert (decl_die);
25979 }
25980
25981 return decl_die;
25982 }
25983
25984 /* Returns the DIE for TYPE, which must not be a base type. A DIE is
25985 always returned. */
25986
25987 static dw_die_ref
25988 force_type_die (tree type)
25989 {
25990 dw_die_ref type_die;
25991
25992 type_die = lookup_type_die (type);
25993 if (!type_die)
25994 {
25995 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
25996
25997 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
25998 false, context_die);
25999 gcc_assert (type_die);
26000 }
26001 return type_die;
26002 }
26003
26004 /* Force out any required namespaces to be able to output THING,
26005 and return the new context_die for it, if it's changed. */
26006
26007 static dw_die_ref
26008 setup_namespace_context (tree thing, dw_die_ref context_die)
26009 {
26010 tree context = (DECL_P (thing)
26011 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
26012 if (context && TREE_CODE (context) == NAMESPACE_DECL)
26013 /* Force out the namespace. */
26014 context_die = force_decl_die (context);
26015
26016 return context_die;
26017 }
26018
26019 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
26020 type) within its namespace, if appropriate.
26021
26022 For compatibility with older debuggers, namespace DIEs only contain
26023 declarations; all definitions are emitted at CU scope, with
26024 DW_AT_specification pointing to the declaration (like with class
26025 members). */
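/* Illustration (hypothetical example, not from the original source): for
   "namespace N { int x = 1; }" the DW_TAG_namespace DIE for N would contain a
   declaration DIE for x, while the defining DW_TAG_variable DIE is emitted at
   the compile-unit level with DW_AT_specification pointing back at that
   declaration.  */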
26026
26027 static dw_die_ref
26028 declare_in_namespace (tree thing, dw_die_ref context_die)
26029 {
26030 dw_die_ref ns_context;
26031
26032 if (debug_info_level <= DINFO_LEVEL_TERSE)
26033 return context_die;
26034
26035 /* External declarations in the local scope only need to be emitted
26036 once, not once in the namespace and once in the scope.
26037
26038 This avoids declaring the `extern' below in the
26039 namespace DIE as well as in the innermost scope:
26040
26041 namespace S
26042 {
26043 int i=5;
26044 int foo()
26045 {
26046 int i=8;
26047 extern int i;
26048 return i;
26049 }
26050 }
26051 */
26052 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
26053 return context_die;
26054
26055 /* If this decl is from an inlined function, then don't try to emit it in its
26056 namespace, as we will get confused. It would have already been emitted
26057 when the abstract instance of the inline function was emitted anyway. */
26058 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
26059 return context_die;
26060
26061 ns_context = setup_namespace_context (thing, context_die);
26062
26063 if (ns_context != context_die)
26064 {
26065 if (is_fortran () || is_dlang ())
26066 return ns_context;
26067 if (DECL_P (thing))
26068 gen_decl_die (thing, NULL, NULL, ns_context);
26069 else
26070 gen_type_die (thing, ns_context);
26071 }
26072 return context_die;
26073 }
26074
26075 /* Generate a DIE for a namespace or namespace alias. */
26076
26077 static void
26078 gen_namespace_die (tree decl, dw_die_ref context_die)
26079 {
26080 dw_die_ref namespace_die;
26081
26082 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
26083 they are an alias of. */
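/* Illustration (hypothetical example, not from the original source): an alias
   such as "namespace A = N;" takes the else branch below and is emitted as a
   DW_TAG_imported_declaration named "A" whose DW_AT_import refers to the DIE
   of namespace N.  */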
26084 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
26085 {
26086 /* Output a real namespace or module. */
26087 context_die = setup_namespace_context (decl, comp_unit_die ());
26088 namespace_die = new_die (is_fortran () || is_dlang ()
26089 ? DW_TAG_module : DW_TAG_namespace,
26090 context_die, decl);
26091 /* For Fortran modules defined in a different CU, don't add src coords. */
26092 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
26093 {
26094 const char *name = dwarf2_name (decl, 0);
26095 if (name)
26096 add_name_attribute (namespace_die, name);
26097 }
26098 else
26099 add_name_and_src_coords_attributes (namespace_die, decl);
26100 if (DECL_EXTERNAL (decl))
26101 add_AT_flag (namespace_die, DW_AT_declaration, 1);
26102 equate_decl_number_to_die (decl, namespace_die);
26103 }
26104 else
26105 {
26106 /* Output a namespace alias. */
26107
26108 /* Force out the namespace we are an alias of, if necessary. */
26109 dw_die_ref origin_die
26110 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
26111
26112 if (DECL_FILE_SCOPE_P (decl)
26113 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
26114 context_die = setup_namespace_context (decl, comp_unit_die ());
26115 /* Now create the namespace alias DIE. */
26116 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
26117 add_name_and_src_coords_attributes (namespace_die, decl);
26118 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
26119 equate_decl_number_to_die (decl, namespace_die);
26120 }
26121 if ((dwarf_version >= 5 || !dwarf_strict)
26122 && lang_hooks.decls.decl_dwarf_attribute (decl,
26123 DW_AT_export_symbols) == 1)
26124 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
26125
26126 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
26127 if (want_pubnames ())
26128 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
26129 }
26130
26131 /* Generate Dwarf debug information for a decl described by DECL.
26132 The return value is currently only meaningful for PARM_DECLs,
26133 for all other decls it returns NULL.
26134
26135 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
26136 It can be NULL otherwise. */
26137
26138 static dw_die_ref
26139 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
26140 dw_die_ref context_die)
26141 {
26142 tree decl_or_origin = decl ? decl : origin;
26143 tree class_origin = NULL, ultimate_origin;
26144
26145 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
26146 return NULL;
26147
26148 switch (TREE_CODE (decl_or_origin))
26149 {
26150 case ERROR_MARK:
26151 break;
26152
26153 case CONST_DECL:
26154 if (!is_fortran () && !is_ada () && !is_dlang ())
26155 {
26156 /* The individual enumerators of an enum type get output when we output
26157 the Dwarf representation of the relevant enum type itself. */
26158 break;
26159 }
26160
26161 /* Emit its type. */
26162 gen_type_die (TREE_TYPE (decl), context_die);
26163
26164 /* And its containing namespace. */
26165 context_die = declare_in_namespace (decl, context_die);
26166
26167 gen_const_die (decl, context_die);
26168 break;
26169
26170 case FUNCTION_DECL:
26171 #if 0
26172 /* FIXME */
26173 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26174 on local redeclarations of global functions. That seems broken. */
26175 if (current_function_decl != decl)
26176 /* This is only a declaration. */;
26177 #endif
26178
26179 /* We should have abstract copies already and should not generate
26180 stray type DIEs in late LTO dumping. */
26181 if (! early_dwarf)
26182 ;
26183
26184 /* If we're emitting a clone, emit info for the abstract instance. */
26185 else if (origin || DECL_ORIGIN (decl) != decl)
26186 dwarf2out_abstract_function (origin
26187 ? DECL_ORIGIN (origin)
26188 : DECL_ABSTRACT_ORIGIN (decl));
26189
26190 /* If we're emitting a possibly inlined function emit it as
26191 abstract instance. */
26192 else if (cgraph_function_possibly_inlined_p (decl)
26193 && ! DECL_ABSTRACT_P (decl)
26194 && ! class_or_namespace_scope_p (context_die)
26195 /* dwarf2out_abstract_function won't emit a die if this is just
26196 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26197 that case, because that works only if we have a die. */
26198 && DECL_INITIAL (decl) != NULL_TREE)
26199 dwarf2out_abstract_function (decl);
26200
26201 /* Otherwise we're emitting the primary DIE for this decl. */
26202 else if (debug_info_level > DINFO_LEVEL_TERSE)
26203 {
26204 /* Before we describe the FUNCTION_DECL itself, make sure that we
26205 have its containing type. */
26206 if (!origin)
26207 origin = decl_class_context (decl);
26208 if (origin != NULL_TREE)
26209 gen_type_die (origin, context_die);
26210
26211 /* And its return type. */
26212 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26213
26214 /* And its virtual context. */
26215 if (DECL_VINDEX (decl) != NULL_TREE)
26216 gen_type_die (DECL_CONTEXT (decl), context_die);
26217
26218 /* Make sure we have a member DIE for decl. */
26219 if (origin != NULL_TREE)
26220 gen_type_die_for_member (origin, decl, context_die);
26221
26222 /* And its containing namespace. */
26223 context_die = declare_in_namespace (decl, context_die);
26224 }
26225
26226 /* Now output a DIE to represent the function itself. */
26227 if (decl)
26228 gen_subprogram_die (decl, context_die);
26229 break;
26230
26231 case TYPE_DECL:
26232 /* If we are in terse mode, don't generate any DIEs to represent any
26233 actual typedefs. */
26234 if (debug_info_level <= DINFO_LEVEL_TERSE)
26235 break;
26236
26237 /* In the special case of a TYPE_DECL node representing the declaration
26238 of some type tag, if the given TYPE_DECL is marked as having been
26239 instantiated from some other (original) TYPE_DECL node (e.g. one which
26240 was generated within the original definition of an inline function) we
26241 used to generate a special (abbreviated) DW_TAG_structure_type,
26242 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26243 should actually be referencing those DIEs, as variable DIEs with that
26244 type would already be emitted in the abstract origin, so it was always
26245 removed during unused type pruning. Don't add anything in this
26246 case. */
26247 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26248 break;
26249
26250 if (is_redundant_typedef (decl))
26251 gen_type_die (TREE_TYPE (decl), context_die);
26252 else
26253 /* Output a DIE to represent the typedef itself. */
26254 gen_typedef_die (decl, context_die);
26255 break;
26256
26257 case LABEL_DECL:
26258 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26259 gen_label_die (decl, context_die);
26260 break;
26261
26262 case VAR_DECL:
26263 case RESULT_DECL:
26264 /* If we are in terse mode, don't generate any DIEs to represent any
26265 variable declarations or definitions. */
26266 if (debug_info_level <= DINFO_LEVEL_TERSE)
26267 break;
26268
26269 /* Avoid generating stray type DIEs during late dwarf dumping.
26270 All types have been dumped early. */
26271 if (early_dwarf
26272 /* ??? But in LTRANS we cannot annotate early created variably
26273 modified type DIEs without copying them and adjusting all
26274 references to them. Dump them again as happens for inlining
26275 which copies both the decl and the types. */
26276 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26277 in VLA bound information for example. */
26278 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26279 current_function_decl)))
26280 {
26281 /* Output any DIEs that are needed to specify the type of this data
26282 object. */
26283 if (decl_by_reference_p (decl_or_origin))
26284 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26285 else
26286 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26287 }
26288
26289 if (early_dwarf)
26290 {
26291 /* And its containing type. */
26292 class_origin = decl_class_context (decl_or_origin);
26293 if (class_origin != NULL_TREE)
26294 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26295
26296 /* And its containing namespace. */
26297 context_die = declare_in_namespace (decl_or_origin, context_die);
26298 }
26299
26300 /* Now output the DIE to represent the data object itself. This gets
26301 complicated because of the possibility that the VAR_DECL really
26302 represents an inlined instance of a formal parameter for an inline
26303 function. */
26304 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26305 if (ultimate_origin != NULL_TREE
26306 && TREE_CODE (ultimate_origin) == PARM_DECL)
26307 gen_formal_parameter_die (decl, origin,
26308 true /* Emit name attribute. */,
26309 context_die);
26310 else
26311 gen_variable_die (decl, origin, context_die);
26312 break;
26313
26314 case FIELD_DECL:
26315 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26316 /* Ignore the nameless fields that are used to skip bits but handle C++
26317 anonymous unions and structs. */
26318 if (DECL_NAME (decl) != NULL_TREE
26319 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26320 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26321 {
26322 gen_type_die (member_declared_type (decl), context_die);
26323 gen_field_die (decl, ctx, context_die);
26324 }
26325 break;
26326
26327 case PARM_DECL:
26328 /* Avoid generating stray type DIEs during late dwarf dumping.
26329 All types have been dumped early. */
26330 if (early_dwarf
26331 /* ??? But in LTRANS we cannot annotate early created variably
26332 modified type DIEs without copying them and adjusting all
26333 references to them. Dump them again as happens for inlining
26334 which copies both the decl and the types. */
26335 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26336 in VLA bound information for example. */
26337 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26338 current_function_decl)))
26339 {
26340 if (DECL_BY_REFERENCE (decl_or_origin))
26341 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26342 else
26343 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26344 }
26345 return gen_formal_parameter_die (decl, origin,
26346 true /* Emit name attribute. */,
26347 context_die);
26348
26349 case NAMESPACE_DECL:
26350 if (dwarf_version >= 3 || !dwarf_strict)
26351 gen_namespace_die (decl, context_die);
26352 break;
26353
26354 case IMPORTED_DECL:
26355 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26356 DECL_CONTEXT (decl), context_die);
26357 break;
26358
26359 case NAMELIST_DECL:
26360 gen_namelist_decl (DECL_NAME (decl), context_die,
26361 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26362 break;
26363
26364 default:
26365 /* Probably some frontend-internal decl. Assume we don't care. */
26366 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
26367 break;
26368 }
26369
26370 return NULL;
26371 }
26372 \f
26373 /* Output initial debug information for global DECL. Called at the
26374 end of the parsing process.
26375
26376 This is the initial debug generation process. As such, the DIEs
26377 generated may be incomplete. A later debug generation pass
26378 (dwarf2out_late_global_decl) will augment the information generated
26379 in this pass (e.g., with complete location info). */
26380
26381 static void
26382 dwarf2out_early_global_decl (tree decl)
26383 {
26384 set_early_dwarf s;
26385
26386 /* gen_decl_die() will set DECL_ABSTRACT because
26387 cgraph_function_possibly_inlined_p() returns true. This in
26388 turn will cause DW_AT_inline attributes to be set.
26389
26390 This happens because at early dwarf generation, there is no
26391 cgraph information, causing cgraph_function_possibly_inlined_p()
26392 to return true. Trick cgraph_function_possibly_inlined_p()
26393 while we generate dwarf early. */
26394 bool save = symtab->global_info_ready;
26395 symtab->global_info_ready = true;
26396
26397 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26398 other DECLs and they can point to template types or other things
26399 that dwarf2out can't handle when done via dwarf2out_decl. */
26400 if (TREE_CODE (decl) != TYPE_DECL
26401 && TREE_CODE (decl) != PARM_DECL)
26402 {
26403 if (TREE_CODE (decl) == FUNCTION_DECL)
26404 {
26405 tree save_fndecl = current_function_decl;
26406
26407 /* For nested functions, make sure we have DIEs for the parents first
26408 so that all nested DIEs are generated at the proper scope in the
26409 first shot. */
26410 tree context = decl_function_context (decl);
26411 if (context != NULL)
26412 {
26413 dw_die_ref context_die = lookup_decl_die (context);
26414 current_function_decl = context;
26415
26416 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26417 enough so that it lands in its own context. This avoids type
26418 pruning issues later on. */
26419 if (context_die == NULL || is_declaration_die (context_die))
26420 dwarf2out_early_global_decl (context);
26421 }
26422
26423 /* Emit the abstract origin of a function first. This happens
26424 with C++ constructor clones, for example, and keeps
26425 dwarf2out_abstract_function happy, since it requires the early
26426 DIE of the abstract instance to be present. */
26427 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26428 dw_die_ref origin_die;
26429 if (origin != NULL
26430 /* Do not emit the DIE multiple times but make sure to
26431 process it fully here in case we just saw a declaration. */
26432 && ((origin_die = lookup_decl_die (origin)) == NULL
26433 || is_declaration_die (origin_die)))
26434 {
26435 current_function_decl = origin;
26436 dwarf2out_decl (origin);
26437 }
26438
26439 /* Emit the DIE for decl but avoid doing that multiple times. */
26440 dw_die_ref old_die;
26441 if ((old_die = lookup_decl_die (decl)) == NULL
26442 || is_declaration_die (old_die))
26443 {
26444 current_function_decl = decl;
26445 dwarf2out_decl (decl);
26446 }
26447
26448 current_function_decl = save_fndecl;
26449 }
26450 else
26451 dwarf2out_decl (decl);
26452 }
26453 symtab->global_info_ready = save;
26454 }
26455
26456 /* Return whether EXPR is an expression with the following pattern:
26457 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
26458
26459 static bool
26460 is_trivial_indirect_ref (tree expr)
26461 {
26462 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26463 return false;
26464
26465 tree nop = TREE_OPERAND (expr, 0);
26466 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26467 return false;
26468
26469 tree int_cst = TREE_OPERAND (nop, 0);
26470 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26471 }
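/* Illustration (assumed tree shape, not from the original source): an
   expression such as "*(int *) 0x1234", represented as
   INDIRECT_REF (NOP_EXPR (INTEGER_CST)), satisfies this predicate, whereas a
   DECL_VALUE_EXPR referring to other declarations does not, so no location
   based on it is added for optimized-out symbols (see
   dwarf2out_late_global_decl below).  */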
26472
26473 /* Output debug information for global decl DECL. Called from
26474 toplev.c after compilation proper has finished. */
26475
26476 static void
26477 dwarf2out_late_global_decl (tree decl)
26478 {
26479 /* Fill in any location information we were unable to determine
26480 on the first pass. */
26481 if (VAR_P (decl))
26482 {
26483 dw_die_ref die = lookup_decl_die (decl);
26484
26485 /* We may have to generate early debug late for LTO in case debug
26486 was not enabled at compile-time or the target doesn't support
26487 the LTO early debug scheme. */
26488 if (! die && in_lto_p)
26489 {
26490 dwarf2out_decl (decl);
26491 die = lookup_decl_die (decl);
26492 }
26493
26494 if (die)
26495 {
26496 /* We get called via the symtab code invoking late_global_decl
26497 for symbols that are optimized out.
26498
26499 Do not add locations for those, except if they have a
26500 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26501 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26502 INDIRECT_REF expression, as this could generate relocations to
26503 text symbols in LTO object files, which is invalid. */
26504 varpool_node *node = varpool_node::get (decl);
26505 if ((! node || ! node->definition)
26506 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26507 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26508 tree_add_const_value_attribute_for_decl (die, decl);
26509 else
26510 add_location_or_const_value_attribute (die, decl, false);
26511 }
26512 }
26513 }
26514
26515 /* Output debug information for type decl DECL. Called from toplev.c
26516 and from language front ends (to record built-in types). */
26517 static void
26518 dwarf2out_type_decl (tree decl, int local)
26519 {
26520 if (!local)
26521 {
26522 set_early_dwarf s;
26523 dwarf2out_decl (decl);
26524 }
26525 }
26526
26527 /* Output debug information for imported module or decl DECL.
26528 NAME is non-NULL name in the lexical block if the decl has been renamed.
26529 LEXICAL_BLOCK is the lexical block (which TREE_CODE is a BLOCK)
26530 that DECL belongs to.
26531 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
26532 static void
26533 dwarf2out_imported_module_or_decl_1 (tree decl,
26534 tree name,
26535 tree lexical_block,
26536 dw_die_ref lexical_block_die)
26537 {
26538 expanded_location xloc;
26539 dw_die_ref imported_die = NULL;
26540 dw_die_ref at_import_die;
26541
26542 if (TREE_CODE (decl) == IMPORTED_DECL)
26543 {
26544 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26545 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26546 gcc_assert (decl);
26547 }
26548 else
26549 xloc = expand_location (input_location);
26550
26551 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
26552 {
26553 at_import_die = force_type_die (TREE_TYPE (decl));
26554 /* For namespace N { typedef void T; } using N::T; base_type_die
26555 returns NULL, but DW_TAG_imported_declaration requires
26556 a DW_AT_import attribute. Force creation of a DW_TAG_typedef DIE. */
26557 if (!at_import_die)
26558 {
26559 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26560 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26561 at_import_die = lookup_type_die (TREE_TYPE (decl));
26562 gcc_assert (at_import_die);
26563 }
26564 }
26565 else
26566 {
26567 at_import_die = lookup_decl_die (decl);
26568 if (!at_import_die)
26569 {
26570 /* If we're trying to avoid duplicate debug info, we may not have
26571 emitted the member decl for this field. Emit it now. */
26572 if (TREE_CODE (decl) == FIELD_DECL)
26573 {
26574 tree type = DECL_CONTEXT (decl);
26575
26576 if (TYPE_CONTEXT (type)
26577 && TYPE_P (TYPE_CONTEXT (type))
26578 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26579 DINFO_USAGE_DIR_USE))
26580 return;
26581 gen_type_die_for_member (type, decl,
26582 get_context_die (TYPE_CONTEXT (type)));
26583 }
26584 if (TREE_CODE (decl) == NAMELIST_DECL)
26585 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26586 get_context_die (DECL_CONTEXT (decl)),
26587 NULL_TREE);
26588 else
26589 at_import_die = force_decl_die (decl);
26590 }
26591 }
26592
26593 if (TREE_CODE (decl) == NAMESPACE_DECL)
26594 {
26595 if (dwarf_version >= 3 || !dwarf_strict)
26596 imported_die = new_die (DW_TAG_imported_module,
26597 lexical_block_die,
26598 lexical_block);
26599 else
26600 return;
26601 }
26602 else
26603 imported_die = new_die (DW_TAG_imported_declaration,
26604 lexical_block_die,
26605 lexical_block);
26606
26607 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26608 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26609 if (debug_column_info && xloc.column)
26610 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26611 if (name)
26612 add_AT_string (imported_die, DW_AT_name,
26613 IDENTIFIER_POINTER (name));
26614 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26615 }
26616
26617 /* Output debug information for imported module or decl DECL.
26618 NAME is non-NULL name in context if the decl has been renamed.
26619 CHILD is true if decl is one of the renamed decls as part of
26620 importing whole module.
26621 IMPLICIT is set if this hook is called for an implicit import
26622 such as inline namespace. */
26623
26624 static void
26625 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26626 bool child, bool implicit)
26627 {
26628 /* dw_die_ref at_import_die; */
26629 dw_die_ref scope_die;
26630
26631 if (debug_info_level <= DINFO_LEVEL_TERSE)
26632 return;
26633
26634 gcc_assert (decl);
26635
26636 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
26637 should be enough; for DWARF4 and older, even if we emit
26638 DW_AT_export_symbols as an extension, add the implicit DW_TAG_imported_module
26639 anyway for the benefit of consumers unaware of DW_AT_export_symbols. */
26640 if (implicit
26641 && dwarf_version >= 5
26642 && lang_hooks.decls.decl_dwarf_attribute (decl,
26643 DW_AT_export_symbols) == 1)
26644 return;
26645
26646 set_early_dwarf s;
26647
26648 /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need
26649 two DIEs: the DIE of the decl being referenced and the scope DIE. First,
26650 get the DIE for the decl itself. */
26651
26652 /* Get the scope DIE for the decl's context. Use comp_unit_die for a global
26653 module or decl. If no DIE is found for a non-global, force a new one. */
26654 if (context
26655 && TYPE_P (context)
26656 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26657 return;
26658
26659 scope_die = get_context_die (context);
26660
26661 if (child)
26662 {
26663 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26664 there is nothing we can do here. */
26665 if (dwarf_version < 3 && dwarf_strict)
26666 return;
26667
26668 gcc_assert (scope_die->die_child);
26669 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26670 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26671 scope_die = scope_die->die_child;
26672 }
26673
26674 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
26675 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26676 }
26677
26678 /* Output debug information for namelists. */
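/* Illustration (hypothetical Fortran input, not from the original source):
   "NAMELIST /state/ a, b" yields a DW_TAG_namelist DIE named "state" with one
   DW_TAG_namelist_item child per member, each carrying a DW_AT_namelist_items
   reference to that member's DIE.  */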
26679
26680 static dw_die_ref
26681 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26682 {
26683 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26684 tree value;
26685 unsigned i;
26686
26687 if (debug_info_level <= DINFO_LEVEL_TERSE)
26688 return NULL;
26689
26690 gcc_assert (scope_die != NULL);
26691 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26692 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26693
26694 /* If there are no item_decls, we have a nondefining namelist, e.g.
26695 with USE association; hence, set DW_AT_declaration. */
26696 if (item_decls == NULL_TREE)
26697 {
26698 add_AT_flag (nml_die, DW_AT_declaration, 1);
26699 return nml_die;
26700 }
26701
26702 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26703 {
26704 nml_item_ref_die = lookup_decl_die (value);
26705 if (!nml_item_ref_die)
26706 nml_item_ref_die = force_decl_die (value);
26707
26708 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26709 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26710 }
26711 return nml_die;
26712 }
26713
26714
26715 /* Write the debugging output for DECL. */
26716
26717 static void
26718 dwarf2out_decl (tree decl)
26719 {
26720 dw_die_ref context_die = comp_unit_die ();
26721
26722 switch (TREE_CODE (decl))
26723 {
26724 case ERROR_MARK:
26725 return;
26726
26727 case FUNCTION_DECL:
26728 /* If we're a nested function, initially use a parent of NULL; if we're
26729 a plain function, this will be fixed up in decls_for_scope. If
26730 we're a method, it will be ignored, since we already have a DIE.
26731 Avoid doing this late though since clones of class methods may
26732 otherwise end up in limbo and create type DIEs late. */
26733 if (early_dwarf
26734 && decl_function_context (decl)
26735 /* But if we're in terse mode, we don't care about scope. */
26736 && debug_info_level > DINFO_LEVEL_TERSE)
26737 context_die = NULL;
26738 break;
26739
26740 case VAR_DECL:
26741 /* For local statics, look up the proper context DIE. */
26742 if (local_function_static (decl))
26743 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26744
26745 /* If we are in terse mode, don't generate any DIEs to represent any
26746 variable declarations or definitions. */
26747 if (debug_info_level <= DINFO_LEVEL_TERSE)
26748 return;
26749 break;
26750
26751 case CONST_DECL:
26752 if (debug_info_level <= DINFO_LEVEL_TERSE)
26753 return;
26754 if (!is_fortran () && !is_ada () && !is_dlang ())
26755 return;
26756 if (TREE_STATIC (decl) && decl_function_context (decl))
26757 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26758 break;
26759
26760 case NAMESPACE_DECL:
26761 case IMPORTED_DECL:
26762 if (debug_info_level <= DINFO_LEVEL_TERSE)
26763 return;
26764 if (lookup_decl_die (decl) != NULL)
26765 return;
26766 break;
26767
26768 case TYPE_DECL:
26769 /* Don't emit stubs for types unless they are needed by other DIEs. */
26770 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26771 return;
26772
26773 /* Don't bother trying to generate any DIEs to represent any of the
26774 normal built-in types for the language we are compiling. */
26775 if (DECL_IS_BUILTIN (decl))
26776 return;
26777
26778 /* If we are in terse mode, don't generate any DIEs for types. */
26779 if (debug_info_level <= DINFO_LEVEL_TERSE)
26780 return;
26781
26782 /* If we're a function-scope tag, initially use a parent of NULL;
26783 this will be fixed up in decls_for_scope. */
26784 if (decl_function_context (decl))
26785 context_die = NULL;
26786
26787 break;
26788
26789 case NAMELIST_DECL:
26790 break;
26791
26792 default:
26793 return;
26794 }
26795
26796 gen_decl_die (decl, NULL, NULL, context_die);
26797
26798 if (flag_checking)
26799 {
26800 dw_die_ref die = lookup_decl_die (decl);
26801 if (die)
26802 check_die (die);
26803 }
26804 }
26805
26806 /* Write the debugging output for DECL. */
26807
26808 static void
26809 dwarf2out_function_decl (tree decl)
26810 {
26811 dwarf2out_decl (decl);
26812 call_arg_locations = NULL;
26813 call_arg_loc_last = NULL;
26814 call_site_count = -1;
26815 tail_call_site_count = -1;
26816 decl_loc_table->empty ();
26817 cached_dw_loc_list_table->empty ();
26818 }
26819
26820 /* Output a marker (i.e. a label) for the beginning of the generated code for
26821 a lexical block. */
26822
26823 static void
26824 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26825 unsigned int blocknum)
26826 {
26827 switch_to_section (current_function_section ());
26828 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
26829 }
26830
26831 /* Output a marker (i.e. a label) for the end of the generated code for a
26832 lexical block. */
26833
26834 static void
26835 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
26836 {
26837 switch_to_section (current_function_section ());
26838 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
26839 }
26840
26841 /* Returns nonzero if it is appropriate not to emit any debugging
26842 information for BLOCK, because it doesn't contain any instructions.
26843
26844 Don't allow this for blocks with nested functions or local classes
26845 as we would end up with orphans, and in the presence of scheduling
26846 we may end up calling them anyway. */
26847
26848 static bool
26849 dwarf2out_ignore_block (const_tree block)
26850 {
26851 tree decl;
26852 unsigned int i;
26853
26854 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
26855 if (TREE_CODE (decl) == FUNCTION_DECL
26856 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26857 return 0;
26858 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
26859 {
26860 decl = BLOCK_NONLOCALIZED_VAR (block, i);
26861 if (TREE_CODE (decl) == FUNCTION_DECL
26862 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26863 return 0;
26864 }
26865
26866 return 1;
26867 }
26868
26869 /* Hash table routines for file_hash. */
26870
26871 bool
26872 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
26873 {
26874 return filename_cmp (p1->filename, p2) == 0;
26875 }
26876
26877 hashval_t
26878 dwarf_file_hasher::hash (dwarf_file_data *p)
26879 {
26880 return htab_hash_string (p->filename);
26881 }
26882
26883 /* Look up FILE_NAME (in the list of filenames that we know about here in
26884 dwarf2out.c) and return its "index". The index of each (known) filename is
26885 just a unique number which is associated with only that one filename. We
26886 need such numbers for the sake of generating labels (in the .debug_sfnames
26887 section) and references to those file numbers (in the .debug_srcinfo
26888 and .debug_macinfo sections). If the filename given as an argument is not
26889 found in our current list, add it to the list and assign it the next
26890 available unique index number. */
26891
26892 static struct dwarf_file_data *
26893 lookup_filename (const char *file_name)
26894 {
26895 struct dwarf_file_data * created;
26896
26897 if (!file_name)
26898 return NULL;
26899
26900 dwarf_file_data **slot
26901 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
26902 INSERT);
26903 if (*slot)
26904 return *slot;
26905
26906 created = ggc_alloc<dwarf_file_data> ();
26907 created->filename = file_name;
26908 created->emitted_number = 0;
26909 *slot = created;
26910 return created;
26911 }
26912
26913 /* If the assembler will construct the file table, then translate the compiler
26914 internal file table number into the assembler file table number, and emit
26915 a .file directive if we haven't already emitted one yet. The file table
26916 numbers are different because we prune debug info for unused variables and
26917 types, which may include filenames. */
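/* Illustration (output sketch under the above assumptions, exact syntax is
   target/assembler dependent): for the second distinct filename seen this
   would emit something like
       .file 2 "src/foo.c"
   where 2 is the assigned emitted_number and the name has been passed
   through remap_debug_filename.  */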
26918
26919 static int
26920 maybe_emit_file (struct dwarf_file_data * fd)
26921 {
26922 if (! fd->emitted_number)
26923 {
26924 if (last_emitted_file)
26925 fd->emitted_number = last_emitted_file->emitted_number + 1;
26926 else
26927 fd->emitted_number = 1;
26928 last_emitted_file = fd;
26929
26930 if (output_asm_line_debug_info ())
26931 {
26932 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
26933 output_quoted_string (asm_out_file,
26934 remap_debug_filename (fd->filename));
26935 fputc ('\n', asm_out_file);
26936 }
26937 }
26938
26939 return fd->emitted_number;
26940 }
26941
26942 /* Schedule generation of a DW_AT_const_value attribute to DIE.
26943 That generation should happen after function debug info has been
26944 generated. The value of the attribute is the constant value of ARG. */
26945
26946 static void
26947 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
26948 {
26949 die_arg_entry entry;
26950
26951 if (!die || !arg)
26952 return;
26953
26954 gcc_assert (early_dwarf);
26955
26956 if (!tmpl_value_parm_die_table)
26957 vec_alloc (tmpl_value_parm_die_table, 32);
26958
26959 entry.die = die;
26960 entry.arg = arg;
26961 vec_safe_push (tmpl_value_parm_die_table, entry);
26962 }
26963
26964 /* Return TRUE if T is an instance of a generic type, FALSE
26965 otherwise. */
26966
26967 static bool
26968 generic_type_p (tree t)
26969 {
26970 if (t == NULL_TREE || !TYPE_P (t))
26971 return false;
26972 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
26973 }
26974
26975 /* Schedule the generation of the generic parameter DIEs for the
26976 instance of generic type T. The actual generation is done later
26977 by gen_scheduled_generic_parms_dies. */
26978
26979 static void
26980 schedule_generic_params_dies_gen (tree t)
26981 {
26982 if (!generic_type_p (t))
26983 return;
26984
26985 gcc_assert (early_dwarf);
26986
26987 if (!generic_type_instances)
26988 vec_alloc (generic_type_instances, 256);
26989
26990 vec_safe_push (generic_type_instances, t);
26991 }
26992
26993 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
26994 by append_entry_to_tmpl_value_parm_die_table. This function must
26995 be called after function DIEs have been generated. */
26996
26997 static void
26998 gen_remaining_tmpl_value_param_die_attribute (void)
26999 {
27000 if (tmpl_value_parm_die_table)
27001 {
27002 unsigned i, j;
27003 die_arg_entry *e;
27004
27005 /* We do this in two phases - first get the cases we can
27006 handle during early-finish, preserving those we cannot
27007 (containing symbolic constants where we don't yet know
27008 whether we are going to output the referenced symbols).
27009 For those we try again at late-finish. */
27010 j = 0;
27011 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
27012 {
27013 if (!e->die->removed
27014 && !tree_add_const_value_attribute (e->die, e->arg))
27015 {
27016 dw_loc_descr_ref loc = NULL;
27017 if (! early_dwarf
27018 && (dwarf_version >= 5 || !dwarf_strict))
27019 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
27020 if (loc)
27021 add_AT_loc (e->die, DW_AT_location, loc);
27022 else
27023 (*tmpl_value_parm_die_table)[j++] = *e;
27024 }
27025 }
27026 tmpl_value_parm_die_table->truncate (j);
27027 }
27028 }
27029
27030 /* Generate generic parameters DIEs for instances of generic types
27031 that have been previously scheduled by
27032 schedule_generic_params_dies_gen. This function must be called
27033 after all the types of the CU have been laid out. */
27034
27035 static void
27036 gen_scheduled_generic_parms_dies (void)
27037 {
27038 unsigned i;
27039 tree t;
27040
27041 if (!generic_type_instances)
27042 return;
27043
27044 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
27045 if (COMPLETE_TYPE_P (t))
27046 gen_generic_params_dies (t);
27047
27048 generic_type_instances = NULL;
27049 }
27050
27051
27052 /* Replace DW_AT_name for the decl with name. */
27053
27054 static void
27055 dwarf2out_set_name (tree decl, tree name)
27056 {
27057 dw_die_ref die;
27058 dw_attr_node *attr;
27059 const char *dname;
27060
27061 die = TYPE_SYMTAB_DIE (decl);
27062 if (!die)
27063 return;
27064
27065 dname = dwarf2_name (name, 0);
27066 if (!dname)
27067 return;
27068
27069 attr = get_AT (die, DW_AT_name);
27070 if (attr)
27071 {
27072 struct indirect_string_node *node;
27073
27074 node = find_AT_string (dname);
27075 /* replace the string. */
27076 attr->dw_attr_val.v.val_str = node;
27077 }
27078
27079 else
27080 add_name_attribute (die, dname);
27081 }
27082
27083 /* True if before or during processing of the first function being emitted. */
27084 static bool in_first_function_p = true;
27085 /* True if loc_note during dwarf2out_var_location call might still be
27086 before first real instruction at address equal to .Ltext0. */
27087 static bool maybe_at_text_label_p = true;
27088 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
27089 static unsigned int first_loclabel_num_not_at_text_label;
27090
27091 /* Look ahead for a real insn, or for a begin stmt marker. */
27092
27093 static rtx_insn *
27094 dwarf2out_next_real_insn (rtx_insn *loc_note)
27095 {
27096 rtx_insn *next_real = NEXT_INSN (loc_note);
27097
27098 while (next_real)
27099 if (INSN_P (next_real))
27100 break;
27101 else
27102 next_real = NEXT_INSN (next_real);
27103
27104 return next_real;
27105 }
27106
27107 /* Called by the final INSN scan whenever we see a var location. We
27108 use it to drop labels in the right places, and throw the location in
27109 our lookup table. */
27110
27111 static void
27112 dwarf2out_var_location (rtx_insn *loc_note)
27113 {
27114 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
27115 struct var_loc_node *newloc;
27116 rtx_insn *next_real, *next_note;
27117 rtx_insn *call_insn = NULL;
27118 static const char *last_label;
27119 static const char *last_postcall_label;
27120 static bool last_in_cold_section_p;
27121 static rtx_insn *expected_next_loc_note;
27122 tree decl;
27123 bool var_loc_p;
27124 var_loc_view view = 0;
27125
27126 if (!NOTE_P (loc_note))
27127 {
27128 if (CALL_P (loc_note))
27129 {
27130 maybe_reset_location_view (loc_note, cur_line_info_table);
27131 call_site_count++;
27132 if (SIBLING_CALL_P (loc_note))
27133 tail_call_site_count++;
27134 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
27135 {
27136 call_insn = loc_note;
27137 loc_note = NULL;
27138 var_loc_p = false;
27139
27140 next_real = dwarf2out_next_real_insn (call_insn);
27141 next_note = NULL;
27142 cached_next_real_insn = NULL;
27143 goto create_label;
27144 }
27145 if (optimize == 0 && !flag_var_tracking)
27146 {
27147 /* When the var-tracking pass is not running, there is no note
27148 for indirect calls whose target is compile-time known. In this
27149 case, process such calls specifically so that we generate call
27150 sites for them anyway. */
27151 rtx x = PATTERN (loc_note);
27152 if (GET_CODE (x) == PARALLEL)
27153 x = XVECEXP (x, 0, 0);
27154 if (GET_CODE (x) == SET)
27155 x = SET_SRC (x);
27156 if (GET_CODE (x) == CALL)
27157 x = XEXP (x, 0);
27158 if (!MEM_P (x)
27159 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27160 || !SYMBOL_REF_DECL (XEXP (x, 0))
27161 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27162 != FUNCTION_DECL))
27163 {
27164 call_insn = loc_note;
27165 loc_note = NULL;
27166 var_loc_p = false;
27167
27168 next_real = dwarf2out_next_real_insn (call_insn);
27169 next_note = NULL;
27170 cached_next_real_insn = NULL;
27171 goto create_label;
27172 }
27173 }
27174 }
27175 else if (!debug_variable_location_views)
27176 gcc_unreachable ();
27177 else
27178 maybe_reset_location_view (loc_note, cur_line_info_table);
27179
27180 return;
27181 }
27182
27183 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27184 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27185 return;
27186
27187 /* Optimize processing a large consecutive sequence of location
27188 notes so we don't spend too much time in next_real_insn. If the
27189 next insn is another location note, remember the next_real_insn
27190 calculation for next time. */
27191 next_real = cached_next_real_insn;
27192 if (next_real)
27193 {
27194 if (expected_next_loc_note != loc_note)
27195 next_real = NULL;
27196 }
27197
27198 next_note = NEXT_INSN (loc_note);
27199 if (! next_note
27200 || next_note->deleted ()
27201 || ! NOTE_P (next_note)
27202 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
27203 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
27204 && NOTE_KIND (next_note) != NOTE_INSN_INLINE_ENTRY))
27205 next_note = NULL;
27206
27207 if (! next_real)
27208 next_real = dwarf2out_next_real_insn (loc_note);
27209
27210 if (next_note)
27211 {
27212 expected_next_loc_note = next_note;
27213 cached_next_real_insn = next_real;
27214 }
27215 else
27216 cached_next_real_insn = NULL;
27217
27218 /* If there are no instructions which would be affected by this note,
27219 don't do anything. */
27220 if (var_loc_p
27221 && next_real == NULL_RTX
27222 && !NOTE_DURING_CALL_P (loc_note))
27223 return;
27224
27225 create_label:
27226
27227 if (next_real == NULL_RTX)
27228 next_real = get_last_insn ();
27229
27230 /* If there were any real insns between the note we processed last time
27231 and this note (or if it is the first note), clear
27232 last_{,postcall_}label so that they are not reused this time. */
27233 if (last_var_location_insn == NULL_RTX
27234 || last_var_location_insn != next_real
27235 || last_in_cold_section_p != in_cold_section_p)
27236 {
27237 last_label = NULL;
27238 last_postcall_label = NULL;
27239 }
27240
27241 if (var_loc_p)
27242 {
27243 const char *label
27244 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27245 view = cur_line_info_table->view;
27246 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27247 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27248 if (newloc == NULL)
27249 return;
27250 }
27251 else
27252 {
27253 decl = NULL_TREE;
27254 newloc = NULL;
27255 }
27256
27257 /* If there were no real insns between the note we processed last time
27258 and this note, use the label we emitted last time. Otherwise
27259 create a new label and emit it. */
27260 if (last_label == NULL)
27261 {
27262 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27263 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27264 loclabel_num++;
27265 last_label = ggc_strdup (loclabel);
27266 /* See if loclabel might be equal to .Ltext0. If yes,
27267 bump first_loclabel_num_not_at_text_label. */
27268 if (!have_multiple_function_sections
27269 && in_first_function_p
27270 && maybe_at_text_label_p)
27271 {
27272 static rtx_insn *last_start;
27273 rtx_insn *insn;
27274 for (insn = loc_note; insn; insn = previous_insn (insn))
27275 if (insn == last_start)
27276 break;
27277 else if (!NONDEBUG_INSN_P (insn))
27278 continue;
27279 else
27280 {
27281 rtx body = PATTERN (insn);
27282 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27283 continue;
27284 /* Inline asm could occupy zero bytes. */
27285 else if (GET_CODE (body) == ASM_INPUT
27286 || asm_noperands (body) >= 0)
27287 continue;
27288 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27289 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27290 continue;
27291 #endif
27292 else
27293 {
27294 /* Assume insn has non-zero length. */
27295 maybe_at_text_label_p = false;
27296 break;
27297 }
27298 }
27299 if (maybe_at_text_label_p)
27300 {
27301 last_start = loc_note;
27302 first_loclabel_num_not_at_text_label = loclabel_num;
27303 }
27304 }
27305 }
27306
27307 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27308 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27309
27310 if (!var_loc_p)
27311 {
27312 struct call_arg_loc_node *ca_loc
27313 = ggc_cleared_alloc<call_arg_loc_node> ();
27314 rtx_insn *prev = call_insn;
27315
27316 ca_loc->call_arg_loc_note
27317 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27318 ca_loc->next = NULL;
27319 ca_loc->label = last_label;
27320 gcc_assert (prev
27321 && (CALL_P (prev)
27322 || (NONJUMP_INSN_P (prev)
27323 && GET_CODE (PATTERN (prev)) == SEQUENCE
27324 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27325 if (!CALL_P (prev))
27326 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27327 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27328
27329 /* Look for a SYMBOL_REF in the "prev" instruction. */
27330 rtx x = get_call_rtx_from (PATTERN (prev));
27331 if (x)
27332 {
27333 /* Try to get the call symbol, if any. */
27334 if (MEM_P (XEXP (x, 0)))
27335 x = XEXP (x, 0);
27336 /* First, look for a memory access to a symbol_ref. */
27337 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27338 && SYMBOL_REF_DECL (XEXP (x, 0))
27339 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27340 ca_loc->symbol_ref = XEXP (x, 0);
27341 /* Otherwise, look at a compile-time known user-level function
27342 declaration. */
27343 else if (MEM_P (x)
27344 && MEM_EXPR (x)
27345 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27346 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27347 }
27348
27349 ca_loc->block = insn_scope (prev);
27350 if (call_arg_locations)
27351 call_arg_loc_last->next = ca_loc;
27352 else
27353 call_arg_locations = ca_loc;
27354 call_arg_loc_last = ca_loc;
27355 }
27356 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27357 {
27358 newloc->label = last_label;
27359 newloc->view = view;
27360 }
27361 else
27362 {
27363 if (!last_postcall_label)
27364 {
27365 sprintf (loclabel, "%s-1", last_label);
27366 last_postcall_label = ggc_strdup (loclabel);
27367 }
27368 newloc->label = last_postcall_label;
27369 /* ??? This view is at last_label, not last_label-1, but we
27370 could only assume view at last_label-1 is zero if we could
27371 assume calls always have length greater than one. This is
27372 probably true in general, though there might be a rare
27373 exception to this rule, e.g. if a call insn is optimized out
27374 by target magic. Then, even the -1 in the label will be
27375 wrong, which might invalidate the range. Anyway, using view,
27376 though technically possibly incorrect, will work as far as
27377 ranges go: since L-1 is in the middle of the call insn,
27378 (L-1).0 and (L-1).V shouldn't make any difference, and having
27379 the loclist entry refer to the .loc entry might be useful, so
27380 leave it like this. */
27381 newloc->view = view;
27382 }
27383
27384 if (var_loc_p && flag_debug_asm)
27385 {
27386 const char *name, *sep, *patstr;
27387 if (decl && DECL_NAME (decl))
27388 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27389 else
27390 name = "";
27391 if (NOTE_VAR_LOCATION_LOC (loc_note))
27392 {
27393 sep = " => ";
27394 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27395 }
27396 else
27397 {
27398 sep = " ";
27399 patstr = "RESET";
27400 }
27401 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27402 name, sep, patstr);
27403 }
27404
27405 last_var_location_insn = next_real;
27406 last_in_cold_section_p = in_cold_section_p;
27407 }
27408
27409 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27410 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27411 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27412 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27413 BLOCK_FRAGMENT_ORIGIN links. */
27414 static bool
27415 block_within_block_p (tree block, tree outer, bool bothways)
27416 {
27417 if (block == outer)
27418 return true;
27419
27420 /* Quickly check that OUTER is somewhere up BLOCK's supercontext chain. */
27421 for (tree context = BLOCK_SUPERCONTEXT (block);
27422 context != outer;
27423 context = BLOCK_SUPERCONTEXT (context))
27424 if (!context || TREE_CODE (context) != BLOCK)
27425 return false;
27426
27427 if (!bothways)
27428 return true;
27429
27430 /* Now check that each block is actually referenced by its
27431 parent. */
27432 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27433 context = BLOCK_SUPERCONTEXT (context))
27434 {
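/* A block fragment carries no subblocks of its own (asserted below);
its children are reached through the fragment origin, so redirect the
walk there before scanning BLOCK_SUBBLOCKS. */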
27435 if (BLOCK_FRAGMENT_ORIGIN (context))
27436 {
27437 gcc_assert (!BLOCK_SUBBLOCKS (context));
27438 context = BLOCK_FRAGMENT_ORIGIN (context);
27439 }
27440 for (tree sub = BLOCK_SUBBLOCKS (context);
27441 sub != block;
27442 sub = BLOCK_CHAIN (sub))
27443 if (!sub)
27444 return false;
27445 if (context == outer)
27446 return true;
27447 else
27448 block = context;
27449 }
27450 }
27451
27452 /* Called during final while assembling the marker of the entry point
27453 for an inlined function. */
27454
27455 static void
27456 dwarf2out_inline_entry (tree block)
27457 {
27458 gcc_assert (debug_inline_points);
27459
27460 /* If we can't represent it, don't bother. */
27461 if (!(dwarf_version >= 3 || !dwarf_strict))
27462 return;
27463
27464 gcc_assert (DECL_P (block_ultimate_origin (block)));
27465
27466 /* Sanity check the block tree. This would catch a case in which
27467 BLOCK got removed from the tree reachable from the outermost
27468 lexical block, but got retained in markers. It would still link
27469 back to its parents, but some ancestor would be missing a link
27470 down the path to the sub BLOCK. If the block got removed, its
27471 BLOCK_NUMBER will not be a usable value. */
27472 if (flag_checking)
27473 gcc_assert (block_within_block_p (block,
27474 DECL_INITIAL (current_function_decl),
27475 true));
27476
27477 gcc_assert (inlined_function_outer_scope_p (block));
27478 gcc_assert (!lookup_block_die (block));
27479
27480 if (BLOCK_FRAGMENT_ORIGIN (block))
27481 block = BLOCK_FRAGMENT_ORIGIN (block);
27482 /* Can the entry point ever not be at the beginning of an
27483 unfragmented lexical block? */
27484 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27485 || (cur_line_info_table
27486 && !ZERO_VIEW_P (cur_line_info_table->view))))
27487 return;
27488
27489 if (!inline_entry_data_table)
27490 inline_entry_data_table
27491 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27492
27493
27494 inline_entry_data **iedp
27495 = inline_entry_data_table->find_slot_with_hash (block,
27496 htab_hash_pointer (block),
27497 INSERT);
27498 if (*iedp)
27499 /* ??? Ideally, we'd record all entry points for the same inlined
27500 function (some may have been duplicated by e.g. unrolling), but
27501 we have no way to represent that ATM. */
27502 return;
27503
27504 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27505 ied->block = block;
27506 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27507 ied->label_num = BLOCK_NUMBER (block);
27508 if (cur_line_info_table)
27509 ied->view = cur_line_info_table->view;
27510
27511 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27512
27513 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_INLINE_ENTRY_LABEL,
27514 BLOCK_NUMBER (block));
27515 ASM_OUTPUT_LABEL (asm_out_file, label);
27516 }
27517
27518 /* Called from finalize_size_functions for size functions so that their body
27519 can be encoded in the debug info to describe the layout of variable-length
27520 structures. */
27521
27522 static void
27523 dwarf2out_size_function (tree decl)
27524 {
27525 function_to_dwarf_procedure (decl);
27526 }
27527
27528 /* Note in one location list that text section has changed. */
27529
27530 int
27531 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27532 {
27533 var_loc_list *list = *slot;
27534 if (list->first)
27535 list->last_before_switch
27536 = list->last->next ? list->last->next : list->last;
27537 return 1;
27538 }
27539
27540 /* Note in all location lists that text section has changed. */
27541
27542 static void
27543 var_location_switch_text_section (void)
27544 {
27545 if (decl_loc_table == NULL)
27546 return;
27547
27548 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27549 }
27550
27551 /* Create a new line number table. */
27552
27553 static dw_line_info_table *
27554 new_line_info_table (void)
27555 {
27556 dw_line_info_table *table;
27557
27558 table = ggc_cleared_alloc<dw_line_info_table> ();
27559 table->file_num = 1;
27560 table->line_num = 1;
27561 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
27562 FORCE_RESET_NEXT_VIEW (table->view);
27563 table->symviews_since_reset = 0;
27564
27565 return table;
27566 }
27567
27568 /* Look up the "current" table into which we emit line info, so
27569 that we don't have to do it for every source line. */
27570
27571 static void
27572 set_cur_line_info_table (section *sec)
27573 {
27574 dw_line_info_table *table;
27575
27576 if (sec == text_section)
27577 table = text_section_line_info;
27578 else if (sec == cold_text_section)
27579 {
27580 table = cold_text_section_line_info;
27581 if (!table)
27582 {
27583 cold_text_section_line_info = table = new_line_info_table ();
27584 table->end_label = cold_end_label;
27585 }
27586 }
27587 else
27588 {
27589 const char *end_label;
27590
27591 if (crtl->has_bb_partition)
27592 {
27593 if (in_cold_section_p)
27594 end_label = crtl->subsections.cold_section_end_label;
27595 else
27596 end_label = crtl->subsections.hot_section_end_label;
27597 }
27598 else
27599 {
27600 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27601 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27602 current_function_funcdef_no);
27603 end_label = ggc_strdup (label);
27604 }
27605
27606 table = new_line_info_table ();
27607 table->end_label = end_label;
27608
27609 vec_safe_push (separate_line_info, table);
27610 }
27611
27612 if (output_asm_line_debug_info ())
27613 table->is_stmt = (cur_line_info_table
27614 ? cur_line_info_table->is_stmt
27615 : DWARF_LINE_DEFAULT_IS_STMT_START);
27616 cur_line_info_table = table;
27617 }
27618
27619
27620 /* We need to reset the locations at the beginning of each
27621 function. We can't do this in the end_function hook, because the
27622 declarations that use the locations won't have been output when
27623 that hook is called. Also compute have_multiple_function_sections here. */
27624
27625 static void
27626 dwarf2out_begin_function (tree fun)
27627 {
27628 section *sec = function_section (fun);
27629
27630 if (sec != text_section)
27631 have_multiple_function_sections = true;
27632
27633 if (crtl->has_bb_partition && !cold_text_section)
27634 {
27635 gcc_assert (current_function_decl == fun);
27636 cold_text_section = unlikely_text_section ();
27637 switch_to_section (cold_text_section);
27638 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27639 switch_to_section (sec);
27640 }
27641
27642 dwarf2out_note_section_used ();
27643 call_site_count = 0;
27644 tail_call_site_count = 0;
27645
27646 set_cur_line_info_table (sec);
27647 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27648 }
27649
27650 /* Helper function of dwarf2out_end_function, called only after emitting
27651 the very first function into assembly. Check if some .debug_loc range
27652 might end with a .LVL* label that could be equal to .Ltext0.
27653 In that case we must force using absolute addresses in .debug_loc ranges,
27654 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27655 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27656 list terminator.
27657 Set have_multiple_function_sections to true in that case and
27658 terminate htab traversal. */
27659
27660 int
27661 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27662 {
27663 var_loc_list *entry = *slot;
27664 struct var_loc_node *node;
27665
27666 node = entry->first;
27667 if (node && node->next && node->next->label)
27668 {
27669 unsigned int i;
27670 const char *label = node->next->label;
27671 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27672
27673 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27674 {
27675 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27676 if (strcmp (label, loclabel) == 0)
27677 {
27678 have_multiple_function_sections = true;
27679 return 0;
27680 }
27681 }
27682 }
27683 return 1;
27684 }
27685
27686 /* Hook called after emitting a function into assembly.
27687 This does something only for the very first function emitted. */
27688
27689 static void
27690 dwarf2out_end_function (unsigned int)
27691 {
27692 if (in_first_function_p
27693 && !have_multiple_function_sections
27694 && first_loclabel_num_not_at_text_label
27695 && decl_loc_table)
27696 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27697 in_first_function_p = false;
27698 maybe_at_text_label_p = false;
27699 }
27700
27701 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27702 front-ends register a translation unit even before dwarf2out_init is
27703 called. */
27704 static tree main_translation_unit = NULL_TREE;
27705
27706 /* Hook called by front-ends after they built their main translation unit.
27707 Associate comp_unit_die to UNIT. */
27708
27709 static void
27710 dwarf2out_register_main_translation_unit (tree unit)
27711 {
27712 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27713 && main_translation_unit == NULL_TREE);
27714 main_translation_unit = unit;
27715 /* If dwarf2out_init has not been called yet, it will perform the association
27716 itself looking at main_translation_unit. */
27717 if (decl_die_table != NULL)
27718 equate_decl_number_to_die (unit, comp_unit_die ());
27719 }
27720
27721 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27722
27723 static void
27724 push_dw_line_info_entry (dw_line_info_table *table,
27725 enum dw_line_info_opcode opcode, unsigned int val)
27726 {
27727 dw_line_info_entry e;
27728 e.opcode = opcode;
27729 e.val = val;
27730 vec_safe_push (table->entries, e);
27731 }
27732
27733 /* Output a label to mark the beginning of a source code line entry
27734 and record information relating to this source line, in
27735 'line_info_table' for later output of the .debug_line section. */
27736 /* ??? The discriminator parameter ought to be unsigned. */
27737
27738 static void
27739 dwarf2out_source_line (unsigned int line, unsigned int column,
27740 const char *filename,
27741 int discriminator, bool is_stmt)
27742 {
27743 unsigned int file_num;
27744 dw_line_info_table *table;
27745 static var_loc_view lvugid;
27746
27747 if (debug_info_level < DINFO_LEVEL_TERSE)
27748 return;
27749
27750 table = cur_line_info_table;
27751
27752 if (line == 0)
27753 {
27754 if (debug_variable_location_views
27755 && output_asm_line_debug_info ()
27756 && table && !RESETTING_VIEW_P (table->view))
27757 {
27758 /* If we're using the assembler to compute view numbers, we
27759 can't issue a .loc directive for line zero, so we can't
27760 get a view number at this point. We might attempt to
27761 compute it from the previous view, or equate it to a
27762 subsequent view (though it might not be there!), but
27763 since we're omitting the line number entry, we might as
27764 well omit the view number as well. That means pretending
27765 it's a view number zero, which might very well turn out
27766 to be correct. ??? Extend the assembler so that the
27767 compiler could emit e.g. ".locview .LVU#", to output a
27768 view without changing line number information. We'd then
27769 have to count it in symviews_since_reset; when it's omitted,
27770 it doesn't count. */
27771 if (!zero_view_p)
27772 zero_view_p = BITMAP_GGC_ALLOC ();
27773 bitmap_set_bit (zero_view_p, table->view);
27774 if (flag_debug_asm)
27775 {
27776 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27777 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27778 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27779 ASM_COMMENT_START);
27780 assemble_name (asm_out_file, label);
27781 putc ('\n', asm_out_file);
27782 }
27783 table->view = ++lvugid;
27784 }
27785 return;
27786 }
27787
27788 /* The discriminator column was added in dwarf4. Simplify the below
27789 by simply removing it if we're not supposed to output it. */
27790 if (dwarf_version < 4 && dwarf_strict)
27791 discriminator = 0;
27792
27793 if (!debug_column_info)
27794 column = 0;
27795
27796 file_num = maybe_emit_file (lookup_filename (filename));
27797
27798 /* ??? TODO: Elide duplicate line number entries. Traditionally,
27799 the debugger has used the second (possibly duplicate) line number
27800 at the beginning of the function to mark the end of the prologue.
27801 We could eliminate any other duplicates within the function. For
27802 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
27803 that second line number entry. */
27804 /* Recall that this end-of-prologue indication is *not* the same thing
27805 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
27806 to which the hook corresponds, follows the last insn that was
27807 emitted by gen_prologue. What we need is to precede the first insn
27808 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
27809 insn that corresponds to something the user wrote. These may be
27810 very different locations once scheduling is enabled. */
27811
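/* The leading "0 &&" keeps this duplicate-elision early return disabled
for now; see the ??? TODO comment above. */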
27812 if (0 && file_num == table->file_num
27813 && line == table->line_num
27814 && column == table->column_num
27815 && discriminator == table->discrim_num
27816 && is_stmt == table->is_stmt)
27817 return;
27818
27819 switch_to_section (current_function_section ());
27820
27821 /* If requested, emit something human-readable. */
27822 if (flag_debug_asm)
27823 {
27824 if (debug_column_info)
27825 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
27826 filename, line, column);
27827 else
27828 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
27829 filename, line);
27830 }
27831
27832 if (output_asm_line_debug_info ())
27833 {
27834 /* Emit the .loc directive understood by GNU as. */
27835 /* "\t.loc %u %u 0 is_stmt %u discriminator %u",
27836 file_num, line, is_stmt, discriminator */
27837 fputs ("\t.loc ", asm_out_file);
27838 fprint_ul (asm_out_file, file_num);
27839 putc (' ', asm_out_file);
27840 fprint_ul (asm_out_file, line);
27841 putc (' ', asm_out_file);
27842 fprint_ul (asm_out_file, column);
27843
27844 if (is_stmt != table->is_stmt)
27845 {
27846 #if HAVE_GAS_LOC_STMT
27847 fputs (" is_stmt ", asm_out_file);
27848 putc (is_stmt ? '1' : '0', asm_out_file);
27849 #endif
27850 }
27851 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
27852 {
27853 gcc_assert (discriminator > 0);
27854 fputs (" discriminator ", asm_out_file);
27855 fprint_ul (asm_out_file, (unsigned long) discriminator);
27856 }
27857 if (debug_variable_location_views)
27858 {
27859 if (!RESETTING_VIEW_P (table->view))
27860 {
27861 table->symviews_since_reset++;
27862 if (table->symviews_since_reset > symview_upper_bound)
27863 symview_upper_bound = table->symviews_since_reset;
27864 /* When we're using the assembler to compute view
27865 numbers, we output symbolic labels after "view" in
27866 .loc directives, and the assembler will set them for
27867 us, so that we can refer to the view numbers in
27868 location lists. The only exceptions are when we know
27869 a view will be zero: "-0" is a forced reset, used
27870 e.g. in the beginning of functions, whereas "0" tells
27871 the assembler to check that there was a PC change
27872 since the previous view, in a way that implicitly
27873 resets the next view. */
27874 fputs (" view ", asm_out_file);
27875 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27876 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27877 assemble_name (asm_out_file, label);
27878 table->view = ++lvugid;
27879 }
27880 else
27881 {
27882 table->symviews_since_reset = 0;
27883 if (FORCE_RESETTING_VIEW_P (table->view))
27884 fputs (" view -0", asm_out_file);
27885 else
27886 fputs (" view 0", asm_out_file);
27887 /* Mark the present view as a zero view. Earlier debug
27888 binds may have already added its id to loclists to be
27889 emitted later, so we can't reuse the id for something
27890 else. However, it's good to know whether a view is
27891 known to be zero, because then we may be able to
27892 optimize out locviews that are all zeros, so take
27893 note of it in zero_view_p. */
27894 if (!zero_view_p)
27895 zero_view_p = BITMAP_GGC_ALLOC ();
27896 bitmap_set_bit (zero_view_p, lvugid);
27897 table->view = ++lvugid;
27898 }
27899 }
27900 putc ('\n', asm_out_file);
27901 }
27902 else
27903 {
27904 unsigned int label_num = ++line_info_label_num;
27905
27906 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
27907
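/* With location views enabled and the current view not being reset,
record the new address as an advance (LI_adv_address) so view numbering
can continue; otherwise record a fresh base address (LI_set_address). */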
27908 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
27909 push_dw_line_info_entry (table, LI_adv_address, label_num);
27910 else
27911 push_dw_line_info_entry (table, LI_set_address, label_num);
27912 if (debug_variable_location_views)
27913 {
27914 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
27915 if (resetting)
27916 table->view = 0;
27917
27918 if (flag_debug_asm)
27919 fprintf (asm_out_file, "\t%s view %s%d\n",
27920 ASM_COMMENT_START,
27921 resetting ? "-" : "",
27922 table->view);
27923
27924 table->view++;
27925 }
27926 if (file_num != table->file_num)
27927 push_dw_line_info_entry (table, LI_set_file, file_num);
27928 if (discriminator != table->discrim_num)
27929 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
27930 if (is_stmt != table->is_stmt)
27931 push_dw_line_info_entry (table, LI_negate_stmt, 0);
27932 push_dw_line_info_entry (table, LI_set_line, line);
27933 if (debug_column_info)
27934 push_dw_line_info_entry (table, LI_set_column, column);
27935 }
27936
27937 table->file_num = file_num;
27938 table->line_num = line;
27939 table->column_num = column;
27940 table->discrim_num = discriminator;
27941 table->is_stmt = is_stmt;
27942 table->in_use = true;
27943 }
27944
27945 /* Record the beginning of a new source file. */
27946
27947 static void
27948 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
27949 {
27950 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27951 {
27952 macinfo_entry e;
27953 e.code = DW_MACINFO_start_file;
27954 e.lineno = lineno;
27955 e.info = ggc_strdup (filename);
27956 vec_safe_push (macinfo_table, e);
27957 }
27958 }
27959
27960 /* Record the end of a source file. */
27961
27962 static void
27963 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
27964 {
27965 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27966 {
27967 macinfo_entry e;
27968 e.code = DW_MACINFO_end_file;
27969 e.lineno = lineno;
27970 e.info = NULL;
27971 vec_safe_push (macinfo_table, e);
27972 }
27973 }
27974
27975 /* Called from debug_define in toplev.c. The `buffer' parameter contains
27976 the tail part of the directive line, i.e. the part which is past the
27977 initial whitespace, #, whitespace, directive-name, whitespace part. */
27978
27979 static void
27980 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
27981 const char *buffer ATTRIBUTE_UNUSED)
27982 {
27983 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27984 {
27985 macinfo_entry e;
27986 /* Insert a dummy first entry to be able to optimize the whole
27987 predefined macro block using DW_MACRO_import. */
27988 if (macinfo_table->is_empty () && lineno <= 1)
27989 {
27990 e.code = 0;
27991 e.lineno = 0;
27992 e.info = NULL;
27993 vec_safe_push (macinfo_table, e);
27994 }
27995 e.code = DW_MACINFO_define;
27996 e.lineno = lineno;
27997 e.info = ggc_strdup (buffer);
27998 vec_safe_push (macinfo_table, e);
27999 }
28000 }
28001
28002 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
28003 the tail part of the directive line, i.e. the part which is past the
28004 initial whitespace, #, whitespace, directive-name, whitespace part. */
28005
28006 static void
28007 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
28008 const char *buffer ATTRIBUTE_UNUSED)
28009 {
28010 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28011 {
28012 macinfo_entry e;
28013 /* Insert a dummy first entry to be able to optimize the whole
28014 predefined macro block using DW_MACRO_import. */
28015 if (macinfo_table->is_empty () && lineno <= 1)
28016 {
28017 e.code = 0;
28018 e.lineno = 0;
28019 e.info = NULL;
28020 vec_safe_push (macinfo_table, e);
28021 }
28022 e.code = DW_MACINFO_undef;
28023 e.lineno = lineno;
28024 e.info = ggc_strdup (buffer);
28025 vec_safe_push (macinfo_table, e);
28026 }
28027 }
28028
28029 /* Helpers to manipulate the hash table of macinfo entries. */
28030
28031 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
28032 {
28033 static inline hashval_t hash (const macinfo_entry *);
28034 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
28035 };
28036
28037 inline hashval_t
28038 macinfo_entry_hasher::hash (const macinfo_entry *entry)
28039 {
28040 return htab_hash_string (entry->info);
28041 }
28042
28043 inline bool
28044 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
28045 const macinfo_entry *entry2)
28046 {
28047 return !strcmp (entry1->info, entry2->info);
28048 }
28049
28050 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
28051
28052 /* Output a single .debug_macinfo entry. */
28053
28054 static void
28055 output_macinfo_op (macinfo_entry *ref)
28056 {
28057 int file_num;
28058 size_t len;
28059 struct indirect_string_node *node;
28060 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28061 struct dwarf_file_data *fd;
28062
28063 switch (ref->code)
28064 {
28065 case DW_MACINFO_start_file:
28066 fd = lookup_filename (ref->info);
28067 file_num = maybe_emit_file (fd);
28068 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
28069 dw2_asm_output_data_uleb128 (ref->lineno,
28070 "Included from line number %lu",
28071 (unsigned long) ref->lineno);
28072 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
28073 break;
28074 case DW_MACINFO_end_file:
28075 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
28076 break;
28077 case DW_MACINFO_define:
28078 case DW_MACINFO_undef:
28079 len = strlen (ref->info) + 1;
28080 if (!dwarf_strict
28081 && len > DWARF_OFFSET_SIZE
28082 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28083 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28084 {
28085 ref->code = ref->code == DW_MACINFO_define
28086 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
28087 output_macinfo_op (ref);
28088 return;
28089 }
28090 dw2_asm_output_data (1, ref->code,
28091 ref->code == DW_MACINFO_define
28092 ? "Define macro" : "Undefine macro");
28093 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28094 (unsigned long) ref->lineno);
28095 dw2_asm_output_nstring (ref->info, -1, "The macro");
28096 break;
28097 case DW_MACRO_define_strp:
28098 case DW_MACRO_undef_strp:
28099 /* NB: dwarf2out_finish performs:
28100 1. save_macinfo_strings
28101 2. hash table traverse of index_string
28102 3. output_macinfo -> output_macinfo_op
28103 4. output_indirect_strings
28104 -> hash table traverse of output_index_string
28105
28106 When output_macinfo_op is called, all index strings have been
28107 added to hash table by save_macinfo_strings and we can't pass
28108 INSERT to find_slot_with_hash which may expand hash table, even
28109 if no insertion is needed, and change hash table traverse order
28110 between index_string and output_index_string. */
28111 node = find_AT_string (ref->info, NO_INSERT);
28112 gcc_assert (node
28113 && (node->form == DW_FORM_strp
28114 || node->form == dwarf_FORM (DW_FORM_strx)));
28115 dw2_asm_output_data (1, ref->code,
28116 ref->code == DW_MACRO_define_strp
28117 ? "Define macro strp"
28118 : "Undefine macro strp");
28119 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28120 (unsigned long) ref->lineno);
28121 if (node->form == DW_FORM_strp)
28122 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
28123 debug_str_section, "The macro: \"%s\"",
28124 ref->info);
28125 else
28126 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
28127 ref->info);
28128 break;
28129 case DW_MACRO_import:
28130 dw2_asm_output_data (1, ref->code, "Import");
28131 ASM_GENERATE_INTERNAL_LABEL (label,
28132 DEBUG_MACRO_SECTION_LABEL,
28133 ref->lineno + macinfo_label_base);
28134 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
28135 break;
28136 default:
28137 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
28138 ASM_COMMENT_START, (unsigned long) ref->code);
28139 break;
28140 }
28141 }
28142
28143 /* Attempt to make a sequence of define/undef macinfo ops shareable with
28144 the .debug_macinfo sections of other compilation units. IDX is the
28145 index of the first define/undef op; return the number of ops that
28146 should be emitted in a comdat .debug_macinfo section, and emit
28147 a DW_MACRO_import entry referencing it.
28148 If the define/undef entry should be emitted normally, return 0. */
28149
28150 static unsigned
28151 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
28152 macinfo_hash_type **macinfo_htab)
28153 {
28154 macinfo_entry *first, *second, *cur, *inc;
28155 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
28156 unsigned char checksum[16];
28157 struct md5_ctx ctx;
28158 char *grp_name, *tail;
28159 const char *base;
28160 unsigned int i, count, encoded_filename_len, linebuf_len;
28161 macinfo_entry **slot;
28162
28163 first = &(*macinfo_table)[idx];
28164 second = &(*macinfo_table)[idx + 1];
28165
28166 /* Optimize only if there are at least two consecutive define/undef ops,
28167 and either all of them are before first DW_MACINFO_start_file
28168 with lineno {0,1} (i.e. predefined macro block), or all of them are
28169 in some included header file. */
28170 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
28171 return 0;
28172 if (vec_safe_is_empty (files))
28173 {
28174 if (first->lineno > 1 || second->lineno > 1)
28175 return 0;
28176 }
28177 else if (first->lineno == 0)
28178 return 0;
28179
28180 /* Find the last define/undef entry that can be grouped together
28181 with first and at the same time compute md5 checksum of their
28182 codes, linenumbers and strings. */
28183 md5_init_ctx (&ctx);
28184 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28185 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28186 break;
28187 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28188 break;
28189 else
28190 {
28191 unsigned char code = cur->code;
28192 md5_process_bytes (&code, 1, &ctx);
28193 checksum_uleb128 (cur->lineno, &ctx);
28194 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28195 }
28196 md5_finish_ctx (&ctx, checksum);
28197 count = i - idx;
28198
28199 /* From the containing include filename (if any) pick up just
28200 usable characters from its basename. */
28201 if (vec_safe_is_empty (files))
28202 base = "";
28203 else
28204 base = lbasename (files->last ().info);
28205 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28206 if (ISIDNUM (base[i]) || base[i] == '.')
28207 encoded_filename_len++;
28208 /* Count . at the end. */
28209 if (encoded_filename_len)
28210 encoded_filename_len++;
28211
28212 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28213 linebuf_len = strlen (linebuf);
28214
28215 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
28216 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28217 + 16 * 2 + 1);
28218 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
28219 tail = grp_name + 4;
28220 if (encoded_filename_len)
28221 {
28222 for (i = 0; base[i]; i++)
28223 if (ISIDNUM (base[i]) || base[i] == '.')
28224 *tail++ = base[i];
28225 *tail++ = '.';
28226 }
28227 memcpy (tail, linebuf, linebuf_len);
28228 tail += linebuf_len;
28229 *tail++ = '.';
28230 for (i = 0; i < 16; i++)
28231 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
28232
28233 /* Construct a macinfo_entry for DW_MACRO_import
28234 in the empty vector entry before the first define/undef. */
28235 inc = &(*macinfo_table)[idx - 1];
28236 inc->code = DW_MACRO_import;
28237 inc->lineno = 0;
28238 inc->info = ggc_strdup (grp_name);
28239 if (!*macinfo_htab)
28240 *macinfo_htab = new macinfo_hash_type (10);
28241 /* Avoid emitting duplicates. */
28242 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28243 if (*slot != NULL)
28244 {
28245 inc->code = 0;
28246 inc->info = NULL;
28247 /* If such an entry has been used before, just emit
28248 a DW_MACRO_import op. */
28249 inc = *slot;
28250 output_macinfo_op (inc);
28251 /* And clear all macinfo_entry in the range to avoid emitting them
28252 in the second pass. */
28253 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28254 {
28255 cur->code = 0;
28256 cur->info = NULL;
28257 }
28258 }
28259 else
28260 {
28261 *slot = inc;
28262 inc->lineno = (*macinfo_htab)->elements ();
28263 output_macinfo_op (inc);
28264 }
28265 return count;
28266 }
28267
28268 /* Save any strings needed by the macinfo table in the debug str
28269 table. All strings must be collected into the table by the time
28270 index_string is called. */
28271
28272 static void
28273 save_macinfo_strings (void)
28274 {
28275 unsigned len;
28276 unsigned i;
28277 macinfo_entry *ref;
28278
28279 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28280 {
28281 switch (ref->code)
28282 {
28283 /* Match the logic in output_macinfo_op to decide on
28284 indirect strings. */
28285 case DW_MACINFO_define:
28286 case DW_MACINFO_undef:
28287 len = strlen (ref->info) + 1;
28288 if (!dwarf_strict
28289 && len > DWARF_OFFSET_SIZE
28290 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28291 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28292 set_indirect_string (find_AT_string (ref->info));
28293 break;
28294 case DW_MACINFO_start_file:
28295 /* -gsplit-dwarf -g3 will also output filename as indirect
28296 string. */
28297 if (!dwarf_split_debug_info)
28298 break;
28299 /* Fall through. */
28300 case DW_MACRO_define_strp:
28301 case DW_MACRO_undef_strp:
28302 set_indirect_string (find_AT_string (ref->info));
28303 break;
28304 default:
28305 break;
28306 }
28307 }
28308 }
28309
28310 /* Output macinfo section(s). */
28311
28312 static void
28313 output_macinfo (const char *debug_line_label, bool early_lto_debug)
28314 {
28315 unsigned i;
28316 unsigned long length = vec_safe_length (macinfo_table);
28317 macinfo_entry *ref;
28318 vec<macinfo_entry, va_gc> *files = NULL;
28319 macinfo_hash_type *macinfo_htab = NULL;
28320 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28321
28322 if (! length)
28323 return;
28324
28325 /* output_macinfo* uses these interchangeably. */
28326 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28327 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28328 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28329 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28330
28331 /* AIX Assembler inserts the length, so adjust the reference to match the
28332 offset expected by debuggers. */
28333 strcpy (dl_section_ref, debug_line_label);
28334 if (XCOFF_DEBUGGING_INFO)
28335 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28336
28337 /* For .debug_macro emit the section header. */
28338 if (!dwarf_strict || dwarf_version >= 5)
28339 {
28340 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28341 "DWARF macro version number");
28342 if (DWARF_OFFSET_SIZE == 8)
28343 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28344 else
28345 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
28346 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
28347 debug_line_section, NULL);
28348 }
28349
28350 /* The first loop emits the primary .debug_macinfo section; each
28351 macinfo_entry is cleared after its op has been emitted.
28352 If a longer range of define/undef ops can be optimized using
28353 DW_MACRO_import, the DW_MACRO_import op is emitted and kept in
28354 the vector slot just before the first define/undef of the range,
28355 while the define/undef ops themselves are kept but not emitted here. */
28356 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28357 {
28358 switch (ref->code)
28359 {
28360 case DW_MACINFO_start_file:
28361 vec_safe_push (files, *ref);
28362 break;
28363 case DW_MACINFO_end_file:
28364 if (!vec_safe_is_empty (files))
28365 files->pop ();
28366 break;
28367 case DW_MACINFO_define:
28368 case DW_MACINFO_undef:
28369 if ((!dwarf_strict || dwarf_version >= 5)
28370 && HAVE_COMDAT_GROUP
28371 && vec_safe_length (files) != 1
28372 && i > 0
28373 && i + 1 < length
28374 && (*macinfo_table)[i - 1].code == 0)
28375 {
28376 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28377 if (count)
28378 {
28379 i += count - 1;
28380 continue;
28381 }
28382 }
28383 break;
28384 case 0:
28385 /* A dummy entry may be inserted at the beginning to be able
28386 to optimize the whole block of predefined macros. */
28387 if (i == 0)
28388 continue;
28389 default:
28390 break;
28391 }
28392 output_macinfo_op (ref);
28393 ref->info = NULL;
28394 ref->code = 0;
28395 }
28396
28397 if (!macinfo_htab)
28398 return;
28399
28400 /* Save the number of transparent includes so we can adjust the
28401 label number for the fat LTO object DWARF. */
28402 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28403
28404 delete macinfo_htab;
28405 macinfo_htab = NULL;
28406
28407 /* If any DW_MACRO_import ops were used, then at each DW_MACRO_import
28408 entry terminate the current chain, switch to a new comdat
28409 .debug_macinfo section, and emit the define/undef entries within it. */
28410 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28411 switch (ref->code)
28412 {
28413 case 0:
28414 continue;
28415 case DW_MACRO_import:
28416 {
28417 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28418 tree comdat_key = get_identifier (ref->info);
28419 /* Terminate the previous .debug_macinfo section. */
28420 dw2_asm_output_data (1, 0, "End compilation unit");
28421 targetm.asm_out.named_section (debug_macinfo_section_name,
28422 SECTION_DEBUG
28423 | SECTION_LINKONCE
28424 | (early_lto_debug
28425 ? SECTION_EXCLUDE : 0),
28426 comdat_key);
28427 ASM_GENERATE_INTERNAL_LABEL (label,
28428 DEBUG_MACRO_SECTION_LABEL,
28429 ref->lineno + macinfo_label_base);
28430 ASM_OUTPUT_LABEL (asm_out_file, label);
28431 ref->code = 0;
28432 ref->info = NULL;
28433 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28434 "DWARF macro version number");
28435 if (DWARF_OFFSET_SIZE == 8)
28436 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28437 else
28438 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28439 }
28440 break;
28441 case DW_MACINFO_define:
28442 case DW_MACINFO_undef:
28443 output_macinfo_op (ref);
28444 ref->code = 0;
28445 ref->info = NULL;
28446 break;
28447 default:
28448 gcc_unreachable ();
28449 }
28450
28451 macinfo_label_base += macinfo_label_base_adj;
28452 }
28453
28454 /* Initialize the various sections and labels for dwarf output; when
28455 EARLY_LTO_DEBUG is set, use the early LTO debug sections and labels.
28456 Returns the generation (zero-based count of times the function was called). */
28457
28458 static unsigned
28459 init_sections_and_labels (bool early_lto_debug)
28460 {
28461 /* As we may get called multiple times have a generation count for
28462 labels. */
28463 static unsigned generation = 0;
28464
28465 if (early_lto_debug)
28466 {
28467 if (!dwarf_split_debug_info)
28468 {
28469 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28470 SECTION_DEBUG | SECTION_EXCLUDE,
28471 NULL);
28472 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28473 SECTION_DEBUG | SECTION_EXCLUDE,
28474 NULL);
28475 debug_macinfo_section_name
28476 = ((dwarf_strict && dwarf_version < 5)
28477 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28478 debug_macinfo_section = get_section (debug_macinfo_section_name,
28479 SECTION_DEBUG
28480 | SECTION_EXCLUDE, NULL);
28481 }
28482 else
28483 {
28484 /* ??? Which of the following do we need early? */
28485 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28486 SECTION_DEBUG | SECTION_EXCLUDE,
28487 NULL);
28488 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28489 SECTION_DEBUG | SECTION_EXCLUDE,
28490 NULL);
28491 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28492 SECTION_DEBUG
28493 | SECTION_EXCLUDE, NULL);
28494 debug_skeleton_abbrev_section
28495 = get_section (DEBUG_LTO_ABBREV_SECTION,
28496 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28497 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28498 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28499 generation);
28500
28501 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28502 stay in the main .o, but the skeleton_line goes into the split
28503 off dwo. */
28504 debug_skeleton_line_section
28505 = get_section (DEBUG_LTO_LINE_SECTION,
28506 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28507 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28508 DEBUG_SKELETON_LINE_SECTION_LABEL,
28509 generation);
28510 debug_str_offsets_section
28511 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28512 SECTION_DEBUG | SECTION_EXCLUDE,
28513 NULL);
28514 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28515 DEBUG_SKELETON_INFO_SECTION_LABEL,
28516 generation);
28517 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28518 DEBUG_STR_DWO_SECTION_FLAGS,
28519 NULL);
28520 debug_macinfo_section_name
28521 = ((dwarf_strict && dwarf_version < 5)
28522 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28523 debug_macinfo_section = get_section (debug_macinfo_section_name,
28524 SECTION_DEBUG | SECTION_EXCLUDE,
28525 NULL);
28526 }
28527 /* For macro info and the file table we have to refer to a
28528 debug_line section. */
28529 debug_line_section = get_section (DEBUG_LTO_LINE_SECTION,
28530 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28531 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28532 DEBUG_LINE_SECTION_LABEL, generation);
28533
28534 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28535 DEBUG_STR_SECTION_FLAGS
28536 | SECTION_EXCLUDE, NULL);
28537 if (!dwarf_split_debug_info)
28538 debug_line_str_section
28539 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28540 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28541 }
28542 else
28543 {
28544 if (!dwarf_split_debug_info)
28545 {
28546 debug_info_section = get_section (DEBUG_INFO_SECTION,
28547 SECTION_DEBUG, NULL);
28548 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28549 SECTION_DEBUG, NULL);
28550 debug_loc_section = get_section (dwarf_version >= 5
28551 ? DEBUG_LOCLISTS_SECTION
28552 : DEBUG_LOC_SECTION,
28553 SECTION_DEBUG, NULL);
28554 debug_macinfo_section_name
28555 = ((dwarf_strict && dwarf_version < 5)
28556 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28557 debug_macinfo_section = get_section (debug_macinfo_section_name,
28558 SECTION_DEBUG, NULL);
28559 }
28560 else
28561 {
28562 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28563 SECTION_DEBUG | SECTION_EXCLUDE,
28564 NULL);
28565 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28566 SECTION_DEBUG | SECTION_EXCLUDE,
28567 NULL);
28568 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28569 SECTION_DEBUG, NULL);
28570 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28571 SECTION_DEBUG, NULL);
28572 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28573 SECTION_DEBUG, NULL);
28574 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28575 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28576 generation);
28577
28578 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28579 stay in the main .o, but the skeleton_line goes into the
28580 split off dwo. */
28581 debug_skeleton_line_section
28582 = get_section (DEBUG_DWO_LINE_SECTION,
28583 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28584 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28585 DEBUG_SKELETON_LINE_SECTION_LABEL,
28586 generation);
28587 debug_str_offsets_section
28588 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28589 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28590 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28591 DEBUG_SKELETON_INFO_SECTION_LABEL,
28592 generation);
28593 debug_loc_section = get_section (dwarf_version >= 5
28594 ? DEBUG_DWO_LOCLISTS_SECTION
28595 : DEBUG_DWO_LOC_SECTION,
28596 SECTION_DEBUG | SECTION_EXCLUDE,
28597 NULL);
28598 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28599 DEBUG_STR_DWO_SECTION_FLAGS,
28600 NULL);
28601 debug_macinfo_section_name
28602 = ((dwarf_strict && dwarf_version < 5)
28603 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28604 debug_macinfo_section = get_section (debug_macinfo_section_name,
28605 SECTION_DEBUG | SECTION_EXCLUDE,
28606 NULL);
28607 }
28608 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28609 SECTION_DEBUG, NULL);
28610 debug_line_section = get_section (DEBUG_LINE_SECTION,
28611 SECTION_DEBUG, NULL);
28612 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28613 SECTION_DEBUG, NULL);
28614 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28615 SECTION_DEBUG, NULL);
28616 debug_str_section = get_section (DEBUG_STR_SECTION,
28617 DEBUG_STR_SECTION_FLAGS, NULL);
28618 if (!dwarf_split_debug_info && !output_asm_line_debug_info ())
28619 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28620 DEBUG_STR_SECTION_FLAGS, NULL);
28621
28622 debug_ranges_section = get_section (dwarf_version >= 5
28623 ? DEBUG_RNGLISTS_SECTION
28624 : DEBUG_RANGES_SECTION,
28625 SECTION_DEBUG, NULL);
28626 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28627 SECTION_DEBUG, NULL);
28628 }
28629
28630 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28631 DEBUG_ABBREV_SECTION_LABEL, generation);
28632 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28633 DEBUG_INFO_SECTION_LABEL, generation);
28634 info_section_emitted = false;
28635 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28636 DEBUG_LINE_SECTION_LABEL, generation);
28637 /* There are up to 4 unique ranges labels per generation.
28638 See also output_rnglists. */
28639 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28640 DEBUG_RANGES_SECTION_LABEL, generation * 4);
28641 if (dwarf_version >= 5 && dwarf_split_debug_info)
28642 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28643 DEBUG_RANGES_SECTION_LABEL,
28644 1 + generation * 4);
28645 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28646 DEBUG_ADDR_SECTION_LABEL, generation);
28647 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28648 (dwarf_strict && dwarf_version < 5)
28649 ? DEBUG_MACINFO_SECTION_LABEL
28650 : DEBUG_MACRO_SECTION_LABEL, generation);
28651 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28652 generation);
28653
28654 ++generation;
28655 return generation - 1;
28656 }
28657
28658 /* Set up for Dwarf output at the start of compilation. */
28659
28660 static void
28661 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28662 {
28663 /* Allocate the file_table. */
28664 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28665
28666 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28667 /* Allocate the decl_die_table. */
28668 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28669
28670 /* Allocate the decl_loc_table. */
28671 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28672
28673 /* Allocate the cached_dw_loc_list_table. */
28674 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28675
28676 /* Allocate the initial hunk of the abbrev_die_table. */
28677 vec_alloc (abbrev_die_table, 256);
28678 /* Zero-th entry is allocated, but unused. */
28679 abbrev_die_table->quick_push (NULL);
28680
28681 /* Allocate the dwarf_proc_stack_usage_map. */
28682 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28683
28684 /* Allocate the pubtypes and pubnames vectors. */
28685 vec_alloc (pubname_table, 32);
28686 vec_alloc (pubtype_table, 32);
28687
28688 vec_alloc (incomplete_types, 64);
28689
28690 vec_alloc (used_rtx_array, 32);
28691
28692 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28693 vec_alloc (macinfo_table, 64);
28694 #endif
28695
28696 /* If front-ends already registered a main translation unit but we were not
28697 ready to perform the association, do this now. */
28698 if (main_translation_unit != NULL_TREE)
28699 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28700 }
28701
28702 /* Called before compile () starts outputting functions, variables
28703 and toplevel asms into assembly. */
28704
28705 static void
28706 dwarf2out_assembly_start (void)
28707 {
28708 if (text_section_line_info)
28709 return;
28710
28711 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28712 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28713 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28714 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28715 COLD_TEXT_SECTION_LABEL, 0);
28716 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28717
28718 switch_to_section (text_section);
28719 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28720 #endif
28721
28722 /* Make sure the line number table for .text always exists. */
28723 text_section_line_info = new_line_info_table ();
28724 text_section_line_info->end_label = text_end_label;
28725
28726 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28727 cur_line_info_table = text_section_line_info;
28728 #endif
28729
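/* When CFI is emitted via .cfi_* directives but no .eh_frame is needed,
tell the assembler to place the unwind information in .debug_frame
instead. */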
28730 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28731 && dwarf2out_do_cfi_asm ()
28732 && !dwarf2out_do_eh_frame ())
28733 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
28734 }
28735
28736 /* A helper function for dwarf2out_finish called through
28737 htab_traverse. Assign a string its index. All strings must be
28738 collected into the table by the time index_string is called,
28739 because the indexing code relies on htab_traverse to traverse nodes
28740 in the same order for each run. */
28741
28742 int
28743 index_string (indirect_string_node **h, unsigned int *index)
28744 {
28745 indirect_string_node *node = *h;
28746
28747 find_string_form (node);
28748 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28749 {
28750 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28751 node->index = *index;
28752 *index += 1;
28753 }
28754 return 1;
28755 }
28756
28757 /* A helper function for output_indirect_strings called through
28758 htab_traverse. Output the offset to a string and update the
28759 current offset. */
28760
28761 int
28762 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28763 {
28764 indirect_string_node *node = *h;
28765
28766 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28767 {
28768 /* Assert that this node has been assigned an index. */
28769 gcc_assert (node->index != NO_INDEX_ASSIGNED
28770 && node->index != NOT_INDEXED);
28771 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
28772 "indexed string 0x%x: %s", node->index, node->str);
28773 *offset += strlen (node->str) + 1;
28774 }
28775 return 1;
28776 }
28777
28778 /* A helper function for dwarf2out_finish called through
28779 htab_traverse. Output the indexed string. */
28780
28781 int
28782 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28783 {
28784 struct indirect_string_node *node = *h;
28785
28786 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28787 {
28788 /* Assert that the strings are output in the same order as their
28789 indexes were assigned. */
28790 gcc_assert (*cur_idx == node->index);
28791 assemble_string (node->str, strlen (node->str) + 1);
28792 *cur_idx += 1;
28793 }
28794 return 1;
28795 }
28796
28797 /* A helper function for output_indirect_strings. Counts the number
28798 of indexed string offsets. Must match the logic of the functions
28799 output_index_string[_offsets] above. */
28800 int
28801 count_index_strings (indirect_string_node **h, unsigned int *last_idx)
28802 {
28803 struct indirect_string_node *node = *h;
28804
28805 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28806 *last_idx += 1;
28807 return 1;
28808 }
28809
28810 /* A helper function for dwarf2out_finish called through
28811 htab_traverse. Emit one queued .debug_str string. */
28812
28813 int
28814 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
28815 {
28816 struct indirect_string_node *node = *h;
28817
28818 node->form = find_string_form (node);
28819 if (node->form == form && node->refcount > 0)
28820 {
28821 ASM_OUTPUT_LABEL (asm_out_file, node->label);
28822 assemble_string (node->str, strlen (node->str) + 1);
28823 }
28824
28825 return 1;
28826 }
28827
28828 /* Output the indexed string table. */
28829
28830 static void
28831 output_indirect_strings (void)
28832 {
28833 switch_to_section (debug_str_section);
28834 if (!dwarf_split_debug_info)
28835 debug_str_hash->traverse<enum dwarf_form,
28836 output_indirect_string> (DW_FORM_strp);
28837 else
28838 {
28839 unsigned int offset = 0;
28840 unsigned int cur_idx = 0;
28841
28842 if (skeleton_debug_str_hash)
28843 skeleton_debug_str_hash->traverse<enum dwarf_form,
28844 output_indirect_string> (DW_FORM_strp);
28845
28846 switch_to_section (debug_str_offsets_section);
28847 /* For DWARF5 the .debug_str_offsets[.dwo] section needs a unit
28848 header. Note that we don't need to generate a label for the
28849 actual index table following the header here, because this is
28850 for the split dwarf case only. In a .dwo file there is only
28851 one string offsets table (and one debug info section). But
28852 if we were to start using string offset tables for the main (or
28853 skeleton) unit, we would have to add a DW_AT_str_offsets_base
28854 attribute pointing to the actual index after the header. Split dwarf
28855 units will never have a string offsets base attribute. When
28856 a split unit is moved into a .dwp file the string offsets can
28857 be found through the .debug_cu_index section table. */
28858 if (dwarf_version >= 5)
28859 {
28860 unsigned int last_idx = 0;
28861 unsigned long str_offsets_length;
28862
28863 debug_str_hash->traverse_noresize
28864 <unsigned int *, count_index_strings> (&last_idx);
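/* The unit length excludes the length field itself but covers the 2-byte
version and the 2-byte padding that follow it, hence the extra 4 bytes
on top of the offset array. */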
28865 str_offsets_length = last_idx * DWARF_OFFSET_SIZE + 4;
28866 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
28867 dw2_asm_output_data (4, 0xffffffff,
28868 "Escape value for 64-bit DWARF extension");
28869 dw2_asm_output_data (DWARF_OFFSET_SIZE, str_offsets_length,
28870 "Length of string offsets unit");
28871 dw2_asm_output_data (2, 5, "DWARF string offsets version");
28872 dw2_asm_output_data (2, 0, "Header zero padding");
28873 }
28874 debug_str_hash->traverse_noresize
28875 <unsigned int *, output_index_string_offset> (&offset);
28876 switch_to_section (debug_str_dwo_section);
28877 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
28878 (&cur_idx);
28879 }
28880 }
28881
28882 /* Callback for htab_traverse to assign an index to an entry in the
28883 table, and to write that entry to the .debug_addr section. */
28884
28885 int
28886 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
28887 {
28888 addr_table_entry *entry = *slot;
28889
28890 if (entry->refcount == 0)
28891 {
28892 gcc_assert (entry->index == NO_INDEX_ASSIGNED
28893 || entry->index == NOT_INDEXED);
28894 return 1;
28895 }
28896
28897 gcc_assert (entry->index == *cur_index);
28898 (*cur_index)++;
28899
28900 switch (entry->kind)
28901 {
28902 case ate_kind_rtx:
28903 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
28904 "0x%x", entry->index);
28905 break;
28906 case ate_kind_rtx_dtprel:
28907 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
28908 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
28909 DWARF2_ADDR_SIZE,
28910 entry->addr.rtl);
28911 fputc ('\n', asm_out_file);
28912 break;
28913 case ate_kind_label:
28914 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
28915 "0x%x", entry->index);
28916 break;
28917 default:
28918 gcc_unreachable ();
28919 }
28920 return 1;
28921 }
28922
28923 /* A helper function for dwarf2out_finish. Counts the number
28924 of indexed addresses. Must match the logic of the function
28925 output_addr_table_entry above. */
28926 int
28927 count_index_addrs (addr_table_entry **slot, unsigned int *last_idx)
28928 {
28929 addr_table_entry *entry = *slot;
28930
28931 if (entry->refcount > 0)
28932 *last_idx += 1;
28933 return 1;
28934 }
28935
28936 /* Produce the .debug_addr section. */
28937
28938 static void
28939 output_addr_table (void)
28940 {
28941 unsigned int index = 0;
28942 if (addr_index_table == NULL || addr_index_table->size () == 0)
28943 return;
28944
28945 switch_to_section (debug_addr_section);
28946 addr_index_table
28947 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
28948 }
28949
28950 #if ENABLE_ASSERT_CHECKING
28951 /* Verify that all marks are clear. */
28952
28953 static void
28954 verify_marks_clear (dw_die_ref die)
28955 {
28956 dw_die_ref c;
28957
28958 gcc_assert (! die->die_mark);
28959 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
28960 }
28961 #endif /* ENABLE_ASSERT_CHECKING */
28962
28963 /* Clear the marks for a die and its children.
28964 Be cool if the mark isn't set. */
28965
28966 static void
28967 prune_unmark_dies (dw_die_ref die)
28968 {
28969 dw_die_ref c;
28970
28971 if (die->die_mark)
28972 die->die_mark = 0;
28973 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
28974 }
28975
28976 /* Given LOC that is referenced by a DIE we're marking as used, find all
28977 the DWARF procedures it references and mark them as used. */
28978
28979 static void
28980 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
28981 {
28982 for (; loc != NULL; loc = loc->dw_loc_next)
28983 switch (loc->dw_loc_opc)
28984 {
28985 case DW_OP_implicit_pointer:
28986 case DW_OP_convert:
28987 case DW_OP_reinterpret:
28988 case DW_OP_GNU_implicit_pointer:
28989 case DW_OP_GNU_convert:
28990 case DW_OP_GNU_reinterpret:
28991 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
28992 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28993 break;
28994 case DW_OP_GNU_variable_value:
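/* If the operand still refers to a decl, resolve it to the decl's DIE
(when one exists) so the fall-through below marks it like the other
die_ref operands. */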
28995 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
28996 {
28997 dw_die_ref ref
28998 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
28999 if (ref == NULL)
29000 break;
29001 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29002 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29003 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29004 }
29005 /* FALLTHRU */
29006 case DW_OP_call2:
29007 case DW_OP_call4:
29008 case DW_OP_call_ref:
29009 case DW_OP_const_type:
29010 case DW_OP_GNU_const_type:
29011 case DW_OP_GNU_parameter_ref:
29012 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
29013 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
29014 break;
29015 case DW_OP_regval_type:
29016 case DW_OP_deref_type:
29017 case DW_OP_GNU_regval_type:
29018 case DW_OP_GNU_deref_type:
29019 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
29020 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
29021 break;
29022 case DW_OP_entry_value:
29023 case DW_OP_GNU_entry_value:
29024 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
29025 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
29026 break;
29027 default:
29028 break;
29029 }
29030 }
29031
29032 /* Given DIE that we're marking as used, find any other dies
29033 it references as attributes and mark them as used. */
29034
29035 static void
29036 prune_unused_types_walk_attribs (dw_die_ref die)
29037 {
29038 dw_attr_node *a;
29039 unsigned ix;
29040
29041 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29042 {
29043 switch (AT_class (a))
29044 {
29045 /* Make sure DWARF procedures referenced by location descriptions will
29046 get emitted. */
29047 case dw_val_class_loc:
29048 prune_unused_types_walk_loc_descr (AT_loc (a));
29049 break;
29050 case dw_val_class_loc_list:
29051 for (dw_loc_list_ref list = AT_loc_list (a);
29052 list != NULL;
29053 list = list->dw_loc_next)
29054 prune_unused_types_walk_loc_descr (list->expr);
29055 break;
29056
29057 case dw_val_class_view_list:
29058 /* This points to a loc_list in another attribute, so it's
29059 already covered. */
29060 break;
29061
29062 case dw_val_class_die_ref:
29063 /* A reference to another DIE.
29064 Make sure that it will get emitted.
29065 If it was broken out into a comdat group, don't follow it. */
29066 if (! AT_ref (a)->comdat_type_p
29067 || a->dw_attr == DW_AT_specification)
29068 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
29069 break;
29070
29071 case dw_val_class_str:
29072 /* Set the string's refcount to 0 so that prune_unused_types_mark
29073 accounts properly for it. */
29074 a->dw_attr_val.v.val_str->refcount = 0;
29075 break;
29076
29077 default:
29078 break;
29079 }
29080 }
29081 }
29082
29083 /* Mark the children DIEs of DIE that describe its generic parms and args.  */
29084
29085 static void
29086 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
29087 {
29088 dw_die_ref c;
29089
29090 if (die == NULL || die->die_child == NULL)
29091 return;
29092 c = die->die_child;
29093 do
29094 {
29095 if (is_template_parameter (c))
29096 prune_unused_types_mark (c, 1);
29097 c = c->die_sib;
29098 } while (c && c != die->die_child);
29099 }
29100
29101 /* Mark DIE as being used. If DOKIDS is true, then walk down
29102 to DIE's children. */
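
/* die_mark is used as a small state machine here: 0 means the DIE has not
   been visited yet, 1 means the DIE itself (its attributes, parents and
   generic parameter children) has been marked, and 2 means its children
   have been walked as well.  */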
29103
29104 static void
29105 prune_unused_types_mark (dw_die_ref die, int dokids)
29106 {
29107 dw_die_ref c;
29108
29109 if (die->die_mark == 0)
29110 {
29111 /* We haven't done this node yet. Mark it as used. */
29112 die->die_mark = 1;
29113 /* If this is the DIE of a generic type instantiation,
29114 mark the children DIEs that describe its generic parms and
29115 args. */
29116 prune_unused_types_mark_generic_parms_dies (die);
29117
29118 /* We also have to mark its parents as used.
29119 (But we don't want to mark our parent's kids due to this,
29120 unless it is a class.) */
29121 if (die->die_parent)
29122 prune_unused_types_mark (die->die_parent,
29123 class_scope_p (die->die_parent));
29124
29125 /* Mark any referenced nodes. */
29126 prune_unused_types_walk_attribs (die);
29127
29128 /* If this node is a specification,
29129 also mark the definition, if it exists. */
29130 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
29131 prune_unused_types_mark (die->die_definition, 1);
29132 }
29133
29134 if (dokids && die->die_mark != 2)
29135 {
29136 /* We need to walk the children, but haven't done so yet.
29137 Remember that we've walked the kids. */
29138 die->die_mark = 2;
29139
29140 /* If this is an array type, we need to make sure our
29141 kids get marked, even if they're types. If we're
29142 breaking out types into comdat sections, do this
29143 for all type definitions. */
29144 if (die->die_tag == DW_TAG_array_type
29145 || (use_debug_types
29146 && is_type_die (die) && ! is_declaration_die (die)))
29147 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
29148 else
29149 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29150 }
29151 }
29152
29153 /* For local classes, check whether any static member functions were
29154    emitted and, if so, mark them.  */
29155
29156 static void
29157 prune_unused_types_walk_local_classes (dw_die_ref die)
29158 {
29159 dw_die_ref c;
29160
29161 if (die->die_mark == 2)
29162 return;
29163
29164 switch (die->die_tag)
29165 {
29166 case DW_TAG_structure_type:
29167 case DW_TAG_union_type:
29168 case DW_TAG_class_type:
29169 case DW_TAG_interface_type:
29170 break;
29171
29172 case DW_TAG_subprogram:
29173 if (!get_AT_flag (die, DW_AT_declaration)
29174 || die->die_definition != NULL)
29175 prune_unused_types_mark (die, 1);
29176 return;
29177
29178 default:
29179 return;
29180 }
29181
29182 /* Mark children. */
29183 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
29184 }
29185
29186 /* Walk the tree DIE and mark types that we actually use. */
29187
29188 static void
29189 prune_unused_types_walk (dw_die_ref die)
29190 {
29191 dw_die_ref c;
29192
29193 /* Don't do anything if this node is already marked and
29194 children have been marked as well. */
29195 if (die->die_mark == 2)
29196 return;
29197
29198 switch (die->die_tag)
29199 {
29200 case DW_TAG_structure_type:
29201 case DW_TAG_union_type:
29202 case DW_TAG_class_type:
29203 case DW_TAG_interface_type:
29204 if (die->die_perennial_p)
29205 break;
29206
29207 for (c = die->die_parent; c; c = c->die_parent)
29208 if (c->die_tag == DW_TAG_subprogram)
29209 break;
29210
29211       /* Finding used static member functions inside classes is needed
29212 	 only for local classes, because for other classes the static
29213 	 member function DIEs with DW_AT_specification are emitted
29214 	 outside of the DW_TAG_*_type.  If we ever change that, we'd
29215 	 need to call this even for non-local classes.  */
29216 if (c)
29217 prune_unused_types_walk_local_classes (die);
29218
29219 /* It's a type node --- don't mark it. */
29220 return;
29221
29222 case DW_TAG_const_type:
29223 case DW_TAG_packed_type:
29224 case DW_TAG_pointer_type:
29225 case DW_TAG_reference_type:
29226 case DW_TAG_rvalue_reference_type:
29227 case DW_TAG_volatile_type:
29228 case DW_TAG_typedef:
29229 case DW_TAG_array_type:
29230 case DW_TAG_friend:
29231 case DW_TAG_enumeration_type:
29232 case DW_TAG_subroutine_type:
29233 case DW_TAG_string_type:
29234 case DW_TAG_set_type:
29235 case DW_TAG_subrange_type:
29236 case DW_TAG_ptr_to_member_type:
29237 case DW_TAG_file_type:
29238 /* Type nodes are useful only when other DIEs reference them --- don't
29239 mark them. */
29240 /* FALLTHROUGH */
29241
29242 case DW_TAG_dwarf_procedure:
29243 /* Likewise for DWARF procedures. */
29244
29245 if (die->die_perennial_p)
29246 break;
29247
29248 return;
29249
29250 default:
29251 /* Mark everything else. */
29252 break;
29253 }
29254
29255 if (die->die_mark == 0)
29256 {
29257 die->die_mark = 1;
29258
29259 /* Now, mark any dies referenced from here. */
29260 prune_unused_types_walk_attribs (die);
29261 }
29262
29263 die->die_mark = 2;
29264
29265 /* Mark children. */
29266 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29267 }
29268
29269 /* Increment the string counts on strings referred to from DIE's
29270 attributes. */
29271
29272 static void
29273 prune_unused_types_update_strings (dw_die_ref die)
29274 {
29275 dw_attr_node *a;
29276 unsigned ix;
29277
29278 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29279 if (AT_class (a) == dw_val_class_str)
29280 {
29281 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29282 s->refcount++;
29283 	/* Avoid unnecessarily putting into the hash table strings that
29284 	   are used fewer than twice.  */
29285 if (s->refcount
29286 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
29287 {
29288 indirect_string_node **slot
29289 = debug_str_hash->find_slot_with_hash (s->str,
29290 htab_hash_string (s->str),
29291 INSERT);
29292 gcc_assert (*slot == NULL);
29293 *slot = s;
29294 }
29295 }
29296 }
29297
29298 /* Mark DIE and its children as removed. */
29299
29300 static void
29301 mark_removed (dw_die_ref die)
29302 {
29303 dw_die_ref c;
29304 die->removed = true;
29305 FOR_EACH_CHILD (die, c, mark_removed (c));
29306 }
29307
29308 /* Remove from the tree DIE any dies that aren't marked. */
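
/* The children of a DIE form a circular singly-linked list through die_sib,
   with DIE->die_child pointing at the last child; the loop below therefore
   starts at die_child->die_sib (the first child) and stops when it wraps
   back around to die_child.  */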
29309
29310 static void
29311 prune_unused_types_prune (dw_die_ref die)
29312 {
29313 dw_die_ref c;
29314
29315 gcc_assert (die->die_mark);
29316 prune_unused_types_update_strings (die);
29317
29318 if (! die->die_child)
29319 return;
29320
29321 c = die->die_child;
29322 do {
29323 dw_die_ref prev = c, next;
29324 for (c = c->die_sib; ! c->die_mark; c = next)
29325 if (c == die->die_child)
29326 {
29327 /* No marked children between 'prev' and the end of the list. */
29328 if (prev == c)
29329 /* No marked children at all. */
29330 die->die_child = NULL;
29331 else
29332 {
29333 prev->die_sib = c->die_sib;
29334 die->die_child = prev;
29335 }
29336 c->die_sib = NULL;
29337 mark_removed (c);
29338 return;
29339 }
29340 else
29341 {
29342 next = c->die_sib;
29343 c->die_sib = NULL;
29344 mark_removed (c);
29345 }
29346
29347 if (c != prev->die_sib)
29348 prev->die_sib = c;
29349 prune_unused_types_prune (c);
29350 } while (c != die->die_child);
29351 }
29352
29353 /* Remove dies representing declarations that we never use. */
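
/* In outline: verify (when assert checking) that all marks are clear, mark
   every DIE that is actually used in the main CU, the limbo list and the
   comdat type units (plus types used by global variables, pubnames, marked
   base types and possible DW_AT_call_origin callees), then prune the
   unmarked DIEs and finally clear the marks again.  */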
29354
29355 static void
29356 prune_unused_types (void)
29357 {
29358 unsigned int i;
29359 limbo_die_node *node;
29360 comdat_type_node *ctnode;
29361 pubname_entry *pub;
29362 dw_die_ref base_type;
29363
29364 #if ENABLE_ASSERT_CHECKING
29365 /* All the marks should already be clear. */
29366 verify_marks_clear (comp_unit_die ());
29367 for (node = limbo_die_list; node; node = node->next)
29368 verify_marks_clear (node->die);
29369 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29370 verify_marks_clear (ctnode->root_die);
29371 #endif /* ENABLE_ASSERT_CHECKING */
29372
29373 /* Mark types that are used in global variables. */
29374 premark_types_used_by_global_vars ();
29375
29376 /* Set the mark on nodes that are actually used. */
29377 prune_unused_types_walk (comp_unit_die ());
29378 for (node = limbo_die_list; node; node = node->next)
29379 prune_unused_types_walk (node->die);
29380 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29381 {
29382 prune_unused_types_walk (ctnode->root_die);
29383 prune_unused_types_mark (ctnode->type_die, 1);
29384 }
29385
29386 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29387 are unusual in that they are pubnames that are the children of pubtypes.
29388 They should only be marked via their parent DW_TAG_enumeration_type die,
29389 not as roots in themselves. */
29390 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29391 if (pub->die->die_tag != DW_TAG_enumerator)
29392 prune_unused_types_mark (pub->die, 1);
29393 for (i = 0; base_types.iterate (i, &base_type); i++)
29394 prune_unused_types_mark (base_type, 1);
29395
29396 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
29397 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
29398 callees). */
29399 cgraph_node *cnode;
29400 FOR_EACH_FUNCTION (cnode)
29401 if (cnode->referred_to_p (false))
29402 {
29403 dw_die_ref die = lookup_decl_die (cnode->decl);
29404 if (die == NULL || die->die_mark)
29405 continue;
29406 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29407 if (e->caller != cnode
29408 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
29409 {
29410 prune_unused_types_mark (die, 1);
29411 break;
29412 }
29413 }
29414
29415 if (debug_str_hash)
29416 debug_str_hash->empty ();
29417 if (skeleton_debug_str_hash)
29418 skeleton_debug_str_hash->empty ();
29419 prune_unused_types_prune (comp_unit_die ());
29420 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29421 {
29422 node = *pnode;
29423 if (!node->die->die_mark)
29424 *pnode = node->next;
29425 else
29426 {
29427 prune_unused_types_prune (node->die);
29428 pnode = &node->next;
29429 }
29430 }
29431 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29432 prune_unused_types_prune (ctnode->root_die);
29433
29434 /* Leave the marks clear. */
29435 prune_unmark_dies (comp_unit_die ());
29436 for (node = limbo_die_list; node; node = node->next)
29437 prune_unmark_dies (node->die);
29438 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29439 prune_unmark_dies (ctnode->root_die);
29440 }
29441
29442 /* Helpers to manipulate hash table of comdat type units. */
29443
29444 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29445 {
29446 static inline hashval_t hash (const comdat_type_node *);
29447 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29448 };
29449
29450 inline hashval_t
29451 comdat_type_hasher::hash (const comdat_type_node *type_node)
29452 {
29453 hashval_t h;
29454 memcpy (&h, type_node->signature, sizeof (h));
29455 return h;
29456 }
29457
29458 inline bool
29459 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
29460 const comdat_type_node *type_node_2)
29461 {
29462 return (! memcmp (type_node_1->signature, type_node_2->signature,
29463 DWARF_TYPE_SIGNATURE_SIZE));
29464 }
29465
29466 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
29467    to the location where it would have been added had we known its
29468    DECL_ASSEMBLER_NAME when we added the other attributes.  This will
29469    probably improve the compactness of the debug info by removing
29470    equivalent abbrevs, and it hides any differences caused by deferring
29471    the computation of the assembler name, triggered by e.g. PCH.  */
29472
29473 static inline void
29474 move_linkage_attr (dw_die_ref die)
29475 {
29476 unsigned ix = vec_safe_length (die->die_attr);
29477 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29478
29479 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29480 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29481
29482 while (--ix > 0)
29483 {
29484 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29485
29486 if (prev->dw_attr == DW_AT_decl_line
29487 || prev->dw_attr == DW_AT_decl_column
29488 || prev->dw_attr == DW_AT_name)
29489 break;
29490 }
29491
29492 if (ix != vec_safe_length (die->die_attr) - 1)
29493 {
29494 die->die_attr->pop ();
29495 die->die_attr->quick_insert (ix, linkage);
29496 }
29497 }
29498
29499 /* Helper function for resolve_addr, mark DW_TAG_base_type nodes
29500 referenced from typed stack ops and count how often they are used. */
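
/* Note that the die_mark field of a DW_TAG_base_type DIE is reused here as
   a plain use counter; base_type_cmp and move_marked_base_types below sort
   on it and clear it again once the DIEs have been moved to the front of
   the CU.  */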
29501
29502 static void
29503 mark_base_types (dw_loc_descr_ref loc)
29504 {
29505 dw_die_ref base_type = NULL;
29506
29507 for (; loc; loc = loc->dw_loc_next)
29508 {
29509 switch (loc->dw_loc_opc)
29510 {
29511 case DW_OP_regval_type:
29512 case DW_OP_deref_type:
29513 case DW_OP_GNU_regval_type:
29514 case DW_OP_GNU_deref_type:
29515 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29516 break;
29517 case DW_OP_convert:
29518 case DW_OP_reinterpret:
29519 case DW_OP_GNU_convert:
29520 case DW_OP_GNU_reinterpret:
29521 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29522 continue;
29523 /* FALLTHRU */
29524 case DW_OP_const_type:
29525 case DW_OP_GNU_const_type:
29526 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29527 break;
29528 case DW_OP_entry_value:
29529 case DW_OP_GNU_entry_value:
29530 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29531 continue;
29532 default:
29533 continue;
29534 }
29535 gcc_assert (base_type->die_parent == comp_unit_die ());
29536 if (base_type->die_mark)
29537 base_type->die_mark++;
29538 else
29539 {
29540 base_types.safe_push (base_type);
29541 base_type->die_mark = 1;
29542 }
29543 }
29544 }
29545
29546 /* Comparison function for sorting marked base types. */
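
/* Higher use counts (die_mark) sort first; ties are broken by decreasing
   DW_AT_byte_size, DW_AT_encoding and DW_AT_alignment values.  */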
29547
29548 static int
29549 base_type_cmp (const void *x, const void *y)
29550 {
29551 dw_die_ref dx = *(const dw_die_ref *) x;
29552 dw_die_ref dy = *(const dw_die_ref *) y;
29553 unsigned int byte_size1, byte_size2;
29554 unsigned int encoding1, encoding2;
29555 unsigned int align1, align2;
29556 if (dx->die_mark > dy->die_mark)
29557 return -1;
29558 if (dx->die_mark < dy->die_mark)
29559 return 1;
29560 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29561 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29562 if (byte_size1 < byte_size2)
29563 return 1;
29564 if (byte_size1 > byte_size2)
29565 return -1;
29566 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29567 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29568 if (encoding1 < encoding2)
29569 return 1;
29570 if (encoding1 > encoding2)
29571 return -1;
29572 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29573 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29574 if (align1 < align2)
29575 return 1;
29576 if (align1 > align2)
29577 return -1;
29578 return 0;
29579 }
29580
29581 /* Move base types marked by mark_base_types as early as possible
29582    in the CU, sorted by decreasing usage count, both to make the
29583    uleb128 references as small as possible and to make sure they
29584    will have die_offset already computed by calc_die_sizes when
29585    the sizes of typed stack loc ops are computed.  */
29586
29587 static void
29588 move_marked_base_types (void)
29589 {
29590 unsigned int i;
29591 dw_die_ref base_type, die, c;
29592
29593 if (base_types.is_empty ())
29594 return;
29595
29596   /* Sort by decreasing usage count; they will be added again in that
29597      order later on.  */
29598 base_types.qsort (base_type_cmp);
29599 die = comp_unit_die ();
29600 c = die->die_child;
29601 do
29602 {
29603 dw_die_ref prev = c;
29604 c = c->die_sib;
29605 while (c->die_mark)
29606 {
29607 remove_child_with_prev (c, prev);
29608 /* As base types got marked, there must be at least
29609 one node other than DW_TAG_base_type. */
29610 gcc_assert (die->die_child != NULL);
29611 c = prev->die_sib;
29612 }
29613 }
29614 while (c != die->die_child);
29615 gcc_assert (die->die_child);
29616 c = die->die_child;
29617 for (i = 0; base_types.iterate (i, &base_type); i++)
29618 {
29619 base_type->die_mark = 0;
29620 base_type->die_sib = c->die_sib;
29621 c->die_sib = base_type;
29622 c = base_type;
29623 }
29624 }
29625
29626 /* A helper function for resolve_addr: attempt to resolve one
29627    CONST_STRING and return true if successful.  Similarly, verify that
29628    SYMBOL_REFs refer to variables emitted in the current CU.  */
29629
29630 static bool
29631 resolve_one_addr (rtx *addr)
29632 {
29633 rtx rtl = *addr;
29634
29635 if (GET_CODE (rtl) == CONST_STRING)
29636 {
29637 size_t len = strlen (XSTR (rtl, 0)) + 1;
29638 tree t = build_string (len, XSTR (rtl, 0));
29639 tree tlen = size_int (len - 1);
29640 TREE_TYPE (t)
29641 = build_array_type (char_type_node, build_index_type (tlen));
29642 rtl = lookup_constant_def (t);
29643 if (!rtl || !MEM_P (rtl))
29644 return false;
29645 rtl = XEXP (rtl, 0);
29646 if (GET_CODE (rtl) == SYMBOL_REF
29647 && SYMBOL_REF_DECL (rtl)
29648 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29649 return false;
29650 vec_safe_push (used_rtx_array, rtl);
29651 *addr = rtl;
29652 return true;
29653 }
29654
29655 if (GET_CODE (rtl) == SYMBOL_REF
29656 && SYMBOL_REF_DECL (rtl))
29657 {
29658 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29659 {
29660 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29661 return false;
29662 }
29663 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29664 return false;
29665 }
29666
29667 if (GET_CODE (rtl) == CONST)
29668 {
29669 subrtx_ptr_iterator::array_type array;
29670 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29671 if (!resolve_one_addr (*iter))
29672 return false;
29673 }
29674
29675 return true;
29676 }
29677
29678 /* For STRING_CST T, return the SYMBOL_REF of its constant pool entry,
29679    if possible; if the string hasn't been seen yet, also create a
29680    DW_TAG_dwarf_procedure that DW_OP_implicit_pointer can reference.  */
29681
29682 static rtx
29683 string_cst_pool_decl (tree t)
29684 {
29685 rtx rtl = output_constant_def (t, 1);
29686 unsigned char *array;
29687 dw_loc_descr_ref l;
29688 tree decl;
29689 size_t len;
29690 dw_die_ref ref;
29691
29692 if (!rtl || !MEM_P (rtl))
29693 return NULL_RTX;
29694 rtl = XEXP (rtl, 0);
29695 if (GET_CODE (rtl) != SYMBOL_REF
29696 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29697 return NULL_RTX;
29698
29699 decl = SYMBOL_REF_DECL (rtl);
29700 if (!lookup_decl_die (decl))
29701 {
29702 len = TREE_STRING_LENGTH (t);
29703 vec_safe_push (used_rtx_array, rtl);
29704 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29705 array = ggc_vec_alloc<unsigned char> (len);
29706 memcpy (array, TREE_STRING_POINTER (t), len);
29707 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29708 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29709 l->dw_loc_oprnd2.v.val_vec.length = len;
29710 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29711 l->dw_loc_oprnd2.v.val_vec.array = array;
29712 add_AT_loc (ref, DW_AT_location, l);
29713 equate_decl_number_to_die (decl, ref);
29714 }
29715 return rtl;
29716 }
29717
29718 /* Helper function of resolve_addr_in_expr.  LOC is
29719    a DW_OP_addr followed by DW_OP_stack_value, either at the start
29720    of the exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
29721    resolved.  Replace it (both DW_OP_addr and DW_OP_stack_value)
29722    with DW_OP_implicit_pointer if possible and return true;
29723    if unsuccessful, return false.  */
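
/* For illustration (the operands here are made up), an unresolvable

     DW_OP_addr <sym>  DW_OP_stack_value

   fragment is rewritten as

     DW_OP_implicit_pointer <DIE of SYMBOL_REF_DECL (sym)> <offset>

   when that decl's DIE carries a DW_AT_location or DW_AT_const_value
   attribute.  */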
29724
29725 static bool
29726 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29727 {
29728 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29729 HOST_WIDE_INT offset = 0;
29730 dw_die_ref ref = NULL;
29731 tree decl;
29732
29733 if (GET_CODE (rtl) == CONST
29734 && GET_CODE (XEXP (rtl, 0)) == PLUS
29735 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29736 {
29737 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29738 rtl = XEXP (XEXP (rtl, 0), 0);
29739 }
29740 if (GET_CODE (rtl) == CONST_STRING)
29741 {
29742 size_t len = strlen (XSTR (rtl, 0)) + 1;
29743 tree t = build_string (len, XSTR (rtl, 0));
29744 tree tlen = size_int (len - 1);
29745
29746 TREE_TYPE (t)
29747 = build_array_type (char_type_node, build_index_type (tlen));
29748 rtl = string_cst_pool_decl (t);
29749 if (!rtl)
29750 return false;
29751 }
29752 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
29753 {
29754 decl = SYMBOL_REF_DECL (rtl);
29755 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
29756 {
29757 ref = lookup_decl_die (decl);
29758 if (ref && (get_AT (ref, DW_AT_location)
29759 || get_AT (ref, DW_AT_const_value)))
29760 {
29761 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
29762 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29763 loc->dw_loc_oprnd1.val_entry = NULL;
29764 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29765 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29766 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29767 loc->dw_loc_oprnd2.v.val_int = offset;
29768 return true;
29769 }
29770 }
29771 }
29772 return false;
29773 }
29774
29775 /* A helper function for resolve_addr: handle one location
29776    expression and return false if at least one CONST_STRING or
29777    SYMBOL_REF in it couldn't be resolved.  */
29778
29779 static bool
29780 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
29781 {
29782 dw_loc_descr_ref keep = NULL;
29783 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
29784 switch (loc->dw_loc_opc)
29785 {
29786 case DW_OP_addr:
29787 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29788 {
29789 if ((prev == NULL
29790 || prev->dw_loc_opc == DW_OP_piece
29791 || prev->dw_loc_opc == DW_OP_bit_piece)
29792 && loc->dw_loc_next
29793 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
29794 && (!dwarf_strict || dwarf_version >= 5)
29795 && optimize_one_addr_into_implicit_ptr (loc))
29796 break;
29797 return false;
29798 }
29799 break;
29800 case DW_OP_GNU_addr_index:
29801 case DW_OP_addrx:
29802 case DW_OP_GNU_const_index:
29803 case DW_OP_constx:
29804 if ((loc->dw_loc_opc == DW_OP_GNU_addr_index
29805 || loc->dw_loc_opc == DW_OP_addrx)
29806 || ((loc->dw_loc_opc == DW_OP_GNU_const_index
29807 || loc->dw_loc_opc == DW_OP_constx)
29808 && loc->dtprel))
29809 {
29810 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
29811 if (!resolve_one_addr (&rtl))
29812 return false;
29813 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
29814 loc->dw_loc_oprnd1.val_entry
29815 = add_addr_table_entry (rtl, ate_kind_rtx);
29816 }
29817 break;
29818 case DW_OP_const4u:
29819 case DW_OP_const8u:
29820 if (loc->dtprel
29821 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29822 return false;
29823 break;
29824 case DW_OP_plus_uconst:
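	/* If pushing the operand with DW_OP_lit<n>/DW_OP_const<n>u followed
	   by DW_OP_plus needs fewer bytes than DW_OP_plus_uconst with its
	   uleb128 operand, rewrite the op that way.  */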
29825 if (size_of_loc_descr (loc)
29826 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
29827 + 1
29828 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
29829 {
29830 dw_loc_descr_ref repl
29831 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
29832 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
29833 add_loc_descr (&repl, loc->dw_loc_next);
29834 *loc = *repl;
29835 }
29836 break;
29837 case DW_OP_implicit_value:
29838 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
29839 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
29840 return false;
29841 break;
29842 case DW_OP_implicit_pointer:
29843 case DW_OP_GNU_implicit_pointer:
29844 case DW_OP_GNU_parameter_ref:
29845 case DW_OP_GNU_variable_value:
29846 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29847 {
29848 dw_die_ref ref
29849 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29850 if (ref == NULL)
29851 return false;
29852 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29853 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29854 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29855 }
29856 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
29857 {
29858 if (prev == NULL
29859 && loc->dw_loc_next == NULL
29860 && AT_class (a) == dw_val_class_loc)
29861 switch (a->dw_attr)
29862 {
29863 		/* The following attributes allow both exprloc and reference,
29864 		   so if the whole expression is DW_OP_GNU_variable_value
29865 		   alone we could transform it into a reference.  */
29866 case DW_AT_byte_size:
29867 case DW_AT_bit_size:
29868 case DW_AT_lower_bound:
29869 case DW_AT_upper_bound:
29870 case DW_AT_bit_stride:
29871 case DW_AT_count:
29872 case DW_AT_allocated:
29873 case DW_AT_associated:
29874 case DW_AT_byte_stride:
29875 a->dw_attr_val.val_class = dw_val_class_die_ref;
29876 a->dw_attr_val.val_entry = NULL;
29877 a->dw_attr_val.v.val_die_ref.die
29878 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29879 a->dw_attr_val.v.val_die_ref.external = 0;
29880 return true;
29881 default:
29882 break;
29883 }
29884 if (dwarf_strict)
29885 return false;
29886 }
29887 break;
29888 case DW_OP_const_type:
29889 case DW_OP_regval_type:
29890 case DW_OP_deref_type:
29891 case DW_OP_convert:
29892 case DW_OP_reinterpret:
29893 case DW_OP_GNU_const_type:
29894 case DW_OP_GNU_regval_type:
29895 case DW_OP_GNU_deref_type:
29896 case DW_OP_GNU_convert:
29897 case DW_OP_GNU_reinterpret:
29898 while (loc->dw_loc_next
29899 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
29900 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
29901 {
29902 dw_die_ref base1, base2;
29903 unsigned enc1, enc2, size1, size2;
29904 if (loc->dw_loc_opc == DW_OP_regval_type
29905 || loc->dw_loc_opc == DW_OP_deref_type
29906 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29907 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29908 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
29909 else if (loc->dw_loc_oprnd1.val_class
29910 == dw_val_class_unsigned_const)
29911 break;
29912 else
29913 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29914 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
29915 == dw_val_class_unsigned_const)
29916 break;
29917 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
29918 gcc_assert (base1->die_tag == DW_TAG_base_type
29919 && base2->die_tag == DW_TAG_base_type);
29920 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
29921 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
29922 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
29923 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
29924 if (size1 == size2
29925 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
29926 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
29927 && loc != keep)
29928 || enc1 == enc2))
29929 {
29930 /* Optimize away next DW_OP_convert after
29931 adjusting LOC's base type die reference. */
29932 if (loc->dw_loc_opc == DW_OP_regval_type
29933 || loc->dw_loc_opc == DW_OP_deref_type
29934 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29935 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29936 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
29937 else
29938 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
29939 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29940 continue;
29941 }
29942 /* Don't change integer DW_OP_convert after e.g. floating
29943 point typed stack entry. */
29944 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
29945 keep = loc->dw_loc_next;
29946 break;
29947 }
29948 break;
29949 default:
29950 break;
29951 }
29952 return true;
29953 }
29954
29955 /* Helper function of resolve_addr.  DIE had a DW_AT_location consisting
29956    of DW_OP_addr alone, whose operand referred to DECL, and that
29957    DW_OP_addr couldn't be resolved.  resolve_addr has already
29958    removed the DW_AT_location attribute.  This function attempts to
29959    add to DIE a new DW_AT_location attribute using DW_OP_implicit_pointer,
29960    or a DW_AT_const_value attribute, if possible.  */
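
/* A hypothetical example (not taken from the sources): for

     static int x;
     static int *p = &x;

   where P itself was optimized away so its DW_OP_addr cannot be resolved,
   the initializer may either yield a DW_AT_const_value directly or, being
   an ADDR_EXPR of a variable whose DIE has a location, allow P's location
   to be expressed as DW_OP_implicit_pointer <DIE of x> <0>.  */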
29961
29962 static void
29963 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
29964 {
29965 if (!VAR_P (decl)
29966 || lookup_decl_die (decl) != die
29967 || DECL_EXTERNAL (decl)
29968 || !TREE_STATIC (decl)
29969 || DECL_INITIAL (decl) == NULL_TREE
29970 || DECL_P (DECL_INITIAL (decl))
29971 || get_AT (die, DW_AT_const_value))
29972 return;
29973
29974 tree init = DECL_INITIAL (decl);
29975 HOST_WIDE_INT offset = 0;
29976 /* For variables that have been optimized away and thus
29977 don't have a memory location, see if we can emit
29978 DW_AT_const_value instead. */
29979 if (tree_add_const_value_attribute (die, init))
29980 return;
29981 if (dwarf_strict && dwarf_version < 5)
29982 return;
29983 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
29984 and ADDR_EXPR refers to a decl that has DW_AT_location or
29985 DW_AT_const_value (but isn't addressable, otherwise
29986 resolving the original DW_OP_addr wouldn't fail), see if
29987 we can add DW_OP_implicit_pointer. */
29988 STRIP_NOPS (init);
29989 if (TREE_CODE (init) == POINTER_PLUS_EXPR
29990 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
29991 {
29992 offset = tree_to_shwi (TREE_OPERAND (init, 1));
29993 init = TREE_OPERAND (init, 0);
29994 STRIP_NOPS (init);
29995 }
29996 if (TREE_CODE (init) != ADDR_EXPR)
29997 return;
29998 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
29999 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
30000 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
30001 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
30002 && TREE_OPERAND (init, 0) != decl))
30003 {
30004 dw_die_ref ref;
30005 dw_loc_descr_ref l;
30006
30007 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
30008 {
30009 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
30010 if (!rtl)
30011 return;
30012 decl = SYMBOL_REF_DECL (rtl);
30013 }
30014 else
30015 decl = TREE_OPERAND (init, 0);
30016 ref = lookup_decl_die (decl);
30017 if (ref == NULL
30018 || (!get_AT (ref, DW_AT_location)
30019 && !get_AT (ref, DW_AT_const_value)))
30020 return;
30021 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
30022 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30023 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
30024 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30025 add_AT_loc (die, DW_AT_location, l);
30026 }
30027 }
30028
30029 /* Return NULL if L is a valid DWARF expression, or the first op that
30030    is not a valid DWARF expression.  */
30031
30032 static dw_loc_descr_ref
30033 non_dwarf_expression (dw_loc_descr_ref l)
30034 {
30035 while (l)
30036 {
30037 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30038 return l;
30039 switch (l->dw_loc_opc)
30040 {
30041 case DW_OP_regx:
30042 case DW_OP_implicit_value:
30043 case DW_OP_stack_value:
30044 case DW_OP_implicit_pointer:
30045 case DW_OP_GNU_implicit_pointer:
30046 case DW_OP_GNU_parameter_ref:
30047 case DW_OP_piece:
30048 case DW_OP_bit_piece:
30049 return l;
30050 default:
30051 break;
30052 }
30053 l = l->dw_loc_next;
30054 }
30055 return NULL;
30056 }
30057
30058 /* Return an adjusted copy of EXPR:
30059    If it is an empty DWARF expression, return it.
30060    If it is a valid non-empty DWARF expression,
30061    return a copy of EXPR with DW_OP_deref appended to it.
30062    If it is a DWARF expression followed by DW_OP_reg{N,x}, return a
30063    copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
30064    If it is a DWARF expression followed by DW_OP_stack_value, return a
30065    copy of the DWARF expression without anything appended.
30066    Otherwise, return NULL.  */
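
/* Illustrative examples of the cases above (operand values are made up):

     <empty>                        ->  <empty>
     DW_OP_fbreg <-16>              ->  DW_OP_fbreg <-16> DW_OP_deref
     DW_OP_reg3                     ->  DW_OP_breg3 <0>
     DW_OP_lit1 DW_OP_stack_value   ->  DW_OP_lit1  */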
30067
30068 static dw_loc_descr_ref
30069 copy_deref_exprloc (dw_loc_descr_ref expr)
30070 {
30071 dw_loc_descr_ref tail = NULL;
30072
30073 if (expr == NULL)
30074 return NULL;
30075
30076 dw_loc_descr_ref l = non_dwarf_expression (expr);
30077 if (l && l->dw_loc_next)
30078 return NULL;
30079
30080 if (l)
30081 {
30082 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30083 tail = new_loc_descr ((enum dwarf_location_atom)
30084 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
30085 0, 0);
30086 else
30087 switch (l->dw_loc_opc)
30088 {
30089 case DW_OP_regx:
30090 tail = new_loc_descr (DW_OP_bregx,
30091 l->dw_loc_oprnd1.v.val_unsigned, 0);
30092 break;
30093 case DW_OP_stack_value:
30094 break;
30095 default:
30096 return NULL;
30097 }
30098 }
30099 else
30100 tail = new_loc_descr (DW_OP_deref, 0, 0);
30101
30102 dw_loc_descr_ref ret = NULL, *p = &ret;
30103 while (expr != l)
30104 {
30105 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
30106 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
30107 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
30108 p = &(*p)->dw_loc_next;
30109 expr = expr->dw_loc_next;
30110 }
30111 *p = tail;
30112 return ret;
30113 }
30114
30115 /* For a DW_AT_string_length attribute whose DW_OP_GNU_variable_value
30116    references a variable or argument, adjust it if needed and return:
30117    -1 if the DW_AT_string_length attribute (and any present
30118       DW_AT_{string_length_,}byte_size attribute) should be removed,
30119    0 if the attribute should be kept, perhaps with minor modifications
30120      (no rescan is needed), or 1 if it has been successfully adjusted.  */
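
/* Such DW_OP_GNU_variable_value references typically arise for the
   DW_AT_string_length of Fortran character variables whose length lives in
   a separate artificial variable; that is an illustrative observation only,
   nothing below depends on it.  */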
30121
30122 static int
30123 optimize_string_length (dw_attr_node *a)
30124 {
30125 dw_loc_descr_ref l = AT_loc (a), lv;
30126 dw_die_ref die;
30127 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30128 {
30129 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
30130 die = lookup_decl_die (decl);
30131 if (die)
30132 {
30133 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30134 l->dw_loc_oprnd1.v.val_die_ref.die = die;
30135 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30136 }
30137 else
30138 return -1;
30139 }
30140 else
30141 die = l->dw_loc_oprnd1.v.val_die_ref.die;
30142
30143 /* DWARF5 allows reference class, so we can then reference the DIE.
30144 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
30145 if (l->dw_loc_next != NULL && dwarf_version >= 5)
30146 {
30147 a->dw_attr_val.val_class = dw_val_class_die_ref;
30148 a->dw_attr_val.val_entry = NULL;
30149 a->dw_attr_val.v.val_die_ref.die = die;
30150 a->dw_attr_val.v.val_die_ref.external = 0;
30151 return 0;
30152 }
30153
30154 dw_attr_node *av = get_AT (die, DW_AT_location);
30155 dw_loc_list_ref d;
30156 bool non_dwarf_expr = false;
30157
30158 if (av == NULL)
30159 return dwarf_strict ? -1 : 0;
30160 switch (AT_class (av))
30161 {
30162 case dw_val_class_loc_list:
30163 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30164 if (d->expr && non_dwarf_expression (d->expr))
30165 non_dwarf_expr = true;
30166 break;
30167 case dw_val_class_view_list:
30168 gcc_unreachable ();
30169 case dw_val_class_loc:
30170 lv = AT_loc (av);
30171 if (lv == NULL)
30172 return dwarf_strict ? -1 : 0;
30173 if (non_dwarf_expression (lv))
30174 non_dwarf_expr = true;
30175 break;
30176 default:
30177 return dwarf_strict ? -1 : 0;
30178 }
30179
30180 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
30181 into DW_OP_call4 or DW_OP_GNU_variable_value into
30182 DW_OP_call4 DW_OP_deref, do so. */
30183 if (!non_dwarf_expr
30184 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
30185 {
30186 l->dw_loc_opc = DW_OP_call4;
30187 if (l->dw_loc_next)
30188 l->dw_loc_next = NULL;
30189 else
30190 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
30191 return 0;
30192 }
30193
30194 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
30195 copy over the DW_AT_location attribute from die to a. */
30196 if (l->dw_loc_next != NULL)
30197 {
30198 a->dw_attr_val = av->dw_attr_val;
30199 return 1;
30200 }
30201
30202 dw_loc_list_ref list, *p;
30203 switch (AT_class (av))
30204 {
30205 case dw_val_class_loc_list:
30206 p = &list;
30207 list = NULL;
30208 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30209 {
30210 lv = copy_deref_exprloc (d->expr);
30211 if (lv)
30212 {
30213 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
30214 p = &(*p)->dw_loc_next;
30215 }
30216 else if (!dwarf_strict && d->expr)
30217 return 0;
30218 }
30219 if (list == NULL)
30220 return dwarf_strict ? -1 : 0;
30221 a->dw_attr_val.val_class = dw_val_class_loc_list;
30222 gen_llsym (list);
30223 *AT_loc_list_ptr (a) = list;
30224 return 1;
30225 case dw_val_class_loc:
30226 lv = copy_deref_exprloc (AT_loc (av));
30227 if (lv == NULL)
30228 return dwarf_strict ? -1 : 0;
30229 a->dw_attr_val.v.val_loc = lv;
30230 return 1;
30231 default:
30232 gcc_unreachable ();
30233 }
30234 }
30235
30236 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30237    an address in the .rodata section if the string literal is emitted
30238    there; if it isn't found in .rodata, remove the containing location
30239    list, or replace DW_AT_const_value with DW_AT_location and an empty
30240    location expression.  Similarly for SYMBOL_REFs, keep only those that
30241    refer to something that has been emitted in the current CU.  */
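
/* For example, a location list entry whose expression still refers to a
   string literal or variable that was never emitted is dropped here, and
   if that leaves the list empty, the whole DW_AT_location attribute is
   removed from the DIE.  */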
30242
30243 static void
30244 resolve_addr (dw_die_ref die)
30245 {
30246 dw_die_ref c;
30247 dw_attr_node *a;
30248 dw_loc_list_ref *curr, *start, loc;
30249 unsigned ix;
30250 bool remove_AT_byte_size = false;
30251
30252 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30253 switch (AT_class (a))
30254 {
30255 case dw_val_class_loc_list:
30256 start = curr = AT_loc_list_ptr (a);
30257 loc = *curr;
30258 gcc_assert (loc);
30259 /* The same list can be referenced more than once. See if we have
30260 already recorded the result from a previous pass. */
30261 if (loc->replaced)
30262 *curr = loc->dw_loc_next;
30263 else if (!loc->resolved_addr)
30264 {
30265 /* As things stand, we do not expect or allow one die to
30266 reference a suffix of another die's location list chain.
30267 References must be identical or completely separate.
30268 There is therefore no need to cache the result of this
30269 pass on any list other than the first; doing so
30270 would lead to unnecessary writes. */
30271 while (*curr)
30272 {
30273 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30274 if (!resolve_addr_in_expr (a, (*curr)->expr))
30275 {
30276 dw_loc_list_ref next = (*curr)->dw_loc_next;
30277 dw_loc_descr_ref l = (*curr)->expr;
30278
30279 if (next && (*curr)->ll_symbol)
30280 {
30281 gcc_assert (!next->ll_symbol);
30282 next->ll_symbol = (*curr)->ll_symbol;
30283 next->vl_symbol = (*curr)->vl_symbol;
30284 }
30285 if (dwarf_split_debug_info)
30286 remove_loc_list_addr_table_entries (l);
30287 *curr = next;
30288 }
30289 else
30290 {
30291 mark_base_types ((*curr)->expr);
30292 curr = &(*curr)->dw_loc_next;
30293 }
30294 }
30295 if (loc == *start)
30296 loc->resolved_addr = 1;
30297 else
30298 {
30299 loc->replaced = 1;
30300 loc->dw_loc_next = *start;
30301 }
30302 }
30303 if (!*start)
30304 {
30305 remove_AT (die, a->dw_attr);
30306 ix--;
30307 }
30308 break;
30309 case dw_val_class_view_list:
30310 {
30311 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30312 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30313 dw_val_node *llnode
30314 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30315 /* If we no longer have a loclist, or it no longer needs
30316 views, drop this attribute. */
30317 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30318 {
30319 remove_AT (die, a->dw_attr);
30320 ix--;
30321 }
30322 break;
30323 }
30324 case dw_val_class_loc:
30325 {
30326 dw_loc_descr_ref l = AT_loc (a);
30327 /* DW_OP_GNU_variable_value DW_OP_stack_value or
30328 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30329 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
30330 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
30331 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30332 with DW_FORM_ref referencing the same DIE as
30333 DW_OP_GNU_variable_value used to reference. */
30334 if (a->dw_attr == DW_AT_string_length
30335 && l
30336 && l->dw_loc_opc == DW_OP_GNU_variable_value
30337 && (l->dw_loc_next == NULL
30338 || (l->dw_loc_next->dw_loc_next == NULL
30339 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30340 {
30341 switch (optimize_string_length (a))
30342 {
30343 case -1:
30344 remove_AT (die, a->dw_attr);
30345 ix--;
30346 /* If we drop DW_AT_string_length, we need to drop also
30347 DW_AT_{string_length_,}byte_size. */
30348 remove_AT_byte_size = true;
30349 continue;
30350 default:
30351 break;
30352 case 1:
30353 /* Even if we keep the optimized DW_AT_string_length,
30354 it might have changed AT_class, so process it again. */
30355 ix--;
30356 continue;
30357 }
30358 }
30359 /* For -gdwarf-2 don't attempt to optimize
30360 DW_AT_data_member_location containing
30361 DW_OP_plus_uconst - older consumers might
30362 rely on it being that op instead of a more complex,
30363 but shorter, location description. */
30364 if ((dwarf_version > 2
30365 || a->dw_attr != DW_AT_data_member_location
30366 || l == NULL
30367 || l->dw_loc_opc != DW_OP_plus_uconst
30368 || l->dw_loc_next != NULL)
30369 && !resolve_addr_in_expr (a, l))
30370 {
30371 if (dwarf_split_debug_info)
30372 remove_loc_list_addr_table_entries (l);
30373 if (l != NULL
30374 && l->dw_loc_next == NULL
30375 && l->dw_loc_opc == DW_OP_addr
30376 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30377 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30378 && a->dw_attr == DW_AT_location)
30379 {
30380 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30381 remove_AT (die, a->dw_attr);
30382 ix--;
30383 optimize_location_into_implicit_ptr (die, decl);
30384 break;
30385 }
30386 if (a->dw_attr == DW_AT_string_length)
30387 /* If we drop DW_AT_string_length, we need to drop also
30388 DW_AT_{string_length_,}byte_size. */
30389 remove_AT_byte_size = true;
30390 remove_AT (die, a->dw_attr);
30391 ix--;
30392 }
30393 else
30394 mark_base_types (l);
30395 }
30396 break;
30397 case dw_val_class_addr:
30398 if (a->dw_attr == DW_AT_const_value
30399 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30400 {
30401 if (AT_index (a) != NOT_INDEXED)
30402 remove_addr_table_entry (a->dw_attr_val.val_entry);
30403 remove_AT (die, a->dw_attr);
30404 ix--;
30405 }
30406 if ((die->die_tag == DW_TAG_call_site
30407 && a->dw_attr == DW_AT_call_origin)
30408 || (die->die_tag == DW_TAG_GNU_call_site
30409 && a->dw_attr == DW_AT_abstract_origin))
30410 {
30411 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30412 dw_die_ref tdie = lookup_decl_die (tdecl);
30413 dw_die_ref cdie;
30414 if (tdie == NULL
30415 && DECL_EXTERNAL (tdecl)
30416 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30417 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30418 {
30419 dw_die_ref pdie = cdie;
30420 /* Make sure we don't add these DIEs into type units.
30421 We could emit skeleton DIEs for context (namespaces,
30422 outer structs/classes) and a skeleton DIE for the
30423 innermost context with DW_AT_signature pointing to the
30424 type unit. See PR78835. */
30425 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30426 pdie = pdie->die_parent;
30427 if (pdie == NULL)
30428 {
30429 		  /* Creating a full DIE for tdecl is overly expensive, and
30430 		     at this point, in the LTO phase, it would even be wrong,
30431 		     as it can end up generating new type DIEs we didn't
30432 		     output, and optimize_external_refs would then crash.  */
30433 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30434 add_AT_flag (tdie, DW_AT_external, 1);
30435 add_AT_flag (tdie, DW_AT_declaration, 1);
30436 add_linkage_attr (tdie, tdecl);
30437 add_name_and_src_coords_attributes (tdie, tdecl, true);
30438 equate_decl_number_to_die (tdecl, tdie);
30439 }
30440 }
30441 if (tdie)
30442 {
30443 a->dw_attr_val.val_class = dw_val_class_die_ref;
30444 a->dw_attr_val.v.val_die_ref.die = tdie;
30445 a->dw_attr_val.v.val_die_ref.external = 0;
30446 }
30447 else
30448 {
30449 if (AT_index (a) != NOT_INDEXED)
30450 remove_addr_table_entry (a->dw_attr_val.val_entry);
30451 remove_AT (die, a->dw_attr);
30452 ix--;
30453 }
30454 }
30455 break;
30456 default:
30457 break;
30458 }
30459
30460 if (remove_AT_byte_size)
30461 remove_AT (die, dwarf_version >= 5
30462 ? DW_AT_string_length_byte_size
30463 : DW_AT_byte_size);
30464
30465 FOR_EACH_CHILD (die, c, resolve_addr (c));
30466 }
30467 \f
30468 /* Helper routines for optimize_location_lists.
30469    This pass tries to share identical location lists in the .debug_loc
30470    section.  */
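
/* The intended scheme, sketched: each list gets a hash via hash_loc_list,
   the lists are entered into a hash_table keyed by loc_list_hasher, and
   DIEs whose lists hash and compare equal end up sharing a single copy in
   the output section (see the driver further below).  */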
30471
30472 /* Iteratively hash operands of LOC opcode into HSTATE. */
30473
30474 static void
30475 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30476 {
30477 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30478 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30479
30480 switch (loc->dw_loc_opc)
30481 {
30482 case DW_OP_const4u:
30483 case DW_OP_const8u:
30484 if (loc->dtprel)
30485 goto hash_addr;
30486 /* FALLTHRU */
30487 case DW_OP_const1u:
30488 case DW_OP_const1s:
30489 case DW_OP_const2u:
30490 case DW_OP_const2s:
30491 case DW_OP_const4s:
30492 case DW_OP_const8s:
30493 case DW_OP_constu:
30494 case DW_OP_consts:
30495 case DW_OP_pick:
30496 case DW_OP_plus_uconst:
30497 case DW_OP_breg0:
30498 case DW_OP_breg1:
30499 case DW_OP_breg2:
30500 case DW_OP_breg3:
30501 case DW_OP_breg4:
30502 case DW_OP_breg5:
30503 case DW_OP_breg6:
30504 case DW_OP_breg7:
30505 case DW_OP_breg8:
30506 case DW_OP_breg9:
30507 case DW_OP_breg10:
30508 case DW_OP_breg11:
30509 case DW_OP_breg12:
30510 case DW_OP_breg13:
30511 case DW_OP_breg14:
30512 case DW_OP_breg15:
30513 case DW_OP_breg16:
30514 case DW_OP_breg17:
30515 case DW_OP_breg18:
30516 case DW_OP_breg19:
30517 case DW_OP_breg20:
30518 case DW_OP_breg21:
30519 case DW_OP_breg22:
30520 case DW_OP_breg23:
30521 case DW_OP_breg24:
30522 case DW_OP_breg25:
30523 case DW_OP_breg26:
30524 case DW_OP_breg27:
30525 case DW_OP_breg28:
30526 case DW_OP_breg29:
30527 case DW_OP_breg30:
30528 case DW_OP_breg31:
30529 case DW_OP_regx:
30530 case DW_OP_fbreg:
30531 case DW_OP_piece:
30532 case DW_OP_deref_size:
30533 case DW_OP_xderef_size:
30534 hstate.add_object (val1->v.val_int);
30535 break;
30536 case DW_OP_skip:
30537 case DW_OP_bra:
30538 {
30539 int offset;
30540
30541 gcc_assert (val1->val_class == dw_val_class_loc);
30542 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30543 hstate.add_object (offset);
30544 }
30545 break;
30546 case DW_OP_implicit_value:
30547 hstate.add_object (val1->v.val_unsigned);
30548 switch (val2->val_class)
30549 {
30550 case dw_val_class_const:
30551 hstate.add_object (val2->v.val_int);
30552 break;
30553 case dw_val_class_vec:
30554 {
30555 unsigned int elt_size = val2->v.val_vec.elt_size;
30556 unsigned int len = val2->v.val_vec.length;
30557
30558 hstate.add_int (elt_size);
30559 hstate.add_int (len);
30560 hstate.add (val2->v.val_vec.array, len * elt_size);
30561 }
30562 break;
30563 case dw_val_class_const_double:
30564 hstate.add_object (val2->v.val_double.low);
30565 hstate.add_object (val2->v.val_double.high);
30566 break;
30567 case dw_val_class_wide_int:
30568 hstate.add (val2->v.val_wide->get_val (),
30569 get_full_len (*val2->v.val_wide)
30570 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30571 break;
30572 case dw_val_class_addr:
30573 inchash::add_rtx (val2->v.val_addr, hstate);
30574 break;
30575 default:
30576 gcc_unreachable ();
30577 }
30578 break;
30579 case DW_OP_bregx:
30580 case DW_OP_bit_piece:
30581 hstate.add_object (val1->v.val_int);
30582 hstate.add_object (val2->v.val_int);
30583 break;
30584 case DW_OP_addr:
30585 hash_addr:
30586 if (loc->dtprel)
30587 {
30588 unsigned char dtprel = 0xd1;
30589 hstate.add_object (dtprel);
30590 }
30591 inchash::add_rtx (val1->v.val_addr, hstate);
30592 break;
30593 case DW_OP_GNU_addr_index:
30594 case DW_OP_addrx:
30595 case DW_OP_GNU_const_index:
30596 case DW_OP_constx:
30597 {
30598 if (loc->dtprel)
30599 {
30600 unsigned char dtprel = 0xd1;
30601 hstate.add_object (dtprel);
30602 }
30603 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30604 }
30605 break;
30606 case DW_OP_implicit_pointer:
30607 case DW_OP_GNU_implicit_pointer:
30608 hstate.add_int (val2->v.val_int);
30609 break;
30610 case DW_OP_entry_value:
30611 case DW_OP_GNU_entry_value:
30612 hstate.add_object (val1->v.val_loc);
30613 break;
30614 case DW_OP_regval_type:
30615 case DW_OP_deref_type:
30616 case DW_OP_GNU_regval_type:
30617 case DW_OP_GNU_deref_type:
30618 {
30619 unsigned int byte_size
30620 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30621 unsigned int encoding
30622 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30623 hstate.add_object (val1->v.val_int);
30624 hstate.add_object (byte_size);
30625 hstate.add_object (encoding);
30626 }
30627 break;
30628 case DW_OP_convert:
30629 case DW_OP_reinterpret:
30630 case DW_OP_GNU_convert:
30631 case DW_OP_GNU_reinterpret:
30632 if (val1->val_class == dw_val_class_unsigned_const)
30633 {
30634 hstate.add_object (val1->v.val_unsigned);
30635 break;
30636 }
30637 /* FALLTHRU */
30638 case DW_OP_const_type:
30639 case DW_OP_GNU_const_type:
30640 {
30641 unsigned int byte_size
30642 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30643 unsigned int encoding
30644 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30645 hstate.add_object (byte_size);
30646 hstate.add_object (encoding);
30647 if (loc->dw_loc_opc != DW_OP_const_type
30648 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30649 break;
30650 hstate.add_object (val2->val_class);
30651 switch (val2->val_class)
30652 {
30653 case dw_val_class_const:
30654 hstate.add_object (val2->v.val_int);
30655 break;
30656 case dw_val_class_vec:
30657 {
30658 unsigned int elt_size = val2->v.val_vec.elt_size;
30659 unsigned int len = val2->v.val_vec.length;
30660
30661 hstate.add_object (elt_size);
30662 hstate.add_object (len);
30663 hstate.add (val2->v.val_vec.array, len * elt_size);
30664 }
30665 break;
30666 case dw_val_class_const_double:
30667 hstate.add_object (val2->v.val_double.low);
30668 hstate.add_object (val2->v.val_double.high);
30669 break;
30670 case dw_val_class_wide_int:
30671 hstate.add (val2->v.val_wide->get_val (),
30672 get_full_len (*val2->v.val_wide)
30673 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30674 break;
30675 default:
30676 gcc_unreachable ();
30677 }
30678 }
30679 break;
30680
30681 default:
30682 /* Other codes have no operands. */
30683 break;
30684 }
30685 }
30686
30687 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30688
30689 static inline void
30690 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30691 {
30692 dw_loc_descr_ref l;
30693 bool sizes_computed = false;
30694 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
30695 size_of_locs (loc);
30696
30697 for (l = loc; l != NULL; l = l->dw_loc_next)
30698 {
30699 enum dwarf_location_atom opc = l->dw_loc_opc;
30700 hstate.add_object (opc);
30701 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30702 {
30703 size_of_locs (loc);
30704 sizes_computed = true;
30705 }
30706 hash_loc_operands (l, hstate);
30707 }
30708 }
30709
30710 /* Compute hash of the whole location list LIST_HEAD. */
30711
30712 static inline void
30713 hash_loc_list (dw_loc_list_ref list_head)
30714 {
30715 dw_loc_list_ref curr = list_head;
30716 inchash::hash hstate;
30717
30718 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30719 {
30720 hstate.add (curr->begin, strlen (curr->begin) + 1);
30721 hstate.add (curr->end, strlen (curr->end) + 1);
30722 hstate.add_object (curr->vbegin);
30723 hstate.add_object (curr->vend);
30724 if (curr->section)
30725 hstate.add (curr->section, strlen (curr->section) + 1);
30726 hash_locs (curr->expr, hstate);
30727 }
30728 list_head->hash = hstate.end ();
30729 }
30730
30731 /* Return true if X and Y opcodes have the same operands. */
30732
30733 static inline bool
30734 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30735 {
30736 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30737 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30738 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30739 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30740
30741 switch (x->dw_loc_opc)
30742 {
30743 case DW_OP_const4u:
30744 case DW_OP_const8u:
30745 if (x->dtprel)
30746 goto hash_addr;
30747 /* FALLTHRU */
30748 case DW_OP_const1u:
30749 case DW_OP_const1s:
30750 case DW_OP_const2u:
30751 case DW_OP_const2s:
30752 case DW_OP_const4s:
30753 case DW_OP_const8s:
30754 case DW_OP_constu:
30755 case DW_OP_consts:
30756 case DW_OP_pick:
30757 case DW_OP_plus_uconst:
30758 case DW_OP_breg0:
30759 case DW_OP_breg1:
30760 case DW_OP_breg2:
30761 case DW_OP_breg3:
30762 case DW_OP_breg4:
30763 case DW_OP_breg5:
30764 case DW_OP_breg6:
30765 case DW_OP_breg7:
30766 case DW_OP_breg8:
30767 case DW_OP_breg9:
30768 case DW_OP_breg10:
30769 case DW_OP_breg11:
30770 case DW_OP_breg12:
30771 case DW_OP_breg13:
30772 case DW_OP_breg14:
30773 case DW_OP_breg15:
30774 case DW_OP_breg16:
30775 case DW_OP_breg17:
30776 case DW_OP_breg18:
30777 case DW_OP_breg19:
30778 case DW_OP_breg20:
30779 case DW_OP_breg21:
30780 case DW_OP_breg22:
30781 case DW_OP_breg23:
30782 case DW_OP_breg24:
30783 case DW_OP_breg25:
30784 case DW_OP_breg26:
30785 case DW_OP_breg27:
30786 case DW_OP_breg28:
30787 case DW_OP_breg29:
30788 case DW_OP_breg30:
30789 case DW_OP_breg31:
30790 case DW_OP_regx:
30791 case DW_OP_fbreg:
30792 case DW_OP_piece:
30793 case DW_OP_deref_size:
30794 case DW_OP_xderef_size:
30795 return valx1->v.val_int == valy1->v.val_int;
30796 case DW_OP_skip:
30797 case DW_OP_bra:
30798 /* If splitting debug info, the use of DW_OP_GNU_addr_index
30799 can cause irrelevant differences in dw_loc_addr. */
30800 gcc_assert (valx1->val_class == dw_val_class_loc
30801 && valy1->val_class == dw_val_class_loc
30802 && (dwarf_split_debug_info
30803 || x->dw_loc_addr == y->dw_loc_addr));
30804 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
30805 case DW_OP_implicit_value:
30806 if (valx1->v.val_unsigned != valy1->v.val_unsigned
30807 || valx2->val_class != valy2->val_class)
30808 return false;
30809 switch (valx2->val_class)
30810 {
30811 case dw_val_class_const:
30812 return valx2->v.val_int == valy2->v.val_int;
30813 case dw_val_class_vec:
30814 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30815 && valx2->v.val_vec.length == valy2->v.val_vec.length
30816 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30817 valx2->v.val_vec.elt_size
30818 * valx2->v.val_vec.length) == 0;
30819 case dw_val_class_const_double:
30820 return valx2->v.val_double.low == valy2->v.val_double.low
30821 && valx2->v.val_double.high == valy2->v.val_double.high;
30822 case dw_val_class_wide_int:
30823 return *valx2->v.val_wide == *valy2->v.val_wide;
30824 case dw_val_class_addr:
30825 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
30826 default:
30827 gcc_unreachable ();
30828 }
30829 case DW_OP_bregx:
30830 case DW_OP_bit_piece:
30831 return valx1->v.val_int == valy1->v.val_int
30832 && valx2->v.val_int == valy2->v.val_int;
30833 case DW_OP_addr:
30834 hash_addr:
30835 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
30836 case DW_OP_GNU_addr_index:
30837 case DW_OP_addrx:
30838 case DW_OP_GNU_const_index:
30839 case DW_OP_constx:
30840 {
30841 rtx ax1 = valx1->val_entry->addr.rtl;
30842 rtx ay1 = valy1->val_entry->addr.rtl;
30843 return rtx_equal_p (ax1, ay1);
30844 }
30845 case DW_OP_implicit_pointer:
30846 case DW_OP_GNU_implicit_pointer:
30847 return valx1->val_class == dw_val_class_die_ref
30848 && valx1->val_class == valy1->val_class
30849 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
30850 && valx2->v.val_int == valy2->v.val_int;
30851 case DW_OP_entry_value:
30852 case DW_OP_GNU_entry_value:
30853 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
30854 case DW_OP_const_type:
30855 case DW_OP_GNU_const_type:
30856 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
30857 || valx2->val_class != valy2->val_class)
30858 return false;
30859 switch (valx2->val_class)
30860 {
30861 case dw_val_class_const:
30862 return valx2->v.val_int == valy2->v.val_int;
30863 case dw_val_class_vec:
30864 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30865 && valx2->v.val_vec.length == valy2->v.val_vec.length
30866 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30867 valx2->v.val_vec.elt_size
30868 * valx2->v.val_vec.length) == 0;
30869 case dw_val_class_const_double:
30870 return valx2->v.val_double.low == valy2->v.val_double.low
30871 && valx2->v.val_double.high == valy2->v.val_double.high;
30872 case dw_val_class_wide_int:
30873 return *valx2->v.val_wide == *valy2->v.val_wide;
30874 default:
30875 gcc_unreachable ();
30876 }
30877 case DW_OP_regval_type:
30878 case DW_OP_deref_type:
30879 case DW_OP_GNU_regval_type:
30880 case DW_OP_GNU_deref_type:
30881 return valx1->v.val_int == valy1->v.val_int
30882 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
30883 case DW_OP_convert:
30884 case DW_OP_reinterpret:
30885 case DW_OP_GNU_convert:
30886 case DW_OP_GNU_reinterpret:
30887 if (valx1->val_class != valy1->val_class)
30888 return false;
30889 if (valx1->val_class == dw_val_class_unsigned_const)
30890 return valx1->v.val_unsigned == valy1->v.val_unsigned;
30891 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30892 case DW_OP_GNU_parameter_ref:
30893 return valx1->val_class == dw_val_class_die_ref
30894 && valx1->val_class == valy1->val_class
30895 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30896 default:
30897 /* Other codes have no operands. */
30898 return true;
30899 }
30900 }
30901
30902 /* Return true if DWARF location expressions X and Y are the same. */
30903
30904 static inline bool
30905 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
30906 {
30907 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
30908 if (x->dw_loc_opc != y->dw_loc_opc
30909 || x->dtprel != y->dtprel
30910 || !compare_loc_operands (x, y))
30911 break;
30912 return x == NULL && y == NULL;
30913 }
30914
30915 /* Hashtable helpers. */
30916
30917 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
30918 {
30919 static inline hashval_t hash (const dw_loc_list_struct *);
30920 static inline bool equal (const dw_loc_list_struct *,
30921 const dw_loc_list_struct *);
30922 };
30923
30924 /* Return precomputed hash of location list X. */
30925
30926 inline hashval_t
30927 loc_list_hasher::hash (const dw_loc_list_struct *x)
30928 {
30929 return x->hash;
30930 }
30931
30932 /* Return true if location lists A and B are the same. */
30933
30934 inline bool
30935 loc_list_hasher::equal (const dw_loc_list_struct *a,
30936 const dw_loc_list_struct *b)
30937 {
30938 if (a == b)
30939 return 1;
30940 if (a->hash != b->hash)
30941 return 0;
30942 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
30943 if (strcmp (a->begin, b->begin) != 0
30944 || strcmp (a->end, b->end) != 0
30945 || (a->section == NULL) != (b->section == NULL)
30946 || (a->section && strcmp (a->section, b->section) != 0)
30947 || a->vbegin != b->vbegin || a->vend != b->vend
30948 || !compare_locs (a->expr, b->expr))
30949 break;
30950 return a == NULL && b == NULL;
30951 }
30952
30953 typedef hash_table<loc_list_hasher> loc_list_hash_type;
30954
30955
30956 /* Recursively optimize location lists referenced from DIE
30957 children and share them whenever possible. */
30958
30959 static void
30960 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
30961 {
30962 dw_die_ref c;
30963 dw_attr_node *a;
30964 unsigned ix;
30965 dw_loc_list_struct **slot;
30966 bool drop_locviews = false;
30967 bool has_locviews = false;
30968
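/* If this DIE's location list loses its location views -- either
   because the list carries a view symbol but no views, or because it
   is replaced by an equivalent shared list without views -- the
   DW_AT_GNU_locviews attribute that referred to them must go too;
   track both conditions while walking the attributes.  */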
30969 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30970 if (AT_class (a) == dw_val_class_loc_list)
30971 {
30972 dw_loc_list_ref list = AT_loc_list (a);
30973 /* TODO: perform some optimizations here, before hashing
30974 it and storing into the hash table. */
30975 hash_loc_list (list);
30976 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
30977 if (*slot == NULL)
30978 {
30979 *slot = list;
30980 if (loc_list_has_views (list))
30981 gcc_assert (list->vl_symbol);
30982 else if (list->vl_symbol)
30983 {
30984 drop_locviews = true;
30985 list->vl_symbol = NULL;
30986 }
30987 }
30988 else
30989 {
30990 if (list->vl_symbol && !(*slot)->vl_symbol)
30991 drop_locviews = true;
30992 a->dw_attr_val.v.val_loc_list = *slot;
30993 }
30994 }
30995 else if (AT_class (a) == dw_val_class_view_list)
30996 {
30997 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30998 has_locviews = true;
30999 }
31000
31001
31002 if (drop_locviews && has_locviews)
31003 remove_AT (die, DW_AT_GNU_locviews);
31004
31005 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
31006 }
31007
31008
31009 /* Recursively assign each location list a unique index into the debug_addr
31010 section. */
31011
31012 static void
31013 index_location_lists (dw_die_ref die)
31014 {
31015 dw_die_ref c;
31016 dw_attr_node *a;
31017 unsigned ix;
31018
31019 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31020 if (AT_class (a) == dw_val_class_loc_list)
31021 {
31022 dw_loc_list_ref list = AT_loc_list (a);
31023 dw_loc_list_ref curr;
31024 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
31025 {
31026 /* Don't index an entry that has already been indexed
31027 or won't be output. Make sure skip_loc_list_entry doesn't
31028 call size_of_locs, because that might cause a circular dependency:
31029 index_location_lists requires address table indexes to be
31030 computed, but it adds new indexes through add_addr_table_entry,
31031 while address table index computation requires that no new
31032 entries be added to the hash table.  In the rare case of a
31033 DWARF[234] location expression >= 64KB, we'll just waste an
31034 unused address table entry for it. */
31035 if (curr->begin_entry != NULL
31036 || skip_loc_list_entry (curr))
31037 continue;
31038
31039 curr->begin_entry
31040 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
31041 }
31042 }
31043
31044 FOR_EACH_CHILD (die, c, index_location_lists (c));
31045 }
31046
31047 /* Optimize location lists referenced from DIE
31048 children and share them whenever possible. */
31049
31050 static void
31051 optimize_location_lists (dw_die_ref die)
31052 {
31053 loc_list_hash_type htab (500);
31054 optimize_location_lists_1 (die, &htab);
31055 }
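/* Once the lists are shared, every attribute that referred to a
   duplicate points at the single surviving copy, so each distinct
   location list only needs to be emitted (and labelled) once in
   .debug_loc/.debug_loclists.  */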
31056 \f
31057 /* Traverse the limbo die list, and add parent/child links. The only
31058 dies without parents that should be here are concrete instances of
31059 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
31060 For concrete instances, we can get the parent die from the abstract
31061 instance. */
31062
31063 static void
31064 flush_limbo_die_list (void)
31065 {
31066 limbo_die_node *node;
31067
31068 /* get_context_die calls force_decl_die, which can put new DIEs on the
31069 limbo list in LTO mode when nested functions are put in a different
31070 partition than that of their parent function. */
31071 while ((node = limbo_die_list))
31072 {
31073 dw_die_ref die = node->die;
31074 limbo_die_list = node->next;
31075
31076 if (die->die_parent == NULL)
31077 {
31078 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
31079
31080 if (origin && origin->die_parent)
31081 add_child_die (origin->die_parent, die);
31082 else if (is_cu_die (die))
31083 ;
31084 else if (seen_error ())
31085 /* It's OK to be confused by errors in the input. */
31086 add_child_die (comp_unit_die (), die);
31087 else
31088 {
31089 /* In certain situations, the lexical block containing a
31090 nested function can be optimized away, which results
31091 in the nested function die being orphaned. Likewise
31092 with the return type of that nested function. Force
31093 this to be a child of the containing function.
31094
31095 It may happen that even the containing function got fully
31096 inlined and optimized out. In that case we are lost and
31097 assign the empty child. This should not be a big issue, as
31098 the function is likely unreachable too. */
31099 gcc_assert (node->created_for);
31100
31101 if (DECL_P (node->created_for))
31102 origin = get_context_die (DECL_CONTEXT (node->created_for));
31103 else if (TYPE_P (node->created_for))
31104 origin = scope_die_for (node->created_for, comp_unit_die ());
31105 else
31106 origin = comp_unit_die ();
31107
31108 add_child_die (origin, die);
31109 }
31110 }
31111 }
31112 }
31113
31114 /* Reset DIEs so we can output them again. */
31115
31116 static void
31117 reset_dies (dw_die_ref die)
31118 {
31119 dw_die_ref c;
31120
31121 /* Remove stuff we re-generate. */
31122 die->die_mark = 0;
31123 die->die_offset = 0;
31124 die->die_abbrev = 0;
31125 remove_AT (die, DW_AT_sibling);
31126
31127 FOR_EACH_CHILD (die, c, reset_dies (c));
31128 }
31129
31130 /* Output stuff that dwarf requires at the end of every file,
31131 and generate the DWARF-2 debugging info. */
31132
31133 static void
31134 dwarf2out_finish (const char *filename)
31135 {
31136 comdat_type_node *ctnode;
31137 dw_die_ref main_comp_unit_die;
31138 unsigned char checksum[16];
31139 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31140
31141 /* Flush out any latecomers to the limbo party. */
31142 flush_limbo_die_list ();
31143
31144 if (inline_entry_data_table)
31145 gcc_assert (inline_entry_data_table->elements () == 0);
31146
31147 if (flag_checking)
31148 {
31149 verify_die (comp_unit_die ());
31150 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31151 verify_die (node->die);
31152 }
31153
31154 /* We shouldn't have any symbols with delayed asm names for
31155 DIEs generated after early finish. */
31156 gcc_assert (deferred_asm_name == NULL);
31157
31158 gen_remaining_tmpl_value_param_die_attribute ();
31159
31160 if (flag_generate_lto || flag_generate_offload)
31161 {
31162 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
31163
31164 /* Prune stuff so that dwarf2out_finish runs successfully
31165 for the fat part of the object. */
31166 reset_dies (comp_unit_die ());
31167 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31168 reset_dies (node->die);
31169
31170 hash_table<comdat_type_hasher> comdat_type_table (100);
31171 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31172 {
31173 comdat_type_node **slot
31174 = comdat_type_table.find_slot (ctnode, INSERT);
31175
31176 /* Don't reset types twice. */
31177 if (*slot != HTAB_EMPTY_ENTRY)
31178 continue;
31179
31180 /* Remove the pointer to the line table. */
31181 remove_AT (ctnode->root_die, DW_AT_stmt_list);
31182
31183 if (debug_info_level >= DINFO_LEVEL_TERSE)
31184 reset_dies (ctnode->root_die);
31185
31186 *slot = ctnode;
31187 }
31188
31189 /* Reset the CU DIE symbol so we don't output it twice. */
31190 comp_unit_die ()->die_id.die_symbol = NULL;
31191
31192 /* Remove DW_AT_macro and DW_AT_stmt_list from the early output. */
31193 remove_AT (comp_unit_die (), DW_AT_stmt_list);
31194 if (have_macinfo)
31195 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
31196
31197 /* Remove indirect string decisions. */
31198 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
31199 if (debug_line_str_hash)
31200 {
31201 debug_line_str_hash->traverse<void *, reset_indirect_string> (NULL);
31202 debug_line_str_hash = NULL;
31203 }
31204 }
31205
31206 #if ENABLE_ASSERT_CHECKING
31207 {
31208 dw_die_ref die = comp_unit_die (), c;
31209 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
31210 }
31211 #endif
31212 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31213 resolve_addr (ctnode->root_die);
31214 resolve_addr (comp_unit_die ());
31215 move_marked_base_types ();
31216
31217 if (dump_file)
31218 {
31219 fprintf (dump_file, "DWARF for %s\n", filename);
31220 print_die (comp_unit_die (), dump_file);
31221 }
31222
31223 /* Initialize sections and labels used for actual assembler output. */
31224 unsigned generation = init_sections_and_labels (false);
31225
31226 /* Traverse the DIE's and add sibling attributes to those DIE's that
31227 have children. */
31228 add_sibling_attributes (comp_unit_die ());
31229 limbo_die_node *node;
31230 for (node = cu_die_list; node; node = node->next)
31231 add_sibling_attributes (node->die);
31232 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31233 add_sibling_attributes (ctnode->root_die);
31234
31235 /* When splitting DWARF info, we put some attributes in the
31236 skeleton compile_unit DIE that remains in the .o, while
31237 most attributes go in the DWO compile_unit_die. */
31238 if (dwarf_split_debug_info)
31239 {
31240 limbo_die_node *cu;
31241 main_comp_unit_die = gen_compile_unit_die (NULL);
31242 if (dwarf_version >= 5)
31243 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
31244 cu = limbo_die_list;
31245 gcc_assert (cu->die == main_comp_unit_die);
31246 limbo_die_list = limbo_die_list->next;
31247 cu->next = cu_die_list;
31248 cu_die_list = cu;
31249 }
31250 else
31251 main_comp_unit_die = comp_unit_die ();
31252
31253 /* Output a terminator label for the .text section. */
31254 switch_to_section (text_section);
31255 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
31256 if (cold_text_section)
31257 {
31258 switch_to_section (cold_text_section);
31259 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
31260 }
31261
31262 /* We can only use the low/high_pc attributes if all of the code was
31263 in .text. */
31264 if (!have_multiple_function_sections
31265 || (dwarf_version < 3 && dwarf_strict))
31266 {
31267 /* Don't add if the CU has no associated code. */
31268 if (text_section_used)
31269 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31270 text_end_label, true);
31271 }
31272 else
31273 {
31274 unsigned fde_idx;
31275 dw_fde_ref fde;
31276 bool range_list_added = false;
31277
31278 if (text_section_used)
31279 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31280 text_end_label, &range_list_added, true);
31281 if (cold_text_section_used)
31282 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31283 cold_end_label, &range_list_added, true);
31284
31285 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31286 {
31287 if (DECL_IGNORED_P (fde->decl))
31288 continue;
31289 if (!fde->in_std_section)
31290 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31291 fde->dw_fde_end, &range_list_added,
31292 true);
31293 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31294 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31295 fde->dw_fde_second_end, &range_list_added,
31296 true);
31297 }
31298
31299 if (range_list_added)
31300 {
31301 /* We need to give .debug_loc and .debug_ranges an appropriate
31302 "base address". Use zero so that these addresses become
31303 absolute. Historically, we've emitted the unexpected
31304 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31305 Emit both to give time for other tools to adapt. */
31306 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31307 if (! dwarf_strict && dwarf_version < 4)
31308 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31309
31310 add_ranges (NULL);
31311 }
31312 }
31313
31314 /* AIX Assembler inserts the length, so adjust the reference to match the
31315 offset expected by debuggers. */
31316 strcpy (dl_section_ref, debug_line_section_label);
31317 if (XCOFF_DEBUGGING_INFO)
31318 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31319
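/* Point the compile unit (or, with -gsplit-dwarf, the skeleton unit)
   at the line table so consumers can make sense of DW_AT_decl_file
   attributes.  */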
31320 if (debug_info_level >= DINFO_LEVEL_TERSE)
31321 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31322 dl_section_ref);
31323
31324 if (have_macinfo)
31325 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31326 macinfo_section_label);
31327
31328 if (dwarf_split_debug_info)
31329 {
31330 if (have_location_lists)
31331 {
31332 /* Since we generate the loclists in the split DWARF .dwo
31333 file itself, we don't need to generate a loclists_base
31334 attribute for the split compile unit DIE. That attribute
31335 (and using relocatable sec_offset FORMs) isn't allowed
31336 for a split compile unit. Only if the .debug_loclists
31337 section was in the main file, would we need to generate a
31338 loclists_base attribute here (for the full or skeleton
31339 unit DIE). */
31340
31341 /* optimize_location_lists calculates the size of the lists,
31342 so index them first, and assign indices to the entries.
31343 Although optimize_location_lists will remove entries from
31344 the table, it only does so for duplicates, and therefore
31345 only reduces ref_counts to 1. */
31346 index_location_lists (comp_unit_die ());
31347 }
31348
31349 if (addr_index_table != NULL)
31350 {
31351 unsigned int index = 0;
31352 addr_index_table
31353 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31354 (&index);
31355 }
31356 }
31357
31358 loc_list_idx = 0;
31359 if (have_location_lists)
31360 {
31361 optimize_location_lists (comp_unit_die ());
31362 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31363 if (dwarf_version >= 5 && dwarf_split_debug_info)
31364 assign_location_list_indexes (comp_unit_die ());
31365 }
31366
31367 save_macinfo_strings ();
31368
31369 if (dwarf_split_debug_info)
31370 {
31371 unsigned int index = 0;
31372
31373 /* Add attributes common to skeleton compile_units and
31374 type_units. Because these attributes include strings, it
31375 must be done before freezing the string table. Top-level
31376 skeleton die attrs are added when the skeleton type unit is
31377 created, so ensure it is created by this point. */
31378 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31379 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31380 }
31381
31382 /* Output all of the compilation units. We put the main one last so that
31383 the offsets are available to output_pubnames. */
31384 for (node = cu_die_list; node; node = node->next)
31385 output_comp_unit (node->die, 0, NULL);
31386
31387 hash_table<comdat_type_hasher> comdat_type_table (100);
31388 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31389 {
31390 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31391
31392 /* Don't output duplicate types. */
31393 if (*slot != HTAB_EMPTY_ENTRY)
31394 continue;
31395
31396 /* Add a pointer to the line table for the main compilation unit
31397 so that the debugger can make sense of DW_AT_decl_file
31398 attributes. */
31399 if (debug_info_level >= DINFO_LEVEL_TERSE)
31400 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31401 (!dwarf_split_debug_info
31402 ? dl_section_ref
31403 : debug_skeleton_line_section_label));
31404
31405 output_comdat_type_unit (ctnode);
31406 *slot = ctnode;
31407 }
31408
31409 if (dwarf_split_debug_info)
31410 {
31411 int mark;
31412 struct md5_ctx ctx;
31413
31414 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31415 index_rnglists ();
31416
31417 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31418 md5_init_ctx (&ctx);
31419 mark = 0;
31420 die_checksum (comp_unit_die (), &ctx, &mark);
31421 unmark_all_dies (comp_unit_die ());
31422 md5_finish_ctx (&ctx, checksum);
31423
31424 if (dwarf_version < 5)
31425 {
31426 /* Use the first 8 bytes of the checksum as the dwo_id,
31427 and add it to both comp-unit DIEs. */
31428 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31429 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31430 }
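/* For DWARF 5 the dwo_id is not an attribute; the checksum is instead
   passed down to output_comp_unit/output_skeleton_debug_sections
   below so that it can be emitted in the unit headers.  */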
31431
31432 /* Add the base offset of the ranges table to the skeleton
31433 comp-unit DIE. */
31434 if (!vec_safe_is_empty (ranges_table))
31435 {
31436 if (dwarf_version >= 5)
31437 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31438 ranges_base_label);
31439 else
31440 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31441 ranges_section_label);
31442 }
31443
31444 switch_to_section (debug_addr_section);
31445 /* GNU DebugFission (https://gcc.gnu.org/wiki/DebugFission),
31446 which GCC uses to implement -gsplit-dwarf as a DWARF GNU extension
31447 before DWARF5, didn't have a header for .debug_addr units.
31448 DWARF5 specifies a small header when address tables are used. */
31449 if (dwarf_version >= 5)
31450 {
31451 unsigned int last_idx = 0;
31452 unsigned long addrs_length;
31453
31454 addr_index_table->traverse_noresize
31455 <unsigned int *, count_index_addrs> (&last_idx);
31456 addrs_length = last_idx * DWARF2_ADDR_SIZE + 4;
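/* The extra 4 bytes cover the version, address size and segment
   selector size fields that follow the initial length.  */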
31457
31458 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31459 dw2_asm_output_data (4, 0xffffffff,
31460 "Escape value for 64-bit DWARF extension");
31461 dw2_asm_output_data (DWARF_OFFSET_SIZE, addrs_length,
31462 "Length of Address Unit");
31463 dw2_asm_output_data (2, 5, "DWARF addr version");
31464 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
31465 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
31466 }
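/* On a 64-bit target with 32-bit DWARF the result looks roughly like
   this (illustrative only -- directives and label names vary by
   target; "sym1"/"sym2" stand for two table entries):

	.long	0x14		# Length of Address Unit (2 entries)
	.value	0x5		# DWARF addr version
	.byte	0x8		# Size of Address
	.byte	0		# Size of Segment Descriptor
   .Ldebug_addr0:
	.quad	sym1
	.quad	sym2  */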
31467 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
31468 output_addr_table ();
31469 }
31470
31471 /* Output the main compilation unit if non-empty or if .debug_macinfo
31472 or .debug_macro will be emitted. */
31473 output_comp_unit (comp_unit_die (), have_macinfo,
31474 dwarf_split_debug_info ? checksum : NULL);
31475
31476 if (dwarf_split_debug_info && info_section_emitted)
31477 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31478
31479 /* Output the abbreviation table. */
31480 if (vec_safe_length (abbrev_die_table) != 1)
31481 {
31482 switch_to_section (debug_abbrev_section);
31483 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31484 output_abbrev_section ();
31485 }
31486
31487 /* Output location list section if necessary. */
31488 if (have_location_lists)
31489 {
31490 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31491 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31492 /* Output the location lists info. */
31493 switch_to_section (debug_loc_section);
31494 if (dwarf_version >= 5)
31495 {
31496 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 2);
31497 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 3);
31498 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31499 dw2_asm_output_data (4, 0xffffffff,
31500 "Initial length escape value indicating "
31501 "64-bit DWARF extension");
31502 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
31503 "Length of Location Lists");
31504 ASM_OUTPUT_LABEL (asm_out_file, l1);
31505 output_dwarf_version ();
31506 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31507 dw2_asm_output_data (1, 0, "Segment Size");
31508 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31509 "Offset Entry Count");
31510 }
31511 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
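/* With -gsplit-dwarf the DWARF 5 header is followed by the offset
   entry table: one DWARF_OFFSET_SIZE slot per location list, which
   DW_FORM_loclistx values index.  The assert below checks that the
   number of offsets emitted matches the number of indexes assigned
   earlier by assign_location_list_indexes.  */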
31512 if (dwarf_version >= 5 && dwarf_split_debug_info)
31513 {
31514 unsigned int save_loc_list_idx = loc_list_idx;
31515 loc_list_idx = 0;
31516 output_loclists_offsets (comp_unit_die ());
31517 gcc_assert (save_loc_list_idx == loc_list_idx);
31518 }
31519 output_location_lists (comp_unit_die ());
31520 if (dwarf_version >= 5)
31521 ASM_OUTPUT_LABEL (asm_out_file, l2);
31522 }
31523
31524 output_pubtables ();
31525
31526 /* Output the address range information if a CU (.debug_info section)
31527 was emitted. We output an empty table even if we had no functions
31528 to put in it. This is because the consumer has no way to tell the
31529 difference between an empty table that we omitted and failure to
31530 generate a table that would have contained data. */
31531 if (info_section_emitted)
31532 {
31533 switch_to_section (debug_aranges_section);
31534 output_aranges ();
31535 }
31536
31537 /* Output ranges section if necessary. */
31538 if (!vec_safe_is_empty (ranges_table))
31539 {
31540 if (dwarf_version >= 5)
31541 output_rnglists (generation);
31542 else
31543 output_ranges ();
31544 }
31545
31546 /* Have to end the macro section. */
31547 if (have_macinfo)
31548 {
31549 switch_to_section (debug_macinfo_section);
31550 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31551 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31552 : debug_skeleton_line_section_label, false);
31553 dw2_asm_output_data (1, 0, "End compilation unit");
31554 }
31555
31556 /* Output the source line correspondence table. We must do this
31557 even if there is no line information. Otherwise, on an empty
31558 translation unit, we will generate a present, but empty,
31559 .debug_info section. IRIX 6.5 `nm' will then complain when
31560 examining the file. This is done late so that any filenames
31561 used by the debug_info section are marked as 'used'. */
31562 switch_to_section (debug_line_section);
31563 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31564 if (! output_asm_line_debug_info ())
31565 output_line_info (false);
31566
31567 if (dwarf_split_debug_info && info_section_emitted)
31568 {
31569 switch_to_section (debug_skeleton_line_section);
31570 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31571 output_line_info (true);
31572 }
31573
31574 /* If we emitted any indirect strings, output the string table too. */
31575 if (debug_str_hash || skeleton_debug_str_hash)
31576 output_indirect_strings ();
31577 if (debug_line_str_hash)
31578 {
31579 switch_to_section (debug_line_str_section);
31580 const enum dwarf_form form = DW_FORM_line_strp;
31581 debug_line_str_hash->traverse<enum dwarf_form,
31582 output_indirect_string> (form);
31583 }
31584
31585 /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */
31586 symview_upper_bound = 0;
31587 if (zero_view_p)
31588 bitmap_clear (zero_view_p);
31589 }
31590
31591 /* Returns a hash value for X (which really is a variable_value_struct). */
31592
31593 inline hashval_t
31594 variable_value_hasher::hash (variable_value_struct *x)
31595 {
31596 return (hashval_t) x->decl_id;
31597 }
31598
31599 /* Return nonzero if decl_id of variable_value_struct X is the same as
31600 UID of decl Y. */
31601
31602 inline bool
31603 variable_value_hasher::equal (variable_value_struct *x, tree y)
31604 {
31605 return x->decl_id == DECL_UID (y);
31606 }
31607
31608 /* Helper function for resolve_variable_value, handle
31609 DW_OP_GNU_variable_value in one location expression.
31610 Return true if the exprloc has been changed into a loclist. */
31611
31612 static bool
31613 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31614 {
31615 dw_loc_descr_ref next;
31616 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31617 {
31618 next = loc->dw_loc_next;
31619 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31620 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31621 continue;
31622
31623 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31624 if (DECL_CONTEXT (decl) != current_function_decl)
31625 continue;
31626
31627 dw_die_ref ref = lookup_decl_die (decl);
31628 if (ref)
31629 {
31630 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31631 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31632 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31633 continue;
31634 }
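/* No DIE for the variable yet; try to expand its location directly.
   A multi-entry result means the whole attribute has to become a
   location list (or a DIE reference), handled below; a single
   expression can simply be spliced in place of the
   DW_OP_GNU_variable_value operation.  */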
31635 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31636 if (l == NULL)
31637 continue;
31638 if (l->dw_loc_next)
31639 {
31640 if (AT_class (a) != dw_val_class_loc)
31641 continue;
31642 switch (a->dw_attr)
31643 {
31644 /* The following attributes allow both exprloc and loclist
31645 classes, so we can change them into a loclist. */
31646 case DW_AT_location:
31647 case DW_AT_string_length:
31648 case DW_AT_return_addr:
31649 case DW_AT_data_member_location:
31650 case DW_AT_frame_base:
31651 case DW_AT_segment:
31652 case DW_AT_static_link:
31653 case DW_AT_use_location:
31654 case DW_AT_vtable_elem_location:
31655 if (prev)
31656 {
31657 prev->dw_loc_next = NULL;
31658 prepend_loc_descr_to_each (l, AT_loc (a));
31659 }
31660 if (next)
31661 add_loc_descr_to_each (l, next);
31662 a->dw_attr_val.val_class = dw_val_class_loc_list;
31663 a->dw_attr_val.val_entry = NULL;
31664 a->dw_attr_val.v.val_loc_list = l;
31665 have_location_lists = true;
31666 return true;
31667 /* The following attributes allow both exprloc and reference,
31668 so if the whole expression is DW_OP_GNU_variable_value alone
31669 we could transform it into a reference. */
31670 case DW_AT_byte_size:
31671 case DW_AT_bit_size:
31672 case DW_AT_lower_bound:
31673 case DW_AT_upper_bound:
31674 case DW_AT_bit_stride:
31675 case DW_AT_count:
31676 case DW_AT_allocated:
31677 case DW_AT_associated:
31678 case DW_AT_byte_stride:
31679 if (prev == NULL && next == NULL)
31680 break;
31681 /* FALLTHRU */
31682 default:
31683 if (dwarf_strict)
31684 continue;
31685 break;
31686 }
31687 /* Create DW_TAG_variable that we can refer to. */
31688 gen_decl_die (decl, NULL_TREE, NULL,
31689 lookup_decl_die (current_function_decl));
31690 ref = lookup_decl_die (decl);
31691 if (ref)
31692 {
31693 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31694 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31695 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31696 }
31697 continue;
31698 }
31699 if (prev)
31700 {
31701 prev->dw_loc_next = l->expr;
31702 add_loc_descr (&prev->dw_loc_next, next);
31703 free_loc_descr (loc, NULL);
31704 next = prev->dw_loc_next;
31705 }
31706 else
31707 {
31708 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31709 add_loc_descr (&loc, next);
31710 next = loc;
31711 }
31712 loc = prev;
31713 }
31714 return false;
31715 }
31716
31717 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31718
31719 static void
31720 resolve_variable_value (dw_die_ref die)
31721 {
31722 dw_attr_node *a;
31723 dw_loc_list_ref loc;
31724 unsigned ix;
31725
31726 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31727 switch (AT_class (a))
31728 {
31729 case dw_val_class_loc:
31730 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
31731 break;
31732 /* FALLTHRU */
31733 case dw_val_class_loc_list:
31734 loc = AT_loc_list (a);
31735 gcc_assert (loc);
31736 for (; loc; loc = loc->dw_loc_next)
31737 resolve_variable_value_in_expr (a, loc->expr);
31738 break;
31739 default:
31740 break;
31741 }
31742 }
31743
31744 /* Attempt to optimize DW_OP_GNU_variable_value referring to
31745 temporaries in the current function. */
31746
31747 static void
31748 resolve_variable_values (void)
31749 {
31750 if (!variable_value_hash || !current_function_decl)
31751 return;
31752
31753 struct variable_value_struct *node
31754 = variable_value_hash->find_with_hash (current_function_decl,
31755 DECL_UID (current_function_decl));
31756
31757 if (node == NULL)
31758 return;
31759
31760 unsigned int i;
31761 dw_die_ref die;
31762 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
31763 resolve_variable_value (die);
31764 }
31765
31766 /* Helper function for note_variable_value, handle one location
31767 expression. */
31768
31769 static void
31770 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
31771 {
31772 for (; loc; loc = loc->dw_loc_next)
31773 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
31774 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
31775 {
31776 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31777 dw_die_ref ref = lookup_decl_die (decl);
31778 if (! ref && (flag_generate_lto || flag_generate_offload))
31779 {
31780 /* ??? This is somewhat of a hack: we do not create DIEs
31781 for variables not in BLOCK trees early, but when generating
31782 early LTO output we need the dw_val_class_decl_ref to be
31783 fully resolved. For fat LTO objects we'd also like to
31784 undo this after LTO dwarf output. */
31785 gcc_assert (DECL_CONTEXT (decl));
31786 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
31787 gcc_assert (ctx != NULL);
31788 gen_decl_die (decl, NULL_TREE, NULL, ctx);
31789 ref = lookup_decl_die (decl);
31790 gcc_assert (ref != NULL);
31791 }
31792 if (ref)
31793 {
31794 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31795 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31796 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31797 continue;
31798 }
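/* Still unresolved.  If the variable lives in a function that
   already has a DIE, queue the referring DIE so that
   resolve_variable_values can retry while that function is being
   compiled.  */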
31799 if (VAR_P (decl)
31800 && DECL_CONTEXT (decl)
31801 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
31802 && lookup_decl_die (DECL_CONTEXT (decl)))
31803 {
31804 if (!variable_value_hash)
31805 variable_value_hash
31806 = hash_table<variable_value_hasher>::create_ggc (10);
31807
31808 tree fndecl = DECL_CONTEXT (decl);
31809 struct variable_value_struct *node;
31810 struct variable_value_struct **slot
31811 = variable_value_hash->find_slot_with_hash (fndecl,
31812 DECL_UID (fndecl),
31813 INSERT);
31814 if (*slot == NULL)
31815 {
31816 node = ggc_cleared_alloc<variable_value_struct> ();
31817 node->decl_id = DECL_UID (fndecl);
31818 *slot = node;
31819 }
31820 else
31821 node = *slot;
31822
31823 vec_safe_push (node->dies, die);
31824 }
31825 }
31826 }
31827
31828 /* Walk the tree DIE and note DIEs with DW_OP_GNU_variable_value still
31829 with dw_val_class_decl_ref operand. */
31830
31831 static void
31832 note_variable_value (dw_die_ref die)
31833 {
31834 dw_die_ref c;
31835 dw_attr_node *a;
31836 dw_loc_list_ref loc;
31837 unsigned ix;
31838
31839 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31840 switch (AT_class (a))
31841 {
31842 case dw_val_class_loc_list:
31843 loc = AT_loc_list (a);
31844 gcc_assert (loc);
31845 if (!loc->noted_variable_value)
31846 {
31847 loc->noted_variable_value = 1;
31848 for (; loc; loc = loc->dw_loc_next)
31849 note_variable_value_in_expr (die, loc->expr);
31850 }
31851 break;
31852 case dw_val_class_loc:
31853 note_variable_value_in_expr (die, AT_loc (a));
31854 break;
31855 default:
31856 break;
31857 }
31858
31859 /* Mark children. */
31860 FOR_EACH_CHILD (die, c, note_variable_value (c));
31861 }
31862
31863 /* Perform any cleanups needed after the early debug generation pass
31864 has run. */
31865
31866 static void
31867 dwarf2out_early_finish (const char *filename)
31868 {
31869 set_early_dwarf s;
31870 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31871
31872 /* PCH might result in DW_AT_producer string being restored from the
31873 header compilation, so always fill it with an empty string initially
31874 and overwrite only here. */
31875 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
31876 producer_string = gen_producer_string ();
31877 producer->dw_attr_val.v.val_str->refcount--;
31878 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
31879
31880 /* Add the name for the main input file now. We delayed this from
31881 dwarf2out_init to avoid complications with PCH. */
31882 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
31883 add_comp_dir_attribute (comp_unit_die ());
31884
31885 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
31886 DW_AT_comp_dir into .debug_line_str section. */
31887 if (!output_asm_line_debug_info ()
31888 && dwarf_version >= 5
31889 && DWARF5_USE_DEBUG_LINE_STR)
31890 {
31891 for (int i = 0; i < 2; i++)
31892 {
31893 dw_attr_node *a = get_AT (comp_unit_die (),
31894 i ? DW_AT_comp_dir : DW_AT_name);
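/* Leave the attribute alone if it is absent, not a string, or so
   short that the inline string is no larger than a
   DWARF_OFFSET_SIZE reference into .debug_line_str.  */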
31895 if (a == NULL
31896 || AT_class (a) != dw_val_class_str
31897 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
31898 continue;
31899
31900 if (! debug_line_str_hash)
31901 debug_line_str_hash
31902 = hash_table<indirect_string_hasher>::create_ggc (10);
31903
31904 struct indirect_string_node *node
31905 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
31906 set_indirect_string (node);
31907 node->form = DW_FORM_line_strp;
31908 a->dw_attr_val.v.val_str->refcount--;
31909 a->dw_attr_val.v.val_str = node;
31910 }
31911 }
31912
31913 /* With LTO early dwarf was really finished at compile-time, so make
31914 sure to adjust the phase after annotating the LTRANS CU DIE. */
31915 if (in_lto_p)
31916 {
31917 /* Force DW_TAG_imported_unit to be created now; otherwise
31918 we might end up without it, or with it ordered after a
31919 DW_TAG_inlined_subroutine that references DIEs from it. */
31920 if (! flag_wpa && flag_incremental_link != INCREMENTAL_LINK_LTO)
31921 {
31922 unsigned i;
31923 tree tu;
31924 if (external_die_map)
31925 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, tu)
31926 if (sym_off_pair *desc = external_die_map->get (tu))
31927 {
31928 dw_die_ref import = new_die (DW_TAG_imported_unit,
31929 comp_unit_die (), NULL_TREE);
31930 add_AT_external_die_ref (import, DW_AT_import,
31931 desc->sym, desc->off);
31932 }
31933 }
31934
31935 early_dwarf_finished = true;
31936 if (dump_file)
31937 {
31938 fprintf (dump_file, "LTO EARLY DWARF for %s\n", filename);
31939 print_die (comp_unit_die (), dump_file);
31940 }
31941 return;
31942 }
31943
31944 /* Walk through the list of incomplete types again, trying once more to
31945 emit full debugging info for them. */
31946 retry_incomplete_types ();
31947
31948 /* The point here is to flush out the limbo list so that it is empty
31949 and we don't need to stream it for LTO. */
31950 flush_limbo_die_list ();
31951
31952 gen_scheduled_generic_parms_dies ();
31953 gen_remaining_tmpl_value_param_die_attribute ();
31954
31955 /* Add DW_AT_linkage_name for all deferred DIEs. */
31956 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
31957 {
31958 tree decl = node->created_for;
31959 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
31960 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
31961 ended up in deferred_asm_name before we knew it was
31962 constant and never written to disk. */
31963 && DECL_ASSEMBLER_NAME (decl))
31964 {
31965 add_linkage_attr (node->die, decl);
31966 move_linkage_attr (node->die);
31967 }
31968 }
31969 deferred_asm_name = NULL;
31970
31971 if (flag_eliminate_unused_debug_types)
31972 prune_unused_types ();
31973
31974 /* Generate separate COMDAT sections for type DIEs. */
31975 if (use_debug_types)
31976 {
31977 break_out_comdat_types (comp_unit_die ());
31978
31979 /* Each new type_unit DIE was added to the limbo die list when created.
31980 Since these have all been added to comdat_type_list, clear the
31981 limbo die list. */
31982 limbo_die_list = NULL;
31983
31984 /* For each new comdat type unit, copy declarations for incomplete
31985 types to make the new unit self-contained (i.e., no direct
31986 references to the main compile unit). */
31987 for (comdat_type_node *ctnode = comdat_type_list;
31988 ctnode != NULL; ctnode = ctnode->next)
31989 copy_decls_for_unworthy_types (ctnode->root_die);
31990 copy_decls_for_unworthy_types (comp_unit_die ());
31991
31992 /* In the process of copying declarations from one unit to another,
31993 we may have left some declarations behind that are no longer
31994 referenced. Prune them. */
31995 prune_unused_types ();
31996 }
31997
31998 /* Traverse the DIE's and note DIEs with DW_OP_GNU_variable_value still
31999 with dw_val_class_decl_ref operand. */
32000 note_variable_value (comp_unit_die ());
32001 for (limbo_die_node *node = cu_die_list; node; node = node->next)
32002 note_variable_value (node->die);
32003 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
32004 ctnode = ctnode->next)
32005 note_variable_value (ctnode->root_die);
32006 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32007 note_variable_value (node->die);
32008
32009 /* The AT_pubnames attribute needs to go in all skeleton dies, including
32010 both the main_cu and all skeleton TUs. Making this call unconditional
32011 would end up either adding a second copy of the AT_pubnames attribute, or
32012 requiring a special case in add_top_level_skeleton_die_attrs. */
32013 if (!dwarf_split_debug_info)
32014 add_AT_pubnames (comp_unit_die ());
32015
32016 /* The early debug phase is now finished. */
32017 early_dwarf_finished = true;
32018 if (dump_file)
32019 {
32020 fprintf (dump_file, "EARLY DWARF for %s\n", filename);
32021 print_die (comp_unit_die (), dump_file);
32022 }
32023
32024 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
32025 if ((!flag_generate_lto && !flag_generate_offload)
32026 /* FIXME: Disable debug info generation for (PE-)COFF targets since the
32027 copy_lto_debug_sections operation of the simple object support in
32028 libiberty is not implemented for them yet. */
32029 || TARGET_PECOFF || TARGET_COFF)
32030 return;
32031
32032 /* Now that we are going to output for LTO, initialize sections and
32033 labels to the LTO variants.  We don't need a random-seed postfix as
32034 other LTO sections do, since linking the LTO debug sections into
32035 one in a partial link is fine. */
32036 init_sections_and_labels (true);
32037
32038 /* The output below is modeled after dwarf2out_finish with all
32039 location related output removed and some LTO specific changes.
32040 Some refactoring might make both smaller and easier to match up. */
32041
32042 /* Traverse the DIE's and add sibling attributes to those DIE's
32043 that have children. */
32044 add_sibling_attributes (comp_unit_die ());
32045 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32046 add_sibling_attributes (node->die);
32047 for (comdat_type_node *ctnode = comdat_type_list;
32048 ctnode != NULL; ctnode = ctnode->next)
32049 add_sibling_attributes (ctnode->root_die);
32050
32051 /* AIX Assembler inserts the length, so adjust the reference to match the
32052 offset expected by debuggers. */
32053 strcpy (dl_section_ref, debug_line_section_label);
32054 if (XCOFF_DEBUGGING_INFO)
32055 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
32056
32057 if (debug_info_level >= DINFO_LEVEL_TERSE)
32058 add_AT_lineptr (comp_unit_die (), DW_AT_stmt_list, dl_section_ref);
32059
32060 if (have_macinfo)
32061 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
32062 macinfo_section_label);
32063
32064 save_macinfo_strings ();
32065
32066 if (dwarf_split_debug_info)
32067 {
32068 unsigned int index = 0;
32069 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
32070 }
32071
32072 /* Output all of the compilation units. We put the main one last so that
32073 the offsets are available to output_pubnames. */
32074 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32075 output_comp_unit (node->die, 0, NULL);
32076
32077 hash_table<comdat_type_hasher> comdat_type_table (100);
32078 for (comdat_type_node *ctnode = comdat_type_list;
32079 ctnode != NULL; ctnode = ctnode->next)
32080 {
32081 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
32082
32083 /* Don't output duplicate types. */
32084 if (*slot != HTAB_EMPTY_ENTRY)
32085 continue;
32086
32087 /* Add a pointer to the line table for the main compilation unit
32088 so that the debugger can make sense of DW_AT_decl_file
32089 attributes. */
32090 if (debug_info_level >= DINFO_LEVEL_TERSE)
32091 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
32092 (!dwarf_split_debug_info
32093 ? debug_line_section_label
32094 : debug_skeleton_line_section_label));
32095
32096 output_comdat_type_unit (ctnode);
32097 *slot = ctnode;
32098 }
32099
32100 /* Stick a unique symbol to the main debuginfo section. */
32101 compute_comp_unit_symbol (comp_unit_die ());
32102
32103 /* Output the main compilation unit. We always need it if only for
32104 the CU symbol. */
32105 output_comp_unit (comp_unit_die (), true, NULL);
32106
32107 /* Output the abbreviation table. */
32108 if (vec_safe_length (abbrev_die_table) != 1)
32109 {
32110 switch_to_section (debug_abbrev_section);
32111 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
32112 output_abbrev_section ();
32113 }
32114
32115 /* Have to end the macro section. */
32116 if (have_macinfo)
32117 {
32118 /* We have to save macinfo state if we need to output it again
32119 for the FAT part of the object. */
32120 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
32121 if (flag_fat_lto_objects)
32122 macinfo_table = macinfo_table->copy ();
32123
32124 switch_to_section (debug_macinfo_section);
32125 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
32126 output_macinfo (debug_line_section_label, true);
32127 dw2_asm_output_data (1, 0, "End compilation unit");
32128
32129 if (flag_fat_lto_objects)
32130 {
32131 vec_free (macinfo_table);
32132 macinfo_table = saved_macinfo_table;
32133 }
32134 }
32135
32136 /* Emit a skeleton debug_line section. */
32137 switch_to_section (debug_line_section);
32138 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
32139 output_line_info (true);
32140
32141 /* If we emitted any indirect strings, output the string table too. */
32142 if (debug_str_hash || skeleton_debug_str_hash)
32143 output_indirect_strings ();
32144 if (debug_line_str_hash)
32145 {
32146 switch_to_section (debug_line_str_section);
32147 const enum dwarf_form form = DW_FORM_line_strp;
32148 debug_line_str_hash->traverse<enum dwarf_form,
32149 output_indirect_string> (form);
32150 }
32151
32152 /* Switch back to the text section. */
32153 switch_to_section (text_section);
32154 }
32155
32156 /* Reset all state within dwarf2out.c so that we can rerun the compiler
32157 within the same process. For use by toplev::finalize. */
32158
32159 void
32160 dwarf2out_c_finalize (void)
32161 {
32162 last_var_location_insn = NULL;
32163 cached_next_real_insn = NULL;
32164 used_rtx_array = NULL;
32165 incomplete_types = NULL;
32166 debug_info_section = NULL;
32167 debug_skeleton_info_section = NULL;
32168 debug_abbrev_section = NULL;
32169 debug_skeleton_abbrev_section = NULL;
32170 debug_aranges_section = NULL;
32171 debug_addr_section = NULL;
32172 debug_macinfo_section = NULL;
32173 debug_line_section = NULL;
32174 debug_skeleton_line_section = NULL;
32175 debug_loc_section = NULL;
32176 debug_pubnames_section = NULL;
32177 debug_pubtypes_section = NULL;
32178 debug_str_section = NULL;
32179 debug_line_str_section = NULL;
32180 debug_str_dwo_section = NULL;
32181 debug_str_offsets_section = NULL;
32182 debug_ranges_section = NULL;
32183 debug_frame_section = NULL;
32184 fde_vec = NULL;
32185 debug_str_hash = NULL;
32186 debug_line_str_hash = NULL;
32187 skeleton_debug_str_hash = NULL;
32188 dw2_string_counter = 0;
32189 have_multiple_function_sections = false;
32190 text_section_used = false;
32191 cold_text_section_used = false;
32192 cold_text_section = NULL;
32193 current_unit_personality = NULL;
32194
32195 early_dwarf = false;
32196 early_dwarf_finished = false;
32197
32198 next_die_offset = 0;
32199 single_comp_unit_die = NULL;
32200 comdat_type_list = NULL;
32201 limbo_die_list = NULL;
32202 file_table = NULL;
32203 decl_die_table = NULL;
32204 common_block_die_table = NULL;
32205 decl_loc_table = NULL;
32206 call_arg_locations = NULL;
32207 call_arg_loc_last = NULL;
32208 call_site_count = -1;
32209 tail_call_site_count = -1;
32210 cached_dw_loc_list_table = NULL;
32211 abbrev_die_table = NULL;
32212 delete dwarf_proc_stack_usage_map;
32213 dwarf_proc_stack_usage_map = NULL;
32214 line_info_label_num = 0;
32215 cur_line_info_table = NULL;
32216 text_section_line_info = NULL;
32217 cold_text_section_line_info = NULL;
32218 separate_line_info = NULL;
32219 info_section_emitted = false;
32220 pubname_table = NULL;
32221 pubtype_table = NULL;
32222 macinfo_table = NULL;
32223 ranges_table = NULL;
32224 ranges_by_label = NULL;
32225 rnglist_idx = 0;
32226 have_location_lists = false;
32227 loclabel_num = 0;
32228 poc_label_num = 0;
32229 last_emitted_file = NULL;
32230 label_num = 0;
32231 tmpl_value_parm_die_table = NULL;
32232 generic_type_instances = NULL;
32233 frame_pointer_fb_offset = 0;
32234 frame_pointer_fb_offset_valid = false;
32235 base_types.release ();
32236 XDELETEVEC (producer_string);
32237 producer_string = NULL;
32238 }
32239
32240 #include "gt-dwarf2out.h"