1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2019 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47 information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105 static bool is_redundant_typedef (const_tree);
106
107 #ifndef XCOFF_DEBUGGING_INFO
108 #define XCOFF_DEBUGGING_INFO 0
109 #endif
110
111 #ifndef HAVE_XCOFF_DWARF_EXTRAS
112 #define HAVE_XCOFF_DWARF_EXTRAS 0
113 #endif
114
115 #ifdef VMS_DEBUGGING_INFO
116 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
117
118 /* Define this macro to be a nonzero value if the directory specifications
119 which are output in the debug info should end with a separator. */
120 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
121 /* Define this macro to evaluate to a nonzero value if GCC should refrain
122 from generating indirect strings in DWARF2 debug information, for instance
123 if your target is stuck with an old version of GDB that is unable to
124 process them properly or uses VMS Debug. */
125 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
126 #else
127 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
128 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
129 #endif
130
131 /* ??? Poison these here until it can be done generically. They've been
132 totally replaced in this file; make sure it stays that way. */
133 #undef DWARF2_UNWIND_INFO
134 #undef DWARF2_FRAME_INFO
135 #if (GCC_VERSION >= 3000)
136 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
137 #endif
138
139 /* The size of the target's pointer type. */
140 #ifndef PTR_SIZE
141 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
142 #endif
143
144 /* Array of RTXes referenced by the debugging information, which therefore
145 must be kept around forever. */
146 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
147
148 /* A pointer to the base of a list of incomplete types which might be
149 completed at some later time. incomplete_types needs to be a
150 vec<tree, va_gc> * because we want to tell the garbage collector about
151 it. */
152 static GTY(()) vec<tree, va_gc> *incomplete_types;
153
154 /* Pointers to various DWARF2 sections. */
155 static GTY(()) section *debug_info_section;
156 static GTY(()) section *debug_skeleton_info_section;
157 static GTY(()) section *debug_abbrev_section;
158 static GTY(()) section *debug_skeleton_abbrev_section;
159 static GTY(()) section *debug_aranges_section;
160 static GTY(()) section *debug_addr_section;
161 static GTY(()) section *debug_macinfo_section;
162 static const char *debug_macinfo_section_name;
163 static unsigned macinfo_label_base = 1;
164 static GTY(()) section *debug_line_section;
165 static GTY(()) section *debug_skeleton_line_section;
166 static GTY(()) section *debug_loc_section;
167 static GTY(()) section *debug_pubnames_section;
168 static GTY(()) section *debug_pubtypes_section;
169 static GTY(()) section *debug_str_section;
170 static GTY(()) section *debug_line_str_section;
171 static GTY(()) section *debug_str_dwo_section;
172 static GTY(()) section *debug_str_offsets_section;
173 static GTY(()) section *debug_ranges_section;
174 static GTY(()) section *debug_frame_section;
175
176 /* Maximum size (in bytes) of an artificially generated label. */
177 #define MAX_ARTIFICIAL_LABEL_BYTES 40
178
179 /* According to the (draft) DWARF 3 specification, the initial length
180 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
181 bytes are 0xffffffff, followed by the length stored in the next 8
182 bytes.
183
184 However, the SGI/MIPS ABI uses an initial length which is equal to
185 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
186
187 #ifndef DWARF_INITIAL_LENGTH_SIZE
188 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
189 #endif
190
191 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
192 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
193 #endif
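
/* Illustrative sketch (not part of GCC, never called): the emission pattern
   implied by the initial-length rule described above, using the same
   dwarf2asm helpers this file uses for the CIE/FDE and line-table headers.
   END_LABEL and BEGIN_LABEL stand for whatever two labels delimit the
   contribution being sized.  */
static void
sketch_output_initial_length (const char *end_label, const char *begin_label)
{
  if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
    /* 64-bit DWARF: a 4-byte 0xffffffff escape, then an 8-byte length.  */
    dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
			 " indicating 64-bit DWARF extension");
  /* 32-bit DWARF emits only this 4-byte delta; 64-bit DWARF an 8-byte one.  */
  dw2_asm_output_delta (DWARF_OFFSET_SIZE, end_label, begin_label,
			"Initial length");
}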
194
195 /* Round SIZE up to the nearest BOUNDARY. */
196 #define DWARF_ROUND(SIZE,BOUNDARY) \
197 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
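/* For example, DWARF_ROUND (9, 4) is ((9 + 4 - 1) / 4) * 4 == 12, while a
   size already on the boundary is unchanged: DWARF_ROUND (8, 4) == 8.  */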
198
199 /* CIE identifier. */
200 #if HOST_BITS_PER_WIDE_INT >= 64
201 #define DWARF_CIE_ID \
202 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
203 #else
204 #define DWARF_CIE_ID DW_CIE_ID
205 #endif
206
207
208 /* A vector for a table that contains frame description
209 information for each routine. */
210 #define NOT_INDEXED (-1U)
211 #define NO_INDEX_ASSIGNED (-2U)
212
213 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
214
215 struct GTY((for_user)) indirect_string_node {
216 const char *str;
217 unsigned int refcount;
218 enum dwarf_form form;
219 char *label;
220 unsigned int index;
221 };
222
223 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
224 {
225 typedef const char *compare_type;
226
227 static hashval_t hash (indirect_string_node *);
228 static bool equal (indirect_string_node *, const char *);
229 };
230
231 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
232
233 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
234
235 /* With split_debug_info, both the comp_dir and dwo_name go in the
236 main object file, rather than the dwo, similar to the force_direct
237 parameter elsewhere but with additional complications:
238
239 1) The string is needed in both the main object file and the dwo.
240 That is, the comp_dir and dwo_name will appear in both places.
241
242 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
243 DW_FORM_line_strp or DW_FORM_strx/GNU_str_index.
244
245 3) GCC chooses the form to use late, depending on the size and
246 reference count.
247
248 Rather than forcing all the debug string handling functions and
249 callers to deal with these complications, simply use a separate,
250 special-cased string table for any attribute that should go in the
251 main object file. This limits the complexity to just the places
252 that need it. */
253
254 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
255
256 static GTY(()) int dw2_string_counter;
257
258 /* True if the compilation unit places functions in more than one section. */
259 static GTY(()) bool have_multiple_function_sections = false;
260
261 /* Whether the default text and cold text sections have been used at all. */
262 static GTY(()) bool text_section_used = false;
263 static GTY(()) bool cold_text_section_used = false;
264
265 /* The default cold text section. */
266 static GTY(()) section *cold_text_section;
267
268 /* The DIE for C++14 'auto' in a function return type. */
269 static GTY(()) dw_die_ref auto_die;
270
271 /* The DIE for C++14 'decltype(auto)' in a function return type. */
272 static GTY(()) dw_die_ref decltype_auto_die;
273
274 /* Forward declarations for functions defined in this file. */
275
276 static void output_call_frame_info (int);
277 static void dwarf2out_note_section_used (void);
278
279 /* Personality decl of the current unit. Used only when the assembler does
280 not support personality CFI. */
281 static GTY(()) rtx current_unit_personality;
282
283 /* Whether an eh_frame section is required. */
284 static GTY(()) bool do_eh_frame = false;
285
286 /* .debug_rnglists next index. */
287 static unsigned int rnglist_idx;
288
289 /* Data and reference forms for relocatable data. */
290 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
291 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
292
293 #ifndef DEBUG_FRAME_SECTION
294 #define DEBUG_FRAME_SECTION ".debug_frame"
295 #endif
296
297 #ifndef FUNC_BEGIN_LABEL
298 #define FUNC_BEGIN_LABEL "LFB"
299 #endif
300
301 #ifndef FUNC_SECOND_SECT_LABEL
302 #define FUNC_SECOND_SECT_LABEL "LFSB"
303 #endif
304
305 #ifndef FUNC_END_LABEL
306 #define FUNC_END_LABEL "LFE"
307 #endif
308
309 #ifndef PROLOGUE_END_LABEL
310 #define PROLOGUE_END_LABEL "LPE"
311 #endif
312
313 #ifndef EPILOGUE_BEGIN_LABEL
314 #define EPILOGUE_BEGIN_LABEL "LEB"
315 #endif
316
317 #ifndef FRAME_BEGIN_LABEL
318 #define FRAME_BEGIN_LABEL "Lframe"
319 #endif
320 #define CIE_AFTER_SIZE_LABEL "LSCIE"
321 #define CIE_END_LABEL "LECIE"
322 #define FDE_LABEL "LSFDE"
323 #define FDE_AFTER_SIZE_LABEL "LASFDE"
324 #define FDE_END_LABEL "LEFDE"
325 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
326 #define LINE_NUMBER_END_LABEL "LELT"
327 #define LN_PROLOG_AS_LABEL "LASLTP"
328 #define LN_PROLOG_END_LABEL "LELTP"
329 #define DIE_LABEL_PREFIX "DW"
330 \f
331 /* Match the base name of a file to the base name of a compilation unit. */
332
333 static int
334 matches_main_base (const char *path)
335 {
336 /* Cache the last query. */
337 static const char *last_path = NULL;
338 static int last_match = 0;
339 if (path != last_path)
340 {
341 const char *base;
342 int length = base_of_path (path, &base);
343 last_path = path;
344 last_match = (length == main_input_baselength
345 && memcmp (base, main_input_basename, length) == 0);
346 }
347 return last_match;
348 }
349
350 #ifdef DEBUG_DEBUG_STRUCT
351
352 static int
353 dump_struct_debug (tree type, enum debug_info_usage usage,
354 enum debug_struct_file criterion, int generic,
355 int matches, int result)
356 {
357 /* Find the type name. */
358 tree type_decl = TYPE_STUB_DECL (type);
359 tree t = type_decl;
360 const char *name = 0;
361 if (TREE_CODE (t) == TYPE_DECL)
362 t = DECL_NAME (t);
363 if (t)
364 name = IDENTIFIER_POINTER (t);
365
366 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
367 criterion,
368 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
369 matches ? "bas" : "hdr",
370 generic ? "gen" : "ord",
371 usage == DINFO_USAGE_DFN ? ";" :
372 usage == DINFO_USAGE_DIR_USE ? "." : "*",
373 result,
374 (void*) type_decl, name);
375 return result;
376 }
377 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
378 dump_struct_debug (type, usage, criterion, generic, matches, result)
379
380 #else
381
382 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
383 (result)
384
385 #endif
386
387 /* Get the number of HOST_WIDE_INTs needed to represent the precision
388 of the number. Some constants have a large uniform precision, so
389 we get the precision needed for the actual value of the number. */
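/* For example, with 64-bit HOST_WIDE_INTs, a value whose minimum precision
   is 65 bits needs (65 + 63) / 64 == 2 HOST_WIDE_INTs, while anything that
   fits in 64 bits needs just one.  */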
390
391 static unsigned int
392 get_full_len (const wide_int &op)
393 {
394 int prec = wi::min_precision (op, UNSIGNED);
395 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
396 / HOST_BITS_PER_WIDE_INT);
397 }
398
399 static bool
400 should_emit_struct_debug (tree type, enum debug_info_usage usage)
401 {
402 enum debug_struct_file criterion;
403 tree type_decl;
404 bool generic = lang_hooks.types.generic_p (type);
405
406 if (generic)
407 criterion = debug_struct_generic[usage];
408 else
409 criterion = debug_struct_ordinary[usage];
410
411 if (criterion == DINFO_STRUCT_FILE_NONE)
412 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
413 if (criterion == DINFO_STRUCT_FILE_ANY)
414 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
415
416 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
417
418 if (type_decl != NULL)
419 {
420 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
421 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
422
423 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
424 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
425 }
426
427 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
428 }
429 \f
430 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
431 switch to the data section instead, and write out a synthetic start label
432 for collect2 the first time around. */
433
434 static void
435 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
436 {
437 if (eh_frame_section == 0)
438 {
439 int flags;
440
441 if (EH_TABLES_CAN_BE_READ_ONLY)
442 {
443 int fde_encoding;
444 int per_encoding;
445 int lsda_encoding;
446
447 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
448 /*global=*/0);
449 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
450 /*global=*/1);
451 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
452 /*global=*/0);
453 flags = ((! flag_pic
454 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
455 && (fde_encoding & 0x70) != DW_EH_PE_aligned
456 && (per_encoding & 0x70) != DW_EH_PE_absptr
457 && (per_encoding & 0x70) != DW_EH_PE_aligned
458 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
459 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
460 ? 0 : SECTION_WRITE);
461 }
462 else
463 flags = SECTION_WRITE;
464
465 #ifdef EH_FRAME_SECTION_NAME
466 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
467 #else
468 eh_frame_section = ((flags == SECTION_WRITE)
469 ? data_section : readonly_data_section);
470 #endif /* EH_FRAME_SECTION_NAME */
471 }
472
473 switch_to_section (eh_frame_section);
474
475 #ifdef EH_FRAME_THROUGH_COLLECT2
476 /* We have no special eh_frame section. Emit special labels to guide
477 collect2. */
478 if (!back)
479 {
480 tree label = get_file_function_name ("F");
481 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
482 targetm.asm_out.globalize_label (asm_out_file,
483 IDENTIFIER_POINTER (label));
484 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
485 }
486 #endif
487 }
488
489 /* Switch [BACK] to the eh or debug frame table section, depending on
490 FOR_EH. */
491
492 static void
493 switch_to_frame_table_section (int for_eh, bool back)
494 {
495 if (for_eh)
496 switch_to_eh_frame_section (back);
497 else
498 {
499 if (!debug_frame_section)
500 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
501 SECTION_DEBUG, NULL);
502 switch_to_section (debug_frame_section);
503 }
504 }
505
506 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
507
508 enum dw_cfi_oprnd_type
509 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
510 {
511 switch (cfi)
512 {
513 case DW_CFA_nop:
514 case DW_CFA_GNU_window_save:
515 case DW_CFA_remember_state:
516 case DW_CFA_restore_state:
517 return dw_cfi_oprnd_unused;
518
519 case DW_CFA_set_loc:
520 case DW_CFA_advance_loc1:
521 case DW_CFA_advance_loc2:
522 case DW_CFA_advance_loc4:
523 case DW_CFA_MIPS_advance_loc8:
524 return dw_cfi_oprnd_addr;
525
526 case DW_CFA_offset:
527 case DW_CFA_offset_extended:
528 case DW_CFA_def_cfa:
529 case DW_CFA_offset_extended_sf:
530 case DW_CFA_def_cfa_sf:
531 case DW_CFA_restore:
532 case DW_CFA_restore_extended:
533 case DW_CFA_undefined:
534 case DW_CFA_same_value:
535 case DW_CFA_def_cfa_register:
536 case DW_CFA_register:
537 case DW_CFA_expression:
538 case DW_CFA_val_expression:
539 return dw_cfi_oprnd_reg_num;
540
541 case DW_CFA_def_cfa_offset:
542 case DW_CFA_GNU_args_size:
543 case DW_CFA_def_cfa_offset_sf:
544 return dw_cfi_oprnd_offset;
545
546 case DW_CFA_def_cfa_expression:
547 return dw_cfi_oprnd_loc;
548
549 default:
550 gcc_unreachable ();
551 }
552 }
553
554 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
555
556 enum dw_cfi_oprnd_type
557 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
558 {
559 switch (cfi)
560 {
561 case DW_CFA_def_cfa:
562 case DW_CFA_def_cfa_sf:
563 case DW_CFA_offset:
564 case DW_CFA_offset_extended_sf:
565 case DW_CFA_offset_extended:
566 return dw_cfi_oprnd_offset;
567
568 case DW_CFA_register:
569 return dw_cfi_oprnd_reg_num;
570
571 case DW_CFA_expression:
572 case DW_CFA_val_expression:
573 return dw_cfi_oprnd_loc;
574
575 case DW_CFA_def_cfa_expression:
576 return dw_cfi_oprnd_cfa_loc;
577
578 default:
579 return dw_cfi_oprnd_unused;
580 }
581 }
582
583 /* Output one FDE. */
584
585 static void
586 output_fde (dw_fde_ref fde, bool for_eh, bool second,
587 char *section_start_label, int fde_encoding, char *augmentation,
588 bool any_lsda_needed, int lsda_encoding)
589 {
590 const char *begin, *end;
591 static unsigned int j;
592 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
593
594 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
595 /* empty */ 0);
596 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
597 for_eh + j);
598 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
599 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
600 if (!XCOFF_DEBUGGING_INFO || for_eh)
601 {
602 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
603 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
604 " indicating 64-bit DWARF extension");
605 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
606 "FDE Length");
607 }
608 ASM_OUTPUT_LABEL (asm_out_file, l1);
609
610 if (for_eh)
611 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
612 else
613 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
614 debug_frame_section, "FDE CIE offset");
615
616 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
617 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
618
619 if (for_eh)
620 {
621 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
622 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
623 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
624 "FDE initial location");
625 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
626 end, begin, "FDE address range");
627 }
628 else
629 {
630 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
631 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
632 }
633
634 if (augmentation[0])
635 {
636 if (any_lsda_needed)
637 {
638 int size = size_of_encoded_value (lsda_encoding);
639
640 if (lsda_encoding == DW_EH_PE_aligned)
641 {
642 int offset = ( 4 /* Length */
643 + 4 /* CIE offset */
644 + 2 * size_of_encoded_value (fde_encoding)
645 + 1 /* Augmentation size */ );
646 int pad = -offset & (PTR_SIZE - 1);
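	      /* -offset & (PTR_SIZE - 1) is the distance from OFFSET up to
		 the next multiple of PTR_SIZE; e.g. with PTR_SIZE == 8 and
		 offset == 13, pad == (-13) & 7 == 3, giving 13 + 3 == 16.  */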
647
648 size += pad;
649 gcc_assert (size_of_uleb128 (size) == 1);
650 }
651
652 dw2_asm_output_data_uleb128 (size, "Augmentation size");
653
654 if (fde->uses_eh_lsda)
655 {
656 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
657 fde->funcdef_number);
658 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
659 gen_rtx_SYMBOL_REF (Pmode, l1),
660 false,
661 "Language Specific Data Area");
662 }
663 else
664 {
665 if (lsda_encoding == DW_EH_PE_aligned)
666 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
667 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
668 "Language Specific Data Area (none)");
669 }
670 }
671 else
672 dw2_asm_output_data_uleb128 (0, "Augmentation size");
673 }
674
675 /* Loop through the Call Frame Instructions associated with this FDE. */
676 fde->dw_fde_current_label = begin;
677 {
678 size_t from, until, i;
679
680 from = 0;
681 until = vec_safe_length (fde->dw_fde_cfi);
682
683 if (fde->dw_fde_second_begin == NULL)
684 ;
685 else if (!second)
686 until = fde->dw_fde_switch_cfi_index;
687 else
688 from = fde->dw_fde_switch_cfi_index;
689
690 for (i = from; i < until; i++)
691 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
692 }
693
694 /* If we are to emit a ref/link from function bodies to their frame tables,
695 do it now. This is typically performed to make sure that tables
696 associated with functions are dragged along with them and not discarded
697 by link-time garbage collection. We need to do this on a per-function basis to
698 cope with -ffunction-sections. */
699
700 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
701 /* Switch to the function section, emit the ref to the tables, and
702 switch *back* into the table section. */
703 switch_to_section (function_section (fde->decl));
704 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
705 switch_to_frame_table_section (for_eh, true);
706 #endif
707
708 /* Pad the FDE out to an address sized boundary. */
709 ASM_OUTPUT_ALIGN (asm_out_file,
710 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
711 ASM_OUTPUT_LABEL (asm_out_file, l2);
712
713 j += 2;
714 }
715
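/* Illustrative sketch (not part of GCC, never called): the ULEB128 encoding
   used above for the "Augmentation size" field and throughout this file for
   variable-length data.  Each output byte carries 7 bits of the value, least
   significant first, with the high bit set on every byte except the last;
   size_of_uleb128 computes the length this loop would produce without
   writing anything.  */
static unsigned int
sketch_encode_uleb128 (unsigned char *buf, unsigned HOST_WIDE_INT value)
{
  unsigned int len = 0;
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value != 0)
	byte |= 0x80;		/* More bytes follow.  */
      buf[len++] = byte;
    }
  while (value != 0);
  return len;
}
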
716 /* Return true if frame description entry FDE is needed for EH. */
717
718 static bool
719 fde_needed_for_eh_p (dw_fde_ref fde)
720 {
721 if (flag_asynchronous_unwind_tables)
722 return true;
723
724 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
725 return true;
726
727 if (fde->uses_eh_lsda)
728 return true;
729
730 /* If exceptions are enabled, we have collected nothrow info. */
731 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
732 return false;
733
734 return true;
735 }
736
737 /* Output the call frame information: it records how the frame
738 address is calculated and where the saved registers are located,
739 for every routine in the unit. */
740
741 static void
742 output_call_frame_info (int for_eh)
743 {
744 unsigned int i;
745 dw_fde_ref fde;
746 dw_cfi_ref cfi;
747 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
748 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
749 bool any_lsda_needed = false;
750 char augmentation[6];
751 int augmentation_size;
752 int fde_encoding = DW_EH_PE_absptr;
753 int per_encoding = DW_EH_PE_absptr;
754 int lsda_encoding = DW_EH_PE_absptr;
755 int return_reg;
756 rtx personality = NULL;
757 int dw_cie_version;
758
759 /* Don't emit a CIE if there won't be any FDEs. */
760 if (!fde_vec)
761 return;
762
763 /* Nothing to do if the assembler's doing it all. */
764 if (dwarf2out_do_cfi_asm ())
765 return;
766
767 /* If we don't have any functions we'll want to unwind out of, don't emit
768 any EH unwind information. If we make FDEs linkonce, we may have to
769 emit an empty label for an FDE that wouldn't otherwise be emitted. We
770 want to avoid having an FDE kept around when the function it refers to
771 is discarded. Example where this matters: a primary function template
772 in C++ requires EH information, an explicit specialization doesn't. */
773 if (for_eh)
774 {
775 bool any_eh_needed = false;
776
777 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
778 {
779 if (fde->uses_eh_lsda)
780 any_eh_needed = any_lsda_needed = true;
781 else if (fde_needed_for_eh_p (fde))
782 any_eh_needed = true;
783 else if (TARGET_USES_WEAK_UNWIND_INFO)
784 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
785 }
786
787 if (!any_eh_needed)
788 return;
789 }
790
791 /* We're going to be generating comments, so turn on app. */
792 if (flag_debug_asm)
793 app_enable ();
794
795 /* Switch to the proper frame section, first time. */
796 switch_to_frame_table_section (for_eh, false);
797
798 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
799 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
800
801 /* Output the CIE. */
802 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
803 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
804 if (!XCOFF_DEBUGGING_INFO || for_eh)
805 {
806 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
807 dw2_asm_output_data (4, 0xffffffff,
808 "Initial length escape value indicating 64-bit DWARF extension");
809 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
810 "Length of Common Information Entry");
811 }
812 ASM_OUTPUT_LABEL (asm_out_file, l1);
813
814 /* Now that the CIE pointer is PC-relative for EH,
815 use 0 to identify the CIE. */
816 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
817 (for_eh ? 0 : DWARF_CIE_ID),
818 "CIE Identifier Tag");
819
820 /* Use CIE version 3 for DWARF 3; allow DWARF 2 to continue to
821 use CIE version 1, unless that would produce incorrect results
822 due to overflowing the return register column. */
823 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
824 dw_cie_version = 1;
825 if (return_reg >= 256 || dwarf_version > 2)
826 dw_cie_version = 3;
827 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
828
829 augmentation[0] = 0;
830 augmentation_size = 0;
831
832 personality = current_unit_personality;
833 if (for_eh)
834 {
835 char *p;
836
837 /* Augmentation:
838 z Indicates that a uleb128 is present to size the
839 augmentation section.
840 L Indicates the encoding (and thus presence) of
841 an LSDA pointer in the FDE augmentation.
842 R Indicates a non-default pointer encoding for
843 FDE code pointers.
844 P Indicates the presence of an encoding + language
845 personality routine in the CIE augmentation. */
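      /* For example, a unit that has a personality routine, uses an LSDA
	 and a non-default FDE encoding ends up with the augmentation
	 string "zPLR" built below.  */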
846
847 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
848 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
849 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
850
851 p = augmentation + 1;
852 if (personality)
853 {
854 *p++ = 'P';
855 augmentation_size += 1 + size_of_encoded_value (per_encoding);
856 assemble_external_libcall (personality);
857 }
858 if (any_lsda_needed)
859 {
860 *p++ = 'L';
861 augmentation_size += 1;
862 }
863 if (fde_encoding != DW_EH_PE_absptr)
864 {
865 *p++ = 'R';
866 augmentation_size += 1;
867 }
868 if (p > augmentation + 1)
869 {
870 augmentation[0] = 'z';
871 *p = '\0';
872 }
873
874 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
875 if (personality && per_encoding == DW_EH_PE_aligned)
876 {
877 int offset = ( 4 /* Length */
878 + 4 /* CIE Id */
879 + 1 /* CIE version */
880 + strlen (augmentation) + 1 /* Augmentation */
881 + size_of_uleb128 (1) /* Code alignment */
882 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
883 + 1 /* RA column */
884 + 1 /* Augmentation size */
885 + 1 /* Personality encoding */ );
886 int pad = -offset & (PTR_SIZE - 1);
887
888 augmentation_size += pad;
889
890 /* Augmentations should be small, so there is little need to
891 iterate for a solution. Die if we exceed one uleb128 byte. */
892 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
893 }
894 }
895
896 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
897 if (dw_cie_version >= 4)
898 {
899 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
900 dw2_asm_output_data (1, 0, "CIE Segment Size");
901 }
902 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
903 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
904 "CIE Data Alignment Factor");
905
906 if (dw_cie_version == 1)
907 dw2_asm_output_data (1, return_reg, "CIE RA Column");
908 else
909 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
910
911 if (augmentation[0])
912 {
913 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
914 if (personality)
915 {
916 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
917 eh_data_format_name (per_encoding));
918 dw2_asm_output_encoded_addr_rtx (per_encoding,
919 personality,
920 true, NULL);
921 }
922
923 if (any_lsda_needed)
924 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
925 eh_data_format_name (lsda_encoding));
926
927 if (fde_encoding != DW_EH_PE_absptr)
928 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
929 eh_data_format_name (fde_encoding));
930 }
931
932 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
933 output_cfi (cfi, NULL, for_eh);
934
935 /* Pad the CIE out to an address sized boundary. */
936 ASM_OUTPUT_ALIGN (asm_out_file,
937 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
938 ASM_OUTPUT_LABEL (asm_out_file, l2);
939
940 /* Loop through all of the FDEs. */
941 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
942 {
943 unsigned int k;
944
945 /* Don't emit EH unwind info for leaf functions that don't need it. */
946 if (for_eh && !fde_needed_for_eh_p (fde))
947 continue;
948
949 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
950 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
951 augmentation, any_lsda_needed, lsda_encoding);
952 }
953
954 if (for_eh && targetm.terminate_dw2_eh_frame_info)
955 dw2_asm_output_data (4, 0, "End of Table");
956
957 /* Turn off app to make assembly quicker. */
958 if (flag_debug_asm)
959 app_disable ();
960 }
961
962 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
963
964 static void
965 dwarf2out_do_cfi_startproc (bool second)
966 {
967 int enc;
968 rtx ref;
969
970 fprintf (asm_out_file, "\t.cfi_startproc\n");
971
972 targetm.asm_out.post_cfi_startproc (asm_out_file, current_function_decl);
973
974 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
975 eh unwinders. */
976 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
977 return;
978
979 rtx personality = get_personality_function (current_function_decl);
980
981 if (personality)
982 {
983 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
984 ref = personality;
985
986 /* ??? The GAS support isn't entirely consistent. We have to
987 handle indirect support ourselves, but PC-relative is done
988 in the assembler. Further, the assembler can't handle any
989 of the weirder relocation types. */
990 if (enc & DW_EH_PE_indirect)
991 ref = dw2_force_const_mem (ref, true);
992
993 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
994 output_addr_const (asm_out_file, ref);
995 fputc ('\n', asm_out_file);
996 }
997
998 if (crtl->uses_eh_lsda)
999 {
1000 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
1001
1002 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
1003 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
1004 current_function_funcdef_no);
1005 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
1006 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1007
1008 if (enc & DW_EH_PE_indirect)
1009 ref = dw2_force_const_mem (ref, true);
1010
1011 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1012 output_addr_const (asm_out_file, ref);
1013 fputc ('\n', asm_out_file);
1014 }
1015 }
1016
1017 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1018 this allocation may be done before pass_final. */
1019
1020 dw_fde_ref
1021 dwarf2out_alloc_current_fde (void)
1022 {
1023 dw_fde_ref fde;
1024
1025 fde = ggc_cleared_alloc<dw_fde_node> ();
1026 fde->decl = current_function_decl;
1027 fde->funcdef_number = current_function_funcdef_no;
1028 fde->fde_index = vec_safe_length (fde_vec);
1029 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1030 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1031 fde->nothrow = crtl->nothrow;
1032 fde->drap_reg = INVALID_REGNUM;
1033 fde->vdrap_reg = INVALID_REGNUM;
1034
1035 /* Record the FDE associated with this function. */
1036 cfun->fde = fde;
1037 vec_safe_push (fde_vec, fde);
1038
1039 return fde;
1040 }
1041
1042 /* Output a marker (i.e. a label) for the beginning of a function, before
1043 the prologue. */
1044
1045 void
1046 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1047 unsigned int column ATTRIBUTE_UNUSED,
1048 const char *file ATTRIBUTE_UNUSED)
1049 {
1050 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1051 char * dup_label;
1052 dw_fde_ref fde;
1053 section *fnsec;
1054 bool do_frame;
1055
1056 current_function_func_begin_label = NULL;
1057
1058 do_frame = dwarf2out_do_frame ();
1059
1060 /* ??? current_function_func_begin_label is also used by except.c for
1061 call-site information. We must emit this label if it might be used. */
1062 if (!do_frame
1063 && (!flag_exceptions
1064 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1065 return;
1066
1067 fnsec = function_section (current_function_decl);
1068 switch_to_section (fnsec);
1069 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1070 current_function_funcdef_no);
1071 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1072 current_function_funcdef_no);
1073 dup_label = xstrdup (label);
1074 current_function_func_begin_label = dup_label;
1075
1076 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1077 if (!do_frame)
1078 return;
1079
1080 /* Unlike the debug version, the EH version of frame unwind info is a per-
1081 function setting so we need to record whether we need it for the unit. */
1082 do_eh_frame |= dwarf2out_do_eh_frame ();
1083
1084 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1085 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1086 would include pass_dwarf2_frame. If we've not created the FDE yet,
1087 do so now. */
1088 fde = cfun->fde;
1089 if (fde == NULL)
1090 fde = dwarf2out_alloc_current_fde ();
1091
1092 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1093 fde->dw_fde_begin = dup_label;
1094 fde->dw_fde_current_label = dup_label;
1095 fde->in_std_section = (fnsec == text_section
1096 || (cold_text_section && fnsec == cold_text_section));
1097
1098 /* We only want to output line number information for the genuine dwarf2
1099 prologue case, not the eh frame case. */
1100 #ifdef DWARF2_DEBUGGING_INFO
1101 if (file)
1102 dwarf2out_source_line (line, column, file, 0, true);
1103 #endif
1104
1105 if (dwarf2out_do_cfi_asm ())
1106 dwarf2out_do_cfi_startproc (false);
1107 else
1108 {
1109 rtx personality = get_personality_function (current_function_decl);
1110 if (!current_unit_personality)
1111 current_unit_personality = personality;
1112
1113 /* We cannot keep a current personality per function as without CFI
1114 asm, at the point where we emit the CFI data, there is no current
1115 function anymore. */
1116 if (personality && current_unit_personality != personality)
1117 sorry ("multiple EH personalities are supported only with assemblers "
1118 "supporting %<.cfi_personality%> directive");
1119 }
1120 }
1121
1122 /* Output a marker (i.e. a label) for the end of the generated code
1123 for a function prologue. This gets called *after* the prologue code has
1124 been generated. */
1125
1126 void
1127 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1128 const char *file ATTRIBUTE_UNUSED)
1129 {
1130 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1131
1132 /* Output a label to mark the end of the prologue code generated for
1133 this function. */
1134 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1135 current_function_funcdef_no);
1136 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1137 current_function_funcdef_no);
1138 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1139 }
1140
1141 /* Output a marker (i.e. a label) for the beginning of the generated code
1142 for a function epilogue. This gets called *before* the epilogue code has
1143 been generated. */
1144
1145 void
1146 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1147 const char *file ATTRIBUTE_UNUSED)
1148 {
1149 dw_fde_ref fde = cfun->fde;
1150 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1151
1152 if (fde->dw_fde_vms_begin_epilogue)
1153 return;
1154
1155 /* Output a label to mark the point where the epilogue code for this
1156 function begins. */
1157 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1158 current_function_funcdef_no);
1159 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1160 current_function_funcdef_no);
1161 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1162 }
1163
1164 /* Output a marker (i.e. a label) for the absolute end of the generated code
1165 for a function definition. This gets called *after* the epilogue code has
1166 been generated. */
1167
1168 void
1169 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1170 const char *file ATTRIBUTE_UNUSED)
1171 {
1172 dw_fde_ref fde;
1173 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1174
1175 last_var_location_insn = NULL;
1176 cached_next_real_insn = NULL;
1177
1178 if (dwarf2out_do_cfi_asm ())
1179 fprintf (asm_out_file, "\t.cfi_endproc\n");
1180
1181 /* Output a label to mark the endpoint of the code generated for this
1182 function. */
1183 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1184 current_function_funcdef_no);
1185 ASM_OUTPUT_LABEL (asm_out_file, label);
1186 fde = cfun->fde;
1187 gcc_assert (fde != NULL);
1188 if (fde->dw_fde_second_begin == NULL)
1189 fde->dw_fde_end = xstrdup (label);
1190 }
1191
1192 void
1193 dwarf2out_frame_finish (void)
1194 {
1195 /* Output call frame information. */
1196 if (targetm.debug_unwind_info () == UI_DWARF2)
1197 output_call_frame_info (0);
1198
1199 /* Output another copy for the unwinder. */
1200 if (do_eh_frame)
1201 output_call_frame_info (1);
1202 }
1203
1204 /* Note that the current function section is being used for code. */
1205
1206 static void
1207 dwarf2out_note_section_used (void)
1208 {
1209 section *sec = current_function_section ();
1210 if (sec == text_section)
1211 text_section_used = true;
1212 else if (sec == cold_text_section)
1213 cold_text_section_used = true;
1214 }
1215
1216 static void var_location_switch_text_section (void);
1217 static void set_cur_line_info_table (section *);
1218
1219 void
1220 dwarf2out_switch_text_section (void)
1221 {
1222 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1223 section *sect;
1224 dw_fde_ref fde = cfun->fde;
1225
1226 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1227
1228 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_SECOND_SECT_LABEL,
1229 current_function_funcdef_no);
1230
1231 fde->dw_fde_second_begin = ggc_strdup (label);
1232 if (!in_cold_section_p)
1233 {
1234 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1235 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1236 }
1237 else
1238 {
1239 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1240 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1241 }
1242 have_multiple_function_sections = true;
1243
1244 /* There is no need to mark used sections when not debugging. */
1245 if (cold_text_section != NULL)
1246 dwarf2out_note_section_used ();
1247
1248 if (dwarf2out_do_cfi_asm ())
1249 fprintf (asm_out_file, "\t.cfi_endproc\n");
1250
1251 /* Now do the real section switch. */
1252 sect = current_function_section ();
1253 switch_to_section (sect);
1254
1255 fde->second_in_std_section
1256 = (sect == text_section
1257 || (cold_text_section && sect == cold_text_section));
1258
1259 if (dwarf2out_do_cfi_asm ())
1260 dwarf2out_do_cfi_startproc (true);
1261
1262 var_location_switch_text_section ();
1263
1264 if (cold_text_section != NULL)
1265 set_cur_line_info_table (sect);
1266 }
1267 \f
1268 /* And now, the subset of the debugging information support code necessary
1269 for emitting location expressions. */
1270
1271 /* Data about a single source file. */
1272 struct GTY((for_user)) dwarf_file_data {
1273 const char * filename;
1274 int emitted_number;
1275 };
1276
1277 /* Describe an entry into the .debug_addr section. */
1278
1279 enum ate_kind {
1280 ate_kind_rtx,
1281 ate_kind_rtx_dtprel,
1282 ate_kind_label
1283 };
1284
1285 struct GTY((for_user)) addr_table_entry {
1286 enum ate_kind kind;
1287 unsigned int refcount;
1288 unsigned int index;
1289 union addr_table_entry_struct_union
1290 {
1291 rtx GTY ((tag ("0"))) rtl;
1292 char * GTY ((tag ("1"))) label;
1293 }
1294 GTY ((desc ("%1.kind"))) addr;
1295 };
1296
1297 typedef unsigned int var_loc_view;
1298
1299 /* Location lists pair address ranges with a location description for
1300 each range, so a variable that lives in different places over its
1301 lifetime can be tracked. */
1302 typedef struct GTY(()) dw_loc_list_struct {
1303 dw_loc_list_ref dw_loc_next;
1304 const char *begin; /* Label and addr_entry for start of range */
1305 addr_table_entry *begin_entry;
1306 const char *end; /* Label for end of range */
1307 char *ll_symbol; /* Label for beginning of location list.
1308 Only on head of list. */
1309 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1310 const char *section; /* Section this loclist is relative to */
1311 dw_loc_descr_ref expr;
1312 var_loc_view vbegin, vend;
1313 hashval_t hash;
1314 /* True if all addresses in this and subsequent lists are known to be
1315 resolved. */
1316 bool resolved_addr;
1317 /* True if this list has been replaced by dw_loc_next. */
1318 bool replaced;
1319 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1320 section. */
1321 unsigned char emitted : 1;
1322 /* True if hash field is index rather than hash value. */
1323 unsigned char num_assigned : 1;
1324 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1325 unsigned char offset_emitted : 1;
1326 /* True if note_variable_value_in_expr has been called on it. */
1327 unsigned char noted_variable_value : 1;
1328 /* True if the range should be emitted even if begin and end
1329 are the same. */
1330 bool force;
1331 } dw_loc_list_node;
1332
1333 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1334 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1335
1336 /* Convert a DWARF stack opcode into its string name. */
1337
1338 static const char *
1339 dwarf_stack_op_name (unsigned int op)
1340 {
1341 const char *name = get_DW_OP_name (op);
1342
1343 if (name != NULL)
1344 return name;
1345
1346 return "OP_<unknown>";
1347 }
1348
1349 /* Return TRUE iff we're to output location view lists as a separate
1350 attribute next to the location lists, as an extension compatible
1351 with DWARF 2 and above. */
1352
1353 static inline bool
1354 dwarf2out_locviews_in_attribute ()
1355 {
1356 return debug_variable_location_views == 1;
1357 }
1358
1359 /* Return TRUE iff we're to output location view lists as part of the
1360 location lists, as proposed for standardization after DWARF 5. */
1361
1362 static inline bool
1363 dwarf2out_locviews_in_loclist ()
1364 {
1365 #ifndef DW_LLE_view_pair
1366 return false;
1367 #else
1368 return debug_variable_location_views == -1;
1369 #endif
1370 }
1371
1372 /* Return a pointer to a newly allocated location description. Location
1373 descriptions are simple expression terms that can be strung
1374 together to form more complicated location (address) descriptions. */
1375
1376 static inline dw_loc_descr_ref
1377 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1378 unsigned HOST_WIDE_INT oprnd2)
1379 {
1380 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1381
1382 descr->dw_loc_opc = op;
1383 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1384 descr->dw_loc_oprnd1.val_entry = NULL;
1385 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1386 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1387 descr->dw_loc_oprnd2.val_entry = NULL;
1388 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1389
1390 return descr;
1391 }
1392
1393 /* Add a location description term to a location description expression. */
1394
1395 static inline void
1396 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1397 {
1398 dw_loc_descr_ref *d;
1399
1400 /* Find the end of the chain. */
1401 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1402 ;
1403
1404 *d = descr;
1405 }
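
/* Illustrative sketch (not part of GCC, never called): combining the two
   helpers above to build the two-operation expression
   "DW_OP_breg6 -16; DW_OP_deref".  The real code builds the register term
   through new_reg_loc_descr, defined below.  */
static dw_loc_descr_ref
sketch_build_deref_expr (void)
{
  /* Base register plus constant offset...  */
  dw_loc_descr_ref head = new_loc_descr (DW_OP_breg6, -16, 0);
  /* ...then dereference the resulting address.  */
  add_loc_descr (&head, new_loc_descr (DW_OP_deref, 0, 0));
  return head;
}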
1406
1407 /* Compare two location operands for exact equality. */
1408
1409 static bool
1410 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1411 {
1412 if (a->val_class != b->val_class)
1413 return false;
1414 switch (a->val_class)
1415 {
1416 case dw_val_class_none:
1417 return true;
1418 case dw_val_class_addr:
1419 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1420
1421 case dw_val_class_offset:
1422 case dw_val_class_unsigned_const:
1423 case dw_val_class_const:
1424 case dw_val_class_unsigned_const_implicit:
1425 case dw_val_class_const_implicit:
1426 case dw_val_class_range_list:
1427 /* These are all HOST_WIDE_INT, signed or unsigned. */
1428 return a->v.val_unsigned == b->v.val_unsigned;
1429
1430 case dw_val_class_loc:
1431 return a->v.val_loc == b->v.val_loc;
1432 case dw_val_class_loc_list:
1433 return a->v.val_loc_list == b->v.val_loc_list;
1434 case dw_val_class_view_list:
1435 return a->v.val_view_list == b->v.val_view_list;
1436 case dw_val_class_die_ref:
1437 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1438 case dw_val_class_fde_ref:
1439 return a->v.val_fde_index == b->v.val_fde_index;
1440 case dw_val_class_symview:
1441 return strcmp (a->v.val_symbolic_view, b->v.val_symbolic_view) == 0;
1442 case dw_val_class_lbl_id:
1443 case dw_val_class_lineptr:
1444 case dw_val_class_macptr:
1445 case dw_val_class_loclistsptr:
1446 case dw_val_class_high_pc:
1447 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1448 case dw_val_class_str:
1449 return a->v.val_str == b->v.val_str;
1450 case dw_val_class_flag:
1451 return a->v.val_flag == b->v.val_flag;
1452 case dw_val_class_file:
1453 case dw_val_class_file_implicit:
1454 return a->v.val_file == b->v.val_file;
1455 case dw_val_class_decl_ref:
1456 return a->v.val_decl_ref == b->v.val_decl_ref;
1457
1458 case dw_val_class_const_double:
1459 return (a->v.val_double.high == b->v.val_double.high
1460 && a->v.val_double.low == b->v.val_double.low);
1461
1462 case dw_val_class_wide_int:
1463 return *a->v.val_wide == *b->v.val_wide;
1464
1465 case dw_val_class_vec:
1466 {
1467 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1468 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1469
1470 return (a_len == b_len
1471 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1472 }
1473
1474 case dw_val_class_data8:
1475 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1476
1477 case dw_val_class_vms_delta:
1478 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1479 && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1480
1481 case dw_val_class_discr_value:
1482 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1483 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1484 case dw_val_class_discr_list:
1485 /* It makes no sense comparing two discriminant value lists. */
1486 return false;
1487 }
1488 gcc_unreachable ();
1489 }
1490
1491 /* Compare two location atoms for exact equality. */
1492
1493 static bool
1494 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1495 {
1496 if (a->dw_loc_opc != b->dw_loc_opc)
1497 return false;
1498
1499 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1500 address size, but since we always allocate cleared storage it
1501 should be zero for other types of locations. */
1502 if (a->dtprel != b->dtprel)
1503 return false;
1504
1505 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1506 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1507 }
1508
1509 /* Compare two complete location expressions for exact equality. */
1510
1511 bool
1512 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1513 {
1514 while (1)
1515 {
1516 if (a == b)
1517 return true;
1518 if (a == NULL || b == NULL)
1519 return false;
1520 if (!loc_descr_equal_p_1 (a, b))
1521 return false;
1522
1523 a = a->dw_loc_next;
1524 b = b->dw_loc_next;
1525 }
1526 }
1527
1528
1529 /* Add a constant POLY_OFFSET to a location expression. */
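/* For example, adding +8 to an expression whose last operation is
   DW_OP_fbreg -24 simply rewrites that operand to -16; if the last
   operation cannot absorb the constant, a DW_OP_plus_uconst is appended
   for positive offsets, or a constant push followed by DW_OP_minus for
   negative ones.  */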
1530
1531 static void
1532 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1533 {
1534 dw_loc_descr_ref loc;
1535 HOST_WIDE_INT *p;
1536
1537 gcc_assert (*list_head != NULL);
1538
1539 if (known_eq (poly_offset, 0))
1540 return;
1541
1542 /* Find the end of the chain. */
1543 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1544 ;
1545
1546 HOST_WIDE_INT offset;
1547 if (!poly_offset.is_constant (&offset))
1548 {
1549 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1550 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1551 return;
1552 }
1553
1554 p = NULL;
1555 if (loc->dw_loc_opc == DW_OP_fbreg
1556 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1557 p = &loc->dw_loc_oprnd1.v.val_int;
1558 else if (loc->dw_loc_opc == DW_OP_bregx)
1559 p = &loc->dw_loc_oprnd2.v.val_int;
1560
1561 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1562 offset. Don't optimize if a signed integer overflow would happen. */
1563 if (p != NULL
1564 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1565 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1566 *p += offset;
1567
1568 else if (offset > 0)
1569 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1570
1571 else
1572 {
1573 loc->dw_loc_next
1574 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1575 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1576 }
1577 }
1578
1579 /* Return a pointer to a newly allocated location description for
1580 REG and OFFSET. */
1581
1582 static inline dw_loc_descr_ref
1583 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1584 {
1585 HOST_WIDE_INT const_offset;
1586 if (offset.is_constant (&const_offset))
1587 {
1588 if (reg <= 31)
1589 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1590 const_offset, 0);
1591 else
1592 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1593 }
1594 else
1595 {
1596 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1597 loc_descr_plus_const (&ret, offset);
1598 return ret;
1599 }
1600 }
1601
1602 /* Add a constant OFFSET to a location list. */
1603
1604 static void
1605 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1606 {
1607 dw_loc_list_ref d;
1608 for (d = list_head; d != NULL; d = d->dw_loc_next)
1609 loc_descr_plus_const (&d->expr, offset);
1610 }
1611
1612 #define DWARF_REF_SIZE \
1613 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1614
1615 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1616 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1617 DW_FORM_data16 with 128 bits. */
1618 #define DWARF_LARGEST_DATA_FORM_BITS \
1619 (dwarf_version >= 5 ? 128 : 64)
1620
1621 /* Utility inline function for construction of ops that were GNU extensions
1622 before DWARF 5. */
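/* For example, dwarf_OP (DW_OP_entry_value) yields DW_OP_entry_value when
   emitting DWARF 5 or later and DW_OP_GNU_entry_value otherwise, so callers
   can request the standard opcode unconditionally.  */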
1623 static inline enum dwarf_location_atom
1624 dwarf_OP (enum dwarf_location_atom op)
1625 {
1626 switch (op)
1627 {
1628 case DW_OP_implicit_pointer:
1629 if (dwarf_version < 5)
1630 return DW_OP_GNU_implicit_pointer;
1631 break;
1632
1633 case DW_OP_entry_value:
1634 if (dwarf_version < 5)
1635 return DW_OP_GNU_entry_value;
1636 break;
1637
1638 case DW_OP_const_type:
1639 if (dwarf_version < 5)
1640 return DW_OP_GNU_const_type;
1641 break;
1642
1643 case DW_OP_regval_type:
1644 if (dwarf_version < 5)
1645 return DW_OP_GNU_regval_type;
1646 break;
1647
1648 case DW_OP_deref_type:
1649 if (dwarf_version < 5)
1650 return DW_OP_GNU_deref_type;
1651 break;
1652
1653 case DW_OP_convert:
1654 if (dwarf_version < 5)
1655 return DW_OP_GNU_convert;
1656 break;
1657
1658 case DW_OP_reinterpret:
1659 if (dwarf_version < 5)
1660 return DW_OP_GNU_reinterpret;
1661 break;
1662
1663 case DW_OP_addrx:
1664 if (dwarf_version < 5)
1665 return DW_OP_GNU_addr_index;
1666 break;
1667
1668 case DW_OP_constx:
1669 if (dwarf_version < 5)
1670 return DW_OP_GNU_const_index;
1671 break;
1672
1673 default:
1674 break;
1675 }
1676 return op;
1677 }
1678
1679 /* Similarly for attributes. */
1680 static inline enum dwarf_attribute
1681 dwarf_AT (enum dwarf_attribute at)
1682 {
1683 switch (at)
1684 {
1685 case DW_AT_call_return_pc:
1686 if (dwarf_version < 5)
1687 return DW_AT_low_pc;
1688 break;
1689
1690 case DW_AT_call_tail_call:
1691 if (dwarf_version < 5)
1692 return DW_AT_GNU_tail_call;
1693 break;
1694
1695 case DW_AT_call_origin:
1696 if (dwarf_version < 5)
1697 return DW_AT_abstract_origin;
1698 break;
1699
1700 case DW_AT_call_target:
1701 if (dwarf_version < 5)
1702 return DW_AT_GNU_call_site_target;
1703 break;
1704
1705 case DW_AT_call_target_clobbered:
1706 if (dwarf_version < 5)
1707 return DW_AT_GNU_call_site_target_clobbered;
1708 break;
1709
1710 case DW_AT_call_parameter:
1711 if (dwarf_version < 5)
1712 return DW_AT_abstract_origin;
1713 break;
1714
1715 case DW_AT_call_value:
1716 if (dwarf_version < 5)
1717 return DW_AT_GNU_call_site_value;
1718 break;
1719
1720 case DW_AT_call_data_value:
1721 if (dwarf_version < 5)
1722 return DW_AT_GNU_call_site_data_value;
1723 break;
1724
1725 case DW_AT_call_all_calls:
1726 if (dwarf_version < 5)
1727 return DW_AT_GNU_all_call_sites;
1728 break;
1729
1730 case DW_AT_call_all_tail_calls:
1731 if (dwarf_version < 5)
1732 return DW_AT_GNU_all_tail_call_sites;
1733 break;
1734
1735 case DW_AT_dwo_name:
1736 if (dwarf_version < 5)
1737 return DW_AT_GNU_dwo_name;
1738 break;
1739
1740 case DW_AT_addr_base:
1741 if (dwarf_version < 5)
1742 return DW_AT_GNU_addr_base;
1743 break;
1744
1745 default:
1746 break;
1747 }
1748 return at;
1749 }
1750
1751 /* And similarly for tags. */
1752 static inline enum dwarf_tag
1753 dwarf_TAG (enum dwarf_tag tag)
1754 {
1755 switch (tag)
1756 {
1757 case DW_TAG_call_site:
1758 if (dwarf_version < 5)
1759 return DW_TAG_GNU_call_site;
1760 break;
1761
1762 case DW_TAG_call_site_parameter:
1763 if (dwarf_version < 5)
1764 return DW_TAG_GNU_call_site_parameter;
1765 break;
1766
1767 default:
1768 break;
1769 }
1770 return tag;
1771 }
1772
1773 /* And similarly for forms. */
1774 static inline enum dwarf_form
1775 dwarf_FORM (enum dwarf_form form)
1776 {
1777 switch (form)
1778 {
1779 case DW_FORM_addrx:
1780 if (dwarf_version < 5)
1781 return DW_FORM_GNU_addr_index;
1782 break;
1783
1784 case DW_FORM_strx:
1785 if (dwarf_version < 5)
1786 return DW_FORM_GNU_str_index;
1787 break;
1788
1789 default:
1790 break;
1791 }
1792 return form;
1793 }
1794
1795 static unsigned long int get_base_type_offset (dw_die_ref);
1796
1797 /* Return the size of a location descriptor. */
1798
1799 static unsigned long
1800 size_of_loc_descr (dw_loc_descr_ref loc)
1801 {
1802 unsigned long size = 1;
1803
1804 switch (loc->dw_loc_opc)
1805 {
1806 case DW_OP_addr:
1807 size += DWARF2_ADDR_SIZE;
1808 break;
1809 case DW_OP_GNU_addr_index:
1810 case DW_OP_addrx:
1811 case DW_OP_GNU_const_index:
1812 case DW_OP_constx:
1813 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1814 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1815 break;
1816 case DW_OP_const1u:
1817 case DW_OP_const1s:
1818 size += 1;
1819 break;
1820 case DW_OP_const2u:
1821 case DW_OP_const2s:
1822 size += 2;
1823 break;
1824 case DW_OP_const4u:
1825 case DW_OP_const4s:
1826 size += 4;
1827 break;
1828 case DW_OP_const8u:
1829 case DW_OP_const8s:
1830 size += 8;
1831 break;
1832 case DW_OP_constu:
1833 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1834 break;
1835 case DW_OP_consts:
1836 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1837 break;
1838 case DW_OP_pick:
1839 size += 1;
1840 break;
1841 case DW_OP_plus_uconst:
1842 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1843 break;
1844 case DW_OP_skip:
1845 case DW_OP_bra:
1846 size += 2;
1847 break;
1848 case DW_OP_breg0:
1849 case DW_OP_breg1:
1850 case DW_OP_breg2:
1851 case DW_OP_breg3:
1852 case DW_OP_breg4:
1853 case DW_OP_breg5:
1854 case DW_OP_breg6:
1855 case DW_OP_breg7:
1856 case DW_OP_breg8:
1857 case DW_OP_breg9:
1858 case DW_OP_breg10:
1859 case DW_OP_breg11:
1860 case DW_OP_breg12:
1861 case DW_OP_breg13:
1862 case DW_OP_breg14:
1863 case DW_OP_breg15:
1864 case DW_OP_breg16:
1865 case DW_OP_breg17:
1866 case DW_OP_breg18:
1867 case DW_OP_breg19:
1868 case DW_OP_breg20:
1869 case DW_OP_breg21:
1870 case DW_OP_breg22:
1871 case DW_OP_breg23:
1872 case DW_OP_breg24:
1873 case DW_OP_breg25:
1874 case DW_OP_breg26:
1875 case DW_OP_breg27:
1876 case DW_OP_breg28:
1877 case DW_OP_breg29:
1878 case DW_OP_breg30:
1879 case DW_OP_breg31:
1880 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1881 break;
1882 case DW_OP_regx:
1883 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1884 break;
1885 case DW_OP_fbreg:
1886 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1887 break;
1888 case DW_OP_bregx:
1889 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1890 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1891 break;
1892 case DW_OP_piece:
1893 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1894 break;
1895 case DW_OP_bit_piece:
1896 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1897 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1898 break;
1899 case DW_OP_deref_size:
1900 case DW_OP_xderef_size:
1901 size += 1;
1902 break;
1903 case DW_OP_call2:
1904 size += 2;
1905 break;
1906 case DW_OP_call4:
1907 size += 4;
1908 break;
1909 case DW_OP_call_ref:
1910 case DW_OP_GNU_variable_value:
1911 size += DWARF_REF_SIZE;
1912 break;
1913 case DW_OP_implicit_value:
1914 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1915 + loc->dw_loc_oprnd1.v.val_unsigned;
1916 break;
1917 case DW_OP_implicit_pointer:
1918 case DW_OP_GNU_implicit_pointer:
1919 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1920 break;
1921 case DW_OP_entry_value:
1922 case DW_OP_GNU_entry_value:
1923 {
1924 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1925 size += size_of_uleb128 (op_size) + op_size;
1926 break;
1927 }
1928 case DW_OP_const_type:
1929 case DW_OP_GNU_const_type:
1930 {
1931 unsigned long o
1932 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1933 size += size_of_uleb128 (o) + 1;
1934 switch (loc->dw_loc_oprnd2.val_class)
1935 {
1936 case dw_val_class_vec:
1937 size += loc->dw_loc_oprnd2.v.val_vec.length
1938 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1939 break;
1940 case dw_val_class_const:
1941 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1942 break;
1943 case dw_val_class_const_double:
1944 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1945 break;
1946 case dw_val_class_wide_int:
1947 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1948 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1949 break;
1950 default:
1951 gcc_unreachable ();
1952 }
1953 break;
1954 }
1955 case DW_OP_regval_type:
1956 case DW_OP_GNU_regval_type:
1957 {
1958 unsigned long o
1959 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1960 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1961 + size_of_uleb128 (o);
1962 }
1963 break;
1964 case DW_OP_deref_type:
1965 case DW_OP_GNU_deref_type:
1966 {
1967 unsigned long o
1968 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1969 size += 1 + size_of_uleb128 (o);
1970 }
1971 break;
1972 case DW_OP_convert:
1973 case DW_OP_reinterpret:
1974 case DW_OP_GNU_convert:
1975 case DW_OP_GNU_reinterpret:
1976 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1977 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1978 else
1979 {
1980 unsigned long o
1981 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1982 size += size_of_uleb128 (o);
1983 }
1984 break;
1985 case DW_OP_GNU_parameter_ref:
1986 size += 4;
1987 break;
1988 default:
1989 break;
1990 }
1991
1992 return size;
1993 }
1994
1995 /* Return the size of a series of location descriptors. */
1996
1997 unsigned long
1998 size_of_locs (dw_loc_descr_ref loc)
1999 {
2000 dw_loc_descr_ref l;
2001 unsigned long size;
2002
2003 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
2004 field, to avoid writing to a PCH file. */
2005 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2006 {
2007 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
2008 break;
2009 size += size_of_loc_descr (l);
2010 }
2011 if (! l)
2012 return size;
2013
2014 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2015 {
2016 l->dw_loc_addr = size;
2017 size += size_of_loc_descr (l);
2018 }
2019
2020 return size;
2021 }
2022
2023 /* Return the size of the value in a DW_AT_discr_value attribute. */
2024
2025 static int
2026 size_of_discr_value (dw_discr_value *discr_value)
2027 {
2028 if (discr_value->pos)
2029 return size_of_uleb128 (discr_value->v.uval);
2030 else
2031 return size_of_sleb128 (discr_value->v.sval);
2032 }
2033
2034 /* Return the size of the value in a DW_AT_discr_list attribute. */
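/* For instance, a list holding the single label 5 followed by the range
   10 .. 20 takes (1 + 1) + (1 + 1 + 1) = 5 bytes, assuming unsigned
   (ULEB128-encoded) discriminant values. */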
2035
2036 static int
2037 size_of_discr_list (dw_discr_list_ref discr_list)
2038 {
2039 int size = 0;
2040
2041 for (dw_discr_list_ref list = discr_list;
2042 list != NULL;
2043 list = list->dw_discr_next)
2044 {
2045 /* One byte for the discriminant value descriptor, and then one or two
2046 LEB128 numbers, depending on whether it's a single case label or a
2047 range label. */
2048 size += 1;
2049 size += size_of_discr_value (&list->dw_discr_lower_bound);
2050 if (list->dw_discr_range != 0)
2051 size += size_of_discr_value (&list->dw_discr_upper_bound);
2052 }
2053 return size;
2054 }
2055
2056 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2057 static void get_ref_die_offset_label (char *, dw_die_ref);
2058 static unsigned long int get_ref_die_offset (dw_die_ref);
2059
2060 /* Output location description stack opcode's operands (if any).
2061 The for_eh_or_skip parameter controls whether register numbers are
2062 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2063 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2064 info). This should be suppressed for the cases that have not been converted
2065 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2066
2067 static void
2068 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2069 {
2070 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2071 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2072
2073 switch (loc->dw_loc_opc)
2074 {
2075 #ifdef DWARF2_DEBUGGING_INFO
2076 case DW_OP_const2u:
2077 case DW_OP_const2s:
2078 dw2_asm_output_data (2, val1->v.val_int, NULL);
2079 break;
2080 case DW_OP_const4u:
2081 if (loc->dtprel)
2082 {
2083 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2084 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2085 val1->v.val_addr);
2086 fputc ('\n', asm_out_file);
2087 break;
2088 }
2089 /* FALLTHRU */
2090 case DW_OP_const4s:
2091 dw2_asm_output_data (4, val1->v.val_int, NULL);
2092 break;
2093 case DW_OP_const8u:
2094 if (loc->dtprel)
2095 {
2096 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2097 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2098 val1->v.val_addr);
2099 fputc ('\n', asm_out_file);
2100 break;
2101 }
2102 /* FALLTHRU */
2103 case DW_OP_const8s:
2104 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2105 dw2_asm_output_data (8, val1->v.val_int, NULL);
2106 break;
2107 case DW_OP_skip:
2108 case DW_OP_bra:
2109 {
2110 int offset;
2111
2112 gcc_assert (val1->val_class == dw_val_class_loc);
2113 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2114
2115 dw2_asm_output_data (2, offset, NULL);
2116 }
2117 break;
2118 case DW_OP_implicit_value:
2119 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2120 switch (val2->val_class)
2121 {
2122 case dw_val_class_const:
2123 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2124 break;
2125 case dw_val_class_vec:
2126 {
2127 unsigned int elt_size = val2->v.val_vec.elt_size;
2128 unsigned int len = val2->v.val_vec.length;
2129 unsigned int i;
2130 unsigned char *p;
2131
2132 if (elt_size > sizeof (HOST_WIDE_INT))
2133 {
2134 elt_size /= 2;
2135 len *= 2;
2136 }
2137 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2138 i < len;
2139 i++, p += elt_size)
2140 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2141 "fp or vector constant word %u", i);
2142 }
2143 break;
2144 case dw_val_class_const_double:
2145 {
2146 unsigned HOST_WIDE_INT first, second;
2147
2148 if (WORDS_BIG_ENDIAN)
2149 {
2150 first = val2->v.val_double.high;
2151 second = val2->v.val_double.low;
2152 }
2153 else
2154 {
2155 first = val2->v.val_double.low;
2156 second = val2->v.val_double.high;
2157 }
2158 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2159 first, NULL);
2160 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2161 second, NULL);
2162 }
2163 break;
2164 case dw_val_class_wide_int:
2165 {
2166 int i;
2167 int len = get_full_len (*val2->v.val_wide);
2168 if (WORDS_BIG_ENDIAN)
2169 for (i = len - 1; i >= 0; --i)
2170 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2171 val2->v.val_wide->elt (i), NULL);
2172 else
2173 for (i = 0; i < len; ++i)
2174 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2175 val2->v.val_wide->elt (i), NULL);
2176 }
2177 break;
2178 case dw_val_class_addr:
2179 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2180 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2181 break;
2182 default:
2183 gcc_unreachable ();
2184 }
2185 break;
2186 #else
2187 case DW_OP_const2u:
2188 case DW_OP_const2s:
2189 case DW_OP_const4u:
2190 case DW_OP_const4s:
2191 case DW_OP_const8u:
2192 case DW_OP_const8s:
2193 case DW_OP_skip:
2194 case DW_OP_bra:
2195 case DW_OP_implicit_value:
2196 /* We currently don't make any attempt to make sure these are
2197 aligned properly like we do for the main unwind info, so
2198 we don't support emitting anything larger than a byte if we're
2199 only doing unwinding. */
2200 gcc_unreachable ();
2201 #endif
2202 case DW_OP_const1u:
2203 case DW_OP_const1s:
2204 dw2_asm_output_data (1, val1->v.val_int, NULL);
2205 break;
2206 case DW_OP_constu:
2207 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2208 break;
2209 case DW_OP_consts:
2210 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2211 break;
2212 case DW_OP_pick:
2213 dw2_asm_output_data (1, val1->v.val_int, NULL);
2214 break;
2215 case DW_OP_plus_uconst:
2216 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2217 break;
2218 case DW_OP_breg0:
2219 case DW_OP_breg1:
2220 case DW_OP_breg2:
2221 case DW_OP_breg3:
2222 case DW_OP_breg4:
2223 case DW_OP_breg5:
2224 case DW_OP_breg6:
2225 case DW_OP_breg7:
2226 case DW_OP_breg8:
2227 case DW_OP_breg9:
2228 case DW_OP_breg10:
2229 case DW_OP_breg11:
2230 case DW_OP_breg12:
2231 case DW_OP_breg13:
2232 case DW_OP_breg14:
2233 case DW_OP_breg15:
2234 case DW_OP_breg16:
2235 case DW_OP_breg17:
2236 case DW_OP_breg18:
2237 case DW_OP_breg19:
2238 case DW_OP_breg20:
2239 case DW_OP_breg21:
2240 case DW_OP_breg22:
2241 case DW_OP_breg23:
2242 case DW_OP_breg24:
2243 case DW_OP_breg25:
2244 case DW_OP_breg26:
2245 case DW_OP_breg27:
2246 case DW_OP_breg28:
2247 case DW_OP_breg29:
2248 case DW_OP_breg30:
2249 case DW_OP_breg31:
2250 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2251 break;
2252 case DW_OP_regx:
2253 {
2254 unsigned r = val1->v.val_unsigned;
2255 if (for_eh_or_skip >= 0)
2256 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2257 gcc_assert (size_of_uleb128 (r)
2258 == size_of_uleb128 (val1->v.val_unsigned));
2259 dw2_asm_output_data_uleb128 (r, NULL);
2260 }
2261 break;
2262 case DW_OP_fbreg:
2263 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2264 break;
2265 case DW_OP_bregx:
2266 {
2267 unsigned r = val1->v.val_unsigned;
2268 if (for_eh_or_skip >= 0)
2269 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2270 gcc_assert (size_of_uleb128 (r)
2271 == size_of_uleb128 (val1->v.val_unsigned));
2272 dw2_asm_output_data_uleb128 (r, NULL);
2273 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2274 }
2275 break;
2276 case DW_OP_piece:
2277 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2278 break;
2279 case DW_OP_bit_piece:
2280 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2281 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2282 break;
2283 case DW_OP_deref_size:
2284 case DW_OP_xderef_size:
2285 dw2_asm_output_data (1, val1->v.val_int, NULL);
2286 break;
2287
2288 case DW_OP_addr:
2289 if (loc->dtprel)
2290 {
2291 if (targetm.asm_out.output_dwarf_dtprel)
2292 {
2293 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2294 DWARF2_ADDR_SIZE,
2295 val1->v.val_addr);
2296 fputc ('\n', asm_out_file);
2297 }
2298 else
2299 gcc_unreachable ();
2300 }
2301 else
2302 {
2303 #ifdef DWARF2_DEBUGGING_INFO
2304 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2305 #else
2306 gcc_unreachable ();
2307 #endif
2308 }
2309 break;
2310
2311 case DW_OP_GNU_addr_index:
2312 case DW_OP_addrx:
2313 case DW_OP_GNU_const_index:
2314 case DW_OP_constx:
2315 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2316 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2317 "(index into .debug_addr)");
2318 break;
2319
2320 case DW_OP_call2:
2321 case DW_OP_call4:
2322 {
2323 unsigned long die_offset
2324 = get_ref_die_offset (val1->v.val_die_ref.die);
2325 /* Make sure the offset has been computed and that we can encode it as
2326 an operand. */
2327 gcc_assert (die_offset > 0
2328 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2329 ? 0xffff
2330 : 0xffffffff));
2331 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2332 die_offset, NULL);
2333 }
2334 break;
2335
2336 case DW_OP_call_ref:
2337 case DW_OP_GNU_variable_value:
2338 {
2339 char label[MAX_ARTIFICIAL_LABEL_BYTES
2340 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2341 gcc_assert (val1->val_class == dw_val_class_die_ref);
2342 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2343 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2344 }
2345 break;
2346
2347 case DW_OP_implicit_pointer:
2348 case DW_OP_GNU_implicit_pointer:
2349 {
2350 char label[MAX_ARTIFICIAL_LABEL_BYTES
2351 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2352 gcc_assert (val1->val_class == dw_val_class_die_ref);
2353 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2354 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2355 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2356 }
2357 break;
2358
2359 case DW_OP_entry_value:
2360 case DW_OP_GNU_entry_value:
2361 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2362 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2363 break;
2364
2365 case DW_OP_const_type:
2366 case DW_OP_GNU_const_type:
2367 {
2368 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2369 gcc_assert (o);
2370 dw2_asm_output_data_uleb128 (o, NULL);
2371 switch (val2->val_class)
2372 {
2373 case dw_val_class_const:
2374 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2375 dw2_asm_output_data (1, l, NULL);
2376 dw2_asm_output_data (l, val2->v.val_int, NULL);
2377 break;
2378 case dw_val_class_vec:
2379 {
2380 unsigned int elt_size = val2->v.val_vec.elt_size;
2381 unsigned int len = val2->v.val_vec.length;
2382 unsigned int i;
2383 unsigned char *p;
2384
2385 l = len * elt_size;
2386 dw2_asm_output_data (1, l, NULL);
2387 if (elt_size > sizeof (HOST_WIDE_INT))
2388 {
2389 elt_size /= 2;
2390 len *= 2;
2391 }
2392 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2393 i < len;
2394 i++, p += elt_size)
2395 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2396 "fp or vector constant word %u", i);
2397 }
2398 break;
2399 case dw_val_class_const_double:
2400 {
2401 unsigned HOST_WIDE_INT first, second;
2402 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2403
2404 dw2_asm_output_data (1, 2 * l, NULL);
2405 if (WORDS_BIG_ENDIAN)
2406 {
2407 first = val2->v.val_double.high;
2408 second = val2->v.val_double.low;
2409 }
2410 else
2411 {
2412 first = val2->v.val_double.low;
2413 second = val2->v.val_double.high;
2414 }
2415 dw2_asm_output_data (l, first, NULL);
2416 dw2_asm_output_data (l, second, NULL);
2417 }
2418 break;
2419 case dw_val_class_wide_int:
2420 {
2421 int i;
2422 int len = get_full_len (*val2->v.val_wide);
2423 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2424
2425 dw2_asm_output_data (1, len * l, NULL);
2426 if (WORDS_BIG_ENDIAN)
2427 for (i = len - 1; i >= 0; --i)
2428 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2429 else
2430 for (i = 0; i < len; ++i)
2431 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2432 }
2433 break;
2434 default:
2435 gcc_unreachable ();
2436 }
2437 }
2438 break;
2439 case DW_OP_regval_type:
2440 case DW_OP_GNU_regval_type:
2441 {
2442 unsigned r = val1->v.val_unsigned;
2443 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2444 gcc_assert (o);
2445 if (for_eh_or_skip >= 0)
2446 {
2447 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2448 gcc_assert (size_of_uleb128 (r)
2449 == size_of_uleb128 (val1->v.val_unsigned));
2450 }
2451 dw2_asm_output_data_uleb128 (r, NULL);
2452 dw2_asm_output_data_uleb128 (o, NULL);
2453 }
2454 break;
2455 case DW_OP_deref_type:
2456 case DW_OP_GNU_deref_type:
2457 {
2458 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2459 gcc_assert (o);
2460 dw2_asm_output_data (1, val1->v.val_int, NULL);
2461 dw2_asm_output_data_uleb128 (o, NULL);
2462 }
2463 break;
2464 case DW_OP_convert:
2465 case DW_OP_reinterpret:
2466 case DW_OP_GNU_convert:
2467 case DW_OP_GNU_reinterpret:
2468 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2469 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2470 else
2471 {
2472 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2473 gcc_assert (o);
2474 dw2_asm_output_data_uleb128 (o, NULL);
2475 }
2476 break;
2477
2478 case DW_OP_GNU_parameter_ref:
2479 {
2480 unsigned long o;
2481 gcc_assert (val1->val_class == dw_val_class_die_ref);
2482 o = get_ref_die_offset (val1->v.val_die_ref.die);
2483 dw2_asm_output_data (4, o, NULL);
2484 }
2485 break;
2486
2487 default:
2488 /* Other codes have no operands. */
2489 break;
2490 }
2491 }
2492
2493 /* Output a sequence of location operations.
2494 The for_eh_or_skip parameter controls whether register numbers are
2495 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2496 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2497 info). This should be suppressed for the cases that have not been converted
2498 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2499
2500 void
2501 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2502 {
2503 for (; loc != NULL; loc = loc->dw_loc_next)
2504 {
2505 enum dwarf_location_atom opc = loc->dw_loc_opc;
2506 /* Output the opcode. */
2507 if (for_eh_or_skip >= 0
2508 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2509 {
2510 unsigned r = (opc - DW_OP_breg0);
2511 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2512 gcc_assert (r <= 31);
2513 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2514 }
2515 else if (for_eh_or_skip >= 0
2516 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2517 {
2518 unsigned r = (opc - DW_OP_reg0);
2519 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2520 gcc_assert (r <= 31);
2521 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2522 }
2523
2524 dw2_asm_output_data (1, opc,
2525 "%s", dwarf_stack_op_name (opc));
2526
2527 /* Output the operand(s) (if any). */
2528 output_loc_operands (loc, for_eh_or_skip);
2529 }
2530 }
2531
2532 /* Output location description stack opcode's operands (if any).
2533 The output is single bytes on a line, suitable for .cfi_escape. */
2534
2535 static void
2536 output_loc_operands_raw (dw_loc_descr_ref loc)
2537 {
2538 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2539 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2540
2541 switch (loc->dw_loc_opc)
2542 {
2543 case DW_OP_addr:
2544 case DW_OP_GNU_addr_index:
2545 case DW_OP_addrx:
2546 case DW_OP_GNU_const_index:
2547 case DW_OP_constx:
2548 case DW_OP_implicit_value:
2549 /* We cannot output addresses in .cfi_escape, only bytes. */
2550 gcc_unreachable ();
2551
2552 case DW_OP_const1u:
2553 case DW_OP_const1s:
2554 case DW_OP_pick:
2555 case DW_OP_deref_size:
2556 case DW_OP_xderef_size:
2557 fputc (',', asm_out_file);
2558 dw2_asm_output_data_raw (1, val1->v.val_int);
2559 break;
2560
2561 case DW_OP_const2u:
2562 case DW_OP_const2s:
2563 fputc (',', asm_out_file);
2564 dw2_asm_output_data_raw (2, val1->v.val_int);
2565 break;
2566
2567 case DW_OP_const4u:
2568 case DW_OP_const4s:
2569 fputc (',', asm_out_file);
2570 dw2_asm_output_data_raw (4, val1->v.val_int);
2571 break;
2572
2573 case DW_OP_const8u:
2574 case DW_OP_const8s:
2575 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2576 fputc (',', asm_out_file);
2577 dw2_asm_output_data_raw (8, val1->v.val_int);
2578 break;
2579
2580 case DW_OP_skip:
2581 case DW_OP_bra:
2582 {
2583 int offset;
2584
2585 gcc_assert (val1->val_class == dw_val_class_loc);
2586 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2587
2588 fputc (',', asm_out_file);
2589 dw2_asm_output_data_raw (2, offset);
2590 }
2591 break;
2592
2593 case DW_OP_regx:
2594 {
2595 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2596 gcc_assert (size_of_uleb128 (r)
2597 == size_of_uleb128 (val1->v.val_unsigned));
2598 fputc (',', asm_out_file);
2599 dw2_asm_output_data_uleb128_raw (r);
2600 }
2601 break;
2602
2603 case DW_OP_constu:
2604 case DW_OP_plus_uconst:
2605 case DW_OP_piece:
2606 fputc (',', asm_out_file);
2607 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2608 break;
2609
2610 case DW_OP_bit_piece:
2611 fputc (',', asm_out_file);
2612 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2613 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2614 break;
2615
2616 case DW_OP_consts:
2617 case DW_OP_breg0:
2618 case DW_OP_breg1:
2619 case DW_OP_breg2:
2620 case DW_OP_breg3:
2621 case DW_OP_breg4:
2622 case DW_OP_breg5:
2623 case DW_OP_breg6:
2624 case DW_OP_breg7:
2625 case DW_OP_breg8:
2626 case DW_OP_breg9:
2627 case DW_OP_breg10:
2628 case DW_OP_breg11:
2629 case DW_OP_breg12:
2630 case DW_OP_breg13:
2631 case DW_OP_breg14:
2632 case DW_OP_breg15:
2633 case DW_OP_breg16:
2634 case DW_OP_breg17:
2635 case DW_OP_breg18:
2636 case DW_OP_breg19:
2637 case DW_OP_breg20:
2638 case DW_OP_breg21:
2639 case DW_OP_breg22:
2640 case DW_OP_breg23:
2641 case DW_OP_breg24:
2642 case DW_OP_breg25:
2643 case DW_OP_breg26:
2644 case DW_OP_breg27:
2645 case DW_OP_breg28:
2646 case DW_OP_breg29:
2647 case DW_OP_breg30:
2648 case DW_OP_breg31:
2649 case DW_OP_fbreg:
2650 fputc (',', asm_out_file);
2651 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2652 break;
2653
2654 case DW_OP_bregx:
2655 {
2656 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2657 gcc_assert (size_of_uleb128 (r)
2658 == size_of_uleb128 (val1->v.val_unsigned));
2659 fputc (',', asm_out_file);
2660 dw2_asm_output_data_uleb128_raw (r);
2661 fputc (',', asm_out_file);
2662 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2663 }
2664 break;
2665
2666 case DW_OP_implicit_pointer:
2667 case DW_OP_entry_value:
2668 case DW_OP_const_type:
2669 case DW_OP_regval_type:
2670 case DW_OP_deref_type:
2671 case DW_OP_convert:
2672 case DW_OP_reinterpret:
2673 case DW_OP_GNU_implicit_pointer:
2674 case DW_OP_GNU_entry_value:
2675 case DW_OP_GNU_const_type:
2676 case DW_OP_GNU_regval_type:
2677 case DW_OP_GNU_deref_type:
2678 case DW_OP_GNU_convert:
2679 case DW_OP_GNU_reinterpret:
2680 case DW_OP_GNU_parameter_ref:
2681 gcc_unreachable ();
2682 break;
2683
2684 default:
2685 /* Other codes have no operands. */
2686 break;
2687 }
2688 }
2689
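/* Output a sequence of location operations as comma-separated bytes
   suitable for a .cfi_escape directive. For instance, a DW_OP_bregx
   for DWARF register 6 with offset -8 would come out as "0x92,0x6,0x78":
   the opcode byte, the ULEB128 register number and the SLEB128 offset
   (register and offset values here are purely illustrative). */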
2690 void
2691 output_loc_sequence_raw (dw_loc_descr_ref loc)
2692 {
2693 while (1)
2694 {
2695 enum dwarf_location_atom opc = loc->dw_loc_opc;
2696 /* Output the opcode. */
2697 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2698 {
2699 unsigned r = (opc - DW_OP_breg0);
2700 r = DWARF2_FRAME_REG_OUT (r, 1);
2701 gcc_assert (r <= 31);
2702 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2703 }
2704 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2705 {
2706 unsigned r = (opc - DW_OP_reg0);
2707 r = DWARF2_FRAME_REG_OUT (r, 1);
2708 gcc_assert (r <= 31);
2709 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2710 }
2711 /* Output the opcode. */
2712 fprintf (asm_out_file, "%#x", opc);
2713 output_loc_operands_raw (loc);
2714
2715 if (!loc->dw_loc_next)
2716 break;
2717 loc = loc->dw_loc_next;
2718
2719 fputc (',', asm_out_file);
2720 }
2721 }
2722
2723 /* This function builds a dwarf location descriptor sequence from a
2724 dw_cfa_location, adding the given OFFSET to the result of the
2725 expression. */
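/* For instance, with a non-indirect CFA of register 6 plus 16 and an
   OFFSET of 8, the result is the single operation DW_OP_breg6 24; in the
   indirect case the sequence becomes DW_OP_breg6 <base_offset>,
   DW_OP_deref, DW_OP_plus_uconst 24 (register and offsets are only
   illustrative). */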
2726
2727 struct dw_loc_descr_node *
2728 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2729 {
2730 struct dw_loc_descr_node *head, *tmp;
2731
2732 offset += cfa->offset;
2733
2734 if (cfa->indirect)
2735 {
2736 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2737 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2738 head->dw_loc_oprnd1.val_entry = NULL;
2739 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2740 add_loc_descr (&head, tmp);
2741 loc_descr_plus_const (&head, offset);
2742 }
2743 else
2744 head = new_reg_loc_descr (cfa->reg, offset);
2745
2746 return head;
2747 }
2748
2749 /* This function builds a dwarf location descriptor sequence for
2750 the address at OFFSET from the CFA when the stack is aligned to an
2751 ALIGNMENT-byte boundary. */
2752
2753 struct dw_loc_descr_node *
2754 build_cfa_aligned_loc (dw_cfa_location *cfa,
2755 poly_int64 offset, HOST_WIDE_INT alignment)
2756 {
2757 struct dw_loc_descr_node *head;
2758 unsigned int dwarf_fp
2759 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2760
2761 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2762 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2763 {
2764 head = new_reg_loc_descr (dwarf_fp, 0);
2765 add_loc_descr (&head, int_loc_descriptor (alignment));
2766 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2767 loc_descr_plus_const (&head, offset);
2768 }
2769 else
2770 head = new_reg_loc_descr (dwarf_fp, offset);
2771 return head;
2772 }
2773 \f
2774 /* And now, the support for symbolic debugging information. */
2775
2776 /* .debug_str support. */
2777
2778 static void dwarf2out_init (const char *);
2779 static void dwarf2out_finish (const char *);
2780 static void dwarf2out_early_finish (const char *);
2781 static void dwarf2out_assembly_start (void);
2782 static void dwarf2out_define (unsigned int, const char *);
2783 static void dwarf2out_undef (unsigned int, const char *);
2784 static void dwarf2out_start_source_file (unsigned, const char *);
2785 static void dwarf2out_end_source_file (unsigned);
2786 static void dwarf2out_function_decl (tree);
2787 static void dwarf2out_begin_block (unsigned, unsigned);
2788 static void dwarf2out_end_block (unsigned, unsigned);
2789 static bool dwarf2out_ignore_block (const_tree);
2790 static void dwarf2out_early_global_decl (tree);
2791 static void dwarf2out_late_global_decl (tree);
2792 static void dwarf2out_type_decl (tree, int);
2793 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2794 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2795 dw_die_ref);
2796 static void dwarf2out_abstract_function (tree);
2797 static void dwarf2out_var_location (rtx_insn *);
2798 static void dwarf2out_inline_entry (tree);
2799 static void dwarf2out_size_function (tree);
2800 static void dwarf2out_begin_function (tree);
2801 static void dwarf2out_end_function (unsigned int);
2802 static void dwarf2out_register_main_translation_unit (tree unit);
2803 static void dwarf2out_set_name (tree, tree);
2804 static void dwarf2out_register_external_die (tree decl, const char *sym,
2805 unsigned HOST_WIDE_INT off);
2806 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2807 unsigned HOST_WIDE_INT *off);
2808
2809 /* The debug hooks structure. */
2810
2811 const struct gcc_debug_hooks dwarf2_debug_hooks =
2812 {
2813 dwarf2out_init,
2814 dwarf2out_finish,
2815 dwarf2out_early_finish,
2816 dwarf2out_assembly_start,
2817 dwarf2out_define,
2818 dwarf2out_undef,
2819 dwarf2out_start_source_file,
2820 dwarf2out_end_source_file,
2821 dwarf2out_begin_block,
2822 dwarf2out_end_block,
2823 dwarf2out_ignore_block,
2824 dwarf2out_source_line,
2825 dwarf2out_begin_prologue,
2826 #if VMS_DEBUGGING_INFO
2827 dwarf2out_vms_end_prologue,
2828 dwarf2out_vms_begin_epilogue,
2829 #else
2830 debug_nothing_int_charstar,
2831 debug_nothing_int_charstar,
2832 #endif
2833 dwarf2out_end_epilogue,
2834 dwarf2out_begin_function,
2835 dwarf2out_end_function, /* end_function */
2836 dwarf2out_register_main_translation_unit,
2837 dwarf2out_function_decl, /* function_decl */
2838 dwarf2out_early_global_decl,
2839 dwarf2out_late_global_decl,
2840 dwarf2out_type_decl, /* type_decl */
2841 dwarf2out_imported_module_or_decl,
2842 dwarf2out_die_ref_for_decl,
2843 dwarf2out_register_external_die,
2844 debug_nothing_tree, /* deferred_inline_function */
2845 /* The DWARF 2 backend tries to reduce debugging bloat by not
2846 emitting the abstract description of inline functions until
2847 something tries to reference them. */
2848 dwarf2out_abstract_function, /* outlining_inline_function */
2849 debug_nothing_rtx_code_label, /* label */
2850 debug_nothing_int, /* handle_pch */
2851 dwarf2out_var_location,
2852 dwarf2out_inline_entry, /* inline_entry */
2853 dwarf2out_size_function, /* size_function */
2854 dwarf2out_switch_text_section,
2855 dwarf2out_set_name,
2856 1, /* start_end_main_source_file */
2857 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2858 };
2859
2860 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2861 {
2862 dwarf2out_init,
2863 debug_nothing_charstar,
2864 debug_nothing_charstar,
2865 dwarf2out_assembly_start,
2866 debug_nothing_int_charstar,
2867 debug_nothing_int_charstar,
2868 debug_nothing_int_charstar,
2869 debug_nothing_int,
2870 debug_nothing_int_int, /* begin_block */
2871 debug_nothing_int_int, /* end_block */
2872 debug_true_const_tree, /* ignore_block */
2873 dwarf2out_source_line, /* source_line */
2874 debug_nothing_int_int_charstar, /* begin_prologue */
2875 debug_nothing_int_charstar, /* end_prologue */
2876 debug_nothing_int_charstar, /* begin_epilogue */
2877 debug_nothing_int_charstar, /* end_epilogue */
2878 debug_nothing_tree, /* begin_function */
2879 debug_nothing_int, /* end_function */
2880 debug_nothing_tree, /* register_main_translation_unit */
2881 debug_nothing_tree, /* function_decl */
2882 debug_nothing_tree, /* early_global_decl */
2883 debug_nothing_tree, /* late_global_decl */
2884 debug_nothing_tree_int, /* type_decl */
2885 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2886 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2887 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2888 debug_nothing_tree, /* deferred_inline_function */
2889 debug_nothing_tree, /* outlining_inline_function */
2890 debug_nothing_rtx_code_label, /* label */
2891 debug_nothing_int, /* handle_pch */
2892 debug_nothing_rtx_insn, /* var_location */
2893 debug_nothing_tree, /* inline_entry */
2894 debug_nothing_tree, /* size_function */
2895 debug_nothing_void, /* switch_text_section */
2896 debug_nothing_tree_tree, /* set_name */
2897 0, /* start_end_main_source_file */
2898 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2899 };
2900 \f
2901 /* NOTE: In the comments in this file, many references are made to
2902 "Debugging Information Entries". This term is abbreviated as `DIE'
2903 throughout the remainder of this file. */
2904
2905 /* An internal representation of the DWARF output is built, and then
2906 walked to generate the DWARF debugging info. The walk of the internal
2907 representation is done after the entire program has been compiled.
2908 The types below are used to describe the internal representation. */
2909
2910 /* Whether to put type DIEs into their own section .debug_types instead
2911 of making them part of the .debug_info section. Only supported for
2912 DWARF 4 or higher, and only when the user hasn't disabled it with
2913 -fno-debug-types-section. It is more efficient to put them in
2914 separate comdat sections since the linker will then be able to
2915 remove duplicates. But not all tools support .debug_types sections
2916 yet. For DWARF 5 or higher .debug_types doesn't exist any more;
2917 type units instead use the DW_UT_type unit type in .debug_info.
2918 For late LTO debug there should be almost no types emitted, so avoid
2919 enabling -fdebug-types-section there. */
2920
2921 #define use_debug_types (dwarf_version >= 4 \
2922 && flag_debug_types_section \
2923 && !in_lto_p)
2924
2925 /* Various DIE's use offsets relative to the beginning of the
2926 .debug_info section to refer to each other. */
2927
2928 typedef long int dw_offset;
2929
2930 struct comdat_type_node;
2931
2932 /* The entries in the line_info table more-or-less mirror the opcodes
2933 that are used in the real dwarf line table. Arrays of these entries
2934 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2935 supported. */
2936
2937 enum dw_line_info_opcode {
2938 /* Emit DW_LNE_set_address; the operand is the label index. */
2939 LI_set_address,
2940
2941 /* Emit a row to the matrix with the given line. This may be done
2942 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2943 special opcodes. */
2944 LI_set_line,
2945
2946 /* Emit a DW_LNS_set_file. */
2947 LI_set_file,
2948
2949 /* Emit a DW_LNS_set_column. */
2950 LI_set_column,
2951
2952 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2953 LI_negate_stmt,
2954
2955 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2956 LI_set_prologue_end,
2957 LI_set_epilogue_begin,
2958
2959 /* Emit a DW_LNE_set_discriminator. */
2960 LI_set_discriminator,
2961
2962 /* Output a Fixed Advance PC; the target PC is the label index; the
2963 base PC is the previous LI_adv_address or LI_set_address entry.
2964 We only use this when emitting debug views without assembler
2965 support, at explicit user request. Ideally, we should only use
2966 it when the offset might be zero but we can't tell: it's the only
2967 way to maybe change the PC without resetting the view number. */
2968 LI_adv_address
2969 };
2970
2971 typedef struct GTY(()) dw_line_info_struct {
2972 enum dw_line_info_opcode opcode;
2973 unsigned int val;
2974 } dw_line_info_entry;
2975
2976
2977 struct GTY(()) dw_line_info_table {
2978 /* The label that marks the end of this section. */
2979 const char *end_label;
2980
2981 /* The values for the last row of the matrix, as collected in the table.
2982 These are used to minimize the changes to the next row. */
2983 unsigned int file_num;
2984 unsigned int line_num;
2985 unsigned int column_num;
2986 int discrim_num;
2987 bool is_stmt;
2988 bool in_use;
2989
2990 /* This denotes the NEXT view number.
2991
2992 If it is 0, it is known that the NEXT view will be the first view
2993 at the given PC.
2994
2995 If it is -1, we're forcing the view number to be reset, e.g. at a
2996 function entry.
2997
2998 The meaning of other nonzero values depends on whether we're
2999 computing views internally or leaving it for the assembler to do
3000 so. If we're emitting them internally, view denotes the view
3001 number since the last known advance of PC. If we're leaving it
3002 for the assembler, it denotes the LVU label number that we're
3003 going to ask the assembler to assign. */
3004 var_loc_view view;
3005
3006 /* This counts the number of symbolic views emitted in this table
3007 since the latest view reset. Its max value, over all tables,
3008 sets symview_upper_bound. */
3009 var_loc_view symviews_since_reset;
3010
3011 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
3012 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
3013 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
3014 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
3015
3016 vec<dw_line_info_entry, va_gc> *entries;
3017 };
3018
3019 /* This is an upper bound for view numbers that the assembler may
3020 assign to symbolic views output in this translation unit. It is used to
3021 decide how big a field to use to represent view numbers in
3022 symview-classed attributes. */
3023
3024 static var_loc_view symview_upper_bound;
3025
3026 /* If we're keeping track of location views and their reset points, and
3027 INSN is a reset point (i.e., it necessarily advances the PC), mark
3028 the next view in TABLE as reset. */
3029
3030 static void
3031 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
3032 {
3033 if (!debug_internal_reset_location_views)
3034 return;
3035
3036 /* Maybe turn (part of?) this test into a default target hook. */
3037 int reset = 0;
3038
3039 if (targetm.reset_location_view)
3040 reset = targetm.reset_location_view (insn);
3041
3042 if (reset)
3043 ;
3044 else if (JUMP_TABLE_DATA_P (insn))
3045 reset = 1;
3046 else if (GET_CODE (insn) == USE
3047 || GET_CODE (insn) == CLOBBER
3048 || GET_CODE (insn) == ASM_INPUT
3049 || asm_noperands (insn) >= 0)
3050 ;
3051 else if (get_attr_min_length (insn) > 0)
3052 reset = 1;
3053
3054 if (reset > 0 && !RESETTING_VIEW_P (table->view))
3055 RESET_NEXT_VIEW (table->view);
3056 }
3057
3058 /* Each DIE attribute has a field specifying the attribute kind,
3059 a link to the next attribute in the chain, and an attribute value.
3060 Attributes are typically linked below the DIE they modify. */
3061
3062 typedef struct GTY(()) dw_attr_struct {
3063 enum dwarf_attribute dw_attr;
3064 dw_val_node dw_attr_val;
3065 }
3066 dw_attr_node;
3067
3068
3069 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3070 The children of each node form a circular list linked by
3071 die_sib. die_child points to the node *before* the "first" child node. */
3072
3073 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3074 union die_symbol_or_type_node
3075 {
3076 const char * GTY ((tag ("0"))) die_symbol;
3077 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3078 }
3079 GTY ((desc ("%0.comdat_type_p"))) die_id;
3080 vec<dw_attr_node, va_gc> *die_attr;
3081 dw_die_ref die_parent;
3082 dw_die_ref die_child;
3083 dw_die_ref die_sib;
3084 dw_die_ref die_definition; /* ref from a specification to its definition */
3085 dw_offset die_offset;
3086 unsigned long die_abbrev;
3087 int die_mark;
3088 unsigned int decl_id;
3089 enum dwarf_tag die_tag;
3090 /* Die is used and must not be pruned as unused. */
3091 BOOL_BITFIELD die_perennial_p : 1;
3092 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3093 /* Set for an external ref to die_symbol when die_offset contains an
3094 extra offset relative to that symbol. */
3095 BOOL_BITFIELD with_offset : 1;
3096 /* Whether this DIE was removed from the DIE tree, for example via
3097 prune_unused_types. The DIE lookup routines do not consider
3098 such DIEs present. */
3099 BOOL_BITFIELD removed : 1;
3100 /* Lots of spare bits. */
3101 }
3102 die_node;
3103
3104 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3105 static bool early_dwarf;
3106 static bool early_dwarf_finished;
3107 struct set_early_dwarf {
3108 bool saved;
3109 set_early_dwarf () : saved(early_dwarf)
3110 {
3111 gcc_assert (! early_dwarf_finished);
3112 early_dwarf = true;
3113 }
3114 ~set_early_dwarf () { early_dwarf = saved; }
3115 };
3116
3117 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3118 #define FOR_EACH_CHILD(die, c, expr) do { \
3119 c = die->die_child; \
3120 if (c) do { \
3121 c = c->die_sib; \
3122 expr; \
3123 } while (c != die->die_child); \
3124 } while (0)
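/* For example, counting the DW_TAG_member children of a DIE might look
   like: FOR_EACH_CHILD (die, c, if (c->die_tag == DW_TAG_member) n++);
   with C visiting each child of DIE in sibling order. */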
3125
3126 /* The pubname structure */
3127
3128 typedef struct GTY(()) pubname_struct {
3129 dw_die_ref die;
3130 const char *name;
3131 }
3132 pubname_entry;
3133
3134
3135 struct GTY(()) dw_ranges {
3136 const char *label;
3137 /* If this is positive, it's a block number, otherwise it's a
3138 bitwise-negated index into dw_ranges_by_label. */
3139 int num;
3140 /* Index for the range list for DW_FORM_rnglistx. */
3141 unsigned int idx : 31;
3142 /* True if this range might possibly be in a different section
3143 from the previous entry. */
3144 unsigned int maybe_new_sec : 1;
3145 };
3146
3147 /* A structure to hold a macinfo entry. */
3148
3149 typedef struct GTY(()) macinfo_struct {
3150 unsigned char code;
3151 unsigned HOST_WIDE_INT lineno;
3152 const char *info;
3153 }
3154 macinfo_entry;
3155
3156
3157 struct GTY(()) dw_ranges_by_label {
3158 const char *begin;
3159 const char *end;
3160 };
3161
3162 /* The comdat type node structure. */
3163 struct GTY(()) comdat_type_node
3164 {
3165 dw_die_ref root_die;
3166 dw_die_ref type_die;
3167 dw_die_ref skeleton_die;
3168 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3169 comdat_type_node *next;
3170 };
3171
3172 /* A list of DIEs for which we can't determine ancestry (parent_die
3173 field) just yet. Later in dwarf2out_finish we will fill in the
3174 missing bits. */
3175 typedef struct GTY(()) limbo_die_struct {
3176 dw_die_ref die;
3177 /* The tree for which this DIE was created. We use this to
3178 determine ancestry later. */
3179 tree created_for;
3180 struct limbo_die_struct *next;
3181 }
3182 limbo_die_node;
3183
3184 typedef struct skeleton_chain_struct
3185 {
3186 dw_die_ref old_die;
3187 dw_die_ref new_die;
3188 struct skeleton_chain_struct *parent;
3189 }
3190 skeleton_chain_node;
3191
3192 /* Define a macro which returns nonzero for a TYPE_DECL which was
3193 implicitly generated for a type.
3194
3195 Note that, unlike the C front-end (which generates a NULL named
3196 TYPE_DECL node for each complete tagged type, each array type,
3197 and each function type node created), the C++ front-end generates
3198 a _named_ TYPE_DECL node for each tagged type node created.
3199 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3200 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3201 front-end, but for each type, tagged or not. */
3202
3203 #define TYPE_DECL_IS_STUB(decl) \
3204 (DECL_NAME (decl) == NULL_TREE \
3205 || (DECL_ARTIFICIAL (decl) \
3206 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3207 /* This is necessary for stub decls that \
3208 appear in nested inline functions. */ \
3209 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3210 && (decl_ultimate_origin (decl) \
3211 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3212
3213 /* Information concerning the compilation unit's programming
3214 language, and compiler version. */
3215
3216 /* Fixed size portion of the DWARF compilation unit header. */
3217 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3218 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3219 + (dwarf_version >= 5 ? 4 : 3))
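/* With 32-bit DWARF this works out to 4 + 4 + 3 = 11 bytes for DWARF 2-4
   (unit length, version, abbrev offset, address size) and to 12 bytes for
   DWARF 5, whose header adds a one-byte unit type field. */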
3220
3221 /* Fixed size portion of the DWARF comdat type unit header. */
3222 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3223 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3224 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3225
3226 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3227 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3228 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3229
3230 /* Fixed size portion of public names info. */
3231 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3232
3233 /* Fixed size portion of the address range info. */
3234 #define DWARF_ARANGES_HEADER_SIZE \
3235 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3236 DWARF2_ADDR_SIZE * 2) \
3237 - DWARF_INITIAL_LENGTH_SIZE)
3238
3239 /* Size of padding portion in the address range info. It must be
3240 aligned to twice the pointer size. */
3241 #define DWARF_ARANGES_PAD_SIZE \
3242 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3243 DWARF2_ADDR_SIZE * 2) \
3244 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
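/* E.g. with 32-bit DWARF and 8-byte addresses the unpadded prefix is
   4 + 4 + 4 = 12 bytes, which DWARF_ROUND brings up to 16, giving a
   header size of 12 and a pad of 4 bytes. */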
3245
3246 /* Use assembler line directives if available. */
3247 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3248 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3249 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3250 #else
3251 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3252 #endif
3253 #endif
3254
3255 /* Use assembler views in line directives if available. */
3256 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3257 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3258 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3259 #else
3260 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3261 #endif
3262 #endif
3263
3264 /* Return true if GCC configure detected assembler support for .loc. */
3265
3266 bool
3267 dwarf2out_default_as_loc_support (void)
3268 {
3269 return DWARF2_ASM_LINE_DEBUG_INFO;
3270 #if (GCC_VERSION >= 3000)
3271 # undef DWARF2_ASM_LINE_DEBUG_INFO
3272 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3273 #endif
3274 }
3275
3276 /* Return true if GCC configure detected assembler support for views
3277 in .loc directives. */
3278
3279 bool
3280 dwarf2out_default_as_locview_support (void)
3281 {
3282 return DWARF2_ASM_VIEW_DEBUG_INFO;
3283 #if (GCC_VERSION >= 3000)
3284 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3285 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3286 #endif
3287 }
3288
3289 /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
3290 view computation, and it refers to a view identifier for which we
3291 will not emit a label because it is known to map to a view number
3292 zero. We won't allocate the bitmap if we're not using assembler
3293 support for location views, but we have to make the variable
3294 visible for GGC and for code that will be optimized out for lack of
3295 support but that's still parsed and compiled. We could abstract it
3296 out with macros, but it's not worth it. */
3297 static GTY(()) bitmap zero_view_p;
3298
3299 /* Evaluate to TRUE iff N is known to identify the first location view
3300 at its PC. When not using assembler location view computation,
3301 that must be view number zero. Otherwise, ZERO_VIEW_P is allocated
3302 and the view label numbers recorded in it are the ones known to be
3303 zero. */
3304 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3305 || (N) == (var_loc_view)-1 \
3306 || (zero_view_p \
3307 && bitmap_bit_p (zero_view_p, (N))))
3308
3309 /* Return true iff we're to emit .loc directives for the assembler to
3310 generate line number sections.
3311
3312 When we're not emitting views, all we need from the assembler is
3313 support for .loc directives.
3314
3315 If we are emitting views, we can only use the assembler's .loc
3316 support if it also supports views.
3317
3318 When the compiler is emitting the line number programs and
3319 computing view numbers itself, it resets view numbers at known PC
3320 changes and counts from that, and then it emits view numbers as
3321 literal constants in locviewlists. There are cases in which the
3322 compiler is not sure about PC changes, e.g. when extra alignment is
3323 requested for a label. In these cases, the compiler may not reset
3324 the view counter, and the potential PC advance in the line number
3325 program will use an opcode that does not reset the view counter
3326 even if the PC actually changes, so that compiler and debug info
3327 consumer can keep view numbers in sync.
3328
3329 When the compiler defers view computation to the assembler, it
3330 emits symbolic view numbers in locviewlists, with the exception of
3331 views known to be zero (forced resets, or reset after
3332 compiler-visible PC changes): instead of emitting symbols for
3333 these, we emit literal zero and assert the assembler agrees with
3334 the compiler's assessment. We could use symbolic views everywhere,
3335 instead of special-casing zero views, but then we'd be unable to
3336 optimize out locviewlists that contain only zeros. */
3337
3338 static bool
3339 output_asm_line_debug_info (void)
3340 {
3341 return (dwarf2out_as_loc_support
3342 && (dwarf2out_as_locview_support
3343 || !debug_variable_location_views));
3344 }
3345
3346 /* Minimum line offset in a special line info. opcode.
3347 This value was chosen to give a reasonable range of values. */
3348 #define DWARF_LINE_BASE -10
3349
3350 /* First special line opcode - leave room for the standard opcodes. */
3351 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3352
3353 /* Range of line offsets in a special line info. opcode. */
3354 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
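/* These parameters feed the standard DWARF special opcode encoding,
   opcode = (line_delta - DWARF_LINE_BASE)
	    + (DWARF_LINE_RANGE * operation_advance) + DWARF_LINE_OPCODE_BASE,
   so a single special opcode advances both the line and the address. */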
3355
3356 /* Flag that indicates the initial value of the is_stmt_start flag.
3357 In the present implementation, we do not mark any lines as
3358 the beginning of a source statement, because that information
3359 is not made available by the GCC front-end. */
3360 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3361
3362 /* Maximum number of operations per instruction bundle. */
3363 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3364 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3365 #endif
3366
3367 /* This location is used by calc_die_sizes() to keep track
3368 of the offset of each DIE within the .debug_info section. */
3369 static unsigned long next_die_offset;
3370
3371 /* Record the root of the DIE's built for the current compilation unit. */
3372 static GTY(()) dw_die_ref single_comp_unit_die;
3373
3374 /* A list of type DIEs that have been separated into comdat sections. */
3375 static GTY(()) comdat_type_node *comdat_type_list;
3376
3377 /* A list of CU DIEs that have been separated. */
3378 static GTY(()) limbo_die_node *cu_die_list;
3379
3380 /* A list of DIEs with a NULL parent waiting to be relocated. */
3381 static GTY(()) limbo_die_node *limbo_die_list;
3382
3383 /* A list of DIEs for which we may have to generate
3384 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3385 static GTY(()) limbo_die_node *deferred_asm_name;
3386
3387 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3388 {
3389 typedef const char *compare_type;
3390
3391 static hashval_t hash (dwarf_file_data *);
3392 static bool equal (dwarf_file_data *, const char *);
3393 };
3394
3395 /* Filenames referenced by this compilation unit. */
3396 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3397
3398 struct decl_die_hasher : ggc_ptr_hash<die_node>
3399 {
3400 typedef tree compare_type;
3401
3402 static hashval_t hash (die_node *);
3403 static bool equal (die_node *, tree);
3404 };
3405 /* A hash table of references to DIE's that describe declarations.
3406 The key is a DECL_UID() which is a unique number identifying each decl. */
3407 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3408
3409 struct GTY ((for_user)) variable_value_struct {
3410 unsigned int decl_id;
3411 vec<dw_die_ref, va_gc> *dies;
3412 };
3413
3414 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3415 {
3416 typedef tree compare_type;
3417
3418 static hashval_t hash (variable_value_struct *);
3419 static bool equal (variable_value_struct *, tree);
3420 };
3421 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3422 dw_val_class_decl_ref class, indexed by the FUNCTION_DECLs that are
3423 the DECL_CONTEXT of the referenced VAR_DECLs. */
3424 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3425
3426 struct block_die_hasher : ggc_ptr_hash<die_struct>
3427 {
3428 static hashval_t hash (die_struct *);
3429 static bool equal (die_struct *, die_struct *);
3430 };
3431
3432 /* A hash table of references to DIE's that describe COMMON blocks.
3433 The key is DECL_UID() ^ die_parent. */
3434 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3435
3436 typedef struct GTY(()) die_arg_entry_struct {
3437 dw_die_ref die;
3438 tree arg;
3439 } die_arg_entry;
3440
3441
3442 /* Node of the variable location list. */
3443 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3444 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3445 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3446 in mode of the EXPR_LIST node and first EXPR_LIST operand
3447 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3448 location or NULL for padding. For larger bitsizes,
3449 mode is 0 and first operand is a CONCAT with bitsize
3450 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3451 NULL as second operand. */
3452 rtx GTY (()) loc;
3453 const char * GTY (()) label;
3454 struct var_loc_node * GTY (()) next;
3455 var_loc_view view;
3456 };
3457
3458 /* Variable location list. */
3459 struct GTY ((for_user)) var_loc_list_def {
3460 struct var_loc_node * GTY (()) first;
3461
3462 /* Pointer to the last but one or last element of the
3463 chained list. If the list is empty, both first and
3464 last are NULL. If the list contains just one node,
3465 or the last node is certainly not redundant, it points
3466 to the last node; otherwise it points to the last but one.
3467 Do not mark it for GC because it is marked through the chain. */
3468 struct var_loc_node * GTY ((skip ("%h"))) last;
3469
3470 /* Pointer to the last element before section switch,
3471 if NULL, either sections weren't switched or first
3472 is after section switch. */
3473 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3474
3475 /* DECL_UID of the variable decl. */
3476 unsigned int decl_id;
3477 };
3478 typedef struct var_loc_list_def var_loc_list;
3479
3480 /* Call argument location list. */
3481 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3482 rtx GTY (()) call_arg_loc_note;
3483 const char * GTY (()) label;
3484 tree GTY (()) block;
3485 bool tail_call_p;
3486 rtx GTY (()) symbol_ref;
3487 struct call_arg_loc_node * GTY (()) next;
3488 };
3489
3490
3491 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3492 {
3493 typedef const_tree compare_type;
3494
3495 static hashval_t hash (var_loc_list *);
3496 static bool equal (var_loc_list *, const_tree);
3497 };
3498
3499 /* Table of decl location linked lists. */
3500 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3501
3502 /* Head and tail of call_arg_loc chain. */
3503 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3504 static struct call_arg_loc_node *call_arg_loc_last;
3505
3506 /* Number of call sites in the current function. */
3507 static int call_site_count = -1;
3508 /* Number of tail call sites in the current function. */
3509 static int tail_call_site_count = -1;
3510
3511 /* A cached location list. */
3512 struct GTY ((for_user)) cached_dw_loc_list_def {
3513 /* The DECL_UID of the decl that this entry describes. */
3514 unsigned int decl_id;
3515
3516 /* The cached location list. */
3517 dw_loc_list_ref loc_list;
3518 };
3519 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3520
3521 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3522 {
3523
3524 typedef const_tree compare_type;
3525
3526 static hashval_t hash (cached_dw_loc_list *);
3527 static bool equal (cached_dw_loc_list *, const_tree);
3528 };
3529
3530 /* Table of cached location lists. */
3531 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3532
3533 /* A vector of references to DIE's that are uniquely identified by their tag,
3534 presence/absence of children DIE's, and list of attribute/value pairs. */
3535 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3536
3537 /* A hash map to remember the stack usage for DWARF procedures. The value
3538 stored is the difference in stack size between just before the DWARF
3539 procedure is invoked and just after it returns. In other words, for a
3540 DWARF procedure that consumes N stack slots and pushes M, this stores M - N. */
3541 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
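/* For illustration only: a DWARF procedure that pops its two arguments
   and pushes a single result would be recorded as M - N = 1 - 2 = -1,
   e.g. (assuming the map is already allocated and PROC_DIE is the
   procedure's DIE):

     dwarf_proc_stack_usage_map->put (proc_die, -1);  */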
3542
3543 /* A global counter for generating labels for line number data. */
3544 static unsigned int line_info_label_num;
3545
3546 /* The current table to which we should emit line number information
3547 for the current function. This will be set up at the beginning of
3548 assembly for the function. */
3549 static GTY(()) dw_line_info_table *cur_line_info_table;
3550
3551 /* The two default tables of line number info. */
3552 static GTY(()) dw_line_info_table *text_section_line_info;
3553 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3554
3555 /* The set of all non-default tables of line number info. */
3556 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3557
3558 /* A flag telling the pubnames/pubtypes export code whether there is an
3559 info section to refer to. */
3560 static bool info_section_emitted;
3561
3562 /* A pointer to the base of a table that contains a list of publicly
3563 accessible names. */
3564 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3565
3566 /* A pointer to the base of a table that contains a list of publicly
3567 accessible types. */
3568 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3569
3570 /* A pointer to the base of a table that contains a list of macro
3571 defines/undefines (and file start/end markers). */
3572 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3573
3574 /* True if the .debug_macinfo or .debug_macro section is going to be
3575 emitted. */
3576 #define have_macinfo \
3577 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3578 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3579 && !macinfo_table->is_empty ())
3580
3581 /* Vector of dies for which we should generate .debug_ranges info. */
3582 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3583
3584 /* Vector of pairs of labels referenced in ranges_table. */
3585 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3586
3587 /* Whether we have location lists that need outputting. */
3588 static GTY(()) bool have_location_lists;
3589
3590 /* Unique label counter. */
3591 static GTY(()) unsigned int loclabel_num;
3592
3593 /* Unique label counter for point-of-call tables. */
3594 static GTY(()) unsigned int poc_label_num;
3595
3596 /* The last file entry emitted by maybe_emit_file(). */
3597 static GTY(()) struct dwarf_file_data * last_emitted_file;
3598
3599 /* Number of internal labels generated by gen_internal_sym(). */
3600 static GTY(()) int label_num;
3601
3602 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3603
3604 /* Instances of generic types for which we need to generate debug
3605 info that describe their generic parameters and arguments. That
3606 generation needs to happen once all types are properly laid out so
3607 we do it at the end of compilation. */
3608 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3609
3610 /* Offset from the "steady-state frame pointer" to the frame base,
3611 within the current function. */
3612 static poly_int64 frame_pointer_fb_offset;
3613 static bool frame_pointer_fb_offset_valid;
3614
3615 static vec<dw_die_ref> base_types;
3616
3617 /* Flags to represent a set of attribute classes for attributes that represent
3618 a scalar value (bounds, pointers, ...). */
3619 enum dw_scalar_form
3620 {
3621 dw_scalar_form_constant = 0x01,
3622 dw_scalar_form_exprloc = 0x02,
3623 dw_scalar_form_reference = 0x04
3624 };
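/* These values are combined as a bitmask of acceptable forms, e.g.
   (dw_scalar_form_constant | dw_scalar_form_exprloc) would allow either
   a constant or an expression location, but not a reference to another
   DIE.  */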
3625
3626 /* Forward declarations for functions defined in this file. */
3627
3628 static int is_pseudo_reg (const_rtx);
3629 static tree type_main_variant (tree);
3630 static int is_tagged_type (const_tree);
3631 static const char *dwarf_tag_name (unsigned);
3632 static const char *dwarf_attr_name (unsigned);
3633 static const char *dwarf_form_name (unsigned);
3634 static tree decl_ultimate_origin (const_tree);
3635 static tree decl_class_context (tree);
3636 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3637 static inline enum dw_val_class AT_class (dw_attr_node *);
3638 static inline unsigned int AT_index (dw_attr_node *);
3639 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3640 static inline unsigned AT_flag (dw_attr_node *);
3641 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3642 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3643 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3644 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3645 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3646 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3647 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3648 unsigned int, unsigned char *);
3649 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3650 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3651 static inline const char *AT_string (dw_attr_node *);
3652 static enum dwarf_form AT_string_form (dw_attr_node *);
3653 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3654 static void add_AT_specification (dw_die_ref, dw_die_ref);
3655 static inline dw_die_ref AT_ref (dw_attr_node *);
3656 static inline int AT_ref_external (dw_attr_node *);
3657 static inline void set_AT_ref_external (dw_attr_node *, int);
3658 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3659 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3660 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3661 dw_loc_list_ref);
3662 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3663 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3665 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3666 static void remove_addr_table_entry (addr_table_entry *);
3667 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3668 static inline rtx AT_addr (dw_attr_node *);
3669 static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *);
3670 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3671 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3672 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3673 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3674 unsigned long, bool);
3675 static inline const char *AT_lbl (dw_attr_node *);
3676 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3677 static const char *get_AT_low_pc (dw_die_ref);
3678 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3679 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3680 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3681 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3682 static bool is_c (void);
3683 static bool is_cxx (void);
3684 static bool is_cxx (const_tree);
3685 static bool is_fortran (void);
3686 static bool is_ada (void);
3687 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3688 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3689 static void add_child_die (dw_die_ref, dw_die_ref);
3690 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3691 static dw_die_ref lookup_type_die (tree);
3692 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3693 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3694 static void equate_type_number_to_die (tree, dw_die_ref);
3695 static dw_die_ref lookup_decl_die (tree);
3696 static var_loc_list *lookup_decl_loc (const_tree);
3697 static void equate_decl_number_to_die (tree, dw_die_ref);
3698 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3699 static void print_spaces (FILE *);
3700 static void print_die (dw_die_ref, FILE *);
3701 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3702 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3703 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3704 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3705 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3706 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3707 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3708 struct md5_ctx *, int *);
3709 struct checksum_attributes;
3710 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3711 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3712 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3713 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3714 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3715 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3716 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3717 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3718 static int is_type_die (dw_die_ref);
3719 static inline bool is_template_instantiation (dw_die_ref);
3720 static int is_declaration_die (dw_die_ref);
3721 static int should_move_die_to_comdat (dw_die_ref);
3722 static dw_die_ref clone_as_declaration (dw_die_ref);
3723 static dw_die_ref clone_die (dw_die_ref);
3724 static dw_die_ref clone_tree (dw_die_ref);
3725 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3726 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3727 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3728 static dw_die_ref generate_skeleton (dw_die_ref);
3729 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3730 dw_die_ref,
3731 dw_die_ref);
3732 static void break_out_comdat_types (dw_die_ref);
3733 static void copy_decls_for_unworthy_types (dw_die_ref);
3734
3735 static void add_sibling_attributes (dw_die_ref);
3736 static void output_location_lists (dw_die_ref);
3737 static int constant_size (unsigned HOST_WIDE_INT);
3738 static unsigned long size_of_die (dw_die_ref);
3739 static void calc_die_sizes (dw_die_ref);
3740 static void calc_base_type_die_sizes (void);
3741 static void mark_dies (dw_die_ref);
3742 static void unmark_dies (dw_die_ref);
3743 static void unmark_all_dies (dw_die_ref);
3744 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3745 static unsigned long size_of_aranges (void);
3746 static enum dwarf_form value_format (dw_attr_node *);
3747 static void output_value_format (dw_attr_node *);
3748 static void output_abbrev_section (void);
3749 static void output_die_abbrevs (unsigned long, dw_die_ref);
3750 static void output_die (dw_die_ref);
3751 static void output_compilation_unit_header (enum dwarf_unit_type);
3752 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3753 static void output_comdat_type_unit (comdat_type_node *, bool);
3754 static const char *dwarf2_name (tree, int);
3755 static void add_pubname (tree, dw_die_ref);
3756 static void add_enumerator_pubname (const char *, dw_die_ref);
3757 static void add_pubname_string (const char *, dw_die_ref);
3758 static void add_pubtype (tree, dw_die_ref);
3759 static void output_pubnames (vec<pubname_entry, va_gc> *);
3760 static void output_aranges (void);
3761 static unsigned int add_ranges (const_tree, bool = false);
3762 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3763 bool *, bool);
3764 static void output_ranges (void);
3765 static dw_line_info_table *new_line_info_table (void);
3766 static void output_line_info (bool);
3767 static void output_file_names (void);
3768 static dw_die_ref base_type_die (tree, bool);
3769 static int is_base_type (tree);
3770 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3771 static int decl_quals (const_tree);
3772 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3773 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3774 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3775 static unsigned int dbx_reg_number (const_rtx);
3776 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3777 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3778 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3779 enum var_init_status);
3780 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3781 enum var_init_status);
3782 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3783 enum var_init_status);
3784 static int is_based_loc (const_rtx);
3785 static bool resolve_one_addr (rtx *);
3786 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3787 enum var_init_status);
3788 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3789 enum var_init_status);
3790 struct loc_descr_context;
3791 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3792 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3793 static dw_loc_list_ref loc_list_from_tree (tree, int,
3794 struct loc_descr_context *);
3795 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3796 struct loc_descr_context *);
3797 static tree field_type (const_tree);
3798 static unsigned int simple_type_align_in_bits (const_tree);
3799 static unsigned int simple_decl_align_in_bits (const_tree);
3800 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3801 struct vlr_context;
3802 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3803 HOST_WIDE_INT *);
3804 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3805 dw_loc_list_ref);
3806 static void add_data_member_location_attribute (dw_die_ref, tree,
3807 struct vlr_context *);
3808 static bool add_const_value_attribute (dw_die_ref, rtx);
3809 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3810 static void insert_wide_int (const wide_int &, unsigned char *, int);
3811 static void insert_float (const_rtx, unsigned char *);
3812 static rtx rtl_for_decl_location (tree);
3813 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3814 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3815 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3816 static void add_name_attribute (dw_die_ref, const char *);
3817 static void add_desc_attribute (dw_die_ref, tree);
3818 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3819 static void add_comp_dir_attribute (dw_die_ref);
3820 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3821 struct loc_descr_context *);
3822 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3823 struct loc_descr_context *);
3824 static void add_subscript_info (dw_die_ref, tree, bool);
3825 static void add_byte_size_attribute (dw_die_ref, tree);
3826 static void add_alignment_attribute (dw_die_ref, tree);
3827 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3828 struct vlr_context *);
3829 static void add_bit_size_attribute (dw_die_ref, tree);
3830 static void add_prototyped_attribute (dw_die_ref, tree);
3831 static void add_abstract_origin_attribute (dw_die_ref, tree);
3832 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3833 static void add_src_coords_attributes (dw_die_ref, tree);
3834 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3835 static void add_discr_value (dw_die_ref, dw_discr_value *);
3836 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3837 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3838 static dw_die_ref scope_die_for (tree, dw_die_ref);
3839 static inline int local_scope_p (dw_die_ref);
3840 static inline int class_scope_p (dw_die_ref);
3841 static inline int class_or_namespace_scope_p (dw_die_ref);
3842 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3843 static void add_calling_convention_attribute (dw_die_ref, tree);
3844 static const char *type_tag (const_tree);
3845 static tree member_declared_type (const_tree);
3846 #if 0
3847 static const char *decl_start_label (tree);
3848 #endif
3849 static void gen_array_type_die (tree, dw_die_ref);
3850 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3851 #if 0
3852 static void gen_entry_point_die (tree, dw_die_ref);
3853 #endif
3854 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3855 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3856 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3857 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3858 static void gen_formal_types_die (tree, dw_die_ref);
3859 static void gen_subprogram_die (tree, dw_die_ref);
3860 static void gen_variable_die (tree, tree, dw_die_ref);
3861 static void gen_const_die (tree, dw_die_ref);
3862 static void gen_label_die (tree, dw_die_ref);
3863 static void gen_lexical_block_die (tree, dw_die_ref);
3864 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3865 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3866 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3867 static dw_die_ref gen_compile_unit_die (const char *);
3868 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3869 static void gen_member_die (tree, dw_die_ref);
3870 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3871 enum debug_info_usage);
3872 static void gen_subroutine_type_die (tree, dw_die_ref);
3873 static void gen_typedef_die (tree, dw_die_ref);
3874 static void gen_type_die (tree, dw_die_ref);
3875 static void gen_block_die (tree, dw_die_ref);
3876 static void decls_for_scope (tree, dw_die_ref, bool = true);
3877 static bool is_naming_typedef_decl (const_tree);
3878 static inline dw_die_ref get_context_die (tree);
3879 static void gen_namespace_die (tree, dw_die_ref);
3880 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3881 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3882 static dw_die_ref force_decl_die (tree);
3883 static dw_die_ref force_type_die (tree);
3884 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3885 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3886 static struct dwarf_file_data * lookup_filename (const char *);
3887 static void retry_incomplete_types (void);
3888 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3889 static void gen_generic_params_dies (tree);
3890 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3891 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3892 static void splice_child_die (dw_die_ref, dw_die_ref);
3893 static int file_info_cmp (const void *, const void *);
3894 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3895 const char *, var_loc_view, const char *);
3896 static void output_loc_list (dw_loc_list_ref);
3897 static char *gen_internal_sym (const char *);
3898 static bool want_pubnames (void);
3899
3900 static void prune_unmark_dies (dw_die_ref);
3901 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3902 static void prune_unused_types_mark (dw_die_ref, int);
3903 static void prune_unused_types_walk (dw_die_ref);
3904 static void prune_unused_types_walk_attribs (dw_die_ref);
3905 static void prune_unused_types_prune (dw_die_ref);
3906 static void prune_unused_types (void);
3907 static int maybe_emit_file (struct dwarf_file_data *fd);
3908 static inline const char *AT_vms_delta1 (dw_attr_node *);
3909 static inline const char *AT_vms_delta2 (dw_attr_node *);
3910 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3911 const char *, const char *);
3912 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3913 static void gen_remaining_tmpl_value_param_die_attribute (void);
3914 static bool generic_type_p (tree);
3915 static void schedule_generic_params_dies_gen (tree t);
3916 static void gen_scheduled_generic_parms_dies (void);
3917 static void resolve_variable_values (void);
3918
3919 static const char *comp_dir_string (void);
3920
3921 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3922
3923 /* Enum for tracking thread-local variables whose address is really an offset
3924 relative to the TLS pointer, which will need link-time relocation, but will
3925 not need relocation by the DWARF consumer. */
3926
3927 enum dtprel_bool
3928 {
3929 dtprel_false = 0,
3930 dtprel_true = 1
3931 };
3932
3933 /* Return the operator to use for an address of a variable. For dtprel_true, we
3934 use DW_OP_const*. For regular variables, which need both link-time
3935 relocation and consumer-level relocation (e.g., to account for shared objects
3936 loaded at a random address), we use DW_OP_addr*. */
3937
3938 static inline enum dwarf_location_atom
3939 dw_addr_op (enum dtprel_bool dtprel)
3940 {
3941 if (dtprel == dtprel_true)
3942 return (dwarf_split_debug_info ? dwarf_OP (DW_OP_constx)
3943 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3944 else
3945 return dwarf_split_debug_info ? dwarf_OP (DW_OP_addrx) : DW_OP_addr;
3946 }
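/* Summary of the choice above (for illustration; the DW_OP_constx and
   DW_OP_addrx cases map to the corresponding GNU extensions when not
   emitting DWARF 5):

     dtprel_true,  split debug info  -> DW_OP_constx
     dtprel_true,  no split          -> DW_OP_const4u or DW_OP_const8u
     dtprel_false, split debug info  -> DW_OP_addrx
     dtprel_false, no split          -> DW_OP_addr  */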
3947
3948 /* Return a pointer to a newly allocated address location description. If
3949 dwarf_split_debug_info is true, then record the address with the appropriate
3950 relocation. */
3951 static inline dw_loc_descr_ref
3952 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3953 {
3954 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3955
3956 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3957 ref->dw_loc_oprnd1.v.val_addr = addr;
3958 ref->dtprel = dtprel;
3959 if (dwarf_split_debug_info)
3960 ref->dw_loc_oprnd1.val_entry
3961 = add_addr_table_entry (addr,
3962 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3963 else
3964 ref->dw_loc_oprnd1.val_entry = NULL;
3965
3966 return ref;
3967 }
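/* Typical use (sketch; SYM stands for a SYMBOL_REF obtained from the
   variable's DECL_RTL, and the caller has already decided the address
   is not a TLS offset):

     dw_loc_descr_ref descr = new_addr_loc_descr (sym, dtprel_false);

   Passing dtprel_true instead makes dw_addr_op pick a DW_OP_const*
   opcode, so the consumer treats the operand as a TLS offset rather
   than a relocatable address.  */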
3968
3969 /* Section names used to hold DWARF debugging information. */
3970
3971 #ifndef DEBUG_INFO_SECTION
3972 #define DEBUG_INFO_SECTION ".debug_info"
3973 #endif
3974 #ifndef DEBUG_DWO_INFO_SECTION
3975 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3976 #endif
3977 #ifndef DEBUG_LTO_INFO_SECTION
3978 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3979 #endif
3980 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3981 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3982 #endif
3983 #ifndef DEBUG_ABBREV_SECTION
3984 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3985 #endif
3986 #ifndef DEBUG_LTO_ABBREV_SECTION
3987 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3988 #endif
3989 #ifndef DEBUG_DWO_ABBREV_SECTION
3990 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3991 #endif
3992 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3993 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3994 #endif
3995 #ifndef DEBUG_ARANGES_SECTION
3996 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3997 #endif
3998 #ifndef DEBUG_ADDR_SECTION
3999 #define DEBUG_ADDR_SECTION ".debug_addr"
4000 #endif
4001 #ifndef DEBUG_MACINFO_SECTION
4002 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
4003 #endif
4004 #ifndef DEBUG_LTO_MACINFO_SECTION
4005 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
4006 #endif
4007 #ifndef DEBUG_DWO_MACINFO_SECTION
4008 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
4009 #endif
4010 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
4011 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
4012 #endif
4013 #ifndef DEBUG_MACRO_SECTION
4014 #define DEBUG_MACRO_SECTION ".debug_macro"
4015 #endif
4016 #ifndef DEBUG_LTO_MACRO_SECTION
4017 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
4018 #endif
4019 #ifndef DEBUG_DWO_MACRO_SECTION
4020 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
4021 #endif
4022 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
4023 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
4024 #endif
4025 #ifndef DEBUG_LINE_SECTION
4026 #define DEBUG_LINE_SECTION ".debug_line"
4027 #endif
4028 #ifndef DEBUG_LTO_LINE_SECTION
4029 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
4030 #endif
4031 #ifndef DEBUG_DWO_LINE_SECTION
4032 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
4033 #endif
4034 #ifndef DEBUG_LTO_DWO_LINE_SECTION
4035 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
4036 #endif
4037 #ifndef DEBUG_LOC_SECTION
4038 #define DEBUG_LOC_SECTION ".debug_loc"
4039 #endif
4040 #ifndef DEBUG_DWO_LOC_SECTION
4041 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
4042 #endif
4043 #ifndef DEBUG_LOCLISTS_SECTION
4044 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
4045 #endif
4046 #ifndef DEBUG_DWO_LOCLISTS_SECTION
4047 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
4048 #endif
4049 #ifndef DEBUG_PUBNAMES_SECTION
4050 #define DEBUG_PUBNAMES_SECTION \
4051 ((debug_generate_pub_sections == 2) \
4052 ? ".debug_gnu_pubnames" : ".debug_pubnames")
4053 #endif
4054 #ifndef DEBUG_PUBTYPES_SECTION
4055 #define DEBUG_PUBTYPES_SECTION \
4056 ((debug_generate_pub_sections == 2) \
4057 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4058 #endif
4059 #ifndef DEBUG_STR_OFFSETS_SECTION
4060 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4061 #endif
4062 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4063 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4064 #endif
4065 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4066 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4067 #endif
4068 #ifndef DEBUG_STR_SECTION
4069 #define DEBUG_STR_SECTION ".debug_str"
4070 #endif
4071 #ifndef DEBUG_LTO_STR_SECTION
4072 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4073 #endif
4074 #ifndef DEBUG_STR_DWO_SECTION
4075 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4076 #endif
4077 #ifndef DEBUG_LTO_STR_DWO_SECTION
4078 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4079 #endif
4080 #ifndef DEBUG_RANGES_SECTION
4081 #define DEBUG_RANGES_SECTION ".debug_ranges"
4082 #endif
4083 #ifndef DEBUG_RNGLISTS_SECTION
4084 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4085 #endif
4086 #ifndef DEBUG_LINE_STR_SECTION
4087 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4088 #endif
4089 #ifndef DEBUG_LTO_LINE_STR_SECTION
4090 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4091 #endif
4092
4093 /* Standard ELF section names for compiled code and data. */
4094 #ifndef TEXT_SECTION_NAME
4095 #define TEXT_SECTION_NAME ".text"
4096 #endif
4097
4098 /* Section flags for .debug_str section. */
4099 #define DEBUG_STR_SECTION_FLAGS \
4100 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4101 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4102 : SECTION_DEBUG)
4103
4104 /* Section flags for .debug_str.dwo section. */
4105 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4106
4107 /* Attribute used to refer to the macro section. */
4108 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4109 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
4110
4111 /* Labels we insert at the beginning of sections, so that we can refer to
4112 them instead of the section names themselves. */
4113
4114 #ifndef TEXT_SECTION_LABEL
4115 #define TEXT_SECTION_LABEL "Ltext"
4116 #endif
4117 #ifndef COLD_TEXT_SECTION_LABEL
4118 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4119 #endif
4120 #ifndef DEBUG_LINE_SECTION_LABEL
4121 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4122 #endif
4123 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4124 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4125 #endif
4126 #ifndef DEBUG_INFO_SECTION_LABEL
4127 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4128 #endif
4129 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4130 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4131 #endif
4132 #ifndef DEBUG_ABBREV_SECTION_LABEL
4133 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4134 #endif
4135 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4136 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4137 #endif
4138 #ifndef DEBUG_ADDR_SECTION_LABEL
4139 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4140 #endif
4141 #ifndef DEBUG_LOC_SECTION_LABEL
4142 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4143 #endif
4144 #ifndef DEBUG_RANGES_SECTION_LABEL
4145 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4146 #endif
4147 #ifndef DEBUG_MACINFO_SECTION_LABEL
4148 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4149 #endif
4150 #ifndef DEBUG_MACRO_SECTION_LABEL
4151 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4152 #endif
4153 #define SKELETON_COMP_DIE_ABBREV 1
4154 #define SKELETON_TYPE_DIE_ABBREV 2
4155
4156 /* Definitions of defaults for formats and names of various special
4157 (artificial) labels which may be generated within this file (when the -g
4158 option is used and DWARF2_DEBUGGING_INFO is in effect).
4159 If necessary, these may be overridden from within the tm.h file, but
4160 typically, overriding these defaults is unnecessary. */
4161
4162 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4163 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4164 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4165 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4166 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4167 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4168 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4169 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4170 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4171 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4172 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4173 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4174 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4175 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4176 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4177
4178 #ifndef TEXT_END_LABEL
4179 #define TEXT_END_LABEL "Letext"
4180 #endif
4181 #ifndef COLD_END_LABEL
4182 #define COLD_END_LABEL "Letext_cold"
4183 #endif
4184 #ifndef BLOCK_BEGIN_LABEL
4185 #define BLOCK_BEGIN_LABEL "LBB"
4186 #endif
4187 #ifndef BLOCK_INLINE_ENTRY_LABEL
4188 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4189 #endif
4190 #ifndef BLOCK_END_LABEL
4191 #define BLOCK_END_LABEL "LBE"
4192 #endif
4193 #ifndef LINE_CODE_LABEL
4194 #define LINE_CODE_LABEL "LM"
4195 #endif
4196
4197 \f
4198 /* Return the root of the DIE's built for the current compilation unit. */
4199 static dw_die_ref
4200 comp_unit_die (void)
4201 {
4202 if (!single_comp_unit_die)
4203 single_comp_unit_die = gen_compile_unit_die (NULL);
4204 return single_comp_unit_die;
4205 }
4206
4207 /* We allow a language front-end to designate a function that is to be
4208 called to "demangle" any name before it is put into a DIE. */
4209
4210 static const char *(*demangle_name_func) (const char *);
4211
4212 void
4213 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4214 {
4215 demangle_name_func = func;
4216 }
4217
4218 /* Test if rtl node points to a pseudo register. */
4219
4220 static inline int
4221 is_pseudo_reg (const_rtx rtl)
4222 {
4223 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4224 || (GET_CODE (rtl) == SUBREG
4225 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4226 }
4227
4228 /* Return a reference to a type, with its const and volatile qualifiers
4229 removed. */
4230
4231 static inline tree
4232 type_main_variant (tree type)
4233 {
4234 type = TYPE_MAIN_VARIANT (type);
4235
4236 /* ??? There really should be only one main variant among any group of
4237 variants of a given type (and all of the MAIN_VARIANT values for all
4238 members of the group should point to that one type) but sometimes the C
4239 front-end messes this up for array types, so we work around that bug
4240 here. */
4241 if (TREE_CODE (type) == ARRAY_TYPE)
4242 while (type != TYPE_MAIN_VARIANT (type))
4243 type = TYPE_MAIN_VARIANT (type);
4244
4245 return type;
4246 }
4247
4248 /* Return nonzero if the given type node represents a tagged type. */
4249
4250 static inline int
4251 is_tagged_type (const_tree type)
4252 {
4253 enum tree_code code = TREE_CODE (type);
4254
4255 return (code == RECORD_TYPE || code == UNION_TYPE
4256 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4257 }
4258
4259 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4260
4261 static void
4262 get_ref_die_offset_label (char *label, dw_die_ref ref)
4263 {
4264 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4265 }
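/* For example, on a typical ELF target where debug_info_section_label
   is ".Ldebug_info0", a DIE at offset 44 yields the label
   ".Ldebug_info0+44" (illustrative values only).  */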
4266
4267 /* Return die_offset of a DIE reference to a base type. */
4268
4269 static unsigned long int
4270 get_base_type_offset (dw_die_ref ref)
4271 {
4272 if (ref->die_offset)
4273 return ref->die_offset;
4274 if (comp_unit_die ()->die_abbrev)
4275 {
4276 calc_base_type_die_sizes ();
4277 gcc_assert (ref->die_offset);
4278 }
4279 return ref->die_offset;
4280 }
4281
4282 /* Return die_offset of a DIE reference other than base type. */
4283
4284 static unsigned long int
4285 get_ref_die_offset (dw_die_ref ref)
4286 {
4287 gcc_assert (ref->die_offset);
4288 return ref->die_offset;
4289 }
4290
4291 /* Convert a DIE tag into its string name. */
4292
4293 static const char *
4294 dwarf_tag_name (unsigned int tag)
4295 {
4296 const char *name = get_DW_TAG_name (tag);
4297
4298 if (name != NULL)
4299 return name;
4300
4301 return "DW_TAG_<unknown>";
4302 }
4303
4304 /* Convert a DWARF attribute code into its string name. */
4305
4306 static const char *
4307 dwarf_attr_name (unsigned int attr)
4308 {
4309 const char *name;
4310
4311 switch (attr)
4312 {
4313 #if VMS_DEBUGGING_INFO
4314 case DW_AT_HP_prologue:
4315 return "DW_AT_HP_prologue";
4316 #else
4317 case DW_AT_MIPS_loop_unroll_factor:
4318 return "DW_AT_MIPS_loop_unroll_factor";
4319 #endif
4320
4321 #if VMS_DEBUGGING_INFO
4322 case DW_AT_HP_epilogue:
4323 return "DW_AT_HP_epilogue";
4324 #else
4325 case DW_AT_MIPS_stride:
4326 return "DW_AT_MIPS_stride";
4327 #endif
4328 }
4329
4330 name = get_DW_AT_name (attr);
4331
4332 if (name != NULL)
4333 return name;
4334
4335 return "DW_AT_<unknown>";
4336 }
4337
4338 /* Convert a DWARF value form code into its string name. */
4339
4340 static const char *
4341 dwarf_form_name (unsigned int form)
4342 {
4343 const char *name = get_DW_FORM_name (form);
4344
4345 if (name != NULL)
4346 return name;
4347
4348 return "DW_FORM_<unknown>";
4349 }
4350 \f
4351 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4352 instance of an inlined instance of a decl which is local to an inline
4353 function, so we have to trace all of the way back through the origin chain
4354 to find out what sort of node actually served as the original seed for the
4355 given block. */
4356
4357 static tree
4358 decl_ultimate_origin (const_tree decl)
4359 {
4360 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4361 return NULL_TREE;
4362
4363 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4364 we're trying to output the abstract instance of this function. */
4365 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4366 return NULL_TREE;
4367
4368 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4369 most distant ancestor, this should never happen. */
4370 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4371
4372 return DECL_ABSTRACT_ORIGIN (decl);
4373 }
4374
4375 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4376 of a virtual function may refer to a base class, so we check the 'this'
4377 parameter. */
4378
4379 static tree
4380 decl_class_context (tree decl)
4381 {
4382 tree context = NULL_TREE;
4383
4384 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4385 context = DECL_CONTEXT (decl);
4386 else
4387 context = TYPE_MAIN_VARIANT
4388 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4389
4390 if (context && !TYPE_P (context))
4391 context = NULL_TREE;
4392
4393 return context;
4394 }
4395 \f
4396 /* Add an attribute/value pair to a DIE. */
4397
4398 static inline void
4399 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4400 {
4401 /* Maybe this should be an assert? */
4402 if (die == NULL)
4403 return;
4404
4405 if (flag_checking)
4406 {
4407 /* Check we do not add duplicate attrs. Can't use get_AT here
4408 because that recurses to the specification/abstract origin DIE. */
4409 dw_attr_node *a;
4410 unsigned ix;
4411 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4412 gcc_assert (a->dw_attr != attr->dw_attr);
4413 }
4414
4415 vec_safe_reserve (die->die_attr, 1);
4416 vec_safe_push (die->die_attr, *attr);
4417 }
4418
4419 static inline enum dw_val_class
4420 AT_class (dw_attr_node *a)
4421 {
4422 return a->dw_attr_val.val_class;
4423 }
4424
4425 /* Return the index for any attribute that will be referenced with a
4426 DW_FORM_addrx/GNU_addr_index or DW_FORM_strx/GNU_str_index. String
4427 indices are stored in dw_attr_val.v.val_str for reference counting
4428 pruning. */
4429
4430 static inline unsigned int
4431 AT_index (dw_attr_node *a)
4432 {
4433 if (AT_class (a) == dw_val_class_str)
4434 return a->dw_attr_val.v.val_str->index;
4435 else if (a->dw_attr_val.val_entry != NULL)
4436 return a->dw_attr_val.val_entry->index;
4437 return NOT_INDEXED;
4438 }
4439
4440 /* Add a flag value attribute to a DIE. */
4441
4442 static inline void
4443 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4444 {
4445 dw_attr_node attr;
4446
4447 attr.dw_attr = attr_kind;
4448 attr.dw_attr_val.val_class = dw_val_class_flag;
4449 attr.dw_attr_val.val_entry = NULL;
4450 attr.dw_attr_val.v.val_flag = flag;
4451 add_dwarf_attr (die, &attr);
4452 }
4453
4454 static inline unsigned
4455 AT_flag (dw_attr_node *a)
4456 {
4457 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4458 return a->dw_attr_val.v.val_flag;
4459 }
4460
4461 /* Add a signed integer attribute value to a DIE. */
4462
4463 static inline void
4464 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4465 {
4466 dw_attr_node attr;
4467
4468 attr.dw_attr = attr_kind;
4469 attr.dw_attr_val.val_class = dw_val_class_const;
4470 attr.dw_attr_val.val_entry = NULL;
4471 attr.dw_attr_val.v.val_int = int_val;
4472 add_dwarf_attr (die, &attr);
4473 }
4474
4475 static inline HOST_WIDE_INT
4476 AT_int (dw_attr_node *a)
4477 {
4478 gcc_assert (a && (AT_class (a) == dw_val_class_const
4479 || AT_class (a) == dw_val_class_const_implicit));
4480 return a->dw_attr_val.v.val_int;
4481 }
4482
4483 /* Add an unsigned integer attribute value to a DIE. */
4484
4485 static inline void
4486 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4487 unsigned HOST_WIDE_INT unsigned_val)
4488 {
4489 dw_attr_node attr;
4490
4491 attr.dw_attr = attr_kind;
4492 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4493 attr.dw_attr_val.val_entry = NULL;
4494 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4495 add_dwarf_attr (die, &attr);
4496 }
4497
4498 static inline unsigned HOST_WIDE_INT
4499 AT_unsigned (dw_attr_node *a)
4500 {
4501 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4502 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4503 return a->dw_attr_val.v.val_unsigned;
4504 }
4505
4506 /* Add an unsigned wide integer attribute value to a DIE. */
4507
4508 static inline void
4509 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4510 const wide_int& w)
4511 {
4512 dw_attr_node attr;
4513
4514 attr.dw_attr = attr_kind;
4515 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4516 attr.dw_attr_val.val_entry = NULL;
4517 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4518 *attr.dw_attr_val.v.val_wide = w;
4519 add_dwarf_attr (die, &attr);
4520 }
4521
4522 /* Add an unsigned double integer attribute value to a DIE. */
4523
4524 static inline void
4525 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4526 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4527 {
4528 dw_attr_node attr;
4529
4530 attr.dw_attr = attr_kind;
4531 attr.dw_attr_val.val_class = dw_val_class_const_double;
4532 attr.dw_attr_val.val_entry = NULL;
4533 attr.dw_attr_val.v.val_double.high = high;
4534 attr.dw_attr_val.v.val_double.low = low;
4535 add_dwarf_attr (die, &attr);
4536 }
4537
4538 /* Add an array of byte-encoded values (e.g. a vector or floating point constant) to a DIE. */
4539
4540 static inline void
4541 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4542 unsigned int length, unsigned int elt_size, unsigned char *array)
4543 {
4544 dw_attr_node attr;
4545
4546 attr.dw_attr = attr_kind;
4547 attr.dw_attr_val.val_class = dw_val_class_vec;
4548 attr.dw_attr_val.val_entry = NULL;
4549 attr.dw_attr_val.v.val_vec.length = length;
4550 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4551 attr.dw_attr_val.v.val_vec.array = array;
4552 add_dwarf_attr (die, &attr);
4553 }
4554
4555 /* Add an 8-byte data attribute value to a DIE. */
4556
4557 static inline void
4558 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4559 unsigned char data8[8])
4560 {
4561 dw_attr_node attr;
4562
4563 attr.dw_attr = attr_kind;
4564 attr.dw_attr_val.val_class = dw_val_class_data8;
4565 attr.dw_attr_val.val_entry = NULL;
4566 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4567 add_dwarf_attr (die, &attr);
4568 }
4569
4570 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4571 dwarf_split_debug_info, address attributes in dies destined for the
4572 final executable have force_direct set to avoid using indexed
4573 references. */
4574
4575 static inline void
4576 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4577 bool force_direct)
4578 {
4579 dw_attr_node attr;
4580 char * lbl_id;
4581
4582 lbl_id = xstrdup (lbl_low);
4583 attr.dw_attr = DW_AT_low_pc;
4584 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4585 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4586 if (dwarf_split_debug_info && !force_direct)
4587 attr.dw_attr_val.val_entry
4588 = add_addr_table_entry (lbl_id, ate_kind_label);
4589 else
4590 attr.dw_attr_val.val_entry = NULL;
4591 add_dwarf_attr (die, &attr);
4592
4593 attr.dw_attr = DW_AT_high_pc;
4594 if (dwarf_version < 4)
4595 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4596 else
4597 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4598 lbl_id = xstrdup (lbl_high);
4599 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4600 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4601 && dwarf_split_debug_info && !force_direct)
4602 attr.dw_attr_val.val_entry
4603 = add_addr_table_entry (lbl_id, ate_kind_label);
4604 else
4605 attr.dw_attr_val.val_entry = NULL;
4606 add_dwarf_attr (die, &attr);
4607 }
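/* Typical use when describing a function (sketch; the label generation
   shown is illustrative):

     char lo[MAX_ARTIFICIAL_LABEL_BYTES], hi[MAX_ARTIFICIAL_LABEL_BYTES];
     ASM_GENERATE_INTERNAL_LABEL (lo, FUNC_BEGIN_LABEL,
				  current_function_funcdef_no);
     ASM_GENERATE_INTERNAL_LABEL (hi, FUNC_END_LABEL,
				  current_function_funcdef_no);
     add_AT_low_high_pc (subr_die, lo, hi, false);

   Note that for DWARF 4 and later DW_AT_high_pc is emitted as an offset
   from DW_AT_low_pc (dw_val_class_high_pc) rather than as a second
   address.  */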
4608
4609 /* Hash and equality functions for debug_str_hash. */
4610
4611 hashval_t
4612 indirect_string_hasher::hash (indirect_string_node *x)
4613 {
4614 return htab_hash_string (x->str);
4615 }
4616
4617 bool
4618 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4619 {
4620 return strcmp (x1->str, x2) == 0;
4621 }
4622
4623 /* Add STR to the given string hash table. */
4624
4625 static struct indirect_string_node *
4626 find_AT_string_in_table (const char *str,
4627 hash_table<indirect_string_hasher> *table,
4628 enum insert_option insert = INSERT)
4629 {
4630 struct indirect_string_node *node;
4631
4632 indirect_string_node **slot
4633 = table->find_slot_with_hash (str, htab_hash_string (str), insert);
4634 if (*slot == NULL)
4635 {
4636 node = ggc_cleared_alloc<indirect_string_node> ();
4637 node->str = ggc_strdup (str);
4638 *slot = node;
4639 }
4640 else
4641 node = *slot;
4642
4643 node->refcount++;
4644 return node;
4645 }
4646
4647 /* Add STR to the indirect string hash table. */
4648
4649 static struct indirect_string_node *
4650 find_AT_string (const char *str, enum insert_option insert = INSERT)
4651 {
4652 if (! debug_str_hash)
4653 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4654
4655 return find_AT_string_in_table (str, debug_str_hash, insert);
4656 }
4657
4658 /* Add a string attribute value to a DIE. */
4659
4660 static inline void
4661 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4662 {
4663 dw_attr_node attr;
4664 struct indirect_string_node *node;
4665
4666 node = find_AT_string (str);
4667
4668 attr.dw_attr = attr_kind;
4669 attr.dw_attr_val.val_class = dw_val_class_str;
4670 attr.dw_attr_val.val_entry = NULL;
4671 attr.dw_attr_val.v.val_str = node;
4672 add_dwarf_attr (die, &attr);
4673 }
4674
4675 static inline const char *
4676 AT_string (dw_attr_node *a)
4677 {
4678 gcc_assert (a && AT_class (a) == dw_val_class_str);
4679 return a->dw_attr_val.v.val_str->str;
4680 }
4681
4682 /* Call this function directly to bypass AT_string_form's logic to put
4683 the string inline in the die. */
4684
4685 static void
4686 set_indirect_string (struct indirect_string_node *node)
4687 {
4688 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4689 /* Already being indirect is a no-op. */
4690 if (node->form == DW_FORM_strp
4691 || node->form == DW_FORM_line_strp
4692 || node->form == dwarf_FORM (DW_FORM_strx))
4693 {
4694 gcc_assert (node->label);
4695 return;
4696 }
4697 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4698 ++dw2_string_counter;
4699 node->label = xstrdup (label);
4700
4701 if (!dwarf_split_debug_info)
4702 {
4703 node->form = DW_FORM_strp;
4704 node->index = NOT_INDEXED;
4705 }
4706 else
4707 {
4708 node->form = dwarf_FORM (DW_FORM_strx);
4709 node->index = NO_INDEX_ASSIGNED;
4710 }
4711 }
4712
4713 /* A helper function for dwarf2out_finish, called to reset indirect
4714 string decisions done for early LTO dwarf output before fat object
4715 dwarf output. */
4716
4717 int
4718 reset_indirect_string (indirect_string_node **h, void *)
4719 {
4720 struct indirect_string_node *node = *h;
4721 if (node->form == DW_FORM_strp || node->form == dwarf_FORM (DW_FORM_strx))
4722 {
4723 free (node->label);
4724 node->label = NULL;
4725 node->form = (dwarf_form) 0;
4726 node->index = 0;
4727 }
4728 return 1;
4729 }
4730
4731 /* Find out whether a string should be output inline in DIE
4732 or out-of-line in .debug_str section. */
4733
4734 static enum dwarf_form
4735 find_string_form (struct indirect_string_node *node)
4736 {
4737 unsigned int len;
4738
4739 if (node->form)
4740 return node->form;
4741
4742 len = strlen (node->str) + 1;
4743
4744 /* If the string is shorter than or equal in length to the reference, it is
4745 always better to put it inline. */
4746 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4747 return node->form = DW_FORM_string;
4748
4749 /* If we cannot expect the linker to merge strings in the .debug_str
4750 section, only put it into .debug_str if it is worthwhile even within
4751 this single module. */
4752 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4753 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4754 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4755 return node->form = DW_FORM_string;
4756
4757 set_indirect_string (node);
4758
4759 return node->form;
4760 }
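/* A rough worked example of the heuristic above, assuming
   DWARF_OFFSET_SIZE == 4: a 10-byte string (including the terminating
   NUL) costs 10 bytes each time it is emitted inline, versus 4 bytes
   per reference plus 10 bytes once in .debug_str.  Without SECTION_MERGE
   it therefore goes out of line only when (10 - 4) * refcount > 10,
   i.e. starting with the second reference; with mergeable strings any
   string longer than the reference itself goes to .debug_str.  */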
4761
4762 /* Find out whether the string referenced from the attribute should be
4763 output inline in DIE or out-of-line in .debug_str section. */
4764
4765 static enum dwarf_form
4766 AT_string_form (dw_attr_node *a)
4767 {
4768 gcc_assert (a && AT_class (a) == dw_val_class_str);
4769 return find_string_form (a->dw_attr_val.v.val_str);
4770 }
4771
4772 /* Add a DIE reference attribute value to a DIE. */
4773
4774 static inline void
4775 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4776 {
4777 dw_attr_node attr;
4778 gcc_checking_assert (targ_die != NULL);
4779
4780 /* With LTO we can end up trying to reference something we didn't create
4781 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4782 if (targ_die == NULL)
4783 return;
4784
4785 attr.dw_attr = attr_kind;
4786 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4787 attr.dw_attr_val.val_entry = NULL;
4788 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4789 attr.dw_attr_val.v.val_die_ref.external = 0;
4790 add_dwarf_attr (die, &attr);
4791 }
4792
4793 /* Change DIE reference REF to point to NEW_DIE instead. */
4794
4795 static inline void
4796 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4797 {
4798 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4799 ref->dw_attr_val.v.val_die_ref.die = new_die;
4800 ref->dw_attr_val.v.val_die_ref.external = 0;
4801 }
4802
4803 /* Add an AT_specification attribute to a DIE, and also make the back
4804 pointer from the specification to the definition. */
4805
4806 static inline void
4807 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4808 {
4809 add_AT_die_ref (die, DW_AT_specification, targ_die);
4810 gcc_assert (!targ_die->die_definition);
4811 targ_die->die_definition = die;
4812 }
4813
4814 static inline dw_die_ref
4815 AT_ref (dw_attr_node *a)
4816 {
4817 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4818 return a->dw_attr_val.v.val_die_ref.die;
4819 }
4820
4821 static inline int
4822 AT_ref_external (dw_attr_node *a)
4823 {
4824 if (a && AT_class (a) == dw_val_class_die_ref)
4825 return a->dw_attr_val.v.val_die_ref.external;
4826
4827 return 0;
4828 }
4829
4830 static inline void
4831 set_AT_ref_external (dw_attr_node *a, int i)
4832 {
4833 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4834 a->dw_attr_val.v.val_die_ref.external = i;
4835 }
4836
4837 /* Add a location description attribute value to a DIE. */
4838
4839 static inline void
4840 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4841 {
4842 dw_attr_node attr;
4843
4844 attr.dw_attr = attr_kind;
4845 attr.dw_attr_val.val_class = dw_val_class_loc;
4846 attr.dw_attr_val.val_entry = NULL;
4847 attr.dw_attr_val.v.val_loc = loc;
4848 add_dwarf_attr (die, &attr);
4849 }
4850
4851 static inline dw_loc_descr_ref
4852 AT_loc (dw_attr_node *a)
4853 {
4854 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4855 return a->dw_attr_val.v.val_loc;
4856 }
4857
4858 static inline void
4859 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4860 {
4861 dw_attr_node attr;
4862
4863 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4864 return;
4865
4866 attr.dw_attr = attr_kind;
4867 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4868 attr.dw_attr_val.val_entry = NULL;
4869 attr.dw_attr_val.v.val_loc_list = loc_list;
4870 add_dwarf_attr (die, &attr);
4871 have_location_lists = true;
4872 }
4873
4874 static inline dw_loc_list_ref
4875 AT_loc_list (dw_attr_node *a)
4876 {
4877 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4878 return a->dw_attr_val.v.val_loc_list;
4879 }
4880
4881 /* Add a view list attribute to DIE. It must have a DW_AT_location
4882 attribute, because the view list complements the location list. */
4883
4884 static inline void
4885 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4886 {
4887 dw_attr_node attr;
4888
4889 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4890 return;
4891
4892 attr.dw_attr = attr_kind;
4893 attr.dw_attr_val.val_class = dw_val_class_view_list;
4894 attr.dw_attr_val.val_entry = NULL;
4895 attr.dw_attr_val.v.val_view_list = die;
4896 add_dwarf_attr (die, &attr);
4897 gcc_checking_assert (get_AT (die, DW_AT_location));
4898 gcc_assert (have_location_lists);
4899 }
4900
4901 /* Return a pointer to the location list referenced by the attribute.
4902 If the named attribute is a view list, look up the corresponding
4903 DW_AT_location attribute and return its location list. */
4904
4905 static inline dw_loc_list_ref *
4906 AT_loc_list_ptr (dw_attr_node *a)
4907 {
4908 gcc_assert (a);
4909 switch (AT_class (a))
4910 {
4911 case dw_val_class_loc_list:
4912 return &a->dw_attr_val.v.val_loc_list;
4913 case dw_val_class_view_list:
4914 {
4915 dw_attr_node *l;
4916 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4917 if (!l)
4918 return NULL;
4919 gcc_checking_assert (l + 1 == a);
4920 return AT_loc_list_ptr (l);
4921 }
4922 default:
4923 gcc_unreachable ();
4924 }
4925 }
4926
4927 /* Return the location attribute value associated with a view list
4928 attribute value. */
4929
4930 static inline dw_val_node *
4931 view_list_to_loc_list_val_node (dw_val_node *val)
4932 {
4933 gcc_assert (val->val_class == dw_val_class_view_list);
4934 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4935 if (!loc)
4936 return NULL;
4937 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4938 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4939 return &loc->dw_attr_val;
4940 }
4941
4942 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4943 {
4944 static hashval_t hash (addr_table_entry *);
4945 static bool equal (addr_table_entry *, addr_table_entry *);
4946 };
4947
4948 /* Table of entries into the .debug_addr section. */
4949
4950 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4951
4952 /* Hash an address_table_entry. */
4953
4954 hashval_t
4955 addr_hasher::hash (addr_table_entry *a)
4956 {
4957 inchash::hash hstate;
4958 switch (a->kind)
4959 {
4960 case ate_kind_rtx:
4961 hstate.add_int (0);
4962 break;
4963 case ate_kind_rtx_dtprel:
4964 hstate.add_int (1);
4965 break;
4966 case ate_kind_label:
4967 return htab_hash_string (a->addr.label);
4968 default:
4969 gcc_unreachable ();
4970 }
4971 inchash::add_rtx (a->addr.rtl, hstate);
4972 return hstate.end ();
4973 }
4974
4975 /* Determine equality for two address_table_entries. */
4976
4977 bool
4978 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4979 {
4980 if (a1->kind != a2->kind)
4981 return 0;
4982 switch (a1->kind)
4983 {
4984 case ate_kind_rtx:
4985 case ate_kind_rtx_dtprel:
4986 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4987 case ate_kind_label:
4988 return strcmp (a1->addr.label, a2->addr.label) == 0;
4989 default:
4990 gcc_unreachable ();
4991 }
4992 }
4993
4994 /* Initialize an addr_table_entry. */
4995
4996 void
4997 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
4998 {
4999 e->kind = kind;
5000 switch (kind)
5001 {
5002 case ate_kind_rtx:
5003 case ate_kind_rtx_dtprel:
5004 e->addr.rtl = (rtx) addr;
5005 break;
5006 case ate_kind_label:
5007 e->addr.label = (char *) addr;
5008 break;
5009 }
5010 e->refcount = 0;
5011 e->index = NO_INDEX_ASSIGNED;
5012 }
5013
5014 /* Add an entry for ADDR of kind KIND to the address table, creating it if
5015 needed and bumping its refcount. Defer setting an index until output time. */
5016
5017 static addr_table_entry *
5018 add_addr_table_entry (void *addr, enum ate_kind kind)
5019 {
5020 addr_table_entry *node;
5021 addr_table_entry finder;
5022
5023 gcc_assert (dwarf_split_debug_info);
5024 if (! addr_index_table)
5025 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
5026 init_addr_table_entry (&finder, kind, addr);
5027 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
5028
5029 if (*slot == HTAB_EMPTY_ENTRY)
5030 {
5031 node = ggc_cleared_alloc<addr_table_entry> ();
5032 init_addr_table_entry (node, kind, addr);
5033 *slot = node;
5034 }
5035 else
5036 node = *slot;
5037
5038 node->refcount++;
5039 return node;
5040 }
5041
5042 /* Remove an entry from the addr table by decrementing its refcount.
5043 Strictly, decrementing the refcount would be enough, but the
5044 assertion that the entry is actually in the table has found
5045 bugs. */
5046
5047 static void
5048 remove_addr_table_entry (addr_table_entry *entry)
5049 {
5050 gcc_assert (dwarf_split_debug_info && addr_index_table);
5051 /* After an index is assigned, the table is frozen. */
5052 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5053 entry->refcount--;
5054 }
5055
5056 /* Given a location list, remove all addresses it refers to from the
5057 address_table. */
5058
5059 static void
5060 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5061 {
5062 for (; descr; descr = descr->dw_loc_next)
5063 if (descr->dw_loc_oprnd1.val_entry != NULL)
5064 {
5065 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5066 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5067 }
5068 }
5069
5070 /* A helper function for dwarf2out_finish called through
5071 htab_traverse. Assign an addr_table_entry its index. All entries
5072 must be collected into the table when this function is called,
5073 because the indexing code relies on htab_traverse to traverse nodes
5074 in the same order for each run. */
5075
5076 int
5077 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5078 {
5079 addr_table_entry *node = *h;
5080
5081 /* Don't index unreferenced nodes. */
5082 if (node->refcount == 0)
5083 return 1;
5084
5085 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5086 node->index = *index;
5087 *index += 1;
5088
5089 return 1;
5090 }
5091
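/* For illustration only (a sketch, not part of the surrounding code):
   under -gsplit-dwarf a typical entry lifecycle is

       addr_table_entry *e = add_addr_table_entry (rtl, ate_kind_rtx);
       ...
       remove_addr_table_entry (e);

   add_addr_table_entry bumps the refcount and remove_addr_table_entry
   drops it; entries whose refcount has fallen back to zero are skipped
   by index_addr_table_entry and so get no slot in .debug_addr.  */
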
5092 /* Add an address constant attribute value to a DIE. When using
5093 dwarf_split_debug_info, address attributes in dies destined for the
5094 final executable should be direct references--setting the parameter
5095 force_direct ensures this behavior. */
5096
5097 static inline void
5098 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5099 bool force_direct)
5100 {
5101 dw_attr_node attr;
5102
5103 attr.dw_attr = attr_kind;
5104 attr.dw_attr_val.val_class = dw_val_class_addr;
5105 attr.dw_attr_val.v.val_addr = addr;
5106 if (dwarf_split_debug_info && !force_direct)
5107 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5108 else
5109 attr.dw_attr_val.val_entry = NULL;
5110 add_dwarf_attr (die, &attr);
5111 }
5112
5113 /* Get the RTX from an address DIE attribute. */
5114
5115 static inline rtx
5116 AT_addr (dw_attr_node *a)
5117 {
5118 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5119 return a->dw_attr_val.v.val_addr;
5120 }
5121
5122 /* Add a file attribute value to a DIE. */
5123
5124 static inline void
5125 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5126 struct dwarf_file_data *fd)
5127 {
5128 dw_attr_node attr;
5129
5130 attr.dw_attr = attr_kind;
5131 attr.dw_attr_val.val_class = dw_val_class_file;
5132 attr.dw_attr_val.val_entry = NULL;
5133 attr.dw_attr_val.v.val_file = fd;
5134 add_dwarf_attr (die, &attr);
5135 }
5136
5137 /* Get the dwarf_file_data from a file DIE attribute. */
5138
5139 static inline struct dwarf_file_data *
5140 AT_file (dw_attr_node *a)
5141 {
5142 gcc_assert (a && (AT_class (a) == dw_val_class_file
5143 || AT_class (a) == dw_val_class_file_implicit));
5144 return a->dw_attr_val.v.val_file;
5145 }
5146
5147 /* Add a vms delta attribute value to a DIE. */
5148
5149 static inline void
5150 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
5151 const char *lbl1, const char *lbl2)
5152 {
5153 dw_attr_node attr;
5154
5155 attr.dw_attr = attr_kind;
5156 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
5157 attr.dw_attr_val.val_entry = NULL;
5158 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
5159 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
5160 add_dwarf_attr (die, &attr);
5161 }
5162
5163 /* Add a symbolic view identifier attribute value to a DIE. */
5164
5165 static inline void
5166 add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
5167 const char *view_label)
5168 {
5169 dw_attr_node attr;
5170
5171 attr.dw_attr = attr_kind;
5172 attr.dw_attr_val.val_class = dw_val_class_symview;
5173 attr.dw_attr_val.val_entry = NULL;
5174 attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
5175 add_dwarf_attr (die, &attr);
5176 }
5177
5178 /* Add a label identifier attribute value to a DIE. */
5179
5180 static inline void
5181 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5182 const char *lbl_id)
5183 {
5184 dw_attr_node attr;
5185
5186 attr.dw_attr = attr_kind;
5187 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5188 attr.dw_attr_val.val_entry = NULL;
5189 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
5190 if (dwarf_split_debug_info)
5191 attr.dw_attr_val.val_entry
5192 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5193 ate_kind_label);
5194 add_dwarf_attr (die, &attr);
5195 }
5196
5197 /* Add a section offset attribute value to a DIE, an offset into the
5198 debug_line section. */
5199
5200 static inline void
5201 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5202 const char *label)
5203 {
5204 dw_attr_node attr;
5205
5206 attr.dw_attr = attr_kind;
5207 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5208 attr.dw_attr_val.val_entry = NULL;
5209 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5210 add_dwarf_attr (die, &attr);
5211 }
5212
5213 /* Add a section offset attribute value to a DIE, an offset into the
5214 debug_macinfo section. */
5215
5216 static inline void
5217 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5218 const char *label)
5219 {
5220 dw_attr_node attr;
5221
5222 attr.dw_attr = attr_kind;
5223 attr.dw_attr_val.val_class = dw_val_class_macptr;
5224 attr.dw_attr_val.val_entry = NULL;
5225 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5226 add_dwarf_attr (die, &attr);
5227 }
5228
5229 /* Add a range_list attribute value to a DIE. When using
5230 dwarf_split_debug_info, address attributes in dies destined for the
5231 final executable should be direct references--setting the parameter
5232 force_direct ensures this behavior. */
5233
5234 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5235 #define RELOCATED_OFFSET (NULL)
5236
5237 static void
5238 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5239 long unsigned int offset, bool force_direct)
5240 {
5241 dw_attr_node attr;
5242
5243 attr.dw_attr = attr_kind;
5244 attr.dw_attr_val.val_class = dw_val_class_range_list;
5245 /* For the range_list attribute, use val_entry to store whether the
5246 offset should follow split-debug-info or normal semantics. This
5247 value is read in output_range_list_offset. */
5248 if (dwarf_split_debug_info && !force_direct)
5249 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5250 else
5251 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5252 attr.dw_attr_val.v.val_offset = offset;
5253 add_dwarf_attr (die, &attr);
5254 }
5255
5256 /* Return the start label of a delta attribute. */
5257
5258 static inline const char *
5259 AT_vms_delta1 (dw_attr_node *a)
5260 {
5261 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5262 return a->dw_attr_val.v.val_vms_delta.lbl1;
5263 }
5264
5265 /* Return the end label of a delta attribute. */
5266
5267 static inline const char *
5268 AT_vms_delta2 (dw_attr_node *a)
5269 {
5270 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5271 return a->dw_attr_val.v.val_vms_delta.lbl2;
5272 }
5273
5274 static inline const char *
5275 AT_lbl (dw_attr_node *a)
5276 {
5277 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5278 || AT_class (a) == dw_val_class_lineptr
5279 || AT_class (a) == dw_val_class_macptr
5280 || AT_class (a) == dw_val_class_loclistsptr
5281 || AT_class (a) == dw_val_class_high_pc));
5282 return a->dw_attr_val.v.val_lbl_id;
5283 }
5284
5285 /* Get the attribute of type attr_kind. */
5286
5287 static dw_attr_node *
5288 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5289 {
5290 dw_attr_node *a;
5291 unsigned ix;
5292 dw_die_ref spec = NULL;
5293
5294 if (! die)
5295 return NULL;
5296
5297 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5298 if (a->dw_attr == attr_kind)
5299 return a;
5300 else if (a->dw_attr == DW_AT_specification
5301 || a->dw_attr == DW_AT_abstract_origin)
5302 spec = AT_ref (a);
5303
5304 if (spec)
5305 return get_AT (spec, attr_kind);
5306
5307 return NULL;
5308 }
5309
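/* For example (illustrative only): if a member function definition DIE
   carries no DW_AT_name of its own but has DW_AT_specification pointing
   at the in-class declaration, get_AT (die, DW_AT_name) returns the name
   attribute found on the specification DIE, because the lookup above
   follows DW_AT_specification and DW_AT_abstract_origin links.  */
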
5310 /* Returns the parent of the declaration of DIE. */
5311
5312 static dw_die_ref
5313 get_die_parent (dw_die_ref die)
5314 {
5315 dw_die_ref t;
5316
5317 if (!die)
5318 return NULL;
5319
5320 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5321 || (t = get_AT_ref (die, DW_AT_specification)))
5322 die = t;
5323
5324 return die->die_parent;
5325 }
5326
5327 /* Return the "low pc" attribute value, typically associated with a subprogram
5328 DIE. Return null if the "low pc" attribute is either not present, or if it
5329 cannot be represented as an assembler label identifier. */
5330
5331 static inline const char *
5332 get_AT_low_pc (dw_die_ref die)
5333 {
5334 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5335
5336 return a ? AT_lbl (a) : NULL;
5337 }
5338
5339 /* Return the value of the string attribute designated by ATTR_KIND, or
5340 NULL if it is not present. */
5341
5342 static inline const char *
5343 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5344 {
5345 dw_attr_node *a = get_AT (die, attr_kind);
5346
5347 return a ? AT_string (a) : NULL;
5348 }
5349
5350 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5351 if it is not present. */
5352
5353 static inline int
5354 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5355 {
5356 dw_attr_node *a = get_AT (die, attr_kind);
5357
5358 return a ? AT_flag (a) : 0;
5359 }
5360
5361 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5362 if it is not present. */
5363
5364 static inline unsigned
5365 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5366 {
5367 dw_attr_node *a = get_AT (die, attr_kind);
5368
5369 return a ? AT_unsigned (a) : 0;
5370 }
5371
5372 static inline dw_die_ref
5373 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5374 {
5375 dw_attr_node *a = get_AT (die, attr_kind);
5376
5377 return a ? AT_ref (a) : NULL;
5378 }
5379
5380 static inline struct dwarf_file_data *
5381 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5382 {
5383 dw_attr_node *a = get_AT (die, attr_kind);
5384
5385 return a ? AT_file (a) : NULL;
5386 }
5387
5388 /* Return TRUE if the language is C. */
5389
5390 static inline bool
5391 is_c (void)
5392 {
5393 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5394
5395 return (lang == DW_LANG_C || lang == DW_LANG_C89 || lang == DW_LANG_C99
5396 || lang == DW_LANG_C11 || lang == DW_LANG_ObjC);
5399 }
5400
5401 /* Return TRUE if the language is C++. */
5402
5403 static inline bool
5404 is_cxx (void)
5405 {
5406 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5407
5408 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5409 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5410 }
5411
5412 /* Return TRUE if DECL was created by the C++ frontend. */
5413
5414 static bool
5415 is_cxx (const_tree decl)
5416 {
5417 if (in_lto_p)
5418 {
5419 const_tree context = get_ultimate_context (decl);
5420 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5421 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5422 }
5423 return is_cxx ();
5424 }
5425
5426 /* Return TRUE if the language is Fortran. */
5427
5428 static inline bool
5429 is_fortran (void)
5430 {
5431 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5432
5433 return (lang == DW_LANG_Fortran77
5434 || lang == DW_LANG_Fortran90
5435 || lang == DW_LANG_Fortran95
5436 || lang == DW_LANG_Fortran03
5437 || lang == DW_LANG_Fortran08);
5438 }
5439
5440 static inline bool
5441 is_fortran (const_tree decl)
5442 {
5443 if (in_lto_p)
5444 {
5445 const_tree context = get_ultimate_context (decl);
5446 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5447 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5448 "GNU Fortran", 11) == 0
5449 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5450 "GNU F77") == 0);
5451 }
5452 return is_fortran ();
5453 }
5454
5455 /* Return TRUE if the language is Ada. */
5456
5457 static inline bool
5458 is_ada (void)
5459 {
5460 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5461
5462 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5463 }
5464
5465 /* Return TRUE if the language is D. */
5466
5467 static inline bool
5468 is_dlang (void)
5469 {
5470 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5471
5472 return lang == DW_LANG_D;
5473 }
5474
5475 /* Remove the specified attribute if present. Return TRUE if removal
5476 was successful. */
5477
5478 static bool
5479 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5480 {
5481 dw_attr_node *a;
5482 unsigned ix;
5483
5484 if (! die)
5485 return false;
5486
5487 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5488 if (a->dw_attr == attr_kind)
5489 {
5490 if (AT_class (a) == dw_val_class_str)
5491 if (a->dw_attr_val.v.val_str->refcount)
5492 a->dw_attr_val.v.val_str->refcount--;
5493
5494 /* vec::ordered_remove should help reduce the number of abbrevs
5495 that are needed. */
5496 die->die_attr->ordered_remove (ix);
5497 return true;
5498 }
5499 return false;
5500 }
5501
5502 /* Remove CHILD from its parent. PREV must have the property that
5503 PREV->DIE_SIB == CHILD. Does not alter CHILD. */
5504
5505 static void
5506 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5507 {
5508 gcc_assert (child->die_parent == prev->die_parent);
5509 gcc_assert (prev->die_sib == child);
5510 if (prev == child)
5511 {
5512 gcc_assert (child->die_parent->die_child == child);
5513 prev = NULL;
5514 }
5515 else
5516 prev->die_sib = child->die_sib;
5517 if (child->die_parent->die_child == child)
5518 child->die_parent->die_child = prev;
5519 child->die_sib = NULL;
5520 }
5521
5522 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5523 PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */
5524
5525 static void
5526 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5527 {
5528 dw_die_ref parent = old_child->die_parent;
5529
5530 gcc_assert (parent == prev->die_parent);
5531 gcc_assert (prev->die_sib == old_child);
5532
5533 new_child->die_parent = parent;
5534 if (prev == old_child)
5535 {
5536 gcc_assert (parent->die_child == old_child);
5537 new_child->die_sib = new_child;
5538 }
5539 else
5540 {
5541 prev->die_sib = new_child;
5542 new_child->die_sib = old_child->die_sib;
5543 }
5544 if (old_child->die_parent->die_child == old_child)
5545 old_child->die_parent->die_child = new_child;
5546 old_child->die_sib = NULL;
5547 }
5548
5549 /* Move all children from OLD_PARENT to NEW_PARENT. */
5550
5551 static void
5552 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5553 {
5554 dw_die_ref c;
5555 new_parent->die_child = old_parent->die_child;
5556 old_parent->die_child = NULL;
5557 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5558 }
5559
5560 /* Remove from DIE all children whose die_tag is TAG.  Do nothing if
5561 no child matches TAG. */
5562
5563 static void
5564 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5565 {
5566 dw_die_ref c;
5567
5568 c = die->die_child;
5569 if (c) do {
5570 dw_die_ref prev = c;
5571 c = c->die_sib;
5572 while (c->die_tag == tag)
5573 {
5574 remove_child_with_prev (c, prev);
5575 c->die_parent = NULL;
5576 /* Might have removed every child. */
5577 if (die->die_child == NULL)
5578 return;
5579 c = prev->die_sib;
5580 }
5581 } while (c != die->die_child);
5582 }
5583
5584 /* Add a CHILD_DIE as the last child of DIE. */
5585
5586 static void
5587 add_child_die (dw_die_ref die, dw_die_ref child_die)
5588 {
5589 /* FIXME this should probably be an assert. */
5590 if (! die || ! child_die)
5591 return;
5592 gcc_assert (die != child_die);
5593
5594 child_die->die_parent = die;
5595 if (die->die_child)
5596 {
5597 child_die->die_sib = die->die_child->die_sib;
5598 die->die_child->die_sib = child_die;
5599 }
5600 else
5601 child_die->die_sib = child_die;
5602 die->die_child = child_die;
5603 }
5604
5605 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5606
5607 static void
5608 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5609 dw_die_ref after_die)
5610 {
5611 gcc_assert (die
5612 && child_die
5613 && after_die
5614 && die->die_child
5615 && die != child_die);
5616
5617 child_die->die_parent = die;
5618 child_die->die_sib = after_die->die_sib;
5619 after_die->die_sib = child_die;
5620 if (die->die_child == after_die)
5621 die->die_child = child_die;
5622 }
5623
5624 /* Unassociate CHILD from its parent, and make its parent be
5625 NEW_PARENT. */
5626
5627 static void
5628 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5629 {
5630 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5631 if (p->die_sib == child)
5632 {
5633 remove_child_with_prev (child, p);
5634 break;
5635 }
5636 add_child_die (new_parent, child);
5637 }
5638
5639 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5640 is the specification, to the end of PARENT's list of children.
5641 This is done by removing and re-adding it. */
5642
5643 static void
5644 splice_child_die (dw_die_ref parent, dw_die_ref child)
5645 {
5646 /* We want the declaration DIE from inside the class, not the
5647 specification DIE at toplevel. */
5648 if (child->die_parent != parent)
5649 {
5650 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5651
5652 if (tmp)
5653 child = tmp;
5654 }
5655
5656 gcc_assert (child->die_parent == parent
5657 || (child->die_parent
5658 == get_AT_ref (parent, DW_AT_specification)));
5659
5660 reparent_child (child, parent);
5661 }
5662
5663 /* Create and return a new die with TAG_VALUE as tag. */
5664
5665 static inline dw_die_ref
5666 new_die_raw (enum dwarf_tag tag_value)
5667 {
5668 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5669 die->die_tag = tag_value;
5670 return die;
5671 }
5672
5673 /* Create and return a new die with a parent of PARENT_DIE. If
5674 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5675 associated tree T must be supplied to determine parenthood
5676 later. */
5677
5678 static inline dw_die_ref
5679 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5680 {
5681 dw_die_ref die = new_die_raw (tag_value);
5682
5683 if (parent_die != NULL)
5684 add_child_die (parent_die, die);
5685 else
5686 {
5687 limbo_die_node *limbo_node;
5688
5689 /* No DIEs created after early dwarf should end up in limbo,
5690 because the limbo list should not persist past LTO
5691 streaming. */
5692 if (tag_value != DW_TAG_compile_unit
5693 /* These are allowed because they're generated while
5694 breaking out COMDAT units late. */
5695 && tag_value != DW_TAG_type_unit
5696 && tag_value != DW_TAG_skeleton_unit
5697 && !early_dwarf
5698 /* Allow nested functions to live in limbo because they will
5699 only temporarily live there, as decls_for_scope will fix
5700 them up. */
5701 && (TREE_CODE (t) != FUNCTION_DECL
5702 || !decl_function_context (t))
5703 /* Same as nested functions above but for types. Types that
5704 are local to a function will be fixed in
5705 decls_for_scope. */
5706 && (!RECORD_OR_UNION_TYPE_P (t)
5707 || !TYPE_CONTEXT (t)
5708 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5709 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5710 especially in the ltrans stage, but once we implement LTO
5711 dwarf streaming, we should remove this exception. */
5712 && !in_lto_p)
5713 {
5714 fprintf (stderr, "symbol ended up in limbo too late:");
5715 debug_generic_stmt (t);
5716 gcc_unreachable ();
5717 }
5718
5719 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5720 limbo_node->die = die;
5721 limbo_node->created_for = t;
5722 limbo_node->next = limbo_die_list;
5723 limbo_die_list = limbo_node;
5724 }
5725
5726 return die;
5727 }
5728
5729 /* Return the DIE associated with the given type specifier. */
5730
5731 static inline dw_die_ref
5732 lookup_type_die (tree type)
5733 {
5734 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5735 if (die && die->removed)
5736 {
5737 TYPE_SYMTAB_DIE (type) = NULL;
5738 return NULL;
5739 }
5740 return die;
5741 }
5742
5743 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5744 anonymous type named by a typedef whose DIE is TYPE_DIE, return the
5745 DIE of the anonymous type instead of the one of the naming typedef. */
5746
5747 static inline dw_die_ref
5748 strip_naming_typedef (tree type, dw_die_ref type_die)
5749 {
5750 if (type
5751 && TREE_CODE (type) == RECORD_TYPE
5752 && type_die
5753 && type_die->die_tag == DW_TAG_typedef
5754 && is_naming_typedef_decl (TYPE_NAME (type)))
5755 type_die = get_AT_ref (type_die, DW_AT_type);
5756 return type_die;
5757 }
5758
5759 /* Like lookup_type_die, but if TYPE is an anonymous type named by a
5760 typedef[1], return the DIE of the anonymous type instead of the one
5761 of the naming typedef.  This is because in gen_typedef_die we
5762 equated the anonymous struct named by the typedef with the DIE of
5763 the naming typedef, so by default lookup_type_die on an anonymous
5764 struct yields the DIE of the naming typedef.
5765
5766 [1]: Read the comment of is_naming_typedef_decl to learn about what
5767 a naming typedef is. */
5768
5769 static inline dw_die_ref
5770 lookup_type_die_strip_naming_typedef (tree type)
5771 {
5772 dw_die_ref die = lookup_type_die (type);
5773 return strip_naming_typedef (type, die);
5774 }
5775
5776 /* Equate a DIE to a given type specifier. */
5777
5778 static inline void
5779 equate_type_number_to_die (tree type, dw_die_ref type_die)
5780 {
5781 TYPE_SYMTAB_DIE (type) = type_die;
5782 }
5783
5784 static dw_die_ref maybe_create_die_with_external_ref (tree);
5785 struct GTY(()) sym_off_pair
5786 {
5787 const char * GTY((skip)) sym;
5788 unsigned HOST_WIDE_INT off;
5789 };
5790 static GTY(()) hash_map<tree, sym_off_pair> *external_die_map;
5791
5792 /* Returns a hash value for X (which really is a die_struct). */
5793
5794 inline hashval_t
5795 decl_die_hasher::hash (die_node *x)
5796 {
5797 return (hashval_t) x->decl_id;
5798 }
5799
5800 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5801
5802 inline bool
5803 decl_die_hasher::equal (die_node *x, tree y)
5804 {
5805 return (x->decl_id == DECL_UID (y));
5806 }
5807
5808 /* Return the DIE associated with a given declaration. */
5809
5810 static inline dw_die_ref
5811 lookup_decl_die (tree decl)
5812 {
5813 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5814 NO_INSERT);
5815 if (!die)
5816 {
5817 if (in_lto_p)
5818 return maybe_create_die_with_external_ref (decl);
5819 return NULL;
5820 }
5821 if ((*die)->removed)
5822 {
5823 decl_die_table->clear_slot (die);
5824 return NULL;
5825 }
5826 return *die;
5827 }
5828
5829
5830 /* Return the DIE associated with BLOCK. */
5831
5832 static inline dw_die_ref
5833 lookup_block_die (tree block)
5834 {
5835 dw_die_ref die = BLOCK_DIE (block);
5836 if (!die && in_lto_p)
5837 return maybe_create_die_with_external_ref (block);
5838 return die;
5839 }
5840
5841 /* Associate DIE with BLOCK. */
5842
5843 static inline void
5844 equate_block_to_die (tree block, dw_die_ref die)
5845 {
5846 BLOCK_DIE (block) = die;
5847 }
5848 #undef BLOCK_DIE
5849
5850
5851 /* For DECL, which might have early DWARF output, query a SYMBOL + OFFSET
5852 style reference.  Return true if we found one referring to a DIE for
5853 DECL, otherwise return false. */
5854
5855 static bool
5856 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5857 unsigned HOST_WIDE_INT *off)
5858 {
5859 dw_die_ref die;
5860
5861 if (in_lto_p)
5862 {
5863 /* During WPA stage and incremental linking we use a hash-map
5864 to store the decl <-> label + offset map. */
5865 if (!external_die_map)
5866 return false;
5867 sym_off_pair *desc = external_die_map->get (decl);
5868 if (!desc)
5869 return false;
5870 *sym = desc->sym;
5871 *off = desc->off;
5872 return true;
5873 }
5874
5875 if (TREE_CODE (decl) == BLOCK)
5876 die = lookup_block_die (decl);
5877 else
5878 die = lookup_decl_die (decl);
5879 if (!die)
5880 return false;
5881
5882 /* Similar to get_ref_die_offset_label, but using the "correct"
5883 label. */
5884 *off = die->die_offset;
5885 while (die->die_parent)
5886 die = die->die_parent;
5887 /* For the containing CU DIE we compute a die_symbol in
5888 compute_comp_unit_symbol. */
5889 gcc_assert (die->die_tag == DW_TAG_compile_unit
5890 && die->die_id.die_symbol != NULL);
5891 *sym = die->die_id.die_symbol;
5892 return true;
5893 }
5894
5895 /* Add to DIE a reference of kind ATTR_KIND to the DIE found at SYMBOL + OFFSET. */
5896
5897 static void
5898 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5899 const char *symbol, HOST_WIDE_INT offset)
5900 {
5901 /* Create a fake DIE that contains the reference. Don't use
5902 new_die because we don't want to end up in the limbo list. */
5903 /* ??? We probably want to share these, thus put a ref to the DIE
5904 we create here to the external_die_map entry. */
5905 dw_die_ref ref = new_die_raw (die->die_tag);
5906 ref->die_id.die_symbol = symbol;
5907 ref->die_offset = offset;
5908 ref->with_offset = 1;
5909 add_AT_die_ref (die, attr_kind, ref);
5910 }
5911
5912 /* Record that the early debug DIE for DECL lives at SYM + OFF, so that
5913 a stub DIE referencing it can be created later when needed. */
5914
5915 static void
5916 dwarf2out_register_external_die (tree decl, const char *sym,
5917 unsigned HOST_WIDE_INT off)
5918 {
5919 if (debug_info_level == DINFO_LEVEL_NONE)
5920 return;
5921
5922 if (!external_die_map)
5923 external_die_map = hash_map<tree, sym_off_pair>::create_ggc (1000);
5924 gcc_checking_assert (!external_die_map->get (decl));
5925 sym_off_pair p = { IDENTIFIER_POINTER (get_identifier (sym)), off };
5926 external_die_map->put (decl, p);
5927 }
5928
5929 /* If we have a registered external DIE for DECL return a new DIE for
5930 the concrete instance with an appropriate abstract origin. */
5931
5932 static dw_die_ref
5933 maybe_create_die_with_external_ref (tree decl)
5934 {
5935 if (!external_die_map)
5936 return NULL;
5937 sym_off_pair *desc = external_die_map->get (decl);
5938 if (!desc)
5939 return NULL;
5940
5941 const char *sym = desc->sym;
5942 unsigned HOST_WIDE_INT off = desc->off;
5943
5944 in_lto_p = false;
5945 dw_die_ref die = (TREE_CODE (decl) == BLOCK
5946 ? lookup_block_die (decl) : lookup_decl_die (decl));
5947 gcc_assert (!die);
5948 in_lto_p = true;
5949
5950 tree ctx;
5951 dw_die_ref parent = NULL;
5952 /* Need to look up a DIE for the decl's context - the containing
5953 function or translation unit. */
5954 if (TREE_CODE (decl) == BLOCK)
5955 {
5956 ctx = BLOCK_SUPERCONTEXT (decl);
5957 /* ??? We do not output DIEs for all scopes, so skip enclosing
5958 BLOCKs until we find one that has a DIE. */
5959 while (TREE_CODE (ctx) == BLOCK
5960 && !lookup_block_die (ctx))
5961 ctx = BLOCK_SUPERCONTEXT (ctx);
5962 }
5963 else
5964 ctx = DECL_CONTEXT (decl);
5965 /* Peel types in the context stack. */
5966 while (ctx && TYPE_P (ctx))
5967 ctx = TYPE_CONTEXT (ctx);
5968 /* Likewise namespaces in case we do not want to emit DIEs for them. */
5969 if (debug_info_level <= DINFO_LEVEL_TERSE)
5970 while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
5971 ctx = DECL_CONTEXT (ctx);
5972 if (ctx)
5973 {
5974 if (TREE_CODE (ctx) == BLOCK)
5975 parent = lookup_block_die (ctx);
5976 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5977 /* Keep the 1:1 association during WPA. */
5978 && !flag_wpa
5979 && flag_incremental_link != INCREMENTAL_LINK_LTO)
5980 /* Otherwise all late annotations go to the main CU which
5981 imports the original CUs. */
5982 parent = comp_unit_die ();
5983 else if (TREE_CODE (ctx) == FUNCTION_DECL
5984 && TREE_CODE (decl) != FUNCTION_DECL
5985 && TREE_CODE (decl) != PARM_DECL
5986 && TREE_CODE (decl) != RESULT_DECL
5987 && TREE_CODE (decl) != BLOCK)
5988 /* Leave function local entities parent determination to when
5989 we process scope vars. */
5990 ;
5991 else
5992 parent = lookup_decl_die (ctx);
5993 }
5994 else
5995 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5996 Handle this case gracefully by globalizing stuff. */
5997 parent = comp_unit_die ();
5998 /* Create a DIE "stub". */
5999 switch (TREE_CODE (decl))
6000 {
6001 case TRANSLATION_UNIT_DECL:
6002 {
6003 die = comp_unit_die ();
6004 /* We re-target all CU decls to the LTRANS CU DIE, so no need
6005 to create a DIE for the original CUs. */
6006 return die;
6007 }
6008 case NAMESPACE_DECL:
6009 if (is_fortran (decl))
6010 die = new_die (DW_TAG_module, parent, decl);
6011 else
6012 die = new_die (DW_TAG_namespace, parent, decl);
6013 break;
6014 case FUNCTION_DECL:
6015 die = new_die (DW_TAG_subprogram, parent, decl);
6016 break;
6017 case VAR_DECL:
6018 die = new_die (DW_TAG_variable, parent, decl);
6019 break;
6020 case RESULT_DECL:
6021 die = new_die (DW_TAG_variable, parent, decl);
6022 break;
6023 case PARM_DECL:
6024 die = new_die (DW_TAG_formal_parameter, parent, decl);
6025 break;
6026 case CONST_DECL:
6027 die = new_die (DW_TAG_constant, parent, decl);
6028 break;
6029 case LABEL_DECL:
6030 die = new_die (DW_TAG_label, parent, decl);
6031 break;
6032 case BLOCK:
6033 die = new_die (DW_TAG_lexical_block, parent, decl);
6034 break;
6035 default:
6036 gcc_unreachable ();
6037 }
6038 if (TREE_CODE (decl) == BLOCK)
6039 equate_block_to_die (decl, die);
6040 else
6041 equate_decl_number_to_die (decl, die);
6042
6043 add_desc_attribute (die, decl);
6044
6045 /* Add a reference to the DIE providing early debug at SYM + OFF. */
6046 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
6047
6048 return die;
6049 }
6050
6051 /* Returns a hash value for X (which really is a var_loc_list). */
6052
6053 inline hashval_t
6054 decl_loc_hasher::hash (var_loc_list *x)
6055 {
6056 return (hashval_t) x->decl_id;
6057 }
6058
6059 /* Return nonzero if decl_id of var_loc_list X is the same as
6060 UID of decl *Y. */
6061
6062 inline bool
6063 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
6064 {
6065 return (x->decl_id == DECL_UID (y));
6066 }
6067
6068 /* Return the var_loc list associated with a given declaration. */
6069
6070 static inline var_loc_list *
6071 lookup_decl_loc (const_tree decl)
6072 {
6073 if (!decl_loc_table)
6074 return NULL;
6075 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
6076 }
6077
6078 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
6079
6080 inline hashval_t
6081 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
6082 {
6083 return (hashval_t) x->decl_id;
6084 }
6085
6086 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
6087 UID of decl *Y. */
6088
6089 inline bool
6090 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
6091 {
6092 return (x->decl_id == DECL_UID (y));
6093 }
6094
6095 /* Equate a DIE to a particular declaration. */
6096
6097 static void
6098 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6099 {
6100 unsigned int decl_id = DECL_UID (decl);
6101
6102 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6103 decl_die->decl_id = decl_id;
6104 }
6105
6106 /* Return how many bits the PIECE EXPR_LIST covers. */
6107
6108 static HOST_WIDE_INT
6109 decl_piece_bitsize (rtx piece)
6110 {
6111 int ret = (int) GET_MODE (piece);
6112 if (ret)
6113 return ret;
6114 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6115 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6116 return INTVAL (XEXP (XEXP (piece, 0), 0));
6117 }
6118
6119 /* Return a pointer to the slot holding the location note in the PIECE EXPR_LIST. */
6120
6121 static rtx *
6122 decl_piece_varloc_ptr (rtx piece)
6123 {
6124 if ((int) GET_MODE (piece))
6125 return &XEXP (piece, 0);
6126 else
6127 return &XEXP (XEXP (piece, 0), 1);
6128 }
6129
6130 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6131 NEXT is the chain of following piece nodes. */
6132
6133 static rtx_expr_list *
6134 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6135 {
6136 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6137 return alloc_EXPR_LIST (bitsize, loc_note, next);
6138 else
6139 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6140 GEN_INT (bitsize),
6141 loc_note), next);
6142 }
6143
6144 /* Return the rtx that should be stored into the loc field for
6145 LOC_NOTE and BITPOS/BITSIZE. */
6146
6147 static rtx
6148 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6149 HOST_WIDE_INT bitsize)
6150 {
6151 if (bitsize != -1)
6152 {
6153 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6154 if (bitpos != 0)
6155 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6156 }
6157 return loc_note;
6158 }
6159
6160 /* This function either modifies the location piece list *DEST in
6161 place (if SRC and INNER are NULL), or copies the location piece
6162 list *SRC to *DEST while modifying it.  The piece at BITPOS is
6163 changed to contain LOC_NOTE; any pieces overlapping it are removed
6164 (or not copied), and some padding is added around it if needed.
6165 When modifying in place, DEST should point to the EXPR_LIST where
6166 earlier pieces cover PIECE_BITPOS bits; when copying, SRC points
6167 to the start of the whole list and INNER points to the EXPR_LIST
6168 where earlier pieces cover PIECE_BITPOS bits. */
6169
6170 static void
6171 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6172 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6173 HOST_WIDE_INT bitsize, rtx loc_note)
6174 {
6175 HOST_WIDE_INT diff;
6176 bool copy = inner != NULL;
6177
6178 if (copy)
6179 {
6180 /* First copy all nodes preceding the current bitpos. */
6181 while (src != inner)
6182 {
6183 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6184 decl_piece_bitsize (*src), NULL_RTX);
6185 dest = &XEXP (*dest, 1);
6186 src = &XEXP (*src, 1);
6187 }
6188 }
6189 /* Add padding if needed. */
6190 if (bitpos != piece_bitpos)
6191 {
6192 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6193 copy ? NULL_RTX : *dest);
6194 dest = &XEXP (*dest, 1);
6195 }
6196 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6197 {
6198 gcc_assert (!copy);
6199 /* A piece with the correct bitpos and bitsize already exists,
6200 just update the location for it and return. */
6201 *decl_piece_varloc_ptr (*dest) = loc_note;
6202 return;
6203 }
6204 /* Add the piece that changed. */
6205 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6206 dest = &XEXP (*dest, 1);
6207 /* Skip over pieces that overlap it. */
6208 diff = bitpos - piece_bitpos + bitsize;
6209 if (!copy)
6210 src = dest;
6211 while (diff > 0 && *src)
6212 {
6213 rtx piece = *src;
6214 diff -= decl_piece_bitsize (piece);
6215 if (copy)
6216 src = &XEXP (piece, 1);
6217 else
6218 {
6219 *src = XEXP (piece, 1);
6220 free_EXPR_LIST_node (piece);
6221 }
6222 }
6223 /* Add padding if needed. */
6224 if (diff < 0 && *src)
6225 {
6226 if (!copy)
6227 dest = src;
6228 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6229 dest = &XEXP (*dest, 1);
6230 }
6231 if (!copy)
6232 return;
6233 /* Finally copy all nodes following it. */
6234 while (*src)
6235 {
6236 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6237 decl_piece_bitsize (*src), NULL_RTX);
6238 dest = &XEXP (*dest, 1);
6239 src = &XEXP (*src, 1);
6240 }
6241 }
6242
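/* For illustration (a hypothetical SRA split, not tied to any target):
   a 32-bit variable whose bits [16, 32) are described by LOC_NOTE and
   whose bits [0, 16) are unknown is built by
   construct_piece_list (loc_note, 16, 16) as

       decl_piece_node (NULL_RTX, 16,          <- 16 bits of padding
                        decl_piece_node (loc_note, 16, NULL_RTX));

   i.e. an EXPR_LIST chain whose per-node bitsize is read back with
   decl_piece_bitsize and whose location slot is found with
   decl_piece_varloc_ptr.  */
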
6243 /* Add a variable location node to the linked list for DECL. */
6244
6245 static struct var_loc_node *
6246 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6247 {
6248 unsigned int decl_id;
6249 var_loc_list *temp;
6250 struct var_loc_node *loc = NULL;
6251 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6252
6253 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6254 {
6255 tree realdecl = DECL_DEBUG_EXPR (decl);
6256 if (handled_component_p (realdecl)
6257 || (TREE_CODE (realdecl) == MEM_REF
6258 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6259 {
6260 bool reverse;
6261 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6262 &bitsize, &reverse);
6263 if (!innerdecl
6264 || !DECL_P (innerdecl)
6265 || DECL_IGNORED_P (innerdecl)
6266 || TREE_STATIC (innerdecl)
6267 || bitsize == 0
6268 || bitpos + bitsize > 256)
6269 return NULL;
6270 decl = innerdecl;
6271 }
6272 }
6273
6274 decl_id = DECL_UID (decl);
6275 var_loc_list **slot
6276 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6277 if (*slot == NULL)
6278 {
6279 temp = ggc_cleared_alloc<var_loc_list> ();
6280 temp->decl_id = decl_id;
6281 *slot = temp;
6282 }
6283 else
6284 temp = *slot;
6285
6286 /* For PARM_DECLs try to keep around the original incoming value,
6287 even if that means we'll emit a zero-range .debug_loc entry. */
6288 if (temp->last
6289 && temp->first == temp->last
6290 && TREE_CODE (decl) == PARM_DECL
6291 && NOTE_P (temp->first->loc)
6292 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6293 && DECL_INCOMING_RTL (decl)
6294 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6295 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6296 == GET_CODE (DECL_INCOMING_RTL (decl))
6297 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6298 && (bitsize != -1
6299 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6300 NOTE_VAR_LOCATION_LOC (loc_note))
6301 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6302 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6303 {
6304 loc = ggc_cleared_alloc<var_loc_node> ();
6305 temp->first->next = loc;
6306 temp->last = loc;
6307 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6308 }
6309 else if (temp->last)
6310 {
6311 struct var_loc_node *last = temp->last, *unused = NULL;
6312 rtx *piece_loc = NULL, last_loc_note;
6313 HOST_WIDE_INT piece_bitpos = 0;
6314 if (last->next)
6315 {
6316 last = last->next;
6317 gcc_assert (last->next == NULL);
6318 }
6319 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6320 {
6321 piece_loc = &last->loc;
6322 do
6323 {
6324 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6325 if (piece_bitpos + cur_bitsize > bitpos)
6326 break;
6327 piece_bitpos += cur_bitsize;
6328 piece_loc = &XEXP (*piece_loc, 1);
6329 }
6330 while (*piece_loc);
6331 }
6332 /* TEMP->LAST here is a pointer either to the last-but-one or to
6333 the last element in the chained list; LAST is a pointer to the
6334 last element. */
6335 if (label && strcmp (last->label, label) == 0 && last->view == view)
6336 {
6337 /* For SRA-optimized variables, if there weren't any real
6338 insns since the last note, just modify the last node. */
6339 if (piece_loc != NULL)
6340 {
6341 adjust_piece_list (piece_loc, NULL, NULL,
6342 bitpos, piece_bitpos, bitsize, loc_note);
6343 return NULL;
6344 }
6345 /* If the last note doesn't cover any instructions, remove it. */
6346 if (temp->last != last)
6347 {
6348 temp->last->next = NULL;
6349 unused = last;
6350 last = temp->last;
6351 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6352 }
6353 else
6354 {
6355 gcc_assert (temp->first == temp->last
6356 || (temp->first->next == temp->last
6357 && TREE_CODE (decl) == PARM_DECL));
6358 memset (temp->last, '\0', sizeof (*temp->last));
6359 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6360 return temp->last;
6361 }
6362 }
6363 if (bitsize == -1 && NOTE_P (last->loc))
6364 last_loc_note = last->loc;
6365 else if (piece_loc != NULL
6366 && *piece_loc != NULL_RTX
6367 && piece_bitpos == bitpos
6368 && decl_piece_bitsize (*piece_loc) == bitsize)
6369 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6370 else
6371 last_loc_note = NULL_RTX;
6372 /* If the current location is the same as the end of the list,
6373 and either both or neither of the locations is uninitialized,
6374 we have nothing to do. */
6375 if (last_loc_note == NULL_RTX
6376 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6377 NOTE_VAR_LOCATION_LOC (loc_note)))
6378 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6379 != NOTE_VAR_LOCATION_STATUS (loc_note))
6380 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6381 == VAR_INIT_STATUS_UNINITIALIZED)
6382 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6383 == VAR_INIT_STATUS_UNINITIALIZED))))
6384 {
6385 /* Add LOC to the end of list and update LAST. If the last
6386 element of the list has been removed above, reuse its
6387 memory for the new node, otherwise allocate a new one. */
6388 if (unused)
6389 {
6390 loc = unused;
6391 memset (loc, '\0', sizeof (*loc));
6392 }
6393 else
6394 loc = ggc_cleared_alloc<var_loc_node> ();
6395 if (bitsize == -1 || piece_loc == NULL)
6396 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6397 else
6398 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6399 bitpos, piece_bitpos, bitsize, loc_note);
6400 last->next = loc;
6401 /* Ensure TEMP->LAST will point either to the new last but one
6402 element of the chain, or to the last element in it. */
6403 if (last != temp->last)
6404 temp->last = last;
6405 }
6406 else if (unused)
6407 ggc_free (unused);
6408 }
6409 else
6410 {
6411 loc = ggc_cleared_alloc<var_loc_node> ();
6412 temp->first = loc;
6413 temp->last = loc;
6414 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6415 }
6416 return loc;
6417 }
6418 \f
6419 /* Keep track of the number of spaces used to indent the
6420 output of the debugging routines that print the structure of
6421 the DIE internal representation. */
6422 static int print_indent;
6423
6424 /* Indent the line the number of spaces given by print_indent. */
6425
6426 static inline void
6427 print_spaces (FILE *outfile)
6428 {
6429 fprintf (outfile, "%*s", print_indent, "");
6430 }
6431
6432 /* Print a type signature in hex. */
6433
6434 static inline void
6435 print_signature (FILE *outfile, char *sig)
6436 {
6437 int i;
6438
6439 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6440 fprintf (outfile, "%02x", sig[i] & 0xff);
6441 }
6442
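/* Print DISCR_VALUE to OUTFILE, as an unsigned number if it is marked
   positive and as a signed number otherwise. */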
6443 static inline void
6444 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6445 {
6446 if (discr_value->pos)
6447 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6448 else
6449 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6450 }
6451
6452 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6453
6454 /* Print the value associated with the VAL DWARF value node to OUTFILE. If
6455 RECURSE, output location descriptor operations. */
6456
6457 static void
6458 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6459 {
6460 switch (val->val_class)
6461 {
6462 case dw_val_class_addr:
6463 fprintf (outfile, "address");
6464 break;
6465 case dw_val_class_offset:
6466 fprintf (outfile, "offset");
6467 break;
6468 case dw_val_class_loc:
6469 fprintf (outfile, "location descriptor");
6470 if (val->v.val_loc == NULL)
6471 fprintf (outfile, " -> <null>\n");
6472 else if (recurse)
6473 {
6474 fprintf (outfile, ":\n");
6475 print_indent += 4;
6476 print_loc_descr (val->v.val_loc, outfile);
6477 print_indent -= 4;
6478 }
6479 else
6480 {
6481 if (flag_dump_noaddr || flag_dump_unnumbered)
6482 fprintf (outfile, " #\n");
6483 else
6484 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6485 }
6486 break;
6487 case dw_val_class_loc_list:
6488 fprintf (outfile, "location list -> label:%s",
6489 val->v.val_loc_list->ll_symbol);
6490 break;
6491 case dw_val_class_view_list:
6492 val = view_list_to_loc_list_val_node (val);
6493 fprintf (outfile, "location list with views -> labels:%s and %s",
6494 val->v.val_loc_list->ll_symbol,
6495 val->v.val_loc_list->vl_symbol);
6496 break;
6497 case dw_val_class_range_list:
6498 fprintf (outfile, "range list");
6499 break;
6500 case dw_val_class_const:
6501 case dw_val_class_const_implicit:
6502 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6503 break;
6504 case dw_val_class_unsigned_const:
6505 case dw_val_class_unsigned_const_implicit:
6506 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6507 break;
6508 case dw_val_class_const_double:
6509 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6510 HOST_WIDE_INT_PRINT_UNSIGNED")",
6511 val->v.val_double.high,
6512 val->v.val_double.low);
6513 break;
6514 case dw_val_class_wide_int:
6515 {
6516 int i = val->v.val_wide->get_len ();
6517 fprintf (outfile, "constant (");
6518 gcc_assert (i > 0);
6519 if (val->v.val_wide->elt (i - 1) == 0)
6520 fprintf (outfile, "0x");
6521 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6522 val->v.val_wide->elt (--i));
6523 while (--i >= 0)
6524 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6525 val->v.val_wide->elt (i));
6526 fprintf (outfile, ")");
6527 break;
6528 }
6529 case dw_val_class_vec:
6530 fprintf (outfile, "floating-point or vector constant");
6531 break;
6532 case dw_val_class_flag:
6533 fprintf (outfile, "%u", val->v.val_flag);
6534 break;
6535 case dw_val_class_die_ref:
6536 if (val->v.val_die_ref.die != NULL)
6537 {
6538 dw_die_ref die = val->v.val_die_ref.die;
6539
6540 if (die->comdat_type_p)
6541 {
6542 fprintf (outfile, "die -> signature: ");
6543 print_signature (outfile,
6544 die->die_id.die_type_node->signature);
6545 }
6546 else if (die->die_id.die_symbol)
6547 {
6548 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6549 if (die->with_offset)
6550 fprintf (outfile, " + %ld", die->die_offset);
6551 }
6552 else
6553 fprintf (outfile, "die -> %ld", die->die_offset);
6554 if (flag_dump_noaddr || flag_dump_unnumbered)
6555 fprintf (outfile, " #");
6556 else
6557 fprintf (outfile, " (%p)", (void *) die);
6558 }
6559 else
6560 fprintf (outfile, "die -> <null>");
6561 break;
6562 case dw_val_class_vms_delta:
6563 fprintf (outfile, "delta: @slotcount(%s-%s)",
6564 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6565 break;
6566 case dw_val_class_symview:
6567 fprintf (outfile, "view: %s", val->v.val_symbolic_view);
6568 break;
6569 case dw_val_class_lbl_id:
6570 case dw_val_class_lineptr:
6571 case dw_val_class_macptr:
6572 case dw_val_class_loclistsptr:
6573 case dw_val_class_high_pc:
6574 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6575 break;
6576 case dw_val_class_str:
6577 if (val->v.val_str->str != NULL)
6578 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6579 else
6580 fprintf (outfile, "<null>");
6581 break;
6582 case dw_val_class_file:
6583 case dw_val_class_file_implicit:
6584 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6585 val->v.val_file->emitted_number);
6586 break;
6587 case dw_val_class_data8:
6588 {
6589 int i;
6590
6591 for (i = 0; i < 8; i++)
6592 fprintf (outfile, "%02x", val->v.val_data8[i]);
6593 break;
6594 }
6595 case dw_val_class_discr_value:
6596 print_discr_value (outfile, &val->v.val_discr_value);
6597 break;
6598 case dw_val_class_discr_list:
6599 for (dw_discr_list_ref node = val->v.val_discr_list;
6600 node != NULL;
6601 node = node->dw_discr_next)
6602 {
6603 if (node->dw_discr_range)
6604 {
6605 print_discr_value (outfile, &node->dw_discr_lower_bound);
6606 fprintf (outfile, " .. ");
6607 print_discr_value (outfile, &node->dw_discr_upper_bound);
6608 }
6609 else
6610 print_discr_value (outfile, &node->dw_discr_lower_bound);
6611
6612 if (node->dw_discr_next != NULL)
6613 fprintf (outfile, " | ");
6614 }
6615 default:
6616 break;
6617 }
6618 }
6619
6620 /* Likewise, for a DIE attribute. */
6621
6622 static void
6623 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6624 {
6625 print_dw_val (&a->dw_attr_val, recurse, outfile);
6626 }
6627
6628
6629 /* Print the list of operations in the LOC location description to OUTFILE. This
6630 routine is a debugging aid only. */
6631
6632 static void
6633 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6634 {
6635 dw_loc_descr_ref l = loc;
6636
6637 if (loc == NULL)
6638 {
6639 print_spaces (outfile);
6640 fprintf (outfile, "<null>\n");
6641 return;
6642 }
6643
6644 for (l = loc; l != NULL; l = l->dw_loc_next)
6645 {
6646 print_spaces (outfile);
6647 if (flag_dump_noaddr || flag_dump_unnumbered)
6648 fprintf (outfile, "#");
6649 else
6650 fprintf (outfile, "(%p)", (void *) l);
6651 fprintf (outfile, " %s",
6652 dwarf_stack_op_name (l->dw_loc_opc));
6653 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6654 {
6655 fprintf (outfile, " ");
6656 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6657 }
6658 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6659 {
6660 fprintf (outfile, ", ");
6661 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6662 }
6663 fprintf (outfile, "\n");
6664 }
6665 }
6666
6667 /* Print the information associated with a given DIE, and its children.
6668 This routine is a debugging aid only. */
6669
6670 static void
6671 print_die (dw_die_ref die, FILE *outfile)
6672 {
6673 dw_attr_node *a;
6674 dw_die_ref c;
6675 unsigned ix;
6676
6677 print_spaces (outfile);
6678 fprintf (outfile, "DIE %4ld: %s ",
6679 die->die_offset, dwarf_tag_name (die->die_tag));
6680 if (flag_dump_noaddr || flag_dump_unnumbered)
6681 fprintf (outfile, "#\n");
6682 else
6683 fprintf (outfile, "(%p)\n", (void*) die);
6684 print_spaces (outfile);
6685 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6686 fprintf (outfile, " offset: %ld", die->die_offset);
6687 fprintf (outfile, " mark: %d\n", die->die_mark);
6688
6689 if (die->comdat_type_p)
6690 {
6691 print_spaces (outfile);
6692 fprintf (outfile, " signature: ");
6693 print_signature (outfile, die->die_id.die_type_node->signature);
6694 fprintf (outfile, "\n");
6695 }
6696
6697 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6698 {
6699 print_spaces (outfile);
6700 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6701
6702 print_attribute (a, true, outfile);
6703 fprintf (outfile, "\n");
6704 }
6705
6706 if (die->die_child != NULL)
6707 {
6708 print_indent += 4;
6709 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6710 print_indent -= 4;
6711 }
6712 if (print_indent == 0)
6713 fprintf (outfile, "\n");
6714 }
6715
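/* For illustration only (the tag, offsets, pointer values and attributes
   below are hypothetical): print_die produces output along the lines of

     DIE   42: DW_TAG_variable (0x7f1234567890)
       abbrev id: 3 offset: 42 mark: 0
       DW_AT_name: "foo"
       DW_AT_type: die -> 17 (0x7f1234567800)

   with each level of children indented by four additional spaces.  */
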
6716 /* Print the list of operations in the LOC location description. */
6717
6718 DEBUG_FUNCTION void
6719 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6720 {
6721 print_loc_descr (loc, stderr);
6722 }
6723
6724 /* Print the information collected for a given DIE. */
6725
6726 DEBUG_FUNCTION void
6727 debug_dwarf_die (dw_die_ref die)
6728 {
6729 print_die (die, stderr);
6730 }
6731
6732 DEBUG_FUNCTION void
6733 debug (die_struct &ref)
6734 {
6735 print_die (&ref, stderr);
6736 }
6737
6738 DEBUG_FUNCTION void
6739 debug (die_struct *ptr)
6740 {
6741 if (ptr)
6742 debug (*ptr);
6743 else
6744 fprintf (stderr, "<nil>\n");
6745 }
6746
6747
6748 /* Print all DWARF information collected for the compilation unit.
6749 This routine is a debugging aid only. */
6750
6751 DEBUG_FUNCTION void
6752 debug_dwarf (void)
6753 {
6754 print_indent = 0;
6755 print_die (comp_unit_die (), stderr);
6756 }
6757
6758 /* Verify the DIE tree structure. */
6759
6760 DEBUG_FUNCTION void
6761 verify_die (dw_die_ref die)
6762 {
6763 gcc_assert (!die->die_mark);
6764 if (die->die_parent == NULL
6765 && die->die_sib == NULL)
6766 return;
6767 /* Verify the die_sib list is cyclic. */
6768 dw_die_ref x = die;
6769 do
6770 {
6771 x->die_mark = 1;
6772 x = x->die_sib;
6773 }
6774 while (x && !x->die_mark);
6775 gcc_assert (x == die);
6776 x = die;
6777 do
6778 {
6779 /* Verify all dies have the same parent. */
6780 gcc_assert (x->die_parent == die->die_parent);
6781 if (x->die_child)
6782 {
6783 /* Verify the child has the proper parent and recurse. */
6784 gcc_assert (x->die_child->die_parent == x);
6785 verify_die (x->die_child);
6786 }
6787 x->die_mark = 0;
6788 x = x->die_sib;
6789 }
6790 while (x && x->die_mark);
6791 }
6792
6793 /* Sanity checks on DIEs. */
6794
6795 static void
6796 check_die (dw_die_ref die)
6797 {
6798 unsigned ix;
6799 dw_attr_node *a;
6800 bool inline_found = false;
6801 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6802 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6803 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6804 {
6805 switch (a->dw_attr)
6806 {
6807 case DW_AT_inline:
6808 if (a->dw_attr_val.v.val_unsigned)
6809 inline_found = true;
6810 break;
6811 case DW_AT_location:
6812 ++n_location;
6813 break;
6814 case DW_AT_low_pc:
6815 ++n_low_pc;
6816 break;
6817 case DW_AT_high_pc:
6818 ++n_high_pc;
6819 break;
6820 case DW_AT_artificial:
6821 ++n_artificial;
6822 break;
6823 case DW_AT_decl_column:
6824 ++n_decl_column;
6825 break;
6826 case DW_AT_decl_line:
6827 ++n_decl_line;
6828 break;
6829 case DW_AT_decl_file:
6830 ++n_decl_file;
6831 break;
6832 default:
6833 break;
6834 }
6835 }
6836 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6837 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6838 {
6839 fprintf (stderr, "Duplicate attributes in DIE:\n");
6840 debug_dwarf_die (die);
6841 gcc_unreachable ();
6842 }
6843 if (inline_found)
6844 {
6845 /* A debugging information entry that is a member of an abstract
6846 instance tree [that has DW_AT_inline] should not contain any
6847 attributes which describe aspects of the subroutine which vary
6848 between distinct inlined expansions or distinct out-of-line
6849 expansions. */
6850 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6851 gcc_assert (a->dw_attr != DW_AT_low_pc
6852 && a->dw_attr != DW_AT_high_pc
6853 && a->dw_attr != DW_AT_location
6854 && a->dw_attr != DW_AT_frame_base
6855 && a->dw_attr != DW_AT_call_all_calls
6856 && a->dw_attr != DW_AT_GNU_all_call_sites);
6857 }
6858 }
6859 \f
6860 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6861 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6862 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6863
6864 /* Calculate the checksum of a location expression. */
6865
6866 static inline void
6867 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6868 {
6869 int tem;
6870 inchash::hash hstate;
6871 hashval_t hash;
6872
6873 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6874 CHECKSUM (tem);
6875 hash_loc_operands (loc, hstate);
6876 hash = hstate.end();
6877 CHECKSUM (hash);
6878 }
6879
6880 /* Calculate the checksum of an attribute. */
6881
6882 static void
6883 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6884 {
6885 dw_loc_descr_ref loc;
6886 rtx r;
6887
6888 CHECKSUM (at->dw_attr);
6889
6890 /* We don't care that this was compiled with a different compiler
6891 snapshot; if the output is the same, that's what matters. */
6892 if (at->dw_attr == DW_AT_producer)
6893 return;
6894
6895 switch (AT_class (at))
6896 {
6897 case dw_val_class_const:
6898 case dw_val_class_const_implicit:
6899 CHECKSUM (at->dw_attr_val.v.val_int);
6900 break;
6901 case dw_val_class_unsigned_const:
6902 case dw_val_class_unsigned_const_implicit:
6903 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6904 break;
6905 case dw_val_class_const_double:
6906 CHECKSUM (at->dw_attr_val.v.val_double);
6907 break;
6908 case dw_val_class_wide_int:
6909 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6910 get_full_len (*at->dw_attr_val.v.val_wide)
6911 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6912 break;
6913 case dw_val_class_vec:
6914 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6915 (at->dw_attr_val.v.val_vec.length
6916 * at->dw_attr_val.v.val_vec.elt_size));
6917 break;
6918 case dw_val_class_flag:
6919 CHECKSUM (at->dw_attr_val.v.val_flag);
6920 break;
6921 case dw_val_class_str:
6922 CHECKSUM_STRING (AT_string (at));
6923 break;
6924
6925 case dw_val_class_addr:
6926 r = AT_addr (at);
6927 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6928 CHECKSUM_STRING (XSTR (r, 0));
6929 break;
6930
6931 case dw_val_class_offset:
6932 CHECKSUM (at->dw_attr_val.v.val_offset);
6933 break;
6934
6935 case dw_val_class_loc:
6936 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6937 loc_checksum (loc, ctx);
6938 break;
6939
6940 case dw_val_class_die_ref:
6941 die_checksum (AT_ref (at), ctx, mark);
6942 break;
6943
6944 case dw_val_class_fde_ref:
6945 case dw_val_class_vms_delta:
6946 case dw_val_class_symview:
6947 case dw_val_class_lbl_id:
6948 case dw_val_class_lineptr:
6949 case dw_val_class_macptr:
6950 case dw_val_class_loclistsptr:
6951 case dw_val_class_high_pc:
6952 break;
6953
6954 case dw_val_class_file:
6955 case dw_val_class_file_implicit:
6956 CHECKSUM_STRING (AT_file (at)->filename);
6957 break;
6958
6959 case dw_val_class_data8:
6960 CHECKSUM (at->dw_attr_val.v.val_data8);
6961 break;
6962
6963 default:
6964 break;
6965 }
6966 }
6967
6968 /* Calculate the checksum of a DIE. */
6969
6970 static void
6971 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6972 {
6973 dw_die_ref c;
6974 dw_attr_node *a;
6975 unsigned ix;
6976
6977 /* To avoid infinite recursion. */
6978 if (die->die_mark)
6979 {
6980 CHECKSUM (die->die_mark);
6981 return;
6982 }
6983 die->die_mark = ++(*mark);
6984
6985 CHECKSUM (die->die_tag);
6986
6987 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6988 attr_checksum (a, ctx, mark);
6989
6990 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6991 }
6992
6993 #undef CHECKSUM
6994 #undef CHECKSUM_BLOCK
6995 #undef CHECKSUM_STRING
6996
6997 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6998 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6999 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
7000 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
7001 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
7002 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
7003 #define CHECKSUM_ATTR(FOO) \
7004 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
7005
7006 /* Calculate the checksum of a number in signed LEB128 format. */
7007
7008 static void
7009 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
7010 {
7011 unsigned char byte;
7012 bool more;
7013
7014 while (1)
7015 {
7016 byte = (value & 0x7f);
7017 value >>= 7;
7018 more = !((value == 0 && (byte & 0x40) == 0)
7019 || (value == -1 && (byte & 0x40) != 0));
7020 if (more)
7021 byte |= 0x80;
7022 CHECKSUM (byte);
7023 if (!more)
7024 break;
7025 }
7026 }
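/* As an illustration of the encoding above: SLEB128 emits 7 bits per
   byte, low-order bits first, and stops once the remaining value is pure
   sign extension already captured by bit 6 of the last byte:

        2  ->  0x02            (bit 6 clear, remaining value 0)
       -2  ->  0x7e            (bit 6 set, remaining value -1)
      129  ->  0x81 0x01       (0x01 | 0x80 continuation, then 0x01)

   Each emitted byte goes through CHECKSUM, so the digest sees exactly the
   bytes a producer would write for a DW_FORM_sdata value.  */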
7027
7028 /* Calculate the checksum of a number in unsigned LEB128 format. */
7029
7030 static void
7031 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
7032 {
7033 while (1)
7034 {
7035 unsigned char byte = (value & 0x7f);
7036 value >>= 7;
7037 if (value != 0)
7038 /* More bytes to follow. */
7039 byte |= 0x80;
7040 CHECKSUM (byte);
7041 if (value == 0)
7042 break;
7043 }
7044 }
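/* Similarly for the unsigned case, bit 7 marks continuation on every byte
   except the last:

         127  ->  0x7f
         128  ->  0x80 0x01
      624485  ->  0xe5 0x8e 0x26

   so the digest matches the byte stream of the corresponding
   DW_FORM_udata value.  */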
7045
7046 /* Checksum the context of the DIE. This adds the names of any
7047 surrounding namespaces or structures to the checksum. */
7048
7049 static void
7050 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
7051 {
7052 const char *name;
7053 dw_die_ref spec;
7054 int tag = die->die_tag;
7055
7056 if (tag != DW_TAG_namespace
7057 && tag != DW_TAG_structure_type
7058 && tag != DW_TAG_class_type)
7059 return;
7060
7061 name = get_AT_string (die, DW_AT_name);
7062
7063 spec = get_AT_ref (die, DW_AT_specification);
7064 if (spec != NULL)
7065 die = spec;
7066
7067 if (die->die_parent != NULL)
7068 checksum_die_context (die->die_parent, ctx);
7069
7070 CHECKSUM_ULEB128 ('C');
7071 CHECKSUM_ULEB128 (tag);
7072 if (name != NULL)
7073 CHECKSUM_STRING (name);
7074 }
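/* Sketch of the effect: for a type declared as

      namespace N { struct S { ... }; }

   checksumming the context of S contributes 'C', DW_TAG_namespace, "N"
   (the compile unit parent contributes nothing, since its tag is none of
   namespace/structure/class), so otherwise identical types in different
   namespaces end up with different signatures.  */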
7075
7076 /* Calculate the checksum of a location expression. */
7077
7078 static inline void
7079 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
7080 {
7081 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
7082 were emitted as a DW_FORM_sdata instead of a location expression. */
7083 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
7084 {
7085 CHECKSUM_ULEB128 (DW_FORM_sdata);
7086 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
7087 return;
7088 }
7089
7090 /* Otherwise, just checksum the raw location expression. */
7091 while (loc != NULL)
7092 {
7093 inchash::hash hstate;
7094 hashval_t hash;
7095
7096 CHECKSUM_ULEB128 (loc->dtprel);
7097 CHECKSUM_ULEB128 (loc->dw_loc_opc);
7098 hash_loc_operands (loc, hstate);
7099 hash = hstate.end ();
7100 CHECKSUM (hash);
7101 loc = loc->dw_loc_next;
7102 }
7103 }
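/* The special case above means, for example, that a member at byte offset
   4 checksums identically whether its DW_AT_data_member_location was
   emitted as the constant 4 or as the single-op expression
   DW_OP_plus_uconst 4: both contribute DW_FORM_sdata followed by
   sleb128 (4), which keeps type signatures stable across the two output
   styles.  */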
7104
7105 /* Calculate the checksum of an attribute. */
7106
7107 static void
7108 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
7109 struct md5_ctx *ctx, int *mark)
7110 {
7111 dw_loc_descr_ref loc;
7112 rtx r;
7113
7114 if (AT_class (at) == dw_val_class_die_ref)
7115 {
7116 dw_die_ref target_die = AT_ref (at);
7117
7118 /* For pointer and reference types, we checksum only the (qualified)
7119 name of the target type (if there is a name). For friend entries,
7120 we checksum only the (qualified) name of the target type or function.
7121 This allows the checksum to remain the same whether the target type
7122 is complete or not. */
7123 if ((at->dw_attr == DW_AT_type
7124 && (tag == DW_TAG_pointer_type
7125 || tag == DW_TAG_reference_type
7126 || tag == DW_TAG_rvalue_reference_type
7127 || tag == DW_TAG_ptr_to_member_type))
7128 || (at->dw_attr == DW_AT_friend
7129 && tag == DW_TAG_friend))
7130 {
7131 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7132
7133 if (name_attr != NULL)
7134 {
7135 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7136
7137 if (decl == NULL)
7138 decl = target_die;
7139 CHECKSUM_ULEB128 ('N');
7140 CHECKSUM_ULEB128 (at->dw_attr);
7141 if (decl->die_parent != NULL)
7142 checksum_die_context (decl->die_parent, ctx);
7143 CHECKSUM_ULEB128 ('E');
7144 CHECKSUM_STRING (AT_string (name_attr));
7145 return;
7146 }
7147 }
7148
7149 /* For all other references to another DIE, we check to see if the
7150 target DIE has already been visited. If it has, we emit a
7151 backward reference; if not, we descend recursively. */
7152 if (target_die->die_mark > 0)
7153 {
7154 CHECKSUM_ULEB128 ('R');
7155 CHECKSUM_ULEB128 (at->dw_attr);
7156 CHECKSUM_ULEB128 (target_die->die_mark);
7157 }
7158 else
7159 {
7160 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7161
7162 if (decl == NULL)
7163 decl = target_die;
7164 target_die->die_mark = ++(*mark);
7165 CHECKSUM_ULEB128 ('T');
7166 CHECKSUM_ULEB128 (at->dw_attr);
7167 if (decl->die_parent != NULL)
7168 checksum_die_context (decl->die_parent, ctx);
7169 die_checksum_ordered (target_die, ctx, mark);
7170 }
7171 return;
7172 }
7173
7174 CHECKSUM_ULEB128 ('A');
7175 CHECKSUM_ULEB128 (at->dw_attr);
7176
7177 switch (AT_class (at))
7178 {
7179 case dw_val_class_const:
7180 case dw_val_class_const_implicit:
7181 CHECKSUM_ULEB128 (DW_FORM_sdata);
7182 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7183 break;
7184
7185 case dw_val_class_unsigned_const:
7186 case dw_val_class_unsigned_const_implicit:
7187 CHECKSUM_ULEB128 (DW_FORM_sdata);
7188 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7189 break;
7190
7191 case dw_val_class_const_double:
7192 CHECKSUM_ULEB128 (DW_FORM_block);
7193 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7194 CHECKSUM (at->dw_attr_val.v.val_double);
7195 break;
7196
7197 case dw_val_class_wide_int:
7198 CHECKSUM_ULEB128 (DW_FORM_block);
7199 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7200 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
7201 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7202 get_full_len (*at->dw_attr_val.v.val_wide)
7203 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7204 break;
7205
7206 case dw_val_class_vec:
7207 CHECKSUM_ULEB128 (DW_FORM_block);
7208 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7209 * at->dw_attr_val.v.val_vec.elt_size);
7210 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7211 (at->dw_attr_val.v.val_vec.length
7212 * at->dw_attr_val.v.val_vec.elt_size));
7213 break;
7214
7215 case dw_val_class_flag:
7216 CHECKSUM_ULEB128 (DW_FORM_flag);
7217 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7218 break;
7219
7220 case dw_val_class_str:
7221 CHECKSUM_ULEB128 (DW_FORM_string);
7222 CHECKSUM_STRING (AT_string (at));
7223 break;
7224
7225 case dw_val_class_addr:
7226 r = AT_addr (at);
7227 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7228 CHECKSUM_ULEB128 (DW_FORM_string);
7229 CHECKSUM_STRING (XSTR (r, 0));
7230 break;
7231
7232 case dw_val_class_offset:
7233 CHECKSUM_ULEB128 (DW_FORM_sdata);
7234 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7235 break;
7236
7237 case dw_val_class_loc:
7238 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7239 loc_checksum_ordered (loc, ctx);
7240 break;
7241
7242 case dw_val_class_fde_ref:
7243 case dw_val_class_symview:
7244 case dw_val_class_lbl_id:
7245 case dw_val_class_lineptr:
7246 case dw_val_class_macptr:
7247 case dw_val_class_loclistsptr:
7248 case dw_val_class_high_pc:
7249 break;
7250
7251 case dw_val_class_file:
7252 case dw_val_class_file_implicit:
7253 CHECKSUM_ULEB128 (DW_FORM_string);
7254 CHECKSUM_STRING (AT_file (at)->filename);
7255 break;
7256
7257 case dw_val_class_data8:
7258 CHECKSUM (at->dw_attr_val.v.val_data8);
7259 break;
7260
7261 default:
7262 break;
7263 }
7264 }
7265
7266 struct checksum_attributes
7267 {
7268 dw_attr_node *at_name;
7269 dw_attr_node *at_type;
7270 dw_attr_node *at_friend;
7271 dw_attr_node *at_accessibility;
7272 dw_attr_node *at_address_class;
7273 dw_attr_node *at_alignment;
7274 dw_attr_node *at_allocated;
7275 dw_attr_node *at_artificial;
7276 dw_attr_node *at_associated;
7277 dw_attr_node *at_binary_scale;
7278 dw_attr_node *at_bit_offset;
7279 dw_attr_node *at_bit_size;
7280 dw_attr_node *at_bit_stride;
7281 dw_attr_node *at_byte_size;
7282 dw_attr_node *at_byte_stride;
7283 dw_attr_node *at_const_value;
7284 dw_attr_node *at_containing_type;
7285 dw_attr_node *at_count;
7286 dw_attr_node *at_data_location;
7287 dw_attr_node *at_data_member_location;
7288 dw_attr_node *at_decimal_scale;
7289 dw_attr_node *at_decimal_sign;
7290 dw_attr_node *at_default_value;
7291 dw_attr_node *at_digit_count;
7292 dw_attr_node *at_discr;
7293 dw_attr_node *at_discr_list;
7294 dw_attr_node *at_discr_value;
7295 dw_attr_node *at_encoding;
7296 dw_attr_node *at_endianity;
7297 dw_attr_node *at_explicit;
7298 dw_attr_node *at_is_optional;
7299 dw_attr_node *at_location;
7300 dw_attr_node *at_lower_bound;
7301 dw_attr_node *at_mutable;
7302 dw_attr_node *at_ordering;
7303 dw_attr_node *at_picture_string;
7304 dw_attr_node *at_prototyped;
7305 dw_attr_node *at_small;
7306 dw_attr_node *at_segment;
7307 dw_attr_node *at_string_length;
7308 dw_attr_node *at_string_length_bit_size;
7309 dw_attr_node *at_string_length_byte_size;
7310 dw_attr_node *at_threads_scaled;
7311 dw_attr_node *at_upper_bound;
7312 dw_attr_node *at_use_location;
7313 dw_attr_node *at_use_UTF8;
7314 dw_attr_node *at_variable_parameter;
7315 dw_attr_node *at_virtuality;
7316 dw_attr_node *at_visibility;
7317 dw_attr_node *at_vtable_elem_location;
7318 };
7319
7320 /* Collect the attributes that we will want to use for the checksum. */
7321
7322 static void
7323 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7324 {
7325 dw_attr_node *a;
7326 unsigned ix;
7327
7328 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7329 {
7330 switch (a->dw_attr)
7331 {
7332 case DW_AT_name:
7333 attrs->at_name = a;
7334 break;
7335 case DW_AT_type:
7336 attrs->at_type = a;
7337 break;
7338 case DW_AT_friend:
7339 attrs->at_friend = a;
7340 break;
7341 case DW_AT_accessibility:
7342 attrs->at_accessibility = a;
7343 break;
7344 case DW_AT_address_class:
7345 attrs->at_address_class = a;
7346 break;
7347 case DW_AT_alignment:
7348 attrs->at_alignment = a;
7349 break;
7350 case DW_AT_allocated:
7351 attrs->at_allocated = a;
7352 break;
7353 case DW_AT_artificial:
7354 attrs->at_artificial = a;
7355 break;
7356 case DW_AT_associated:
7357 attrs->at_associated = a;
7358 break;
7359 case DW_AT_binary_scale:
7360 attrs->at_binary_scale = a;
7361 break;
7362 case DW_AT_bit_offset:
7363 attrs->at_bit_offset = a;
7364 break;
7365 case DW_AT_bit_size:
7366 attrs->at_bit_size = a;
7367 break;
7368 case DW_AT_bit_stride:
7369 attrs->at_bit_stride = a;
7370 break;
7371 case DW_AT_byte_size:
7372 attrs->at_byte_size = a;
7373 break;
7374 case DW_AT_byte_stride:
7375 attrs->at_byte_stride = a;
7376 break;
7377 case DW_AT_const_value:
7378 attrs->at_const_value = a;
7379 break;
7380 case DW_AT_containing_type:
7381 attrs->at_containing_type = a;
7382 break;
7383 case DW_AT_count:
7384 attrs->at_count = a;
7385 break;
7386 case DW_AT_data_location:
7387 attrs->at_data_location = a;
7388 break;
7389 case DW_AT_data_member_location:
7390 attrs->at_data_member_location = a;
7391 break;
7392 case DW_AT_decimal_scale:
7393 attrs->at_decimal_scale = a;
7394 break;
7395 case DW_AT_decimal_sign:
7396 attrs->at_decimal_sign = a;
7397 break;
7398 case DW_AT_default_value:
7399 attrs->at_default_value = a;
7400 break;
7401 case DW_AT_digit_count:
7402 attrs->at_digit_count = a;
7403 break;
7404 case DW_AT_discr:
7405 attrs->at_discr = a;
7406 break;
7407 case DW_AT_discr_list:
7408 attrs->at_discr_list = a;
7409 break;
7410 case DW_AT_discr_value:
7411 attrs->at_discr_value = a;
7412 break;
7413 case DW_AT_encoding:
7414 attrs->at_encoding = a;
7415 break;
7416 case DW_AT_endianity:
7417 attrs->at_endianity = a;
7418 break;
7419 case DW_AT_explicit:
7420 attrs->at_explicit = a;
7421 break;
7422 case DW_AT_is_optional:
7423 attrs->at_is_optional = a;
7424 break;
7425 case DW_AT_location:
7426 attrs->at_location = a;
7427 break;
7428 case DW_AT_lower_bound:
7429 attrs->at_lower_bound = a;
7430 break;
7431 case DW_AT_mutable:
7432 attrs->at_mutable = a;
7433 break;
7434 case DW_AT_ordering:
7435 attrs->at_ordering = a;
7436 break;
7437 case DW_AT_picture_string:
7438 attrs->at_picture_string = a;
7439 break;
7440 case DW_AT_prototyped:
7441 attrs->at_prototyped = a;
7442 break;
7443 case DW_AT_small:
7444 attrs->at_small = a;
7445 break;
7446 case DW_AT_segment:
7447 attrs->at_segment = a;
7448 break;
7449 case DW_AT_string_length:
7450 attrs->at_string_length = a;
7451 break;
7452 case DW_AT_string_length_bit_size:
7453 attrs->at_string_length_bit_size = a;
7454 break;
7455 case DW_AT_string_length_byte_size:
7456 attrs->at_string_length_byte_size = a;
7457 break;
7458 case DW_AT_threads_scaled:
7459 attrs->at_threads_scaled = a;
7460 break;
7461 case DW_AT_upper_bound:
7462 attrs->at_upper_bound = a;
7463 break;
7464 case DW_AT_use_location:
7465 attrs->at_use_location = a;
7466 break;
7467 case DW_AT_use_UTF8:
7468 attrs->at_use_UTF8 = a;
7469 break;
7470 case DW_AT_variable_parameter:
7471 attrs->at_variable_parameter = a;
7472 break;
7473 case DW_AT_virtuality:
7474 attrs->at_virtuality = a;
7475 break;
7476 case DW_AT_visibility:
7477 attrs->at_visibility = a;
7478 break;
7479 case DW_AT_vtable_elem_location:
7480 attrs->at_vtable_elem_location = a;
7481 break;
7482 default:
7483 break;
7484 }
7485 }
7486 }
7487
7488 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7489
7490 static void
7491 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7492 {
7493 dw_die_ref c;
7494 dw_die_ref decl;
7495 struct checksum_attributes attrs;
7496
7497 CHECKSUM_ULEB128 ('D');
7498 CHECKSUM_ULEB128 (die->die_tag);
7499
7500 memset (&attrs, 0, sizeof (attrs));
7501
7502 decl = get_AT_ref (die, DW_AT_specification);
7503 if (decl != NULL)
7504 collect_checksum_attributes (&attrs, decl);
7505 collect_checksum_attributes (&attrs, die);
7506
7507 CHECKSUM_ATTR (attrs.at_name);
7508 CHECKSUM_ATTR (attrs.at_accessibility);
7509 CHECKSUM_ATTR (attrs.at_address_class);
7510 CHECKSUM_ATTR (attrs.at_allocated);
7511 CHECKSUM_ATTR (attrs.at_artificial);
7512 CHECKSUM_ATTR (attrs.at_associated);
7513 CHECKSUM_ATTR (attrs.at_binary_scale);
7514 CHECKSUM_ATTR (attrs.at_bit_offset);
7515 CHECKSUM_ATTR (attrs.at_bit_size);
7516 CHECKSUM_ATTR (attrs.at_bit_stride);
7517 CHECKSUM_ATTR (attrs.at_byte_size);
7518 CHECKSUM_ATTR (attrs.at_byte_stride);
7519 CHECKSUM_ATTR (attrs.at_const_value);
7520 CHECKSUM_ATTR (attrs.at_containing_type);
7521 CHECKSUM_ATTR (attrs.at_count);
7522 CHECKSUM_ATTR (attrs.at_data_location);
7523 CHECKSUM_ATTR (attrs.at_data_member_location);
7524 CHECKSUM_ATTR (attrs.at_decimal_scale);
7525 CHECKSUM_ATTR (attrs.at_decimal_sign);
7526 CHECKSUM_ATTR (attrs.at_default_value);
7527 CHECKSUM_ATTR (attrs.at_digit_count);
7528 CHECKSUM_ATTR (attrs.at_discr);
7529 CHECKSUM_ATTR (attrs.at_discr_list);
7530 CHECKSUM_ATTR (attrs.at_discr_value);
7531 CHECKSUM_ATTR (attrs.at_encoding);
7532 CHECKSUM_ATTR (attrs.at_endianity);
7533 CHECKSUM_ATTR (attrs.at_explicit);
7534 CHECKSUM_ATTR (attrs.at_is_optional);
7535 CHECKSUM_ATTR (attrs.at_location);
7536 CHECKSUM_ATTR (attrs.at_lower_bound);
7537 CHECKSUM_ATTR (attrs.at_mutable);
7538 CHECKSUM_ATTR (attrs.at_ordering);
7539 CHECKSUM_ATTR (attrs.at_picture_string);
7540 CHECKSUM_ATTR (attrs.at_prototyped);
7541 CHECKSUM_ATTR (attrs.at_small);
7542 CHECKSUM_ATTR (attrs.at_segment);
7543 CHECKSUM_ATTR (attrs.at_string_length);
7544 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7545 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7546 CHECKSUM_ATTR (attrs.at_threads_scaled);
7547 CHECKSUM_ATTR (attrs.at_upper_bound);
7548 CHECKSUM_ATTR (attrs.at_use_location);
7549 CHECKSUM_ATTR (attrs.at_use_UTF8);
7550 CHECKSUM_ATTR (attrs.at_variable_parameter);
7551 CHECKSUM_ATTR (attrs.at_virtuality);
7552 CHECKSUM_ATTR (attrs.at_visibility);
7553 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7554 CHECKSUM_ATTR (attrs.at_type);
7555 CHECKSUM_ATTR (attrs.at_friend);
7556 CHECKSUM_ATTR (attrs.at_alignment);
7557
7558 /* Checksum the child DIEs. */
7559 c = die->die_child;
7560 if (c) do {
7561 dw_attr_node *name_attr;
7562
7563 c = c->die_sib;
7564 name_attr = get_AT (c, DW_AT_name);
7565 if (is_template_instantiation (c))
7566 {
7567 /* Ignore instantiations of member type and function templates. */
7568 }
7569 else if (name_attr != NULL
7570 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7571 {
7572 /* Use a shallow checksum for named nested types and member
7573 functions. */
7574 CHECKSUM_ULEB128 ('S');
7575 CHECKSUM_ULEB128 (c->die_tag);
7576 CHECKSUM_STRING (AT_string (name_attr));
7577 }
7578 else
7579 {
7580 /* Use a deep checksum for other children. */
7581 /* Mark this DIE so it gets processed when unmarking. */
7582 if (c->die_mark == 0)
7583 c->die_mark = -1;
7584 die_checksum_ordered (c, ctx, mark);
7585 }
7586 } while (c != die->die_child);
7587
7588 CHECKSUM_ULEB128 (0);
7589 }
7590
7591 /* Add a type name and tag to a hash. */
7592 static void
7593 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7594 {
7595 CHECKSUM_ULEB128 (tag);
7596 CHECKSUM_STRING (name);
7597 }
7598
7599 #undef CHECKSUM
7600 #undef CHECKSUM_STRING
7601 #undef CHECKSUM_ATTR
7602 #undef CHECKSUM_SLEB128
7603 #undef CHECKSUM_ULEB128
7604
7605 /* Generate the type signature for DIE. This is computed by generating an
7606 MD5 checksum over the DIE's tag, its relevant attributes, and its
7607 children. Attributes that are references to other DIEs are processed
7608 by recursion, using the MARK field to prevent infinite recursion.
7609 If the DIE is nested inside a namespace or another type, we also
7610 need to include that context in the signature. The lower 64 bits
7611 of the resulting MD5 checksum comprise the signature. */
7612
7613 static void
7614 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7615 {
7616 int mark;
7617 const char *name;
7618 unsigned char checksum[16];
7619 struct md5_ctx ctx;
7620 dw_die_ref decl;
7621 dw_die_ref parent;
7622
7623 name = get_AT_string (die, DW_AT_name);
7624 decl = get_AT_ref (die, DW_AT_specification);
7625 parent = get_die_parent (die);
7626
7627 /* First, compute a signature for just the type name (and its surrounding
7628 context, if any). This is stored in the type unit DIE for link-time
7629 ODR (one-definition rule) checking. */
7630
7631 if (is_cxx () && name != NULL)
7632 {
7633 md5_init_ctx (&ctx);
7634
7635 /* Checksum the names of surrounding namespaces and structures. */
7636 if (parent != NULL)
7637 checksum_die_context (parent, &ctx);
7638
7639 /* Checksum the current DIE. */
7640 die_odr_checksum (die->die_tag, name, &ctx);
7641 md5_finish_ctx (&ctx, checksum);
7642
7643 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7644 }
7645
7646 /* Next, compute the complete type signature. */
7647
7648 md5_init_ctx (&ctx);
7649 mark = 1;
7650 die->die_mark = mark;
7651
7652 /* Checksum the names of surrounding namespaces and structures. */
7653 if (parent != NULL)
7654 checksum_die_context (parent, &ctx);
7655
7656 /* Checksum the DIE and its children. */
7657 die_checksum_ordered (die, &ctx, &mark);
7658 unmark_all_dies (die);
7659 md5_finish_ctx (&ctx, checksum);
7660
7661 /* Store the signature in the type node and link the type DIE and the
7662 type node together. */
7663 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7664 DWARF_TYPE_SIGNATURE_SIZE);
7665 die->comdat_type_p = true;
7666 die->die_id.die_type_node = type_node;
7667 type_node->type_die = die;
7668
7669 /* If the DIE is a specification, link its declaration to the type node
7670 as well. */
7671 if (decl != NULL)
7672 {
7673 decl->comdat_type_p = true;
7674 decl->die_id.die_type_node = type_node;
7675 }
7676 }
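/* Roughly, for

      namespace N { struct S { int i; }; }

   the byte stream fed to MD5 for the full signature looks like

      'C' DW_TAG_namespace "N"              <- surrounding context
      'D' DW_TAG_structure_type             <- the DIE itself
      'A' DW_AT_name DW_FORM_string "S"
      'A' DW_AT_byte_size DW_FORM_sdata ...
      ... member DIEs ...
      0                                     <- end of children

   and the last eight bytes of the digest become the comdat signature,
   while DW_AT_GNU_odr_signature covers only the context, tag and name,
   so it is unaffected by changes to S's members.  */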
7677
7678 /* Do the location expressions look the same? */
7679 static inline int
7680 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7681 {
7682 return loc1->dw_loc_opc == loc2->dw_loc_opc
7683 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7684 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7685 }
7686
7687 /* Do the values look the same? */
7688 static int
7689 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7690 {
7691 dw_loc_descr_ref loc1, loc2;
7692 rtx r1, r2;
7693
7694 if (v1->val_class != v2->val_class)
7695 return 0;
7696
7697 switch (v1->val_class)
7698 {
7699 case dw_val_class_const:
7700 case dw_val_class_const_implicit:
7701 return v1->v.val_int == v2->v.val_int;
7702 case dw_val_class_unsigned_const:
7703 case dw_val_class_unsigned_const_implicit:
7704 return v1->v.val_unsigned == v2->v.val_unsigned;
7705 case dw_val_class_const_double:
7706 return v1->v.val_double.high == v2->v.val_double.high
7707 && v1->v.val_double.low == v2->v.val_double.low;
7708 case dw_val_class_wide_int:
7709 return *v1->v.val_wide == *v2->v.val_wide;
7710 case dw_val_class_vec:
7711 if (v1->v.val_vec.length != v2->v.val_vec.length
7712 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7713 return 0;
7714 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7715 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7716 return 0;
7717 return 1;
7718 case dw_val_class_flag:
7719 return v1->v.val_flag == v2->v.val_flag;
7720 case dw_val_class_str:
7721 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7722
7723 case dw_val_class_addr:
7724 r1 = v1->v.val_addr;
7725 r2 = v2->v.val_addr;
7726 if (GET_CODE (r1) != GET_CODE (r2))
7727 return 0;
7728 return rtx_equal_p (r1, r2);
7729
7730 case dw_val_class_offset:
7731 return v1->v.val_offset == v2->v.val_offset;
7732
7733 case dw_val_class_loc:
7734 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7735 loc1 && loc2;
7736 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7737 if (!same_loc_p (loc1, loc2, mark))
7738 return 0;
7739 return !loc1 && !loc2;
7740
7741 case dw_val_class_die_ref:
7742 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7743
7744 case dw_val_class_symview:
7745 return strcmp (v1->v.val_symbolic_view, v2->v.val_symbolic_view) == 0;
7746
7747 case dw_val_class_fde_ref:
7748 case dw_val_class_vms_delta:
7749 case dw_val_class_lbl_id:
7750 case dw_val_class_lineptr:
7751 case dw_val_class_macptr:
7752 case dw_val_class_loclistsptr:
7753 case dw_val_class_high_pc:
7754 return 1;
7755
7756 case dw_val_class_file:
7757 case dw_val_class_file_implicit:
7758 return v1->v.val_file == v2->v.val_file;
7759
7760 case dw_val_class_data8:
7761 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7762
7763 default:
7764 return 1;
7765 }
7766 }
7767
7768 /* Do the attributes look the same? */
7769
7770 static int
7771 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7772 {
7773 if (at1->dw_attr != at2->dw_attr)
7774 return 0;
7775
7776 /* We don't care that this was compiled with a different compiler
7777 snapshot; if the output is the same, that's what matters. */
7778 if (at1->dw_attr == DW_AT_producer)
7779 return 1;
7780
7781 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7782 }
7783
7784 /* Do the dies look the same? */
7785
7786 static int
7787 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7788 {
7789 dw_die_ref c1, c2;
7790 dw_attr_node *a1;
7791 unsigned ix;
7792
7793 /* To avoid infinite recursion. */
7794 if (die1->die_mark)
7795 return die1->die_mark == die2->die_mark;
7796 die1->die_mark = die2->die_mark = ++(*mark);
7797
7798 if (die1->die_tag != die2->die_tag)
7799 return 0;
7800
7801 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7802 return 0;
7803
7804 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7805 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7806 return 0;
7807
7808 c1 = die1->die_child;
7809 c2 = die2->die_child;
7810 if (! c1)
7811 {
7812 if (c2)
7813 return 0;
7814 }
7815 else
7816 for (;;)
7817 {
7818 if (!same_die_p (c1, c2, mark))
7819 return 0;
7820 c1 = c1->die_sib;
7821 c2 = c2->die_sib;
7822 if (c1 == die1->die_child)
7823 {
7824 if (c2 == die2->die_child)
7825 break;
7826 else
7827 return 0;
7828 }
7829 }
7830
7831 return 1;
7832 }
7833
7834 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7835 children, and set die_symbol. */
7836
7837 static void
7838 compute_comp_unit_symbol (dw_die_ref unit_die)
7839 {
7840 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7841 const char *base = die_name ? lbasename (die_name) : "anonymous";
7842 char *name = XALLOCAVEC (char, strlen (base) + 64);
7843 char *p;
7844 int i, mark;
7845 unsigned char checksum[16];
7846 struct md5_ctx ctx;
7847
7848 /* Compute the checksum of the DIE, then append part of it as hex digits to
7849 the base filename of the unit. */
7850
7851 md5_init_ctx (&ctx);
7852 mark = 0;
7853 die_checksum (unit_die, &ctx, &mark);
7854 unmark_all_dies (unit_die);
7855 md5_finish_ctx (&ctx, checksum);
7856
7857 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7858 not start with a letter but with anything valid for filenames and
7859 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7860 character is not a letter. */
7861 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7862 clean_symbol_name (name);
7863
7864 p = name + strlen (name);
7865 for (i = 0; i < 4; i++)
7866 {
7867 sprintf (p, "%.2x", checksum[i]);
7868 p += 2;
7869 }
7870
7871 unit_die->die_id.die_symbol = xstrdup (name);
7872 }
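/* For example, a unit whose DW_AT_name is "/src/foo.c" and whose checksum
   starts 0x12 0x34 0x56 0x78 gets a die_symbol built from "foo.c." with
   the hex digits "12345678" appended, after clean_symbol_name has mapped
   any characters not valid in assembler symbols, typically giving
   something like foo_c_12345678.  */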
7873
7874 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7875
7876 static int
7877 is_type_die (dw_die_ref die)
7878 {
7879 switch (die->die_tag)
7880 {
7881 case DW_TAG_array_type:
7882 case DW_TAG_class_type:
7883 case DW_TAG_interface_type:
7884 case DW_TAG_enumeration_type:
7885 case DW_TAG_pointer_type:
7886 case DW_TAG_reference_type:
7887 case DW_TAG_rvalue_reference_type:
7888 case DW_TAG_string_type:
7889 case DW_TAG_structure_type:
7890 case DW_TAG_subroutine_type:
7891 case DW_TAG_union_type:
7892 case DW_TAG_ptr_to_member_type:
7893 case DW_TAG_set_type:
7894 case DW_TAG_subrange_type:
7895 case DW_TAG_base_type:
7896 case DW_TAG_const_type:
7897 case DW_TAG_file_type:
7898 case DW_TAG_packed_type:
7899 case DW_TAG_volatile_type:
7900 case DW_TAG_typedef:
7901 return 1;
7902 default:
7903 return 0;
7904 }
7905 }
7906
7907 /* Returns true iff C is a compile-unit DIE. */
7908
7909 static inline bool
7910 is_cu_die (dw_die_ref c)
7911 {
7912 return c && (c->die_tag == DW_TAG_compile_unit
7913 || c->die_tag == DW_TAG_skeleton_unit);
7914 }
7915
7916 /* Returns true iff C is a unit DIE of some sort. */
7917
7918 static inline bool
7919 is_unit_die (dw_die_ref c)
7920 {
7921 return c && (c->die_tag == DW_TAG_compile_unit
7922 || c->die_tag == DW_TAG_partial_unit
7923 || c->die_tag == DW_TAG_type_unit
7924 || c->die_tag == DW_TAG_skeleton_unit);
7925 }
7926
7927 /* Returns true iff C is a namespace DIE. */
7928
7929 static inline bool
7930 is_namespace_die (dw_die_ref c)
7931 {
7932 return c && c->die_tag == DW_TAG_namespace;
7933 }
7934
7935 /* Return non-zero if this DIE is a template parameter. */
7936
7937 static inline bool
7938 is_template_parameter (dw_die_ref die)
7939 {
7940 switch (die->die_tag)
7941 {
7942 case DW_TAG_template_type_param:
7943 case DW_TAG_template_value_param:
7944 case DW_TAG_GNU_template_template_param:
7945 case DW_TAG_GNU_template_parameter_pack:
7946 return true;
7947 default:
7948 return false;
7949 }
7950 }
7951
7952 /* Return non-zero if this DIE represents a template instantiation. */
7953
7954 static inline bool
7955 is_template_instantiation (dw_die_ref die)
7956 {
7957 dw_die_ref c;
7958
7959 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7960 return false;
7961 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7962 return false;
7963 }
7964
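/* Generate a fresh target-specific internal label built from PREFIX and a
   monotonically increasing counter, returned in xstrdup'ed storage owned
   by the caller.  */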
7965 static char *
7966 gen_internal_sym (const char *prefix)
7967 {
7968 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7969
7970 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7971 return xstrdup (buf);
7972 }
7973
7974 /* Return non-zero if this DIE is a declaration. */
7975
7976 static int
7977 is_declaration_die (dw_die_ref die)
7978 {
7979 dw_attr_node *a;
7980 unsigned ix;
7981
7982 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7983 if (a->dw_attr == DW_AT_declaration)
7984 return 1;
7985
7986 return 0;
7987 }
7988
7989 /* Return non-zero if this DIE is nested inside a subprogram. */
7990
7991 static int
7992 is_nested_in_subprogram (dw_die_ref die)
7993 {
7994 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
7995
7996 if (decl == NULL)
7997 decl = die;
7998 return local_scope_p (decl);
7999 }
8000
8001 /* Return non-zero if this DIE contains a defining declaration of a
8002 subprogram. */
8003
8004 static int
8005 contains_subprogram_definition (dw_die_ref die)
8006 {
8007 dw_die_ref c;
8008
8009 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
8010 return 1;
8011 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
8012 return 0;
8013 }
8014
8015 /* Return non-zero if this is a type DIE that should be moved to a
8016 COMDAT .debug_types section or .debug_info section with DW_UT_*type
8017 unit type. */
8018
8019 static int
8020 should_move_die_to_comdat (dw_die_ref die)
8021 {
8022 switch (die->die_tag)
8023 {
8024 case DW_TAG_class_type:
8025 case DW_TAG_structure_type:
8026 case DW_TAG_enumeration_type:
8027 case DW_TAG_union_type:
8028 /* Don't move declarations, inlined instances, types nested in a
8029 subprogram, or types that contain subprogram definitions. */
8030 if (is_declaration_die (die)
8031 || get_AT (die, DW_AT_abstract_origin)
8032 || is_nested_in_subprogram (die)
8033 || contains_subprogram_definition (die))
8034 return 0;
8035 return 1;
8036 case DW_TAG_array_type:
8037 case DW_TAG_interface_type:
8038 case DW_TAG_pointer_type:
8039 case DW_TAG_reference_type:
8040 case DW_TAG_rvalue_reference_type:
8041 case DW_TAG_string_type:
8042 case DW_TAG_subroutine_type:
8043 case DW_TAG_ptr_to_member_type:
8044 case DW_TAG_set_type:
8045 case DW_TAG_subrange_type:
8046 case DW_TAG_base_type:
8047 case DW_TAG_const_type:
8048 case DW_TAG_file_type:
8049 case DW_TAG_packed_type:
8050 case DW_TAG_volatile_type:
8051 case DW_TAG_typedef:
8052 default:
8053 return 0;
8054 }
8055 }
8056
8057 /* Make a clone of DIE. */
8058
8059 static dw_die_ref
8060 clone_die (dw_die_ref die)
8061 {
8062 dw_die_ref clone = new_die_raw (die->die_tag);
8063 dw_attr_node *a;
8064 unsigned ix;
8065
8066 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8067 add_dwarf_attr (clone, a);
8068
8069 return clone;
8070 }
8071
8072 /* Make a clone of the tree rooted at DIE. */
8073
8074 static dw_die_ref
8075 clone_tree (dw_die_ref die)
8076 {
8077 dw_die_ref c;
8078 dw_die_ref clone = clone_die (die);
8079
8080 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8081
8082 return clone;
8083 }
8084
8085 /* Make a clone of DIE as a declaration. */
8086
8087 static dw_die_ref
8088 clone_as_declaration (dw_die_ref die)
8089 {
8090 dw_die_ref clone;
8091 dw_die_ref decl;
8092 dw_attr_node *a;
8093 unsigned ix;
8094
8095 /* If the DIE is already a declaration, just clone it. */
8096 if (is_declaration_die (die))
8097 return clone_die (die);
8098
8099 /* If the DIE is a specification, just clone its declaration DIE. */
8100 decl = get_AT_ref (die, DW_AT_specification);
8101 if (decl != NULL)
8102 {
8103 clone = clone_die (decl);
8104 if (die->comdat_type_p)
8105 add_AT_die_ref (clone, DW_AT_signature, die);
8106 return clone;
8107 }
8108
8109 clone = new_die_raw (die->die_tag);
8110
8111 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8112 {
8113 /* We don't want to copy over all attributes.
8114 For example we don't want DW_AT_byte_size because otherwise we will no
8115 longer have a declaration and GDB will treat it as a definition. */
8116
8117 switch (a->dw_attr)
8118 {
8119 case DW_AT_abstract_origin:
8120 case DW_AT_artificial:
8121 case DW_AT_containing_type:
8122 case DW_AT_external:
8123 case DW_AT_name:
8124 case DW_AT_type:
8125 case DW_AT_virtuality:
8126 case DW_AT_linkage_name:
8127 case DW_AT_MIPS_linkage_name:
8128 add_dwarf_attr (clone, a);
8129 break;
8130 case DW_AT_byte_size:
8131 case DW_AT_alignment:
8132 default:
8133 break;
8134 }
8135 }
8136
8137 if (die->comdat_type_p)
8138 add_AT_die_ref (clone, DW_AT_signature, die);
8139
8140 add_AT_flag (clone, DW_AT_declaration, 1);
8141 return clone;
8142 }
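/* Example of the effect: cloning a completed

      DW_TAG_structure_type  DW_AT_name "S"  DW_AT_byte_size 8  ...

   as a declaration keeps DW_AT_name (and the other attributes listed in
   the switch above), drops DW_AT_byte_size, and adds DW_AT_declaration 1,
   so consumers such as GDB continue to treat the copy as an incomplete
   type.  */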
8143
8144
8145 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8146
8147 struct decl_table_entry
8148 {
8149 dw_die_ref orig;
8150 dw_die_ref copy;
8151 };
8152
8153 /* Helpers to manipulate hash table of copied declarations. */
8154
8155 /* Hashtable helpers. */
8156
8157 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8158 {
8159 typedef die_struct *compare_type;
8160 static inline hashval_t hash (const decl_table_entry *);
8161 static inline bool equal (const decl_table_entry *, const die_struct *);
8162 };
8163
8164 inline hashval_t
8165 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8166 {
8167 return htab_hash_pointer (entry->orig);
8168 }
8169
8170 inline bool
8171 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8172 const die_struct *entry2)
8173 {
8174 return entry1->orig == entry2;
8175 }
8176
8177 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8178
8179 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8180 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8181 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8182 to check if the ancestor has already been copied into UNIT. */
8183
8184 static dw_die_ref
8185 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8186 decl_hash_type *decl_table)
8187 {
8188 dw_die_ref parent = die->die_parent;
8189 dw_die_ref new_parent = unit;
8190 dw_die_ref copy;
8191 decl_table_entry **slot = NULL;
8192 struct decl_table_entry *entry = NULL;
8193
8194 /* If DIE refers to a stub, unfold that so we get the appropriate
8195 DIE registered as orig in decl_table. */
8196 if (dw_die_ref c = get_AT_ref (die, DW_AT_signature))
8197 die = c;
8198
8199 if (decl_table)
8200 {
8201 /* Check if the entry has already been copied to UNIT. */
8202 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8203 INSERT);
8204 if (*slot != HTAB_EMPTY_ENTRY)
8205 {
8206 entry = *slot;
8207 return entry->copy;
8208 }
8209
8210 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8211 entry = XCNEW (struct decl_table_entry);
8212 entry->orig = die;
8213 entry->copy = NULL;
8214 *slot = entry;
8215 }
8216
8217 if (parent != NULL)
8218 {
8219 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8220 if (spec != NULL)
8221 parent = spec;
8222 if (!is_unit_die (parent))
8223 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8224 }
8225
8226 copy = clone_as_declaration (die);
8227 add_child_die (new_parent, copy);
8228
8229 if (decl_table)
8230 {
8231 /* Record the pointer to the copy. */
8232 entry->copy = copy;
8233 }
8234
8235 return copy;
8236 }
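/* copy_ancestor_tree is what preserves lexical context when a type is
   moved to a type unit: breaking out N::S::T creates declaration clones of
   N and S (via clone_as_declaration) under the new unit, and when a
   DECL_TABLE is supplied (as in copy_decls_walk below) the memoization
   ensures each ancestor is cloned only once.  */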
8237 /* Copy the declaration context to the new type unit DIE. This includes
8238 any surrounding namespace or type declarations. If the DIE has an
8239 AT_specification attribute, this also copies the attributes and children
8240 attached to the specification, and returns a pointer to the original
8241 parent of the declaration DIE. Otherwise it returns NULL. */
8242
8243 static dw_die_ref
8244 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8245 {
8246 dw_die_ref decl;
8247 dw_die_ref new_decl;
8248 dw_die_ref orig_parent = NULL;
8249
8250 decl = get_AT_ref (die, DW_AT_specification);
8251 if (decl == NULL)
8252 decl = die;
8253 else
8254 {
8255 unsigned ix;
8256 dw_die_ref c;
8257 dw_attr_node *a;
8258
8259 /* The original DIE will be changed to a declaration, and must
8260 be moved to be a child of the original declaration DIE. */
8261 orig_parent = decl->die_parent;
8262
8263 /* Copy the type node pointer from the new DIE to the original
8264 declaration DIE so we can forward references later. */
8265 decl->comdat_type_p = true;
8266 decl->die_id.die_type_node = die->die_id.die_type_node;
8267
8268 remove_AT (die, DW_AT_specification);
8269
8270 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8271 {
8272 if (a->dw_attr != DW_AT_name
8273 && a->dw_attr != DW_AT_declaration
8274 && a->dw_attr != DW_AT_external)
8275 add_dwarf_attr (die, a);
8276 }
8277
8278 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8279 }
8280
8281 if (decl->die_parent != NULL
8282 && !is_unit_die (decl->die_parent))
8283 {
8284 new_decl = copy_ancestor_tree (unit, decl, NULL);
8285 if (new_decl != NULL)
8286 {
8287 remove_AT (new_decl, DW_AT_signature);
8288 add_AT_specification (die, new_decl);
8289 }
8290 }
8291
8292 return orig_parent;
8293 }
8294
8295 /* Generate the skeleton ancestor tree for the given NODE, then clone
8296 the DIE and add the clone into the tree. */
8297
8298 static void
8299 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8300 {
8301 if (node->new_die != NULL)
8302 return;
8303
8304 node->new_die = clone_as_declaration (node->old_die);
8305
8306 if (node->parent != NULL)
8307 {
8308 generate_skeleton_ancestor_tree (node->parent);
8309 add_child_die (node->parent->new_die, node->new_die);
8310 }
8311 }
8312
8313 /* Generate a skeleton tree of DIEs containing any declarations that are
8314 found in the original tree. We traverse the tree looking for declaration
8315 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8316
8317 static void
8318 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8319 {
8320 skeleton_chain_node node;
8321 dw_die_ref c;
8322 dw_die_ref first;
8323 dw_die_ref prev = NULL;
8324 dw_die_ref next = NULL;
8325
8326 node.parent = parent;
8327
8328 first = c = parent->old_die->die_child;
8329 if (c)
8330 next = c->die_sib;
8331 if (c) do {
8332 if (prev == NULL || prev->die_sib == c)
8333 prev = c;
8334 c = next;
8335 next = (c == first ? NULL : c->die_sib);
8336 node.old_die = c;
8337 node.new_die = NULL;
8338 if (is_declaration_die (c))
8339 {
8340 if (is_template_instantiation (c))
8341 {
8342 /* Instantiated templates do not need to be cloned into the
8343 type unit. Just move the DIE and its children back to
8344 the skeleton tree (in the main CU). */
8345 remove_child_with_prev (c, prev);
8346 add_child_die (parent->new_die, c);
8347 c = prev;
8348 }
8349 else if (c->comdat_type_p)
8350 {
8351 /* This is the skeleton of a type broken out by an earlier
8352 break_out_comdat_types pass. Clone the existing DIE, but keep the children
8353 under the original (which is in the main CU). */
8354 dw_die_ref clone = clone_die (c);
8355
8356 replace_child (c, clone, prev);
8357 generate_skeleton_ancestor_tree (parent);
8358 add_child_die (parent->new_die, c);
8359 c = clone;
8360 continue;
8361 }
8362 else
8363 {
8364 /* Clone the existing DIE, move the original to the skeleton
8365 tree (which is in the main CU), and put the clone, with
8366 all the original's children, where the original came from
8367 (which is about to be moved to the type unit). */
8368 dw_die_ref clone = clone_die (c);
8369 move_all_children (c, clone);
8370
8371 /* If the original has a DW_AT_object_pointer attribute,
8372 it would now point to a child DIE just moved to the
8373 cloned tree, so we need to remove that attribute from
8374 the original. */
8375 remove_AT (c, DW_AT_object_pointer);
8376
8377 replace_child (c, clone, prev);
8378 generate_skeleton_ancestor_tree (parent);
8379 add_child_die (parent->new_die, c);
8380 node.old_die = clone;
8381 node.new_die = c;
8382 c = clone;
8383 }
8384 }
8385 generate_skeleton_bottom_up (&node);
8386 } while (next != NULL);
8387 }
8388
8389 /* Wrapper function for generate_skeleton_bottom_up. */
8390
8391 static dw_die_ref
8392 generate_skeleton (dw_die_ref die)
8393 {
8394 skeleton_chain_node node;
8395
8396 node.old_die = die;
8397 node.new_die = NULL;
8398 node.parent = NULL;
8399
8400 /* If this type definition is nested inside another type,
8401 and is not an instantiation of a template, always leave
8402 at least a declaration in its place. */
8403 if (die->die_parent != NULL
8404 && is_type_die (die->die_parent)
8405 && !is_template_instantiation (die))
8406 node.new_die = clone_as_declaration (die);
8407
8408 generate_skeleton_bottom_up (&node);
8409 return node.new_die;
8410 }
8411
8412 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8413 declaration. The original DIE is moved to a new compile unit so that
8414 existing references to it follow it to the new location. If any of the
8415 original DIE's descendants is a declaration, we need to replace the
8416 original DIE with a skeleton tree and move the declarations back into the
8417 skeleton tree. */
8418
8419 static dw_die_ref
8420 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8421 dw_die_ref prev)
8422 {
8423 dw_die_ref skeleton, orig_parent;
8424
8425 /* Copy the declaration context to the type unit DIE. If the returned
8426 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8427 that DIE. */
8428 orig_parent = copy_declaration_context (unit, child);
8429
8430 skeleton = generate_skeleton (child);
8431 if (skeleton == NULL)
8432 remove_child_with_prev (child, prev);
8433 else
8434 {
8435 skeleton->comdat_type_p = true;
8436 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8437
8438 /* If the original DIE was a specification, we need to put
8439 the skeleton under the parent DIE of the declaration.
8440 This leaves the original declaration in the tree, but
8441 it will be pruned later since there are no longer any
8442 references to it. */
8443 if (orig_parent != NULL)
8444 {
8445 remove_child_with_prev (child, prev);
8446 add_child_die (orig_parent, skeleton);
8447 }
8448 else
8449 replace_child (child, skeleton, prev);
8450 }
8451
8452 return skeleton;
8453 }
8454
8455 static void
8456 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8457 comdat_type_node *type_node,
8458 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8459
8460 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8461 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8462 DWARF procedure references in the DW_AT_location attribute. */
8463
8464 static dw_die_ref
8465 copy_dwarf_procedure (dw_die_ref die,
8466 comdat_type_node *type_node,
8467 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8468 {
8469 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8470
8471 /* DWARF procedures are not supposed to have children... */
8472 gcc_assert (die->die_child == NULL);
8473
8474 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8475 gcc_assert (vec_safe_length (die->die_attr) == 1
8476 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8477
8478 /* Do not copy DWARF procedures more than once. */
8479 bool existed;
8480 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8481 if (existed)
8482 return die_copy;
8483
8484 die_copy = clone_die (die);
8485 add_child_die (type_node->root_die, die_copy);
8486 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8487 return die_copy;
8488 }
8489
8490 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8491 procedures in DIE's attributes. */
8492
8493 static void
8494 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8495 comdat_type_node *type_node,
8496 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8497 {
8498 dw_attr_node *a;
8499 unsigned i;
8500
8501 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8502 {
8503 dw_loc_descr_ref loc;
8504
8505 if (a->dw_attr_val.val_class != dw_val_class_loc)
8506 continue;
8507
8508 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8509 {
8510 switch (loc->dw_loc_opc)
8511 {
8512 case DW_OP_call2:
8513 case DW_OP_call4:
8514 case DW_OP_call_ref:
8515 gcc_assert (loc->dw_loc_oprnd1.val_class
8516 == dw_val_class_die_ref);
8517 loc->dw_loc_oprnd1.v.val_die_ref.die
8518 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8519 type_node,
8520 copied_dwarf_procs);
8521
8522 default:
8523 break;
8524 }
8525 }
8526 }
8527 }
8528
8529 /* Copy DWARF procedures that are referenced by the DIE tree to TREE_NODE and
8530 rewrite references to point to the copies.
8531
8532 References are looked for in DIE's attributes and recursively in all its
8533 children's attributes that are location descriptions. COPIED_DWARF_PROCS is
8534 a mapping from old DWARF procedures to their copies. It is used to avoid
8535 copying the same DWARF procedure twice under TYPE_NODE. */
8536
8537 static void
8538 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8539 comdat_type_node *type_node,
8540 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8541 {
8542 dw_die_ref c;
8543
8544 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8545 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8546 type_node,
8547 copied_dwarf_procs));
8548 }
8549
8550 /* Traverse the DIE and set up additional .debug_types or .debug_info
8551 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8552 section. */
8553
8554 static void
8555 break_out_comdat_types (dw_die_ref die)
8556 {
8557 dw_die_ref c;
8558 dw_die_ref first;
8559 dw_die_ref prev = NULL;
8560 dw_die_ref next = NULL;
8561 dw_die_ref unit = NULL;
8562
8563 first = c = die->die_child;
8564 if (c)
8565 next = c->die_sib;
8566 if (c) do {
8567 if (prev == NULL || prev->die_sib == c)
8568 prev = c;
8569 c = next;
8570 next = (c == first ? NULL : c->die_sib);
8571 if (should_move_die_to_comdat (c))
8572 {
8573 dw_die_ref replacement;
8574 comdat_type_node *type_node;
8575
8576 /* Break out nested types into their own type units. */
8577 break_out_comdat_types (c);
8578
8579 /* Create a new type unit DIE as the root for the new tree. */
8580 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8581 add_AT_unsigned (unit, DW_AT_language,
8582 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8583
8584 /* Add the new unit's type DIE into the comdat type list. */
8585 type_node = ggc_cleared_alloc<comdat_type_node> ();
8586 type_node->root_die = unit;
8587 type_node->next = comdat_type_list;
8588 comdat_type_list = type_node;
8589
8590 /* Generate the type signature. */
8591 generate_type_signature (c, type_node);
8592
8593 /* Copy the declaration context, attributes, and children of the
8594 declaration into the new type unit DIE, then remove this DIE
8595 from the main CU (or replace it with a skeleton if necessary). */
8596 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8597 type_node->skeleton_die = replacement;
8598
8599 /* Add the DIE to the new compunit. */
8600 add_child_die (unit, c);
8601
8602 /* Types can reference DWARF procedures for type size or data location
8603 expressions. Calls in DWARF expressions cannot target procedures
8604 that are not in the same section. So we must copy DWARF procedures
8605 along with this type and then rewrite references to them. */
8606 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8607 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8608
8609 if (replacement != NULL)
8610 c = replacement;
8611 }
8612 else if (c->die_tag == DW_TAG_namespace
8613 || c->die_tag == DW_TAG_class_type
8614 || c->die_tag == DW_TAG_structure_type
8615 || c->die_tag == DW_TAG_union_type)
8616 {
8617 /* Look for nested types that can be broken out. */
8618 break_out_comdat_types (c);
8619 }
8620 } while (next != NULL);
8621 }
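/* For instance, given a top-level

      struct Outer { struct Inner { ... } member; ... };

   the recursion above first moves Inner into its own DW_TAG_type_unit and
   then does the same for Outer, so each type gets its own signature; the
   main CU is left with skeleton declarations standing in for the moved
   definitions.  */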
8622
8623 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8624 Enter all the cloned children into the hash table decl_table. */
8625
8626 static dw_die_ref
8627 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8628 {
8629 dw_die_ref c;
8630 dw_die_ref clone;
8631 struct decl_table_entry *entry;
8632 decl_table_entry **slot;
8633
8634 if (die->die_tag == DW_TAG_subprogram)
8635 clone = clone_as_declaration (die);
8636 else
8637 clone = clone_die (die);
8638
8639 slot = decl_table->find_slot_with_hash (die,
8640 htab_hash_pointer (die), INSERT);
8641
8642 /* Assert that DIE isn't in the hash table yet. If it were already
8643 there, its ancestors would necessarily be there as well, and
8644 clone_tree_partial wouldn't have been called. */
8645 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8646
8647 entry = XCNEW (struct decl_table_entry);
8648 entry->orig = die;
8649 entry->copy = clone;
8650 *slot = entry;
8651
8652 if (die->die_tag != DW_TAG_subprogram)
8653 FOR_EACH_CHILD (die, c,
8654 add_child_die (clone, clone_tree_partial (c, decl_table)));
8655
8656 return clone;
8657 }
8658
8659 /* Walk the DIE and its children, looking for references to incomplete
8660 or trivial types that are unmarked (i.e., that are not in the current
8661 type_unit). */
8662
8663 static void
8664 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8665 {
8666 dw_die_ref c;
8667 dw_attr_node *a;
8668 unsigned ix;
8669
8670 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8671 {
8672 if (AT_class (a) == dw_val_class_die_ref)
8673 {
8674 dw_die_ref targ = AT_ref (a);
8675 decl_table_entry **slot;
8676 struct decl_table_entry *entry;
8677
8678 if (targ->die_mark != 0 || targ->comdat_type_p)
8679 continue;
8680
8681 slot = decl_table->find_slot_with_hash (targ,
8682 htab_hash_pointer (targ),
8683 INSERT);
8684
8685 if (*slot != HTAB_EMPTY_ENTRY)
8686 {
8687 /* TARG has already been copied, so we just need to
8688 modify the reference to point to the copy. */
8689 entry = *slot;
8690 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8691 }
8692 else
8693 {
8694 dw_die_ref parent = unit;
8695 dw_die_ref copy = clone_die (targ);
8696
8697 /* Record in DECL_TABLE that TARG has been copied.
8698 Need to do this now, before the recursive call,
8699 because DECL_TABLE may be expanded and SLOT
8700 would no longer be a valid pointer. */
8701 entry = XCNEW (struct decl_table_entry);
8702 entry->orig = targ;
8703 entry->copy = copy;
8704 *slot = entry;
8705
8706 /* If TARG is not a declaration DIE, we need to copy its
8707 children. */
8708 if (!is_declaration_die (targ))
8709 {
8710 FOR_EACH_CHILD (
8711 targ, c,
8712 add_child_die (copy,
8713 clone_tree_partial (c, decl_table)));
8714 }
8715
8716 /* Make sure the cloned tree is marked as part of the
8717 type unit. */
8718 mark_dies (copy);
8719
8720 /* If TARG has surrounding context, copy its ancestor tree
8721 into the new type unit. */
8722 if (targ->die_parent != NULL
8723 && !is_unit_die (targ->die_parent))
8724 parent = copy_ancestor_tree (unit, targ->die_parent,
8725 decl_table);
8726
8727 add_child_die (parent, copy);
8728 a->dw_attr_val.v.val_die_ref.die = copy;
8729
8730 /* Make sure the newly-copied DIE is walked. If it was
8731 installed in a previously-added context, it won't
8732 get visited otherwise. */
8733 if (parent != unit)
8734 {
8735 /* Find the highest point of the newly-added tree,
8736 mark each node along the way, and walk from there. */
8737 parent->die_mark = 1;
8738 while (parent->die_parent
8739 && parent->die_parent->die_mark == 0)
8740 {
8741 parent = parent->die_parent;
8742 parent->die_mark = 1;
8743 }
8744 copy_decls_walk (unit, parent, decl_table);
8745 }
8746 }
8747 }
8748 }
8749
8750 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8751 }
8752
8753 /* Collect the skeleton DIEs within DIE that were already created by
8754 break_out_comdat_types, and record them in DECL_TABLE. */
8755
8756 static void
8757 collect_skeleton_dies (dw_die_ref die, decl_hash_type *decl_table)
8758 {
8759 dw_die_ref c;
8760
8761 if (dw_attr_node *a = get_AT (die, DW_AT_signature))
8762 {
8763 dw_die_ref targ = AT_ref (a);
8764 gcc_assert (targ->die_mark == 0 && targ->comdat_type_p);
8765 decl_table_entry **slot
8766 = decl_table->find_slot_with_hash (targ,
8767 htab_hash_pointer (targ),
8768 INSERT);
8769 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8770 /* Record in DECL_TABLE that TARG has been already copied
8771 by remove_child_or_replace_with_skeleton. */
8772 decl_table_entry *entry = XCNEW (struct decl_table_entry);
8773 entry->orig = targ;
8774 entry->copy = die;
8775 *slot = entry;
8776 }
8777 FOR_EACH_CHILD (die, c, collect_skeleton_dies (c, decl_table));
8778 }
8779
8780 /* Copy declarations for "unworthy" types into the new comdat section.
8781 Incomplete types, modified types, and certain other types aren't broken
8782 out into comdat sections of their own, so they don't have a signature,
8783 and we need to copy the declaration into the same section so that we
8784 don't have an external reference. */
8785
8786 static void
8787 copy_decls_for_unworthy_types (dw_die_ref unit)
8788 {
8789 mark_dies (unit);
8790 decl_hash_type decl_table (10);
8791 collect_skeleton_dies (unit, &decl_table);
8792 copy_decls_walk (unit, unit, &decl_table);
8793 unmark_dies (unit);
8794 }
8795
8796 /* Traverse the DIE and add a sibling attribute if it may have the
8797 effect of speeding up access to siblings. To save some space,
8798 avoid generating sibling attributes for DIEs without children. */
8799
8800 static void
8801 add_sibling_attributes (dw_die_ref die)
8802 {
8803 dw_die_ref c;
8804
8805 if (! die->die_child)
8806 return;
8807
8808 if (die->die_parent && die != die->die_parent->die_child)
8809 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8810
8811 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8812 }
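/* DW_AT_sibling is purely an access-time optimization: a consumer scanning
   one level of the tree (say, the direct children of a CU) can use it to
   hop to the next sibling without decoding the current DIE's possibly
   large subtree of children.  */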
8813
8814 /* Output all location lists for the DIE and its children. */
8815
8816 static void
8817 output_location_lists (dw_die_ref die)
8818 {
8819 dw_die_ref c;
8820 dw_attr_node *a;
8821 unsigned ix;
8822
8823 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8824 if (AT_class (a) == dw_val_class_loc_list)
8825 output_loc_list (AT_loc_list (a));
8826
8827 FOR_EACH_CHILD (die, c, output_location_lists (c));
8828 }
8829
8830 /* During assign_location_list_indexes and output_loclists_offsets this is
8831 the current index; afterwards it is the number of assigned indexes (i.e.
8832 how large the .debug_loclists* offset table should be). */
8833 static unsigned int loc_list_idx;
8834
8835 /* Output all location list offsets for the DIE and its children. */
8836
8837 static void
8838 output_loclists_offsets (dw_die_ref die)
8839 {
8840 dw_die_ref c;
8841 dw_attr_node *a;
8842 unsigned ix;
8843
8844 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8845 if (AT_class (a) == dw_val_class_loc_list)
8846 {
8847 dw_loc_list_ref l = AT_loc_list (a);
8848 if (l->offset_emitted)
8849 continue;
8850 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8851 loc_section_label, NULL);
8852 gcc_assert (l->hash == loc_list_idx);
8853 loc_list_idx++;
8854 l->offset_emitted = true;
8855 }
8856
8857 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8858 }
8859
8860 /* Recursively set indexes of location lists. */
8861
8862 static void
8863 assign_location_list_indexes (dw_die_ref die)
8864 {
8865 dw_die_ref c;
8866 dw_attr_node *a;
8867 unsigned ix;
8868
8869 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8870 if (AT_class (a) == dw_val_class_loc_list)
8871 {
8872 dw_loc_list_ref list = AT_loc_list (a);
8873 if (!list->num_assigned)
8874 {
8875 list->num_assigned = true;
8876 list->hash = loc_list_idx++;
8877 }
8878 }
8879
8880 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8881 }
8882
8883 /* We want to limit the number of external references, because they are
8884 larger than local references: a relocation takes multiple words, and
8885 even a sig8 reference is always eight bytes, whereas a local reference
8886 can be as small as one byte (though GCC usually uses 4-byte DW_FORM_ref4).
8887 So if we encounter multiple external references to the same type DIE, we
8888 make a local typedef stub for it and redirect all references there.
8889
8890 This is the element of the hash table for keeping track of these
8891 references. */
8892
8893 struct external_ref
8894 {
8895 dw_die_ref type;
8896 dw_die_ref stub;
8897 unsigned n_refs;
8898 };
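/* A rough size comparison motivating the stub: with DW_FORM_ref_sig8 each
   reference to an external comdat type costs 8 bytes, so N references cost
   8*N; with one local stub DIE (a DW_AT_signature holder of roughly a
   dozen bytes) plus N 4-byte DW_FORM_ref4 references, the cost is about
   4*N + 12, which wins once there are more than a few references.  */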
8899
8900 /* Hashtable helpers. */
8901
8902 struct external_ref_hasher : free_ptr_hash <external_ref>
8903 {
8904 static inline hashval_t hash (const external_ref *);
8905 static inline bool equal (const external_ref *, const external_ref *);
8906 };
8907
8908 inline hashval_t
8909 external_ref_hasher::hash (const external_ref *r)
8910 {
8911 dw_die_ref die = r->type;
8912 hashval_t h = 0;
8913
8914 /* We can't use the address of the DIE for hashing, because
8915 that will make the order of the stub DIEs non-deterministic. */
8916 if (! die->comdat_type_p)
8917 /* We have a symbol; use it to compute a hash. */
8918 h = htab_hash_string (die->die_id.die_symbol);
8919 else
8920 {
8921 /* We have a type signature; use a subset of the bits as the hash.
8922 The 8-byte signature is at least as large as hashval_t. */
8923 comdat_type_node *type_node = die->die_id.die_type_node;
8924 memcpy (&h, type_node->signature, sizeof (h));
8925 }
8926 return h;
8927 }
8928
8929 inline bool
8930 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8931 {
8932 return r1->type == r2->type;
8933 }
8934
8935 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8936
8937 /* Return a pointer to the external_ref for references to DIE. */
8938
8939 static struct external_ref *
8940 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8941 {
8942 struct external_ref ref, *ref_p;
8943 external_ref **slot;
8944
8945 ref.type = die;
8946 slot = map->find_slot (&ref, INSERT);
8947 if (*slot != HTAB_EMPTY_ENTRY)
8948 return *slot;
8949
8950 ref_p = XCNEW (struct external_ref);
8951 ref_p->type = die;
8952 *slot = ref_p;
8953 return ref_p;
8954 }
8955
8956 /* Subroutine of optimize_external_refs, below.
8957
8958 If we see a type skeleton, record it as our stub. If we see external
8959 references, remember how many we've seen. */
8960
8961 static void
8962 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8963 {
8964 dw_die_ref c;
8965 dw_attr_node *a;
8966 unsigned ix;
8967 struct external_ref *ref_p;
8968
8969 if (is_type_die (die)
8970 && (c = get_AT_ref (die, DW_AT_signature)))
8971 {
8972 /* This is a local skeleton; use it for local references. */
8973 ref_p = lookup_external_ref (map, c);
8974 ref_p->stub = die;
8975 }
8976
8977 /* Scan the DIE references, and remember any that refer to DIEs from
8978 other CUs (i.e. those which are not marked). */
8979 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8980 if (AT_class (a) == dw_val_class_die_ref
8981 && (c = AT_ref (a))->die_mark == 0
8982 && is_type_die (c))
8983 {
8984 ref_p = lookup_external_ref (map, c);
8985 ref_p->n_refs++;
8986 }
8987
8988 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8989 }
8990
8991 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8992 points to an external_ref, DATA is the CU we're processing. If we don't
8993 already have a local stub, and we have multiple refs, build a stub. */
8994
8995 int
8996 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8997 {
8998 struct external_ref *ref_p = *slot;
8999
9000 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
9001 {
9002 /* We have multiple references to this type, so build a small stub.
9003 Both of these forms are a bit dodgy from the perspective of the
9004 DWARF standard, since technically they should have names. */
9005 dw_die_ref cu = data;
9006 dw_die_ref type = ref_p->type;
9007 dw_die_ref stub = NULL;
9008
9009 if (type->comdat_type_p)
9010 {
9011 /* If we refer to this type via sig8, use AT_signature. */
9012 stub = new_die (type->die_tag, cu, NULL_TREE);
9013 add_AT_die_ref (stub, DW_AT_signature, type);
9014 }
9015 else
9016 {
9017 /* Otherwise, use a typedef with no name. */
9018 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
9019 add_AT_die_ref (stub, DW_AT_type, type);
9020 }
9021
9022 stub->die_mark++;
9023 ref_p->stub = stub;
9024 }
9025 return 1;
9026 }
9027
9028 /* DIE is a unit; look through all the DIE references to see if there are
9029 any external references to types, and if so, create local stubs for
9030 them which will be applied in build_abbrev_table. This is useful because
9031 references to local DIEs are smaller. */
9032
9033 static external_ref_hash_type *
9034 optimize_external_refs (dw_die_ref die)
9035 {
9036 external_ref_hash_type *map = new external_ref_hash_type (10);
9037 optimize_external_refs_1 (die, map);
9038 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
9039 return map;
9040 }
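/* Rough cost sketch (an editorial illustration, not part of the original
   source): an external reference is either a DW_FORM_ref_addr offset plus a
   relocation or an 8-byte DW_FORM_ref_sig8 signature, while a redirected
   local reference is a 4-byte DW_FORM_ref4 with no relocation.  The stub
   itself pays the external cost once, so the more references a type has,
   the more the redirection saves - hence the n_refs > 1 threshold in
   dwarf2_build_local_stub above.  */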
9041
9042 /* The following four variables are temporaries that are computed only during the
9043 build_abbrev_table call and used and released during the following
9044 optimize_abbrev_table call. */
9045
9046 /* First abbrev_id that can be optimized based on usage. */
9047 static unsigned int abbrev_opt_start;
9048
9049 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
9050 abbrev_id smaller than this, because they must be already sized
9051 during build_abbrev_table). */
9052 static unsigned int abbrev_opt_base_type_end;
9053
9054 /* Vector of usage counts during build_abbrev_table. Indexed by
9055 abbrev_id - abbrev_opt_start. */
9056 static vec<unsigned int> abbrev_usage_count;
9057
9058 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
9059 static vec<dw_die_ref> sorted_abbrev_dies;
9060
9061 /* The format of each DIE (and its attribute value pairs) is encoded in an
9062 abbreviation table. This routine builds the abbreviation table and assigns
9063 a unique abbreviation id for each abbreviation entry. The children of each
9064 die are visited recursively. */
9065
9066 static void
9067 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
9068 {
9069 unsigned int abbrev_id = 0;
9070 dw_die_ref c;
9071 dw_attr_node *a;
9072 unsigned ix;
9073 dw_die_ref abbrev;
9074
9075 /* Scan the DIE references, and replace any that refer to
9076 DIEs from other CUs (i.e. those which are not marked) with
9077 the local stubs we built in optimize_external_refs. */
9078 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9079 if (AT_class (a) == dw_val_class_die_ref
9080 && (c = AT_ref (a))->die_mark == 0)
9081 {
9082 struct external_ref *ref_p;
9083 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
9084
9085 if (is_type_die (c)
9086 && (ref_p = lookup_external_ref (extern_map, c))
9087 && ref_p->stub && ref_p->stub != die)
9088 {
9089 gcc_assert (a->dw_attr != DW_AT_signature);
9090 change_AT_die_ref (a, ref_p->stub);
9091 }
9092 else
9093 /* We aren't changing this reference, so mark it external. */
9094 set_AT_ref_external (a, 1);
9095 }
9096
9097 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9098 {
9099 dw_attr_node *die_a, *abbrev_a;
9100 unsigned ix;
9101 bool ok = true;
9102
9103 if (abbrev_id == 0)
9104 continue;
9105 if (abbrev->die_tag != die->die_tag)
9106 continue;
9107 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
9108 continue;
9109
9110 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
9111 continue;
9112
9113 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
9114 {
9115 abbrev_a = &(*abbrev->die_attr)[ix];
9116 if ((abbrev_a->dw_attr != die_a->dw_attr)
9117 || (value_format (abbrev_a) != value_format (die_a)))
9118 {
9119 ok = false;
9120 break;
9121 }
9122 }
9123 if (ok)
9124 break;
9125 }
9126
9127 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9128 {
9129 vec_safe_push (abbrev_die_table, die);
9130 if (abbrev_opt_start)
9131 abbrev_usage_count.safe_push (0);
9132 }
9133 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9134 {
9135 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9136 sorted_abbrev_dies.safe_push (die);
9137 }
9138
9139 die->die_abbrev = abbrev_id;
9140 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9141 }
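/* Illustrative note (not part of the original source): two DIEs share an
   abbreviation only if they have the same tag, the same has-children flag
   and the same ordered list of (attribute, form) pairs - exactly the checks
   done by the linear scan over abbrev_die_table above.  So, for example, a
   run of DW_TAG_formal_parameter DIEs whose attributes all use the same
   forms collapses onto a single abbrev code.  */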
9142
9143 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9144 by die_abbrev's usage count, from the most commonly used
9145 abbreviation to the least. */
9146
9147 static int
9148 die_abbrev_cmp (const void *p1, const void *p2)
9149 {
9150 dw_die_ref die1 = *(const dw_die_ref *) p1;
9151 dw_die_ref die2 = *(const dw_die_ref *) p2;
9152
9153 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9154 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9155
9156 if (die1->die_abbrev >= abbrev_opt_base_type_end
9157 && die2->die_abbrev >= abbrev_opt_base_type_end)
9158 {
9159 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9160 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9161 return -1;
9162 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9163 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9164 return 1;
9165 }
9166
9167 /* Stabilize the sort. */
9168 if (die1->die_abbrev < die2->die_abbrev)
9169 return -1;
9170 if (die1->die_abbrev > die2->die_abbrev)
9171 return 1;
9172
9173 return 0;
9174 }
9175
9176 /* Convert dw_val_class_const and dw_val_class_unsigned_const class attributes
9177 of DIEs in between sorted_abbrev_dies[first_id] and sorted_abbrev_dies[end - 1]
9178 into dw_val_class_const_implicit or
9179 dw_val_class_unsigned_const_implicit. */
9180
9181 static void
9182 optimize_implicit_const (unsigned int first_id, unsigned int end,
9183 vec<bool> &implicit_consts)
9184 {
9185 /* It never makes sense if there is just one DIE using the abbreviation. */
9186 if (end < first_id + 2)
9187 return;
9188
9189 dw_attr_node *a;
9190 unsigned ix, i;
9191 dw_die_ref die = sorted_abbrev_dies[first_id];
9192 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9193 if (implicit_consts[ix])
9194 {
9195 enum dw_val_class new_class = dw_val_class_none;
9196 switch (AT_class (a))
9197 {
9198 case dw_val_class_unsigned_const:
9199 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9200 continue;
9201
9202 /* The .debug_abbrev section will grow by
9203 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
9204 in all the DIEs using that abbreviation. */
9205 if (constant_size (AT_unsigned (a)) * (end - first_id)
9206 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9207 continue;
9208
9209 new_class = dw_val_class_unsigned_const_implicit;
9210 break;
9211
9212 case dw_val_class_const:
9213 new_class = dw_val_class_const_implicit;
9214 break;
9215
9216 case dw_val_class_file:
9217 new_class = dw_val_class_file_implicit;
9218 break;
9219
9220 default:
9221 continue;
9222 }
9223 for (i = first_id; i < end; i++)
9224 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9225 = new_class;
9226 }
9227 }
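/* Illustrative note (not part of the original source): with
   DW_FORM_implicit_const the value is stored once in the abbreviation itself
   rather than in each DIE.  E.g. if every DIE sharing an abbrev has
   DW_AT_decl_file == 1, the abbrev entry can say
       DW_AT_decl_file  DW_FORM_implicit_const  1
   and the DIEs then carry zero bytes for that attribute, which is why the
   heuristic above weighs the one-off sleb128 cost in .debug_abbrev against
   the per-DIE constant size times the number of DIEs sharing the
   abbreviation.  */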
9228
9229 /* Attempt to optimize the abbreviation table for the abbreviations with
9230 id abbrev_opt_start and above. */
9231
9232 static void
9233 optimize_abbrev_table (void)
9234 {
9235 if (abbrev_opt_start
9236 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9237 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9238 {
9239 auto_vec<bool, 32> implicit_consts;
9240 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9241
9242 unsigned int abbrev_id = abbrev_opt_start - 1;
9243 unsigned int first_id = ~0U;
9244 unsigned int last_abbrev_id = 0;
9245 unsigned int i;
9246 dw_die_ref die;
9247 if (abbrev_opt_base_type_end > abbrev_opt_start)
9248 abbrev_id = abbrev_opt_base_type_end - 1;
9249 /* Reassign abbreviation ids from abbrev_opt_start above, so that
9250 most commonly used abbreviations come first. */
9251 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9252 {
9253 dw_attr_node *a;
9254 unsigned ix;
9255
9256 /* If calc_base_type_die_sizes has been called, the CU and
9257 base types after it can't be optimized, because we've already
9258 calculated their DIE offsets. We've sorted them first. */
9259 if (die->die_abbrev < abbrev_opt_base_type_end)
9260 continue;
9261 if (die->die_abbrev != last_abbrev_id)
9262 {
9263 last_abbrev_id = die->die_abbrev;
9264 if (dwarf_version >= 5 && first_id != ~0U)
9265 optimize_implicit_const (first_id, i, implicit_consts);
9266 abbrev_id++;
9267 (*abbrev_die_table)[abbrev_id] = die;
9268 if (dwarf_version >= 5)
9269 {
9270 first_id = i;
9271 implicit_consts.truncate (0);
9272
9273 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9274 switch (AT_class (a))
9275 {
9276 case dw_val_class_const:
9277 case dw_val_class_unsigned_const:
9278 case dw_val_class_file:
9279 implicit_consts.safe_push (true);
9280 break;
9281 default:
9282 implicit_consts.safe_push (false);
9283 break;
9284 }
9285 }
9286 }
9287 else if (dwarf_version >= 5)
9288 {
9289 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9290 if (!implicit_consts[ix])
9291 continue;
9292 else
9293 {
9294 dw_attr_node *other_a
9295 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9296 if (!dw_val_equal_p (&a->dw_attr_val,
9297 &other_a->dw_attr_val))
9298 implicit_consts[ix] = false;
9299 }
9300 }
9301 die->die_abbrev = abbrev_id;
9302 }
9303 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9304 if (dwarf_version >= 5 && first_id != ~0U)
9305 optimize_implicit_const (first_id, i, implicit_consts);
9306 }
9307
9308 abbrev_opt_start = 0;
9309 abbrev_opt_base_type_end = 0;
9310 abbrev_usage_count.release ();
9311 sorted_abbrev_dies.release ();
9312 }
9313 \f
9314 /* Return the power-of-two number of bytes necessary to represent VALUE. */
9315
9316 static int
9317 constant_size (unsigned HOST_WIDE_INT value)
9318 {
9319 int log;
9320
9321 if (value == 0)
9322 log = 0;
9323 else
9324 log = floor_log2 (value);
9325
9326 log = log / 8;
9327 log = 1 << (floor_log2 (log) + 1);
9328
9329 return log;
9330 }
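/* Illustrative note (not part of the original source): constant_size rounds
   the byte length of VALUE up to the next power of two, so it always returns
   1, 2, 4 or 8.  A few hand-worked cases, assuming floor_log2 (0) == -1 as
   in GCC's hwint.h:
     constant_size (0)          == 1
     constant_size (0xff)       == 1
     constant_size (0x100)      == 2
     constant_size (0xffff)     == 2
     constant_size (0x10000)    == 4
     constant_size (0xffffffff) == 4
     constant_size ((unsigned HOST_WIDE_INT) 1 << 32) == 8
   These map directly onto DW_FORM_data1/2/4/8 in value_format below.  */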
9331
9332 /* Return the size of a DIE as it is represented in the
9333 .debug_info section. */
9334
9335 static unsigned long
9336 size_of_die (dw_die_ref die)
9337 {
9338 unsigned long size = 0;
9339 dw_attr_node *a;
9340 unsigned ix;
9341 enum dwarf_form form;
9342
9343 size += size_of_uleb128 (die->die_abbrev);
9344 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9345 {
9346 switch (AT_class (a))
9347 {
9348 case dw_val_class_addr:
9349 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9350 {
9351 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9352 size += size_of_uleb128 (AT_index (a));
9353 }
9354 else
9355 size += DWARF2_ADDR_SIZE;
9356 break;
9357 case dw_val_class_offset:
9358 size += DWARF_OFFSET_SIZE;
9359 break;
9360 case dw_val_class_loc:
9361 {
9362 unsigned long lsize = size_of_locs (AT_loc (a));
9363
9364 /* Block length. */
9365 if (dwarf_version >= 4)
9366 size += size_of_uleb128 (lsize);
9367 else
9368 size += constant_size (lsize);
9369 size += lsize;
9370 }
9371 break;
9372 case dw_val_class_loc_list:
9373 if (dwarf_split_debug_info && dwarf_version >= 5)
9374 {
9375 gcc_assert (AT_loc_list (a)->num_assigned);
9376 size += size_of_uleb128 (AT_loc_list (a)->hash);
9377 }
9378 else
9379 size += DWARF_OFFSET_SIZE;
9380 break;
9381 case dw_val_class_view_list:
9382 size += DWARF_OFFSET_SIZE;
9383 break;
9384 case dw_val_class_range_list:
9385 if (value_format (a) == DW_FORM_rnglistx)
9386 {
9387 gcc_assert (rnglist_idx);
9388 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9389 size += size_of_uleb128 (r->idx);
9390 }
9391 else
9392 size += DWARF_OFFSET_SIZE;
9393 break;
9394 case dw_val_class_const:
9395 size += size_of_sleb128 (AT_int (a));
9396 break;
9397 case dw_val_class_unsigned_const:
9398 {
9399 int csize = constant_size (AT_unsigned (a));
9400 if (dwarf_version == 3
9401 && a->dw_attr == DW_AT_data_member_location
9402 && csize >= 4)
9403 size += size_of_uleb128 (AT_unsigned (a));
9404 else
9405 size += csize;
9406 }
9407 break;
9408 case dw_val_class_symview:
9409 if (symview_upper_bound <= 0xff)
9410 size += 1;
9411 else if (symview_upper_bound <= 0xffff)
9412 size += 2;
9413 else if (symview_upper_bound <= 0xffffffff)
9414 size += 4;
9415 else
9416 size += 8;
9417 break;
9418 case dw_val_class_const_implicit:
9419 case dw_val_class_unsigned_const_implicit:
9420 case dw_val_class_file_implicit:
9421 /* These occupy no size in the DIE, just an extra sleb128 in
9422 .debug_abbrev. */
9423 break;
9424 case dw_val_class_const_double:
9425 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9426 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9427 size++; /* block */
9428 break;
9429 case dw_val_class_wide_int:
9430 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9431 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9432 if (get_full_len (*a->dw_attr_val.v.val_wide)
9433 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9434 size++; /* block */
9435 break;
9436 case dw_val_class_vec:
9437 size += constant_size (a->dw_attr_val.v.val_vec.length
9438 * a->dw_attr_val.v.val_vec.elt_size)
9439 + a->dw_attr_val.v.val_vec.length
9440 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9441 break;
9442 case dw_val_class_flag:
9443 if (dwarf_version >= 4)
9444 /* Currently all add_AT_flag calls pass in 1 as last argument,
9445 so DW_FORM_flag_present can be used. If that ever changes,
9446 we'll need to use DW_FORM_flag and have some optimization
9447 in build_abbrev_table that will change those to
9448 DW_FORM_flag_present if it is set to 1 in all DIEs using
9449 the same abbrev entry. */
9450 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9451 else
9452 size += 1;
9453 break;
9454 case dw_val_class_die_ref:
9455 if (AT_ref_external (a))
9456 {
9457 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9458 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9459 is sized by target address length, whereas in DWARF3
9460 it's always sized as an offset. */
9461 if (AT_ref (a)->comdat_type_p)
9462 size += DWARF_TYPE_SIGNATURE_SIZE;
9463 else if (dwarf_version == 2)
9464 size += DWARF2_ADDR_SIZE;
9465 else
9466 size += DWARF_OFFSET_SIZE;
9467 }
9468 else
9469 size += DWARF_OFFSET_SIZE;
9470 break;
9471 case dw_val_class_fde_ref:
9472 size += DWARF_OFFSET_SIZE;
9473 break;
9474 case dw_val_class_lbl_id:
9475 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9476 {
9477 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9478 size += size_of_uleb128 (AT_index (a));
9479 }
9480 else
9481 size += DWARF2_ADDR_SIZE;
9482 break;
9483 case dw_val_class_lineptr:
9484 case dw_val_class_macptr:
9485 case dw_val_class_loclistsptr:
9486 size += DWARF_OFFSET_SIZE;
9487 break;
9488 case dw_val_class_str:
9489 form = AT_string_form (a);
9490 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9491 size += DWARF_OFFSET_SIZE;
9492 else if (form == dwarf_FORM (DW_FORM_strx))
9493 size += size_of_uleb128 (AT_index (a));
9494 else
9495 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9496 break;
9497 case dw_val_class_file:
9498 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9499 break;
9500 case dw_val_class_data8:
9501 size += 8;
9502 break;
9503 case dw_val_class_vms_delta:
9504 size += DWARF_OFFSET_SIZE;
9505 break;
9506 case dw_val_class_high_pc:
9507 size += DWARF2_ADDR_SIZE;
9508 break;
9509 case dw_val_class_discr_value:
9510 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9511 break;
9512 case dw_val_class_discr_list:
9513 {
9514 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9515
9516 /* This is a block, so we have the block length and then its
9517 data. */
9518 size += constant_size (block_size) + block_size;
9519 }
9520 break;
9521 default:
9522 gcc_unreachable ();
9523 }
9524 }
9525
9526 return size;
9527 }
9528
9529 /* Size the debugging information associated with a given DIE. Visits the
9530 DIE's children recursively. Updates the global variable next_die_offset
9531 each time through. Uses the current value of next_die_offset to update the
9532 die_offset field in each DIE. */
9533
9534 static void
9535 calc_die_sizes (dw_die_ref die)
9536 {
9537 dw_die_ref c;
9538
9539 gcc_assert (die->die_offset == 0
9540 || (unsigned long int) die->die_offset == next_die_offset);
9541 die->die_offset = next_die_offset;
9542 next_die_offset += size_of_die (die);
9543
9544 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9545
9546 if (die->die_child != NULL)
9547 /* Count the null byte used to terminate sibling lists. */
9548 next_die_offset += 1;
9549 }
9550
9551 /* Size just the base type children at the start of the CU.
9552 This is needed because build_abbrev_table needs to size location
9553 expressions, and sizing of type-based stack ops needs to know die_offset
9554 values for the base types. */
9555
9556 static void
9557 calc_base_type_die_sizes (void)
9558 {
9559 unsigned long die_offset = (dwarf_split_debug_info
9560 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9561 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9562 unsigned int i;
9563 dw_die_ref base_type;
9564 #if ENABLE_ASSERT_CHECKING
9565 dw_die_ref prev = comp_unit_die ()->die_child;
9566 #endif
9567
9568 die_offset += size_of_die (comp_unit_die ());
9569 for (i = 0; base_types.iterate (i, &base_type); i++)
9570 {
9571 #if ENABLE_ASSERT_CHECKING
9572 gcc_assert (base_type->die_offset == 0
9573 && prev->die_sib == base_type
9574 && base_type->die_child == NULL
9575 && base_type->die_abbrev);
9576 prev = base_type;
9577 #endif
9578 if (abbrev_opt_start
9579 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9580 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9581 base_type->die_offset = die_offset;
9582 die_offset += size_of_die (base_type);
9583 }
9584 }
9585
9586 /* Set the marks for a die and its children. We do this so
9587 that we know whether or not a reference needs to use FORM_ref_addr; only
9588 DIEs in the same CU will be marked. We used to clear out the offset
9589 and use that as the flag, but ran into ordering problems. */
9590
9591 static void
9592 mark_dies (dw_die_ref die)
9593 {
9594 dw_die_ref c;
9595
9596 gcc_assert (!die->die_mark);
9597
9598 die->die_mark = 1;
9599 FOR_EACH_CHILD (die, c, mark_dies (c));
9600 }
9601
9602 /* Clear the marks for a die and its children. */
9603
9604 static void
9605 unmark_dies (dw_die_ref die)
9606 {
9607 dw_die_ref c;
9608
9609 if (! use_debug_types)
9610 gcc_assert (die->die_mark);
9611
9612 die->die_mark = 0;
9613 FOR_EACH_CHILD (die, c, unmark_dies (c));
9614 }
9615
9616 /* Clear the marks for a die, its children and referred dies. */
9617
9618 static void
9619 unmark_all_dies (dw_die_ref die)
9620 {
9621 dw_die_ref c;
9622 dw_attr_node *a;
9623 unsigned ix;
9624
9625 if (!die->die_mark)
9626 return;
9627 die->die_mark = 0;
9628
9629 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9630
9631 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9632 if (AT_class (a) == dw_val_class_die_ref)
9633 unmark_all_dies (AT_ref (a));
9634 }
9635
9636 /* Calculate whether the entry should appear in the final output file. It may be
9637 from a pruned type. */
9638
9639 static bool
9640 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9641 {
9642 /* By limiting gnu pubnames to definitions only, gold can generate a
9643 gdb index without entries for declarations, which don't include
9644 enough information to be useful. */
9645 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9646 return false;
9647
9648 if (table == pubname_table)
9649 {
9650 /* Enumerator names are part of the pubname table, but the
9651 parent DW_TAG_enumeration_type die may have been pruned.
9652 Don't output them if that is the case. */
9653 if (p->die->die_tag == DW_TAG_enumerator &&
9654 (p->die->die_parent == NULL
9655 || !p->die->die_parent->die_perennial_p))
9656 return false;
9657
9658 /* Everything else in the pubname table is included. */
9659 return true;
9660 }
9661
9662 /* The pubtypes table shouldn't include types that have been
9663 pruned. */
9664 return (p->die->die_offset != 0
9665 || !flag_eliminate_unused_debug_types);
9666 }
9667
9668 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9669 generated for the compilation unit. */
9670
9671 static unsigned long
9672 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9673 {
9674 unsigned long size;
9675 unsigned i;
9676 pubname_entry *p;
9677 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9678
9679 size = DWARF_PUBNAMES_HEADER_SIZE;
9680 FOR_EACH_VEC_ELT (*names, i, p)
9681 if (include_pubname_in_output (names, p))
9682 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9683
9684 size += DWARF_OFFSET_SIZE;
9685 return size;
9686 }
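/* Illustrative note (not part of the original source): each emitted
   pubname/pubtype entry is a DWARF_OFFSET_SIZE DIE offset, an optional
   one-byte GDB-index flags byte when debug_generate_pub_sections == 2, and
   the NUL-terminated name; the final DWARF_OFFSET_SIZE added above accounts
   for the terminating zero entry of the table.  */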
9687
9688 /* Return the size of the information in the .debug_aranges section. */
9689
9690 static unsigned long
9691 size_of_aranges (void)
9692 {
9693 unsigned long size;
9694
9695 size = DWARF_ARANGES_HEADER_SIZE;
9696
9697 /* Count the address/length pair for this compilation unit. */
9698 if (text_section_used)
9699 size += 2 * DWARF2_ADDR_SIZE;
9700 if (cold_text_section_used)
9701 size += 2 * DWARF2_ADDR_SIZE;
9702 if (have_multiple_function_sections)
9703 {
9704 unsigned fde_idx;
9705 dw_fde_ref fde;
9706
9707 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9708 {
9709 if (DECL_IGNORED_P (fde->decl))
9710 continue;
9711 if (!fde->in_std_section)
9712 size += 2 * DWARF2_ADDR_SIZE;
9713 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9714 size += 2 * DWARF2_ADDR_SIZE;
9715 }
9716 }
9717
9718 /* Count the two zero words used to terminate the address range table. */
9719 size += 2 * DWARF2_ADDR_SIZE;
9720 return size;
9721 }
9722 \f
9723 /* Select the encoding of an attribute value. */
9724
9725 static enum dwarf_form
9726 value_format (dw_attr_node *a)
9727 {
9728 switch (AT_class (a))
9729 {
9730 case dw_val_class_addr:
9731 /* Only very few attributes allow DW_FORM_addr. */
9732 switch (a->dw_attr)
9733 {
9734 case DW_AT_low_pc:
9735 case DW_AT_high_pc:
9736 case DW_AT_entry_pc:
9737 case DW_AT_trampoline:
9738 return (AT_index (a) == NOT_INDEXED
9739 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9740 default:
9741 break;
9742 }
9743 switch (DWARF2_ADDR_SIZE)
9744 {
9745 case 1:
9746 return DW_FORM_data1;
9747 case 2:
9748 return DW_FORM_data2;
9749 case 4:
9750 return DW_FORM_data4;
9751 case 8:
9752 return DW_FORM_data8;
9753 default:
9754 gcc_unreachable ();
9755 }
9756 case dw_val_class_loc_list:
9757 if (dwarf_split_debug_info
9758 && dwarf_version >= 5
9759 && AT_loc_list (a)->num_assigned)
9760 return DW_FORM_loclistx;
9761 /* FALLTHRU */
9762 case dw_val_class_view_list:
9763 case dw_val_class_range_list:
9764 /* For range lists in DWARF 5, use DW_FORM_rnglistx from .debug_info.dwo,
9765 but in .debug_info use DW_FORM_sec_offset, which is shorter if we
9766 care about the sizes of .debug* sections in shared libraries and
9767 executables and don't take into account relocations that affect only
9768 relocatable objects - for DW_FORM_rnglistx we'd have to emit an offset
9769 table in the .debug_rnglists section. */
9770 if (dwarf_split_debug_info
9771 && dwarf_version >= 5
9772 && AT_class (a) == dw_val_class_range_list
9773 && rnglist_idx
9774 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9775 return DW_FORM_rnglistx;
9776 if (dwarf_version >= 4)
9777 return DW_FORM_sec_offset;
9778 /* FALLTHRU */
9779 case dw_val_class_vms_delta:
9780 case dw_val_class_offset:
9781 switch (DWARF_OFFSET_SIZE)
9782 {
9783 case 4:
9784 return DW_FORM_data4;
9785 case 8:
9786 return DW_FORM_data8;
9787 default:
9788 gcc_unreachable ();
9789 }
9790 case dw_val_class_loc:
9791 if (dwarf_version >= 4)
9792 return DW_FORM_exprloc;
9793 switch (constant_size (size_of_locs (AT_loc (a))))
9794 {
9795 case 1:
9796 return DW_FORM_block1;
9797 case 2:
9798 return DW_FORM_block2;
9799 case 4:
9800 return DW_FORM_block4;
9801 default:
9802 gcc_unreachable ();
9803 }
9804 case dw_val_class_const:
9805 return DW_FORM_sdata;
9806 case dw_val_class_unsigned_const:
9807 switch (constant_size (AT_unsigned (a)))
9808 {
9809 case 1:
9810 return DW_FORM_data1;
9811 case 2:
9812 return DW_FORM_data2;
9813 case 4:
9814 /* In DWARF3 DW_AT_data_member_location with
9815 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9816 constant, so we need to use DW_FORM_udata if we need
9817 a large constant. */
9818 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9819 return DW_FORM_udata;
9820 return DW_FORM_data4;
9821 case 8:
9822 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9823 return DW_FORM_udata;
9824 return DW_FORM_data8;
9825 default:
9826 gcc_unreachable ();
9827 }
9828 case dw_val_class_const_implicit:
9829 case dw_val_class_unsigned_const_implicit:
9830 case dw_val_class_file_implicit:
9831 return DW_FORM_implicit_const;
9832 case dw_val_class_const_double:
9833 switch (HOST_BITS_PER_WIDE_INT)
9834 {
9835 case 8:
9836 return DW_FORM_data2;
9837 case 16:
9838 return DW_FORM_data4;
9839 case 32:
9840 return DW_FORM_data8;
9841 case 64:
9842 if (dwarf_version >= 5)
9843 return DW_FORM_data16;
9844 /* FALLTHRU */
9845 default:
9846 return DW_FORM_block1;
9847 }
9848 case dw_val_class_wide_int:
9849 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9850 {
9851 case 8:
9852 return DW_FORM_data1;
9853 case 16:
9854 return DW_FORM_data2;
9855 case 32:
9856 return DW_FORM_data4;
9857 case 64:
9858 return DW_FORM_data8;
9859 case 128:
9860 if (dwarf_version >= 5)
9861 return DW_FORM_data16;
9862 /* FALLTHRU */
9863 default:
9864 return DW_FORM_block1;
9865 }
9866 case dw_val_class_symview:
9867 /* ??? We might use uleb128, but then we'd have to compute
9868 .debug_info offsets in the assembler. */
9869 if (symview_upper_bound <= 0xff)
9870 return DW_FORM_data1;
9871 else if (symview_upper_bound <= 0xffff)
9872 return DW_FORM_data2;
9873 else if (symview_upper_bound <= 0xffffffff)
9874 return DW_FORM_data4;
9875 else
9876 return DW_FORM_data8;
9877 case dw_val_class_vec:
9878 switch (constant_size (a->dw_attr_val.v.val_vec.length
9879 * a->dw_attr_val.v.val_vec.elt_size))
9880 {
9881 case 1:
9882 return DW_FORM_block1;
9883 case 2:
9884 return DW_FORM_block2;
9885 case 4:
9886 return DW_FORM_block4;
9887 default:
9888 gcc_unreachable ();
9889 }
9890 case dw_val_class_flag:
9891 if (dwarf_version >= 4)
9892 {
9893 /* Currently all add_AT_flag calls pass in 1 as last argument,
9894 so DW_FORM_flag_present can be used. If that ever changes,
9895 we'll need to use DW_FORM_flag and have some optimization
9896 in build_abbrev_table that will change those to
9897 DW_FORM_flag_present if it is set to 1 in all DIEs using
9898 the same abbrev entry. */
9899 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9900 return DW_FORM_flag_present;
9901 }
9902 return DW_FORM_flag;
9903 case dw_val_class_die_ref:
9904 if (AT_ref_external (a))
9905 {
9906 if (AT_ref (a)->comdat_type_p)
9907 return DW_FORM_ref_sig8;
9908 else
9909 return DW_FORM_ref_addr;
9910 }
9911 else
9912 return DW_FORM_ref4;
9913 case dw_val_class_fde_ref:
9914 return DW_FORM_data4;
9915 case dw_val_class_lbl_id:
9916 return (AT_index (a) == NOT_INDEXED
9917 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9918 case dw_val_class_lineptr:
9919 case dw_val_class_macptr:
9920 case dw_val_class_loclistsptr:
9921 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data4;
9922 case dw_val_class_str:
9923 return AT_string_form (a);
9924 case dw_val_class_file:
9925 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9926 {
9927 case 1:
9928 return DW_FORM_data1;
9929 case 2:
9930 return DW_FORM_data2;
9931 case 4:
9932 return DW_FORM_data4;
9933 default:
9934 gcc_unreachable ();
9935 }
9936
9937 case dw_val_class_data8:
9938 return DW_FORM_data8;
9939
9940 case dw_val_class_high_pc:
9941 switch (DWARF2_ADDR_SIZE)
9942 {
9943 case 1:
9944 return DW_FORM_data1;
9945 case 2:
9946 return DW_FORM_data2;
9947 case 4:
9948 return DW_FORM_data4;
9949 case 8:
9950 return DW_FORM_data8;
9951 default:
9952 gcc_unreachable ();
9953 }
9954
9955 case dw_val_class_discr_value:
9956 return (a->dw_attr_val.v.val_discr_value.pos
9957 ? DW_FORM_udata
9958 : DW_FORM_sdata);
9959 case dw_val_class_discr_list:
9960 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9961 {
9962 case 1:
9963 return DW_FORM_block1;
9964 case 2:
9965 return DW_FORM_block2;
9966 case 4:
9967 return DW_FORM_block4;
9968 default:
9969 gcc_unreachable ();
9970 }
9971
9972 default:
9973 gcc_unreachable ();
9974 }
9975 }
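/* Illustrative note (not part of the original source): a few typical
   mappings produced above, assuming -gdwarf-4 or later:
     unsigned constant 42     -> DW_FORM_data1
     unsigned constant 300    -> DW_FORM_data2
     a flag attribute         -> DW_FORM_flag_present (no bytes in the DIE)
     a location expression    -> DW_FORM_exprloc
     a local DIE reference    -> DW_FORM_ref4  */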
9976
9977 /* Output the encoding of an attribute value. */
9978
9979 static void
9980 output_value_format (dw_attr_node *a)
9981 {
9982 enum dwarf_form form = value_format (a);
9983
9984 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9985 }
9986
9987 /* Given a die and id, produce the appropriate abbreviations. */
9988
9989 static void
9990 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9991 {
9992 unsigned ix;
9993 dw_attr_node *a_attr;
9994
9995 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9996 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9997 dwarf_tag_name (abbrev->die_tag));
9998
9999 if (abbrev->die_child != NULL)
10000 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
10001 else
10002 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
10003
10004 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
10005 {
10006 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
10007 dwarf_attr_name (a_attr->dw_attr));
10008 output_value_format (a_attr);
10009 if (value_format (a_attr) == DW_FORM_implicit_const)
10010 {
10011 if (AT_class (a_attr) == dw_val_class_file_implicit)
10012 {
10013 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
10014 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
10015 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
10016 }
10017 else
10018 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
10019 }
10020 }
10021
10022 dw2_asm_output_data (1, 0, NULL);
10023 dw2_asm_output_data (1, 0, NULL);
10024 }
10025
10026
10027 /* Output the .debug_abbrev section which defines the DIE abbreviation
10028 table. */
10029
10030 static void
10031 output_abbrev_section (void)
10032 {
10033 unsigned int abbrev_id;
10034 dw_die_ref abbrev;
10035
10036 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
10037 if (abbrev_id != 0)
10038 output_die_abbrevs (abbrev_id, abbrev);
10039
10040 /* Terminate the table. */
10041 dw2_asm_output_data (1, 0, NULL);
10042 }
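/* Illustrative note (not part of the original source): each abbreviation
   emitted above has the layout
       uleb128 abbrev code
       uleb128 DW_TAG_...
       1 byte  DW_children_yes / DW_children_no
       { uleb128 DW_AT_...,  uleb128 DW_FORM_...
         [ sleb128 value  if the form is DW_FORM_implicit_const ] } ...
       0, 0    terminator for this abbreviation
   and the section as a whole ends with a single 0 byte.  */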
10043
10044 /* Return a new location list, given the begin and end range, and the
10045 expression. */
10046
10047 static inline dw_loc_list_ref
10048 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
10049 const char *end, var_loc_view vend,
10050 const char *section)
10051 {
10052 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
10053
10054 retlist->begin = begin;
10055 retlist->begin_entry = NULL;
10056 retlist->end = end;
10057 retlist->expr = expr;
10058 retlist->section = section;
10059 retlist->vbegin = vbegin;
10060 retlist->vend = vend;
10061
10062 return retlist;
10063 }
10064
10065 /* Return true iff there's any nonzero view number in the loc list.
10066
10067 ??? When views are not enabled, we'll often extend a single range
10068 to the entire function, so that we emit a single location
10069 expression rather than a location list. With views, even with a
10070 single range, we'll output a list if start or end have a nonzero
10071 view. If we change this, we may want to stop splitting a single
10072 range in dw_loc_list just because of a nonzero view, even if it
10073 straddles across hot/cold partitions. */
10074
10075 static bool
10076 loc_list_has_views (dw_loc_list_ref list)
10077 {
10078 if (!debug_variable_location_views)
10079 return false;
10080
10081 for (dw_loc_list_ref loc = list;
10082 loc != NULL; loc = loc->dw_loc_next)
10083 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
10084 return true;
10085
10086 return false;
10087 }
10088
10089 /* Generate a new internal symbol for this location list node, if it
10090 hasn't got one yet. */
10091
10092 static inline void
10093 gen_llsym (dw_loc_list_ref list)
10094 {
10095 gcc_assert (!list->ll_symbol);
10096 list->ll_symbol = gen_internal_sym ("LLST");
10097
10098 if (!loc_list_has_views (list))
10099 return;
10100
10101 if (dwarf2out_locviews_in_attribute ())
10102 {
10103 /* Use the same label_num for the view list. */
10104 label_num--;
10105 list->vl_symbol = gen_internal_sym ("LVUS");
10106 }
10107 else
10108 list->vl_symbol = list->ll_symbol;
10109 }
10110
10111 /* Generate a symbol for the list, but only if we really want to emit
10112 it as a list. */
10113
10114 static inline void
10115 maybe_gen_llsym (dw_loc_list_ref list)
10116 {
10117 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
10118 return;
10119
10120 gen_llsym (list);
10121 }
10122
10123 /* Determine whether or not to skip loc_list entry CURR. If SIZEP is
10124 NULL, don't consider size of the location expression. If we're not
10125 to skip it, and SIZEP is non-null, store the size of CURR->expr's
10126 representation in *SIZEP. */
10127
10128 static bool
10129 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL)
10130 {
10131 /* Don't output an entry that starts and ends at the same address. */
10132 if (strcmp (curr->begin, curr->end) == 0
10133 && curr->vbegin == curr->vend && !curr->force)
10134 return true;
10135
10136 if (!sizep)
10137 return false;
10138
10139 unsigned long size = size_of_locs (curr->expr);
10140
10141 /* If the expression is too large, drop it on the floor. We could
10142 perhaps put it into DW_TAG_dwarf_procedure and refer to that
10143 in the expression, but >= 64KB expressions for a single value
10144 in a single range are unlikely to be very useful. */
10145 if (dwarf_version < 5 && size > 0xffff)
10146 return true;
10147
10148 *sizep = size;
10149
10150 return false;
10151 }
10152
10153 /* Output a view pair loclist entry for CURR, if it requires one. */
10154
10155 static void
10156 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10157 {
10158 if (!dwarf2out_locviews_in_loclist ())
10159 return;
10160
10161 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10162 return;
10163
10164 #ifdef DW_LLE_view_pair
10165 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10166
10167 if (dwarf2out_as_locview_support)
10168 {
10169 if (ZERO_VIEW_P (curr->vbegin))
10170 dw2_asm_output_data_uleb128 (0, "Location view begin");
10171 else
10172 {
10173 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10174 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10175 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10176 }
10177
10178 if (ZERO_VIEW_P (curr->vend))
10179 dw2_asm_output_data_uleb128 (0, "Location view end");
10180 else
10181 {
10182 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10183 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10184 dw2_asm_output_symname_uleb128 (label, "Location view end");
10185 }
10186 }
10187 else
10188 {
10189 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10190 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10191 }
10192 #endif /* DW_LLE_view_pair */
10193
10194 return;
10195 }
10196
10197 /* Output the location list given to us. */
10198
10199 static void
10200 output_loc_list (dw_loc_list_ref list_head)
10201 {
10202 int vcount = 0, lcount = 0;
10203
10204 if (list_head->emitted)
10205 return;
10206 list_head->emitted = true;
10207
10208 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10209 {
10210 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10211
10212 for (dw_loc_list_ref curr = list_head; curr != NULL;
10213 curr = curr->dw_loc_next)
10214 {
10215 unsigned long size;
10216
10217 if (skip_loc_list_entry (curr, &size))
10218 continue;
10219
10220 vcount++;
10221
10222 /* ?? dwarf_split_debug_info? */
10223 if (dwarf2out_as_locview_support)
10224 {
10225 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10226
10227 if (!ZERO_VIEW_P (curr->vbegin))
10228 {
10229 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10230 dw2_asm_output_symname_uleb128 (label,
10231 "View list begin (%s)",
10232 list_head->vl_symbol);
10233 }
10234 else
10235 dw2_asm_output_data_uleb128 (0,
10236 "View list begin (%s)",
10237 list_head->vl_symbol);
10238
10239 if (!ZERO_VIEW_P (curr->vend))
10240 {
10241 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10242 dw2_asm_output_symname_uleb128 (label,
10243 "View list end (%s)",
10244 list_head->vl_symbol);
10245 }
10246 else
10247 dw2_asm_output_data_uleb128 (0,
10248 "View list end (%s)",
10249 list_head->vl_symbol);
10250 }
10251 else
10252 {
10253 dw2_asm_output_data_uleb128 (curr->vbegin,
10254 "View list begin (%s)",
10255 list_head->vl_symbol);
10256 dw2_asm_output_data_uleb128 (curr->vend,
10257 "View list end (%s)",
10258 list_head->vl_symbol);
10259 }
10260 }
10261 }
10262
10263 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10264
10265 const char *last_section = NULL;
10266 const char *base_label = NULL;
10267
10268 /* Walk the location list, and output each range + expression. */
10269 for (dw_loc_list_ref curr = list_head; curr != NULL;
10270 curr = curr->dw_loc_next)
10271 {
10272 unsigned long size;
10273
10274 /* Skip this entry? If we skip it here, we must skip it in the
10275 view list above as well. */
10276 if (skip_loc_list_entry (curr, &size))
10277 continue;
10278
10279 lcount++;
10280
10281 if (dwarf_version >= 5)
10282 {
10283 if (dwarf_split_debug_info)
10284 {
10285 dwarf2out_maybe_output_loclist_view_pair (curr);
10286 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has
10287 uleb128 index into .debug_addr and uleb128 length. */
10288 dw2_asm_output_data (1, DW_LLE_startx_length,
10289 "DW_LLE_startx_length (%s)",
10290 list_head->ll_symbol);
10291 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10292 "Location list range start index "
10293 "(%s)", curr->begin);
10294 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10295 For that case we probably need to emit DW_LLE_startx_endx,
10296 but we'd need 2 .debug_addr entries rather than just one. */
10297 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10298 "Location list length (%s)",
10299 list_head->ll_symbol);
10300 }
10301 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10302 {
10303 dwarf2out_maybe_output_loclist_view_pair (curr);
10304 /* If all code is in .text section, the base address is
10305 already provided by the CU attributes. Use
10306 DW_LLE_offset_pair where both addresses are uleb128 encoded
10307 offsets against that base. */
10308 dw2_asm_output_data (1, DW_LLE_offset_pair,
10309 "DW_LLE_offset_pair (%s)",
10310 list_head->ll_symbol);
10311 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10312 "Location list begin address (%s)",
10313 list_head->ll_symbol);
10314 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10315 "Location list end address (%s)",
10316 list_head->ll_symbol);
10317 }
10318 else if (HAVE_AS_LEB128)
10319 {
10320 /* Otherwise, find out how many consecutive entries could share
10321 the same base entry. If just one, emit DW_LLE_start_length,
10322 otherwise emit DW_LLE_base_address for the base address
10323 followed by a series of DW_LLE_offset_pair. */
10324 if (last_section == NULL || curr->section != last_section)
10325 {
10326 dw_loc_list_ref curr2;
10327 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10328 curr2 = curr2->dw_loc_next)
10329 {
10330 if (strcmp (curr2->begin, curr2->end) == 0
10331 && !curr2->force)
10332 continue;
10333 break;
10334 }
10335 if (curr2 == NULL || curr->section != curr2->section)
10336 last_section = NULL;
10337 else
10338 {
10339 last_section = curr->section;
10340 base_label = curr->begin;
10341 dw2_asm_output_data (1, DW_LLE_base_address,
10342 "DW_LLE_base_address (%s)",
10343 list_head->ll_symbol);
10344 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10345 "Base address (%s)",
10346 list_head->ll_symbol);
10347 }
10348 }
10349 /* Only one entry with the same base address. Use
10350 DW_LLE_start_length with absolute address and uleb128
10351 length. */
10352 if (last_section == NULL)
10353 {
10354 dwarf2out_maybe_output_loclist_view_pair (curr);
10355 dw2_asm_output_data (1, DW_LLE_start_length,
10356 "DW_LLE_start_length (%s)",
10357 list_head->ll_symbol);
10358 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10359 "Location list begin address (%s)",
10360 list_head->ll_symbol);
10361 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10362 "Location list length "
10363 "(%s)", list_head->ll_symbol);
10364 }
10365 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
10366 DW_LLE_base_address. */
10367 else
10368 {
10369 dwarf2out_maybe_output_loclist_view_pair (curr);
10370 dw2_asm_output_data (1, DW_LLE_offset_pair,
10371 "DW_LLE_offset_pair (%s)",
10372 list_head->ll_symbol);
10373 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10374 "Location list begin address "
10375 "(%s)", list_head->ll_symbol);
10376 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10377 "Location list end address "
10378 "(%s)", list_head->ll_symbol);
10379 }
10380 }
10381 /* The assembler does not support the .uleb128 directive. Emit
10382 DW_LLE_start_end with a pair of absolute addresses. */
10383 else
10384 {
10385 dwarf2out_maybe_output_loclist_view_pair (curr);
10386 dw2_asm_output_data (1, DW_LLE_start_end,
10387 "DW_LLE_start_end (%s)",
10388 list_head->ll_symbol);
10389 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10390 "Location list begin address (%s)",
10391 list_head->ll_symbol);
10392 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10393 "Location list end address (%s)",
10394 list_head->ll_symbol);
10395 }
10396 }
10397 else if (dwarf_split_debug_info)
10398 {
10399 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit an index into .debug_addr
10400 and a 4-byte length. */
10401 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10402 "Location list start/length entry (%s)",
10403 list_head->ll_symbol);
10404 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10405 "Location list range start index (%s)",
10406 curr->begin);
10407 /* The length field is 4 bytes. If we ever need to support
10408 an 8-byte length, we can add a new DW_LLE code or fall back
10409 to DW_LLE_GNU_start_end_entry. */
10410 dw2_asm_output_delta (4, curr->end, curr->begin,
10411 "Location list range length (%s)",
10412 list_head->ll_symbol);
10413 }
10414 else if (!have_multiple_function_sections)
10415 {
10416 /* Pair of relative addresses against start of text section. */
10417 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10418 "Location list begin address (%s)",
10419 list_head->ll_symbol);
10420 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10421 "Location list end address (%s)",
10422 list_head->ll_symbol);
10423 }
10424 else
10425 {
10426 /* Pair of absolute addresses. */
10427 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10428 "Location list begin address (%s)",
10429 list_head->ll_symbol);
10430 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10431 "Location list end address (%s)",
10432 list_head->ll_symbol);
10433 }
10434
10435 /* Output the block length for this list of location operations. */
10436 if (dwarf_version >= 5)
10437 dw2_asm_output_data_uleb128 (size, "Location expression size");
10438 else
10439 {
10440 gcc_assert (size <= 0xffff);
10441 dw2_asm_output_data (2, size, "Location expression size");
10442 }
10443
10444 output_loc_sequence (curr->expr, -1);
10445 }
10446
10447 /* And finally list termination. */
10448 if (dwarf_version >= 5)
10449 dw2_asm_output_data (1, DW_LLE_end_of_list,
10450 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10451 else if (dwarf_split_debug_info)
10452 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10453 "Location list terminator (%s)",
10454 list_head->ll_symbol);
10455 else
10456 {
10457 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10458 "Location list terminator begin (%s)",
10459 list_head->ll_symbol);
10460 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10461 "Location list terminator end (%s)",
10462 list_head->ll_symbol);
10463 }
10464
10465 gcc_assert (!list_head->vl_symbol
10466 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10467 }
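/* Illustrative note (not part of the original source): with -gdwarf-5 and a
   single .text section, one range of the list above comes out roughly as
       DW_LLE_offset_pair          (1 byte)
       uleb128 begin - text section label
       uleb128 end   - text section label
       uleb128 size of the DWARF expression
       <expression bytes>
   followed by DW_LLE_end_of_list once the whole list has been walked.  */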
10468
10469 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10470 section. Emit a relocated reference if val_entry is RELOCATED_OFFSET,
10471 otherwise emit an indirect reference. */
10472
10473 static void
10474 output_range_list_offset (dw_attr_node *a)
10475 {
10476 const char *name = dwarf_attr_name (a->dw_attr);
10477
10478 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10479 {
10480 if (dwarf_version >= 5)
10481 {
10482 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10483 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
10484 debug_ranges_section, "%s", name);
10485 }
10486 else
10487 {
10488 char *p = strchr (ranges_section_label, '\0');
10489 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10490 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10491 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
10492 debug_ranges_section, "%s", name);
10493 *p = '\0';
10494 }
10495 }
10496 else if (dwarf_version >= 5)
10497 {
10498 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10499 gcc_assert (rnglist_idx);
10500 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10501 }
10502 else
10503 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10504 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10505 "%s (offset from %s)", name, ranges_section_label);
10506 }
10507
10508 /* Output the offset into the debug_loc section. */
10509
10510 static void
10511 output_loc_list_offset (dw_attr_node *a)
10512 {
10513 char *sym = AT_loc_list (a)->ll_symbol;
10514
10515 gcc_assert (sym);
10516 if (!dwarf_split_debug_info)
10517 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10518 "%s", dwarf_attr_name (a->dw_attr));
10519 else if (dwarf_version >= 5)
10520 {
10521 gcc_assert (AT_loc_list (a)->num_assigned);
10522 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10523 dwarf_attr_name (a->dw_attr),
10524 sym);
10525 }
10526 else
10527 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10528 "%s", dwarf_attr_name (a->dw_attr));
10529 }
10530
10531 /* Output the offset into the debug_loc section for a view list. */
10532
10533 static void
10534 output_view_list_offset (dw_attr_node *a)
10535 {
10536 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10537
10538 gcc_assert (sym);
10539 if (dwarf_split_debug_info)
10540 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10541 "%s", dwarf_attr_name (a->dw_attr));
10542 else
10543 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10544 "%s", dwarf_attr_name (a->dw_attr));
10545 }
10546
10547 /* Output an attribute's index or value appropriately. */
10548
10549 static void
10550 output_attr_index_or_value (dw_attr_node *a)
10551 {
10552 const char *name = dwarf_attr_name (a->dw_attr);
10553
10554 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10555 {
10556 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10557 return;
10558 }
10559 switch (AT_class (a))
10560 {
10561 case dw_val_class_addr:
10562 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10563 break;
10564 case dw_val_class_high_pc:
10565 case dw_val_class_lbl_id:
10566 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10567 break;
10568 default:
10569 gcc_unreachable ();
10570 }
10571 }
10572
10573 /* Output a type signature. */
10574
10575 static inline void
10576 output_signature (const char *sig, const char *name)
10577 {
10578 int i;
10579
10580 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10581 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10582 }
10583
10584 /* Output a discriminant value. */
10585
10586 static inline void
10587 output_discr_value (dw_discr_value *discr_value, const char *name)
10588 {
10589 if (discr_value->pos)
10590 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10591 else
10592 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10593 }
10594
10595 /* Output the DIE and its attributes. Called recursively to generate
10596 the definitions of each child DIE. */
10597
10598 static void
10599 output_die (dw_die_ref die)
10600 {
10601 dw_attr_node *a;
10602 dw_die_ref c;
10603 unsigned long size;
10604 unsigned ix;
10605
10606 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10607 (unsigned long)die->die_offset,
10608 dwarf_tag_name (die->die_tag));
10609
10610 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10611 {
10612 const char *name = dwarf_attr_name (a->dw_attr);
10613
10614 switch (AT_class (a))
10615 {
10616 case dw_val_class_addr:
10617 output_attr_index_or_value (a);
10618 break;
10619
10620 case dw_val_class_offset:
10621 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10622 "%s", name);
10623 break;
10624
10625 case dw_val_class_range_list:
10626 output_range_list_offset (a);
10627 break;
10628
10629 case dw_val_class_loc:
10630 size = size_of_locs (AT_loc (a));
10631
10632 /* Output the block length for this list of location operations. */
10633 if (dwarf_version >= 4)
10634 dw2_asm_output_data_uleb128 (size, "%s", name);
10635 else
10636 dw2_asm_output_data (constant_size (size), size, "%s", name);
10637
10638 output_loc_sequence (AT_loc (a), -1);
10639 break;
10640
10641 case dw_val_class_const:
10642 /* ??? It would be slightly more efficient to use a scheme like the one
10643 used for unsigned constants below, but gdb 4.x does not sign
10644 extend. Gdb 5.x does sign extend. */
10645 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10646 break;
10647
10648 case dw_val_class_unsigned_const:
10649 {
10650 int csize = constant_size (AT_unsigned (a));
10651 if (dwarf_version == 3
10652 && a->dw_attr == DW_AT_data_member_location
10653 && csize >= 4)
10654 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10655 else
10656 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10657 }
10658 break;
10659
10660 case dw_val_class_symview:
10661 {
10662 int vsize;
10663 if (symview_upper_bound <= 0xff)
10664 vsize = 1;
10665 else if (symview_upper_bound <= 0xffff)
10666 vsize = 2;
10667 else if (symview_upper_bound <= 0xffffffff)
10668 vsize = 4;
10669 else
10670 vsize = 8;
10671 dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
10672 "%s", name);
10673 }
10674 break;
10675
10676 case dw_val_class_const_implicit:
10677 if (flag_debug_asm)
10678 fprintf (asm_out_file, "\t\t\t%s %s ("
10679 HOST_WIDE_INT_PRINT_DEC ")\n",
10680 ASM_COMMENT_START, name, AT_int (a));
10681 break;
10682
10683 case dw_val_class_unsigned_const_implicit:
10684 if (flag_debug_asm)
10685 fprintf (asm_out_file, "\t\t\t%s %s ("
10686 HOST_WIDE_INT_PRINT_HEX ")\n",
10687 ASM_COMMENT_START, name, AT_unsigned (a));
10688 break;
10689
10690 case dw_val_class_const_double:
10691 {
10692 unsigned HOST_WIDE_INT first, second;
10693
10694 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10695 dw2_asm_output_data (1,
10696 HOST_BITS_PER_DOUBLE_INT
10697 / HOST_BITS_PER_CHAR,
10698 NULL);
10699
10700 if (WORDS_BIG_ENDIAN)
10701 {
10702 first = a->dw_attr_val.v.val_double.high;
10703 second = a->dw_attr_val.v.val_double.low;
10704 }
10705 else
10706 {
10707 first = a->dw_attr_val.v.val_double.low;
10708 second = a->dw_attr_val.v.val_double.high;
10709 }
10710
10711 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10712 first, "%s", name);
10713 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10714 second, NULL);
10715 }
10716 break;
10717
10718 case dw_val_class_wide_int:
10719 {
10720 int i;
10721 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10722 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10723 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10724 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10725 * l, NULL);
10726
10727 if (WORDS_BIG_ENDIAN)
10728 for (i = len - 1; i >= 0; --i)
10729 {
10730 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10731 "%s", name);
10732 name = "";
10733 }
10734 else
10735 for (i = 0; i < len; ++i)
10736 {
10737 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10738 "%s", name);
10739 name = "";
10740 }
10741 }
10742 break;
10743
10744 case dw_val_class_vec:
10745 {
10746 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10747 unsigned int len = a->dw_attr_val.v.val_vec.length;
10748 unsigned int i;
10749 unsigned char *p;
10750
10751 dw2_asm_output_data (constant_size (len * elt_size),
10752 len * elt_size, "%s", name);
10753 if (elt_size > sizeof (HOST_WIDE_INT))
10754 {
10755 elt_size /= 2;
10756 len *= 2;
10757 }
10758 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10759 i < len;
10760 i++, p += elt_size)
10761 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10762 "fp or vector constant word %u", i);
10763 break;
10764 }
10765
10766 case dw_val_class_flag:
10767 if (dwarf_version >= 4)
10768 {
10769 /* Currently all add_AT_flag calls pass in 1 as last argument,
10770 so DW_FORM_flag_present can be used. If that ever changes,
10771 we'll need to use DW_FORM_flag and have some optimization
10772 in build_abbrev_table that will change those to
10773 DW_FORM_flag_present if it is set to 1 in all DIEs using
10774 the same abbrev entry. */
10775 gcc_assert (AT_flag (a) == 1);
10776 if (flag_debug_asm)
10777 fprintf (asm_out_file, "\t\t\t%s %s\n",
10778 ASM_COMMENT_START, name);
10779 break;
10780 }
10781 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10782 break;
10783
10784 case dw_val_class_loc_list:
10785 output_loc_list_offset (a);
10786 break;
10787
10788 case dw_val_class_view_list:
10789 output_view_list_offset (a);
10790 break;
10791
10792 case dw_val_class_die_ref:
10793 if (AT_ref_external (a))
10794 {
10795 if (AT_ref (a)->comdat_type_p)
10796 {
10797 comdat_type_node *type_node
10798 = AT_ref (a)->die_id.die_type_node;
10799
10800 gcc_assert (type_node);
10801 output_signature (type_node->signature, name);
10802 }
10803 else
10804 {
10805 const char *sym = AT_ref (a)->die_id.die_symbol;
10806 int size;
10807
10808 gcc_assert (sym);
10809 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10810 length, whereas in DWARF3 it's always sized as an
10811 offset. */
10812 if (dwarf_version == 2)
10813 size = DWARF2_ADDR_SIZE;
10814 else
10815 size = DWARF_OFFSET_SIZE;
10816 /* ??? We cannot unconditionally output die_offset if
10817 non-zero - others might create references to those
10818 DIEs via symbols.
10819 And we do not clear its DIE offset after outputting it
10820 (the label refers to the actual DIEs, not to the
10821 DWARF CU unit header, which is the case in which
10822 label + offset would be the correct thing to do).
10823 ??? This is the reason for the with_offset flag. */
10824 if (AT_ref (a)->with_offset)
10825 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10826 debug_info_section, "%s", name);
10827 else
10828 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10829 name);
10830 }
10831 }
10832 else
10833 {
10834 gcc_assert (AT_ref (a)->die_offset);
10835 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10836 "%s", name);
10837 }
10838 break;
10839
10840 case dw_val_class_fde_ref:
10841 {
10842 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10843
10844 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10845 a->dw_attr_val.v.val_fde_index * 2);
10846 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10847 "%s", name);
10848 }
10849 break;
10850
10851 case dw_val_class_vms_delta:
10852 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10853 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10854 AT_vms_delta2 (a), AT_vms_delta1 (a),
10855 "%s", name);
10856 #else
10857 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10858 AT_vms_delta2 (a), AT_vms_delta1 (a),
10859 "%s", name);
10860 #endif
10861 break;
10862
10863 case dw_val_class_lbl_id:
10864 output_attr_index_or_value (a);
10865 break;
10866
10867 case dw_val_class_lineptr:
10868 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10869 debug_line_section, "%s", name);
10870 break;
10871
10872 case dw_val_class_macptr:
10873 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10874 debug_macinfo_section, "%s", name);
10875 break;
10876
10877 case dw_val_class_loclistsptr:
10878 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10879 debug_loc_section, "%s", name);
10880 break;
10881
10882 case dw_val_class_str:
10883 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10884 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10885 a->dw_attr_val.v.val_str->label,
10886 debug_str_section,
10887 "%s: \"%s\"", name, AT_string (a));
10888 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10889 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10890 a->dw_attr_val.v.val_str->label,
10891 debug_line_str_section,
10892 "%s: \"%s\"", name, AT_string (a));
10893 else if (a->dw_attr_val.v.val_str->form == dwarf_FORM (DW_FORM_strx))
10894 dw2_asm_output_data_uleb128 (AT_index (a),
10895 "%s: \"%s\"", name, AT_string (a));
10896 else
10897 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10898 break;
10899
10900 case dw_val_class_file:
10901 {
10902 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10903
10904 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10905 a->dw_attr_val.v.val_file->filename);
10906 break;
10907 }
10908
10909 case dw_val_class_file_implicit:
10910 if (flag_debug_asm)
10911 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10912 ASM_COMMENT_START, name,
10913 maybe_emit_file (a->dw_attr_val.v.val_file),
10914 a->dw_attr_val.v.val_file->filename);
10915 break;
10916
10917 case dw_val_class_data8:
10918 {
10919 int i;
10920
10921 for (i = 0; i < 8; i++)
10922 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10923 i == 0 ? "%s" : NULL, name);
10924 break;
10925 }
10926
10927 case dw_val_class_high_pc:
10928 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10929 get_AT_low_pc (die), "DW_AT_high_pc");
10930 break;
10931
10932 case dw_val_class_discr_value:
10933 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10934 break;
10935
10936 case dw_val_class_discr_list:
10937 {
10938 dw_discr_list_ref list = AT_discr_list (a);
10939 const int size = size_of_discr_list (list);
10940
10941 /* This is a block, so output its length first. */
10942 dw2_asm_output_data (constant_size (size), size,
10943 "%s: block size", name);
10944
10945 for (; list != NULL; list = list->dw_discr_next)
10946 {
10947 /* One byte for the discriminant value descriptor, and then as
10948 many LEB128 numbers as required. */
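/* For illustration (a sketch; the LEB128s are signed or unsigned
   depending on the discriminant type): a variant choice such as
   "when 1 | 5 .. 7" comes out as DW_DSC_label, 1, DW_DSC_range, 5, 7.  */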
10949 if (list->dw_discr_range)
10950 dw2_asm_output_data (1, DW_DSC_range,
10951 "%s: DW_DSC_range", name);
10952 else
10953 dw2_asm_output_data (1, DW_DSC_label,
10954 "%s: DW_DSC_label", name);
10955
10956 output_discr_value (&list->dw_discr_lower_bound, name);
10957 if (list->dw_discr_range)
10958 output_discr_value (&list->dw_discr_upper_bound, name);
10959 }
10960 break;
10961 }
10962
10963 default:
10964 gcc_unreachable ();
10965 }
10966 }
10967
10968 FOR_EACH_CHILD (die, c, output_die (c));
10969
10970 /* Add null byte to terminate sibling list. */
10971 if (die->die_child != NULL)
10972 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10973 (unsigned long) die->die_offset);
10974 }
10975
10976 /* Output the dwarf version number. */
10977
10978 static void
10979 output_dwarf_version ()
10980 {
10981 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
10982 views in loclist. That will change eventually. */
10983 if (dwarf_version == 6)
10984 {
10985 static bool once;
10986 if (!once)
10987 {
10988 warning (0, "%<-gdwarf-6%> is output as version 5 with "
10989 "incompatibilities");
10990 once = true;
10991 }
10992 dw2_asm_output_data (2, 5, "DWARF version number");
10993 }
10994 else
10995 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10996 }
10997
10998 /* Output the compilation unit that appears at the beginning of the
10999 .debug_info section, and precedes the DIE descriptions. */
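/* For reference, a sketch of the layout produced: with 32-bit DWARF, a
   pre-DWARF 5 CU header is a 4-byte unit length, a 2-byte version, a
   4-byte .debug_abbrev offset and a 1-byte address size; DWARF 5 adds a
   1-byte unit type and moves the address size before the abbrev offset,
   which is the ordering the code below follows.  */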
11000
11001 static void
11002 output_compilation_unit_header (enum dwarf_unit_type ut)
11003 {
11004 if (!XCOFF_DEBUGGING_INFO)
11005 {
11006 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11007 dw2_asm_output_data (4, 0xffffffff,
11008 "Initial length escape value indicating 64-bit DWARF extension");
11009 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11010 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
11011 "Length of Compilation Unit Info");
11012 }
11013
11014 output_dwarf_version ();
11015 if (dwarf_version >= 5)
11016 {
11017 const char *name;
11018 switch (ut)
11019 {
11020 case DW_UT_compile: name = "DW_UT_compile"; break;
11021 case DW_UT_type: name = "DW_UT_type"; break;
11022 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
11023 case DW_UT_split_type: name = "DW_UT_split_type"; break;
11024 default: gcc_unreachable ();
11025 }
11026 dw2_asm_output_data (1, ut, "%s", name);
11027 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11028 }
11029 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
11030 debug_abbrev_section,
11031 "Offset Into Abbrev. Section");
11032 if (dwarf_version < 5)
11033 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11034 }
11035
11036 /* Output the compilation unit DIE and its children. */
11037
11038 static void
11039 output_comp_unit (dw_die_ref die, int output_if_empty,
11040 const unsigned char *dwo_id)
11041 {
11042 const char *secname, *oldsym;
11043 char *tmp;
11044
11045 /* Unless we are outputting the main CU, we may throw away empty ones. */
11046 if (!output_if_empty && die->die_child == NULL)
11047 return;
11048
11049 /* Even if there are no children of this DIE, we must output the information
11050 about the compilation unit. Otherwise, on an empty translation unit, we
11051 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
11052 will then complain when examining the file. First mark all the DIEs in
11053 this CU so we know which get local refs. */
11054 mark_dies (die);
11055
11056 external_ref_hash_type *extern_map = optimize_external_refs (die);
11057
11058 /* For now, optimize only the main CU; in order to optimize the rest
11059 we'd need to see all of them earlier. Leave the rest for post-linking
11060 tools like DWZ. */
11061 if (die == comp_unit_die ())
11062 abbrev_opt_start = vec_safe_length (abbrev_die_table);
11063
11064 build_abbrev_table (die, extern_map);
11065
11066 optimize_abbrev_table ();
11067
11068 delete extern_map;
11069
11070 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11071 next_die_offset = (dwo_id
11072 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11073 : DWARF_COMPILE_UNIT_HEADER_SIZE);
11074 calc_die_sizes (die);
11075
11076 oldsym = die->die_id.die_symbol;
11077 if (oldsym && die->comdat_type_p)
11078 {
11079 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
11080
11081 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
11082 secname = tmp;
11083 die->die_id.die_symbol = NULL;
11084 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11085 }
11086 else
11087 {
11088 switch_to_section (debug_info_section);
11089 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
11090 info_section_emitted = true;
11091 }
11092
11093 /* For LTO cross unit DIE refs we want a symbol at the start of the
11094 debug info section, not on the CU DIE. */
11095 if ((flag_generate_lto || flag_generate_offload) && oldsym)
11096 {
11097 /* ??? No way to get visibility assembled without a decl. */
11098 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
11099 get_identifier (oldsym), char_type_node);
11100 TREE_PUBLIC (decl) = true;
11101 TREE_STATIC (decl) = true;
11102 DECL_ARTIFICIAL (decl) = true;
11103 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
11104 DECL_VISIBILITY_SPECIFIED (decl) = true;
11105 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
11106 #ifdef ASM_WEAKEN_LABEL
11107 /* We prefer a .weak because that handles duplicates from duplicate
11108 archive members in a graceful way. */
11109 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
11110 #else
11111 targetm.asm_out.globalize_label (asm_out_file, oldsym);
11112 #endif
11113 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
11114 }
11115
11116 /* Output debugging information. */
11117 output_compilation_unit_header (dwo_id
11118 ? DW_UT_split_compile : DW_UT_compile);
11119 if (dwarf_version >= 5)
11120 {
11121 if (dwo_id != NULL)
11122 for (int i = 0; i < 8; i++)
11123 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11124 }
11125 output_die (die);
11126
11127 /* Leave the marks on the main CU, so we can check them in
11128 output_pubnames. */
11129 if (oldsym)
11130 {
11131 unmark_dies (die);
11132 die->die_id.die_symbol = oldsym;
11133 }
11134 }
11135
11136 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
11137 and .debug_pubtypes. This is configured per-target, but can be
11138 overridden by the -gpubnames or -gno-pubnames options. */
11139
11140 static inline bool
11141 want_pubnames (void)
11142 {
11143 if (debug_info_level <= DINFO_LEVEL_TERSE
11144 /* Names and types go to the early debug part only. */
11145 || in_lto_p)
11146 return false;
11147 if (debug_generate_pub_sections != -1)
11148 return debug_generate_pub_sections;
11149 return targetm.want_debug_pub_sections;
11150 }
11151
11152 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
11153
11154 static void
11155 add_AT_pubnames (dw_die_ref die)
11156 {
11157 if (want_pubnames ())
11158 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
11159 }
11160
11161 /* Add a string attribute value to a skeleton DIE. */
11162
11163 static inline void
11164 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
11165 const char *str)
11166 {
11167 dw_attr_node attr;
11168 struct indirect_string_node *node;
11169
11170 if (! skeleton_debug_str_hash)
11171 skeleton_debug_str_hash
11172 = hash_table<indirect_string_hasher>::create_ggc (10);
11173
11174 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11175 find_string_form (node);
11176 if (node->form == dwarf_FORM (DW_FORM_strx))
11177 node->form = DW_FORM_strp;
11178
11179 attr.dw_attr = attr_kind;
11180 attr.dw_attr_val.val_class = dw_val_class_str;
11181 attr.dw_attr_val.val_entry = NULL;
11182 attr.dw_attr_val.v.val_str = node;
11183 add_dwarf_attr (die, &attr);
11184 }
11185
11186 /* Helper function to generate top-level dies for skeleton debug_info and
11187 debug_types. */
11188
11189 static void
11190 add_top_level_skeleton_die_attrs (dw_die_ref die)
11191 {
11192 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11193 const char *comp_dir = comp_dir_string ();
11194
11195 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11196 if (comp_dir != NULL)
11197 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11198 add_AT_pubnames (die);
11199 add_AT_lineptr (die, dwarf_AT (DW_AT_addr_base), debug_addr_section_label);
11200 }
11201
11202 /* Output skeleton debug sections that point to the dwo file. */
11203
11204 static void
11205 output_skeleton_debug_sections (dw_die_ref comp_unit,
11206 const unsigned char *dwo_id)
11207 {
11208 /* These attributes will be found in the full debug_info section. */
11209 remove_AT (comp_unit, DW_AT_producer);
11210 remove_AT (comp_unit, DW_AT_language);
11211
11212 switch_to_section (debug_skeleton_info_section);
11213 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11214
11215 /* Produce the skeleton compilation-unit header. This one differs enough from
11216 a normal CU header that it's better not to call
11217 output_compilation_unit_header. */
11218 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11219 dw2_asm_output_data (4, 0xffffffff,
11220 "Initial length escape value indicating 64-bit "
11221 "DWARF extension");
11222
11223 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11224 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11225 - DWARF_INITIAL_LENGTH_SIZE
11226 + size_of_die (comp_unit),
11227 "Length of Compilation Unit Info");
11228 output_dwarf_version ();
11229 if (dwarf_version >= 5)
11230 {
11231 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11232 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11233 }
11234 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
11235 debug_skeleton_abbrev_section,
11236 "Offset Into Abbrev. Section");
11237 if (dwarf_version < 5)
11238 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11239 else
11240 for (int i = 0; i < 8; i++)
11241 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11242
11243 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11244 output_die (comp_unit);
11245
11246 /* Build the skeleton debug_abbrev section. */
11247 switch_to_section (debug_skeleton_abbrev_section);
11248 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11249
11250 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11251
11252 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11253 }
11254
11255 /* Output a comdat type unit DIE and its children. */
11256
11257 static void
11258 output_comdat_type_unit (comdat_type_node *node,
11259 bool early_lto_debug ATTRIBUTE_UNUSED)
11260 {
11261 const char *secname;
11262 char *tmp;
11263 int i;
11264 #if defined (OBJECT_FORMAT_ELF)
11265 tree comdat_key;
11266 #endif
11267
11268 /* First mark all the DIEs in this CU so we know which get local refs. */
11269 mark_dies (node->root_die);
11270
11271 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11272
11273 build_abbrev_table (node->root_die, extern_map);
11274
11275 delete extern_map;
11276 extern_map = NULL;
11277
11278 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11279 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11280 calc_die_sizes (node->root_die);
11281
11282 #if defined (OBJECT_FORMAT_ELF)
11283 if (dwarf_version >= 5)
11284 {
11285 if (!dwarf_split_debug_info)
11286 secname = early_lto_debug ? DEBUG_LTO_INFO_SECTION : DEBUG_INFO_SECTION;
11287 else
11288 secname = (early_lto_debug
11289 ? DEBUG_LTO_DWO_INFO_SECTION : DEBUG_DWO_INFO_SECTION);
11290 }
11291 else if (!dwarf_split_debug_info)
11292 secname = early_lto_debug ? ".gnu.debuglto_.debug_types" : ".debug_types";
11293 else
11294 secname = (early_lto_debug
11295 ? ".gnu.debuglto_.debug_types.dwo" : ".debug_types.dwo");
11296
11297 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11298 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11299 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11300 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
11301 comdat_key = get_identifier (tmp);
11302 targetm.asm_out.named_section (secname,
11303 SECTION_DEBUG | SECTION_LINKONCE,
11304 comdat_key);
11305 #else
11306 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11307 sprintf (tmp, (dwarf_version >= 5
11308 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11309 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11310 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11311 secname = tmp;
11312 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11313 #endif
11314
11315 /* Output debugging information. */
11316 output_compilation_unit_header (dwarf_split_debug_info
11317 ? DW_UT_split_type : DW_UT_type);
11318 output_signature (node->signature, "Type Signature");
11319 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
11320 "Offset to Type DIE");
11321 output_die (node->root_die);
11322
11323 unmark_dies (node->root_die);
11324 }
11325
11326 /* Return the DWARF2/3 pubname associated with a decl. */
11327
11328 static const char *
11329 dwarf2_name (tree decl, int scope)
11330 {
11331 if (DECL_NAMELESS (decl))
11332 return NULL;
11333 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11334 }
11335
11336 /* Add a new entry to .debug_pubnames if appropriate. */
11337
11338 static void
11339 add_pubname_string (const char *str, dw_die_ref die)
11340 {
11341 pubname_entry e;
11342
11343 e.die = die;
11344 e.name = xstrdup (str);
11345 vec_safe_push (pubname_table, e);
11346 }
11347
11348 static void
11349 add_pubname (tree decl, dw_die_ref die)
11350 {
11351 if (!want_pubnames ())
11352 return;
11353
11354 /* Don't add items to the table when we expect that the consumer will have
11355 just read the enclosing die. For example, if the consumer is looking at a
11356 class_member, it will either be inside the class already, or will have just
11357 looked up the class to find the member. Either way, searching the class is
11358 faster than searching the index. */
11359 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11360 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11361 {
11362 const char *name = dwarf2_name (decl, 1);
11363
11364 if (name)
11365 add_pubname_string (name, die);
11366 }
11367 }
11368
11369 /* Add an enumerator to the pubnames section. */
11370
11371 static void
11372 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11373 {
11374 pubname_entry e;
11375
11376 gcc_assert (scope_name);
11377 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11378 e.die = die;
11379 vec_safe_push (pubname_table, e);
11380 }
11381
11382 /* Add a new entry to .debug_pubtypes if appropriate. */
11383
11384 static void
11385 add_pubtype (tree decl, dw_die_ref die)
11386 {
11387 pubname_entry e;
11388
11389 if (!want_pubnames ())
11390 return;
11391
11392 if ((TREE_PUBLIC (decl)
11393 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11394 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11395 {
11396 tree scope = NULL;
11397 const char *scope_name = "";
11398 const char *sep = is_cxx () ? "::" : ".";
11399 const char *name;
11400
11401 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11402 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11403 {
11404 scope_name = lang_hooks.dwarf_name (scope, 1);
11405 if (scope_name != NULL && scope_name[0] != '\0')
11406 scope_name = concat (scope_name, sep, NULL);
11407 else
11408 scope_name = "";
11409 }
11410
11411 if (TYPE_P (decl))
11412 name = type_tag (decl);
11413 else
11414 name = lang_hooks.dwarf_name (decl, 1);
11415
11416 /* If we don't have a name for the type, there's no point in adding
11417 it to the table. */
11418 if (name != NULL && name[0] != '\0')
11419 {
11420 e.die = die;
11421 e.name = concat (scope_name, name, NULL);
11422 vec_safe_push (pubtype_table, e);
11423 }
11424
11425 /* Although it might be more consistent to add the pubinfo for the
11426 enumerators as their dies are created, they should only be added if the
11427 enum type meets the criteria above. So rather than re-check the parent
11428 enum type whenever an enumerator die is created, just output them all
11429 here. This isn't protected by the name conditional because anonymous
11430 enums don't have names. */
11431 if (die->die_tag == DW_TAG_enumeration_type)
11432 {
11433 dw_die_ref c;
11434
11435 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11436 }
11437 }
11438 }
11439
11440 /* Output a single entry in the pubnames table. */
11441
11442 static void
11443 output_pubname (dw_offset die_offset, pubname_entry *entry)
11444 {
11445 dw_die_ref die = entry->die;
11446 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11447
11448 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
11449
11450 if (debug_generate_pub_sections == 2)
11451 {
11452 /* This logic follows gdb's method for determining the value of the flag
11453 byte. */
11454 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11455 switch (die->die_tag)
11456 {
11457 case DW_TAG_typedef:
11458 case DW_TAG_base_type:
11459 case DW_TAG_subrange_type:
11460 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11461 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11462 break;
11463 case DW_TAG_enumerator:
11464 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11465 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11466 if (!is_cxx ())
11467 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11468 break;
11469 case DW_TAG_subprogram:
11470 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11471 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11472 if (!is_ada ())
11473 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11474 break;
11475 case DW_TAG_constant:
11476 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11477 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11478 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11479 break;
11480 case DW_TAG_variable:
11481 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11482 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11483 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11484 break;
11485 case DW_TAG_namespace:
11486 case DW_TAG_imported_declaration:
11487 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11488 break;
11489 case DW_TAG_class_type:
11490 case DW_TAG_interface_type:
11491 case DW_TAG_structure_type:
11492 case DW_TAG_union_type:
11493 case DW_TAG_enumeration_type:
11494 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11495 if (!is_cxx ())
11496 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11497 break;
11498 default:
11499 /* An unusual tag. Leave the flag-byte empty. */
11500 break;
11501 }
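/* A sketch of what the switch above yields: the symbol kind and the
   static bit live in the bits above the GDB_INDEX_CU_BITSIZE-bit CU
   index field, so e.g. a file-static C function ends up with
   GDB_INDEX_SYMBOL_KIND_FUNCTION plus the static bit, and only that
   high byte is emitted below.  */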
11502 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11503 "GDB-index flags");
11504 }
11505
11506 dw2_asm_output_nstring (entry->name, -1, "external name");
11507 }
11508
11509
11510 /* Output the public names table used to speed up access to externally
11511 visible names; or the public types table used to find type definitions. */
11512
11513 static void
11514 output_pubnames (vec<pubname_entry, va_gc> *names)
11515 {
11516 unsigned i;
11517 unsigned long pubnames_length = size_of_pubnames (names);
11518 pubname_entry *pub;
11519
11520 if (!XCOFF_DEBUGGING_INFO)
11521 {
11522 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11523 dw2_asm_output_data (4, 0xffffffff,
11524 "Initial length escape value indicating 64-bit DWARF extension");
11525 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
11526 "Pub Info Length");
11527 }
11528
11529 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11530 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11531
11532 if (dwarf_split_debug_info)
11533 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11534 debug_skeleton_info_section,
11535 "Offset of Compilation Unit Info");
11536 else
11537 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11538 debug_info_section,
11539 "Offset of Compilation Unit Info");
11540 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
11541 "Compilation Unit Length");
11542
11543 FOR_EACH_VEC_ELT (*names, i, pub)
11544 {
11545 if (include_pubname_in_output (names, pub))
11546 {
11547 dw_offset die_offset = pub->die->die_offset;
11548
11549 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11550 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11551 gcc_assert (pub->die->die_mark);
11552
11553 /* If we're putting types in their own .debug_types sections,
11554 the .debug_pubtypes table will still point to the compile
11555 unit (not the type unit), so we want to use the offset of
11556 the skeleton DIE (if there is one). */
11557 if (pub->die->comdat_type_p && names == pubtype_table)
11558 {
11559 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11560
11561 if (type_node != NULL)
11562 die_offset = (type_node->skeleton_die != NULL
11563 ? type_node->skeleton_die->die_offset
11564 : comp_unit_die ()->die_offset);
11565 }
11566
11567 output_pubname (die_offset, pub);
11568 }
11569 }
11570
11571 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
11572 }
11573
11574 /* Output public names and types tables if necessary. */
11575
11576 static void
11577 output_pubtables (void)
11578 {
11579 if (!want_pubnames () || !info_section_emitted)
11580 return;
11581
11582 switch_to_section (debug_pubnames_section);
11583 output_pubnames (pubname_table);
11584 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11585 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11586 simply won't look for the section. */
11587 switch_to_section (debug_pubtypes_section);
11588 output_pubnames (pubtype_table);
11589 }
11590
11591
11592 /* Output the information that goes into the .debug_aranges table.
11593 Namely, define the beginning and ending address range of the
11594 text section generated for this compilation unit. */
11595
11596 static void
11597 output_aranges (void)
11598 {
11599 unsigned i;
11600 unsigned long aranges_length = size_of_aranges ();
11601
11602 if (!XCOFF_DEBUGGING_INFO)
11603 {
11604 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11605 dw2_asm_output_data (4, 0xffffffff,
11606 "Initial length escape value indicating 64-bit DWARF extension");
11607 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
11608 "Length of Address Ranges Info");
11609 }
11610
11611 /* The version number for aranges is still 2, even in DWARF 5. */
11612 dw2_asm_output_data (2, 2, "DWARF aranges version");
11613 if (dwarf_split_debug_info)
11614 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11615 debug_skeleton_info_section,
11616 "Offset of Compilation Unit Info");
11617 else
11618 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11619 debug_info_section,
11620 "Offset of Compilation Unit Info");
11621 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11622 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11623
11624 /* We need to align to twice the pointer size here. */
11625 if (DWARF_ARANGES_PAD_SIZE)
11626 {
11627 /* Pad using 2-byte words so that the padding is correct for any
11628 pointer size. */
11629 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11630 2 * DWARF2_ADDR_SIZE);
11631 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11632 dw2_asm_output_data (2, 0, NULL);
11633 }
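/* As a worked example (assuming 32-bit DWARF): the header emitted above
   is 4 + 2 + 4 + 1 + 1 = 12 bytes, so aligning the address pairs that
   follow to 2 * DWARF2_ADDR_SIZE means padding out to 16 bytes, i.e.
   two 2-byte words of zeros.  */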
11634
11635 /* Do not output these entries if the sections were not
11636 used; in that case the length will be 0 and
11637 the address may end up as 0 if the section is discarded by ld
11638 --gc-sections, leaving an invalid (0, 0) entry that can be
11639 confused with the terminator. */
11640 if (text_section_used)
11641 {
11642 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11643 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11644 text_section_label, "Length");
11645 }
11646 if (cold_text_section_used)
11647 {
11648 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11649 "Address");
11650 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11651 cold_text_section_label, "Length");
11652 }
11653
11654 if (have_multiple_function_sections)
11655 {
11656 unsigned fde_idx;
11657 dw_fde_ref fde;
11658
11659 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11660 {
11661 if (DECL_IGNORED_P (fde->decl))
11662 continue;
11663 if (!fde->in_std_section)
11664 {
11665 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11666 "Address");
11667 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11668 fde->dw_fde_begin, "Length");
11669 }
11670 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11671 {
11672 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11673 "Address");
11674 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11675 fde->dw_fde_second_begin, "Length");
11676 }
11677 }
11678 }
11679
11680 /* Output the terminator words. */
11681 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11682 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11683 }
11684
11685 /* Add a new entry to .debug_ranges. Return its index into
11686 ranges_table vector. */
11687
11688 static unsigned int
11689 add_ranges_num (int num, bool maybe_new_sec)
11690 {
11691 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11692 vec_safe_push (ranges_table, r);
11693 return vec_safe_length (ranges_table) - 1;
11694 }
11695
11696 /* Add a new entry to .debug_ranges corresponding to a block, or a
11697 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11698 this entry might be in a different section from previous range. */
11699
11700 static unsigned int
11701 add_ranges (const_tree block, bool maybe_new_sec)
11702 {
11703 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11704 }
11705
11706 /* Note that (*ranges_table)[offset] is either the head of a rnglist
11707 chain, or a middle entry of a chain that will be directly referred to. */
11708
11709 static void
11710 note_rnglist_head (unsigned int offset)
11711 {
11712 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11713 return;
11714 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11715 }
11716
11717 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11718 When using dwarf_split_debug_info, address attributes in dies destined
11719 for the final executable should be direct references--setting the
11720 parameter force_direct ensures this behavior. */
11721
11722 static void
11723 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11724 bool *added, bool force_direct)
11725 {
11726 unsigned int in_use = vec_safe_length (ranges_by_label);
11727 unsigned int offset;
11728 dw_ranges_by_label rbl = { begin, end };
11729 vec_safe_push (ranges_by_label, rbl);
11730 offset = add_ranges_num (-(int)in_use - 1, true);
11731 if (!*added)
11732 {
11733 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11734 *added = true;
11735 note_rnglist_head (offset);
11736 }
11737 }
11738
11739 /* Emit .debug_ranges section. */
11740
11741 static void
11742 output_ranges (void)
11743 {
11744 unsigned i;
11745 static const char *const start_fmt = "Offset %#x";
11746 const char *fmt = start_fmt;
11747 dw_ranges *r;
11748
11749 switch_to_section (debug_ranges_section);
11750 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
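/* A sketch of the encoding produced below: each .debug_ranges list is a
   sequence of (begin, end) address pairs relative to the compilation
   unit's base address, terminated by a (0, 0) pair; the deltas against
   text_section_label rely on that base defaulting to DW_AT_low_pc.  */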
11751 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11752 {
11753 int block_num = r->num;
11754
11755 if (block_num > 0)
11756 {
11757 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11758 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11759
11760 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11761 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11762
11763 /* If all code is in the text section, then the compilation
11764 unit base address defaults to DW_AT_low_pc, which is the
11765 base of the text section. */
11766 if (!have_multiple_function_sections)
11767 {
11768 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11769 text_section_label,
11770 fmt, i * 2 * DWARF2_ADDR_SIZE);
11771 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11772 text_section_label, NULL);
11773 }
11774
11775 /* Otherwise, the compilation unit base address is zero,
11776 which allows us to use absolute addresses, and not worry
11777 about whether the target supports cross-section
11778 arithmetic. */
11779 else
11780 {
11781 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11782 fmt, i * 2 * DWARF2_ADDR_SIZE);
11783 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11784 }
11785
11786 fmt = NULL;
11787 }
11788
11789 /* Negative block_num stands for an index into ranges_by_label. */
11790 else if (block_num < 0)
11791 {
11792 int lab_idx = - block_num - 1;
11793
11794 if (!have_multiple_function_sections)
11795 {
11796 gcc_unreachable ();
11797 #if 0
11798 /* If we ever use add_ranges_by_labels () for a single
11799 function section, all we have to do is to take out
11800 the #if 0 above. */
11801 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11802 (*ranges_by_label)[lab_idx].begin,
11803 text_section_label,
11804 fmt, i * 2 * DWARF2_ADDR_SIZE);
11805 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11806 (*ranges_by_label)[lab_idx].end,
11807 text_section_label, NULL);
11808 #endif
11809 }
11810 else
11811 {
11812 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11813 (*ranges_by_label)[lab_idx].begin,
11814 fmt, i * 2 * DWARF2_ADDR_SIZE);
11815 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11816 (*ranges_by_label)[lab_idx].end,
11817 NULL);
11818 }
11819 }
11820 else
11821 {
11822 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11823 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11824 fmt = start_fmt;
11825 }
11826 }
11827 }
11828
11829 /* Non-zero if .debug_line_str should be used for .debug_line section
11830 strings or strings that are likely shareable with those. */
11831 #define DWARF5_USE_DEBUG_LINE_STR \
11832 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11833 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11834 /* FIXME: there is no .debug_line_str.dwo section, \
11835 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11836 && !dwarf_split_debug_info)
11837
11838 /* Assign .debug_rnglists indexes. */
11839
11840 static void
11841 index_rnglists (void)
11842 {
11843 unsigned i;
11844 dw_ranges *r;
11845
11846 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11847 if (r->label)
11848 r->idx = rnglist_idx++;
11849 }
11850
11851 /* Emit .debug_rnglists section. */
11852
11853 static void
11854 output_rnglists (unsigned generation)
11855 {
11856 unsigned i;
11857 dw_ranges *r;
11858 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11859 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11860 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11861
11862 switch_to_section (debug_ranges_section);
11863 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11864 /* There are up to 4 unique ranges labels per generation.
11865 See also init_sections_and_labels. */
11866 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11867 2 + generation * 4);
11868 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11869 3 + generation * 4);
11870 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11871 dw2_asm_output_data (4, 0xffffffff,
11872 "Initial length escape value indicating "
11873 "64-bit DWARF extension");
11874 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11875 "Length of Range Lists");
11876 ASM_OUTPUT_LABEL (asm_out_file, l1);
11877 output_dwarf_version ();
11878 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11879 dw2_asm_output_data (1, 0, "Segment Size");
11880 /* Emit the offset table only for -gsplit-dwarf. When we don't care
11881 about relocation sizes and primarily care about the size of .debug*
11882 sections in linked shared libraries and executables, the offset
11883 table plus the corresponding DW_FORM_rnglistx uleb128 indexes into
11884 it are usually larger than just DW_FORM_sec_offset offsets into the
11885 .debug_rnglists section. */
11886 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11887 "Offset Entry Count");
11888 if (dwarf_split_debug_info)
11889 {
11890 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11891 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11892 if (r->label)
11893 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11894 ranges_base_label, NULL);
11895 }
11896
11897 const char *lab = "";
11898 unsigned int len = vec_safe_length (ranges_table);
11899 const char *base = NULL;
11900 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11901 {
11902 int block_num = r->num;
11903
11904 if (r->label)
11905 {
11906 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11907 lab = r->label;
11908 }
11909 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11910 base = NULL;
11911 if (block_num > 0)
11912 {
11913 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11914 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11915
11916 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11917 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11918
11919 if (HAVE_AS_LEB128)
11920 {
11921 /* If all code is in the text section, then the compilation
11922 unit base address defaults to DW_AT_low_pc, which is the
11923 base of the text section. */
11924 if (!have_multiple_function_sections)
11925 {
11926 dw2_asm_output_data (1, DW_RLE_offset_pair,
11927 "DW_RLE_offset_pair (%s)", lab);
11928 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11929 "Range begin address (%s)", lab);
11930 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11931 "Range end address (%s)", lab);
11932 continue;
11933 }
11934 if (base == NULL)
11935 {
11936 dw_ranges *r2 = NULL;
11937 if (i < len - 1)
11938 r2 = &(*ranges_table)[i + 1];
11939 if (r2
11940 && r2->num != 0
11941 && r2->label == NULL
11942 && !r2->maybe_new_sec)
11943 {
11944 dw2_asm_output_data (1, DW_RLE_base_address,
11945 "DW_RLE_base_address (%s)", lab);
11946 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11947 "Base address (%s)", lab);
11948 strcpy (basebuf, blabel);
11949 base = basebuf;
11950 }
11951 }
11952 if (base)
11953 {
11954 dw2_asm_output_data (1, DW_RLE_offset_pair,
11955 "DW_RLE_offset_pair (%s)", lab);
11956 dw2_asm_output_delta_uleb128 (blabel, base,
11957 "Range begin address (%s)", lab);
11958 dw2_asm_output_delta_uleb128 (elabel, base,
11959 "Range end address (%s)", lab);
11960 continue;
11961 }
11962 dw2_asm_output_data (1, DW_RLE_start_length,
11963 "DW_RLE_start_length (%s)", lab);
11964 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11965 "Range begin address (%s)", lab);
11966 dw2_asm_output_delta_uleb128 (elabel, blabel,
11967 "Range length (%s)", lab);
11968 }
11969 else
11970 {
11971 dw2_asm_output_data (1, DW_RLE_start_end,
11972 "DW_RLE_start_end (%s)", lab);
11973 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11974 "Range begin address (%s)", lab);
11975 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11976 "Range end address (%s)", lab);
11977 }
11978 }
11979
11980 /* Negative block_num stands for an index into ranges_by_label. */
11981 else if (block_num < 0)
11982 {
11983 int lab_idx = - block_num - 1;
11984 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11985 const char *elabel = (*ranges_by_label)[lab_idx].end;
11986
11987 if (!have_multiple_function_sections)
11988 gcc_unreachable ();
11989 if (HAVE_AS_LEB128)
11990 {
11991 dw2_asm_output_data (1, DW_RLE_start_length,
11992 "DW_RLE_start_length (%s)", lab);
11993 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11994 "Range begin address (%s)", lab);
11995 dw2_asm_output_delta_uleb128 (elabel, blabel,
11996 "Range length (%s)", lab);
11997 }
11998 else
11999 {
12000 dw2_asm_output_data (1, DW_RLE_start_end,
12001 "DW_RLE_start_end (%s)", lab);
12002 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
12003 "Range begin address (%s)", lab);
12004 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
12005 "Range end address (%s)", lab);
12006 }
12007 }
12008 else
12009 dw2_asm_output_data (1, DW_RLE_end_of_list,
12010 "DW_RLE_end_of_list (%s)", lab);
12011 }
12012 ASM_OUTPUT_LABEL (asm_out_file, l2);
12013 }
12014
12015 /* Data structure containing information about input files. */
12016 struct file_info
12017 {
12018 const char *path; /* Complete file name. */
12019 const char *fname; /* File name part. */
12020 int length; /* Length of entire string. */
12021 struct dwarf_file_data * file_idx; /* Index in input file table. */
12022 int dir_idx; /* Index in directory table. */
12023 };
12024
12025 /* Data structure containing information about directories with source
12026 files. */
12027 struct dir_info
12028 {
12029 const char *path; /* Path including directory name. */
12030 int length; /* Path length. */
12031 int prefix; /* Index of directory entry which is a prefix. */
12032 int count; /* Number of files in this directory. */
12033 int dir_idx; /* Index of directory used as base. */
12034 };
12035
12036 /* Callback function for file_info comparison. We sort by looking at
12037 the directories in the path. */
12038
12039 static int
12040 file_info_cmp (const void *p1, const void *p2)
12041 {
12042 const struct file_info *const s1 = (const struct file_info *) p1;
12043 const struct file_info *const s2 = (const struct file_info *) p2;
12044 const unsigned char *cp1;
12045 const unsigned char *cp2;
12046
12047 /* Take care of file names without directories. We need to make sure that
12048 we return consistent values to qsort since some implementations will get
12049 confused if we return the same value when identical operands are passed
12050 in opposite orders. So if neither has a directory, return 0; otherwise
12051 return 1 or -1 depending on which one has the directory. We want the one
12052 with the directory to sort after the one without, so all files without a
12053 directory are at the start (normally only the compilation unit file). */
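/* For illustration (a sketch of the resulting order): "x.c" (no
   directory) sorts first, and "a/b/f.c" sorts before "a/g.c" because
   the longer shared directory prefix wins; the net effect is that files
   from the same directory end up adjacent.  */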
12054 if ((s1->path == s1->fname || s2->path == s2->fname))
12055 return (s2->path == s2->fname) - (s1->path == s1->fname);
12056
12057 cp1 = (const unsigned char *) s1->path;
12058 cp2 = (const unsigned char *) s2->path;
12059
12060 while (1)
12061 {
12062 ++cp1;
12063 ++cp2;
12064 /* Reached the end of the first path? If so, handle like above,
12065 but now we want longer directory prefixes before shorter ones. */
12066 if ((cp1 == (const unsigned char *) s1->fname)
12067 || (cp2 == (const unsigned char *) s2->fname))
12068 return ((cp1 == (const unsigned char *) s1->fname)
12069 - (cp2 == (const unsigned char *) s2->fname));
12070
12071 /* Do the characters of the current path component differ? */
12072 else if (*cp1 != *cp2)
12073 return *cp1 - *cp2;
12074 }
12075 }
12076
12077 struct file_name_acquire_data
12078 {
12079 struct file_info *files;
12080 int used_files;
12081 int max_files;
12082 };
12083
12084 /* Traversal function for the hash table. */
12085
12086 int
12087 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
12088 {
12089 struct dwarf_file_data *d = *slot;
12090 struct file_info *fi;
12091 const char *f;
12092
12093 gcc_assert (fnad->max_files >= d->emitted_number);
12094
12095 if (! d->emitted_number)
12096 return 1;
12097
12098 gcc_assert (fnad->max_files != fnad->used_files);
12099
12100 fi = fnad->files + fnad->used_files++;
12101
12102 /* Skip all leading "./". */
12103 f = d->filename;
12104 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
12105 f += 2;
12106
12107 /* Create a new array entry. */
12108 fi->path = f;
12109 fi->length = strlen (f);
12110 fi->file_idx = d;
12111
12112 /* Search for the file name part. */
12113 f = strrchr (f, DIR_SEPARATOR);
12114 #if defined (DIR_SEPARATOR_2)
12115 {
12116 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
12117
12118 if (g != NULL)
12119 {
12120 if (f == NULL || f < g)
12121 f = g;
12122 }
12123 }
12124 #endif
12125
12126 fi->fname = f == NULL ? fi->path : f + 1;
12127 return 1;
12128 }
12129
12130 /* Helper function for output_file_names. Emit a FORM-encoded
12131 string STR, with assembly comment start ENTRY_KIND and
12132 index IDX. */
12133
12134 static void
12135 output_line_string (enum dwarf_form form, const char *str,
12136 const char *entry_kind, unsigned int idx)
12137 {
12138 switch (form)
12139 {
12140 case DW_FORM_string:
12141 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
12142 break;
12143 case DW_FORM_line_strp:
12144 if (!debug_line_str_hash)
12145 debug_line_str_hash
12146 = hash_table<indirect_string_hasher>::create_ggc (10);
12147
12148 struct indirect_string_node *node;
12149 node = find_AT_string_in_table (str, debug_line_str_hash);
12150 set_indirect_string (node);
12151 node->form = form;
12152 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
12153 debug_line_str_section, "%s: %#x: \"%s\"",
12154 entry_kind, 0, node->str);
12155 break;
12156 default:
12157 gcc_unreachable ();
12158 }
12159 }
12160
12161 /* Output the directory table and the file name table. We try to minimize
12162 the total amount of memory needed. A heuristic is used to avoid large
12163 slowdowns with many input files. */
12164
12165 static void
12166 output_file_names (void)
12167 {
12168 struct file_name_acquire_data fnad;
12169 int numfiles;
12170 struct file_info *files;
12171 struct dir_info *dirs;
12172 int *saved;
12173 int *savehere;
12174 int *backmap;
12175 int ndirs;
12176 int idx_offset;
12177 int i;
12178
12179 if (!last_emitted_file)
12180 {
12181 if (dwarf_version >= 5)
12182 {
12183 dw2_asm_output_data (1, 0, "Directory entry format count");
12184 dw2_asm_output_data_uleb128 (0, "Directories count");
12185 dw2_asm_output_data (1, 0, "File name entry format count");
12186 dw2_asm_output_data_uleb128 (0, "File names count");
12187 }
12188 else
12189 {
12190 dw2_asm_output_data (1, 0, "End directory table");
12191 dw2_asm_output_data (1, 0, "End file name table");
12192 }
12193 return;
12194 }
12195
12196 numfiles = last_emitted_file->emitted_number;
12197
12198 /* Allocate the various arrays we need. */
12199 files = XALLOCAVEC (struct file_info, numfiles);
12200 dirs = XALLOCAVEC (struct dir_info, numfiles);
12201
12202 fnad.files = files;
12203 fnad.used_files = 0;
12204 fnad.max_files = numfiles;
12205 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12206 gcc_assert (fnad.used_files == fnad.max_files);
12207
12208 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12209
12210 /* Find all the different directories used. */
12211 dirs[0].path = files[0].path;
12212 dirs[0].length = files[0].fname - files[0].path;
12213 dirs[0].prefix = -1;
12214 dirs[0].count = 1;
12215 dirs[0].dir_idx = 0;
12216 files[0].dir_idx = 0;
12217 ndirs = 1;
12218
12219 for (i = 1; i < numfiles; i++)
12220 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12221 && memcmp (dirs[ndirs - 1].path, files[i].path,
12222 dirs[ndirs - 1].length) == 0)
12223 {
12224 /* Same directory as last entry. */
12225 files[i].dir_idx = ndirs - 1;
12226 ++dirs[ndirs - 1].count;
12227 }
12228 else
12229 {
12230 int j;
12231
12232 /* This is a new directory. */
12233 dirs[ndirs].path = files[i].path;
12234 dirs[ndirs].length = files[i].fname - files[i].path;
12235 dirs[ndirs].count = 1;
12236 dirs[ndirs].dir_idx = ndirs;
12237 files[i].dir_idx = ndirs;
12238
12239 /* Search for a prefix. */
12240 dirs[ndirs].prefix = -1;
12241 for (j = 0; j < ndirs; j++)
12242 if (dirs[j].length < dirs[ndirs].length
12243 && dirs[j].length > 1
12244 && (dirs[ndirs].prefix == -1
12245 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12246 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12247 dirs[ndirs].prefix = j;
12248
12249 ++ndirs;
12250 }
12251
12252 /* Now to the actual work. We have to find a subset of the directories which
12253 allows expressing the file names using references to the directory table
12254 with the fewest characters. We do not do an exhaustive search where we
12255 would have to check every combination of every single possible prefix.
12256 Instead we use a heuristic which provides nearly optimal results in most
12257 cases and is never far off. */
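/* As a rough worked example of the trade-off: adding "/usr/include/"
   (13 characters plus a terminating NUL) to the directory table pays
   off once the characters saved by the files that can drop that prefix
   - 13 per such file - exceed that cost, which is what the
   "total > dirs[i].length + 1" test below checks.  */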
12258 saved = XALLOCAVEC (int, ndirs);
12259 savehere = XALLOCAVEC (int, ndirs);
12260
12261 memset (saved, '\0', ndirs * sizeof (saved[0]));
12262 for (i = 0; i < ndirs; i++)
12263 {
12264 int j;
12265 int total;
12266
12267 /* We can always save some space for the current directory. But this
12268 does not mean it will be enough to justify adding the directory. */
12269 savehere[i] = dirs[i].length;
12270 total = (savehere[i] - saved[i]) * dirs[i].count;
12271
12272 for (j = i + 1; j < ndirs; j++)
12273 {
12274 savehere[j] = 0;
12275 if (saved[j] < dirs[i].length)
12276 {
12277 /* Determine whether the dirs[i] path is a prefix of the
12278 dirs[j] path. */
12279 int k;
12280
12281 k = dirs[j].prefix;
12282 while (k != -1 && k != (int) i)
12283 k = dirs[k].prefix;
12284
12285 if (k == (int) i)
12286 {
12287 /* Yes it is. We can possibly save some memory by
12288 writing the filenames in dirs[j] relative to
12289 dirs[i]. */
12290 savehere[j] = dirs[i].length;
12291 total += (savehere[j] - saved[j]) * dirs[j].count;
12292 }
12293 }
12294 }
12295
12296 /* Check whether we can save enough to justify adding the dirs[i]
12297 directory. */
12298 if (total > dirs[i].length + 1)
12299 {
12300 /* It's worthwhile adding. */
12301 for (j = i; j < ndirs; j++)
12302 if (savehere[j] > 0)
12303 {
12304 /* Remember how much we saved for this directory so far. */
12305 saved[j] = savehere[j];
12306
12307 /* Remember the prefix directory. */
12308 dirs[j].dir_idx = i;
12309 }
12310 }
12311 }
12312
12313 /* Emit the directory name table. */
12314 idx_offset = dirs[0].length > 0 ? 1 : 0;
12315 enum dwarf_form str_form = DW_FORM_string;
12316 enum dwarf_form idx_form = DW_FORM_udata;
12317 if (dwarf_version >= 5)
12318 {
12319 const char *comp_dir = comp_dir_string ();
12320 if (comp_dir == NULL)
12321 comp_dir = "";
12322 dw2_asm_output_data (1, 1, "Directory entry format count");
12323 if (DWARF5_USE_DEBUG_LINE_STR)
12324 str_form = DW_FORM_line_strp;
12325 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12326 dw2_asm_output_data_uleb128 (str_form, "%s",
12327 get_DW_FORM_name (str_form));
12328 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12329 if (str_form == DW_FORM_string)
12330 {
12331 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12332 for (i = 1 - idx_offset; i < ndirs; i++)
12333 dw2_asm_output_nstring (dirs[i].path,
12334 dirs[i].length
12335 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12336 "Directory Entry: %#x", i + idx_offset);
12337 }
12338 else
12339 {
12340 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12341 for (i = 1 - idx_offset; i < ndirs; i++)
12342 {
12343 const char *str
12344 = ggc_alloc_string (dirs[i].path,
12345 dirs[i].length
12346 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12347 output_line_string (str_form, str, "Directory Entry",
12348 (unsigned) i + idx_offset);
12349 }
12350 }
12351 }
12352 else
12353 {
12354 for (i = 1 - idx_offset; i < ndirs; i++)
12355 dw2_asm_output_nstring (dirs[i].path,
12356 dirs[i].length
12357 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12358 "Directory Entry: %#x", i + idx_offset);
12359
12360 dw2_asm_output_data (1, 0, "End directory table");
12361 }
12362
12363 /* We have to emit them in the order of emitted_number since that's
12364 used in the debug info generation. To do this efficiently we
12365 generate a back-mapping of the indices first. */
12366 backmap = XALLOCAVEC (int, numfiles);
12367 for (i = 0; i < numfiles; i++)
12368 backmap[files[i].file_idx->emitted_number - 1] = i;
12369
12370 if (dwarf_version >= 5)
12371 {
12372 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12373 if (filename0 == NULL)
12374 filename0 = "";
12375 /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12376 DW_FORM_data2. Choose one based on the number of directories
12377 and how much space they would occupy in each encoding.
12378 If we have at most 256 directories, all indexes fit into
12379 a single byte, so DW_FORM_data1 is most compact (if there
12380 are at most 128 directories, DW_FORM_udata would be just as
12381 compact, but no shorter and slower to decode). */
12382 if (ndirs + idx_offset <= 256)
12383 idx_form = DW_FORM_data1;
12384 /* If there are more than 65536 directories, we have to use
12385 DW_FORM_udata, since DW_FORM_data2 can't refer to them.
12386 Otherwise, compute how much space all the indexes would occupy
12387 if they used DW_FORM_udata (sum), compare that to the size of
12388 the DW_FORM_data2 encoding, and pick the more efficient one. */
12389 else if (ndirs + idx_offset <= 65536)
12390 {
12391 unsigned HOST_WIDE_INT sum = 1;
12392 for (i = 0; i < numfiles; i++)
12393 {
12394 int file_idx = backmap[i];
12395 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12396 sum += size_of_uleb128 (dir_idx);
12397 }
12398 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12399 idx_form = DW_FORM_data2;
12400 }
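/* E.g. (a sketch of the arithmetic): directory indexes below 128 take
   1 byte as DW_FORM_udata, those up to 16383 take 2 bytes and larger
   ones 3 bytes, whereas DW_FORM_data2 is always 2 bytes; so the sum
   computed above only favors DW_FORM_data2 when enough files refer to
   directories with indexes of 16384 or more.  */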
12401 #ifdef VMS_DEBUGGING_INFO
12402 dw2_asm_output_data (1, 4, "File name entry format count");
12403 #else
12404 dw2_asm_output_data (1, 2, "File name entry format count");
12405 #endif
12406 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12407 dw2_asm_output_data_uleb128 (str_form, "%s",
12408 get_DW_FORM_name (str_form));
12409 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12410 "DW_LNCT_directory_index");
12411 dw2_asm_output_data_uleb128 (idx_form, "%s",
12412 get_DW_FORM_name (idx_form));
12413 #ifdef VMS_DEBUGGING_INFO
12414 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12415 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12416 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12417 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12418 #endif
12419 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12420
12421 output_line_string (str_form, filename0, "File Entry", 0);
12422
12423 /* Include directory index. */
12424 if (idx_form != DW_FORM_udata)
12425 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12426 0, NULL);
12427 else
12428 dw2_asm_output_data_uleb128 (0, NULL);
12429
12430 #ifdef VMS_DEBUGGING_INFO
12431 dw2_asm_output_data_uleb128 (0, NULL);
12432 dw2_asm_output_data_uleb128 (0, NULL);
12433 #endif
12434 }
12435
12436 /* Now write all the file names. */
12437 for (i = 0; i < numfiles; i++)
12438 {
12439 int file_idx = backmap[i];
12440 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12441
12442 #ifdef VMS_DEBUGGING_INFO
12443 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12444
12445 /* Setting these fields can lead to debugger miscomparisons,
12446 but VMS Debug requires them to be set correctly. */
12447
12448 int ver;
12449 long long cdt;
12450 long siz;
12451 int maxfilelen = (strlen (files[file_idx].path)
12452 + dirs[dir_idx].length
12453 + MAX_VMS_VERSION_LEN + 1);
12454 char *filebuf = XALLOCAVEC (char, maxfilelen);
12455
12456 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12457 snprintf (filebuf, maxfilelen, "%s;%d",
12458 files[file_idx].path + dirs[dir_idx].length, ver);
12459
12460 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12461
12462 /* Include directory index. */
12463 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12464 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12465 dir_idx + idx_offset, NULL);
12466 else
12467 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12468
12469 /* Modification time. */
12470 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12471 &cdt, 0, 0, 0) == 0)
12472 ? cdt : 0, NULL);
12473
12474 /* File length in bytes. */
12475 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12476 0, &siz, 0, 0) == 0)
12477 ? siz : 0, NULL);
12478 #else
12479 output_line_string (str_form,
12480 files[file_idx].path + dirs[dir_idx].length,
12481 "File Entry", (unsigned) i + 1);
12482
12483 /* Include directory index. */
12484 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12485 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12486 dir_idx + idx_offset, NULL);
12487 else
12488 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12489
12490 if (dwarf_version >= 5)
12491 continue;
12492
12493 /* Modification time. */
12494 dw2_asm_output_data_uleb128 (0, NULL);
12495
12496 /* File length in bytes. */
12497 dw2_asm_output_data_uleb128 (0, NULL);
12498 #endif /* VMS_DEBUGGING_INFO */
12499 }
12500
12501 if (dwarf_version < 5)
12502 dw2_asm_output_data (1, 0, "End file name table");
12503 }
12504
12505
12506 /* Output one line number table into the .debug_line section. */
12507
12508 static void
12509 output_one_line_info_table (dw_line_info_table *table)
12510 {
12511 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12512 unsigned int current_line = 1;
12513 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12514 dw_line_info_entry *ent, *prev_addr;
12515 size_t i;
12516 unsigned int view;
12517
12518 view = 0;
12519
12520 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12521 {
12522 switch (ent->opcode)
12523 {
12524 case LI_set_address:
12525 /* ??? Unfortunately, we have little choice here currently, and
12526 must always use the most general form. GCC does not know the
12527 address delta itself, so we can't use DW_LNS_advance_pc. Many
12528 ports do have length attributes which will give an upper bound
12529 on the address range. We could perhaps use length attributes
12530 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
12531 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12532
12533 view = 0;
12534
12535 /* This can handle any delta. This takes
12536 4+DWARF2_ADDR_SIZE bytes. */
12537 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12538 debug_variable_location_views
12539 ? ", reset view to 0" : "");
12540 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12541 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12542 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
12543
12544 prev_addr = ent;
12545 break;
12546
12547 case LI_adv_address:
12548 {
12549 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12550 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12551 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12552
12553 view++;
12554
12555 dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
12556 dw2_asm_output_delta (2, line_label, prev_label,
12557 "from %s to %s", prev_label, line_label);
12558
12559 prev_addr = ent;
12560 break;
12561 }
12562
12563 case LI_set_line:
12564 if (ent->val == current_line)
12565 {
12566 /* We still need to start a new row, so output a copy insn. */
12567 dw2_asm_output_data (1, DW_LNS_copy,
12568 "copy line %u", current_line);
12569 }
12570 else
12571 {
12572 int line_offset = ent->val - current_line;
12573 int line_delta = line_offset - DWARF_LINE_BASE;
12574
12575 current_line = ent->val;
12576 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12577 {
12578 /* This can handle deltas from -10 to 234, using the current
12579 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12580 This takes 1 byte. */
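/* Worked decoding sketch using the standard DWARF formula (editorial, not
   from the original sources): a consumer recovers the line advance as
   DWARF_LINE_BASE + ((opcode - DWARF_LINE_OPCODE_BASE) % DWARF_LINE_RANGE)
   and the operation advance as
   (opcode - DWARF_LINE_OPCODE_BASE) / DWARF_LINE_RANGE, which is 0 here
   because line_delta < DWARF_LINE_RANGE - 1, so only the line changes.  */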
12581 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12582 "line %u", current_line);
12583 }
12584 else
12585 {
12586 /* This can handle any delta. This takes at least 4 bytes,
12587 depending on the value being encoded. */
12588 dw2_asm_output_data (1, DW_LNS_advance_line,
12589 "advance to line %u", current_line);
12590 dw2_asm_output_data_sleb128 (line_offset, NULL);
12591 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12592 }
12593 }
12594 break;
12595
12596 case LI_set_file:
12597 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12598 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12599 break;
12600
12601 case LI_set_column:
12602 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12603 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12604 break;
12605
12606 case LI_negate_stmt:
12607 current_is_stmt = !current_is_stmt;
12608 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12609 "is_stmt %d", current_is_stmt);
12610 break;
12611
12612 case LI_set_prologue_end:
12613 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12614 "set prologue end");
12615 break;
12616
12617 case LI_set_epilogue_begin:
12618 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12619 "set epilogue begin");
12620 break;
12621
12622 case LI_set_discriminator:
12623 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12624 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12625 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12626 dw2_asm_output_data_uleb128 (ent->val, NULL);
12627 break;
12628 }
12629 }
12630
12631 /* Emit debug info for the address of the end of the table. */
12632 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12633 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12634 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12635 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12636
12637 dw2_asm_output_data (1, 0, "end sequence");
12638 dw2_asm_output_data_uleb128 (1, NULL);
12639 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
12640 }
12641
12642 /* Output the source line number correspondence information. This
12643 information goes into the .debug_line section. */
12644
12645 static void
12646 output_line_info (bool prologue_only)
12647 {
12648 static unsigned int generation;
12649 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12650 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12651 bool saw_one = false;
12652 int opc;
12653
12654 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
12655 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
12656 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
12657 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
12658
12659 if (!XCOFF_DEBUGGING_INFO)
12660 {
12661 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
12662 dw2_asm_output_data (4, 0xffffffff,
12663 "Initial length escape value indicating 64-bit DWARF extension");
12664 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
12665 "Length of Source Line Info");
12666 }
12667
12668 ASM_OUTPUT_LABEL (asm_out_file, l1);
12669
12670 output_dwarf_version ();
12671 if (dwarf_version >= 5)
12672 {
12673 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12674 dw2_asm_output_data (1, 0, "Segment Size");
12675 }
12676 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12677 ASM_OUTPUT_LABEL (asm_out_file, p1);
12678
12679 /* Define the architecture-dependent minimum instruction length (in bytes).
12680 In this implementation of DWARF, this field is used for information
12681 purposes only. Since GCC generates assembly language, we have no
12682 a priori knowledge of how many instruction bytes are generated for each
12683 source line, and therefore can use only the DW_LNE_set_address and
12684 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12685 this as '1', which is "correct enough" for all architectures,
12686 and don't let the target override. */
12687 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12688
12689 if (dwarf_version >= 4)
12690 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12691 "Maximum Operations Per Instruction");
12692 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12693 "Default is_stmt_start flag");
12694 dw2_asm_output_data (1, DWARF_LINE_BASE,
12695 "Line Base Value (Special Opcodes)");
12696 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12697 "Line Range Value (Special Opcodes)");
12698 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12699 "Special Opcode Base");
12700
12701 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12702 {
12703 int n_op_args;
12704 switch (opc)
12705 {
12706 case DW_LNS_advance_pc:
12707 case DW_LNS_advance_line:
12708 case DW_LNS_set_file:
12709 case DW_LNS_set_column:
12710 case DW_LNS_fixed_advance_pc:
12711 case DW_LNS_set_isa:
12712 n_op_args = 1;
12713 break;
12714 default:
12715 n_op_args = 0;
12716 break;
12717 }
12718
12719 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12720 opc, n_op_args);
12721 }
12722
12723 /* Write out the information about the files we use. */
12724 output_file_names ();
12725 ASM_OUTPUT_LABEL (asm_out_file, p2);
12726 if (prologue_only)
12727 {
12728 /* Output the marker for the end of the line number info. */
12729 ASM_OUTPUT_LABEL (asm_out_file, l2);
12730 return;
12731 }
12732
12733 if (separate_line_info)
12734 {
12735 dw_line_info_table *table;
12736 size_t i;
12737
12738 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12739 if (table->in_use)
12740 {
12741 output_one_line_info_table (table);
12742 saw_one = true;
12743 }
12744 }
12745 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12746 {
12747 output_one_line_info_table (cold_text_section_line_info);
12748 saw_one = true;
12749 }
12750
12751 /* ??? Some Darwin linkers crash on a .debug_line section with no
12752 sequences. Further, merely a DW_LNE_end_sequence entry is not
12753 sufficient -- the address column must also be initialized.
12754 Make sure to output at least one set_address/end_sequence pair,
12755 choosing .text since that section is always present. */
12756 if (text_section_line_info->in_use || !saw_one)
12757 output_one_line_info_table (text_section_line_info);
12758
12759 /* Output the marker for the end of the line number info. */
12760 ASM_OUTPUT_LABEL (asm_out_file, l2);
12761 }
12762 \f
12763 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12764
12765 static inline bool
12766 need_endianity_attribute_p (bool reverse)
12767 {
12768 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12769 }
12770
12771 /* Given a pointer to a tree node for some base type, return a pointer to
12772 a DIE that describes the given type. REVERSE is true if the type is
12773 to be interpreted in the reverse storage order wrt the target order.
12774
12775 This routine must only be called for GCC type nodes that correspond to
12776 Dwarf base (fundamental) types. */
12777
12778 static dw_die_ref
12779 base_type_die (tree type, bool reverse)
12780 {
12781 dw_die_ref base_type_result;
12782 enum dwarf_type encoding;
12783 bool fpt_used = false;
12784 struct fixed_point_type_info fpt_info;
12785 tree type_bias = NULL_TREE;
12786
12787 /* If this is a subtype that should not be emitted as a subrange type,
12788 use the base type. See subrange_type_for_debug_p. */
12789 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12790 type = TREE_TYPE (type);
12791
12792 switch (TREE_CODE (type))
12793 {
12794 case INTEGER_TYPE:
12795 if ((dwarf_version >= 4 || !dwarf_strict)
12796 && TYPE_NAME (type)
12797 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12798 && DECL_IS_BUILTIN (TYPE_NAME (type))
12799 && DECL_NAME (TYPE_NAME (type)))
12800 {
12801 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12802 if (strcmp (name, "char16_t") == 0
12803 || strcmp (name, "char32_t") == 0)
12804 {
12805 encoding = DW_ATE_UTF;
12806 break;
12807 }
12808 }
12809 if ((dwarf_version >= 3 || !dwarf_strict)
12810 && lang_hooks.types.get_fixed_point_type_info)
12811 {
12812 memset (&fpt_info, 0, sizeof (fpt_info));
12813 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12814 {
12815 fpt_used = true;
12816 encoding = ((TYPE_UNSIGNED (type))
12817 ? DW_ATE_unsigned_fixed
12818 : DW_ATE_signed_fixed);
12819 break;
12820 }
12821 }
12822 if (TYPE_STRING_FLAG (type))
12823 {
12824 if (TYPE_UNSIGNED (type))
12825 encoding = DW_ATE_unsigned_char;
12826 else
12827 encoding = DW_ATE_signed_char;
12828 }
12829 else if (TYPE_UNSIGNED (type))
12830 encoding = DW_ATE_unsigned;
12831 else
12832 encoding = DW_ATE_signed;
12833
12834 if (!dwarf_strict
12835 && lang_hooks.types.get_type_bias)
12836 type_bias = lang_hooks.types.get_type_bias (type);
12837 break;
12838
12839 case REAL_TYPE:
12840 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12841 {
12842 if (dwarf_version >= 3 || !dwarf_strict)
12843 encoding = DW_ATE_decimal_float;
12844 else
12845 encoding = DW_ATE_lo_user;
12846 }
12847 else
12848 encoding = DW_ATE_float;
12849 break;
12850
12851 case FIXED_POINT_TYPE:
12852 if (!(dwarf_version >= 3 || !dwarf_strict))
12853 encoding = DW_ATE_lo_user;
12854 else if (TYPE_UNSIGNED (type))
12855 encoding = DW_ATE_unsigned_fixed;
12856 else
12857 encoding = DW_ATE_signed_fixed;
12858 break;
12859
12860 /* Dwarf2 doesn't know anything about complex ints, so use
12861 a user defined type for it. */
12862 case COMPLEX_TYPE:
12863 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12864 encoding = DW_ATE_complex_float;
12865 else
12866 encoding = DW_ATE_lo_user;
12867 break;
12868
12869 case BOOLEAN_TYPE:
12870 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12871 encoding = DW_ATE_boolean;
12872 break;
12873
12874 default:
12875 /* No other TREE_CODEs are Dwarf fundamental types. */
12876 gcc_unreachable ();
12877 }
12878
12879 base_type_result = new_die_raw (DW_TAG_base_type);
12880
12881 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12882 int_size_in_bytes (type));
12883 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12884
12885 if (need_endianity_attribute_p (reverse))
12886 add_AT_unsigned (base_type_result, DW_AT_endianity,
12887 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
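/* Editorial note: REVERSE means the scalar is stored in the opposite of
   the target's default byte order, hence a big-endian target is marked
   DW_END_little and vice versa.  */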
12888
12889 add_alignment_attribute (base_type_result, type);
12890
12891 if (fpt_used)
12892 {
12893 switch (fpt_info.scale_factor_kind)
12894 {
12895 case fixed_point_scale_factor_binary:
12896 add_AT_int (base_type_result, DW_AT_binary_scale,
12897 fpt_info.scale_factor.binary);
12898 break;
12899
12900 case fixed_point_scale_factor_decimal:
12901 add_AT_int (base_type_result, DW_AT_decimal_scale,
12902 fpt_info.scale_factor.decimal);
12903 break;
12904
12905 case fixed_point_scale_factor_arbitrary:
12906 /* Arbitrary scale factors cannot be described in standard DWARF,
12907 yet. */
12908 if (!dwarf_strict)
12909 {
12910 /* Describe the scale factor as a rational constant. */
12911 const dw_die_ref scale_factor
12912 = new_die (DW_TAG_constant, comp_unit_die (), type);
12913
12914 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12915 fpt_info.scale_factor.arbitrary.numerator);
12916 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12917 fpt_info.scale_factor.arbitrary.denominator);
12918
12919 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12920 }
12921 break;
12922
12923 default:
12924 gcc_unreachable ();
12925 }
12926 }
12927
12928 if (type_bias)
12929 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12930 dw_scalar_form_constant
12931 | dw_scalar_form_exprloc
12932 | dw_scalar_form_reference,
12933 NULL);
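/* Illustrative (assumed) Ada example, not from the original sources: for a
   biased representation such as
   type T is range 100 .. 227 with Size => 7;
   the object holds value - 100, and DW_AT_GNU_bias lets the debugger add
   the bias of 100 back when displaying the value.  */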
12934
12935 return base_type_result;
12936 }
12937
12938 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12939 named 'auto' in its type: return true for it, false otherwise. */
12940
12941 static inline bool
12942 is_cxx_auto (tree type)
12943 {
12944 if (is_cxx ())
12945 {
12946 tree name = TYPE_IDENTIFIER (type);
12947 if (name == get_identifier ("auto")
12948 || name == get_identifier ("decltype(auto)"))
12949 return true;
12950 }
12951 return false;
12952 }
12953
12954 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12955 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
12956
12957 static inline int
12958 is_base_type (tree type)
12959 {
12960 switch (TREE_CODE (type))
12961 {
12962 case INTEGER_TYPE:
12963 case REAL_TYPE:
12964 case FIXED_POINT_TYPE:
12965 case COMPLEX_TYPE:
12966 case BOOLEAN_TYPE:
12967 return 1;
12968
12969 case VOID_TYPE:
12970 case ARRAY_TYPE:
12971 case RECORD_TYPE:
12972 case UNION_TYPE:
12973 case QUAL_UNION_TYPE:
12974 case ENUMERAL_TYPE:
12975 case FUNCTION_TYPE:
12976 case METHOD_TYPE:
12977 case POINTER_TYPE:
12978 case REFERENCE_TYPE:
12979 case NULLPTR_TYPE:
12980 case OFFSET_TYPE:
12981 case LANG_TYPE:
12982 case VECTOR_TYPE:
12983 return 0;
12984
12985 default:
12986 if (is_cxx_auto (type))
12987 return 0;
12988 gcc_unreachable ();
12989 }
12990
12991 return 0;
12992 }
12993
12994 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12995 node, return the size in bits for the type if it is a constant, the
12996 alignment for the type if its size is not constant, zero if the type
12997 has no size, or BITS_PER_WORD if the type actually turns out to be an
12998 ERROR_MARK node. */
12999
13000 static inline unsigned HOST_WIDE_INT
13001 simple_type_size_in_bits (const_tree type)
13002 {
13003 if (TREE_CODE (type) == ERROR_MARK)
13004 return BITS_PER_WORD;
13005 else if (TYPE_SIZE (type) == NULL_TREE)
13006 return 0;
13007 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
13008 return tree_to_uhwi (TYPE_SIZE (type));
13009 else
13010 return TYPE_ALIGN (type);
13011 }
13012
13013 /* Similarly, but return an offset_int instead of UHWI. */
13014
13015 static inline offset_int
13016 offset_int_type_size_in_bits (const_tree type)
13017 {
13018 if (TREE_CODE (type) == ERROR_MARK)
13019 return BITS_PER_WORD;
13020 else if (TYPE_SIZE (type) == NULL_TREE)
13021 return 0;
13022 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
13023 return wi::to_offset (TYPE_SIZE (type));
13024 else
13025 return TYPE_ALIGN (type);
13026 }
13027
13028 /* Given a pointer to a tree node for a subrange type, return a pointer
13029 to a DIE that describes the given type. */
13030
13031 static dw_die_ref
13032 subrange_type_die (tree type, tree low, tree high, tree bias,
13033 dw_die_ref context_die)
13034 {
13035 dw_die_ref subrange_die;
13036 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
13037
13038 if (context_die == NULL)
13039 context_die = comp_unit_die ();
13040
13041 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
13042
13043 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
13044 {
13045 /* The size of the subrange type and its base type do not match,
13046 so we need to generate a size attribute for the subrange type. */
13047 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
13048 }
13049
13050 add_alignment_attribute (subrange_die, type);
13051
13052 if (low)
13053 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
13054 if (high)
13055 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
13056 if (bias && !dwarf_strict)
13057 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
13058 dw_scalar_form_constant
13059 | dw_scalar_form_exprloc
13060 | dw_scalar_form_reference,
13061 NULL);
13062
13063 return subrange_die;
13064 }
13065
13066 /* Returns the (const and/or volatile) cv_qualifiers associated with
13067 the decl node. This will normally be augmented with the
13068 cv_qualifiers of the underlying type in add_type_attribute. */
13069
13070 static int
13071 decl_quals (const_tree decl)
13072 {
13073 return ((TREE_READONLY (decl)
13074 /* The C++ front-end correctly marks reference-typed
13075 variables as readonly, but from a language (and debug
13076 info) standpoint they are not const-qualified. */
13077 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
13078 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
13079 | (TREE_THIS_VOLATILE (decl)
13080 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
13081 }
13082
13083 /* Determine the TYPE whose qualifiers match the largest strict subset
13084 of the given TYPE_QUALS, and return its qualifiers. Ignore all
13085 qualifiers outside QUAL_MASK. */
13086
13087 static int
13088 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
13089 {
13090 tree t;
13091 int best_rank = 0, best_qual = 0, max_rank;
13092
13093 type_quals &= qual_mask;
13094 max_rank = popcount_hwi (type_quals) - 1;
13095
13096 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
13097 t = TYPE_NEXT_VARIANT (t))
13098 {
13099 int q = TYPE_QUALS (t) & qual_mask;
13100
13101 if ((q & type_quals) == q && q != type_quals
13102 && check_base_type (t, type))
13103 {
13104 int rank = popcount_hwi (q);
13105
13106 if (rank > best_rank)
13107 {
13108 best_rank = rank;
13109 best_qual = q;
13110 }
13111 }
13112 }
13113
13114 return best_qual;
13115 }
13116
13117 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
13118 static const dwarf_qual_info_t dwarf_qual_info[] =
13119 {
13120 { TYPE_QUAL_CONST, DW_TAG_const_type },
13121 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
13122 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
13123 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
13124 };
13125 static const unsigned int dwarf_qual_info_size
13126 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
13127
13128 /* If DIE is a qualified DIE of some base DIE with the same parent,
13129 return the base DIE, otherwise return NULL. Set MASK to the
13130 qualifiers added compared to the returned DIE. */
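/* Illustrative example, not from the original sources: given the DIE chain
   DW_TAG_const_type -> DW_TAG_volatile_type -> base type "int", all with
   the same parent, calling this on the const DIE with a nonzero DEPTH
   returns the "int" DIE and sets *MASK to
   TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE.  */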
13131
13132 static dw_die_ref
13133 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
13134 {
13135 unsigned int i;
13136 for (i = 0; i < dwarf_qual_info_size; i++)
13137 if (die->die_tag == dwarf_qual_info[i].t)
13138 break;
13139 if (i == dwarf_qual_info_size)
13140 return NULL;
13141 if (vec_safe_length (die->die_attr) != 1)
13142 return NULL;
13143 dw_die_ref type = get_AT_ref (die, DW_AT_type);
13144 if (type == NULL || type->die_parent != die->die_parent)
13145 return NULL;
13146 *mask |= dwarf_qual_info[i].q;
13147 if (depth)
13148 {
13149 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
13150 if (ret)
13151 return ret;
13152 }
13153 return type;
13154 }
13155
13156 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
13157 entry that chains the modifiers specified by CV_QUALS in front of the
13158 given type. REVERSE is true if the type is to be interpreted in the
13159 reverse storage order wrt the target order. */
13160
13161 static dw_die_ref
13162 modified_type_die (tree type, int cv_quals, bool reverse,
13163 dw_die_ref context_die)
13164 {
13165 enum tree_code code = TREE_CODE (type);
13166 dw_die_ref mod_type_die;
13167 dw_die_ref sub_die = NULL;
13168 tree item_type = NULL;
13169 tree qualified_type;
13170 tree name, low, high;
13171 dw_die_ref mod_scope;
13172 /* Only these cv-qualifiers are currently handled. */
13173 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
13174 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
13175 ENCODE_QUAL_ADDR_SPACE(~0U));
13176 const bool reverse_base_type
13177 = need_endianity_attribute_p (reverse) && is_base_type (type);
13178
13179 if (code == ERROR_MARK)
13180 return NULL;
13181
13182 if (lang_hooks.types.get_debug_type)
13183 {
13184 tree debug_type = lang_hooks.types.get_debug_type (type);
13185
13186 if (debug_type != NULL_TREE && debug_type != type)
13187 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13188 }
13189
13190 cv_quals &= cv_qual_mask;
13191
13192 /* Don't emit DW_TAG_restrict_type for DWARFv2: since it is a type
13193 tag modifier (and not an attribute), old consumers won't be able
13194 to handle it. */
13195 if (dwarf_version < 3)
13196 cv_quals &= ~TYPE_QUAL_RESTRICT;
13197
13198 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
13199 if (dwarf_version < 5)
13200 cv_quals &= ~TYPE_QUAL_ATOMIC;
13201
13202 /* See if we already have the appropriately qualified variant of
13203 this type. */
13204 qualified_type = get_qualified_type (type, cv_quals);
13205
13206 if (qualified_type == sizetype)
13207 {
13208 /* Try not to expose the internal sizetype type's name. */
13209 if (TYPE_NAME (qualified_type)
13210 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13211 {
13212 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13213
13214 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13215 && (TYPE_PRECISION (t)
13216 == TYPE_PRECISION (qualified_type))
13217 && (TYPE_UNSIGNED (t)
13218 == TYPE_UNSIGNED (qualified_type)));
13219 qualified_type = t;
13220 }
13221 else if (qualified_type == sizetype
13222 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13223 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13224 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13225 qualified_type = size_type_node;
13226 if (type == sizetype)
13227 type = qualified_type;
13228 }
13229
13230 /* If we do, then we can just use its DIE, if it exists. */
13231 if (qualified_type)
13232 {
13233 mod_type_die = lookup_type_die (qualified_type);
13234
13235 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13236 dealt with specially: the DIE with the attribute, if it exists, is
13237 placed immediately after the regular DIE for the same base type. */
13238 if (mod_type_die
13239 && (!reverse_base_type
13240 || ((mod_type_die = mod_type_die->die_sib) != NULL
13241 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13242 return mod_type_die;
13243 }
13244
13245 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13246
13247 /* Handle C typedef types. */
13248 if (name
13249 && TREE_CODE (name) == TYPE_DECL
13250 && DECL_ORIGINAL_TYPE (name)
13251 && !DECL_ARTIFICIAL (name))
13252 {
13253 tree dtype = TREE_TYPE (name);
13254
13255 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13256 if (qualified_type == dtype && !reverse_base_type)
13257 {
13258 tree origin = decl_ultimate_origin (name);
13259
13260 /* Typedef variants that have an abstract origin don't get their own
13261 type DIE (see gen_typedef_die), so fall back on the ultimate
13262 abstract origin instead. */
13263 if (origin != NULL && origin != name)
13264 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13265 context_die);
13266
13267 /* For a named type, use the typedef. */
13268 gen_type_die (qualified_type, context_die);
13269 return lookup_type_die (qualified_type);
13270 }
13271 else
13272 {
13273 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13274 dquals &= cv_qual_mask;
13275 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13276 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13277 /* cv-unqualified version of named type. Just use
13278 the unnamed type to which it refers. */
13279 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13280 reverse, context_die);
13281 /* Else cv-qualified version of named type; fall through. */
13282 }
13283 }
13284
13285 mod_scope = scope_die_for (type, context_die);
13286
13287 if (cv_quals)
13288 {
13289 int sub_quals = 0, first_quals = 0;
13290 unsigned i;
13291 dw_die_ref first = NULL, last = NULL;
13292
13293 /* Determine a lesser qualified type that most closely matches
13294 this one. Then generate DW_TAG_* entries for the remaining
13295 qualifiers. */
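/* Illustrative example, not from the original sources: for a
   "const volatile int" whose only pre-existing variant DIE is "const int",
   get_nearest_type_subqualifiers returns TYPE_QUAL_CONST, so the loop
   below only has to wrap a DW_TAG_volatile_type around the existing const
   DIE; the DIE handed back to the caller is that outermost wrapper.  */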
13296 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13297 cv_qual_mask);
13298 if (sub_quals && use_debug_types)
13299 {
13300 bool needed = false;
13301 /* If emitting type units, make sure the order of qualifiers
13302 is canonical. Thus, start from unqualified type if
13303 an earlier qualifier is missing in sub_quals, but some later
13304 one is present there. */
13305 for (i = 0; i < dwarf_qual_info_size; i++)
13306 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13307 needed = true;
13308 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13309 {
13310 sub_quals = 0;
13311 break;
13312 }
13313 }
13314 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13315 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13316 {
13317 /* As not all intermediate qualified DIEs have corresponding
13318 tree types, ensure that qualified DIEs in the same scope
13319 as their DW_AT_type are emitted after their DW_AT_type,
13320 only with other qualified DIEs for the same type possibly
13321 in between them. Determine the range of such qualified
13322 DIEs now (first being the base type, last being the corresponding
13323 last qualified DIE for it). */
13324 unsigned int count = 0;
13325 first = qualified_die_p (mod_type_die, &first_quals,
13326 dwarf_qual_info_size);
13327 if (first == NULL)
13328 first = mod_type_die;
13329 gcc_assert ((first_quals & ~sub_quals) == 0);
13330 for (count = 0, last = first;
13331 count < (1U << dwarf_qual_info_size);
13332 count++, last = last->die_sib)
13333 {
13334 int quals = 0;
13335 if (last == mod_scope->die_child)
13336 break;
13337 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13338 != first)
13339 break;
13340 }
13341 }
13342
13343 for (i = 0; i < dwarf_qual_info_size; i++)
13344 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13345 {
13346 dw_die_ref d;
13347 if (first && first != last)
13348 {
13349 for (d = first->die_sib; ; d = d->die_sib)
13350 {
13351 int quals = 0;
13352 qualified_die_p (d, &quals, dwarf_qual_info_size);
13353 if (quals == (first_quals | dwarf_qual_info[i].q))
13354 break;
13355 if (d == last)
13356 {
13357 d = NULL;
13358 break;
13359 }
13360 }
13361 if (d)
13362 {
13363 mod_type_die = d;
13364 continue;
13365 }
13366 }
13367 if (first)
13368 {
13369 d = new_die_raw (dwarf_qual_info[i].t);
13370 add_child_die_after (mod_scope, d, last);
13371 last = d;
13372 }
13373 else
13374 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13375 if (mod_type_die)
13376 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13377 mod_type_die = d;
13378 first_quals |= dwarf_qual_info[i].q;
13379 }
13380 }
13381 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13382 {
13383 dwarf_tag tag = DW_TAG_pointer_type;
13384 if (code == REFERENCE_TYPE)
13385 {
13386 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13387 tag = DW_TAG_rvalue_reference_type;
13388 else
13389 tag = DW_TAG_reference_type;
13390 }
13391 mod_type_die = new_die (tag, mod_scope, type);
13392
13393 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13394 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13395 add_alignment_attribute (mod_type_die, type);
13396 item_type = TREE_TYPE (type);
13397
13398 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13399 if (!ADDR_SPACE_GENERIC_P (as))
13400 {
13401 int action = targetm.addr_space.debug (as);
13402 if (action >= 0)
13403 {
13404 /* Positive values indicate an address_class. */
13405 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13406 }
13407 else
13408 {
13409 /* Negative values indicate an (inverted) segment base reg. */
13410 dw_loc_descr_ref d
13411 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13412 add_AT_loc (mod_type_die, DW_AT_segment, d);
13413 }
13414 }
13415 }
13416 else if (code == INTEGER_TYPE
13417 && TREE_TYPE (type) != NULL_TREE
13418 && subrange_type_for_debug_p (type, &low, &high))
13419 {
13420 tree bias = NULL_TREE;
13421 if (lang_hooks.types.get_type_bias)
13422 bias = lang_hooks.types.get_type_bias (type);
13423 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13424 item_type = TREE_TYPE (type);
13425 }
13426 else if (is_base_type (type))
13427 {
13428 mod_type_die = base_type_die (type, reverse);
13429
13430 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13431 if (reverse_base_type)
13432 {
13433 dw_die_ref after_die
13434 = modified_type_die (type, cv_quals, false, context_die);
13435 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13436 }
13437 else
13438 add_child_die (comp_unit_die (), mod_type_die);
13439
13440 add_pubtype (type, mod_type_die);
13441 }
13442 else
13443 {
13444 gen_type_die (type, context_die);
13445
13446 /* We have to get the type_main_variant here (and pass that to the
13447 `lookup_type_die' routine) because the ..._TYPE node we have
13448 might simply be a *copy* of some original type node (where the
13449 copy was created to help us keep track of typedef names) and
13450 that copy might have a different TYPE_UID from the original
13451 ..._TYPE node. */
13452 if (TREE_CODE (type) == FUNCTION_TYPE
13453 || TREE_CODE (type) == METHOD_TYPE)
13454 {
13455 /* For function/method types, can't just use type_main_variant here,
13456 because that can have different ref-qualifiers for C++,
13457 but try to canonicalize. */
13458 tree main = TYPE_MAIN_VARIANT (type);
13459 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13460 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13461 && check_base_type (t, main)
13462 && check_lang_type (t, type))
13463 return lookup_type_die (t);
13464 return lookup_type_die (type);
13465 }
13466 else if (TREE_CODE (type) != VECTOR_TYPE
13467 && TREE_CODE (type) != ARRAY_TYPE)
13468 return lookup_type_die (type_main_variant (type));
13469 else
13470 /* Vectors have the debugging information in the type,
13471 not the main variant. */
13472 return lookup_type_die (type);
13473 }
13474
13475 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13476 don't output a DW_TAG_typedef, since there isn't one in the
13477 user's program; just attach a DW_AT_name to the type.
13478 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13479 if the base type already has the same name. */
13480 if (name
13481 && ((TREE_CODE (name) != TYPE_DECL
13482 && (qualified_type == TYPE_MAIN_VARIANT (type)
13483 || (cv_quals == TYPE_UNQUALIFIED)))
13484 || (TREE_CODE (name) == TYPE_DECL
13485 && TREE_TYPE (name) == qualified_type
13486 && DECL_NAME (name))))
13487 {
13488 if (TREE_CODE (name) == TYPE_DECL)
13489 /* Could just call add_name_and_src_coords_attributes here,
13490 but since this is a builtin type it doesn't have any
13491 useful source coordinates anyway. */
13492 name = DECL_NAME (name);
13493 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13494 }
13495 /* This probably indicates a bug. */
13496 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13497 {
13498 name = TYPE_IDENTIFIER (type);
13499 add_name_attribute (mod_type_die,
13500 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13501 }
13502
13503 if (qualified_type && !reverse_base_type)
13504 equate_type_number_to_die (qualified_type, mod_type_die);
13505
13506 if (item_type)
13507 /* We must do this after the equate_type_number_to_die call, in case
13508 this is a recursive type. This ensures that the modified_type_die
13509 recursion will terminate even if the type is recursive. Recursive
13510 types are possible in Ada. */
13511 sub_die = modified_type_die (item_type,
13512 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13513 reverse,
13514 context_die);
13515
13516 if (sub_die != NULL)
13517 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13518
13519 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13520 if (TYPE_ARTIFICIAL (type))
13521 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13522
13523 return mod_type_die;
13524 }
13525
13526 /* Generate DIEs for the generic parameters of T.
13527 T must be either a generic type or a generic function.
13528 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
13529
13530 static void
13531 gen_generic_params_dies (tree t)
13532 {
13533 tree parms, args;
13534 int parms_num, i;
13535 dw_die_ref die = NULL;
13536 int non_default;
13537
13538 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13539 return;
13540
13541 if (TYPE_P (t))
13542 die = lookup_type_die (t);
13543 else if (DECL_P (t))
13544 die = lookup_decl_die (t);
13545
13546 gcc_assert (die);
13547
13548 parms = lang_hooks.get_innermost_generic_parms (t);
13549 if (!parms)
13550 /* T has no generic parameter. It means T is neither a generic type
13551 nor a generic function. End of story. */
13552 return;
13553
13554 parms_num = TREE_VEC_LENGTH (parms);
13555 args = lang_hooks.get_innermost_generic_args (t);
13556 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13557 non_default = int_cst_value (TREE_CHAIN (args));
13558 else
13559 non_default = TREE_VEC_LENGTH (args);
13560 for (i = 0; i < parms_num; i++)
13561 {
13562 tree parm, arg, arg_pack_elems;
13563 dw_die_ref parm_die;
13564
13565 parm = TREE_VEC_ELT (parms, i);
13566 arg = TREE_VEC_ELT (args, i);
13567 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13568 gcc_assert (parm && TREE_VALUE (parm) && arg);
13569
13570 if (parm && TREE_VALUE (parm) && arg)
13571 {
13572 /* If PARM represents a template parameter pack,
13573 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13574 by DW_TAG_template_*_parameter DIEs for the argument
13575 pack elements of ARG. Note that ARG would then be
13576 an argument pack. */
13577 if (arg_pack_elems)
13578 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13579 arg_pack_elems,
13580 die);
13581 else
13582 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13583 true /* emit name */, die);
13584 if (i >= non_default)
13585 add_AT_flag (parm_die, DW_AT_default_value, 1);
13586 }
13587 }
13588 }
13589
13590 /* Create and return a DIE for PARM which should be
13591 the representation of a generic type parameter.
13592 For instance, in the C++ front end, PARM would be a template parameter.
13593 ARG is the argument to PARM.
13594 EMIT_NAME_P: if true, the DIE will have a DW_AT_name attribute set to
13595 the name of PARM.
13596 PARENT_DIE is the parent DIE to which the newly created DIE should be
13597 added as a child node. */
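/* Illustrative C++ example (editorial, not from the original sources): for
   template <typename T, int N> struct A; instantiated as A<int, 3>, this
   is called once per parameter and produces a DW_TAG_template_type_param
   DIE (name "T", DW_AT_type int) and a DW_TAG_template_value_param DIE
   (name "N", DW_AT_type int, with its DW_AT_const_value of 3 filled in
   later, once cgraph is ready).  */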
13598
13599 static dw_die_ref
13600 generic_parameter_die (tree parm, tree arg,
13601 bool emit_name_p,
13602 dw_die_ref parent_die)
13603 {
13604 dw_die_ref tmpl_die = NULL;
13605 const char *name = NULL;
13606
13607 /* C++2a accepts class literals as template parameters, and var
13608 decls with initializers represent them. The VAR_DECLs would be
13609 rejected, but we can take the DECL_INITIAL constructor and
13610 attempt to expand it. */
13611 if (arg && VAR_P (arg))
13612 arg = DECL_INITIAL (arg);
13613
13614 if (!parm || !DECL_NAME (parm) || !arg)
13615 return NULL;
13616
13617 /* We support non-type generic parameters and arguments,
13618 type generic parameters and arguments, as well as
13619 generic generic parameters (a.k.a. template template parameters in C++)
13620 and arguments. */
13621 if (TREE_CODE (parm) == PARM_DECL)
13622 /* PARM is a nontype generic parameter */
13623 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13624 else if (TREE_CODE (parm) == TYPE_DECL)
13625 /* PARM is a type generic parameter. */
13626 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13627 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13628 /* PARM is a generic generic parameter.
13629 Its DIE is a GNU extension. It shall have a
13630 DW_AT_name attribute to represent the name of the template template
13631 parameter, and a DW_AT_GNU_template_name attribute to represent the
13632 name of the template template argument. */
13633 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13634 parent_die, parm);
13635 else
13636 gcc_unreachable ();
13637
13638 if (tmpl_die)
13639 {
13640 tree tmpl_type;
13641
13642 /* If PARM is a generic parameter pack, it means we are
13643 emitting debug info for a template argument pack element.
13644 In other terms, ARG is a template argument pack element.
13645 In that case, we don't emit any DW_AT_name attribute for
13646 the die. */
13647 if (emit_name_p)
13648 {
13649 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13650 gcc_assert (name);
13651 add_AT_string (tmpl_die, DW_AT_name, name);
13652 }
13653
13654 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13655 {
13656 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
13657 TMPL_DIE should have a child DW_AT_type attribute that is set
13658 to the type of the argument to PARM, which is ARG.
13659 If PARM is a type generic parameter, TMPL_DIE should have a
13660 child DW_AT_type that is set to ARG. */
13661 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13662 add_type_attribute (tmpl_die, tmpl_type,
13663 (TREE_THIS_VOLATILE (tmpl_type)
13664 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13665 false, parent_die);
13666 }
13667 else
13668 {
13669 /* So TMPL_DIE is a DIE representing a
13670 generic generic template parameter, a.k.a. a template template
13671 parameter in C++, and ARG is a template. */
13672
13673 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13674 to the name of the argument. */
13675 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13676 if (name)
13677 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13678 }
13679
13680 if (TREE_CODE (parm) == PARM_DECL)
13681 /* So PARM is a non-type generic parameter.
13682 DWARF3 5.6.8 says we must set a DW_AT_const_value child
13683 attribute of TMPL_DIE whose value represents the value
13684 of ARG.
13685 We must be careful here:
13686 the value of ARG might reference some function decls.
13687 We might currently be emitting debug info for a generic
13688 type, and since types are emitted before function decls, we
13689 don't know whether the function decls referenced by ARG will
13690 actually be emitted after the cgraph computations.
13691 So we must defer the generation of the DW_AT_const_value
13692 until cgraph is ready. */
13693 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13694 }
13695
13696 return tmpl_die;
13697 }
13698
13699 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
13700 PARM_PACK, which must be a template parameter pack. The returned DIE
13701 will be a child DIE of PARENT_DIE. */
13702
13703 static dw_die_ref
13704 template_parameter_pack_die (tree parm_pack,
13705 tree parm_pack_args,
13706 dw_die_ref parent_die)
13707 {
13708 dw_die_ref die;
13709 int j;
13710
13711 gcc_assert (parent_die && parm_pack);
13712
13713 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13714 add_name_and_src_coords_attributes (die, parm_pack);
13715 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13716 generic_parameter_die (parm_pack,
13717 TREE_VEC_ELT (parm_pack_args, j),
13718 false /* Don't emit DW_AT_name */,
13719 die);
13720 return die;
13721 }
13722
13723 /* Return the DBX register number described by a given RTL node. */
13724
13725 static unsigned int
13726 dbx_reg_number (const_rtx rtl)
13727 {
13728 unsigned regno = REGNO (rtl);
13729
13730 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13731
13732 #ifdef LEAF_REG_REMAP
13733 if (crtl->uses_only_leaf_regs)
13734 {
13735 int leaf_reg = LEAF_REG_REMAP (regno);
13736 if (leaf_reg != -1)
13737 regno = (unsigned) leaf_reg;
13738 }
13739 #endif
13740
13741 regno = DBX_REGISTER_NUMBER (regno);
13742 gcc_assert (regno != INVALID_REGNUM);
13743 return regno;
13744 }
13745
13746 /* Optionally add a DW_OP_piece term to a location description expression.
13747 DW_OP_piece is only added if the location description expression does not
13748 already end with DW_OP_piece. */
13749
13750 static void
13751 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13752 {
13753 dw_loc_descr_ref loc;
13754
13755 if (*list_head != NULL)
13756 {
13757 /* Find the end of the chain. */
13758 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13759 ;
13760
13761 if (loc->dw_loc_opc != DW_OP_piece)
13762 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13763 }
13764 }
13765
13766 /* Return a location descriptor that designates a machine register or
13767 zero if there is none. */
13768
13769 static dw_loc_descr_ref
13770 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13771 {
13772 rtx regs;
13773
13774 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13775 return 0;
13776
13777 /* We only use "frame base" when we're sure we're talking about the
13778 post-prologue local stack frame. We do this by *not* running
13779 register elimination until this point, and recognizing the special
13780 argument pointer and soft frame pointer rtx's.
13781 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13782 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13783 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13784 {
13785 dw_loc_descr_ref result = NULL;
13786
13787 if (dwarf_version >= 4 || !dwarf_strict)
13788 {
13789 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13790 initialized);
13791 if (result)
13792 add_loc_descr (&result,
13793 new_loc_descr (DW_OP_stack_value, 0, 0));
13794 }
13795 return result;
13796 }
13797
13798 regs = targetm.dwarf_register_span (rtl);
13799
13800 if (REG_NREGS (rtl) > 1 || regs)
13801 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13802 else
13803 {
13804 unsigned int dbx_regnum = dbx_reg_number (rtl);
13805 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13806 return 0;
13807 return one_reg_loc_descriptor (dbx_regnum, initialized);
13808 }
13809 }
13810
13811 /* Return a location descriptor that designates a machine register for
13812 a given hard register number. */
13813
13814 static dw_loc_descr_ref
13815 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13816 {
13817 dw_loc_descr_ref reg_loc_descr;
13818
13819 if (regno <= 31)
13820 reg_loc_descr
13821 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13822 else
13823 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13824
13825 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13826 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13827
13828 return reg_loc_descr;
13829 }
13830
13831 /* Given an RTL of a register, return a location descriptor that
13832 designates a value that spans more than one register. */
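/* Sketch of the output, with assumed DWARF register numbers (editorial):
   a 16-byte value held in two contiguous 8-byte registers numbered 4 and 5
   is described as DW_OP_reg4, DW_OP_piece 8, DW_OP_reg5, DW_OP_piece 8.  */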
13833
13834 static dw_loc_descr_ref
13835 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13836 enum var_init_status initialized)
13837 {
13838 int size, i;
13839 dw_loc_descr_ref loc_result = NULL;
13840
13841 /* Simple, contiguous registers. */
13842 if (regs == NULL_RTX)
13843 {
13844 unsigned reg = REGNO (rtl);
13845 int nregs;
13846
13847 #ifdef LEAF_REG_REMAP
13848 if (crtl->uses_only_leaf_regs)
13849 {
13850 int leaf_reg = LEAF_REG_REMAP (reg);
13851 if (leaf_reg != -1)
13852 reg = (unsigned) leaf_reg;
13853 }
13854 #endif
13855
13856 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13857 nregs = REG_NREGS (rtl);
13858
13859 /* At present we only track constant-sized pieces. */
13860 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13861 return NULL;
13862 size /= nregs;
13863
13864 loc_result = NULL;
13865 while (nregs--)
13866 {
13867 dw_loc_descr_ref t;
13868
13869 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13870 VAR_INIT_STATUS_INITIALIZED);
13871 add_loc_descr (&loc_result, t);
13872 add_loc_descr_op_piece (&loc_result, size);
13873 ++reg;
13874 }
13875 return loc_result;
13876 }
13877
13878 /* Now onto stupid register sets in non-contiguous locations. */
13879
13880 gcc_assert (GET_CODE (regs) == PARALLEL);
13881
13882 /* At present we only track constant-sized pieces. */
13883 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13884 return NULL;
13885 loc_result = NULL;
13886
13887 for (i = 0; i < XVECLEN (regs, 0); ++i)
13888 {
13889 dw_loc_descr_ref t;
13890
13891 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13892 VAR_INIT_STATUS_INITIALIZED);
13893 add_loc_descr (&loc_result, t);
13894 add_loc_descr_op_piece (&loc_result, size);
13895 }
13896
13897 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13898 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13899 return loc_result;
13900 }
13901
13902 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13903
13904 /* Return a location descriptor that designates a constant i,
13905 as a compound operation from constant (i >> shift), constant shift
13906 and DW_OP_shl. */
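/* Worked example (editorial): for i == ((HOST_WIDE_INT) 18 << 32) and
   shift == 32 this yields DW_OP_lit18, DW_OP_const1u 32, DW_OP_shl --
   4 bytes in total -- where a plain DW_OP_constu would need 7 bytes.  */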
13907
13908 static dw_loc_descr_ref
13909 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13910 {
13911 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13912 add_loc_descr (&ret, int_loc_descriptor (shift));
13913 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13914 return ret;
13915 }
13916
13917 /* Return a location descriptor that designates constant POLY_I. */
13918
13919 static dw_loc_descr_ref
13920 int_loc_descriptor (poly_int64 poly_i)
13921 {
13922 enum dwarf_location_atom op;
13923
13924 HOST_WIDE_INT i;
13925 if (!poly_i.is_constant (&i))
13926 {
13927 /* Create location descriptions for the non-constant part and
13928 add any constant offset at the end. */
13929 dw_loc_descr_ref ret = NULL;
13930 HOST_WIDE_INT constant = poly_i.coeffs[0];
13931 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13932 {
13933 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13934 if (coeff != 0)
13935 {
13936 dw_loc_descr_ref start = ret;
13937 unsigned int factor;
13938 int bias;
13939 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13940 (j, &factor, &bias);
13941
13942 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13943 add COEFF * (REGNO / FACTOR) now and subtract
13944 COEFF * BIAS from the final constant part. */
13945 constant -= coeff * bias;
13946 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13947 if (coeff % factor == 0)
13948 coeff /= factor;
13949 else
13950 {
13951 int amount = exact_log2 (factor);
13952 gcc_assert (amount >= 0);
13953 add_loc_descr (&ret, int_loc_descriptor (amount));
13954 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13955 }
13956 if (coeff != 1)
13957 {
13958 add_loc_descr (&ret, int_loc_descriptor (coeff));
13959 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13960 }
13961 if (start)
13962 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13963 }
13964 }
13965 loc_descr_plus_const (&ret, constant);
13966 return ret;
13967 }
13968
13969 /* Pick the smallest representation of a constant, rather than just
13970 defaulting to the LEB encoding. */
13971 if (i >= 0)
13972 {
13973 int clz = clz_hwi (i);
13974 int ctz = ctz_hwi (i);
13975 if (i <= 31)
13976 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13977 else if (i <= 0xff)
13978 op = DW_OP_const1u;
13979 else if (i <= 0xffff)
13980 op = DW_OP_const2u;
13981 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13982 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13983 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13984 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13985 while DW_OP_const4u is 5 bytes. */
13986 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13987 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13988 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13989 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13990 while DW_OP_const4u is 5 bytes. */
13991 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13992
13993 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13994 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13995 <= 4)
13996 {
13997 /* As i >= 2**31, the double cast above will yield a negative number.
13998 Since wrapping is defined in DWARF expressions we can output big
13999 positive integers as small negative ones, regardless of the size
14000 of host wide ints.
14001
14002 Here, since the evaluator will handle 32-bit values and since i >=
14003 2**31, we know it's going to be interpreted as a negative literal:
14004 store it this way if we can do better than 5 bytes this way. */
14005 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14006 }
14007 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14008 op = DW_OP_const4u;
14009
14010 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
14011 least 6 bytes: see if we can do better before falling back to it. */
14012 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14013 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14014 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
14015 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
14016 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14017 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
14018 >= HOST_BITS_PER_WIDE_INT)
14019 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
14020 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
14021 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
14022 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14023 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14024 && size_of_uleb128 (i) > 6)
14025 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
14026 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
14027 else
14028 op = DW_OP_constu;
14029 }
14030 else
14031 {
14032 if (i >= -0x80)
14033 op = DW_OP_const1s;
14034 else if (i >= -0x8000)
14035 op = DW_OP_const2s;
14036 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14037 {
14038 if (size_of_int_loc_descriptor (i) < 5)
14039 {
14040 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14041 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14042 return ret;
14043 }
14044 op = DW_OP_const4s;
14045 }
14046 else
14047 {
14048 if (size_of_int_loc_descriptor (i)
14049 < (unsigned long) 1 + size_of_sleb128 (i))
14050 {
14051 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14052 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14053 return ret;
14054 }
14055 op = DW_OP_consts;
14056 }
14057 }
14058
14059 return new_loc_descr (op, i, 0);
14060 }
14061
14062 /* Likewise, for unsigned constants. */
14063
14064 static dw_loc_descr_ref
14065 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
14066 {
14067 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
14068 const unsigned HOST_WIDE_INT max_uint
14069 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
14070
14071 /* If possible, use the clever signed constants handling. */
14072 if (i <= max_int)
14073 return int_loc_descriptor ((HOST_WIDE_INT) i);
14074
14075 /* Here, we are left with positive numbers that cannot be represented as
14076 HOST_WIDE_INT, i.e.:
14077 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
14078
14079 Using a DW_OP_const4u/const8u operation to encode them consumes a lot of
14080 bytes, whereas it may be better to output a negative integer: thanks to
14081 integer wrapping, we know that:
14082 x = x - 2 ** (8 * DWARF2_ADDR_SIZE)
14083 = x - 2 * (max (HOST_WIDE_INT) + 1)
14084 So numbers close to max (unsigned HOST_WIDE_INT) can be represented as
14085 small negative integers. Let's try that in cases where it will clearly
14086 improve the encoding: there is no gain turning DW_OP_const4u into
14087 DW_OP_const4s. */
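/* Worked example (editorial), assuming DWARF2_ADDR_SIZE == 8 and a 64-bit
   HOST_WIDE_INT: i == 0xffffffffffffff00 becomes second_shift == -256
   below, which encodes as DW_OP_const2s -256 in 3 bytes instead of a
   9-byte DW_OP_const8u.  */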
14088 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
14089 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
14090 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
14091 {
14092 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
14093
14094 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
14095 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
14096 const HOST_WIDE_INT second_shift
14097 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
14098
14099 /* So we finally have:
14100 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
14101 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
14102 return int_loc_descriptor (second_shift);
14103 }
14104
14105 /* Last chance: fall back to a simple constant operation. */
14106 return new_loc_descr
14107 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14108 ? DW_OP_const4u
14109 : DW_OP_const8u,
14110 i, 0);
14111 }
14112
14113 /* Generate and return a location description that computes the unsigned
14114 comparison of the two stack top entries (a OP b where b is the top-most
14115 entry and a is the second one). The KIND of comparison can be LT_EXPR,
14116 LE_EXPR, GT_EXPR or GE_EXPR. */
14117
14118 static dw_loc_descr_ref
14119 uint_comparison_loc_list (enum tree_code kind)
14120 {
14121 enum dwarf_location_atom op, flip_op;
14122 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
14123
14124 switch (kind)
14125 {
14126 case LT_EXPR:
14127 op = DW_OP_lt;
14128 break;
14129 case LE_EXPR:
14130 op = DW_OP_le;
14131 break;
14132 case GT_EXPR:
14133 op = DW_OP_gt;
14134 break;
14135 case GE_EXPR:
14136 op = DW_OP_ge;
14137 break;
14138 default:
14139 gcc_unreachable ();
14140 }
14141
14142 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14143 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
14144
14145 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
14146 possible to perform unsigned comparisons: we just have to distinguish
14147 two cases:
14148
14149 1. when a and b have the same sign (as signed integers); then we should
14150 return: a OP(signed) b;
14151
14152 2. when a is a negative signed integer while b is a positive one, then a
14153 is a greater unsigned integer than b; likewise when a and b's roles
14154 are flipped.
14155
14156 So first, compare the sign of the two operands. */
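/* Illustrative case (unsigned LT_EXPR on 32-bit stack entries): for
a == 0xfffffff0 and b == 1 the sign bits differ, so the DW_OP_bra branch
is taken and the flipped signed comparison a > b (false) yields the
correct unsigned result for a < b. */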
14157 ret = new_loc_descr (DW_OP_over, 0, 0);
14158 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14159 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
14160 /* If they have different signs (i.e. they have different sign bits), then
14161 the stack top value now has the sign bit set and thus it's smaller than
14162 zero. */
14163 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
14164 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
14165 add_loc_descr (&ret, bra_node);
14166
14167 /* We are in case 1. At this point, we know both operands have the same
14168 sign, so it's safe to use the built-in signed comparison. */
14169 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14170 add_loc_descr (&ret, jmp_node);
14171
14172 /* We are in case 2. Here, we know both operands do not have the same sign,
14173 so we have to flip the signed comparison. */
14174 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
14175 tmp = new_loc_descr (flip_op, 0, 0);
14176 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14177 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
14178 add_loc_descr (&ret, tmp);
14179
14180 /* This dummy operation is necessary to make the two branches join. */
14181 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14182 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14183 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14184 add_loc_descr (&ret, tmp);
14185
14186 return ret;
14187 }
14188
14189 /* Likewise, but takes the location description lists (might be destructive on
14190 them). Return NULL if either is NULL or if concatenation fails. */
14191
14192 static dw_loc_list_ref
14193 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14194 enum tree_code kind)
14195 {
14196 if (left == NULL || right == NULL)
14197 return NULL;
14198
14199 add_loc_list (&left, right);
14200 if (left == NULL)
14201 return NULL;
14202
14203 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14204 return left;
14205 }
14206
14207 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14208 without actually allocating it. */
14209
14210 static unsigned long
14211 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14212 {
14213 return size_of_int_loc_descriptor (i >> shift)
14214 + size_of_int_loc_descriptor (shift)
14215 + 1;
14216 }
14217
14218 /* Return size_of_locs (int_loc_descriptor (i)) without
14219 actually allocating it. */
14220
14221 static unsigned long
14222 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14223 {
14224 unsigned long s;
14225
14226 if (i >= 0)
14227 {
14228 int clz, ctz;
14229 if (i <= 31)
14230 return 1;
14231 else if (i <= 0xff)
14232 return 2;
14233 else if (i <= 0xffff)
14234 return 3;
14235 clz = clz_hwi (i);
14236 ctz = ctz_hwi (i);
14237 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14238 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14239 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14240 - clz - 5);
14241 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14242 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14243 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14244 - clz - 8);
14245 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14246 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14247 <= 4)
14248 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14249 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14250 return 5;
14251 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14252 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14253 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14254 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14255 - clz - 8);
14256 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14257 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14258 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14259 - clz - 16);
14260 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14261 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14262 && s > 6)
14263 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14264 - clz - 32);
14265 else
14266 return 1 + s;
14267 }
14268 else
14269 {
14270 if (i >= -0x80)
14271 return 2;
14272 else if (i >= -0x8000)
14273 return 3;
14274 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14275 {
14276 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14277 {
14278 s = size_of_int_loc_descriptor (-i) + 1;
14279 if (s < 5)
14280 return s;
14281 }
14282 return 5;
14283 }
14284 else
14285 {
14286 unsigned long r = 1 + size_of_sleb128 (i);
14287 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14288 {
14289 s = size_of_int_loc_descriptor (-i) + 1;
14290 if (s < r)
14291 return s;
14292 }
14293 return r;
14294 }
14295 }
14296 }
14297
14298 /* Return a location description representing the "address" of an integer
14299 value. This can appear only as a toplevel expression. */
14300
14301 static dw_loc_descr_ref
14302 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14303 {
14304 int litsize;
14305 dw_loc_descr_ref loc_result = NULL;
14306
14307 if (!(dwarf_version >= 4 || !dwarf_strict))
14308 return NULL;
14309
14310 litsize = size_of_int_loc_descriptor (i);
14311 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14312 is more compact. For DW_OP_stack_value we need:
14313 litsize + 1 (DW_OP_stack_value)
14314 and for DW_OP_implicit_value:
14315 1 (DW_OP_implicit_value) + 1 (length) + size. */
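/* E.g. for i == 5 and size == 4 (assuming DWARF2_ADDR_SIZE >= 4),
DW_OP_lit5 DW_OP_stack_value takes 2 bytes versus 1 + 1 + 4 == 6 bytes
for DW_OP_implicit_value, so the stack value form is chosen below. */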
14316 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14317 {
14318 loc_result = int_loc_descriptor (i);
14319 add_loc_descr (&loc_result,
14320 new_loc_descr (DW_OP_stack_value, 0, 0));
14321 return loc_result;
14322 }
14323
14324 loc_result = new_loc_descr (DW_OP_implicit_value,
14325 size, 0);
14326 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14327 loc_result->dw_loc_oprnd2.v.val_int = i;
14328 return loc_result;
14329 }
14330
14331 /* Return a location descriptor that designates a base+offset location. */
14332
14333 static dw_loc_descr_ref
14334 based_loc_descr (rtx reg, poly_int64 offset,
14335 enum var_init_status initialized)
14336 {
14337 unsigned int regno;
14338 dw_loc_descr_ref result;
14339 dw_fde_ref fde = cfun->fde;
14340
14341 /* We only use "frame base" when we're sure we're talking about the
14342 post-prologue local stack frame. We do this by *not* running
14343 register elimination until this point, and recognizing the special
14344 argument pointer and soft frame pointer rtx's. */
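/* A typical result: a local variable stored 8 bytes below the frame base
is described as DW_OP_fbreg <-8>, and the consumer resolves the frame
base itself from the function's DW_AT_frame_base attribute. */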
14345 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14346 {
14347 rtx elim = (ira_use_lra_p
14348 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14349 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14350
14351 if (elim != reg)
14352 {
14353 /* Allow hard frame pointer here even if frame pointer
14354 isn't used since hard frame pointer is encoded with
14355 DW_OP_fbreg which uses the DW_AT_frame_base attribute,
14356 not hard frame pointer directly. */
14357 elim = strip_offset_and_add (elim, &offset);
14358 gcc_assert (elim == hard_frame_pointer_rtx
14359 || elim == stack_pointer_rtx);
14360
14361 /* If drap register is used to align stack, use frame
14362 pointer + offset to access stack variables. If stack
14363 is aligned without drap, use stack pointer + offset to
14364 access stack variables. */
14365 if (crtl->stack_realign_tried
14366 && reg == frame_pointer_rtx)
14367 {
14368 int base_reg
14369 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14370 ? HARD_FRAME_POINTER_REGNUM
14371 : REGNO (elim));
14372 return new_reg_loc_descr (base_reg, offset);
14373 }
14374
14375 gcc_assert (frame_pointer_fb_offset_valid);
14376 offset += frame_pointer_fb_offset;
14377 HOST_WIDE_INT const_offset;
14378 if (offset.is_constant (&const_offset))
14379 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14380 else
14381 {
14382 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14383 loc_descr_plus_const (&ret, offset);
14384 return ret;
14385 }
14386 }
14387 }
14388
14389 regno = REGNO (reg);
14390 #ifdef LEAF_REG_REMAP
14391 if (crtl->uses_only_leaf_regs)
14392 {
14393 int leaf_reg = LEAF_REG_REMAP (regno);
14394 if (leaf_reg != -1)
14395 regno = (unsigned) leaf_reg;
14396 }
14397 #endif
14398 regno = DWARF_FRAME_REGNUM (regno);
14399
14400 HOST_WIDE_INT const_offset;
14401 if (!optimize && fde
14402 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14403 && offset.is_constant (&const_offset))
14404 {
14405 /* Use cfa+offset to represent the location of arguments passed
14406 on the stack when drap is used to align stack.
14407 Only do this when not optimizing, for optimized code var-tracking
14408 is supposed to track where the arguments live and the register
14409 used as vdrap or drap in some spot might be used for something
14410 else in another part of the routine. */
14411 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14412 }
14413
14414 result = new_reg_loc_descr (regno, offset);
14415
14416 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14417 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14418
14419 return result;
14420 }
14421
14422 /* Return true if this RTL expression describes a base+offset calculation. */
14423
14424 static inline int
14425 is_based_loc (const_rtx rtl)
14426 {
14427 return (GET_CODE (rtl) == PLUS
14428 && ((REG_P (XEXP (rtl, 0))
14429 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14430 && CONST_INT_P (XEXP (rtl, 1)))));
14431 }
14432
14433 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14434 failed. */
14435
14436 static dw_loc_descr_ref
14437 tls_mem_loc_descriptor (rtx mem)
14438 {
14439 tree base;
14440 dw_loc_descr_ref loc_result;
14441
14442 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14443 return NULL;
14444
14445 base = get_base_address (MEM_EXPR (mem));
14446 if (base == NULL
14447 || !VAR_P (base)
14448 || !DECL_THREAD_LOCAL_P (base))
14449 return NULL;
14450
14451 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14452 if (loc_result == NULL)
14453 return NULL;
14454
14455 if (maybe_ne (MEM_OFFSET (mem), 0))
14456 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14457
14458 return loc_result;
14459 }
14460
14461 /* Output debug info about the reason why we failed to expand an expression
14462 as a DWARF expression. */
14463
14464 static void
14465 expansion_failed (tree expr, rtx rtl, char const *reason)
14466 {
14467 if (dump_file && (dump_flags & TDF_DETAILS))
14468 {
14469 fprintf (dump_file, "Failed to expand as dwarf: ");
14470 if (expr)
14471 print_generic_expr (dump_file, expr, dump_flags);
14472 if (rtl)
14473 {
14474 fprintf (dump_file, "\n");
14475 print_rtl (dump_file, rtl);
14476 }
14477 fprintf (dump_file, "\nReason: %s\n", reason);
14478 }
14479 }
14480
14481 /* Helper function for const_ok_for_output. */
14482
14483 static bool
14484 const_ok_for_output_1 (rtx rtl)
14485 {
14486 if (targetm.const_not_ok_for_debug_p (rtl))
14487 {
14488 if (GET_CODE (rtl) != UNSPEC)
14489 {
14490 expansion_failed (NULL_TREE, rtl,
14491 "Expression rejected for debug by the backend.\n");
14492 return false;
14493 }
14494
14495 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14496 the target hook doesn't explicitly allow it in debug info, assume
14497 we can't express it in the debug info. */
14498 /* Don't complain about TLS UNSPECs, those are just too hard to
14499 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14500 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14501 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14502 if (flag_checking
14503 && (XVECLEN (rtl, 0) == 0
14504 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14505 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14506 inform (current_function_decl
14507 ? DECL_SOURCE_LOCATION (current_function_decl)
14508 : UNKNOWN_LOCATION,
14509 #if NUM_UNSPEC_VALUES > 0
14510 "non-delegitimized UNSPEC %s (%d) found in variable location",
14511 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14512 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14513 #else
14514 "non-delegitimized UNSPEC %d found in variable location",
14515 #endif
14516 XINT (rtl, 1));
14517 expansion_failed (NULL_TREE, rtl,
14518 "UNSPEC hasn't been delegitimized.\n");
14519 return false;
14520 }
14521
14522 if (CONST_POLY_INT_P (rtl))
14523 return false;
14524
14525 /* FIXME: Refer to PR60655. It is possible for simplification
14526 of rtl expressions in var tracking to produce such expressions.
14527 We should really identify / validate expressions
14528 enclosed in CONST that can be handled by assemblers on various
14529 targets and only handle legitimate cases here. */
14530 switch (GET_CODE (rtl))
14531 {
14532 case SYMBOL_REF:
14533 break;
14534 case NOT:
14535 case NEG:
14536 return false;
14537 case PLUS:
14538 {
14539 /* Make sure SYMBOL_REFs/UNSPECs are at most in one of the
14540 operands. */
14541 subrtx_var_iterator::array_type array;
14542 bool first = false;
14543 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14544 if (SYMBOL_REF_P (*iter)
14545 || LABEL_P (*iter)
14546 || GET_CODE (*iter) == UNSPEC)
14547 {
14548 first = true;
14549 break;
14550 }
14551 if (!first)
14552 return true;
14553 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 1), ALL)
14554 if (SYMBOL_REF_P (*iter)
14555 || LABEL_P (*iter)
14556 || GET_CODE (*iter) == UNSPEC)
14557 return false;
14558 return true;
14559 }
14560 case MINUS:
14561 {
14562 /* Disallow negation of SYMBOL_REFs or UNSPECs when they
14563 appear in the second operand of MINUS. */
14564 subrtx_var_iterator::array_type array;
14565 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 1), ALL)
14566 if (SYMBOL_REF_P (*iter)
14567 || LABEL_P (*iter)
14568 || GET_CODE (*iter) == UNSPEC)
14569 return false;
14570 return true;
14571 }
14572 default:
14573 return true;
14574 }
14575
14576 if (CONSTANT_POOL_ADDRESS_P (rtl))
14577 {
14578 bool marked;
14579 get_pool_constant_mark (rtl, &marked);
14580 /* If all references to this pool constant were optimized away,
14581 it was not output and thus we can't represent it. */
14582 if (!marked)
14583 {
14584 expansion_failed (NULL_TREE, rtl,
14585 "Constant was removed from constant pool.\n");
14586 return false;
14587 }
14588 }
14589
14590 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14591 return false;
14592
14593 /* Avoid references to external symbols in debug info: on several targets
14594 the linker might even refuse to link when linking a shared library,
14595 and in many other cases the relocations for .debug_info/.debug_loc are
14596 dropped, so the address becomes zero anyway. Hidden symbols, guaranteed
14597 to be defined within the same shared library or executable, are fine. */
14598 if (SYMBOL_REF_EXTERNAL_P (rtl))
14599 {
14600 tree decl = SYMBOL_REF_DECL (rtl);
14601
14602 if (decl == NULL || !targetm.binds_local_p (decl))
14603 {
14604 expansion_failed (NULL_TREE, rtl,
14605 "Symbol not defined in current TU.\n");
14606 return false;
14607 }
14608 }
14609
14610 return true;
14611 }
14612
14613 /* Return true if constant RTL can be emitted in DW_OP_addr or
14614 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14615 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
14616
14617 static bool
14618 const_ok_for_output (rtx rtl)
14619 {
14620 if (GET_CODE (rtl) == SYMBOL_REF)
14621 return const_ok_for_output_1 (rtl);
14622
14623 if (GET_CODE (rtl) == CONST)
14624 {
14625 subrtx_var_iterator::array_type array;
14626 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14627 if (!const_ok_for_output_1 (*iter))
14628 return false;
14629 return true;
14630 }
14631
14632 return true;
14633 }
14634
14635 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14636 if possible, NULL otherwise. */
14637
14638 static dw_die_ref
14639 base_type_for_mode (machine_mode mode, bool unsignedp)
14640 {
14641 dw_die_ref type_die;
14642 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14643
14644 if (type == NULL)
14645 return NULL;
14646 switch (TREE_CODE (type))
14647 {
14648 case INTEGER_TYPE:
14649 case REAL_TYPE:
14650 break;
14651 default:
14652 return NULL;
14653 }
14654 type_die = lookup_type_die (type);
14655 if (!type_die)
14656 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14657 comp_unit_die ());
14658 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14659 return NULL;
14660 return type_die;
14661 }
14662
14663 /* For OP descriptor assumed to be in unsigned MODE, convert it to an unsigned
14664 type matching MODE, or, if MODE is narrower than or as wide as
14665 DWARF2_ADDR_SIZE, untyped. Return NULL if the conversion is not
14666 possible. */
14667
14668 static dw_loc_descr_ref
14669 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14670 {
14671 machine_mode outer_mode = mode;
14672 dw_die_ref type_die;
14673 dw_loc_descr_ref cvt;
14674
14675 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14676 {
14677 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14678 return op;
14679 }
14680 type_die = base_type_for_mode (outer_mode, 1);
14681 if (type_die == NULL)
14682 return NULL;
14683 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14684 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14685 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14686 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14687 add_loc_descr (&op, cvt);
14688 return op;
14689 }
14690
14691 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14692
14693 static dw_loc_descr_ref
14694 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14695 dw_loc_descr_ref op1)
14696 {
14697 dw_loc_descr_ref ret = op0;
14698 add_loc_descr (&ret, op1);
14699 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14700 if (STORE_FLAG_VALUE != 1)
14701 {
14702 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14703 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14704 }
14705 return ret;
14706 }
14707
14708 /* Subroutine of scompare_loc_descriptor for the case in which we're
14709 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14710 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14711
14712 static dw_loc_descr_ref
14713 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14714 scalar_int_mode op_mode,
14715 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14716 {
14717 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14718 dw_loc_descr_ref cvt;
14719
14720 if (type_die == NULL)
14721 return NULL;
14722 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14723 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14724 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14725 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14726 add_loc_descr (&op0, cvt);
14727 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14728 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14729 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14730 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14731 add_loc_descr (&op1, cvt);
14732 return compare_loc_descriptor (op, op0, op1);
14733 }
14734
14735 /* Subroutine of scompare_loc_descriptor for the case in which we're
14736 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14737 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14738
14739 static dw_loc_descr_ref
14740 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14741 scalar_int_mode op_mode,
14742 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14743 {
14744 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14745 /* For eq/ne, if the operands are known to be zero-extended,
14746 there is no need to do the fancy shifting up. */
14747 if (op == DW_OP_eq || op == DW_OP_ne)
14748 {
14749 dw_loc_descr_ref last0, last1;
14750 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14751 ;
14752 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14753 ;
14754 /* deref_size zero extends, and for constants we can check
14755 whether they are zero extended or not. */
14756 if (((last0->dw_loc_opc == DW_OP_deref_size
14757 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14758 || (CONST_INT_P (XEXP (rtl, 0))
14759 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14760 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14761 && ((last1->dw_loc_opc == DW_OP_deref_size
14762 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14763 || (CONST_INT_P (XEXP (rtl, 1))
14764 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14765 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14766 return compare_loc_descriptor (op, op0, op1);
14767
14768 /* EQ/NE comparison against constant in narrower type than
14769 DWARF2_ADDR_SIZE can be performed either as
14770 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14771 DW_OP_{eq,ne}
14772 or
14773 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14774 DW_OP_{eq,ne}. Pick whatever is shorter. */
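/* For instance, comparing a QImode value against 5 with a 4-byte
DWARF2_ADDR_SIZE can use DW_OP_const1u <0xff> DW_OP_and DW_OP_lit5
DW_OP_{eq,ne} instead of materializing both the shift count 24 and the
constant 5 << 24; the size comparison below picks the smaller form. */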
14775 if (CONST_INT_P (XEXP (rtl, 1))
14776 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14777 && (size_of_int_loc_descriptor (shift) + 1
14778 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14779 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14780 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14781 & GET_MODE_MASK (op_mode))))
14782 {
14783 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14784 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14785 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14786 & GET_MODE_MASK (op_mode));
14787 return compare_loc_descriptor (op, op0, op1);
14788 }
14789 }
14790 add_loc_descr (&op0, int_loc_descriptor (shift));
14791 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14792 if (CONST_INT_P (XEXP (rtl, 1)))
14793 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14794 else
14795 {
14796 add_loc_descr (&op1, int_loc_descriptor (shift));
14797 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14798 }
14799 return compare_loc_descriptor (op, op0, op1);
14800 }
14801
14802 /* Return location descriptor for signed comparison OP RTL. */
14803
14804 static dw_loc_descr_ref
14805 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14806 machine_mode mem_mode)
14807 {
14808 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14809 dw_loc_descr_ref op0, op1;
14810
14811 if (op_mode == VOIDmode)
14812 op_mode = GET_MODE (XEXP (rtl, 1));
14813 if (op_mode == VOIDmode)
14814 return NULL;
14815
14816 scalar_int_mode int_op_mode;
14817 if (dwarf_strict
14818 && dwarf_version < 5
14819 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14820 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14821 return NULL;
14822
14823 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14824 VAR_INIT_STATUS_INITIALIZED);
14825 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14826 VAR_INIT_STATUS_INITIALIZED);
14827
14828 if (op0 == NULL || op1 == NULL)
14829 return NULL;
14830
14831 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14832 {
14833 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14834 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14835
14836 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14837 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14838 }
14839 return compare_loc_descriptor (op, op0, op1);
14840 }
14841
14842 /* Return location descriptor for unsigned comparison OP RTL. */
14843
14844 static dw_loc_descr_ref
14845 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14846 machine_mode mem_mode)
14847 {
14848 dw_loc_descr_ref op0, op1;
14849
14850 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14851 if (test_op_mode == VOIDmode)
14852 test_op_mode = GET_MODE (XEXP (rtl, 1));
14853
14854 scalar_int_mode op_mode;
14855 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14856 return NULL;
14857
14858 if (dwarf_strict
14859 && dwarf_version < 5
14860 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14861 return NULL;
14862
14863 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14864 VAR_INIT_STATUS_INITIALIZED);
14865 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14866 VAR_INIT_STATUS_INITIALIZED);
14867
14868 if (op0 == NULL || op1 == NULL)
14869 return NULL;
14870
14871 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14872 {
14873 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14874 dw_loc_descr_ref last0, last1;
14875 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14876 ;
14877 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14878 ;
14879 if (CONST_INT_P (XEXP (rtl, 0)))
14880 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14881 /* deref_size zero extends, so no need to mask it again. */
14882 else if (last0->dw_loc_opc != DW_OP_deref_size
14883 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14884 {
14885 add_loc_descr (&op0, int_loc_descriptor (mask));
14886 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14887 }
14888 if (CONST_INT_P (XEXP (rtl, 1)))
14889 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14890 /* deref_size zero extends, so no need to mask it again. */
14891 else if (last1->dw_loc_opc != DW_OP_deref_size
14892 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14893 {
14894 add_loc_descr (&op1, int_loc_descriptor (mask));
14895 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14896 }
14897 }
14898 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14899 {
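/* Bias both operands by 2 ** (DWARF2_ADDR_SIZE * 8 - 1) so that the
signed comparison emitted below orders them like an unsigned one would;
e.g. with a 4-byte address size, 0xffffffff and 0 become 0x7fffffff
(INT_MAX) and 0x80000000 (INT_MIN) after the wrapping addition. */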
14900 HOST_WIDE_INT bias = 1;
14901 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14902 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14903 if (CONST_INT_P (XEXP (rtl, 1)))
14904 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14905 + INTVAL (XEXP (rtl, 1)));
14906 else
14907 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14908 bias, 0));
14909 }
14910 return compare_loc_descriptor (op, op0, op1);
14911 }
14912
14913 /* Return location descriptor for {U,S}{MIN,MAX}. */
14914
14915 static dw_loc_descr_ref
14916 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14917 machine_mode mem_mode)
14918 {
14919 enum dwarf_location_atom op;
14920 dw_loc_descr_ref op0, op1, ret;
14921 dw_loc_descr_ref bra_node, drop_node;
14922
14923 scalar_int_mode int_mode;
14924 if (dwarf_strict
14925 && dwarf_version < 5
14926 && (!is_a <scalar_int_mode> (mode, &int_mode)
14927 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14928 return NULL;
14929
14930 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14931 VAR_INIT_STATUS_INITIALIZED);
14932 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14933 VAR_INIT_STATUS_INITIALIZED);
14934
14935 if (op0 == NULL || op1 == NULL)
14936 return NULL;
14937
14938 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14939 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14940 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14941 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14942 {
14943 /* Checked by the caller. */
14944 int_mode = as_a <scalar_int_mode> (mode);
14945 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14946 {
14947 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14948 add_loc_descr (&op0, int_loc_descriptor (mask));
14949 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14950 add_loc_descr (&op1, int_loc_descriptor (mask));
14951 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14952 }
14953 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14954 {
14955 HOST_WIDE_INT bias = 1;
14956 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14957 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14958 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14959 }
14960 }
14961 else if (is_a <scalar_int_mode> (mode, &int_mode)
14962 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14963 {
14964 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14965 add_loc_descr (&op0, int_loc_descriptor (shift));
14966 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14967 add_loc_descr (&op1, int_loc_descriptor (shift));
14968 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14969 }
14970 else if (is_a <scalar_int_mode> (mode, &int_mode)
14971 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14972 {
14973 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14974 dw_loc_descr_ref cvt;
14975 if (type_die == NULL)
14976 return NULL;
14977 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14978 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14979 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14980 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14981 add_loc_descr (&op0, cvt);
14982 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14983 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14984 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14985 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14986 add_loc_descr (&op1, cvt);
14987 }
14988
14989 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14990 op = DW_OP_lt;
14991 else
14992 op = DW_OP_gt;
14993 ret = op0;
14994 add_loc_descr (&ret, op1);
14995 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14996 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14997 add_loc_descr (&ret, bra_node);
14998 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14999 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
15000 add_loc_descr (&ret, drop_node);
15001 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
15002 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
15003 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
15004 && is_a <scalar_int_mode> (mode, &int_mode)
15005 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15006 ret = convert_descriptor_to_mode (int_mode, ret);
15007 return ret;
15008 }
15009
15010 /* Helper function for mem_loc_descriptor. Perform the binary operation OP
15011 on operands converted to TYPE_DIE, then convert the result back to an
15012 unsigned type matching MODE (untyped if MODE is no wider than DWARF2_ADDR_SIZE). */
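/* For example, with a 4-byte DWARF address size a DImode operation is
emitted as op0 DW_OP_convert <TYPE_DIE> op1 DW_OP_convert <TYPE_DIE> OP,
and convert_descriptor_to_mode then appends the conversion back to an
unsigned (or untyped) result. */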
15013
15014 static dw_loc_descr_ref
15015 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
15016 scalar_int_mode mode, machine_mode mem_mode)
15017 {
15018 dw_loc_descr_ref cvt, op0, op1;
15019
15020 if (type_die == NULL)
15021 return NULL;
15022 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15023 VAR_INIT_STATUS_INITIALIZED);
15024 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15025 VAR_INIT_STATUS_INITIALIZED);
15026 if (op0 == NULL || op1 == NULL)
15027 return NULL;
15028 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15029 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15030 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15031 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15032 add_loc_descr (&op0, cvt);
15033 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15034 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15035 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15036 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15037 add_loc_descr (&op1, cvt);
15038 add_loc_descr (&op0, op1);
15039 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
15040 return convert_descriptor_to_mode (mode, op0);
15041 }
15042
15043 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
15044 const0 is DW_OP_lit0 or corresponding typed constant,
15045 const1 is DW_OP_lit1 or corresponding typed constant
15046 and constMSB is constant with just the MSB bit set
15047 for the mode):
15048 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
15049 L1: const0 DW_OP_swap
15050 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
15051 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15052 L3: DW_OP_drop
15053 L4: DW_OP_nop
15054
15055 CTZ is similar:
15056 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
15057 L1: const0 DW_OP_swap
15058 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
15059 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15060 L3: DW_OP_drop
15061 L4: DW_OP_nop
15062
15063 FFS is similar:
15064 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
15065 L1: const1 DW_OP_swap
15066 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
15067 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15068 L3: DW_OP_drop
15069 L4: DW_OP_nop */
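/* As an illustration, CLZ of 0x10 in a 32-bit mode shifts the value left
27 times, incrementing the counter on each pass through L2, until the
MSB becomes set, leaving 27 on the stack; a zero input skips the loop
entirely and pushes constV instead. */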
15070
15071 static dw_loc_descr_ref
15072 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
15073 machine_mode mem_mode)
15074 {
15075 dw_loc_descr_ref op0, ret, tmp;
15076 HOST_WIDE_INT valv;
15077 dw_loc_descr_ref l1jump, l1label;
15078 dw_loc_descr_ref l2jump, l2label;
15079 dw_loc_descr_ref l3jump, l3label;
15080 dw_loc_descr_ref l4jump, l4label;
15081 rtx msb;
15082
15083 if (GET_MODE (XEXP (rtl, 0)) != mode)
15084 return NULL;
15085
15086 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15087 VAR_INIT_STATUS_INITIALIZED);
15088 if (op0 == NULL)
15089 return NULL;
15090 ret = op0;
15091 if (GET_CODE (rtl) == CLZ)
15092 {
15093 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15094 valv = GET_MODE_BITSIZE (mode);
15095 }
15096 else if (GET_CODE (rtl) == FFS)
15097 valv = 0;
15098 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15099 valv = GET_MODE_BITSIZE (mode);
15100 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15101 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
15102 add_loc_descr (&ret, l1jump);
15103 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15104 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
15105 VAR_INIT_STATUS_INITIALIZED);
15106 if (tmp == NULL)
15107 return NULL;
15108 add_loc_descr (&ret, tmp);
15109 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
15110 add_loc_descr (&ret, l4jump);
15111 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
15112 ? const1_rtx : const0_rtx,
15113 mode, mem_mode,
15114 VAR_INIT_STATUS_INITIALIZED);
15115 if (l1label == NULL)
15116 return NULL;
15117 add_loc_descr (&ret, l1label);
15118 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15119 l2label = new_loc_descr (DW_OP_dup, 0, 0);
15120 add_loc_descr (&ret, l2label);
15121 if (GET_CODE (rtl) != CLZ)
15122 msb = const1_rtx;
15123 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
15124 msb = GEN_INT (HOST_WIDE_INT_1U
15125 << (GET_MODE_BITSIZE (mode) - 1));
15126 else
15127 msb = immed_wide_int_const
15128 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
15129 GET_MODE_PRECISION (mode)), mode);
15130 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
15131 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15132 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
15133 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
15134 else
15135 tmp = mem_loc_descriptor (msb, mode, mem_mode,
15136 VAR_INIT_STATUS_INITIALIZED);
15137 if (tmp == NULL)
15138 return NULL;
15139 add_loc_descr (&ret, tmp);
15140 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15141 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
15142 add_loc_descr (&ret, l3jump);
15143 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15144 VAR_INIT_STATUS_INITIALIZED);
15145 if (tmp == NULL)
15146 return NULL;
15147 add_loc_descr (&ret, tmp);
15148 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
15149 ? DW_OP_shl : DW_OP_shr, 0, 0));
15150 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15151 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
15152 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15153 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
15154 add_loc_descr (&ret, l2jump);
15155 l3label = new_loc_descr (DW_OP_drop, 0, 0);
15156 add_loc_descr (&ret, l3label);
15157 l4label = new_loc_descr (DW_OP_nop, 0, 0);
15158 add_loc_descr (&ret, l4label);
15159 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15160 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15161 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15162 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15163 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15164 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
15165 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15166 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
15167 return ret;
15168 }
15169
15170 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
15171 const1 is DW_OP_lit1 or corresponding typed constant):
15172 const0 DW_OP_swap
15173 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15174 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15175 L2: DW_OP_drop
15176
15177 PARITY is similar:
15178 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15179 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15180 L2: DW_OP_drop */
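/* Each pass is meant to add the low bit of the value to the accumulator
(POPCOUNT) or XOR it in (PARITY) and then shift the value right by one;
e.g. an input of 0b1011 accumulates 1 + 1 + 0 + 1 == 3, and its parity
is 1. */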
15181
15182 static dw_loc_descr_ref
15183 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
15184 machine_mode mem_mode)
15185 {
15186 dw_loc_descr_ref op0, ret, tmp;
15187 dw_loc_descr_ref l1jump, l1label;
15188 dw_loc_descr_ref l2jump, l2label;
15189
15190 if (GET_MODE (XEXP (rtl, 0)) != mode)
15191 return NULL;
15192
15193 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15194 VAR_INIT_STATUS_INITIALIZED);
15195 if (op0 == NULL)
15196 return NULL;
15197 ret = op0;
15198 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15199 VAR_INIT_STATUS_INITIALIZED);
15200 if (tmp == NULL)
15201 return NULL;
15202 add_loc_descr (&ret, tmp);
15203 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15204 l1label = new_loc_descr (DW_OP_dup, 0, 0);
15205 add_loc_descr (&ret, l1label);
15206 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15207 add_loc_descr (&ret, l2jump);
15208 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15209 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15210 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15211 VAR_INIT_STATUS_INITIALIZED);
15212 if (tmp == NULL)
15213 return NULL;
15214 add_loc_descr (&ret, tmp);
15215 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15216 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15217 ? DW_OP_plus : DW_OP_xor, 0, 0));
15218 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15219 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15220 VAR_INIT_STATUS_INITIALIZED);
15221 add_loc_descr (&ret, tmp);
15222 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15223 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15224 add_loc_descr (&ret, l1jump);
15225 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15226 add_loc_descr (&ret, l2label);
15227 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15228 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15229 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15230 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15231 return ret;
15232 }
15233
15234 /* BSWAP (constS is initial shift count, either 56 or 24):
15235 constS const0
15236 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15237 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15238 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15239 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15240 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
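/* For example, a 32-bit input starts with constS == 24 and the loop runs
with shift counts 24, 16, 8 and 0, extracting one byte per pass and
ORing it into the mirrored position, so 0x11223344 becomes 0x44332211. */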
15241
15242 static dw_loc_descr_ref
15243 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15244 machine_mode mem_mode)
15245 {
15246 dw_loc_descr_ref op0, ret, tmp;
15247 dw_loc_descr_ref l1jump, l1label;
15248 dw_loc_descr_ref l2jump, l2label;
15249
15250 if (BITS_PER_UNIT != 8
15251 || (GET_MODE_BITSIZE (mode) != 32
15252 && GET_MODE_BITSIZE (mode) != 64))
15253 return NULL;
15254
15255 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15256 VAR_INIT_STATUS_INITIALIZED);
15257 if (op0 == NULL)
15258 return NULL;
15259
15260 ret = op0;
15261 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15262 mode, mem_mode,
15263 VAR_INIT_STATUS_INITIALIZED);
15264 if (tmp == NULL)
15265 return NULL;
15266 add_loc_descr (&ret, tmp);
15267 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15268 VAR_INIT_STATUS_INITIALIZED);
15269 if (tmp == NULL)
15270 return NULL;
15271 add_loc_descr (&ret, tmp);
15272 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15273 add_loc_descr (&ret, l1label);
15274 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15275 mode, mem_mode,
15276 VAR_INIT_STATUS_INITIALIZED);
15277 add_loc_descr (&ret, tmp);
15278 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15279 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15280 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15281 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15282 VAR_INIT_STATUS_INITIALIZED);
15283 if (tmp == NULL)
15284 return NULL;
15285 add_loc_descr (&ret, tmp);
15286 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15287 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15288 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15289 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15290 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15291 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15292 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15293 VAR_INIT_STATUS_INITIALIZED);
15294 add_loc_descr (&ret, tmp);
15295 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15296 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15297 add_loc_descr (&ret, l2jump);
15298 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15299 VAR_INIT_STATUS_INITIALIZED);
15300 add_loc_descr (&ret, tmp);
15301 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15302 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15303 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15304 add_loc_descr (&ret, l1jump);
15305 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15306 add_loc_descr (&ret, l2label);
15307 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15308 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15309 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15310 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15311 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15312 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15313 return ret;
15314 }
15315
15316 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15317 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15318 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15319 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15320
15321 ROTATERT is similar:
15322 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15323 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15324 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
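/* For instance, an 8-bit left rotate of 0xb1 by 4 computes
((0xb1 << 4) & 0xff) | (0xb1 >> 4) == 0x1b; the bracketed constMASK
DW_OP_and steps apply only when the mode is narrower than the DWARF
address size, keeping intermediate results within the mode. */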
15325
15326 static dw_loc_descr_ref
15327 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15328 machine_mode mem_mode)
15329 {
15330 rtx rtlop1 = XEXP (rtl, 1);
15331 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15332 int i;
15333
15334 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15335 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15336 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15337 VAR_INIT_STATUS_INITIALIZED);
15338 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15339 VAR_INIT_STATUS_INITIALIZED);
15340 if (op0 == NULL || op1 == NULL)
15341 return NULL;
15342 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15343 for (i = 0; i < 2; i++)
15344 {
15345 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15346 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15347 mode, mem_mode,
15348 VAR_INIT_STATUS_INITIALIZED);
15349 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15350 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15351 ? DW_OP_const4u
15352 : HOST_BITS_PER_WIDE_INT == 64
15353 ? DW_OP_const8u : DW_OP_constu,
15354 GET_MODE_MASK (mode), 0);
15355 else
15356 mask[i] = NULL;
15357 if (mask[i] == NULL)
15358 return NULL;
15359 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15360 }
15361 ret = op0;
15362 add_loc_descr (&ret, op1);
15363 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15364 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15365 if (GET_CODE (rtl) == ROTATERT)
15366 {
15367 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15368 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15369 GET_MODE_BITSIZE (mode), 0));
15370 }
15371 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15372 if (mask[0] != NULL)
15373 add_loc_descr (&ret, mask[0]);
15374 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15375 if (mask[1] != NULL)
15376 {
15377 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15378 add_loc_descr (&ret, mask[1]);
15379 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15380 }
15381 if (GET_CODE (rtl) == ROTATE)
15382 {
15383 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15384 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15385 GET_MODE_BITSIZE (mode), 0));
15386 }
15387 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15388 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15389 return ret;
15390 }
15391
15392 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15393 for DEBUG_PARAMETER_REF RTL. */
15394
15395 static dw_loc_descr_ref
15396 parameter_ref_descriptor (rtx rtl)
15397 {
15398 dw_loc_descr_ref ret;
15399 dw_die_ref ref;
15400
15401 if (dwarf_strict)
15402 return NULL;
15403 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15404 /* With LTO during LTRANS we get the late DIE that refers to the early
15405 DIE, thus we add another indirection here. This seems to confuse
15406 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15407 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15408 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15409 if (ref)
15410 {
15411 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15412 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15413 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15414 }
15415 else
15416 {
15417 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15418 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15419 }
15420 return ret;
15421 }
15422
15423 /* The following routine converts the RTL for a variable or parameter
15424 (resident in memory) into an equivalent Dwarf representation of a
15425 mechanism for getting the address of that same variable onto the top of a
15426 hypothetical "address evaluation" stack.
15427
15428 When creating memory location descriptors, we are effectively transforming
15429 the RTL for a memory-resident object into its Dwarf postfix expression
15430 equivalent. This routine recursively descends an RTL tree, turning
15431 it into Dwarf postfix code as it goes.
15432
15433 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15434
15435 MEM_MODE is the mode of the memory reference, needed to handle some
15436 autoincrement addressing modes.
15437
15438 Return 0 if we can't represent the location. */
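/* For instance, the address inside (mem:SI (plus (reg fp) (const_int -8)))
is typically rendered as a base register operation such as
DW_OP_breg<N> <-8>, or DW_OP_fbreg <-8> once the register has been
eliminated to the frame base, with the MEM case then appending the
dereference. */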
15439
15440 dw_loc_descr_ref
15441 mem_loc_descriptor (rtx rtl, machine_mode mode,
15442 machine_mode mem_mode,
15443 enum var_init_status initialized)
15444 {
15445 dw_loc_descr_ref mem_loc_result = NULL;
15446 enum dwarf_location_atom op;
15447 dw_loc_descr_ref op0, op1;
15448 rtx inner = NULL_RTX;
15449 poly_int64 offset;
15450
15451 if (mode == VOIDmode)
15452 mode = GET_MODE (rtl);
15453
15454 /* Note that for a dynamically sized array, the location we will generate a
15455 description of here will be the lowest numbered location which is
15456 actually within the array. That's *not* necessarily the same as the
15457 zeroth element of the array. */
15458
15459 rtl = targetm.delegitimize_address (rtl);
15460
15461 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15462 return NULL;
15463
15464 scalar_int_mode int_mode = BImode, inner_mode, op1_mode;
15465 switch (GET_CODE (rtl))
15466 {
15467 case POST_INC:
15468 case POST_DEC:
15469 case POST_MODIFY:
15470 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15471
15472 case SUBREG:
15473 /* The case of a subreg may arise when we have a local (register)
15474 variable or a formal (register) parameter which doesn't quite fill
15475 up an entire register. For now, just assume that it is
15476 legitimate to make the Dwarf info refer to the whole register which
15477 contains the given subreg. */
15478 if (!subreg_lowpart_p (rtl))
15479 break;
15480 inner = SUBREG_REG (rtl);
15481 /* FALLTHRU */
15482 case TRUNCATE:
15483 if (inner == NULL_RTX)
15484 inner = XEXP (rtl, 0);
15485 if (is_a <scalar_int_mode> (mode, &int_mode)
15486 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15487 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15488 #ifdef POINTERS_EXTEND_UNSIGNED
15489 || (int_mode == Pmode && mem_mode != VOIDmode)
15490 #endif
15491 )
15492 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15493 {
15494 mem_loc_result = mem_loc_descriptor (inner,
15495 inner_mode,
15496 mem_mode, initialized);
15497 break;
15498 }
15499 if (dwarf_strict && dwarf_version < 5)
15500 break;
15501 if (is_a <scalar_int_mode> (mode, &int_mode)
15502 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15503 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15504 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15505 {
15506 dw_die_ref type_die;
15507 dw_loc_descr_ref cvt;
15508
15509 mem_loc_result = mem_loc_descriptor (inner,
15510 GET_MODE (inner),
15511 mem_mode, initialized);
15512 if (mem_loc_result == NULL)
15513 break;
15514 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15515 if (type_die == NULL)
15516 {
15517 mem_loc_result = NULL;
15518 break;
15519 }
15520 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15521 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15522 else
15523 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15524 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15525 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15526 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15527 add_loc_descr (&mem_loc_result, cvt);
15528 if (is_a <scalar_int_mode> (mode, &int_mode)
15529 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15530 {
15531 /* Convert it to untyped afterwards. */
15532 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15533 add_loc_descr (&mem_loc_result, cvt);
15534 }
15535 }
15536 break;
15537
15538 case REG:
15539 if (!is_a <scalar_int_mode> (mode, &int_mode)
15540 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15541 && rtl != arg_pointer_rtx
15542 && rtl != frame_pointer_rtx
15543 #ifdef POINTERS_EXTEND_UNSIGNED
15544 && (int_mode != Pmode || mem_mode == VOIDmode)
15545 #endif
15546 ))
15547 {
15548 dw_die_ref type_die;
15549 unsigned int dbx_regnum;
15550
15551 if (dwarf_strict && dwarf_version < 5)
15552 break;
15553 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
15554 break;
15555 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15556 if (type_die == NULL)
15557 break;
15558
15559 dbx_regnum = dbx_reg_number (rtl);
15560 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15561 break;
15562 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15563 dbx_regnum, 0);
15564 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15565 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15566 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15567 break;
15568 }
15569 /* Whenever a register number forms a part of the description of the
15570 method for calculating the (dynamic) address of a memory resident
15571 object, DWARF rules require the register number be referred to as
15572 a "base register". This distinction is not based in any way upon
15573 what category of register the hardware believes the given register
15574 belongs to. This is strictly DWARF terminology we're dealing with
15575 here. Note that in cases where the location of a memory-resident
15576 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15577 OP_CONST (0)) the actual DWARF location descriptor that we generate
15578 may just be OP_BASEREG (basereg). This may look deceptively like
15579 the object in question was allocated to a register (rather than in
15580 memory) so DWARF consumers need to be aware of the subtle
15581 distinction between OP_REG and OP_BASEREG. */
15582 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15583 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15584 else if (stack_realign_drap
15585 && crtl->drap_reg
15586 && crtl->args.internal_arg_pointer == rtl
15587 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15588 {
15589 /* If RTL is internal_arg_pointer, which has been optimized
15590 out, use DRAP instead. */
15591 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15592 VAR_INIT_STATUS_INITIALIZED);
15593 }
15594 break;
15595
15596 case SIGN_EXTEND:
15597 case ZERO_EXTEND:
15598 if (!is_a <scalar_int_mode> (mode, &int_mode)
15599 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15600 break;
15601 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15602 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15603 if (op0 == 0)
15604 break;
15605 else if (GET_CODE (rtl) == ZERO_EXTEND
15606 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15607 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15608 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15609 to expand zero extend as two shifts instead of
15610 masking. */
15611 && GET_MODE_SIZE (inner_mode) <= 4)
15612 {
15613 mem_loc_result = op0;
15614 add_loc_descr (&mem_loc_result,
15615 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15616 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15617 }
15618 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15619 {
15620 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15621 shift *= BITS_PER_UNIT;
15622 if (GET_CODE (rtl) == SIGN_EXTEND)
15623 op = DW_OP_shra;
15624 else
15625 op = DW_OP_shr;
15626 mem_loc_result = op0;
15627 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15628 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15629 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15630 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15631 }
15632 else if (!dwarf_strict || dwarf_version >= 5)
15633 {
15634 dw_die_ref type_die1, type_die2;
15635 dw_loc_descr_ref cvt;
15636
15637 type_die1 = base_type_for_mode (inner_mode,
15638 GET_CODE (rtl) == ZERO_EXTEND);
15639 if (type_die1 == NULL)
15640 break;
15641 type_die2 = base_type_for_mode (int_mode, 1);
15642 if (type_die2 == NULL)
15643 break;
15644 mem_loc_result = op0;
15645 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15646 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15647 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15648 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15649 add_loc_descr (&mem_loc_result, cvt);
15650 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15651 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15652 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15653 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15654 add_loc_descr (&mem_loc_result, cvt);
15655 }
15656 break;
15657
15658 case MEM:
15659 {
15660 rtx new_rtl = avoid_constant_pool_reference (rtl);
15661 if (new_rtl != rtl)
15662 {
15663 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15664 initialized);
15665 if (mem_loc_result != NULL)
15666 return mem_loc_result;
15667 }
15668 }
15669 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15670 get_address_mode (rtl), mode,
15671 VAR_INIT_STATUS_INITIALIZED);
15672 if (mem_loc_result == NULL)
15673 mem_loc_result = tls_mem_loc_descriptor (rtl);
15674 if (mem_loc_result != NULL)
15675 {
15676 if (!is_a <scalar_int_mode> (mode, &int_mode)
15677 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15678 {
15679 dw_die_ref type_die;
15680 dw_loc_descr_ref deref;
15681 HOST_WIDE_INT size;
15682
15683 if (dwarf_strict && dwarf_version < 5)
15684 return NULL;
15685 if (!GET_MODE_SIZE (mode).is_constant (&size))
15686 return NULL;
15687 type_die
15688 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15689 if (type_die == NULL)
15690 return NULL;
15691 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15692 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15693 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15694 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15695 add_loc_descr (&mem_loc_result, deref);
15696 }
15697 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15698 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15699 else
15700 add_loc_descr (&mem_loc_result,
15701 new_loc_descr (DW_OP_deref_size,
15702 GET_MODE_SIZE (int_mode), 0));
15703 }
15704 break;
15705
15706 case LO_SUM:
15707 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15708
15709 case LABEL_REF:
15710 /* Some ports can transform a symbol ref into a label ref, because
15711 the symbol ref is too far away and has to be dumped into a constant
15712 pool. */
15713 case CONST:
15714 case SYMBOL_REF:
15715 case UNSPEC:
15716 if (!is_a <scalar_int_mode> (mode, &int_mode)
15717 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15718 #ifdef POINTERS_EXTEND_UNSIGNED
15719 && (int_mode != Pmode || mem_mode == VOIDmode)
15720 #endif
15721 ))
15722 break;
15723
15724 if (GET_CODE (rtl) == UNSPEC)
15725 {
15726 /* If delegitimize_address couldn't do anything with the UNSPEC, we
15727 can't express it in the debug info. This can happen e.g. with some
15728 TLS UNSPECs. Allow UNSPECs formerly from CONST that the backend
15729 approves. */
15730 bool not_ok = false;
15731 subrtx_var_iterator::array_type array;
15732 FOR_EACH_SUBRTX_VAR (iter, array, rtl, ALL)
15733 if (*iter != rtl && !CONSTANT_P (*iter))
15734 {
15735 not_ok = true;
15736 break;
15737 }
15738
15739 if (not_ok)
15740 break;
15741
15742 FOR_EACH_SUBRTX_VAR (iter, array, rtl, ALL)
15743 if (!const_ok_for_output_1 (*iter))
15744 {
15745 not_ok = true;
15746 break;
15747 }
15748
15749 if (not_ok)
15750 break;
15751
15752 rtl = gen_rtx_CONST (GET_MODE (rtl), rtl);
15753 goto symref;
15754 }
15755
15756 if (GET_CODE (rtl) == SYMBOL_REF
15757 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15758 {
15759 dw_loc_descr_ref temp;
15760
15761 /* If this is not defined, we have no way to emit the data. */
15762 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15763 break;
15764
15765 temp = new_addr_loc_descr (rtl, dtprel_true);
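/* TEMP pushes the DTP-relative offset of the symbol (emitted through
targetm.asm_out.output_dwarf_dtprel); the TLS opcode added below turns
that offset into an address in the running thread's TLS block. */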
15766
15767 /* We check for DWARF 5 here because gdb did not implement
15768 DW_OP_form_tls_address until after 7.12. */
15769 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15770 ? DW_OP_form_tls_address
15771 : DW_OP_GNU_push_tls_address),
15772 0, 0);
15773 add_loc_descr (&mem_loc_result, temp);
15774
15775 break;
15776 }
15777
15778 if (!const_ok_for_output (rtl))
15779 {
15780 if (GET_CODE (rtl) == CONST)
15781 switch (GET_CODE (XEXP (rtl, 0)))
15782 {
15783 case NOT:
15784 op = DW_OP_not;
15785 goto try_const_unop;
15786 case NEG:
15787 op = DW_OP_neg;
15788 goto try_const_unop;
15789 try_const_unop:
15790 rtx arg;
15791 arg = XEXP (XEXP (rtl, 0), 0);
15792 if (!CONSTANT_P (arg))
15793 arg = gen_rtx_CONST (int_mode, arg);
15794 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15795 initialized);
15796 if (op0)
15797 {
15798 mem_loc_result = op0;
15799 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15800 }
15801 break;
15802 default:
15803 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15804 mem_mode, initialized);
15805 break;
15806 }
15807 break;
15808 }
15809
15810 symref:
15811 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15812 vec_safe_push (used_rtx_array, rtl);
15813 break;
15814
15815 case CONCAT:
15816 case CONCATN:
15817 case VAR_LOCATION:
15818 case DEBUG_IMPLICIT_PTR:
15819 expansion_failed (NULL_TREE, rtl,
15820 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15821 return 0;
15822
15823 case ENTRY_VALUE:
15824 if (dwarf_strict && dwarf_version < 5)
15825 return NULL;
15826 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15827 {
15828 if (!is_a <scalar_int_mode> (mode, &int_mode)
15829 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15830 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15831 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15832 else
15833 {
15834 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15835 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15836 return NULL;
15837 op0 = one_reg_loc_descriptor (dbx_regnum,
15838 VAR_INIT_STATUS_INITIALIZED);
15839 }
15840 }
15841 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15842 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15843 {
15844 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15845 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15846 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15847 return NULL;
15848 }
15849 else
15850 gcc_unreachable ();
15851 if (op0 == NULL)
15852 return NULL;
15853 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15854 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15855 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15856 break;
15857
15858 case DEBUG_PARAMETER_REF:
15859 mem_loc_result = parameter_ref_descriptor (rtl);
15860 break;
15861
15862 case PRE_MODIFY:
15863 /* Extract the PLUS expression nested inside and fall into
15864 PLUS code below. */
15865 rtl = XEXP (rtl, 1);
15866 goto plus;
15867
15868 case PRE_INC:
15869 case PRE_DEC:
15870 /* Turn these into a PLUS expression and fall into the PLUS code
15871 below. */
15872 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15873 gen_int_mode (GET_CODE (rtl) == PRE_INC
15874 ? GET_MODE_UNIT_SIZE (mem_mode)
15875 : -GET_MODE_UNIT_SIZE (mem_mode),
15876 mode));
15877
15878 /* fall through */
15879
15880 case PLUS:
15881 plus:
15882 if (is_based_loc (rtl)
15883 && is_a <scalar_int_mode> (mode, &int_mode)
15884 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15885 || XEXP (rtl, 0) == arg_pointer_rtx
15886 || XEXP (rtl, 0) == frame_pointer_rtx))
15887 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15888 INTVAL (XEXP (rtl, 1)),
15889 VAR_INIT_STATUS_INITIALIZED);
15890 else
15891 {
15892 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15893 VAR_INIT_STATUS_INITIALIZED);
15894 if (mem_loc_result == 0)
15895 break;
15896
15897 if (CONST_INT_P (XEXP (rtl, 1))
15898 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15899 <= DWARF2_ADDR_SIZE))
15900 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15901 else
15902 {
15903 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15904 VAR_INIT_STATUS_INITIALIZED);
15905 if (op1 == 0)
15906 return NULL;
15907 add_loc_descr (&mem_loc_result, op1);
15908 add_loc_descr (&mem_loc_result,
15909 new_loc_descr (DW_OP_plus, 0, 0));
15910 }
15911 }
15912 break;
15913
15914 /* If a pseudo-reg is optimized away, it is possible for it to
15915 be replaced with a MEM containing a multiply or shift. */
15916 case MINUS:
15917 op = DW_OP_minus;
15918 goto do_binop;
15919
15920 case MULT:
15921 op = DW_OP_mul;
15922 goto do_binop;
15923
15924 case DIV:
15925 if ((!dwarf_strict || dwarf_version >= 5)
15926 && is_a <scalar_int_mode> (mode, &int_mode)
15927 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15928 {
15929 mem_loc_result = typed_binop (DW_OP_div, rtl,
15930 base_type_for_mode (mode, 0),
15931 int_mode, mem_mode);
15932 break;
15933 }
15934 op = DW_OP_div;
15935 goto do_binop;
15936
15937 case UMOD:
15938 op = DW_OP_mod;
15939 goto do_binop;
15940
15941 case ASHIFT:
15942 op = DW_OP_shl;
15943 goto do_shift;
15944
15945 case ASHIFTRT:
15946 op = DW_OP_shra;
15947 goto do_shift;
15948
15949 case LSHIFTRT:
15950 op = DW_OP_shr;
15951 goto do_shift;
15952
15953 do_shift:
15954 if (!is_a <scalar_int_mode> (mode, &int_mode))
15955 break;
15956 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15957 VAR_INIT_STATUS_INITIALIZED);
15958 {
15959 rtx rtlop1 = XEXP (rtl, 1);
15960 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15961 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15962 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15963 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15964 VAR_INIT_STATUS_INITIALIZED);
15965 }
15966
15967 if (op0 == 0 || op1 == 0)
15968 break;
15969
15970 mem_loc_result = op0;
15971 add_loc_descr (&mem_loc_result, op1);
15972 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15973 break;
15974
15975 case AND:
15976 op = DW_OP_and;
15977 goto do_binop;
15978
15979 case IOR:
15980 op = DW_OP_or;
15981 goto do_binop;
15982
15983 case XOR:
15984 op = DW_OP_xor;
15985 goto do_binop;
15986
15987 do_binop:
15988 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15989 VAR_INIT_STATUS_INITIALIZED);
15990 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15991 VAR_INIT_STATUS_INITIALIZED);
15992
15993 if (op0 == 0 || op1 == 0)
15994 break;
15995
15996 mem_loc_result = op0;
15997 add_loc_descr (&mem_loc_result, op1);
15998 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15999 break;
16000
16001 case MOD:
16002 if ((!dwarf_strict || dwarf_version >= 5)
16003 && is_a <scalar_int_mode> (mode, &int_mode)
16004 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
16005 {
16006 mem_loc_result = typed_binop (DW_OP_mod, rtl,
16007 base_type_for_mode (mode, 0),
16008 int_mode, mem_mode);
16009 break;
16010 }
16011
16012 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
16013 VAR_INIT_STATUS_INITIALIZED);
16014 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16015 VAR_INIT_STATUS_INITIALIZED);
16016
16017 if (op0 == 0 || op1 == 0)
16018 break;
16019
16020 mem_loc_result = op0;
16021 add_loc_descr (&mem_loc_result, op1);
16022 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
16023 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
16024 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
16025 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
16026 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
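/* The two DW_OP_over ops duplicate op0 and op1, so after DW_OP_div,
DW_OP_mul and DW_OP_minus the stack holds op0 - (op0 / op1) * op1,
i.e. the remainder of the signed division. */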
16027 break;
16028
16029 case UDIV:
16030 if ((!dwarf_strict || dwarf_version >= 5)
16031 && is_a <scalar_int_mode> (mode, &int_mode))
16032 {
16033 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
16034 {
16035 op = DW_OP_div;
16036 goto do_binop;
16037 }
16038 mem_loc_result = typed_binop (DW_OP_div, rtl,
16039 base_type_for_mode (int_mode, 1),
16040 int_mode, mem_mode);
16041 }
16042 break;
16043
16044 case NOT:
16045 op = DW_OP_not;
16046 goto do_unop;
16047
16048 case ABS:
16049 op = DW_OP_abs;
16050 goto do_unop;
16051
16052 case NEG:
16053 op = DW_OP_neg;
16054 goto do_unop;
16055
16056 do_unop:
16057 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
16058 VAR_INIT_STATUS_INITIALIZED);
16059
16060 if (op0 == 0)
16061 break;
16062
16063 mem_loc_result = op0;
16064 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16065 break;
16066
16067 case CONST_INT:
16068 if (!is_a <scalar_int_mode> (mode, &int_mode)
16069 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16070 #ifdef POINTERS_EXTEND_UNSIGNED
16071 || (int_mode == Pmode
16072 && mem_mode != VOIDmode
16073 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
16074 #endif
16075 )
16076 {
16077 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
16078 break;
16079 }
16080 if ((!dwarf_strict || dwarf_version >= 5)
16081 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
16082 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
16083 {
16084 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
16085 scalar_int_mode amode;
16086 if (type_die == NULL)
16087 return NULL;
16088 if (INTVAL (rtl) >= 0
16089 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
16090 .exists (&amode))
16091 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
16092 /* const DW_OP_convert <XXX> vs.
16093 DW_OP_const_type <XXX, 1, const>. */
16094 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
16095 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
16096 {
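/* Pushing the constant untyped and converting it is shorter here:
the constant operand of DW_OP_const_type always occupies
GET_MODE_SIZE (int_mode) bytes. */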
16097 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
16098 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16099 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16100 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16101 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
16102 add_loc_descr (&mem_loc_result, op0);
16103 return mem_loc_result;
16104 }
16105 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
16106 INTVAL (rtl));
16107 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16108 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16109 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16110 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
16111 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
16112 else
16113 {
16114 mem_loc_result->dw_loc_oprnd2.val_class
16115 = dw_val_class_const_double;
16116 mem_loc_result->dw_loc_oprnd2.v.val_double
16117 = double_int::from_shwi (INTVAL (rtl));
16118 }
16119 }
16120 break;
16121
16122 case CONST_DOUBLE:
16123 if (!dwarf_strict || dwarf_version >= 5)
16124 {
16125 dw_die_ref type_die;
16126
16127 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
16128 CONST_DOUBLE rtx could represent either a large integer
16129 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
16130 the value is always a floating point constant.
16131
16132 When it is an integer, a CONST_DOUBLE is used whenever
16133 the constant requires 2 HWIs to be adequately represented.
16134 We output CONST_DOUBLEs as blocks. */
16135 if (mode == VOIDmode
16136 || (GET_MODE (rtl) == VOIDmode
16137 && maybe_ne (GET_MODE_BITSIZE (mode),
16138 HOST_BITS_PER_DOUBLE_INT)))
16139 break;
16140 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16141 if (type_die == NULL)
16142 return NULL;
16143 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16144 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16145 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16146 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16147 #if TARGET_SUPPORTS_WIDE_INT == 0
16148 if (!SCALAR_FLOAT_MODE_P (mode))
16149 {
16150 mem_loc_result->dw_loc_oprnd2.val_class
16151 = dw_val_class_const_double;
16152 mem_loc_result->dw_loc_oprnd2.v.val_double
16153 = rtx_to_double_int (rtl);
16154 }
16155 else
16156 #endif
16157 {
16158 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
16159 unsigned int length = GET_MODE_SIZE (float_mode);
16160 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16161
16162 insert_float (rtl, array);
16163 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16164 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16165 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16166 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16167 }
16168 }
16169 break;
16170
16171 case CONST_WIDE_INT:
16172 if (!dwarf_strict || dwarf_version >= 5)
16173 {
16174 dw_die_ref type_die;
16175
16176 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16177 if (type_die == NULL)
16178 return NULL;
16179 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16180 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16181 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16182 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16183 mem_loc_result->dw_loc_oprnd2.val_class
16184 = dw_val_class_wide_int;
16185 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16186 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
16187 }
16188 break;
16189
16190 case CONST_POLY_INT:
16191 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
16192 break;
16193
16194 case EQ:
16195 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
16196 break;
16197
16198 case GE:
16199 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16200 break;
16201
16202 case GT:
16203 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16204 break;
16205
16206 case LE:
16207 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16208 break;
16209
16210 case LT:
16211 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16212 break;
16213
16214 case NE:
16215 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
16216 break;
16217
16218 case GEU:
16219 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16220 break;
16221
16222 case GTU:
16223 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16224 break;
16225
16226 case LEU:
16227 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16228 break;
16229
16230 case LTU:
16231 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16232 break;
16233
16234 case UMIN:
16235 case UMAX:
16236 if (!SCALAR_INT_MODE_P (mode))
16237 break;
16238 /* FALLTHRU */
16239 case SMIN:
16240 case SMAX:
16241 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16242 break;
16243
16244 case ZERO_EXTRACT:
16245 case SIGN_EXTRACT:
16246 if (CONST_INT_P (XEXP (rtl, 1))
16247 && CONST_INT_P (XEXP (rtl, 2))
16248 && is_a <scalar_int_mode> (mode, &int_mode)
16249 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16250 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16251 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16252 && ((unsigned) INTVAL (XEXP (rtl, 1))
16253 + (unsigned) INTVAL (XEXP (rtl, 2))
16254 <= GET_MODE_BITSIZE (int_mode)))
16255 {
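/* Extract the field by shifting it up to the most significant end of
the value with DW_OP_shl and then back down with DW_OP_shr
(ZERO_EXTRACT) or DW_OP_shra (SIGN_EXTRACT), which zero- or
sign-extends it in the same step. */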
16256 int shift, size;
16257 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16258 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16259 if (op0 == 0)
16260 break;
16261 if (GET_CODE (rtl) == SIGN_EXTRACT)
16262 op = DW_OP_shra;
16263 else
16264 op = DW_OP_shr;
16265 mem_loc_result = op0;
16266 size = INTVAL (XEXP (rtl, 1));
16267 shift = INTVAL (XEXP (rtl, 2));
16268 if (BITS_BIG_ENDIAN)
16269 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
16270 if (shift + size != (int) DWARF2_ADDR_SIZE)
16271 {
16272 add_loc_descr (&mem_loc_result,
16273 int_loc_descriptor (DWARF2_ADDR_SIZE
16274 - shift - size));
16275 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16276 }
16277 if (size != (int) DWARF2_ADDR_SIZE)
16278 {
16279 add_loc_descr (&mem_loc_result,
16280 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
16281 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16282 }
16283 }
16284 break;
16285
16286 case IF_THEN_ELSE:
16287 {
16288 dw_loc_descr_ref op2, bra_node, drop_node;
16289 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16290 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16291 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16292 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16293 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16294 VAR_INIT_STATUS_INITIALIZED);
16295 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16296 VAR_INIT_STATUS_INITIALIZED);
16297 if (op0 == NULL || op1 == NULL || op2 == NULL)
16298 break;
16299
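/* Push the THEN value, the ELSE value and the condition. DW_OP_bra
pops the condition and, when it is nonzero, branches over the
DW_OP_swap, so the final DW_OP_drop discards whichever value was
not selected. */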
16300 mem_loc_result = op1;
16301 add_loc_descr (&mem_loc_result, op2);
16302 add_loc_descr (&mem_loc_result, op0);
16303 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16304 add_loc_descr (&mem_loc_result, bra_node);
16305 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16306 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16307 add_loc_descr (&mem_loc_result, drop_node);
16308 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16309 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
16310 }
16311 break;
16312
16313 case FLOAT_EXTEND:
16314 case FLOAT_TRUNCATE:
16315 case FLOAT:
16316 case UNSIGNED_FLOAT:
16317 case FIX:
16318 case UNSIGNED_FIX:
16319 if (!dwarf_strict || dwarf_version >= 5)
16320 {
16321 dw_die_ref type_die;
16322 dw_loc_descr_ref cvt;
16323
16324 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16325 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16326 if (op0 == NULL)
16327 break;
16328 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16329 && (GET_CODE (rtl) == FLOAT
16330 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16331 {
16332 type_die = base_type_for_mode (int_mode,
16333 GET_CODE (rtl) == UNSIGNED_FLOAT);
16334 if (type_die == NULL)
16335 break;
16336 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16337 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16338 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16339 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16340 add_loc_descr (&op0, cvt);
16341 }
16342 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16343 if (type_die == NULL)
16344 break;
16345 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16346 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16347 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16348 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16349 add_loc_descr (&op0, cvt);
16350 if (is_a <scalar_int_mode> (mode, &int_mode)
16351 && (GET_CODE (rtl) == FIX
16352 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16353 {
16354 op0 = convert_descriptor_to_mode (int_mode, op0);
16355 if (op0 == NULL)
16356 break;
16357 }
16358 mem_loc_result = op0;
16359 }
16360 break;
16361
16362 case CLZ:
16363 case CTZ:
16364 case FFS:
16365 if (is_a <scalar_int_mode> (mode, &int_mode))
16366 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16367 break;
16368
16369 case POPCOUNT:
16370 case PARITY:
16371 if (is_a <scalar_int_mode> (mode, &int_mode))
16372 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16373 break;
16374
16375 case BSWAP:
16376 if (is_a <scalar_int_mode> (mode, &int_mode))
16377 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16378 break;
16379
16380 case ROTATE:
16381 case ROTATERT:
16382 if (is_a <scalar_int_mode> (mode, &int_mode))
16383 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16384 break;
16385
16386 case COMPARE:
16387 /* In theory, we could implement the above. */
16388 /* DWARF cannot represent the unsigned compare operations
16389 natively. */
16390 case SS_MULT:
16391 case US_MULT:
16392 case SS_DIV:
16393 case US_DIV:
16394 case SS_PLUS:
16395 case US_PLUS:
16396 case SS_MINUS:
16397 case US_MINUS:
16398 case SS_NEG:
16399 case US_NEG:
16400 case SS_ABS:
16401 case SS_ASHIFT:
16402 case US_ASHIFT:
16403 case SS_TRUNCATE:
16404 case US_TRUNCATE:
16405 case UNORDERED:
16406 case ORDERED:
16407 case UNEQ:
16408 case UNGE:
16409 case UNGT:
16410 case UNLE:
16411 case UNLT:
16412 case LTGT:
16413 case FRACT_CONVERT:
16414 case UNSIGNED_FRACT_CONVERT:
16415 case SAT_FRACT:
16416 case UNSIGNED_SAT_FRACT:
16417 case SQRT:
16418 case ASM_OPERANDS:
16419 case VEC_MERGE:
16420 case VEC_SELECT:
16421 case VEC_CONCAT:
16422 case VEC_DUPLICATE:
16423 case VEC_SERIES:
16424 case HIGH:
16425 case FMA:
16426 case STRICT_LOW_PART:
16427 case CONST_VECTOR:
16428 case CONST_FIXED:
16429 case CLRSB:
16430 case CLOBBER:
16431 case CLOBBER_HIGH:
16432 break;
16433
16434 case CONST_STRING:
16435 resolve_one_addr (&rtl);
16436 goto symref;
16437
16438 /* RTL sequences inside PARALLEL record a series of DWARF operations for
16439 the expression. An UNSPEC rtx represents a raw DWARF operation;
16440 new_loc_descr is called for it to build the operation directly.
16441 Otherwise mem_loc_descriptor is called recursively. */
16442 case PARALLEL:
16443 {
16444 int index = 0;
16445 dw_loc_descr_ref exp_result = NULL;
16446
16447 for (; index < XVECLEN (rtl, 0); index++)
16448 {
16449 rtx elem = XVECEXP (rtl, 0, index);
16450 if (GET_CODE (elem) == UNSPEC)
16451 {
16452 /* Each DWARF operation UNSPEC contains two operands; if
16453 one operand is not used for the operation, const0_rtx is
16454 passed. */
16455 gcc_assert (XVECLEN (elem, 0) == 2);
16456
16457 HOST_WIDE_INT dw_op = XINT (elem, 1);
16458 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16459 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16460 exp_result
16461 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16462 oprnd2);
16463 }
16464 else
16465 exp_result
16466 = mem_loc_descriptor (elem, mode, mem_mode,
16467 VAR_INIT_STATUS_INITIALIZED);
16468
16469 if (!mem_loc_result)
16470 mem_loc_result = exp_result;
16471 else
16472 add_loc_descr (&mem_loc_result, exp_result);
16473 }
16474
16475 break;
16476 }
16477
16478 default:
16479 if (flag_checking)
16480 {
16481 print_rtl (stderr, rtl);
16482 gcc_unreachable ();
16483 }
16484 break;
16485 }
16486
16487 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16488 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16489
16490 return mem_loc_result;
16491 }
16492
16493 /* Return a descriptor that describes the concatenation of two locations.
16494 This is typically a complex variable. */
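/* For example, a complex value held in two 8-byte registers becomes
<loc of first register> DW_OP_piece 8 <loc of second register>
DW_OP_piece 8, with the piece sizes taken from the modes of X0 and X1. */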
16495
16496 static dw_loc_descr_ref
16497 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16498 {
16499 /* At present we only track constant-sized pieces. */
16500 unsigned int size0, size1;
16501 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16502 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16503 return 0;
16504
16505 dw_loc_descr_ref cc_loc_result = NULL;
16506 dw_loc_descr_ref x0_ref
16507 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16508 dw_loc_descr_ref x1_ref
16509 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16510
16511 if (x0_ref == 0 || x1_ref == 0)
16512 return 0;
16513
16514 cc_loc_result = x0_ref;
16515 add_loc_descr_op_piece (&cc_loc_result, size0);
16516
16517 add_loc_descr (&cc_loc_result, x1_ref);
16518 add_loc_descr_op_piece (&cc_loc_result, size1);
16519
16520 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16521 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16522
16523 return cc_loc_result;
16524 }
16525
16526 /* Return a descriptor that describes the concatenation of N
16527 locations. */
16528
16529 static dw_loc_descr_ref
16530 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16531 {
16532 unsigned int i;
16533 dw_loc_descr_ref cc_loc_result = NULL;
16534 unsigned int n = XVECLEN (concatn, 0);
16535 unsigned int size;
16536
16537 for (i = 0; i < n; ++i)
16538 {
16539 dw_loc_descr_ref ref;
16540 rtx x = XVECEXP (concatn, 0, i);
16541
16542 /* At present we only track constant-sized pieces. */
16543 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16544 return NULL;
16545
16546 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16547 if (ref == NULL)
16548 return NULL;
16549
16550 add_loc_descr (&cc_loc_result, ref);
16551 add_loc_descr_op_piece (&cc_loc_result, size);
16552 }
16553
16554 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16555 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16556
16557 return cc_loc_result;
16558 }
16559
16560 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16561 for DEBUG_IMPLICIT_PTR RTL. */
16562
16563 static dw_loc_descr_ref
16564 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16565 {
16566 dw_loc_descr_ref ret;
16567 dw_die_ref ref;
16568
16569 if (dwarf_strict && dwarf_version < 5)
16570 return NULL;
16571 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16572 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16573 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16574 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16575 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16576 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16577 if (ref)
16578 {
16579 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16580 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16581 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16582 }
16583 else
16584 {
16585 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16586 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16587 }
16588 return ret;
16589 }
16590
16591 /* Output a proper Dwarf location descriptor for a variable or parameter
16592 which is either allocated in a register or in a memory location. For a
16593 register, we just generate an OP_REG and the register number. For a
16594 memory location we provide a Dwarf postfix expression describing how to
16595 generate the (dynamic) address of the object onto the address stack.
16596
16597 MODE is mode of the decl if this loc_descriptor is going to be used in
16598 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
16599 allowed, VOIDmode otherwise.
16600
16601 If we don't know how to describe it, return 0. */
16602
16603 static dw_loc_descr_ref
16604 loc_descriptor (rtx rtl, machine_mode mode,
16605 enum var_init_status initialized)
16606 {
16607 dw_loc_descr_ref loc_result = NULL;
16608 scalar_int_mode int_mode;
16609
16610 switch (GET_CODE (rtl))
16611 {
16612 case SUBREG:
16613 /* The case of a subreg may arise when we have a local (register)
16614 variable or a formal (register) parameter which doesn't quite fill
16615 up an entire register. For now, just assume that it is
16616 legitimate to make the Dwarf info refer to the whole register which
16617 contains the given subreg. */
16618 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16619 loc_result = loc_descriptor (SUBREG_REG (rtl),
16620 GET_MODE (SUBREG_REG (rtl)), initialized);
16621 else
16622 goto do_default;
16623 break;
16624
16625 case REG:
16626 loc_result = reg_loc_descriptor (rtl, initialized);
16627 break;
16628
16629 case MEM:
16630 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16631 GET_MODE (rtl), initialized);
16632 if (loc_result == NULL)
16633 loc_result = tls_mem_loc_descriptor (rtl);
16634 if (loc_result == NULL)
16635 {
16636 rtx new_rtl = avoid_constant_pool_reference (rtl);
16637 if (new_rtl != rtl)
16638 loc_result = loc_descriptor (new_rtl, mode, initialized);
16639 }
16640 break;
16641
16642 case CONCAT:
16643 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16644 initialized);
16645 break;
16646
16647 case CONCATN:
16648 loc_result = concatn_loc_descriptor (rtl, initialized);
16649 break;
16650
16651 case VAR_LOCATION:
16652 /* Single part. */
16653 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16654 {
16655 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16656 if (GET_CODE (loc) == EXPR_LIST)
16657 loc = XEXP (loc, 0);
16658 loc_result = loc_descriptor (loc, mode, initialized);
16659 break;
16660 }
16661
16662 rtl = XEXP (rtl, 1);
16663 /* FALLTHRU */
16664
16665 case PARALLEL:
16666 {
16667 rtvec par_elems = XVEC (rtl, 0);
16668 int num_elem = GET_NUM_ELEM (par_elems);
16669 machine_mode mode;
16670 int i, size;
16671
16672 /* Create the first one, so we have something to add to. */
16673 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16674 VOIDmode, initialized);
16675 if (loc_result == NULL)
16676 return NULL;
16677 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16678 /* At present we only track constant-sized pieces. */
16679 if (!GET_MODE_SIZE (mode).is_constant (&size))
16680 return NULL;
16681 add_loc_descr_op_piece (&loc_result, size);
16682 for (i = 1; i < num_elem; i++)
16683 {
16684 dw_loc_descr_ref temp;
16685
16686 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16687 VOIDmode, initialized);
16688 if (temp == NULL)
16689 return NULL;
16690 add_loc_descr (&loc_result, temp);
16691 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16692 /* At present we only track constant-sized pieces. */
16693 if (!GET_MODE_SIZE (mode).is_constant (&size))
16694 return NULL;
16695 add_loc_descr_op_piece (&loc_result, size);
16696 }
16697 }
16698 break;
16699
16700 case CONST_INT:
16701 if (mode != VOIDmode && mode != BLKmode)
16702 {
16703 int_mode = as_a <scalar_int_mode> (mode);
16704 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16705 INTVAL (rtl));
16706 }
16707 break;
16708
16709 case CONST_DOUBLE:
16710 if (mode == VOIDmode)
16711 mode = GET_MODE (rtl);
16712
16713 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16714 {
16715 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16716
16717 /* Note that a CONST_DOUBLE rtx could represent either an integer
16718 or a floating-point constant. A CONST_DOUBLE is used whenever
16719 the constant requires more than one word in order to be
16720 adequately represented. We output CONST_DOUBLEs as blocks. */
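/* DW_OP_implicit_value carries the constant's bytes inline in the
expression, so the consumer never needs an address for the value. */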
16721 scalar_mode smode = as_a <scalar_mode> (mode);
16722 loc_result = new_loc_descr (DW_OP_implicit_value,
16723 GET_MODE_SIZE (smode), 0);
16724 #if TARGET_SUPPORTS_WIDE_INT == 0
16725 if (!SCALAR_FLOAT_MODE_P (smode))
16726 {
16727 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16728 loc_result->dw_loc_oprnd2.v.val_double
16729 = rtx_to_double_int (rtl);
16730 }
16731 else
16732 #endif
16733 {
16734 unsigned int length = GET_MODE_SIZE (smode);
16735 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16736
16737 insert_float (rtl, array);
16738 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16739 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16740 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16741 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16742 }
16743 }
16744 break;
16745
16746 case CONST_WIDE_INT:
16747 if (mode == VOIDmode)
16748 mode = GET_MODE (rtl);
16749
16750 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16751 {
16752 int_mode = as_a <scalar_int_mode> (mode);
16753 loc_result = new_loc_descr (DW_OP_implicit_value,
16754 GET_MODE_SIZE (int_mode), 0);
16755 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16756 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16757 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16758 }
16759 break;
16760
16761 case CONST_VECTOR:
16762 if (mode == VOIDmode)
16763 mode = GET_MODE (rtl);
16764
16765 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16766 {
16767 unsigned int length;
16768 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16769 return NULL;
16770
16771 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16772 unsigned char *array
16773 = ggc_vec_alloc<unsigned char> (length * elt_size);
16774 unsigned int i;
16775 unsigned char *p;
16776 machine_mode imode = GET_MODE_INNER (mode);
16777
16778 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16779 switch (GET_MODE_CLASS (mode))
16780 {
16781 case MODE_VECTOR_INT:
16782 for (i = 0, p = array; i < length; i++, p += elt_size)
16783 {
16784 rtx elt = CONST_VECTOR_ELT (rtl, i);
16785 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16786 }
16787 break;
16788
16789 case MODE_VECTOR_FLOAT:
16790 for (i = 0, p = array; i < length; i++, p += elt_size)
16791 {
16792 rtx elt = CONST_VECTOR_ELT (rtl, i);
16793 insert_float (elt, p);
16794 }
16795 break;
16796
16797 default:
16798 gcc_unreachable ();
16799 }
16800
16801 loc_result = new_loc_descr (DW_OP_implicit_value,
16802 length * elt_size, 0);
16803 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16804 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16805 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16806 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16807 }
16808 break;
16809
16810 case CONST:
16811 if (mode == VOIDmode
16812 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16813 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16814 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16815 {
16816 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16817 break;
16818 }
16819 /* FALLTHROUGH */
16820 case SYMBOL_REF:
16821 if (!const_ok_for_output (rtl))
16822 break;
16823 /* FALLTHROUGH */
16824 case LABEL_REF:
16825 if (is_a <scalar_int_mode> (mode, &int_mode)
16826 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16827 && (dwarf_version >= 4 || !dwarf_strict))
16828 {
16829 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16830 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16831 vec_safe_push (used_rtx_array, rtl);
16832 }
16833 break;
16834
16835 case DEBUG_IMPLICIT_PTR:
16836 loc_result = implicit_ptr_descriptor (rtl, 0);
16837 break;
16838
16839 case PLUS:
16840 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16841 && CONST_INT_P (XEXP (rtl, 1)))
16842 {
16843 loc_result
16844 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16845 break;
16846 }
16847 /* FALLTHRU */
16848 do_default:
16849 default:
16850 if ((is_a <scalar_int_mode> (mode, &int_mode)
16851 && GET_MODE (rtl) == int_mode
16852 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16853 && dwarf_version >= 4)
16854 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16855 {
16856 /* Value expression. */
16857 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16858 if (loc_result)
16859 add_loc_descr (&loc_result,
16860 new_loc_descr (DW_OP_stack_value, 0, 0));
16861 }
16862 break;
16863 }
16864
16865 return loc_result;
16866 }
16867
16868 /* We need to figure out what section we should use as the base for the
16869 address ranges where a given location is valid.
16870 1. If this particular DECL has a section associated with it, use that.
16871 2. If this function has a section associated with it, use that.
16872 3. Otherwise, use the text section.
16873 XXX: If you split a variable across multiple sections, we won't notice. */
16874
16875 static const char *
16876 secname_for_decl (const_tree decl)
16877 {
16878 const char *secname;
16879
16880 if (VAR_OR_FUNCTION_DECL_P (decl)
16881 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16882 && DECL_SECTION_NAME (decl))
16883 secname = DECL_SECTION_NAME (decl);
16884 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16885 {
16886 if (in_cold_section_p)
16887 {
16888 section *sec = current_function_section ();
16889 if (sec->common.flags & SECTION_NAMED)
16890 return sec->named.name;
16891 }
16892 secname = DECL_SECTION_NAME (current_function_decl);
16893 }
16894 else if (cfun && in_cold_section_p)
16895 secname = crtl->subsections.cold_section_label;
16896 else
16897 secname = text_section_label;
16898
16899 return secname;
16900 }
16901
16902 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16903
16904 static bool
16905 decl_by_reference_p (tree decl)
16906 {
16907 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16908 || VAR_P (decl))
16909 && DECL_BY_REFERENCE (decl));
16910 }
16911
16912 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16913 for VARLOC. */
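/* Roughly, WANT_ADDRESS of 0 asks for the value of the object (an
address is dereferenced at the end), 1 requires its address, and 2
asks for its address but also accepts the implicit locations
loc_descriptor can produce, e.g. a value terminated by
DW_OP_stack_value. */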
16914
16915 static dw_loc_descr_ref
16916 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16917 enum var_init_status initialized)
16918 {
16919 int have_address = 0;
16920 dw_loc_descr_ref descr;
16921 machine_mode mode;
16922
16923 if (want_address != 2)
16924 {
16925 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16926 /* Single part. */
16927 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16928 {
16929 varloc = PAT_VAR_LOCATION_LOC (varloc);
16930 if (GET_CODE (varloc) == EXPR_LIST)
16931 varloc = XEXP (varloc, 0);
16932 mode = GET_MODE (varloc);
16933 if (MEM_P (varloc))
16934 {
16935 rtx addr = XEXP (varloc, 0);
16936 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16937 mode, initialized);
16938 if (descr)
16939 have_address = 1;
16940 else
16941 {
16942 rtx x = avoid_constant_pool_reference (varloc);
16943 if (x != varloc)
16944 descr = mem_loc_descriptor (x, mode, VOIDmode,
16945 initialized);
16946 }
16947 }
16948 else
16949 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16950 }
16951 else
16952 return 0;
16953 }
16954 else
16955 {
16956 if (GET_CODE (varloc) == VAR_LOCATION)
16957 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16958 else
16959 mode = DECL_MODE (loc);
16960 descr = loc_descriptor (varloc, mode, initialized);
16961 have_address = 1;
16962 }
16963
16964 if (!descr)
16965 return 0;
16966
16967 if (want_address == 2 && !have_address
16968 && (dwarf_version >= 4 || !dwarf_strict))
16969 {
16970 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16971 {
16972 expansion_failed (loc, NULL_RTX,
16973 "DWARF address size mismatch");
16974 return 0;
16975 }
16976 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16977 have_address = 1;
16978 }
16979 /* Show if we can't fill the request for an address. */
16980 if (want_address && !have_address)
16981 {
16982 expansion_failed (loc, NULL_RTX,
16983 "Want address and only have value");
16984 return 0;
16985 }
16986
16987 /* If we've got an address and don't want one, dereference. */
16988 if (!want_address && have_address)
16989 {
16990 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16991 enum dwarf_location_atom op;
16992
16993 if (size > DWARF2_ADDR_SIZE || size == -1)
16994 {
16995 expansion_failed (loc, NULL_RTX,
16996 "DWARF address size mismatch");
16997 return 0;
16998 }
16999 else if (size == DWARF2_ADDR_SIZE)
17000 op = DW_OP_deref;
17001 else
17002 op = DW_OP_deref_size;
17003
17004 add_loc_descr (&descr, new_loc_descr (op, size, 0));
17005 }
17006
17007 return descr;
17008 }
17009
17010 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
17011 if it is not possible. */
17012
17013 static dw_loc_descr_ref
17014 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
17015 {
17016 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
17017 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
17018 else if (dwarf_version >= 3 || !dwarf_strict)
17019 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
17020 else
17021 return NULL;
17022 }
17023
17024 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
17025 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
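/* The descriptor is built piecewise: a piece that has a location gets
that location followed by DW_OP_piece or DW_OP_bit_piece, while a
piece without one is covered by a bare DW_OP_*piece after an empty
expression, marking those bits as optimized out. */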
17026
17027 static dw_loc_descr_ref
17028 dw_sra_loc_expr (tree decl, rtx loc)
17029 {
17030 rtx p;
17031 unsigned HOST_WIDE_INT padsize = 0;
17032 dw_loc_descr_ref descr, *descr_tail;
17033 unsigned HOST_WIDE_INT decl_size;
17034 rtx varloc;
17035 enum var_init_status initialized;
17036
17037 if (DECL_SIZE (decl) == NULL
17038 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
17039 return NULL;
17040
17041 decl_size = tree_to_uhwi (DECL_SIZE (decl));
17042 descr = NULL;
17043 descr_tail = &descr;
17044
17045 for (p = loc; p; p = XEXP (p, 1))
17046 {
17047 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
17048 rtx loc_note = *decl_piece_varloc_ptr (p);
17049 dw_loc_descr_ref cur_descr;
17050 dw_loc_descr_ref *tail, last = NULL;
17051 unsigned HOST_WIDE_INT opsize = 0;
17052
17053 if (loc_note == NULL_RTX
17054 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
17055 {
17056 padsize += bitsize;
17057 continue;
17058 }
17059 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
17060 varloc = NOTE_VAR_LOCATION (loc_note);
17061 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
17062 if (cur_descr == NULL)
17063 {
17064 padsize += bitsize;
17065 continue;
17066 }
17067
17068 /* Check that cur_descr either doesn't use
17069 DW_OP_*piece operations, or their sum is equal
17070 to bitsize. Otherwise we can't embed it. */
17071 for (tail = &cur_descr; *tail != NULL;
17072 tail = &(*tail)->dw_loc_next)
17073 if ((*tail)->dw_loc_opc == DW_OP_piece)
17074 {
17075 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
17076 * BITS_PER_UNIT;
17077 last = *tail;
17078 }
17079 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
17080 {
17081 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
17082 last = *tail;
17083 }
17084
17085 if (last != NULL && opsize != bitsize)
17086 {
17087 padsize += bitsize;
17088 /* Discard the current piece of the descriptor and release any
17089 addr_table entries it uses. */
17090 remove_loc_list_addr_table_entries (cur_descr);
17091 continue;
17092 }
17093
17094 /* If there is a hole, add a DW_OP_*piece after an empty DWARF
17095 expression, which means that those bits are optimized out. */
17096 if (padsize)
17097 {
17098 if (padsize > decl_size)
17099 {
17100 remove_loc_list_addr_table_entries (cur_descr);
17101 goto discard_descr;
17102 }
17103 decl_size -= padsize;
17104 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
17105 if (*descr_tail == NULL)
17106 {
17107 remove_loc_list_addr_table_entries (cur_descr);
17108 goto discard_descr;
17109 }
17110 descr_tail = &(*descr_tail)->dw_loc_next;
17111 padsize = 0;
17112 }
17113 *descr_tail = cur_descr;
17114 descr_tail = tail;
17115 if (bitsize > decl_size)
17116 goto discard_descr;
17117 decl_size -= bitsize;
17118 if (last == NULL)
17119 {
17120 HOST_WIDE_INT offset = 0;
17121 if (GET_CODE (varloc) == VAR_LOCATION
17122 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
17123 {
17124 varloc = PAT_VAR_LOCATION_LOC (varloc);
17125 if (GET_CODE (varloc) == EXPR_LIST)
17126 varloc = XEXP (varloc, 0);
17127 }
17128 do
17129 {
17130 if (GET_CODE (varloc) == CONST
17131 || GET_CODE (varloc) == SIGN_EXTEND
17132 || GET_CODE (varloc) == ZERO_EXTEND)
17133 varloc = XEXP (varloc, 0);
17134 else if (GET_CODE (varloc) == SUBREG)
17135 varloc = SUBREG_REG (varloc);
17136 else
17137 break;
17138 }
17139 while (1);
17140 /* The DW_OP_bit_piece offset should be zero for register
17141 or implicit location descriptions and empty location
17142 descriptions, but for memory addresses it needs big-endian
17143 adjustment. */
17144 if (MEM_P (varloc))
17145 {
17146 unsigned HOST_WIDE_INT memsize;
17147 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
17148 goto discard_descr;
17149 memsize *= BITS_PER_UNIT;
17150 if (memsize != bitsize)
17151 {
17152 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
17153 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
17154 goto discard_descr;
17155 if (memsize < bitsize)
17156 goto discard_descr;
17157 if (BITS_BIG_ENDIAN)
17158 offset = memsize - bitsize;
17159 }
17160 }
17161
17162 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
17163 if (*descr_tail == NULL)
17164 goto discard_descr;
17165 descr_tail = &(*descr_tail)->dw_loc_next;
17166 }
17167 }
17168
17169 /* If there were any non-empty expressions, add padding till the end of
17170 the decl. */
17171 if (descr != NULL && decl_size != 0)
17172 {
17173 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
17174 if (*descr_tail == NULL)
17175 goto discard_descr;
17176 }
17177 return descr;
17178
17179 discard_descr:
17180 /* Discard the descriptor and release any addr_table entries it uses. */
17181 remove_loc_list_addr_table_entries (descr);
17182 return NULL;
17183 }
17184
17185 /* Return the dwarf representation of the location list LOC_LIST of
17186 DECL. WANT_ADDRESS has the same meaning as in the loc_list_from_tree
17187 function. */
17188
17189 static dw_loc_list_ref
17190 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
17191 {
17192 const char *endname, *secname;
17193 var_loc_view endview;
17194 rtx varloc;
17195 enum var_init_status initialized;
17196 struct var_loc_node *node;
17197 dw_loc_descr_ref descr;
17198 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
17199 dw_loc_list_ref list = NULL;
17200 dw_loc_list_ref *listp = &list;
17201
17202 /* Now that we know what section we are using for a base,
17203 actually construct the list of locations.
17204 The first location information is what is passed to the
17205 function that creates the location list, and the remaining
17206 locations just get added on to that list.
17207 Note that we only know the start address for a location
17208 (i.e. where the location changes), so to build the range, we use
17209 the range [current location start, next location start].
17210 This means we have to special case the last node, and generate
17211 a range of [last location start, end of function label]. */
17212
17213 if (cfun && crtl->has_bb_partition)
17214 {
17215 bool save_in_cold_section_p = in_cold_section_p;
17216 in_cold_section_p = first_function_block_is_cold;
17217 if (loc_list->last_before_switch == NULL)
17218 in_cold_section_p = !in_cold_section_p;
17219 secname = secname_for_decl (decl);
17220 in_cold_section_p = save_in_cold_section_p;
17221 }
17222 else
17223 secname = secname_for_decl (decl);
17224
17225 for (node = loc_list->first; node; node = node->next)
17226 {
17227 bool range_across_switch = false;
17228 if (GET_CODE (node->loc) == EXPR_LIST
17229 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
17230 {
17231 if (GET_CODE (node->loc) == EXPR_LIST)
17232 {
17233 descr = NULL;
17234 /* This requires DW_OP_{,bit_}piece, which is not usable
17235 inside DWARF expressions. */
17236 if (want_address == 2)
17237 descr = dw_sra_loc_expr (decl, node->loc);
17238 }
17239 else
17240 {
17241 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17242 varloc = NOTE_VAR_LOCATION (node->loc);
17243 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
17244 }
17245 if (descr)
17246 {
17247 /* If a section switch happens in between node->label
17248 and node->next->label (or the end of the function) and
17249 we can't emit it as a single entry list,
17250 emit two ranges, the first one ending at the end
17251 of the first partition and the second one starting at the
17252 beginning of the second partition. */
17253 if (node == loc_list->last_before_switch
17254 && (node != loc_list->first || loc_list->first->next
17255 /* If we are to emit a view number, we will emit
17256 a loclist rather than a single location
17257 expression for the entire function (see
17258 loc_list_has_views), so we have to split the
17259 range that straddles across partitions. */
17260 || !ZERO_VIEW_P (node->view))
17261 && current_function_decl)
17262 {
17263 endname = cfun->fde->dw_fde_end;
17264 endview = 0;
17265 range_across_switch = true;
17266 }
17267 /* The variable has a location between NODE->LABEL and
17268 NODE->NEXT->LABEL. */
17269 else if (node->next)
17270 endname = node->next->label, endview = node->next->view;
17271 /* If the variable has a location at the last label
17272 it keeps its location until the end of function. */
17273 else if (!current_function_decl)
17274 endname = text_end_label, endview = 0;
17275 else
17276 {
17277 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17278 current_function_funcdef_no);
17279 endname = ggc_strdup (label_id);
17280 endview = 0;
17281 }
17282
17283 *listp = new_loc_list (descr, node->label, node->view,
17284 endname, endview, secname);
17285 if (TREE_CODE (decl) == PARM_DECL
17286 && node == loc_list->first
17287 && NOTE_P (node->loc)
17288 && strcmp (node->label, endname) == 0)
17289 (*listp)->force = true;
17290 listp = &(*listp)->dw_loc_next;
17291 }
17292 }
17293
17294 if (cfun
17295 && crtl->has_bb_partition
17296 && node == loc_list->last_before_switch)
17297 {
17298 bool save_in_cold_section_p = in_cold_section_p;
17299 in_cold_section_p = !first_function_block_is_cold;
17300 secname = secname_for_decl (decl);
17301 in_cold_section_p = save_in_cold_section_p;
17302 }
17303
17304 if (range_across_switch)
17305 {
17306 if (GET_CODE (node->loc) == EXPR_LIST)
17307 descr = dw_sra_loc_expr (decl, node->loc);
17308 else
17309 {
17310 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17311 varloc = NOTE_VAR_LOCATION (node->loc);
17312 descr = dw_loc_list_1 (decl, varloc, want_address,
17313 initialized);
17314 }
17315 gcc_assert (descr);
17316 /* The variable has a location between NODE->LABEL and
17317 NODE->NEXT->LABEL. */
17318 if (node->next)
17319 endname = node->next->label, endview = node->next->view;
17320 else
17321 endname = cfun->fde->dw_fde_second_end, endview = 0;
17322 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17323 endname, endview, secname);
17324 listp = &(*listp)->dw_loc_next;
17325 }
17326 }
17327
17328 /* Try to avoid the overhead of a location list by emitting a location
17329 expression instead, but only if we didn't have more than one
17330 location entry in the first place. If some entries were not
17331 representable, we don't want to pretend that a single entry that was
17332 representable applies to the entire scope in which the variable is
17333 available. */
17334 if (list && loc_list->first->next)
17335 gen_llsym (list);
17336 else
17337 maybe_gen_llsym (list);
17338
17339 return list;
17340 }
17341
17342 /* Return true if the loc_list has only a single element and thus can be
17343 represented as a location description. */
17344
17345 static bool
17346 single_element_loc_list_p (dw_loc_list_ref list)
17347 {
17348 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17349 return !list->ll_symbol;
17350 }
17351
17352 /* Duplicate a single element of a location list. */
17353
17354 static inline dw_loc_descr_ref
17355 copy_loc_descr (dw_loc_descr_ref ref)
17356 {
17357 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17358 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17359 return copy;
17360 }
17361
17362 /* To each location in list LIST append loc descr REF. */
17363
17364 static void
17365 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17366 {
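/* REF is linked into the first location as-is; every following location
gets a deep copy of the whole REF chain, since a descriptor node is
singly linked and cannot sit in two expressions at once. */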
17367 dw_loc_descr_ref copy;
17368 add_loc_descr (&list->expr, ref);
17369 list = list->dw_loc_next;
17370 while (list)
17371 {
17372 copy = copy_loc_descr (ref);
17373 add_loc_descr (&list->expr, copy);
17374 while (copy->dw_loc_next)
17375 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17376 list = list->dw_loc_next;
17377 }
17378 }
17379
17380 /* To each location in list LIST prepend loc descr REF. */
17381
17382 static void
17383 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17384 {
17385 dw_loc_descr_ref copy;
17386 dw_loc_descr_ref ref_end = list->expr;
17387 add_loc_descr (&ref, list->expr);
17388 list->expr = ref;
17389 list = list->dw_loc_next;
17390 while (list)
17391 {
17392 dw_loc_descr_ref end = list->expr;
17393 list->expr = copy = copy_loc_descr (ref);
17394 while (copy->dw_loc_next != ref_end)
17395 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17396 copy->dw_loc_next = end;
17397 list = list->dw_loc_next;
17398 }
17399 }
17400
17401 /* Given two lists RET and LIST,
17402 produce a location list that is the result of adding the expression in LIST
17403 to the expression in RET at each position in the program.
17404 Might be destructive on both RET and LIST.
17405
17406 TODO: We handle only the simple cases of RET or LIST having at most one
17407 element. The general case would involve sorting the lists in program order
17408 and merging them, which will need some additional work.
17409 Adding that will improve the quality of debug info, especially for SRA-ed
17410 structures. */
17411
17412 static void
17413 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17414 {
17415 if (!list)
17416 return;
17417 if (!*ret)
17418 {
17419 *ret = list;
17420 return;
17421 }
17422 if (!list->dw_loc_next)
17423 {
17424 add_loc_descr_to_each (*ret, list->expr);
17425 return;
17426 }
17427 if (!(*ret)->dw_loc_next)
17428 {
17429 prepend_loc_descr_to_each (list, (*ret)->expr);
17430 *ret = list;
17431 return;
17432 }
17433 expansion_failed (NULL_TREE, NULL_RTX,
17434 "Don't know how to merge two non-trivial"
17435 " location lists.\n");
17436 *ret = NULL;
17437 return;
17438 }
17439
17440 /* LOC is a constant expression. Try our luck: look it up in the constant
17441 pool and return a loc_descr of its address. */
17442
17443 static dw_loc_descr_ref
17444 cst_pool_loc_descr (tree loc)
17445 {
17446 /* Get an RTL for this, if something has been emitted. */
17447 rtx rtl = lookup_constant_def (loc);
17448
17449 if (!rtl || !MEM_P (rtl))
17450 {
17451 gcc_assert (!rtl);
17452 return 0;
17453 }
17454 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17455
17456 /* TODO: We might get more coverage if we were actually delaying expansion
17457 of all expressions until the end of compilation, when constant pools are
17458 fully populated. */
17459 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17460 {
17461 expansion_failed (loc, NULL_RTX,
17462 "CST value in contant pool but not marked.");
17463 return 0;
17464 }
17465 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17466 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17467 }
17468
17469 /* Return a dw_loc_list representing the address of addr_expr LOC
17470 by looking for an inner INDIRECT_REF expression and turning
17471 it into simple arithmetic.
17472
17473 See loc_list_from_tree for the meaning of CONTEXT. */
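/* For example, for something like &((*p).f) this emits the location of p,
adds the (constant and/or variable) offset of f, and, when a value
expression is acceptable, terminates it with DW_OP_stack_value. */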
17474
17475 static dw_loc_list_ref
17476 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17477 loc_descr_context *context)
17478 {
17479 tree obj, offset;
17480 poly_int64 bitsize, bitpos, bytepos;
17481 machine_mode mode;
17482 int unsignedp, reversep, volatilep = 0;
17483 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17484
17485 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17486 &bitsize, &bitpos, &offset, &mode,
17487 &unsignedp, &reversep, &volatilep);
17488 STRIP_NOPS (obj);
17489 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17490 {
17491 expansion_failed (loc, NULL_RTX, "bitfield access");
17492 return 0;
17493 }
17494 if (!INDIRECT_REF_P (obj))
17495 {
17496 expansion_failed (obj,
17497 NULL_RTX, "no indirect ref in inner refrence");
17498 return 0;
17499 }
17500 if (!offset && known_eq (bitpos, 0))
17501 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17502 context);
17503 else if (toplev
17504 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17505 && (dwarf_version >= 4 || !dwarf_strict))
17506 {
17507 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17508 if (!list_ret)
17509 return 0;
17510 if (offset)
17511 {
17512 /* Variable offset. */
17513 list_ret1 = loc_list_from_tree (offset, 0, context);
17514 if (list_ret1 == 0)
17515 return 0;
17516 add_loc_list (&list_ret, list_ret1);
17517 if (!list_ret)
17518 return 0;
17519 add_loc_descr_to_each (list_ret,
17520 new_loc_descr (DW_OP_plus, 0, 0));
17521 }
17522 HOST_WIDE_INT value;
17523 if (bytepos.is_constant (&value) && value > 0)
17524 add_loc_descr_to_each (list_ret,
17525 new_loc_descr (DW_OP_plus_uconst, value, 0));
17526 else if (maybe_ne (bytepos, 0))
17527 loc_list_plus_const (list_ret, bytepos);
17528 add_loc_descr_to_each (list_ret,
17529 new_loc_descr (DW_OP_stack_value, 0, 0));
17530 }
17531 return list_ret;
17532 }
17533
17534 /* Set LOC to the next operation that is not a DW_OP_nop operation. If
17535 all operations from LOC are nops, move to the last one. Insert in NOPS
17536 all operations that are skipped. */
17537
17538 static void
17539 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17540 hash_set<dw_loc_descr_ref> &nops)
17541 {
17542 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17543 {
17544 nops.add (loc);
17545 loc = loc->dw_loc_next;
17546 }
17547 }
17548
17549 /* Helper for loc_descr_without_nops: free the location description operation
17550 P. */
17551
17552 bool
17553 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17554 {
17555 ggc_free (loc);
17556 return true;
17557 }
17558
17559 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17560 finishes LOC. */
17561
17562 static void
17563 loc_descr_without_nops (dw_loc_descr_ref &loc)
17564 {
17565 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17566 return;
17567
17568 /* Set of all DW_OP_nop operations we remove. */
17569 hash_set<dw_loc_descr_ref> nops;
17570
17571 /* First, strip all prefix NOP operations in order to keep the head of the
17572 operations list. */
17573 loc_descr_to_next_no_nop (loc, nops);
17574
17575 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17576 {
17577 /* For control flow operations: strip "prefix" nops in destination
17578 labels. */
17579 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17580 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17581 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17582 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17583
17584 /* Do the same for the operations that follow, then move to the next
17585 iteration. */
17586 if (cur->dw_loc_next != NULL)
17587 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17588 cur = cur->dw_loc_next;
17589 }
17590
17591 nops.traverse<void *, free_loc_descr> (NULL);
17592 }
17593
17594
17595 struct dwarf_procedure_info;
17596
17597 /* Helper structure for location descriptions generation. */
17598 struct loc_descr_context
17599 {
17600 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17601 NULL_TREE if DW_OP_push_object_address is invalid for this location
17602 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17603 tree context_type;
17604 /* The ..._DECL node that should be translated as a
17605 DW_OP_push_object_address operation. */
17606 tree base_decl;
17607 /* Information about the DWARF procedure we are currently generating. NULL if
17608 we are not generating a DWARF procedure. */
17609 struct dwarf_procedure_info *dpi;
17610 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
17611 by consumer. Used for DW_TAG_generic_subrange attributes. */
17612 bool placeholder_arg;
17613 /* True if PLACEHOLDER_EXPR has been seen. */
17614 bool placeholder_seen;
17615 };
17616
17617 /* DWARF procedures generation
17618
17619 DWARF expressions (aka. location descriptions) are used to encode variable
17620 quantities such as sizes or offsets. Such computations can have redundant parts
17621 that can be factorized in order to reduce the size of the output debug
17622 information. This is the whole point of DWARF procedures.
17623
17624 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17625 already factorized into functions ("size functions") in order to handle very
17626 big and complex types. Such functions are quite simple: they have integral
17627 arguments, they return an integral result and their body contains only a
17628 return statement with arithmetic expressions. This is the only kind of
17629 function we are interested in translating into DWARF procedures, here.
17630
17631 DWARF expressions and DWARF procedures are executed using a stack, so we have
17632 to define some calling convention for them to interact. Let's say that:
17633
17634 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17635 all arguments in reverse order (right-to-left) so that when the DWARF
17636 procedure execution starts, the first argument is the top of the stack.
17637
17638 - Then, when returning, the DWARF procedure must have consumed all arguments
17639 on the stack, must have pushed the result and touched nothing else.
17640
17641 - Each argument and the result have integral types that can be held in a
17642 single stack slot.
17643
17644 - We call "frame offset" the number of stack slots that are "under DWARF
17645 procedure control": it includes the arguments slots, the temporaries and
17646 the result slot. Thus, it is equal to the number of arguments when the
17647 procedure execution starts and must be equal to one (the result) when it
17648 returns. */
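/* For illustration: to call a DWARF procedure that computes f (A, B), a
caller pushes the arguments right-to-left and then calls it:

<push B> <push A> DW_OP_call4 <DIE of the procedure>

On entry the frame offset is 2 (the two argument slots); when the procedure
returns, only f (A, B) remains on the stack, so the frame offset is 1. */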
17649
17650 /* Helper structure used when generating operations for a DWARF procedure. */
17651 struct dwarf_procedure_info
17652 {
17653 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17654 currently translated. */
17655 tree fndecl;
17656 /* The number of arguments FNDECL takes. */
17657 unsigned args_count;
17658 };
17659
17660 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17661 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17662 equate it to this DIE. */
17663
17664 static dw_die_ref
17665 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17666 dw_die_ref parent_die)
17667 {
17668 dw_die_ref dwarf_proc_die;
17669
17670 if ((dwarf_version < 3 && dwarf_strict)
17671 || location == NULL)
17672 return NULL;
17673
17674 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17675 if (fndecl)
17676 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17677 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17678 return dwarf_proc_die;
17679 }
17680
17681 /* Return whether TYPE is a supported type as a DWARF procedure argument
17682 type or return type (we handle only scalar types and pointer types that
17683 aren't wider than the DWARF expression evaluation stack). */
17684
17685 static bool
17686 is_handled_procedure_type (tree type)
17687 {
17688 return ((INTEGRAL_TYPE_P (type)
17689 || TREE_CODE (type) == OFFSET_TYPE
17690 || TREE_CODE (type) == POINTER_TYPE)
17691 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17692 }
17693
17694 /* Helper for resolve_args_picking: do the same but stop when coming across
17695 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17696 offset *before* evaluating the corresponding operation. */
17697
17698 static bool
17699 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17700 struct dwarf_procedure_info *dpi,
17701 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17702 {
17703 /* The "frame_offset" identifier is already used to name a macro... */
17704 unsigned frame_offset_ = initial_frame_offset;
17705 dw_loc_descr_ref l;
17706
17707 for (l = loc; l != NULL;)
17708 {
17709 bool existed;
17710 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17711
17712 /* If we already met this node, there is nothing to compute anymore. */
17713 if (existed)
17714 {
17715 /* Make sure that the stack size is consistent wherever the execution
17716 flow comes from. */
17717 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17718 break;
17719 }
17720 l_frame_offset = frame_offset_;
17721
17722 /* If needed, relocate the picking offset with respect to the frame
17723 offset. */
17724 if (l->frame_offset_rel)
17725 {
17726 unsigned HOST_WIDE_INT off;
17727 switch (l->dw_loc_opc)
17728 {
17729 case DW_OP_pick:
17730 off = l->dw_loc_oprnd1.v.val_unsigned;
17731 break;
17732 case DW_OP_dup:
17733 off = 0;
17734 break;
17735 case DW_OP_over:
17736 off = 1;
17737 break;
17738 default:
17739 gcc_unreachable ();
17740 }
17741 /* frame_offset_ is the size of the current stack frame, including
17742 incoming arguments. Besides, the arguments are pushed
17743 right-to-left. Thus, in order to access the Nth argument from
17744 this operation node, the picking has to skip temporaries *plus*
17745 one stack slot per argument (0 for the first one, 1 for the second
17746 one, etc.).
17747
17748 The targeted argument number (N) is already set as the operand,
17749 and the number of temporaries can be computed with:
17750 frame_offset_ - dpi->args_count */
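/* For instance, in a procedure with two arguments, once one temporary has
been pushed on top of them (frame_offset_ == 3), reaching argument N == 0
yields off = 0 + 3 - 2 = 1, i.e. a DW_OP_over, which skips the temporary
and duplicates the first argument. */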
17751 off += frame_offset_ - dpi->args_count;
17752
17753 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17754 if (off > 255)
17755 return false;
17756
17757 if (off == 0)
17758 {
17759 l->dw_loc_opc = DW_OP_dup;
17760 l->dw_loc_oprnd1.v.val_unsigned = 0;
17761 }
17762 else if (off == 1)
17763 {
17764 l->dw_loc_opc = DW_OP_over;
17765 l->dw_loc_oprnd1.v.val_unsigned = 0;
17766 }
17767 else
17768 {
17769 l->dw_loc_opc = DW_OP_pick;
17770 l->dw_loc_oprnd1.v.val_unsigned = off;
17771 }
17772 }
17773
17774 /* Update frame_offset according to the effect the current operation has
17775 on the stack. */
17776 switch (l->dw_loc_opc)
17777 {
17778 case DW_OP_deref:
17779 case DW_OP_swap:
17780 case DW_OP_rot:
17781 case DW_OP_abs:
17782 case DW_OP_neg:
17783 case DW_OP_not:
17784 case DW_OP_plus_uconst:
17785 case DW_OP_skip:
17786 case DW_OP_reg0:
17787 case DW_OP_reg1:
17788 case DW_OP_reg2:
17789 case DW_OP_reg3:
17790 case DW_OP_reg4:
17791 case DW_OP_reg5:
17792 case DW_OP_reg6:
17793 case DW_OP_reg7:
17794 case DW_OP_reg8:
17795 case DW_OP_reg9:
17796 case DW_OP_reg10:
17797 case DW_OP_reg11:
17798 case DW_OP_reg12:
17799 case DW_OP_reg13:
17800 case DW_OP_reg14:
17801 case DW_OP_reg15:
17802 case DW_OP_reg16:
17803 case DW_OP_reg17:
17804 case DW_OP_reg18:
17805 case DW_OP_reg19:
17806 case DW_OP_reg20:
17807 case DW_OP_reg21:
17808 case DW_OP_reg22:
17809 case DW_OP_reg23:
17810 case DW_OP_reg24:
17811 case DW_OP_reg25:
17812 case DW_OP_reg26:
17813 case DW_OP_reg27:
17814 case DW_OP_reg28:
17815 case DW_OP_reg29:
17816 case DW_OP_reg30:
17817 case DW_OP_reg31:
17818 case DW_OP_bregx:
17819 case DW_OP_piece:
17820 case DW_OP_deref_size:
17821 case DW_OP_nop:
17822 case DW_OP_bit_piece:
17823 case DW_OP_implicit_value:
17824 case DW_OP_stack_value:
17825 break;
17826
17827 case DW_OP_addr:
17828 case DW_OP_const1u:
17829 case DW_OP_const1s:
17830 case DW_OP_const2u:
17831 case DW_OP_const2s:
17832 case DW_OP_const4u:
17833 case DW_OP_const4s:
17834 case DW_OP_const8u:
17835 case DW_OP_const8s:
17836 case DW_OP_constu:
17837 case DW_OP_consts:
17838 case DW_OP_dup:
17839 case DW_OP_over:
17840 case DW_OP_pick:
17841 case DW_OP_lit0:
17842 case DW_OP_lit1:
17843 case DW_OP_lit2:
17844 case DW_OP_lit3:
17845 case DW_OP_lit4:
17846 case DW_OP_lit5:
17847 case DW_OP_lit6:
17848 case DW_OP_lit7:
17849 case DW_OP_lit8:
17850 case DW_OP_lit9:
17851 case DW_OP_lit10:
17852 case DW_OP_lit11:
17853 case DW_OP_lit12:
17854 case DW_OP_lit13:
17855 case DW_OP_lit14:
17856 case DW_OP_lit15:
17857 case DW_OP_lit16:
17858 case DW_OP_lit17:
17859 case DW_OP_lit18:
17860 case DW_OP_lit19:
17861 case DW_OP_lit20:
17862 case DW_OP_lit21:
17863 case DW_OP_lit22:
17864 case DW_OP_lit23:
17865 case DW_OP_lit24:
17866 case DW_OP_lit25:
17867 case DW_OP_lit26:
17868 case DW_OP_lit27:
17869 case DW_OP_lit28:
17870 case DW_OP_lit29:
17871 case DW_OP_lit30:
17872 case DW_OP_lit31:
17873 case DW_OP_breg0:
17874 case DW_OP_breg1:
17875 case DW_OP_breg2:
17876 case DW_OP_breg3:
17877 case DW_OP_breg4:
17878 case DW_OP_breg5:
17879 case DW_OP_breg6:
17880 case DW_OP_breg7:
17881 case DW_OP_breg8:
17882 case DW_OP_breg9:
17883 case DW_OP_breg10:
17884 case DW_OP_breg11:
17885 case DW_OP_breg12:
17886 case DW_OP_breg13:
17887 case DW_OP_breg14:
17888 case DW_OP_breg15:
17889 case DW_OP_breg16:
17890 case DW_OP_breg17:
17891 case DW_OP_breg18:
17892 case DW_OP_breg19:
17893 case DW_OP_breg20:
17894 case DW_OP_breg21:
17895 case DW_OP_breg22:
17896 case DW_OP_breg23:
17897 case DW_OP_breg24:
17898 case DW_OP_breg25:
17899 case DW_OP_breg26:
17900 case DW_OP_breg27:
17901 case DW_OP_breg28:
17902 case DW_OP_breg29:
17903 case DW_OP_breg30:
17904 case DW_OP_breg31:
17905 case DW_OP_fbreg:
17906 case DW_OP_push_object_address:
17907 case DW_OP_call_frame_cfa:
17908 case DW_OP_GNU_variable_value:
17909 case DW_OP_GNU_addr_index:
17910 case DW_OP_GNU_const_index:
17911 ++frame_offset_;
17912 break;
17913
17914 case DW_OP_drop:
17915 case DW_OP_xderef:
17916 case DW_OP_and:
17917 case DW_OP_div:
17918 case DW_OP_minus:
17919 case DW_OP_mod:
17920 case DW_OP_mul:
17921 case DW_OP_or:
17922 case DW_OP_plus:
17923 case DW_OP_shl:
17924 case DW_OP_shr:
17925 case DW_OP_shra:
17926 case DW_OP_xor:
17927 case DW_OP_bra:
17928 case DW_OP_eq:
17929 case DW_OP_ge:
17930 case DW_OP_gt:
17931 case DW_OP_le:
17932 case DW_OP_lt:
17933 case DW_OP_ne:
17934 case DW_OP_regx:
17935 case DW_OP_xderef_size:
17936 --frame_offset_;
17937 break;
17938
17939 case DW_OP_call2:
17940 case DW_OP_call4:
17941 case DW_OP_call_ref:
17942 {
17943 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17944 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17945
17946 if (stack_usage == NULL)
17947 return false;
17948 frame_offset_ += *stack_usage;
17949 break;
17950 }
17951
17952 case DW_OP_implicit_pointer:
17953 case DW_OP_entry_value:
17954 case DW_OP_const_type:
17955 case DW_OP_regval_type:
17956 case DW_OP_deref_type:
17957 case DW_OP_convert:
17958 case DW_OP_reinterpret:
17959 case DW_OP_form_tls_address:
17960 case DW_OP_GNU_push_tls_address:
17961 case DW_OP_GNU_uninit:
17962 case DW_OP_GNU_encoded_addr:
17963 case DW_OP_GNU_implicit_pointer:
17964 case DW_OP_GNU_entry_value:
17965 case DW_OP_GNU_const_type:
17966 case DW_OP_GNU_regval_type:
17967 case DW_OP_GNU_deref_type:
17968 case DW_OP_GNU_convert:
17969 case DW_OP_GNU_reinterpret:
17970 case DW_OP_GNU_parameter_ref:
17971 /* loc_list_from_tree will probably not output these operations for
17972 size functions, so assume they will not appear here. */
17973 /* Fall through... */
17974
17975 default:
17976 gcc_unreachable ();
17977 }
17978
17979 /* Now, follow the control flow (except subroutine calls). */
17980 switch (l->dw_loc_opc)
17981 {
17982 case DW_OP_bra:
17983 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17984 frame_offsets))
17985 return false;
17986 /* Fall through. */
17987
17988 case DW_OP_skip:
17989 l = l->dw_loc_oprnd1.v.val_loc;
17990 break;
17991
17992 case DW_OP_stack_value:
17993 return true;
17994
17995 default:
17996 l = l->dw_loc_next;
17997 break;
17998 }
17999 }
18000
18001 return true;
18002 }
18003
18004 /* Make a DFS over operations reachable through LOC (i.e. follow branch
18005 operations) in order to resolve the operand of DW_OP_pick operations that
18006 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
18007 offset *before* LOC is executed. Return whether all relocations were
18008 successful. */
18009
18010 static bool
18011 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
18012 struct dwarf_procedure_info *dpi)
18013 {
18014 /* Associate to all visited operations the frame offset *before* evaluating
18015 this operation. */
18016 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
18017
18018 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
18019 frame_offsets);
18020 }
18021
18022 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
18023 Return NULL if it is not possible. */
18024
18025 static dw_die_ref
18026 function_to_dwarf_procedure (tree fndecl)
18027 {
18028 struct loc_descr_context ctx;
18029 struct dwarf_procedure_info dpi;
18030 dw_die_ref dwarf_proc_die;
18031 tree tree_body = DECL_SAVED_TREE (fndecl);
18032 dw_loc_descr_ref loc_body, epilogue;
18033
18034 tree cursor;
18035 unsigned i;
18036
18037 /* Do not generate multiple DWARF procedures for the same function
18038 declaration. */
18039 dwarf_proc_die = lookup_decl_die (fndecl);
18040 if (dwarf_proc_die != NULL)
18041 return dwarf_proc_die;
18042
18043 /* DWARF procedures are available starting with the DWARFv3 standard. */
18044 if (dwarf_version < 3 && dwarf_strict)
18045 return NULL;
18046
18047 /* We handle only functions for which we still have a body, that return a
18048 supported type and that take arguments with supported types. Note that
18049 there is no point translating functions that return nothing. */
18050 if (tree_body == NULL_TREE
18051 || DECL_RESULT (fndecl) == NULL_TREE
18052 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
18053 return NULL;
18054
18055 for (cursor = DECL_ARGUMENTS (fndecl);
18056 cursor != NULL_TREE;
18057 cursor = TREE_CHAIN (cursor))
18058 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
18059 return NULL;
18060
18061 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
18062 if (TREE_CODE (tree_body) != RETURN_EXPR)
18063 return NULL;
18064 tree_body = TREE_OPERAND (tree_body, 0);
18065 if (TREE_CODE (tree_body) != MODIFY_EXPR
18066 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
18067 return NULL;
18068 tree_body = TREE_OPERAND (tree_body, 1);
18069
18070 /* Try to translate the body expression itself. Note that this will probably
18071 cause an infinite recursion if its call graph has a cycle. This is very
18072 unlikely for size functions, however, so don't bother with such things at
18073 the moment. */
18074 ctx.context_type = NULL_TREE;
18075 ctx.base_decl = NULL_TREE;
18076 ctx.dpi = &dpi;
18077 ctx.placeholder_arg = false;
18078 ctx.placeholder_seen = false;
18079 dpi.fndecl = fndecl;
18080 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
18081 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
18082 if (!loc_body)
18083 return NULL;
18084
18085 /* After evaluating all operands in "loc_body", we should still have on the
18086 stack all arguments plus the desired function result (top of the stack).
18087 Generate code in order to keep only the result in our stack frame. */
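/* For a two-argument procedure, the epilogue built below is
DW_OP_swap DW_OP_drop DW_OP_swap DW_OP_drop
which pops both argument slots while keeping the result on top. */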
18088 epilogue = NULL;
18089 for (i = 0; i < dpi.args_count; ++i)
18090 {
18091 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
18092 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
18093 op_couple->dw_loc_next->dw_loc_next = epilogue;
18094 epilogue = op_couple;
18095 }
18096 add_loc_descr (&loc_body, epilogue);
18097 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
18098 return NULL;
18099
18100 /* Trailing nops from loc_descriptor_from_tree (if any) cannot be removed
18101 because they are considered useful. Now that there is an epilogue, they
18102 are not anymore, so give it another try. */
18103 loc_descr_without_nops (loc_body);
18104
18105 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
18106 a DW_TAG_dwarf_procedure, so we may have a conflict, here. It's unlikely,
18107 though, given that size functions do not come from source, so they should
18108 not have a dedicated DW_TAG_subprogram DIE. */
18109 dwarf_proc_die
18110 = new_dwarf_proc_die (loc_body, fndecl,
18111 get_context_die (DECL_CONTEXT (fndecl)));
18112
18113 /* The called DWARF procedure consumes one stack slot per argument and
18114 returns one stack slot. */
18115 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
18116
18117 return dwarf_proc_die;
18118 }
18119
18120
18121 /* Generate Dwarf location list representing LOC.
18122 If WANT_ADDRESS is 0, an expression computing the value of LOC is returned.
18123 If WANT_ADDRESS is 1, an expression computing the address of LOC is returned.
18124 If WANT_ADDRESS is 2, an expression computing an address usable in a
18125 location expression is returned (i.e. DW_OP_reg can be used
18126 to refer to register values).
18127
18128 CONTEXT provides information to customize the location descriptions
18129 generation. Its context_type field specifies what type is implicitly
18130 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
18131 will not be generated.
18132
18133 Its DPI field determines whether we are generating a DWARF expression for a
18134 DWARF procedure, so PARM_DECL references are processed specifically.
18135
18136 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
18137 and dpi fields were null. */
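/* As a rough illustration: for a variable living in memory at a frame
offset, WANT_ADDRESS == 1 may yield something like DW_OP_fbreg <offset>,
WANT_ADDRESS == 0 gets a DW_OP_deref appended to fetch the value, and
WANT_ADDRESS == 2 additionally allows register locations such as
DW_OP_reg<n>. */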
18138
18139 static dw_loc_list_ref
18140 loc_list_from_tree_1 (tree loc, int want_address,
18141 struct loc_descr_context *context)
18142 {
18143 dw_loc_descr_ref ret = NULL, ret1 = NULL;
18144 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
18145 int have_address = 0;
18146 enum dwarf_location_atom op;
18147
18148 /* ??? Most of the time we do not take proper care to sign/zero
18149 extend the values. Hopefully this won't be a real
18150 problem... */
18151
18152 if (context != NULL
18153 && context->base_decl == loc
18154 && want_address == 0)
18155 {
18156 if (dwarf_version >= 3 || !dwarf_strict)
18157 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
18158 NULL, 0, NULL, 0, NULL);
18159 else
18160 return NULL;
18161 }
18162
18163 switch (TREE_CODE (loc))
18164 {
18165 case ERROR_MARK:
18166 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
18167 return 0;
18168
18169 case PLACEHOLDER_EXPR:
18170 /* This case involves extracting fields from an object to determine the
18171 position of other fields. It is supposed to appear only as the first
18172 operand of COMPONENT_REF nodes and to reference precisely the type
18173 that the context allows. */
18174 if (context != NULL
18175 && TREE_TYPE (loc) == context->context_type
18176 && want_address >= 1)
18177 {
18178 if (dwarf_version >= 3 || !dwarf_strict)
18179 {
18180 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
18181 have_address = 1;
18182 break;
18183 }
18184 else
18185 return NULL;
18186 }
18187 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
18188 the single argument passed by consumer. */
18189 else if (context != NULL
18190 && context->placeholder_arg
18191 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
18192 && want_address == 0)
18193 {
18194 ret = new_loc_descr (DW_OP_pick, 0, 0);
18195 ret->frame_offset_rel = 1;
18196 context->placeholder_seen = true;
18197 break;
18198 }
18199 else
18200 expansion_failed (loc, NULL_RTX,
18201 "PLACEHOLDER_EXPR for an unexpected type");
18202 break;
18203
18204 case CALL_EXPR:
18205 {
18206 const int nargs = call_expr_nargs (loc);
18207 tree callee = get_callee_fndecl (loc);
18208 int i;
18209 dw_die_ref dwarf_proc;
18210
18211 if (callee == NULL_TREE)
18212 goto call_expansion_failed;
18213
18214 /* We handle only functions that return an integer. */
18215 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
18216 goto call_expansion_failed;
18217
18218 dwarf_proc = function_to_dwarf_procedure (callee);
18219 if (dwarf_proc == NULL)
18220 goto call_expansion_failed;
18221
18222 /* Evaluate arguments right-to-left so that the first argument will
18223 be the top-most one on the stack. */
18224 for (i = nargs - 1; i >= 0; --i)
18225 {
18226 dw_loc_descr_ref loc_descr
18227 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
18228 context);
18229
18230 if (loc_descr == NULL)
18231 goto call_expansion_failed;
18232
18233 add_loc_descr (&ret, loc_descr);
18234 }
18235
18236 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
18237 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18238 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
18239 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
18240 add_loc_descr (&ret, ret1);
18241 break;
18242
18243 call_expansion_failed:
18244 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
18245 /* There are no opcodes for these operations. */
18246 return 0;
18247 }
18248
18249 case PREINCREMENT_EXPR:
18250 case PREDECREMENT_EXPR:
18251 case POSTINCREMENT_EXPR:
18252 case POSTDECREMENT_EXPR:
18253 expansion_failed (loc, NULL_RTX, "PRE/POST INDCREMENT/DECREMENT");
18254 /* There are no opcodes for these operations. */
18255 return 0;
18256
18257 case ADDR_EXPR:
18258 /* If we already want an address, see if there is INDIRECT_REF inside
18259 e.g. for &this->field. */
18260 if (want_address)
18261 {
18262 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18263 (loc, want_address == 2, context);
18264 if (list_ret)
18265 have_address = 1;
18266 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18267 && (ret = cst_pool_loc_descr (loc)))
18268 have_address = 1;
18269 }
18270 /* Otherwise, process the argument and look for the address. */
18271 if (!list_ret && !ret)
18272 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18273 else
18274 {
18275 if (want_address)
18276 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18277 return NULL;
18278 }
18279 break;
18280
18281 case VAR_DECL:
18282 if (DECL_THREAD_LOCAL_P (loc))
18283 {
18284 rtx rtl;
18285 enum dwarf_location_atom tls_op;
18286 enum dtprel_bool dtprel = dtprel_false;
18287
18288 if (targetm.have_tls)
18289 {
18290 /* If this is not defined, we have no way to emit the
18291 data. */
18292 if (!targetm.asm_out.output_dwarf_dtprel)
18293 return 0;
18294
18295 /* The way DW_OP_GNU_push_tls_address is specified, we
18296 can only look up addresses of objects in the current
18297 module. We used DW_OP_addr as first op, but that's
18298 wrong, because DW_OP_addr is relocated by the debug
18299 info consumer, while DW_OP_GNU_push_tls_address
18300 operand shouldn't be. */
18301 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18302 return 0;
18303 dtprel = dtprel_true;
18304 /* We check for DWARF 5 here because gdb did not implement
18305 DW_OP_form_tls_address until after 7.12. */
18306 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18307 : DW_OP_GNU_push_tls_address);
18308 }
18309 else
18310 {
18311 if (!targetm.emutls.debug_form_tls_address
18312 || !(dwarf_version >= 3 || !dwarf_strict))
18313 return 0;
18314 /* We stuffed the control variable into the DECL_VALUE_EXPR
18315 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18316 no longer appear in gimple code. We used the control
18317 variable specifically so that we could pick it up here. */
18318 loc = DECL_VALUE_EXPR (loc);
18319 tls_op = DW_OP_form_tls_address;
18320 }
18321
18322 rtl = rtl_for_decl_location (loc);
18323 if (rtl == NULL_RTX)
18324 return 0;
18325
18326 if (!MEM_P (rtl))
18327 return 0;
18328 rtl = XEXP (rtl, 0);
18329 if (! CONSTANT_P (rtl))
18330 return 0;
18331
18332 ret = new_addr_loc_descr (rtl, dtprel);
18333 ret1 = new_loc_descr (tls_op, 0, 0);
18334 add_loc_descr (&ret, ret1);
18335
18336 have_address = 1;
18337 break;
18338 }
18339 /* FALLTHRU */
18340
18341 case PARM_DECL:
18342 if (context != NULL && context->dpi != NULL
18343 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18344 {
18345 /* We are generating code for a DWARF procedure and we want to access
18346 one of its arguments: find the appropriate argument offset and let
18347 the resolve_args_picking pass compute the offset that complies
18348 with the stack frame size. */
18349 unsigned i = 0;
18350 tree cursor;
18351
18352 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18353 cursor != NULL_TREE && cursor != loc;
18354 cursor = TREE_CHAIN (cursor), ++i)
18355 ;
18356 /* If we are translating a DWARF procedure, all referenced parameters
18357 must belong to the current function. */
18358 gcc_assert (cursor != NULL_TREE);
18359
18360 ret = new_loc_descr (DW_OP_pick, i, 0);
18361 ret->frame_offset_rel = 1;
18362 break;
18363 }
18364 /* FALLTHRU */
18365
18366 case RESULT_DECL:
18367 if (DECL_HAS_VALUE_EXPR_P (loc))
18368 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18369 want_address, context);
18370 /* FALLTHRU */
18371
18372 case FUNCTION_DECL:
18373 {
18374 rtx rtl;
18375 var_loc_list *loc_list = lookup_decl_loc (loc);
18376
18377 if (loc_list && loc_list->first)
18378 {
18379 list_ret = dw_loc_list (loc_list, loc, want_address);
18380 have_address = want_address != 0;
18381 break;
18382 }
18383 rtl = rtl_for_decl_location (loc);
18384 if (rtl == NULL_RTX)
18385 {
18386 if (TREE_CODE (loc) != FUNCTION_DECL
18387 && early_dwarf
18388 && current_function_decl
18389 && want_address != 1
18390 && ! DECL_IGNORED_P (loc)
18391 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18392 || POINTER_TYPE_P (TREE_TYPE (loc)))
18393 && DECL_CONTEXT (loc) == current_function_decl
18394 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18395 <= DWARF2_ADDR_SIZE))
18396 {
18397 dw_die_ref ref = lookup_decl_die (loc);
18398 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18399 if (ref)
18400 {
18401 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18402 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18403 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18404 }
18405 else
18406 {
18407 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18408 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18409 }
18410 break;
18411 }
18412 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18413 return 0;
18414 }
18415 else if (CONST_INT_P (rtl))
18416 {
18417 HOST_WIDE_INT val = INTVAL (rtl);
18418 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18419 val &= GET_MODE_MASK (DECL_MODE (loc));
18420 ret = int_loc_descriptor (val);
18421 }
18422 else if (GET_CODE (rtl) == CONST_STRING)
18423 {
18424 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18425 return 0;
18426 }
18427 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18428 ret = new_addr_loc_descr (rtl, dtprel_false);
18429 else
18430 {
18431 machine_mode mode, mem_mode;
18432
18433 /* Certain constructs can only be represented at top-level. */
18434 if (want_address == 2)
18435 {
18436 ret = loc_descriptor (rtl, VOIDmode,
18437 VAR_INIT_STATUS_INITIALIZED);
18438 have_address = 1;
18439 }
18440 else
18441 {
18442 mode = GET_MODE (rtl);
18443 mem_mode = VOIDmode;
18444 if (MEM_P (rtl))
18445 {
18446 mem_mode = mode;
18447 mode = get_address_mode (rtl);
18448 rtl = XEXP (rtl, 0);
18449 have_address = 1;
18450 }
18451 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18452 VAR_INIT_STATUS_INITIALIZED);
18453 }
18454 if (!ret)
18455 expansion_failed (loc, rtl,
18456 "failed to produce loc descriptor for rtl");
18457 }
18458 }
18459 break;
18460
18461 case MEM_REF:
18462 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18463 {
18464 have_address = 1;
18465 goto do_plus;
18466 }
18467 /* Fallthru. */
18468 case INDIRECT_REF:
18469 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18470 have_address = 1;
18471 break;
18472
18473 case TARGET_MEM_REF:
18474 case SSA_NAME:
18475 case DEBUG_EXPR_DECL:
18476 return NULL;
18477
18478 case COMPOUND_EXPR:
18479 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18480 context);
18481
18482 CASE_CONVERT:
18483 case VIEW_CONVERT_EXPR:
18484 case SAVE_EXPR:
18485 case MODIFY_EXPR:
18486 case NON_LVALUE_EXPR:
18487 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18488 context);
18489
18490 case COMPONENT_REF:
18491 case BIT_FIELD_REF:
18492 case ARRAY_REF:
18493 case ARRAY_RANGE_REF:
18494 case REALPART_EXPR:
18495 case IMAGPART_EXPR:
18496 {
18497 tree obj, offset;
18498 poly_int64 bitsize, bitpos, bytepos;
18499 machine_mode mode;
18500 int unsignedp, reversep, volatilep = 0;
18501
18502 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18503 &unsignedp, &reversep, &volatilep);
18504
18505 gcc_assert (obj != loc);
18506
18507 list_ret = loc_list_from_tree_1 (obj,
18508 want_address == 2
18509 && known_eq (bitpos, 0)
18510 && !offset ? 2 : 1,
18511 context);
18512 /* TODO: We can extract the value of a small expression via shifting even
18513 for nonzero bitpos. */
18514 if (list_ret == 0)
18515 return 0;
18516 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18517 || !multiple_p (bitsize, BITS_PER_UNIT))
18518 {
18519 expansion_failed (loc, NULL_RTX,
18520 "bitfield access");
18521 return 0;
18522 }
18523
18524 if (offset != NULL_TREE)
18525 {
18526 /* Variable offset. */
18527 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18528 if (list_ret1 == 0)
18529 return 0;
18530 add_loc_list (&list_ret, list_ret1);
18531 if (!list_ret)
18532 return 0;
18533 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18534 }
18535
18536 HOST_WIDE_INT value;
18537 if (bytepos.is_constant (&value) && value > 0)
18538 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18539 value, 0));
18540 else if (maybe_ne (bytepos, 0))
18541 loc_list_plus_const (list_ret, bytepos);
18542
18543 have_address = 1;
18544 break;
18545 }
18546
18547 case INTEGER_CST:
18548 if ((want_address || !tree_fits_shwi_p (loc))
18549 && (ret = cst_pool_loc_descr (loc)))
18550 have_address = 1;
18551 else if (want_address == 2
18552 && tree_fits_shwi_p (loc)
18553 && (ret = address_of_int_loc_descriptor
18554 (int_size_in_bytes (TREE_TYPE (loc)),
18555 tree_to_shwi (loc))))
18556 have_address = 1;
18557 else if (tree_fits_shwi_p (loc))
18558 ret = int_loc_descriptor (tree_to_shwi (loc));
18559 else if (tree_fits_uhwi_p (loc))
18560 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18561 else
18562 {
18563 expansion_failed (loc, NULL_RTX,
18564 "Integer operand is not host integer");
18565 return 0;
18566 }
18567 break;
18568
18569 case CONSTRUCTOR:
18570 case REAL_CST:
18571 case STRING_CST:
18572 case COMPLEX_CST:
18573 if ((ret = cst_pool_loc_descr (loc)))
18574 have_address = 1;
18575 else if (TREE_CODE (loc) == CONSTRUCTOR)
18576 {
18577 tree type = TREE_TYPE (loc);
18578 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18579 unsigned HOST_WIDE_INT offset = 0;
18580 unsigned HOST_WIDE_INT cnt;
18581 constructor_elt *ce;
18582
18583 if (TREE_CODE (type) == RECORD_TYPE)
18584 {
18585 /* This is very limited, but it's enough to output
18586 pointers to member functions, as long as the
18587 referenced function is defined in the current
18588 translation unit. */
18589 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18590 {
18591 tree val = ce->value;
18592
18593 tree field = ce->index;
18594
18595 if (val)
18596 STRIP_NOPS (val);
18597
18598 if (!field || DECL_BIT_FIELD (field))
18599 {
18600 expansion_failed (loc, NULL_RTX,
18601 "bitfield in record type constructor");
18602 size = offset = (unsigned HOST_WIDE_INT)-1;
18603 ret = NULL;
18604 break;
18605 }
18606
18607 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18608 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18609 gcc_assert (pos + fieldsize <= size);
18610 if (pos < offset)
18611 {
18612 expansion_failed (loc, NULL_RTX,
18613 "out-of-order fields in record constructor");
18614 size = offset = (unsigned HOST_WIDE_INT)-1;
18615 ret = NULL;
18616 break;
18617 }
18618 if (pos > offset)
18619 {
18620 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18621 add_loc_descr (&ret, ret1);
18622 offset = pos;
18623 }
18624 if (val && fieldsize != 0)
18625 {
18626 ret1 = loc_descriptor_from_tree (val, want_address, context);
18627 if (!ret1)
18628 {
18629 expansion_failed (loc, NULL_RTX,
18630 "unsupported expression in field");
18631 size = offset = (unsigned HOST_WIDE_INT)-1;
18632 ret = NULL;
18633 break;
18634 }
18635 add_loc_descr (&ret, ret1);
18636 }
18637 if (fieldsize)
18638 {
18639 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18640 add_loc_descr (&ret, ret1);
18641 offset = pos + fieldsize;
18642 }
18643 }
18644
18645 if (offset != size)
18646 {
18647 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18648 add_loc_descr (&ret, ret1);
18649 offset = size;
18650 }
18651
18652 have_address = !!want_address;
18653 }
18654 else
18655 expansion_failed (loc, NULL_RTX,
18656 "constructor of non-record type");
18657 }
18658 else
18659 /* We can construct small constants here using int_loc_descriptor. */
18660 expansion_failed (loc, NULL_RTX,
18661 "constructor or constant not in constant pool");
18662 break;
18663
18664 case TRUTH_AND_EXPR:
18665 case TRUTH_ANDIF_EXPR:
18666 case BIT_AND_EXPR:
18667 op = DW_OP_and;
18668 goto do_binop;
18669
18670 case TRUTH_XOR_EXPR:
18671 case BIT_XOR_EXPR:
18672 op = DW_OP_xor;
18673 goto do_binop;
18674
18675 case TRUTH_OR_EXPR:
18676 case TRUTH_ORIF_EXPR:
18677 case BIT_IOR_EXPR:
18678 op = DW_OP_or;
18679 goto do_binop;
18680
18681 case FLOOR_DIV_EXPR:
18682 case CEIL_DIV_EXPR:
18683 case ROUND_DIV_EXPR:
18684 case TRUNC_DIV_EXPR:
18685 case EXACT_DIV_EXPR:
18686 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18687 return 0;
18688 op = DW_OP_div;
18689 goto do_binop;
18690
18691 case MINUS_EXPR:
18692 op = DW_OP_minus;
18693 goto do_binop;
18694
18695 case FLOOR_MOD_EXPR:
18696 case CEIL_MOD_EXPR:
18697 case ROUND_MOD_EXPR:
18698 case TRUNC_MOD_EXPR:
18699 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18700 {
18701 op = DW_OP_mod;
18702 goto do_binop;
18703 }
18704 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18705 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18706 if (list_ret == 0 || list_ret1 == 0)
18707 return 0;
18708
18709 add_loc_list (&list_ret, list_ret1);
18710 if (list_ret == 0)
18711 return 0;
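/* DW_OP_mod is only used for unsigned operands above; for signed types,
compute the remainder as op0 - (op0 / op1) * op1. The two DW_OP_over
operations duplicate both operands before the divide, multiply and
subtract. */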
18712 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18713 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18714 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18715 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18716 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
18717 break;
18718
18719 case MULT_EXPR:
18720 op = DW_OP_mul;
18721 goto do_binop;
18722
18723 case LSHIFT_EXPR:
18724 op = DW_OP_shl;
18725 goto do_binop;
18726
18727 case RSHIFT_EXPR:
18728 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18729 goto do_binop;
18730
18731 case POINTER_PLUS_EXPR:
18732 case PLUS_EXPR:
18733 do_plus:
18734 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18735 {
18736 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18737 smarter to encode their opposite. The DW_OP_plus_uconst operation
18738 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18739 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18740 bytes, Y being the size of the operation that pushes the opposite
18741 of the addend. So let's choose the smallest representation. */
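/* E.g. for an addend of -1 on a 64-bit target, DW_OP_plus_uconst needs a
ten-byte ULEB128 operand (eleven bytes in total), whereas the alternative
DW_OP_lit1 DW_OP_minus takes only two bytes. */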
18742 const tree tree_addend = TREE_OPERAND (loc, 1);
18743 offset_int wi_addend;
18744 HOST_WIDE_INT shwi_addend;
18745 dw_loc_descr_ref loc_naddend;
18746
18747 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18748 if (list_ret == 0)
18749 return 0;
18750
18751 /* Try to get the literal to push. It is the opposite of the addend,
18752 so as we rely on wrapping during DWARF evaluation, first decode
18753 the literal as a "DWARF-sized" signed number. */
18754 wi_addend = wi::to_offset (tree_addend);
18755 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18756 shwi_addend = wi_addend.to_shwi ();
18757 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18758 ? int_loc_descriptor (-shwi_addend)
18759 : NULL;
18760
18761 if (loc_naddend != NULL
18762 && ((unsigned) size_of_uleb128 (shwi_addend)
18763 > size_of_loc_descr (loc_naddend)))
18764 {
18765 add_loc_descr_to_each (list_ret, loc_naddend);
18766 add_loc_descr_to_each (list_ret,
18767 new_loc_descr (DW_OP_minus, 0, 0));
18768 }
18769 else
18770 {
18771 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18772 {
18773 loc_naddend = loc_cur;
18774 loc_cur = loc_cur->dw_loc_next;
18775 ggc_free (loc_naddend);
18776 }
18777 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18778 }
18779 break;
18780 }
18781
18782 op = DW_OP_plus;
18783 goto do_binop;
18784
18785 case LE_EXPR:
18786 op = DW_OP_le;
18787 goto do_comp_binop;
18788
18789 case GE_EXPR:
18790 op = DW_OP_ge;
18791 goto do_comp_binop;
18792
18793 case LT_EXPR:
18794 op = DW_OP_lt;
18795 goto do_comp_binop;
18796
18797 case GT_EXPR:
18798 op = DW_OP_gt;
18799 goto do_comp_binop;
18800
18801 do_comp_binop:
18802 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18803 {
18804 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18805 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18806 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18807 TREE_CODE (loc));
18808 break;
18809 }
18810 else
18811 goto do_binop;
18812
18813 case EQ_EXPR:
18814 op = DW_OP_eq;
18815 goto do_binop;
18816
18817 case NE_EXPR:
18818 op = DW_OP_ne;
18819 goto do_binop;
18820
18821 do_binop:
18822 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18823 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18824 if (list_ret == 0 || list_ret1 == 0)
18825 return 0;
18826
18827 add_loc_list (&list_ret, list_ret1);
18828 if (list_ret == 0)
18829 return 0;
18830 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18831 break;
18832
18833 case TRUTH_NOT_EXPR:
18834 case BIT_NOT_EXPR:
18835 op = DW_OP_not;
18836 goto do_unop;
18837
18838 case ABS_EXPR:
18839 op = DW_OP_abs;
18840 goto do_unop;
18841
18842 case NEGATE_EXPR:
18843 op = DW_OP_neg;
18844 goto do_unop;
18845
18846 do_unop:
18847 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18848 if (list_ret == 0)
18849 return 0;
18850
18851 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18852 break;
18853
18854 case MIN_EXPR:
18855 case MAX_EXPR:
18856 {
18857 const enum tree_code code =
18858 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18859
18860 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18861 build2 (code, integer_type_node,
18862 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18863 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18864 }
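/* E.g. MIN_EXPR (a, b) is rewritten above as (a > b) ? b : a and then
handled by the COND_EXPR code below. */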
18865
18866 /* fall through */
18867
18868 case COND_EXPR:
18869 {
18870 dw_loc_descr_ref lhs
18871 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18872 dw_loc_list_ref rhs
18873 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18874 dw_loc_descr_ref bra_node, jump_node, tmp;
18875
18876 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18877 if (list_ret == 0 || lhs == 0 || rhs == 0)
18878 return 0;
18879
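/* The emitted sequence is
<condition> DW_OP_bra L1 <rhs> DW_OP_skip L2 L1: <lhs> L2: DW_OP_nop
so that <lhs> is evaluated when the condition is nonzero and <rhs>
otherwise. */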
18880 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18881 add_loc_descr_to_each (list_ret, bra_node);
18882
18883 add_loc_list (&list_ret, rhs);
18884 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18885 add_loc_descr_to_each (list_ret, jump_node);
18886
18887 add_loc_descr_to_each (list_ret, lhs);
18888 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18889 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18890
18891 /* ??? Need a node to point the skip at. Use a nop. */
18892 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18893 add_loc_descr_to_each (list_ret, tmp);
18894 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18895 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
18896 }
18897 break;
18898
18899 case FIX_TRUNC_EXPR:
18900 return 0;
18901
18902 default:
18903 /* Leave front-end specific codes as simply unknown. This comes
18904 up, for instance, with the C STMT_EXPR. */
18905 if ((unsigned int) TREE_CODE (loc)
18906 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18907 {
18908 expansion_failed (loc, NULL_RTX,
18909 "language specific tree node");
18910 return 0;
18911 }
18912
18913 /* Otherwise this is a generic code; we should just list all of
18914 these explicitly. We forgot one. */
18915 if (flag_checking)
18916 gcc_unreachable ();
18917
18918 /* In a release build, we want to degrade gracefully: better to
18919 generate incomplete debugging information than to crash. */
18920 return NULL;
18921 }
18922
18923 if (!ret && !list_ret)
18924 return 0;
18925
18926 if (want_address == 2 && !have_address
18927 && (dwarf_version >= 4 || !dwarf_strict))
18928 {
18929 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18930 {
18931 expansion_failed (loc, NULL_RTX,
18932 "DWARF address size mismatch");
18933 return 0;
18934 }
18935 if (ret)
18936 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18937 else
18938 add_loc_descr_to_each (list_ret,
18939 new_loc_descr (DW_OP_stack_value, 0, 0));
18940 have_address = 1;
18941 }
18942 /* Show if we can't fill the request for an address. */
18943 if (want_address && !have_address)
18944 {
18945 expansion_failed (loc, NULL_RTX,
18946 "Want address and only have value");
18947 return 0;
18948 }
18949
18950 gcc_assert (!ret || !list_ret);
18951
18952 /* If we've got an address and don't want one, dereference. */
18953 if (!want_address && have_address)
18954 {
18955 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18956
18957 if (size > DWARF2_ADDR_SIZE || size == -1)
18958 {
18959 expansion_failed (loc, NULL_RTX,
18960 "DWARF address size mismatch");
18961 return 0;
18962 }
18963 else if (size == DWARF2_ADDR_SIZE)
18964 op = DW_OP_deref;
18965 else
18966 op = DW_OP_deref_size;
18967
18968 if (ret)
18969 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18970 else
18971 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18972 }
18973 if (ret)
18974 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
18975
18976 return list_ret;
18977 }
18978
18979 /* Likewise, but strip useless DW_OP_nop operations in the resulting
18980 expressions. */
18981
18982 static dw_loc_list_ref
18983 loc_list_from_tree (tree loc, int want_address,
18984 struct loc_descr_context *context)
18985 {
18986 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
18987
18988 for (dw_loc_list_ref loc_cur = result;
18989 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
18990 loc_descr_without_nops (loc_cur->expr);
18991 return result;
18992 }
18993
18994 /* Same as above but return only single location expression. */
18995 static dw_loc_descr_ref
18996 loc_descriptor_from_tree (tree loc, int want_address,
18997 struct loc_descr_context *context)
18998 {
18999 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
19000 if (!ret)
19001 return NULL;
19002 if (ret->dw_loc_next)
19003 {
19004 expansion_failed (loc, NULL_RTX,
19005 "Location list where only loc descriptor needed");
19006 return NULL;
19007 }
19008 return ret->expr;
19009 }
19010
19011 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
19012 pointer to the declared type for the relevant field variable, or return
19013 `integer_type_node' if the given node turns out to be an
19014 ERROR_MARK node. */
19015
19016 static inline tree
19017 field_type (const_tree decl)
19018 {
19019 tree type;
19020
19021 if (TREE_CODE (decl) == ERROR_MARK)
19022 return integer_type_node;
19023
19024 type = DECL_BIT_FIELD_TYPE (decl);
19025 if (type == NULL_TREE)
19026 type = TREE_TYPE (decl);
19027
19028 return type;
19029 }
19030
19031 /* Given a pointer to a tree node, return the alignment in bits for
19032 it, or else return BITS_PER_WORD if the node actually turns out to
19033 be an ERROR_MARK node. */
19034
19035 static inline unsigned
19036 simple_type_align_in_bits (const_tree type)
19037 {
19038 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
19039 }
19040
19041 static inline unsigned
19042 simple_decl_align_in_bits (const_tree decl)
19043 {
19044 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
19045 }
19046
19047 /* Return the result of rounding T up to ALIGN. */
19048
19049 static inline offset_int
19050 round_up_to_align (const offset_int &t, unsigned int align)
19051 {
19052 return wi::udiv_trunc (t + align - 1, align) * align;
19053 }
19054
19055 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
19056 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
19057 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
19058 if we fail to return the size in one of these two forms. */
19059
19060 static dw_loc_descr_ref
19061 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
19062 {
19063 tree tree_size;
19064 struct loc_descr_context ctx;
19065
19066 /* Prefer returning a constant integer, if possible. */
19067 *cst_size = int_size_in_bytes (type);
19068 if (*cst_size != -1)
19069 return NULL;
19070
19071 ctx.context_type = const_cast<tree> (type);
19072 ctx.base_decl = NULL_TREE;
19073 ctx.dpi = NULL;
19074 ctx.placeholder_arg = false;
19075 ctx.placeholder_seen = false;
19076
19077 type = TYPE_MAIN_VARIANT (type);
19078 tree_size = TYPE_SIZE_UNIT (type);
19079 return ((tree_size != NULL_TREE)
19080 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
19081 : NULL);
19082 }
19083
19084 /* Helper structure for RECORD_TYPE processing. */
19085 struct vlr_context
19086 {
19087 /* Root RECORD_TYPE. It is needed to generate data member location
19088 descriptions in variable-length records (VLR), but also to cope with
19089 variants, which are composed of nested structures multiplexed with
19090 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
19091 function processing a FIELD_DECL, it is required to be non-null. */
19092 tree struct_type;
19093 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
19094 QUAL_UNION_TYPE), this holds an expression that computes the offset for
19095 this variant part as part of the root record (in storage units). For
19096 regular records, it must be NULL_TREE. */
19097 tree variant_part_offset;
19098 };
19099
19100 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
19101 addressed byte of the "containing object" for the given FIELD_DECL. If
19102 possible, return a native constant through CST_OFFSET (in which case NULL is
19103 returned); otherwise return a DWARF expression that computes the offset.
19104
19105 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
19106 that offset is, either because the argument turns out to be a pointer to an
19107 ERROR_MARK node, or because the offset expression is too complex for us.
19108
19109 CTX is required: see the comment for VLR_CONTEXT. */
19110
19111 static dw_loc_descr_ref
19112 field_byte_offset (const_tree decl, struct vlr_context *ctx,
19113 HOST_WIDE_INT *cst_offset)
19114 {
19115 tree tree_result;
19116 dw_loc_list_ref loc_result;
19117
19118 *cst_offset = 0;
19119
19120 if (TREE_CODE (decl) == ERROR_MARK)
19121 return NULL;
19122 else
19123 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
19124
19125 /* We cannot handle variable bit offsets at the moment, so abort if it's the
19126 case. */
19127 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
19128 return NULL;
19129
19130 /* We used to handle only constant offsets in all cases. Now, we properly
19131 handle dynamic byte offsets only when PCC bitfield type doesn't
19132 matter. */
19133 if (PCC_BITFIELD_TYPE_MATTERS
19134 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
19135 {
19136 offset_int object_offset_in_bits;
19137 offset_int object_offset_in_bytes;
19138 offset_int bitpos_int;
19139 tree type;
19140 tree field_size_tree;
19141 offset_int deepest_bitpos;
19142 offset_int field_size_in_bits;
19143 unsigned int type_align_in_bits;
19144 unsigned int decl_align_in_bits;
19145 offset_int type_size_in_bits;
19146
19147 bitpos_int = wi::to_offset (bit_position (decl));
19148 type = field_type (decl);
19149 type_size_in_bits = offset_int_type_size_in_bits (type);
19150 type_align_in_bits = simple_type_align_in_bits (type);
19151
19152 field_size_tree = DECL_SIZE (decl);
19153
19154 /* The size could be unspecified if there was an error, or for
19155 a flexible array member. */
19156 if (!field_size_tree)
19157 field_size_tree = bitsize_zero_node;
19158
19159 /* If the size of the field is not constant, use the type size. */
19160 if (TREE_CODE (field_size_tree) == INTEGER_CST)
19161 field_size_in_bits = wi::to_offset (field_size_tree);
19162 else
19163 field_size_in_bits = type_size_in_bits;
19164
19165 decl_align_in_bits = simple_decl_align_in_bits (decl);
19166
19167 /* The GCC front-end doesn't make any attempt to keep track of the
19168 starting bit offset (relative to the start of the containing
19169 structure type) of the hypothetical "containing object" for a
19170 bit-field. Thus, when computing the byte offset value for the
19171 start of the "containing object" of a bit-field, we must deduce
19172 this information on our own. This can be rather tricky to do in
19173 some cases. For example, handling the following structure type
19174 definition when compiling for an i386/i486 target (which only
19175 aligns long long's to 32-bit boundaries) can be very tricky:
19176
19177 struct S { int field1; long long field2:31; };
19178
19179 Fortunately, there is a simple rule-of-thumb which can be used
19180 in such cases. When compiling for an i386/i486, GCC will
19181 allocate 8 bytes for the structure shown above. It decides to
19182 do this based upon one simple rule for bit-field allocation.
19183 GCC allocates each "containing object" for each bit-field at
19184 the first (i.e. lowest addressed) legitimate alignment boundary
19185 (based upon the required minimum alignment for the declared
19186 type of the field) which it can possibly use, subject to the
19187 condition that there is still enough available space remaining
19188 in the containing object (when allocated at the selected point)
19189 to fully accommodate all of the bits of the bit-field itself.
19190
19191 This simple rule makes it obvious why GCC allocates 8 bytes for
19192 each object of the structure type shown above. When looking
19193 for a place to allocate the "containing object" for `field2',
19194 the compiler simply tries to allocate a 64-bit "containing
19195 object" at each successive 32-bit boundary (starting at zero)
19196 until it finds a place to allocate that 64-bit field such that
19197 at least 31 contiguous (and previously unallocated) bits remain
19198 within that selected 64 bit field. (As it turns out, for the
19199 example above, the compiler finds it is OK to allocate the
19200 "containing object" 64-bit field at bit-offset zero within the
19201 structure type.)
19202
19203 Here we attempt to work backwards from the limited set of facts
19204 we're given, and we try to deduce from those facts, where GCC
19205 must have believed that the containing object started (within
19206 the structure type). The value we deduce is then used (by the
19207 callers of this routine) to generate DW_AT_location and
19208 DW_AT_bit_offset attributes for fields (both bit-fields and, in
19209 the case of DW_AT_location, regular fields as well). */
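/* For the struct S example above on i386: field2 starts at bit 32 and is
31 bits wide, so deepest_bitpos is 63; subtracting the 64-bit type size
gives -1, which rounds up to the 32-bit type alignment as 0, i.e. the
containing object starts at byte offset 0, matching the allocation
described above. */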
19210
19211 /* Figure out the bit-distance from the start of the structure to
19212 the "deepest" bit of the bit-field. */
19213 deepest_bitpos = bitpos_int + field_size_in_bits;
19214
19215 /* This is the tricky part. Use some fancy footwork to deduce
19216 where the lowest addressed bit of the containing object must
19217 be. */
19218 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19219
19220 /* Round up to type_align by default. This works best for
19221 bitfields. */
19222 object_offset_in_bits
19223 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
19224
19225 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
19226 {
19227 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19228
19229 /* Round up to decl_align instead. */
19230 object_offset_in_bits
19231 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
19232 }
19233
19234 object_offset_in_bytes
19235 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
19236 if (ctx->variant_part_offset == NULL_TREE)
19237 {
19238 *cst_offset = object_offset_in_bytes.to_shwi ();
19239 return NULL;
19240 }
19241 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
19242 }
19243 else
19244 tree_result = byte_position (decl);
19245
19246 if (ctx->variant_part_offset != NULL_TREE)
19247 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19248 ctx->variant_part_offset, tree_result);
19249
19250 /* If the byte offset is a constant, it's simpler to handle a native
19251 constant rather than a DWARF expression. */
19252 if (TREE_CODE (tree_result) == INTEGER_CST)
19253 {
19254 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19255 return NULL;
19256 }
19257 struct loc_descr_context loc_ctx = {
19258 ctx->struct_type, /* context_type */
19259 NULL_TREE, /* base_decl */
19260 NULL, /* dpi */
19261 false, /* placeholder_arg */
19262 false /* placeholder_seen */
19263 };
19264 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19265
19266 /* We want a DWARF expression: abort if we only have a location list with
19267 multiple elements. */
19268 if (!loc_result || !single_element_loc_list_p (loc_result))
19269 return NULL;
19270 else
19271 return loc_result->expr;
19272 }
19273 \f
19274 /* The following routines define various Dwarf attributes and any data
19275 associated with them. */
19276
19277 /* Add a location description attribute value to a DIE.
19278
19279 This emits location attributes suitable for whole variables and
19280 whole parameters. Note that the location attributes for struct fields are
19281 generated by the routine `data_member_location_attribute' below. */
19282
19283 static inline void
19284 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19285 dw_loc_list_ref descr)
19286 {
19287 bool check_no_locviews = true;
19288 if (descr == 0)
19289 return;
19290 if (single_element_loc_list_p (descr))
19291 add_AT_loc (die, attr_kind, descr->expr);
19292 else
19293 {
19294 add_AT_loc_list (die, attr_kind, descr);
19295 gcc_assert (descr->ll_symbol);
19296 if (attr_kind == DW_AT_location && descr->vl_symbol
19297 && dwarf2out_locviews_in_attribute ())
19298 {
19299 add_AT_view_list (die, DW_AT_GNU_locviews);
19300 check_no_locviews = false;
19301 }
19302 }
19303
19304 if (check_no_locviews)
19305 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19306 }
19307
19308 /* Add DW_AT_accessibility attribute to DIE if needed. */
19309
19310 static void
19311 add_accessibility_attribute (dw_die_ref die, tree decl)
19312 {
19313 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19314 children, otherwise the default is DW_ACCESS_public. In DWARF2
19315 the default has always been DW_ACCESS_public. */
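 /* For example, with DWARF 3 or later a `private:' member of a class
    gets no attribute here (private already being the default for
    DW_TAG_class_type children), whereas a `public:' member of the same
    class gets an explicit DW_ACCESS_public. */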
19316 if (TREE_PROTECTED (decl))
19317 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19318 else if (TREE_PRIVATE (decl))
19319 {
19320 if (dwarf_version == 2
19321 || die->die_parent == NULL
19322 || die->die_parent->die_tag != DW_TAG_class_type)
19323 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19324 }
19325 else if (dwarf_version > 2
19326 && die->die_parent
19327 && die->die_parent->die_tag == DW_TAG_class_type)
19328 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19329 }
19330
19331 /* Attach the specialized form of location attribute used for data members of
19332 struct and union types. In the special case of a FIELD_DECL node which
19333 represents a bit-field, the "offset" part of this special location
19334 descriptor must indicate the distance in bytes from the lowest-addressed
19335 byte of the containing struct or union type to the lowest-addressed byte of
19336 the "containing object" for the bit-field. (See the `field_byte_offset'
19337 function above).
19338
19339 For any given bit-field, the "containing object" is a hypothetical object
19340 (of some integral or enum type) within which the given bit-field lives. The
19341 type of this hypothetical "containing object" is always the same as the
19342 declared type of the individual bit-field itself (for GCC anyway... the
19343 DWARF spec doesn't actually mandate this). Note that it is the size (in
19344 bytes) of the hypothetical "containing object" which will be given in the
19345 DW_AT_byte_size attribute for this bit-field. (See the
19346 `byte_size_attribute' function below.) It is also used when calculating the
19347 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
19348 function below.)
19349
19350 CTX is required: see the comment for VLR_CONTEXT. */
19351
19352 static void
19353 add_data_member_location_attribute (dw_die_ref die,
19354 tree decl,
19355 struct vlr_context *ctx)
19356 {
19357 HOST_WIDE_INT offset;
19358 dw_loc_descr_ref loc_descr = 0;
19359
19360 if (TREE_CODE (decl) == TREE_BINFO)
19361 {
19362 /* We're working on the TAG_inheritance for a base class. */
19363 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19364 {
19365 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19366 aren't at a fixed offset from all (sub)objects of the same
19367 type. We need to extract the appropriate offset from our
19368 vtable. The following dwarf expression means
19369
19370 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19371
19372 This is specific to the V3 ABI, of course. */
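 /* Concretely, the expression built below is (roughly, since
    int_loc_descriptor picks the smallest encoding for the constant):

    DW_OP_dup; DW_OP_deref; <push -offset>; DW_OP_minus;
    DW_OP_deref; DW_OP_plus */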
19373
19374 dw_loc_descr_ref tmp;
19375
19376 /* Make a copy of the object address. */
19377 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19378 add_loc_descr (&loc_descr, tmp);
19379
19380 /* Extract the vtable address. */
19381 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19382 add_loc_descr (&loc_descr, tmp);
19383
19384 /* Calculate the address of the offset. */
19385 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19386 gcc_assert (offset < 0);
19387
19388 tmp = int_loc_descriptor (-offset);
19389 add_loc_descr (&loc_descr, tmp);
19390 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19391 add_loc_descr (&loc_descr, tmp);
19392
19393 /* Extract the offset. */
19394 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19395 add_loc_descr (&loc_descr, tmp);
19396
19397 /* Add it to the object address. */
19398 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19399 add_loc_descr (&loc_descr, tmp);
19400 }
19401 else
19402 offset = tree_to_shwi (BINFO_OFFSET (decl));
19403 }
19404 else
19405 {
19406 loc_descr = field_byte_offset (decl, ctx, &offset);
19407
19408 /* If loc_descr is available then we know the field offset is dynamic.
19409 However, GDB does not handle dynamic field offsets very well at the
19410 moment. */
19411 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19412 {
19413 loc_descr = NULL;
19414 offset = 0;
19415 }
19416
19417 /* Data member location evaluation starts with the base address on the
19418 stack. Compute the field offset and add it to this base address. */
19419 else if (loc_descr != NULL)
19420 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19421 }
19422
19423 if (! loc_descr)
19424 {
19425 /* While DW_AT_data_bit_offset was already added in DWARF4, consumer
19426 support lagged behind; GDB, for example, only added support for it in
19427 November 2016. For DWARF5 we need newer debug info consumers anyway.
19428 We might change this to dwarf_version >= 4 once most consumers have caught up. */
19429 if (dwarf_version >= 5
19430 && TREE_CODE (decl) == FIELD_DECL
19431 && DECL_BIT_FIELD_TYPE (decl))
19432 {
19433 tree off = bit_position (decl);
19434 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19435 {
19436 remove_AT (die, DW_AT_byte_size);
19437 remove_AT (die, DW_AT_bit_offset);
19438 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19439 return;
19440 }
19441 }
19442 if (dwarf_version > 2)
19443 {
19444 /* Don't need to output a location expression, just the constant. */
19445 if (offset < 0)
19446 add_AT_int (die, DW_AT_data_member_location, offset);
19447 else
19448 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19449 return;
19450 }
19451 else
19452 {
19453 enum dwarf_location_atom op;
19454
19455 /* The DWARF2 standard says that we should assume that the structure
19456 address is already on the stack, so we can specify a structure
19457 field address by using DW_OP_plus_uconst. */
19458 op = DW_OP_plus_uconst;
19459 loc_descr = new_loc_descr (op, offset, 0);
19460 }
19461 }
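 /* For example, with DWARF 2 a non-bit-field member at byte offset 8 is
    described by the single operation DW_OP_plus_uconst 8 built just
    above; for dwarf_version > 2 the bare constant 8 was already added
    and we returned before reaching this point. */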
19462
19463 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19464 }
19465
19466 /* Writes integer values to dw_vec_const array. */
19467
19468 static void
19469 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19470 {
19471 while (size != 0)
19472 {
19473 *dest++ = val & 0xff;
19474 val >>= 8;
19475 --size;
19476 }
19477 }
19478
19479 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19480
19481 static HOST_WIDE_INT
19482 extract_int (const unsigned char *src, unsigned int size)
19483 {
19484 HOST_WIDE_INT val = 0;
19485
19486 src += size;
19487 while (size != 0)
19488 {
19489 val <<= 8;
19490 val |= *--src & 0xff;
19491 --size;
19492 }
19493 return val;
19494 }
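 /* For instance, insert_int (0x1234, 2, buf) stores { 0x34, 0x12 } (least
    significant byte first, independent of host byte order), and
    extract_int (buf, 2) then recovers 0x1234. */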
19495
19496 /* Writes wide_int values to dw_vec_const array. */
19497
19498 static void
19499 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19500 {
19501 int i;
19502
19503 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
19504 {
19505 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19506 return;
19507 }
19508
19509 /* We'd have to extend this code to support odd sizes. */
19510 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19511
19512 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19513
19514 if (WORDS_BIG_ENDIAN)
19515 for (i = n - 1; i >= 0; i--)
19516 {
19517 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19518 dest += sizeof (HOST_WIDE_INT);
19519 }
19520 else
19521 for (i = 0; i < n; i++)
19522 {
19523 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19524 dest += sizeof (HOST_WIDE_INT);
19525 }
19526 }
19527
19528 /* Writes floating point values to dw_vec_const array. */
19529
19530 static void
19531 insert_float (const_rtx rtl, unsigned char *array)
19532 {
19533 long val[4];
19534 int i;
19535 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19536
19537 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19538
19539 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19540 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19541 {
19542 insert_int (val[i], 4, array);
19543 array += 4;
19544 }
19545 }
19546
19547 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19548 does not have a "location" either in memory or in a register. These
19549 things can arise in GNU C when a constant is passed as an actual parameter
19550 to an inlined function. They can also arise in C++ where declared
19551 constants do not necessarily get memory "homes". */
19552
19553 static bool
19554 add_const_value_attribute (dw_die_ref die, rtx rtl)
19555 {
19556 switch (GET_CODE (rtl))
19557 {
19558 case CONST_INT:
19559 {
19560 HOST_WIDE_INT val = INTVAL (rtl);
19561
19562 if (val < 0)
19563 add_AT_int (die, DW_AT_const_value, val);
19564 else
19565 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19566 }
19567 return true;
19568
19569 case CONST_WIDE_INT:
19570 {
19571 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19572 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19573 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19574 wide_int w = wi::zext (w1, prec);
19575 add_AT_wide (die, DW_AT_const_value, w);
19576 }
19577 return true;
19578
19579 case CONST_DOUBLE:
19580 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19581 floating-point constant. A CONST_DOUBLE is used whenever the
19582 constant requires more than one word in order to be adequately
19583 represented. */
19584 if (TARGET_SUPPORTS_WIDE_INT == 0
19585 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19586 add_AT_double (die, DW_AT_const_value,
19587 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19588 else
19589 {
19590 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19591 unsigned int length = GET_MODE_SIZE (mode);
19592 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19593
19594 insert_float (rtl, array);
19595 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19596 }
19597 return true;
19598
19599 case CONST_VECTOR:
19600 {
19601 unsigned int length;
19602 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19603 return false;
19604
19605 machine_mode mode = GET_MODE (rtl);
19606 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19607 unsigned char *array
19608 = ggc_vec_alloc<unsigned char> (length * elt_size);
19609 unsigned int i;
19610 unsigned char *p;
19611 machine_mode imode = GET_MODE_INNER (mode);
19612
19613 switch (GET_MODE_CLASS (mode))
19614 {
19615 case MODE_VECTOR_INT:
19616 for (i = 0, p = array; i < length; i++, p += elt_size)
19617 {
19618 rtx elt = CONST_VECTOR_ELT (rtl, i);
19619 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19620 }
19621 break;
19622
19623 case MODE_VECTOR_FLOAT:
19624 for (i = 0, p = array; i < length; i++, p += elt_size)
19625 {
19626 rtx elt = CONST_VECTOR_ELT (rtl, i);
19627 insert_float (elt, p);
19628 }
19629 break;
19630
19631 default:
19632 gcc_unreachable ();
19633 }
19634
19635 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19636 }
19637 return true;
19638
19639 case CONST_STRING:
19640 if (dwarf_version >= 4 || !dwarf_strict)
19641 {
19642 dw_loc_descr_ref loc_result;
19643 resolve_one_addr (&rtl);
19644 rtl_addr:
19645 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19646 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19647 add_AT_loc (die, DW_AT_location, loc_result);
19648 vec_safe_push (used_rtx_array, rtl);
19649 return true;
19650 }
19651 return false;
19652
19653 case CONST:
19654 if (CONSTANT_P (XEXP (rtl, 0)))
19655 return add_const_value_attribute (die, XEXP (rtl, 0));
19656 /* FALLTHROUGH */
19657 case SYMBOL_REF:
19658 if (!const_ok_for_output (rtl))
19659 return false;
19660 /* FALLTHROUGH */
19661 case LABEL_REF:
19662 if (dwarf_version >= 4 || !dwarf_strict)
19663 goto rtl_addr;
19664 return false;
19665
19666 case PLUS:
19667 /* In cases where an inlined instance of an inline function is passed
19668 the address of an `auto' variable (which is local to the caller) we
19669 can get a situation where the DECL_RTL of the artificial local
19670 variable (for the inlining) which acts as a stand-in for the
19671 corresponding formal parameter (of the inline function) will look
19672 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19673 exactly a compile-time constant expression, but it isn't the address
19674 of the (artificial) local variable either. Rather, it represents the
19675 *value* which the artificial local variable always has during its
19676 lifetime. We currently have no way to represent such quasi-constant
19677 values in Dwarf, so for now we just punt and generate nothing. */
19678 return false;
19679
19680 case HIGH:
19681 case CONST_FIXED:
19682 case MINUS:
19683 case SIGN_EXTEND:
19684 case ZERO_EXTEND:
19685 return false;
19686
19687 case MEM:
19688 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19689 && MEM_READONLY_P (rtl)
19690 && GET_MODE (rtl) == BLKmode)
19691 {
19692 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19693 return true;
19694 }
19695 return false;
19696
19697 default:
19698 /* No other kinds of rtx should be possible here. */
19699 gcc_unreachable ();
19700 }
19701 return false;
19702 }
19703
19704 /* Determine whether the evaluation of EXPR references any variables
19705 or functions which aren't otherwise used (and therefore may not be
19706 output). */
19707 static tree
19708 reference_to_unused (tree * tp, int * walk_subtrees,
19709 void * data ATTRIBUTE_UNUSED)
19710 {
19711 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19712 *walk_subtrees = 0;
19713
19714 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19715 && ! TREE_ASM_WRITTEN (*tp))
19716 return *tp;
19717 /* ??? The C++ FE emits debug information for using decls, so
19718 putting gcc_unreachable here falls over. See PR31899. For now
19719 be conservative. */
19720 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
19721 return *tp;
19722 else if (VAR_P (*tp))
19723 {
19724 varpool_node *node = varpool_node::get (*tp);
19725 if (!node || !node->definition)
19726 return *tp;
19727 }
19728 else if (TREE_CODE (*tp) == FUNCTION_DECL
19729 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19730 {
19731 /* The call graph machinery must have finished analyzing,
19732 optimizing and gimplifying the CU by now.
19733 So if *TP has no call graph node associated
19734 to it, it means *TP will not be emitted. */
19735 if (!cgraph_node::get (*tp))
19736 return *tp;
19737 }
19738 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19739 return *tp;
19740
19741 return NULL_TREE;
19742 }
19743
19744 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19745 for use in a later add_const_value_attribute call. */
19746
19747 static rtx
19748 rtl_for_decl_init (tree init, tree type)
19749 {
19750 rtx rtl = NULL_RTX;
19751
19752 STRIP_NOPS (init);
19753
19754 /* If a variable is initialized with a string constant without embedded
19755 zeros, build CONST_STRING. */
19756 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19757 {
19758 tree enttype = TREE_TYPE (type);
19759 tree domain = TYPE_DOMAIN (type);
19760 scalar_int_mode mode;
19761
19762 if (is_int_mode (TYPE_MODE (enttype), &mode)
19763 && GET_MODE_SIZE (mode) == 1
19764 && domain
19765 && TYPE_MAX_VALUE (domain)
19766 && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
19767 && integer_zerop (TYPE_MIN_VALUE (domain))
19768 && compare_tree_int (TYPE_MAX_VALUE (domain),
19769 TREE_STRING_LENGTH (init) - 1) == 0
19770 && ((size_t) TREE_STRING_LENGTH (init)
19771 == strlen (TREE_STRING_POINTER (init)) + 1))
19772 {
19773 rtl = gen_rtx_CONST_STRING (VOIDmode,
19774 ggc_strdup (TREE_STRING_POINTER (init)));
19775 rtl = gen_rtx_MEM (BLKmode, rtl);
19776 MEM_READONLY_P (rtl) = 1;
19777 }
19778 }
19779 /* Other aggregates, and complex values, could be represented using
19780 CONCAT: FIXME! */
19781 else if (AGGREGATE_TYPE_P (type)
19782 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19783 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19784 || TREE_CODE (type) == COMPLEX_TYPE)
19785 ;
19786 /* Vectors only work if their mode is supported by the target.
19787 FIXME: generic vectors ought to work too. */
19788 else if (TREE_CODE (type) == VECTOR_TYPE
19789 && !VECTOR_MODE_P (TYPE_MODE (type)))
19790 ;
19791 /* If the initializer is something that we know will expand into an
19792 immediate RTL constant, expand it now. We must be careful not to
19793 reference variables which won't be output. */
19794 else if (initializer_constant_valid_p (init, type)
19795 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19796 {
19797 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19798 possible. */
19799 if (TREE_CODE (type) == VECTOR_TYPE)
19800 switch (TREE_CODE (init))
19801 {
19802 case VECTOR_CST:
19803 break;
19804 case CONSTRUCTOR:
19805 if (TREE_CONSTANT (init))
19806 {
19807 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19808 bool constant_p = true;
19809 tree value;
19810 unsigned HOST_WIDE_INT ix;
19811
19812 /* Even when ctor is constant, it might contain non-*_CST
19813 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19814 belong into VECTOR_CST nodes. */
19815 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19816 if (!CONSTANT_CLASS_P (value))
19817 {
19818 constant_p = false;
19819 break;
19820 }
19821
19822 if (constant_p)
19823 {
19824 init = build_vector_from_ctor (type, elts);
19825 break;
19826 }
19827 }
19828 /* FALLTHRU */
19829
19830 default:
19831 return NULL;
19832 }
19833
19834 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19835
19836 /* If expand_expr returns a MEM, it wasn't immediate. */
19837 gcc_assert (!rtl || !MEM_P (rtl));
19838 }
19839
19840 return rtl;
19841 }
19842
19843 /* Generate RTL for the variable DECL to represent its location. */
19844
19845 static rtx
19846 rtl_for_decl_location (tree decl)
19847 {
19848 rtx rtl;
19849
19850 /* Here we have to decide where we are going to say the parameter "lives"
19851 (as far as the debugger is concerned). We only have a couple of
19852 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19853
19854 DECL_RTL normally indicates where the parameter lives during most of the
19855 activation of the function. If optimization is enabled however, this
19856 could be either NULL or else a pseudo-reg. Both of those cases indicate
19857 that the parameter doesn't really live anywhere (as far as the code
19858 generation parts of GCC are concerned) during most of the function's
19859 activation. That will happen (for example) if the parameter is never
19860 referenced within the function.
19861
19862 We could just generate a location descriptor here for all non-NULL
19863 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19864 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19865 where DECL_RTL is NULL or is a pseudo-reg.
19866
19867 Note however that we can only get away with using DECL_INCOMING_RTL as
19868 a backup substitute for DECL_RTL in certain limited cases. In cases
19869 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19870 we can be sure that the parameter was passed using the same type as it is
19871 declared to have within the function, and that its DECL_INCOMING_RTL
19872 points us to a place where a value of that type is passed.
19873
19874 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19875 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19876 because in these cases DECL_INCOMING_RTL points us to a value of some
19877 type which is *different* from the type of the parameter itself. Thus,
19878 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19879 such cases, the debugger would end up (for example) trying to fetch a
19880 `float' from a place which actually contains the first part of a
19881 `double'. That would lead to really incorrect and confusing
19882 output at debug-time.
19883
19884 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19885 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19886 are a couple of exceptions however. On little-endian machines we can
19887 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19888 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19889 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19890 when (on a little-endian machine) a non-prototyped function has a
19891 parameter declared to be of type `short' or `char'. In such cases,
19892 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19893 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19894 passed `int' value. If the debugger then uses that address to fetch
19895 a `short' or a `char' (on a little-endian machine) the result will be
19896 the correct data, so we allow for such exceptional cases below.
19897
19898 Note that our goal here is to describe the place where the given formal
19899 parameter lives during most of the function's activation (i.e. between the
19900 end of the prologue and the start of the epilogue). We'll do that as best
19901 as we can. Note however that if the given formal parameter is modified
19902 sometime during the execution of the function, then a stack backtrace (at
19903 debug-time) will show the function as having been called with the *new*
19904 value rather than the value which was originally passed in. This happens
19905 rarely enough that it is not a major problem, but it *is* a problem, and
19906 I'd like to fix it.
19907
19908 A future version of dwarf2out.c may generate two additional attributes for
19909 any given DW_TAG_formal_parameter DIE which will describe the "passed
19910 type" and the "passed location" for the given formal parameter in addition
19911 to the attributes we now generate to indicate the "declared type" and the
19912 "active location" for each parameter. This additional set of attributes
19913 could be used by debuggers for stack backtraces. Separately, note that
19914 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19915 This happens (for example) for inlined-instances of inline function formal
19916 parameters which are never referenced. This really shouldn't be
19917 happening. All PARM_DECL nodes should get valid non-NULL
19918 DECL_INCOMING_RTL values. FIXME. */
19919
19920 /* Use DECL_RTL as the "location" unless we find something better. */
19921 rtl = DECL_RTL_IF_SET (decl);
19922
19923 /* When generating abstract instances, ignore everything except
19924 constants, symbols living in memory, and symbols living in
19925 fixed registers. */
19926 if (! reload_completed)
19927 {
19928 if (rtl
19929 && (CONSTANT_P (rtl)
19930 || (MEM_P (rtl)
19931 && CONSTANT_P (XEXP (rtl, 0)))
19932 || (REG_P (rtl)
19933 && VAR_P (decl)
19934 && TREE_STATIC (decl))))
19935 {
19936 rtl = targetm.delegitimize_address (rtl);
19937 return rtl;
19938 }
19939 rtl = NULL_RTX;
19940 }
19941 else if (TREE_CODE (decl) == PARM_DECL)
19942 {
19943 if (rtl == NULL_RTX
19944 || is_pseudo_reg (rtl)
19945 || (MEM_P (rtl)
19946 && is_pseudo_reg (XEXP (rtl, 0))
19947 && DECL_INCOMING_RTL (decl)
19948 && MEM_P (DECL_INCOMING_RTL (decl))
19949 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19950 {
19951 tree declared_type = TREE_TYPE (decl);
19952 tree passed_type = DECL_ARG_TYPE (decl);
19953 machine_mode dmode = TYPE_MODE (declared_type);
19954 machine_mode pmode = TYPE_MODE (passed_type);
19955
19956 /* This decl represents a formal parameter which was optimized out.
19957 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19958 all cases where (rtl == NULL_RTX) just below. */
19959 if (dmode == pmode)
19960 rtl = DECL_INCOMING_RTL (decl);
19961 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19962 && SCALAR_INT_MODE_P (dmode)
19963 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
19964 && DECL_INCOMING_RTL (decl))
19965 {
19966 rtx inc = DECL_INCOMING_RTL (decl);
19967 if (REG_P (inc))
19968 rtl = inc;
19969 else if (MEM_P (inc))
19970 {
19971 if (BYTES_BIG_ENDIAN)
19972 rtl = adjust_address_nv (inc, dmode,
19973 GET_MODE_SIZE (pmode)
19974 - GET_MODE_SIZE (dmode));
19975 else
19976 rtl = inc;
19977 }
19978 }
19979 }
19980
19981 /* If the parm was passed in registers, but lives on the stack, then
19982 make a big endian correction if the mode of the type of the
19983 parameter is not the same as the mode of the rtl. */
19984 /* ??? This is the same series of checks that are made in dbxout.c before
19985 we reach the big endian correction code there. It isn't clear if all
19986 of these checks are necessary here, but keeping them all is the safe
19987 thing to do. */
19988 else if (MEM_P (rtl)
19989 && XEXP (rtl, 0) != const0_rtx
19990 && ! CONSTANT_P (XEXP (rtl, 0))
19991 /* Not passed in memory. */
19992 && !MEM_P (DECL_INCOMING_RTL (decl))
19993 /* Not passed by invisible reference. */
19994 && (!REG_P (XEXP (rtl, 0))
19995 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
19996 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
19997 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
19998 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
19999 #endif
20000 )
20001 /* Big endian correction check. */
20002 && BYTES_BIG_ENDIAN
20003 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
20004 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
20005 UNITS_PER_WORD))
20006 {
20007 machine_mode addr_mode = get_address_mode (rtl);
20008 poly_int64 offset = (UNITS_PER_WORD
20009 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
20010
20011 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
20012 plus_constant (addr_mode, XEXP (rtl, 0), offset));
20013 }
20014 }
20015 else if (VAR_P (decl)
20016 && rtl
20017 && MEM_P (rtl)
20018 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
20019 {
20020 machine_mode addr_mode = get_address_mode (rtl);
20021 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
20022 GET_MODE (rtl));
20023
20024 /* If a variable is declared "register" yet is smaller than
20025 a register, then if we store the variable to memory, it
20026 looks like we're storing a register-sized value, when in
20027 fact we are not. We need to adjust the offset of the
20028 storage location to reflect the actual value's bytes,
20029 else gdb will not be able to display it. */
20030 if (maybe_ne (offset, 0))
20031 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
20032 plus_constant (addr_mode, XEXP (rtl, 0), offset));
20033 }
20034
20035 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
20036 and will have been substituted directly into all expressions that use it.
20037 C does not have such a concept, but C++ and other languages do. */
20038 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
20039 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
20040
20041 if (rtl)
20042 rtl = targetm.delegitimize_address (rtl);
20043
20044 /* If we don't look past the constant pool, we risk emitting a
20045 reference to a constant pool entry that isn't referenced from
20046 code, and thus is not emitted. */
20047 if (rtl)
20048 rtl = avoid_constant_pool_reference (rtl);
20049
20050 /* Try harder to get a rtl. If this symbol ends up not being emitted
20051 in the current CU, resolve_addr will remove the expression referencing
20052 it. */
20053 if (rtl == NULL_RTX
20054 && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
20055 && VAR_P (decl)
20056 && !DECL_EXTERNAL (decl)
20057 && TREE_STATIC (decl)
20058 && DECL_NAME (decl)
20059 && !DECL_HARD_REGISTER (decl)
20060 && DECL_MODE (decl) != VOIDmode)
20061 {
20062 rtl = make_decl_rtl_for_debug (decl);
20063 if (!MEM_P (rtl)
20064 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
20065 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
20066 rtl = NULL_RTX;
20067 }
20068
20069 return rtl;
20070 }
20071
20072 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
20073 returned. If so, the decl for the COMMON block is returned, and the
20074 value is the offset into the common block for the symbol. */
20075
20076 static tree
20077 fortran_common (tree decl, HOST_WIDE_INT *value)
20078 {
20079 tree val_expr, cvar;
20080 machine_mode mode;
20081 poly_int64 bitsize, bitpos;
20082 tree offset;
20083 HOST_WIDE_INT cbitpos;
20084 int unsignedp, reversep, volatilep = 0;
20085
20086 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
20087 it does not have a value (the offset into the common area), or if it
20088 is thread local (as opposed to global) then it isn't common, and shouldn't
20089 be handled as such. */
20090 if (!VAR_P (decl)
20091 || !TREE_STATIC (decl)
20092 || !DECL_HAS_VALUE_EXPR_P (decl)
20093 || !is_fortran ())
20094 return NULL_TREE;
20095
20096 val_expr = DECL_VALUE_EXPR (decl);
20097 if (TREE_CODE (val_expr) != COMPONENT_REF)
20098 return NULL_TREE;
20099
20100 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
20101 &unsignedp, &reversep, &volatilep);
20102
20103 if (cvar == NULL_TREE
20104 || !VAR_P (cvar)
20105 || DECL_ARTIFICIAL (cvar)
20106 || !TREE_PUBLIC (cvar)
20107 /* We don't expect to have to cope with variable offsets,
20108 since at present all static data must have a constant size. */
20109 || !bitpos.is_constant (&cbitpos))
20110 return NULL_TREE;
20111
20112 *value = 0;
20113 if (offset != NULL)
20114 {
20115 if (!tree_fits_shwi_p (offset))
20116 return NULL_TREE;
20117 *value = tree_to_shwi (offset);
20118 }
20119 if (cbitpos != 0)
20120 *value += cbitpos / BITS_PER_UNIT;
20121
20122 return cvar;
20123 }
20124
20125 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
20126 data attribute for a variable or a parameter. We generate the
20127 DW_AT_const_value attribute only in those cases where the given variable
20128 or parameter does not have a true "location" either in memory or in a
20129 register. This can happen (for example) when a constant is passed as an
20130 actual argument in a call to an inline function. (It's possible that
20131 these things can crop up in other ways also.) Note that one type of
20132 constant value which can be passed into an inlined function is a constant
20133 pointer. This can happen for example if an actual argument in an inlined
20134 function call evaluates to a compile-time constant address.
20135
20136 CACHE_P is true if it is worth caching the location list for DECL,
20137 so that future calls can reuse it rather than regenerate it from scratch.
20138 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
20139 since we will need to refer to them each time the function is inlined. */
20140
20141 static bool
20142 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
20143 {
20144 rtx rtl;
20145 dw_loc_list_ref list;
20146 var_loc_list *loc_list;
20147 cached_dw_loc_list *cache;
20148
20149 if (early_dwarf)
20150 return false;
20151
20152 if (TREE_CODE (decl) == ERROR_MARK)
20153 return false;
20154
20155 if (get_AT (die, DW_AT_location)
20156 || get_AT (die, DW_AT_const_value))
20157 return true;
20158
20159 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
20160 || TREE_CODE (decl) == RESULT_DECL);
20161
20162 /* Try to get some constant RTL for this decl, and use that as the value of
20163 the location. */
20164
20165 rtl = rtl_for_decl_location (decl);
20166 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20167 && add_const_value_attribute (die, rtl))
20168 return true;
20169
20170 /* See if we have a single-element location list that is equivalent to
20171 a constant value; in that case it is better to use add_const_value_attribute
20172 rather than expanding the constant value equivalent. */
20173 loc_list = lookup_decl_loc (decl);
20174 if (loc_list
20175 && loc_list->first
20176 && loc_list->first->next == NULL
20177 && NOTE_P (loc_list->first->loc)
20178 && NOTE_VAR_LOCATION (loc_list->first->loc)
20179 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
20180 {
20181 struct var_loc_node *node;
20182
20183 node = loc_list->first;
20184 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
20185 if (GET_CODE (rtl) == EXPR_LIST)
20186 rtl = XEXP (rtl, 0);
20187 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20188 && add_const_value_attribute (die, rtl))
20189 return true;
20190 }
20191 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
20192 list several times. See if we've already cached the contents. */
20193 list = NULL;
20194 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
20195 cache_p = false;
20196 if (cache_p)
20197 {
20198 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
20199 if (cache)
20200 list = cache->loc_list;
20201 }
20202 if (list == NULL)
20203 {
20204 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
20205 NULL);
20206 /* It is usually worth caching this result if the decl is from
20207 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
20208 if (cache_p && list && list->dw_loc_next)
20209 {
20210 cached_dw_loc_list **slot
20211 = cached_dw_loc_list_table->find_slot_with_hash (decl,
20212 DECL_UID (decl),
20213 INSERT);
20214 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
20215 cache->decl_id = DECL_UID (decl);
20216 cache->loc_list = list;
20217 *slot = cache;
20218 }
20219 }
20220 if (list)
20221 {
20222 add_AT_location_description (die, DW_AT_location, list);
20223 return true;
20224 }
20225 /* None of that worked, so it must not really have a location;
20226 try adding a constant value attribute from the DECL_INITIAL. */
20227 return tree_add_const_value_attribute_for_decl (die, decl);
20228 }
20229
20230 /* Helper function for tree_add_const_value_attribute. Natively encode
20231 initializer INIT into an array. Return true if successful. */
20232
20233 static bool
20234 native_encode_initializer (tree init, unsigned char *array, int size)
20235 {
20236 tree type;
20237
20238 if (init == NULL_TREE)
20239 return false;
20240
20241 STRIP_NOPS (init);
20242 switch (TREE_CODE (init))
20243 {
20244 case STRING_CST:
20245 type = TREE_TYPE (init);
20246 if (TREE_CODE (type) == ARRAY_TYPE)
20247 {
20248 tree enttype = TREE_TYPE (type);
20249 scalar_int_mode mode;
20250
20251 if (!is_int_mode (TYPE_MODE (enttype), &mode)
20252 || GET_MODE_SIZE (mode) != 1)
20253 return false;
20254 if (int_size_in_bytes (type) != size)
20255 return false;
20256 if (size > TREE_STRING_LENGTH (init))
20257 {
20258 memcpy (array, TREE_STRING_POINTER (init),
20259 TREE_STRING_LENGTH (init));
20260 memset (array + TREE_STRING_LENGTH (init),
20261 '\0', size - TREE_STRING_LENGTH (init));
20262 }
20263 else
20264 memcpy (array, TREE_STRING_POINTER (init), size);
20265 return true;
20266 }
20267 return false;
20268 case CONSTRUCTOR:
20269 type = TREE_TYPE (init);
20270 if (int_size_in_bytes (type) != size)
20271 return false;
20272 if (TREE_CODE (type) == ARRAY_TYPE)
20273 {
20274 HOST_WIDE_INT min_index;
20275 unsigned HOST_WIDE_INT cnt;
20276 int curpos = 0, fieldsize;
20277 constructor_elt *ce;
20278
20279 if (TYPE_DOMAIN (type) == NULL_TREE
20280 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
20281 return false;
20282
20283 fieldsize = int_size_in_bytes (TREE_TYPE (type));
20284 if (fieldsize <= 0)
20285 return false;
20286
20287 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
20288 memset (array, '\0', size);
20289 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20290 {
20291 tree val = ce->value;
20292 tree index = ce->index;
20293 int pos = curpos;
20294 if (index && TREE_CODE (index) == RANGE_EXPR)
20295 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
20296 * fieldsize;
20297 else if (index)
20298 pos = (tree_to_shwi (index) - min_index) * fieldsize;
20299
20300 if (val)
20301 {
20302 STRIP_NOPS (val);
20303 if (!native_encode_initializer (val, array + pos, fieldsize))
20304 return false;
20305 }
20306 curpos = pos + fieldsize;
20307 if (index && TREE_CODE (index) == RANGE_EXPR)
20308 {
20309 int count = tree_to_shwi (TREE_OPERAND (index, 1))
20310 - tree_to_shwi (TREE_OPERAND (index, 0));
20311 while (count-- > 0)
20312 {
20313 if (val)
20314 memcpy (array + curpos, array + pos, fieldsize);
20315 curpos += fieldsize;
20316 }
20317 }
20318 gcc_assert (curpos <= size);
20319 }
20320 return true;
20321 }
20322 else if (TREE_CODE (type) == RECORD_TYPE
20323 || TREE_CODE (type) == UNION_TYPE)
20324 {
20325 tree field = NULL_TREE;
20326 unsigned HOST_WIDE_INT cnt;
20327 constructor_elt *ce;
20328
20329 if (int_size_in_bytes (type) != size)
20330 return false;
20331
20332 if (TREE_CODE (type) == RECORD_TYPE)
20333 field = TYPE_FIELDS (type);
20334
20335 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20336 {
20337 tree val = ce->value;
20338 int pos, fieldsize;
20339
20340 if (ce->index != 0)
20341 field = ce->index;
20342
20343 if (val)
20344 STRIP_NOPS (val);
20345
20346 if (field == NULL_TREE || DECL_BIT_FIELD (field))
20347 return false;
20348
20349 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
20350 && TYPE_DOMAIN (TREE_TYPE (field))
20351 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
20352 return false;
20353 else if (DECL_SIZE_UNIT (field) == NULL_TREE
20354 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
20355 return false;
20356 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
20357 pos = int_byte_position (field);
20358 gcc_assert (pos + fieldsize <= size);
20359 if (val && fieldsize != 0
20360 && !native_encode_initializer (val, array + pos, fieldsize))
20361 return false;
20362 }
20363 return true;
20364 }
20365 return false;
20366 case VIEW_CONVERT_EXPR:
20367 case NON_LVALUE_EXPR:
20368 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
20369 default:
20370 return native_encode_expr (init, array, size) == size;
20371 }
20372 }
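 /* As a sketch: on a little-endian target with 4-byte int, the initializer
    of `static const int a[2] = { 1, 2 };' is encoded by the CONSTRUCTOR
    case above into the 8 bytes 01 00 00 00 02 00 00 00, each element going
    through native_encode_expr and hence respecting target byte order. */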
20373
20374 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20375 attribute is the const value T. */
20376
20377 static bool
20378 tree_add_const_value_attribute (dw_die_ref die, tree t)
20379 {
20380 tree init;
20381 tree type = TREE_TYPE (t);
20382 rtx rtl;
20383
20384 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20385 return false;
20386
20387 init = t;
20388 gcc_assert (!DECL_P (init));
20389
20390 if (TREE_CODE (init) == INTEGER_CST)
20391 {
20392 if (tree_fits_uhwi_p (init))
20393 {
20394 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20395 return true;
20396 }
20397 if (tree_fits_shwi_p (init))
20398 {
20399 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20400 return true;
20401 }
20402 }
20403 if (! early_dwarf)
20404 {
20405 rtl = rtl_for_decl_init (init, type);
20406 if (rtl)
20407 return add_const_value_attribute (die, rtl);
20408 }
20409 /* If the host and target are sane, try harder. */
20410 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20411 && initializer_constant_valid_p (init, type))
20412 {
20413 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20414 if (size > 0 && (int) size == size)
20415 {
20416 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20417
20418 if (native_encode_initializer (init, array, size))
20419 {
20420 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20421 return true;
20422 }
20423 ggc_free (array);
20424 }
20425 }
20426 return false;
20427 }
20428
20429 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20430 attribute is the const value of T, where T is an integral constant
20431 variable with static storage duration
20432 (so it can't be a PARM_DECL or a RESULT_DECL). */
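 /* For instance, a C++ `static const int answer = 42;' (an arbitrary
    example) that is read-only, non-volatile and has its initializer
    available can end up with DW_AT_const_value 42 here instead of a
    location. */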
20433
20434 static bool
20435 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20436 {
20437
20438 if (!decl
20439 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20440 || (VAR_P (decl) && !TREE_STATIC (decl)))
20441 return false;
20442
20443 if (TREE_READONLY (decl)
20444 && ! TREE_THIS_VOLATILE (decl)
20445 && DECL_INITIAL (decl))
20446 /* OK */;
20447 else
20448 return false;
20449
20450 /* Don't add DW_AT_const_value if abstract origin already has one. */
20451 if (get_AT (var_die, DW_AT_const_value))
20452 return false;
20453
20454 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20455 }
20456
20457 /* Convert the CFI instructions for the current function into a
20458 location list. This is used for DW_AT_frame_base when we are targeting
20459 a dwarf2 consumer that does not support the dwarf3
20460 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20461 expressions. */
20462
20463 static dw_loc_list_ref
20464 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20465 {
20466 int ix;
20467 dw_fde_ref fde;
20468 dw_loc_list_ref list, *list_tail;
20469 dw_cfi_ref cfi;
20470 dw_cfa_location last_cfa, next_cfa;
20471 const char *start_label, *last_label, *section;
20472 dw_cfa_location remember;
20473
20474 fde = cfun->fde;
20475 gcc_assert (fde != NULL);
20476
20477 section = secname_for_decl (current_function_decl);
20478 list_tail = &list;
20479 list = NULL;
20480
20481 memset (&next_cfa, 0, sizeof (next_cfa));
20482 next_cfa.reg = INVALID_REGNUM;
20483 remember = next_cfa;
20484
20485 start_label = fde->dw_fde_begin;
20486
20487 /* ??? Bald assumption that the CIE opcode list does not contain
20488 advance opcodes. */
20489 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20490 lookup_cfa_1 (cfi, &next_cfa, &remember);
20491
20492 last_cfa = next_cfa;
20493 last_label = start_label;
20494
20495 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20496 {
20497 /* If the first partition contained no CFI adjustments, the
20498 CIE opcodes apply to the whole first partition. */
20499 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20500 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20501 list_tail = &(*list_tail)->dw_loc_next;
20502 start_label = last_label = fde->dw_fde_second_begin;
20503 }
20504
20505 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20506 {
20507 switch (cfi->dw_cfi_opc)
20508 {
20509 case DW_CFA_set_loc:
20510 case DW_CFA_advance_loc1:
20511 case DW_CFA_advance_loc2:
20512 case DW_CFA_advance_loc4:
20513 if (!cfa_equal_p (&last_cfa, &next_cfa))
20514 {
20515 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20516 start_label, 0, last_label, 0, section);
20517
20518 list_tail = &(*list_tail)->dw_loc_next;
20519 last_cfa = next_cfa;
20520 start_label = last_label;
20521 }
20522 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20523 break;
20524
20525 case DW_CFA_advance_loc:
20526 /* The encoding is complex enough that we should never emit this. */
20527 gcc_unreachable ();
20528
20529 default:
20530 lookup_cfa_1 (cfi, &next_cfa, &remember);
20531 break;
20532 }
20533 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20534 {
20535 if (!cfa_equal_p (&last_cfa, &next_cfa))
20536 {
20537 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20538 start_label, 0, last_label, 0, section);
20539
20540 list_tail = &(*list_tail)->dw_loc_next;
20541 last_cfa = next_cfa;
20542 start_label = last_label;
20543 }
20544 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20545 start_label, 0, fde->dw_fde_end, 0, section);
20546 list_tail = &(*list_tail)->dw_loc_next;
20547 start_label = last_label = fde->dw_fde_second_begin;
20548 }
20549 }
20550
20551 if (!cfa_equal_p (&last_cfa, &next_cfa))
20552 {
20553 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20554 start_label, 0, last_label, 0, section);
20555 list_tail = &(*list_tail)->dw_loc_next;
20556 start_label = last_label;
20557 }
20558
20559 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20560 start_label, 0,
20561 fde->dw_fde_second_begin
20562 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20563 section);
20564
20565 maybe_gen_llsym (list);
20566
20567 return list;
20568 }
20569
20570 /* Compute a displacement from the "steady-state frame pointer" to the
20571 frame base (often the same as the CFA), and store it in
20572 frame_pointer_fb_offset. OFFSET is added to the displacement
20573 before the latter is negated. */
20574
20575 static void
20576 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20577 {
20578 rtx reg, elim;
20579
20580 #ifdef FRAME_POINTER_CFA_OFFSET
20581 reg = frame_pointer_rtx;
20582 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20583 #else
20584 reg = arg_pointer_rtx;
20585 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20586 #endif
20587
20588 elim = (ira_use_lra_p
20589 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20590 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20591 elim = strip_offset_and_add (elim, &offset);
20592
20593 frame_pointer_fb_offset = -offset;
20594
20595 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20596 in which to eliminate. This is because its stack pointer isn't
20597 directly accessible as a register within the ISA. To work around
20598 this, assume that while we cannot provide a proper value for
20599 frame_pointer_fb_offset, we won't need one either. We can use
20600 hard frame pointer in debug info even if frame pointer isn't used
20601 since hard frame pointer in debug info is encoded with DW_OP_fbreg
20602 which uses the DW_AT_frame_base attribute, not hard frame pointer
20603 directly. */
20604 frame_pointer_fb_offset_valid
20605 = (elim == hard_frame_pointer_rtx || elim == stack_pointer_rtx);
20606 }
20607
20608 /* Generate a DW_AT_name attribute given some string value to be included as
20609 the value of the attribute. */
20610
20611 static void
20612 add_name_attribute (dw_die_ref die, const char *name_string)
20613 {
20614 if (name_string != NULL && *name_string != 0)
20615 {
20616 if (demangle_name_func)
20617 name_string = (*demangle_name_func) (name_string);
20618
20619 add_AT_string (die, DW_AT_name, name_string);
20620 }
20621 }
20622
20623 /* Generate a DW_AT_description attribute given some string value to be included
20624 as the value of the attribute. */
20625
20626 static void
20627 add_desc_attribute (dw_die_ref die, const char *name_string)
20628 {
20629 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20630 return;
20631
20632 if (name_string == NULL || *name_string == 0)
20633 return;
20634
20635 if (demangle_name_func)
20636 name_string = (*demangle_name_func) (name_string);
20637
20638 add_AT_string (die, DW_AT_description, name_string);
20639 }
20640
20641 /* Generate a DW_AT_description attribute given some decl to be included
20642 as the value of the attribute. */
20643
20644 static void
20645 add_desc_attribute (dw_die_ref die, tree decl)
20646 {
20647 tree decl_name;
20648
20649 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20650 return;
20651
20652 if (decl == NULL_TREE || !DECL_P (decl))
20653 return;
20654 decl_name = DECL_NAME (decl);
20655
20656 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
20657 {
20658 const char *name = dwarf2_name (decl, 0);
20659 add_desc_attribute (die, name ? name : IDENTIFIER_POINTER (decl_name));
20660 }
20661 else
20662 {
20663 char *desc = print_generic_expr_to_str (decl);
20664 add_desc_attribute (die, desc);
20665 free (desc);
20666 }
20667 }
20668
20669 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
20670 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20671 of TYPE accordingly.
20672
20673 ??? This is a temporary measure until after we're able to generate
20674 regular DWARF for the complex Ada type system. */
20675
20676 static void
20677 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20678 dw_die_ref context_die)
20679 {
20680 tree dtype;
20681 dw_die_ref dtype_die;
20682
20683 if (!lang_hooks.types.descriptive_type)
20684 return;
20685
20686 dtype = lang_hooks.types.descriptive_type (type);
20687 if (!dtype)
20688 return;
20689
20690 dtype_die = lookup_type_die (dtype);
20691 if (!dtype_die)
20692 {
20693 gen_type_die (dtype, context_die);
20694 dtype_die = lookup_type_die (dtype);
20695 gcc_assert (dtype_die);
20696 }
20697
20698 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20699 }
20700
20701 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
20702
20703 static const char *
20704 comp_dir_string (void)
20705 {
20706 const char *wd;
20707 char *wd_plus_sep = NULL;
20708 static const char *cached_wd = NULL;
20709
20710 if (cached_wd != NULL)
20711 return cached_wd;
20712
20713 wd = get_src_pwd ();
20714 if (wd == NULL)
20715 return NULL;
20716
20717 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20718 {
20719 size_t wdlen = strlen (wd);
20720 wd_plus_sep = XNEWVEC (char, wdlen + 2);
20721 strcpy (wd_plus_sep, wd);
20722 wd_plus_sep [wdlen] = DIR_SEPARATOR;
20723 wd_plus_sep [wdlen + 1] = 0;
20724 wd = wd_plus_sep;
20725 }
20726
20727 cached_wd = remap_debug_filename (wd);
20728
20729 /* remap_debug_filename can just pass through wd or return a new gc string.
20730 These two types can't both be stored in a GTY(())-tagged string, but since
20731 the cached value lives forever, just copy it if needed. */
20732 if (cached_wd != wd)
20733 {
20734 cached_wd = xstrdup (cached_wd);
20735 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR && wd_plus_sep != NULL)
20736 free (wd_plus_sep);
20737 }
20738
20739 return cached_wd;
20740 }
20741
20742 /* Generate a DW_AT_comp_dir attribute for DIE. */
20743
20744 static void
20745 add_comp_dir_attribute (dw_die_ref die)
20746 {
20747 const char * wd = comp_dir_string ();
20748 if (wd != NULL)
20749 add_AT_string (die, DW_AT_comp_dir, wd);
20750 }
20751
20752 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20753 pointer computation, ...), output a representation for that bound according
20754 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20755 loc_list_from_tree for the meaning of CONTEXT. */
20756
20757 static void
20758 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20759 int forms, struct loc_descr_context *context)
20760 {
20761 dw_die_ref context_die, decl_die = NULL;
20762 dw_loc_list_ref list;
20763 bool strip_conversions = true;
20764 bool placeholder_seen = false;
20765
20766 while (strip_conversions)
20767 switch (TREE_CODE (value))
20768 {
20769 case ERROR_MARK:
20770 case SAVE_EXPR:
20771 return;
20772
20773 CASE_CONVERT:
20774 case VIEW_CONVERT_EXPR:
20775 value = TREE_OPERAND (value, 0);
20776 break;
20777
20778 default:
20779 strip_conversions = false;
20780 break;
20781 }
20782
20783 /* If possible and permitted, output the attribute as a constant. */
20784 if ((forms & dw_scalar_form_constant) != 0
20785 && TREE_CODE (value) == INTEGER_CST)
20786 {
20787 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20788
20789 /* If HOST_WIDE_INT is big enough then represent the bound as
20790 a constant value. We need to choose a form based on
20791 whether the type is signed or unsigned. We cannot just
20792 call add_AT_unsigned if the value itself is positive
20793 (add_AT_unsigned might add the unsigned value encoded as
20794 DW_FORM_data[1248]). Some DWARF consumers will lookup the
20795 bounds type and then sign extend any unsigned values found
20796 for signed types. This is needed only for
20797 DW_AT_{lower,upper}_bound, since for most other attributes,
20798 consumers will treat DW_FORM_data[1248] as unsigned values,
20799 regardless of the underlying type. */
20800 if (prec <= HOST_BITS_PER_WIDE_INT
20801 || tree_fits_uhwi_p (value))
20802 {
20803 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20804 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20805 else
20806 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20807 }
20808 else
20809 /* Otherwise represent the bound as an unsigned value with
20810 the precision of its type. The precision and signedness
20811 of the type will be necessary to re-interpret it
20812 unambiguously. */
20813 add_AT_wide (die, attr, wi::to_wide (value));
20814 return;
20815 }
20816
20817 /* Otherwise, if it's possible and permitted too, output a reference to
20818 another DIE. */
20819 if ((forms & dw_scalar_form_reference) != 0)
20820 {
20821 tree decl = NULL_TREE;
20822
20823 /* Some type attributes reference an outer type. For instance, the upper
20824 bound of an array may reference an embedding record (this happens in
20825 Ada). */
20826 if (TREE_CODE (value) == COMPONENT_REF
20827 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20828 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20829 decl = TREE_OPERAND (value, 1);
20830
20831 else if (VAR_P (value)
20832 || TREE_CODE (value) == PARM_DECL
20833 || TREE_CODE (value) == RESULT_DECL)
20834 decl = value;
20835
20836 if (decl != NULL_TREE)
20837 {
20838 decl_die = lookup_decl_die (decl);
20839
20840 /* ??? Can this happen, or should the variable have been bound
20841 first? Probably it can, since I imagine that we try to create
20842 the types of parameters in the order in which they exist in
20843 the list, and won't have created a forward reference to a
20844 later parameter. */
20845 if (decl_die != NULL)
20846 {
20847 if (get_AT (decl_die, DW_AT_location)
20848 || get_AT (decl_die, DW_AT_data_member_location)
20849 || get_AT (decl_die, DW_AT_const_value))
20850 {
20851 add_AT_die_ref (die, attr, decl_die);
20852 return;
20853 }
20854 }
20855 }
20856 }
20857
20858 /* Last chance: try to create a stack operation procedure to evaluate the
20859 value. Do nothing if even that is not possible or permitted. */
20860 if ((forms & dw_scalar_form_exprloc) == 0)
20861 return;
20862
20863 list = loc_list_from_tree (value, 2, context);
20864 if (context && context->placeholder_arg)
20865 {
20866 placeholder_seen = context->placeholder_seen;
20867 context->placeholder_seen = false;
20868 }
20869 if (list == NULL || single_element_loc_list_p (list))
20870 {
20871 /* If this attribute is neither a reference nor a constant, it is
20872 a DWARF expression rather than a location description. For that,
20873 loc_list_from_tree (value, 0, &context) is needed. */
20874 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20875 if (list2 && single_element_loc_list_p (list2))
20876 {
20877 if (placeholder_seen)
20878 {
20879 struct dwarf_procedure_info dpi;
20880 dpi.fndecl = NULL_TREE;
20881 dpi.args_count = 1;
20882 if (!resolve_args_picking (list2->expr, 1, &dpi))
20883 return;
20884 }
20885 add_AT_loc (die, attr, list2->expr);
20886 return;
20887 }
20888 }
20889
20890 /* If that failed to give a single element location list, fall back to
20891 outputting this as a reference, if that is permitted. */
20892 if (list == NULL
20893 || (forms & dw_scalar_form_reference) == 0
20894 || placeholder_seen)
20895 return;
20896
20897 if (!decl_die)
20898 {
20899 if (current_function_decl == 0)
20900 context_die = comp_unit_die ();
20901 else
20902 context_die = lookup_decl_die (current_function_decl);
20903
20904 decl_die = new_die (DW_TAG_variable, context_die, value);
20905 add_AT_flag (decl_die, DW_AT_artificial, 1);
20906 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20907 context_die);
20908 }
20909
20910 add_AT_location_description (decl_die, DW_AT_location, list);
20911 add_AT_die_ref (die, attr, decl_die);
20912 }
20913
20914 /* Return the default for DW_AT_lower_bound, or -1 if there is no
20915 default. */
20916
20917 static int
20918 lower_bound_default (void)
20919 {
20920 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20921 {
20922 case DW_LANG_C:
20923 case DW_LANG_C89:
20924 case DW_LANG_C99:
20925 case DW_LANG_C11:
20926 case DW_LANG_C_plus_plus:
20927 case DW_LANG_C_plus_plus_11:
20928 case DW_LANG_C_plus_plus_14:
20929 case DW_LANG_ObjC:
20930 case DW_LANG_ObjC_plus_plus:
20931 return 0;
20932 case DW_LANG_Fortran77:
20933 case DW_LANG_Fortran90:
20934 case DW_LANG_Fortran95:
20935 case DW_LANG_Fortran03:
20936 case DW_LANG_Fortran08:
20937 return 1;
20938 case DW_LANG_UPC:
20939 case DW_LANG_D:
20940 case DW_LANG_Python:
20941 return dwarf_version >= 4 ? 0 : -1;
20942 case DW_LANG_Ada95:
20943 case DW_LANG_Ada83:
20944 case DW_LANG_Cobol74:
20945 case DW_LANG_Cobol85:
20946 case DW_LANG_Modula2:
20947 case DW_LANG_PLI:
20948 return dwarf_version >= 4 ? 1 : -1;
20949 default:
20950 return -1;
20951 }
20952 }
20953
20954 /* Given a tree node describing an array bound (either lower or upper), output
20955 a representation for that bound. */
20956
20957 static void
20958 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20959 tree bound, struct loc_descr_context *context)
20960 {
20961 int dflt;
20962
20963 while (1)
20964 switch (TREE_CODE (bound))
20965 {
20966 /* Strip all conversions. */
20967 CASE_CONVERT:
20968 case VIEW_CONVERT_EXPR:
20969 bound = TREE_OPERAND (bound, 0);
20970 break;
20971
20972 /* All fixed bounds are represented by INTEGER_CST nodes. Lower bounds
20973 are even omitted when they are the default. */
20974 case INTEGER_CST:
20975 /* If the value for this bound is the default one, we can even omit the
20976 attribute. */
20977 if (bound_attr == DW_AT_lower_bound
20978 && tree_fits_shwi_p (bound)
20979 && (dflt = lower_bound_default ()) != -1
20980 && tree_to_shwi (bound) == dflt)
20981 return;
20982
20983 /* FALLTHRU */
20984
20985 default:
20986 /* Because of the complex interactions there can be with other GNAT
20987 encodings, GDB isn't ready yet to handle a proper DWARF description
20988 for self-referential subrange bounds: let GNAT encodings do the
20989 magic in such a case. */
20990 if (is_ada ()
20991 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20992 && contains_placeholder_p (bound))
20993 return;
20994
20995 add_scalar_info (subrange_die, bound_attr, bound,
20996 dw_scalar_form_constant
20997 | dw_scalar_form_exprloc
20998 | dw_scalar_form_reference,
20999 context);
21000 return;
21001 }
21002 }
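/* As an illustration of the interaction with lower_bound_default above
   (a sketch, assuming default language settings): for the C declaration
   `int a[4];' the subrange DIE gets only DW_AT_upper_bound 3, because the
   lower bound 0 matches the C default and is omitted.  For a Fortran
   `integer :: a(4)' the default lower bound is 1, so again only
   DW_AT_upper_bound 4 needs to be emitted.  */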
21003
21004 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
21005 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
21006 Note that the block of subscript information for an array type also
21007 includes information about the element type of the given array type.
21008
21009 This function reuses previously set type and bound information if
21010 available. */
21011
21012 static void
21013 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
21014 {
21015 unsigned dimension_number;
21016 tree lower, upper;
21017 dw_die_ref child = type_die->die_child;
21018
21019 for (dimension_number = 0;
21020 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
21021 type = TREE_TYPE (type), dimension_number++)
21022 {
21023 tree domain = TYPE_DOMAIN (type);
21024
21025 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
21026 break;
21027
21028 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
21029 and (in GNU C only) variable bounds. Handle all three forms
21030 here. */
21031
21032 /* Find and reuse a previously generated DW_TAG_subrange_type if
21033 available.
21034
21035 For multi-dimensional arrays, as we iterate through the
21036 various dimensions in the enclosing for loop above, we also
21037 iterate through the DIE children and pick up each
21038 DW_TAG_subrange_type previously generated (if available).
21039 Each child DW_TAG_subrange_type DIE describes the range of
21040 the current dimension. At this point we should have as many
21041 DW_TAG_subrange_type's as we have dimensions in the
21042 array. */
21043 dw_die_ref subrange_die = NULL;
21044 if (child)
21045 while (1)
21046 {
21047 child = child->die_sib;
21048 if (child->die_tag == DW_TAG_subrange_type)
21049 subrange_die = child;
21050 if (child == type_die->die_child)
21051 {
21052 /* If we wrapped around, stop looking next time. */
21053 child = NULL;
21054 break;
21055 }
21056 if (child->die_tag == DW_TAG_subrange_type)
21057 break;
21058 }
21059 if (!subrange_die)
21060 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
21061
21062 if (domain)
21063 {
21064 /* We have an array type with specified bounds. */
21065 lower = TYPE_MIN_VALUE (domain);
21066 upper = TYPE_MAX_VALUE (domain);
21067
21068 /* Define the index type. */
21069 if (TREE_TYPE (domain)
21070 && !get_AT (subrange_die, DW_AT_type))
21071 {
21072 /* ??? This is probably an Ada unnamed subrange type. Ignore the
21073 TREE_TYPE field. We can't emit debug info for this
21074 because it is an unnamed integral type. */
21075 if (TREE_CODE (domain) == INTEGER_TYPE
21076 && TYPE_NAME (domain) == NULL_TREE
21077 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
21078 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
21079 ;
21080 else
21081 add_type_attribute (subrange_die, TREE_TYPE (domain),
21082 TYPE_UNQUALIFIED, false, type_die);
21083 }
21084
21085 /* ??? If upper is NULL, the array has unspecified length,
21086 but it does have a lower bound. This happens with the Fortran
21087 declaration `dimension arr(N:*)'.
21088 Since the debugger is definitely going to need to know N
21089 to produce useful results, go ahead and output the lower
21090 bound solo, and hope the debugger can cope. */
21091
21092 if (!get_AT (subrange_die, DW_AT_lower_bound))
21093 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
21094 if (!get_AT (subrange_die, DW_AT_upper_bound)
21095 && !get_AT (subrange_die, DW_AT_count))
21096 {
21097 if (upper)
21098 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
21099 else if ((is_c () || is_cxx ()) && COMPLETE_TYPE_P (type))
21100 /* Zero-length array. */
21101 add_bound_info (subrange_die, DW_AT_count,
21102 build_int_cst (TREE_TYPE (lower), 0), NULL);
21103 }
21104 }
21105
21106 /* Otherwise we have an array type with an unspecified length. The
21107 DWARF-2 spec does not say how to handle this; let's just leave out the
21108 bounds. */
21109 }
21110 }
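/* For example (a sketch for a C translation unit): the declaration
   `int m[2][3];' yields a single DW_TAG_array_type DIE whose element type
   is `int' and which has two DW_TAG_subrange_type children, with
   DW_AT_upper_bound 1 and 2 respectively; the lower bounds are omitted
   since 0 is the C default.  */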
21111
21112 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
21113
21114 static void
21115 add_byte_size_attribute (dw_die_ref die, tree tree_node)
21116 {
21117 dw_die_ref decl_die;
21118 HOST_WIDE_INT size;
21119 dw_loc_descr_ref size_expr = NULL;
21120
21121 switch (TREE_CODE (tree_node))
21122 {
21123 case ERROR_MARK:
21124 size = 0;
21125 break;
21126 case ENUMERAL_TYPE:
21127 case RECORD_TYPE:
21128 case UNION_TYPE:
21129 case QUAL_UNION_TYPE:
21130 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
21131 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
21132 {
21133 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
21134 return;
21135 }
21136 size_expr = type_byte_size (tree_node, &size);
21137 break;
21138 case FIELD_DECL:
21139 /* For a data member of a struct or union, the DW_AT_byte_size is
21140 generally given as the number of bytes normally allocated for an
21141 object of the *declared* type of the member itself. This is true
21142 even for bit-fields. */
21143 size = int_size_in_bytes (field_type (tree_node));
21144 break;
21145 default:
21146 gcc_unreachable ();
21147 }
21148
21149 /* Support for dynamically-sized objects was introduced in DWARF 3.
21150 At the moment, GDB does not handle variable byte sizes very well,
21151 though. */
21152 if ((dwarf_version >= 3 || !dwarf_strict)
21153 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
21154 && size_expr != NULL)
21155 add_AT_loc (die, DW_AT_byte_size, size_expr);
21156
21157 /* Note that `size' might be -1 when we get to this point. If it is, that
21158 indicates that the byte size of the entity in question is variable and
21159 that we could not generate a DWARF expression that computes it. */
21160 if (size >= 0)
21161 add_AT_unsigned (die, DW_AT_byte_size, size);
21162 }
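/* For instance, for a FIELD_DECL such as `unsigned int x : 3;' the byte
   size recorded is that of the declared type, i.e. 4 on a target where
   unsigned int is 32 bits, even though the field itself occupies only
   3 bits (see the FIELD_DECL case above).  */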
21163
21164 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
21165 alignment. */
21166
21167 static void
21168 add_alignment_attribute (dw_die_ref die, tree tree_node)
21169 {
21170 if (dwarf_version < 5 && dwarf_strict)
21171 return;
21172
21173 unsigned align;
21174
21175 if (DECL_P (tree_node))
21176 {
21177 if (!DECL_USER_ALIGN (tree_node))
21178 return;
21179
21180 align = DECL_ALIGN_UNIT (tree_node);
21181 }
21182 else if (TYPE_P (tree_node))
21183 {
21184 if (!TYPE_USER_ALIGN (tree_node))
21185 return;
21186
21187 align = TYPE_ALIGN_UNIT (tree_node);
21188 }
21189 else
21190 gcc_unreachable ();
21191
21192 add_AT_unsigned (die, DW_AT_alignment, align);
21193 }
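/* Only user-specified alignments are described.  For example, a C
   declaration such as `struct S { ... } __attribute__ ((aligned (16)));'
   gets DW_AT_alignment 16, whereas a type or decl left at its natural,
   default alignment gets no DW_AT_alignment attribute at all.  */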
21194
21195 /* For a FIELD_DECL node which represents a bit-field, output an attribute
21196 which specifies the distance in bits from the highest order bit of the
21197 "containing object" for the bit-field to the highest order bit of the
21198 bit-field itself.
21199
21200 For any given bit-field, the "containing object" is a hypothetical object
21201 (of some integral or enum type) within which the given bit-field lives. The
21202 type of this hypothetical "containing object" is always the same as the
21203 declared type of the individual bit-field itself. The determination of the
21204 exact location of the "containing object" for a bit-field is rather
21205 complicated. It's handled by the `field_byte_offset' function (above).
21206
21207 CTX is required: see the comment for VLR_CONTEXT.
21208
21209 Note that it is the size (in bytes) of the hypothetical "containing object"
21210 which will be given in the DW_AT_byte_size attribute for this bit-field.
21211 (See `add_byte_size_attribute' above). */
21212
21213 static inline void
21214 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
21215 {
21216 HOST_WIDE_INT object_offset_in_bytes;
21217 tree original_type = DECL_BIT_FIELD_TYPE (decl);
21218 HOST_WIDE_INT bitpos_int;
21219 HOST_WIDE_INT highest_order_object_bit_offset;
21220 HOST_WIDE_INT highest_order_field_bit_offset;
21221 HOST_WIDE_INT bit_offset;
21222
21223 field_byte_offset (decl, ctx, &object_offset_in_bytes);
21224
21225 /* Must be a field and a bit field. */
21226 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
21227
21228 /* We can't yet handle bit-fields whose offsets are variable, so if we
21229 encounter such things, just return without generating any attribute
21230 whatsoever. Likewise for variable or too large size. */
21231 if (! tree_fits_shwi_p (bit_position (decl))
21232 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
21233 return;
21234
21235 bitpos_int = int_bit_position (decl);
21236
21237 /* Note that the bit offset is always the distance (in bits) from the
21238 highest-order bit of the "containing object" to the highest-order bit of
21239 the bit-field itself. Since the "high-order end" of any object or field
21240 is different on big-endian and little-endian machines, the computation
21241 below must take account of these differences. */
21242 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
21243 highest_order_field_bit_offset = bitpos_int;
21244
21245 if (! BYTES_BIG_ENDIAN)
21246 {
21247 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
21248 highest_order_object_bit_offset +=
21249 simple_type_size_in_bits (original_type);
21250 }
21251
21252 bit_offset
21253 = (! BYTES_BIG_ENDIAN
21254 ? highest_order_object_bit_offset - highest_order_field_bit_offset
21255 : highest_order_field_bit_offset - highest_order_object_bit_offset);
21256
21257 if (bit_offset < 0)
21258 add_AT_int (die, DW_AT_bit_offset, bit_offset);
21259 else
21260 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
21261 }
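/* A worked example (a sketch, assuming a little-endian target where
   unsigned int is 32 bits): for `struct { unsigned a : 3; unsigned b : 5; }'
   the containing object of both fields starts at byte offset 0 and is
   32 bits wide.  For `a' (bit position 0, size 3) the computation above
   gives 32 - 3 = 29; for `b' (bit position 3, size 5) it gives
   32 - 8 = 24.  On a big-endian target the offsets would instead be
   0 and 3.  */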
21262
21263 /* For a FIELD_DECL node which represents a bit field, output an attribute
21264 which specifies the length in bits of the given field. */
21265
21266 static inline void
21267 add_bit_size_attribute (dw_die_ref die, tree decl)
21268 {
21269 /* Must be a field and a bit field. */
21270 gcc_assert (TREE_CODE (decl) == FIELD_DECL
21271 && DECL_BIT_FIELD_TYPE (decl));
21272
21273 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
21274 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
21275 }
21276
21277 /* If the compiled language is ANSI C, then add a 'prototyped'
21278 attribute if argument types are given for the parameters of a function. */
21279
21280 static inline void
21281 add_prototyped_attribute (dw_die_ref die, tree func_type)
21282 {
21283 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
21284 {
21285 case DW_LANG_C:
21286 case DW_LANG_C89:
21287 case DW_LANG_C99:
21288 case DW_LANG_C11:
21289 case DW_LANG_ObjC:
21290 if (prototype_p (func_type))
21291 add_AT_flag (die, DW_AT_prototyped, 1);
21292 break;
21293 default:
21294 break;
21295 }
21296 }
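/* For example, in a C compilation unit `int f (void)' and `int g (int)'
   are prototyped and get DW_AT_prototyped, while an old-style
   `int h ();' declaration does not.  */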
21297
21298 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
21299 by looking in the type declaration, the object declaration equate table or
21300 the block mapping. */
21301
21302 static inline void
21303 add_abstract_origin_attribute (dw_die_ref die, tree origin)
21304 {
21305 dw_die_ref origin_die = NULL;
21306
21307 /* For late LTO debug output we want to refer directly to the abstract
21308 DIE in the early debug rather than to the possibly existing concrete
21309 instance, and avoid creating the latter just for this purpose. */
21310 sym_off_pair *desc;
21311 if (in_lto_p
21312 && external_die_map
21313 && (desc = external_die_map->get (origin)))
21314 {
21315 add_AT_external_die_ref (die, DW_AT_abstract_origin,
21316 desc->sym, desc->off);
21317 return;
21318 }
21319
21320 if (DECL_P (origin))
21321 origin_die = lookup_decl_die (origin);
21322 else if (TYPE_P (origin))
21323 origin_die = lookup_type_die (origin);
21324 else if (TREE_CODE (origin) == BLOCK)
21325 origin_die = lookup_block_die (origin);
21326
21327 /* XXX: Functions that are never lowered don't always have correct block
21328 trees (in the case of Java they simply have no block tree; in some other
21329 languages the block tree is incorrect). For these functions, there is nothing we can really do to
21330 output correct debug info for inlined functions in all cases. Rather
21331 than die, we'll just produce deficient debug info now, in that we will
21332 have variables without a proper abstract origin. In the future, when all
21333 functions are lowered, we should re-add a gcc_assert (origin_die)
21334 here. */
21335
21336 if (origin_die)
21337 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21338 }
21339
21340 /* We do not currently support the pure_virtual attribute. */
21341
21342 static inline void
21343 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21344 {
21345 if (DECL_VINDEX (func_decl))
21346 {
21347 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21348
21349 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21350 add_AT_loc (die, DW_AT_vtable_elem_location,
21351 new_loc_descr (DW_OP_constu,
21352 tree_to_shwi (DECL_VINDEX (func_decl)),
21353 0));
21354
21355 /* GNU extension: Record what type this method came from originally. */
21356 if (debug_info_level > DINFO_LEVEL_TERSE
21357 && DECL_CONTEXT (func_decl))
21358 add_AT_die_ref (die, DW_AT_containing_type,
21359 lookup_type_die (DECL_CONTEXT (func_decl)));
21360 }
21361 }
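/* For instance, for a C++ virtual member function occupying vtable slot 2
   this adds DW_AT_virtuality DW_VIRTUALITY_virtual, a
   DW_AT_vtable_elem_location expression of DW_OP_constu 2, and (with
   non-terse debug info) a DW_AT_containing_type reference to the method's
   class.  */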
21362 \f
21363 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21364 given decl. This was a vendor extension until DWARF 4
21365 standardized it. */
21366
21367 static void
21368 add_linkage_attr (dw_die_ref die, tree decl)
21369 {
21370 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21371
21372 /* Mimic what assemble_name_raw does with a leading '*'. */
21373 if (name[0] == '*')
21374 name = &name[1];
21375
21376 if (dwarf_version >= 4)
21377 add_AT_string (die, DW_AT_linkage_name, name);
21378 else
21379 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21380 }
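/* For example, for a C++ function `void ns::foo ()' the assembler name is
   the mangled `_ZN2ns3fooEv', so the DIE gets
   DW_AT_linkage_name "_ZN2ns3fooEv" with DWARF 4 or later and
   DW_AT_MIPS_linkage_name with older versions.  */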
21381
21382 /* Add source coordinate attributes for the given decl. */
21383
21384 static void
21385 add_src_coords_attributes (dw_die_ref die, tree decl)
21386 {
21387 expanded_location s;
21388
21389 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21390 return;
21391 s = expand_location (DECL_SOURCE_LOCATION (decl));
21392 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21393 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21394 if (debug_column_info && s.column)
21395 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21396 }
21397
21398 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21399
21400 static void
21401 add_linkage_name_raw (dw_die_ref die, tree decl)
21402 {
21403 /* Defer until we have an assembler name set. */
21404 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21405 {
21406 limbo_die_node *asm_name;
21407
21408 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21409 asm_name->die = die;
21410 asm_name->created_for = decl;
21411 asm_name->next = deferred_asm_name;
21412 deferred_asm_name = asm_name;
21413 }
21414 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21415 add_linkage_attr (die, decl);
21416 }
21417
21418 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21419
21420 static void
21421 add_linkage_name (dw_die_ref die, tree decl)
21422 {
21423 if (debug_info_level > DINFO_LEVEL_NONE
21424 && VAR_OR_FUNCTION_DECL_P (decl)
21425 && TREE_PUBLIC (decl)
21426 && !(VAR_P (decl) && DECL_REGISTER (decl))
21427 && die->die_tag != DW_TAG_member)
21428 add_linkage_name_raw (die, decl);
21429 }
21430
21431 /* Add a DW_AT_name attribute and source coordinate attribute for the
21432 given decl, but only if it actually has a name. */
21433
21434 static void
21435 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21436 bool no_linkage_name)
21437 {
21438 tree decl_name;
21439
21440 decl_name = DECL_NAME (decl);
21441 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21442 {
21443 const char *name = dwarf2_name (decl, 0);
21444 if (name)
21445 add_name_attribute (die, name);
21446 else
21447 add_desc_attribute (die, decl);
21448
21449 if (! DECL_ARTIFICIAL (decl))
21450 add_src_coords_attributes (die, decl);
21451
21452 if (!no_linkage_name)
21453 add_linkage_name (die, decl);
21454 }
21455 else
21456 add_desc_attribute (die, decl);
21457
21458 #ifdef VMS_DEBUGGING_INFO
21459 /* Get the function's name, as described by its RTL. This may be different
21460 from the DECL_NAME name used in the source file. */
21461 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21462 {
21463 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21464 XEXP (DECL_RTL (decl), 0), false);
21465 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21466 }
21467 #endif /* VMS_DEBUGGING_INFO */
21468 }
21469
21470 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21471
21472 static void
21473 add_discr_value (dw_die_ref die, dw_discr_value *value)
21474 {
21475 dw_attr_node attr;
21476
21477 attr.dw_attr = DW_AT_discr_value;
21478 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21479 attr.dw_attr_val.val_entry = NULL;
21480 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21481 if (value->pos)
21482 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21483 else
21484 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21485 add_dwarf_attr (die, &attr);
21486 }
21487
21488 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21489
21490 static void
21491 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21492 {
21493 dw_attr_node attr;
21494
21495 attr.dw_attr = DW_AT_discr_list;
21496 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21497 attr.dw_attr_val.val_entry = NULL;
21498 attr.dw_attr_val.v.val_discr_list = discr_list;
21499 add_dwarf_attr (die, &attr);
21500 }
21501
21502 static inline dw_discr_list_ref
21503 AT_discr_list (dw_attr_node *attr)
21504 {
21505 return attr->dw_attr_val.v.val_discr_list;
21506 }
21507
21508 #ifdef VMS_DEBUGGING_INFO
21509 /* Output the debug main pointer DIE for VMS. */
21510
21511 void
21512 dwarf2out_vms_debug_main_pointer (void)
21513 {
21514 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21515 dw_die_ref die;
21516
21517 /* Allocate the VMS debug main subprogram die. */
21518 die = new_die_raw (DW_TAG_subprogram);
21519 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21520 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21521 current_function_funcdef_no);
21522 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21523
21524 /* Make it the first child of comp_unit_die (). */
21525 die->die_parent = comp_unit_die ();
21526 if (comp_unit_die ()->die_child)
21527 {
21528 die->die_sib = comp_unit_die ()->die_child->die_sib;
21529 comp_unit_die ()->die_child->die_sib = die;
21530 }
21531 else
21532 {
21533 die->die_sib = die;
21534 comp_unit_die ()->die_child = die;
21535 }
21536 }
21537 #endif /* VMS_DEBUGGING_INFO */
21538
21539 /* walk_tree helper function for uses_local_type, below. */
21540
21541 static tree
21542 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21543 {
21544 if (!TYPE_P (*tp))
21545 *walk_subtrees = 0;
21546 else
21547 {
21548 tree name = TYPE_NAME (*tp);
21549 if (name && DECL_P (name) && decl_function_context (name))
21550 return *tp;
21551 }
21552 return NULL_TREE;
21553 }
21554
21555 /* If TYPE involves a function-local type (including a local typedef to a
21556 non-local type), returns that type; otherwise returns NULL_TREE. */
21557
21558 static tree
21559 uses_local_type (tree type)
21560 {
21561 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21562 return used;
21563 }
21564
21565 /* Return the DIE for the scope that immediately contains this type.
21566 Non-named types that do not involve a function-local type get global
21567 scope. Named types nested in namespaces or other types get their
21568 containing scope. All other types (i.e. function-local named types) get
21569 the current active scope. */
21570
21571 static dw_die_ref
21572 scope_die_for (tree t, dw_die_ref context_die)
21573 {
21574 dw_die_ref scope_die = NULL;
21575 tree containing_scope;
21576
21577 /* Non-types always go in the current scope. */
21578 gcc_assert (TYPE_P (t));
21579
21580 /* Use the scope of the typedef, rather than the scope of the type
21581 it refers to. */
21582 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21583 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21584 else
21585 containing_scope = TYPE_CONTEXT (t);
21586
21587 /* Use the containing namespace if there is one. */
21588 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21589 {
21590 if (context_die == lookup_decl_die (containing_scope))
21591 /* OK */;
21592 else if (debug_info_level > DINFO_LEVEL_TERSE)
21593 context_die = get_context_die (containing_scope);
21594 else
21595 containing_scope = NULL_TREE;
21596 }
21597
21598 /* Ignore function type "scopes" from the C frontend. They mean that
21599 a tagged type is local to a parmlist of a function declarator, but
21600 that isn't useful to DWARF. */
21601 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21602 containing_scope = NULL_TREE;
21603
21604 if (SCOPE_FILE_SCOPE_P (containing_scope))
21605 {
21606 /* If T uses a local type keep it local as well, to avoid references
21607 to function-local DIEs from outside the function. */
21608 if (current_function_decl && uses_local_type (t))
21609 scope_die = context_die;
21610 else
21611 scope_die = comp_unit_die ();
21612 }
21613 else if (TYPE_P (containing_scope))
21614 {
21615 /* For types, we can just look up the appropriate DIE. */
21616 if (debug_info_level > DINFO_LEVEL_TERSE)
21617 scope_die = get_context_die (containing_scope);
21618 else
21619 {
21620 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21621 if (scope_die == NULL)
21622 scope_die = comp_unit_die ();
21623 }
21624 }
21625 else
21626 scope_die = context_die;
21627
21628 return scope_die;
21629 }
21630
21631 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21632
21633 static inline int
21634 local_scope_p (dw_die_ref context_die)
21635 {
21636 for (; context_die; context_die = context_die->die_parent)
21637 if (context_die->die_tag == DW_TAG_inlined_subroutine
21638 || context_die->die_tag == DW_TAG_subprogram)
21639 return 1;
21640
21641 return 0;
21642 }
21643
21644 /* Returns nonzero if CONTEXT_DIE is a class. */
21645
21646 static inline int
21647 class_scope_p (dw_die_ref context_die)
21648 {
21649 return (context_die
21650 && (context_die->die_tag == DW_TAG_structure_type
21651 || context_die->die_tag == DW_TAG_class_type
21652 || context_die->die_tag == DW_TAG_interface_type
21653 || context_die->die_tag == DW_TAG_union_type));
21654 }
21655
21656 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21657 whether or not to treat a DIE in this context as a declaration. */
21658
21659 static inline int
21660 class_or_namespace_scope_p (dw_die_ref context_die)
21661 {
21662 return (class_scope_p (context_die)
21663 || (context_die && context_die->die_tag == DW_TAG_namespace));
21664 }
21665
21666 /* Many forms of DIEs require a "type description" attribute. This
21667 routine locates the proper "type descriptor" die for the type given
21668 by 'type' plus any additional qualifiers given by 'cv_quals', and
21669 adds a DW_AT_type attribute below the given die. */
21670
21671 static void
21672 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21673 bool reverse, dw_die_ref context_die)
21674 {
21675 enum tree_code code = TREE_CODE (type);
21676 dw_die_ref type_die = NULL;
21677
21678 /* ??? If this type is an unnamed subrange type of an integral, floating-point
21679 or fixed-point type, use the inner type. This is because we have no
21680 support for unnamed types in base_type_die. This can happen if this is
21681 an Ada subrange type. The correct solution is to emit a subrange type DIE. */
21682 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21683 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21684 type = TREE_TYPE (type), code = TREE_CODE (type);
21685
21686 if (code == ERROR_MARK
21687 /* Handle a special case. For functions whose return type is void, we
21688 generate *no* type attribute. (Note that no object may have type
21689 `void', so this only applies to function return types). */
21690 || code == VOID_TYPE)
21691 return;
21692
21693 type_die = modified_type_die (type,
21694 cv_quals | TYPE_QUALS (type),
21695 reverse,
21696 context_die);
21697
21698 if (type_die != NULL)
21699 add_AT_die_ref (object_die, DW_AT_type, type_die);
21700 }
21701
21702 /* Given an object die, add the calling convention attribute for the
21703 function call type. */
21704 static void
21705 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21706 {
21707 enum dwarf_calling_convention value = DW_CC_normal;
21708
21709 value = ((enum dwarf_calling_convention)
21710 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21711
21712 if (is_fortran ()
21713 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21714 {
21715 /* DWARF 2 doesn't provide a way to identify a program's source-level
21716 entry point. DW_AT_calling_convention attributes are only meant
21717 to describe functions' calling conventions. However, lacking a
21718 better way to signal the Fortran main program, we used this for
21719 a long time, following existing custom. Now, DWARF 4 has
21720 DW_AT_main_subprogram, which we add below, but some tools still
21721 rely on the old way, which we thus keep. */
21722 value = DW_CC_program;
21723
21724 if (dwarf_version >= 4 || !dwarf_strict)
21725 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21726 }
21727
21728 /* Only add the attribute if the backend requests it, and the value
21729 is not DW_CC_normal. */
21730 if (value && (value != DW_CC_normal))
21731 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21732 }
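/* For a Fortran main program (assembler name "MAIN__", as produced by
   gfortran) this therefore emits DW_AT_calling_convention DW_CC_program
   and, when DWARF 4 or later (or non-strict DWARF) is in use,
   DW_AT_main_subprogram as well.  */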
21733
21734 /* Given a tree pointer to a struct, class, union, or enum type node, return
21735 a pointer to the (string) tag name for the given type, or zero if the type
21736 was declared without a tag. */
21737
21738 static const char *
21739 type_tag (const_tree type)
21740 {
21741 const char *name = 0;
21742
21743 if (TYPE_NAME (type) != 0)
21744 {
21745 tree t = 0;
21746
21747 /* Find the IDENTIFIER_NODE for the type name. */
21748 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21749 && !TYPE_NAMELESS (type))
21750 t = TYPE_NAME (type);
21751
21752 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21753 a TYPE_DECL node, regardless of whether or not a `typedef' was
21754 involved. */
21755 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21756 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21757 {
21758 /* We want to be extra verbose. Don't call dwarf_name if
21759 DECL_NAME isn't set. The default hook for decl_printable_name
21760 doesn't like that, and in this context it's correct to return
21761 0, instead of "<anonymous>" or the like. */
21762 if (DECL_NAME (TYPE_NAME (type))
21763 && !DECL_NAMELESS (TYPE_NAME (type)))
21764 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21765 }
21766
21767 /* Now get the name as a string, or invent one. */
21768 if (!name && t != 0)
21769 name = IDENTIFIER_POINTER (t);
21770 }
21771
21772 return (name == 0 || *name == '\0') ? 0 : name;
21773 }
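/* For example, `struct point { int x, y; };' yields the tag name "point",
   while an anonymous `struct { int x, y; }' yields a null pointer, so no
   DW_AT_name is emitted for it.  */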
21774
21775 /* Return the type associated with a data member, making a special check
21776 for bit-field types. */
21777
21778 static inline tree
21779 member_declared_type (const_tree member)
21780 {
21781 return (DECL_BIT_FIELD_TYPE (member)
21782 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21783 }
21784
21785 /* Get the decl's label, as described by its RTL. This may be different
21786 from the DECL_NAME name used in the source file. */
21787
21788 #if 0
21789 static const char *
21790 decl_start_label (tree decl)
21791 {
21792 rtx x;
21793 const char *fnname;
21794
21795 x = DECL_RTL (decl);
21796 gcc_assert (MEM_P (x));
21797
21798 x = XEXP (x, 0);
21799 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21800
21801 fnname = XSTR (x, 0);
21802 return fnname;
21803 }
21804 #endif
21805 \f
21806 /* For variable-length arrays that have been previously generated, but
21807 may be incomplete due to missing subscript info, fill the subscript
21808 info. Return TRUE if this is one of those cases. */
21809 static bool
21810 fill_variable_array_bounds (tree type)
21811 {
21812 if (TREE_ASM_WRITTEN (type)
21813 && TREE_CODE (type) == ARRAY_TYPE
21814 && variably_modified_type_p (type, NULL))
21815 {
21816 dw_die_ref array_die = lookup_type_die (type);
21817 if (!array_die)
21818 return false;
21819 add_subscript_info (array_die, type, !is_ada ());
21820 return true;
21821 }
21822 return false;
21823 }
21824
21825 /* These routines generate the internal representation of the DIE's for
21826 the compilation unit. Debugging information is collected by walking
21827 the declaration trees passed in from dwarf2out_decl(). */
21828
21829 static void
21830 gen_array_type_die (tree type, dw_die_ref context_die)
21831 {
21832 dw_die_ref array_die;
21833
21834 /* GNU compilers represent multidimensional array types as sequences of one
21835 dimensional array types whose element types are themselves array types.
21836 We sometimes squish that down to a single array_type DIE with multiple
21837 subscripts in the Dwarf debugging info. The draft Dwarf specification
21838 says that we are allowed to do this kind of compression in C, because
21839 there is no difference between an array of arrays and a multidimensional
21840 array. We don't do this for Ada to remain as close as possible to the
21841 actual representation, which is especially important given the language's
21842 flexibility with regard to arrays of variable size. */
21843
21844 bool collapse_nested_arrays = !is_ada ();
21845
21846 if (fill_variable_array_bounds (type))
21847 return;
21848
21849 dw_die_ref scope_die = scope_die_for (type, context_die);
21850 tree element_type;
21851
21852 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21853 DW_TAG_string_type doesn't have a DW_AT_type attribute). */
21854 if (TREE_CODE (type) == ARRAY_TYPE
21855 && TYPE_STRING_FLAG (type)
21856 && is_fortran ()
21857 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21858 {
21859 HOST_WIDE_INT size;
21860
21861 array_die = new_die (DW_TAG_string_type, scope_die, type);
21862 add_name_attribute (array_die, type_tag (type));
21863 equate_type_number_to_die (type, array_die);
21864 size = int_size_in_bytes (type);
21865 if (size >= 0)
21866 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21867 /* ??? We can't annotate types late, but for LTO we may not
21868 generate a location early either (gfortran.dg/save_6.f90). */
21869 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21870 && TYPE_DOMAIN (type) != NULL_TREE
21871 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21872 {
21873 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21874 tree rszdecl = szdecl;
21875
21876 size = int_size_in_bytes (TREE_TYPE (szdecl));
21877 if (!DECL_P (szdecl))
21878 {
21879 if (TREE_CODE (szdecl) == INDIRECT_REF
21880 && DECL_P (TREE_OPERAND (szdecl, 0)))
21881 {
21882 rszdecl = TREE_OPERAND (szdecl, 0);
21883 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21884 != DWARF2_ADDR_SIZE)
21885 size = 0;
21886 }
21887 else
21888 size = 0;
21889 }
21890 if (size > 0)
21891 {
21892 dw_loc_list_ref loc
21893 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21894 NULL);
21895 if (loc)
21896 {
21897 add_AT_location_description (array_die, DW_AT_string_length,
21898 loc);
21899 if (size != DWARF2_ADDR_SIZE)
21900 add_AT_unsigned (array_die, dwarf_version >= 5
21901 ? DW_AT_string_length_byte_size
21902 : DW_AT_byte_size, size);
21903 }
21904 }
21905 }
21906 return;
21907 }
21908
21909 array_die = new_die (DW_TAG_array_type, scope_die, type);
21910 add_name_attribute (array_die, type_tag (type));
21911 equate_type_number_to_die (type, array_die);
21912
21913 if (TREE_CODE (type) == VECTOR_TYPE)
21914 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21915
21916 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21917 if (is_fortran ()
21918 && TREE_CODE (type) == ARRAY_TYPE
21919 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21920 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21921 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21922
21923 #if 0
21924 /* We default the array ordering. Debuggers will probably do the right
21925 things even if DW_AT_ordering is not present. It's not even an issue
21926 until we start to get into multidimensional arrays anyway. If a debugger
21927 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21928 then we'll have to put the DW_AT_ordering attribute back in. (But if
21929 and when we find out that we need to put these in, we will only do so
21930 for multidimensional arrays.) */
21931 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21932 #endif
21933
21934 if (TREE_CODE (type) == VECTOR_TYPE)
21935 {
21936 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21937 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21938 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21939 add_bound_info (subrange_die, DW_AT_upper_bound,
21940 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21941 }
21942 else
21943 add_subscript_info (array_die, type, collapse_nested_arrays);
21944
21945 /* Add representation of the type of the elements of this array type and
21946 emit the corresponding DIE if we haven't done it already. */
21947 element_type = TREE_TYPE (type);
21948 if (collapse_nested_arrays)
21949 while (TREE_CODE (element_type) == ARRAY_TYPE)
21950 {
21951 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21952 break;
21953 element_type = TREE_TYPE (element_type);
21954 }
21955
21956 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21957 TREE_CODE (type) == ARRAY_TYPE
21958 && TYPE_REVERSE_STORAGE_ORDER (type),
21959 context_die);
21960
21961 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21962 if (TYPE_ARTIFICIAL (type))
21963 add_AT_flag (array_die, DW_AT_artificial, 1);
21964
21965 if (get_AT (array_die, DW_AT_name))
21966 add_pubtype (type, array_die);
21967
21968 add_alignment_attribute (array_die, type);
21969 }
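/* As a rough illustration for vector types: with
   `typedef int v4si __attribute__ ((vector_size (16)));' on a target
   where int is 4 bytes, the code above produces a DW_TAG_array_type DIE
   carrying the DW_AT_GNU_vector flag and a single DW_TAG_subrange_type
   child describing indices 0 through 3.  */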
21970
21971 /* This routine generates a DIE for an array with a hidden descriptor;
21972 the details are filled into *info by a langhook. */
21973
21974 static void
21975 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21976 dw_die_ref context_die)
21977 {
21978 const dw_die_ref scope_die = scope_die_for (type, context_die);
21979 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21980 struct loc_descr_context context = { type, info->base_decl, NULL,
21981 false, false };
21982 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21983 int dim;
21984
21985 add_name_attribute (array_die, type_tag (type));
21986 equate_type_number_to_die (type, array_die);
21987
21988 if (info->ndimensions > 1)
21989 switch (info->ordering)
21990 {
21991 case array_descr_ordering_row_major:
21992 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21993 break;
21994 case array_descr_ordering_column_major:
21995 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21996 break;
21997 default:
21998 break;
21999 }
22000
22001 if (dwarf_version >= 3 || !dwarf_strict)
22002 {
22003 if (info->data_location)
22004 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
22005 dw_scalar_form_exprloc, &context);
22006 if (info->associated)
22007 add_scalar_info (array_die, DW_AT_associated, info->associated,
22008 dw_scalar_form_constant
22009 | dw_scalar_form_exprloc
22010 | dw_scalar_form_reference, &context);
22011 if (info->allocated)
22012 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
22013 dw_scalar_form_constant
22014 | dw_scalar_form_exprloc
22015 | dw_scalar_form_reference, &context);
22016 if (info->stride)
22017 {
22018 const enum dwarf_attribute attr
22019 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
22020 const int forms
22021 = (info->stride_in_bits)
22022 ? dw_scalar_form_constant
22023 : (dw_scalar_form_constant
22024 | dw_scalar_form_exprloc
22025 | dw_scalar_form_reference);
22026
22027 add_scalar_info (array_die, attr, info->stride, forms, &context);
22028 }
22029 }
22030 if (dwarf_version >= 5)
22031 {
22032 if (info->rank)
22033 {
22034 add_scalar_info (array_die, DW_AT_rank, info->rank,
22035 dw_scalar_form_constant
22036 | dw_scalar_form_exprloc, &context);
22037 subrange_tag = DW_TAG_generic_subrange;
22038 context.placeholder_arg = true;
22039 }
22040 }
22041
22042 add_gnat_descriptive_type_attribute (array_die, type, context_die);
22043
22044 for (dim = 0; dim < info->ndimensions; dim++)
22045 {
22046 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
22047
22048 if (info->dimen[dim].bounds_type)
22049 add_type_attribute (subrange_die,
22050 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
22051 false, context_die);
22052 if (info->dimen[dim].lower_bound)
22053 add_bound_info (subrange_die, DW_AT_lower_bound,
22054 info->dimen[dim].lower_bound, &context);
22055 if (info->dimen[dim].upper_bound)
22056 add_bound_info (subrange_die, DW_AT_upper_bound,
22057 info->dimen[dim].upper_bound, &context);
22058 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
22059 add_scalar_info (subrange_die, DW_AT_byte_stride,
22060 info->dimen[dim].stride,
22061 dw_scalar_form_constant
22062 | dw_scalar_form_exprloc
22063 | dw_scalar_form_reference,
22064 &context);
22065 }
22066
22067 gen_type_die (info->element_type, context_die);
22068 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
22069 TREE_CODE (type) == ARRAY_TYPE
22070 && TYPE_REVERSE_STORAGE_ORDER (type),
22071 context_die);
22072
22073 if (get_AT (array_die, DW_AT_name))
22074 add_pubtype (type, array_die);
22075
22076 add_alignment_attribute (array_die, type);
22077 }
22078
22079 #if 0
22080 static void
22081 gen_entry_point_die (tree decl, dw_die_ref context_die)
22082 {
22083 tree origin = decl_ultimate_origin (decl);
22084 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
22085
22086 if (origin != NULL)
22087 add_abstract_origin_attribute (decl_die, origin);
22088 else
22089 {
22090 add_name_and_src_coords_attributes (decl_die, decl);
22091 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
22092 TYPE_UNQUALIFIED, false, context_die);
22093 }
22094
22095 if (DECL_ABSTRACT_P (decl))
22096 equate_decl_number_to_die (decl, decl_die);
22097 else
22098 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
22099 }
22100 #endif
22101
22102 /* Walk through the list of incomplete types again, trying once more to
22103 emit full debugging info for them. */
22104
22105 static void
22106 retry_incomplete_types (void)
22107 {
22108 set_early_dwarf s;
22109 int i;
22110
22111 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
22112 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
22113 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
22114 vec_safe_truncate (incomplete_types, 0);
22115 }
22116
22117 /* Determine what tag to use for a record type. */
22118
22119 static enum dwarf_tag
22120 record_type_tag (tree type)
22121 {
22122 if (! lang_hooks.types.classify_record)
22123 return DW_TAG_structure_type;
22124
22125 switch (lang_hooks.types.classify_record (type))
22126 {
22127 case RECORD_IS_STRUCT:
22128 return DW_TAG_structure_type;
22129
22130 case RECORD_IS_CLASS:
22131 return DW_TAG_class_type;
22132
22133 case RECORD_IS_INTERFACE:
22134 if (dwarf_version >= 3 || !dwarf_strict)
22135 return DW_TAG_interface_type;
22136 return DW_TAG_structure_type;
22137
22138 default:
22139 gcc_unreachable ();
22140 }
22141 }
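/* For instance, the C++ front end classifies a type declared with the
   `class' keyword as RECORD_IS_CLASS, so it is emitted as
   DW_TAG_class_type, while one declared with `struct' is emitted as
   DW_TAG_structure_type.  Front ends without a classify_record hook fall
   back to DW_TAG_structure_type for everything.  */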
22142
22143 /* Generate a DIE to represent an enumeration type. Note that these DIEs
22144 include all of the information about the enumeration values also. Each
22145 enumerated type name/value is listed as a child of the enumerated type
22146 DIE. */
22147
22148 static dw_die_ref
22149 gen_enumeration_type_die (tree type, dw_die_ref context_die)
22150 {
22151 dw_die_ref type_die = lookup_type_die (type);
22152 dw_die_ref orig_type_die = type_die;
22153
22154 if (type_die == NULL)
22155 {
22156 type_die = new_die (DW_TAG_enumeration_type,
22157 scope_die_for (type, context_die), type);
22158 equate_type_number_to_die (type, type_die);
22159 add_name_attribute (type_die, type_tag (type));
22160 if ((dwarf_version >= 4 || !dwarf_strict)
22161 && ENUM_IS_SCOPED (type))
22162 add_AT_flag (type_die, DW_AT_enum_class, 1);
22163 if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
22164 add_AT_flag (type_die, DW_AT_declaration, 1);
22165 if (!dwarf_strict)
22166 add_AT_unsigned (type_die, DW_AT_encoding,
22167 TYPE_UNSIGNED (type)
22168 ? DW_ATE_unsigned
22169 : DW_ATE_signed);
22170 }
22171 else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
22172 return type_die;
22173 else
22174 remove_AT (type_die, DW_AT_declaration);
22175
22176 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
22177 given enum type is incomplete, do not generate the DW_AT_byte_size
22178 attribute or the DW_AT_element_list attribute. */
22179 if (TYPE_SIZE (type))
22180 {
22181 tree link;
22182
22183 if (!ENUM_IS_OPAQUE (type))
22184 TREE_ASM_WRITTEN (type) = 1;
22185 if (!orig_type_die || !get_AT (type_die, DW_AT_byte_size))
22186 add_byte_size_attribute (type_die, type);
22187 if (!orig_type_die || !get_AT (type_die, DW_AT_alignment))
22188 add_alignment_attribute (type_die, type);
22189 if ((dwarf_version >= 3 || !dwarf_strict)
22190 && (!orig_type_die || !get_AT (type_die, DW_AT_type)))
22191 {
22192 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
22193 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
22194 context_die);
22195 }
22196 if (TYPE_STUB_DECL (type) != NULL_TREE)
22197 {
22198 if (!orig_type_die || !get_AT (type_die, DW_AT_decl_file))
22199 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
22200 if (!orig_type_die || !get_AT (type_die, DW_AT_accessibility))
22201 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
22202 }
22203
22204 /* If the first reference to this type was as the return type of an
22205 inline function, then it may not have a parent. Fix this now. */
22206 if (type_die->die_parent == NULL)
22207 add_child_die (scope_die_for (type, context_die), type_die);
22208
22209 for (link = TYPE_VALUES (type);
22210 link != NULL; link = TREE_CHAIN (link))
22211 {
22212 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
22213 tree value = TREE_VALUE (link);
22214
22215 gcc_assert (!ENUM_IS_OPAQUE (type));
22216 add_name_attribute (enum_die,
22217 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
22218
22219 if (TREE_CODE (value) == CONST_DECL)
22220 value = DECL_INITIAL (value);
22221
22222 if (simple_type_size_in_bits (TREE_TYPE (value))
22223 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
22224 {
22225 /* For constant forms created by add_AT_unsigned, DWARF
22226 consumers (GDB, elfutils, etc.) always zero-extend
22227 the value. Only when the actual value is negative
22228 do we need to use add_AT_int to generate a constant
22229 form that can represent negative values. */
22230 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
22231 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
22232 add_AT_unsigned (enum_die, DW_AT_const_value,
22233 (unsigned HOST_WIDE_INT) val);
22234 else
22235 add_AT_int (enum_die, DW_AT_const_value, val);
22236 }
22237 else
22238 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
22239 that here. TODO: This should be re-worked to use correct
22240 signed/unsigned double tags for all cases. */
22241 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
22242 }
22243
22244 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22245 if (TYPE_ARTIFICIAL (type)
22246 && (!orig_type_die || !get_AT (type_die, DW_AT_artificial)))
22247 add_AT_flag (type_die, DW_AT_artificial, 1);
22248 }
22249 else
22250 add_AT_flag (type_die, DW_AT_declaration, 1);
22251
22252 add_pubtype (type, type_die);
22253
22254 return type_die;
22255 }
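/* As a sketch of the output for a C enumeration such as
   `enum color { RED = 1, GREEN = 2 };': a DW_TAG_enumeration_type DIE
   with DW_AT_byte_size, and two DW_TAG_enumerator children named "RED"
   and "GREEN" whose DW_AT_const_value attributes are 1 and 2.  A C++11
   scoped `enum class' additionally gets the DW_AT_enum_class flag when
   DWARF 4 or later (or non-strict DWARF) is in use.  */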
22256
22257 /* Generate a DIE to represent either a real live formal parameter decl or to
22258 represent just the type of some formal parameter position in some function
22259 type.
22260
22261 Note that this routine is a bit unusual because its argument may be a
22262 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
22263 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
22264 node. If it's the former then this function is being called to output a
22265 DIE to represent a formal parameter object (or some inlining thereof). If
22266 it's the latter, then this function is only being called to output a
22267 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
22268 argument type of some subprogram type.
22269 If EMIT_NAME_P is true, name and source coordinate attributes
22270 are emitted. */
22271
22272 static dw_die_ref
22273 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
22274 dw_die_ref context_die)
22275 {
22276 tree node_or_origin = node ? node : origin;
22277 tree ultimate_origin;
22278 dw_die_ref parm_die = NULL;
22279
22280 if (DECL_P (node_or_origin))
22281 {
22282 parm_die = lookup_decl_die (node);
22283
22284 /* If the contexts differ, we may not be talking about the same
22285 thing.
22286 ??? When in LTO the DIE parent is the "abstract" copy and the
22287 context_die is the specification "copy". But this whole block
22288 should eventually no longer be needed. */
22289 if (parm_die && parm_die->die_parent != context_die && !in_lto_p)
22290 {
22291 if (!DECL_ABSTRACT_P (node))
22292 {
22293 /* This can happen when creating an inlined instance, in
22294 which case we need to create a new DIE that will get
22295 annotated with DW_AT_abstract_origin. */
22296 parm_die = NULL;
22297 }
22298 else
22299 gcc_unreachable ();
22300 }
22301
22302 if (parm_die && parm_die->die_parent == NULL)
22303 {
22304 /* Check that parm_die already has the right attributes that
22305 we would have added below. If any attributes are
22306 missing, fall through to add them. */
22307 if (! DECL_ABSTRACT_P (node_or_origin)
22308 && !get_AT (parm_die, DW_AT_location)
22309 && !get_AT (parm_die, DW_AT_const_value))
22310 /* We are missing location info, and are about to add it. */
22311 ;
22312 else
22313 {
22314 add_child_die (context_die, parm_die);
22315 return parm_die;
22316 }
22317 }
22318 }
22319
22320 /* If we have a previously generated DIE, use it, unless this is a
22321 concrete instance (origin != NULL), in which case we need a new
22322 DIE with a corresponding DW_AT_abstract_origin. */
22323 bool reusing_die;
22324 if (parm_die && origin == NULL)
22325 reusing_die = true;
22326 else
22327 {
22328 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22329 reusing_die = false;
22330 }
22331
22332 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22333 {
22334 case tcc_declaration:
22335 ultimate_origin = decl_ultimate_origin (node_or_origin);
22336 if (node || ultimate_origin)
22337 origin = ultimate_origin;
22338
22339 if (reusing_die)
22340 goto add_location;
22341
22342 if (origin != NULL)
22343 add_abstract_origin_attribute (parm_die, origin);
22344 else if (emit_name_p)
22345 add_name_and_src_coords_attributes (parm_die, node);
22346 if (origin == NULL
22347 || (! DECL_ABSTRACT_P (node_or_origin)
22348 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22349 decl_function_context
22350 (node_or_origin))))
22351 {
22352 tree type = TREE_TYPE (node_or_origin);
22353 if (decl_by_reference_p (node_or_origin))
22354 add_type_attribute (parm_die, TREE_TYPE (type),
22355 TYPE_UNQUALIFIED,
22356 false, context_die);
22357 else
22358 add_type_attribute (parm_die, type,
22359 decl_quals (node_or_origin),
22360 false, context_die);
22361 }
22362 if (origin == NULL && DECL_ARTIFICIAL (node))
22363 add_AT_flag (parm_die, DW_AT_artificial, 1);
22364 add_location:
22365 if (node && node != origin)
22366 equate_decl_number_to_die (node, parm_die);
22367 if (! DECL_ABSTRACT_P (node_or_origin))
22368 add_location_or_const_value_attribute (parm_die, node_or_origin,
22369 node == NULL);
22370
22371 break;
22372
22373 case tcc_type:
22374 /* We were called with some kind of a ..._TYPE node. */
22375 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22376 context_die);
22377 break;
22378
22379 default:
22380 gcc_unreachable ();
22381 }
22382
22383 return parm_die;
22384 }
22385
22386 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22387 children DW_TAG_formal_parameter DIEs representing the arguments of the
22388 parameter pack.
22389
22390 PARM_PACK must be a function parameter pack.
22391 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22392 must point to the subsequent arguments of the function PACK_ARG belongs to.
22393 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22394 If NEXT_ARG is non-NULL, *NEXT_ARG is set to the function argument
22395 following the last one for which a DIE was generated. */
22396
22397 static dw_die_ref
22398 gen_formal_parameter_pack_die (tree parm_pack,
22399 tree pack_arg,
22400 dw_die_ref subr_die,
22401 tree *next_arg)
22402 {
22403 tree arg;
22404 dw_die_ref parm_pack_die;
22405
22406 gcc_assert (parm_pack
22407 && lang_hooks.function_parameter_pack_p (parm_pack)
22408 && subr_die);
22409
22410 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22411 add_src_coords_attributes (parm_pack_die, parm_pack);
22412
22413 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22414 {
22415 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22416 parm_pack))
22417 break;
22418 gen_formal_parameter_die (arg, NULL,
22419 false /* Don't emit name attribute. */,
22420 parm_pack_die);
22421 }
22422 if (next_arg)
22423 *next_arg = arg;
22424 return parm_pack_die;
22425 }
22426
22427 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22428 at the end of an (ANSI prototyped) formal parameter list. */
22429
22430 static void
22431 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22432 {
22433 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22434 }
22435
22436 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22437 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22438 parameters as specified in some function type specification (except for
22439 those which appear as part of a function *definition*). */
22440
22441 static void
22442 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22443 {
22444 tree link;
22445 tree formal_type = NULL;
22446 tree first_parm_type;
22447 tree arg;
22448
22449 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22450 {
22451 arg = DECL_ARGUMENTS (function_or_method_type);
22452 function_or_method_type = TREE_TYPE (function_or_method_type);
22453 }
22454 else
22455 arg = NULL_TREE;
22456
22457 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22458
22459 /* Make our first pass over the list of formal parameter types and output a
22460 DW_TAG_formal_parameter DIE for each one. */
22461 for (link = first_parm_type; link; )
22462 {
22463 dw_die_ref parm_die;
22464
22465 formal_type = TREE_VALUE (link);
22466 if (formal_type == void_type_node)
22467 break;
22468
22469 /* Output a (nameless) DIE to represent the formal parameter itself. */
22470 parm_die = gen_formal_parameter_die (formal_type, NULL,
22471 true /* Emit name attribute. */,
22472 context_die);
22473 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22474 && link == first_parm_type)
22475 {
22476 add_AT_flag (parm_die, DW_AT_artificial, 1);
22477 if (dwarf_version >= 3 || !dwarf_strict)
22478 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22479 }
22480 else if (arg && DECL_ARTIFICIAL (arg))
22481 add_AT_flag (parm_die, DW_AT_artificial, 1);
22482
22483 link = TREE_CHAIN (link);
22484 if (arg)
22485 arg = DECL_CHAIN (arg);
22486 }
22487
22488 /* If this function type has an ellipsis, add a
22489 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22490 if (formal_type != void_type_node)
22491 gen_unspecified_parameters_die (function_or_method_type, context_die);
22492
22493 /* Make our second (and final) pass over the list of formal parameter types
22494 and output DIEs to represent those types (as necessary). */
22495 for (link = TYPE_ARG_TYPES (function_or_method_type);
22496 link && TREE_VALUE (link);
22497 link = TREE_CHAIN (link))
22498 gen_type_die (TREE_VALUE (link), context_die);
22499 }
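/* Editor's illustration (a hedged sketch, not part of the original source):
   for a prototype such as

	int log_it (const char *, ...);

   the first pass above emits one DW_TAG_formal_parameter DIE for the
   `const char *' argument, and because the TYPE_ARG_TYPES chain does not end
   in void_type_node, a trailing DW_TAG_unspecified_parameters DIE is emitted
   to stand in for the `...'.  */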
22500
22501 /* We want to generate the DIE for TYPE so that we can generate the
22502 die for MEMBER, which has been defined; we will need to refer back
22503 to the member declaration nested within TYPE. If we're trying to
22504 generate minimal debug info for TYPE, processing TYPE won't do the
22505 trick; we need to attach the member declaration by hand. */
22506
22507 static void
22508 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22509 {
22510 gen_type_die (type, context_die);
22511
22512 /* If we're trying to avoid duplicate debug info, we may not have
22513 emitted the member decl for this function. Emit it now. */
22514 if (TYPE_STUB_DECL (type)
22515 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22516 && ! lookup_decl_die (member))
22517 {
22518 dw_die_ref type_die;
22519 gcc_assert (!decl_ultimate_origin (member));
22520
22521 type_die = lookup_type_die_strip_naming_typedef (type);
22522 if (TREE_CODE (member) == FUNCTION_DECL)
22523 gen_subprogram_die (member, type_die);
22524 else if (TREE_CODE (member) == FIELD_DECL)
22525 {
22526 	  /* Ignore the nameless fields that are used to skip bits, but handle
22527 C++ anonymous unions and structs. */
22528 if (DECL_NAME (member) != NULL_TREE
22529 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22530 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22531 {
22532 struct vlr_context vlr_ctx = {
22533 DECL_CONTEXT (member), /* struct_type */
22534 NULL_TREE /* variant_part_offset */
22535 };
22536 gen_type_die (member_declared_type (member), type_die);
22537 gen_field_die (member, &vlr_ctx, type_die);
22538 }
22539 }
22540 else
22541 gen_variable_die (member, NULL_TREE, type_die);
22542 }
22543 }
22544 \f
22545 /* Forward declare this function, because it is mutually recursive
22546    with its set_block_* pairing function.  */
22547 static void set_decl_origin_self (tree);
22548
22549 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22550 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22551 that it points to the node itself, thus indicating that the node is its
22552 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22553 the given node is NULL, recursively descend the decl/block tree which
22554 it is the root of, and for each other ..._DECL or BLOCK node contained
22555 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22556 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22557 values to point to themselves. */
22558
22559 static void
22560 set_block_origin_self (tree stmt)
22561 {
22562 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22563 {
22564 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22565
22566 {
22567 tree local_decl;
22568
22569 for (local_decl = BLOCK_VARS (stmt);
22570 local_decl != NULL_TREE;
22571 local_decl = DECL_CHAIN (local_decl))
22572 /* Do not recurse on nested functions since the inlining status
22573 of parent and child can be different as per the DWARF spec. */
22574 if (TREE_CODE (local_decl) != FUNCTION_DECL
22575 && !DECL_EXTERNAL (local_decl))
22576 set_decl_origin_self (local_decl);
22577 }
22578
22579 {
22580 tree subblock;
22581
22582 for (subblock = BLOCK_SUBBLOCKS (stmt);
22583 subblock != NULL_TREE;
22584 subblock = BLOCK_CHAIN (subblock))
22585 set_block_origin_self (subblock); /* Recurse. */
22586 }
22587 }
22588 }
22589
22590 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22591 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22592    node so that it points to the node itself, thus indicating that the
22593 node represents its own (abstract) origin. Additionally, if the
22594 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22595    the decl/block tree of which the given node is the root, and for
22596 each other ..._DECL or BLOCK node contained therein whose
22597 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22598 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22599 point to themselves. */
22600
22601 static void
22602 set_decl_origin_self (tree decl)
22603 {
22604 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22605 {
22606 DECL_ABSTRACT_ORIGIN (decl) = decl;
22607 if (TREE_CODE (decl) == FUNCTION_DECL)
22608 {
22609 tree arg;
22610
22611 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22612 DECL_ABSTRACT_ORIGIN (arg) = arg;
22613 if (DECL_INITIAL (decl) != NULL_TREE
22614 && DECL_INITIAL (decl) != error_mark_node)
22615 set_block_origin_self (DECL_INITIAL (decl));
22616 }
22617 }
22618 }
22619 \f
22620 /* Mark the early DIE for DECL as the abstract instance. */
22621
22622 static void
22623 dwarf2out_abstract_function (tree decl)
22624 {
22625 dw_die_ref old_die;
22626
22627 /* Make sure we have the actual abstract inline, not a clone. */
22628 decl = DECL_ORIGIN (decl);
22629
22630 if (DECL_IGNORED_P (decl))
22631 return;
22632
22633 /* In LTO we're all set. We already created abstract instances
22634 early and we want to avoid creating a concrete instance of that
22635 if we don't output it. */
22636 if (in_lto_p)
22637 return;
22638
22639 old_die = lookup_decl_die (decl);
22640 gcc_assert (old_die != NULL);
22641 if (get_AT (old_die, DW_AT_inline))
22642 /* We've already generated the abstract instance. */
22643 return;
22644
22645 /* Go ahead and put DW_AT_inline on the DIE. */
22646 if (DECL_DECLARED_INLINE_P (decl))
22647 {
22648 if (cgraph_function_possibly_inlined_p (decl))
22649 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22650 else
22651 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22652 }
22653 else
22654 {
22655 if (cgraph_function_possibly_inlined_p (decl))
22656 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22657 else
22658 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22659 }
22660
22661 if (DECL_DECLARED_INLINE_P (decl)
22662 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22663 add_AT_flag (old_die, DW_AT_artificial, 1);
22664
22665 set_decl_origin_self (decl);
22666 }
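/* Editor's illustration (a hedged sketch, not part of the original source):
   for

	static inline int twice (int x) { return 2 * x; }

   that the inliner does inline somewhere, the early DIE is marked with
   DW_AT_inline = DW_INL_declared_inlined; if the function had not been
   declared inline but was inlined anyway, it would get DW_INL_inlined
   instead, per the DECL_DECLARED_INLINE_P / possibly-inlined checks above.  */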
22667
22668 /* Helper function of premark_used_types() which gets called through
22669 htab_traverse.
22670
22671    Marks the DIE of the given TYPE as perennial, so it never gets
22672 marked as unused by prune_unused_types. */
22673
22674 bool
22675 premark_used_types_helper (tree const &type, void *)
22676 {
22677 dw_die_ref die;
22678
22679 die = lookup_type_die (type);
22680 if (die != NULL)
22681 die->die_perennial_p = 1;
22682 return true;
22683 }
22684
22685 /* Helper function of premark_types_used_by_global_vars which gets called
22686 through htab_traverse.
22687
22688 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22689 marked as unused by prune_unused_types. The DIE of the type is marked
22690 only if the global variable using the type will actually be emitted. */
22691
22692 int
22693 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22694 void *)
22695 {
22696 struct types_used_by_vars_entry *entry;
22697 dw_die_ref die;
22698
22699 entry = (struct types_used_by_vars_entry *) *slot;
22700 gcc_assert (entry->type != NULL
22701 && entry->var_decl != NULL);
22702 die = lookup_type_die (entry->type);
22703 if (die)
22704 {
22705 /* Ask cgraph if the global variable really is to be emitted.
22706 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22707 varpool_node *node = varpool_node::get (entry->var_decl);
22708 if (node && node->definition)
22709 {
22710 die->die_perennial_p = 1;
22711 /* Keep the parent DIEs as well. */
22712 while ((die = die->die_parent) && die->die_perennial_p == 0)
22713 die->die_perennial_p = 1;
22714 }
22715 }
22716 return 1;
22717 }
22718
22719 /* Mark all members of used_types_hash as perennial. */
22720
22721 static void
22722 premark_used_types (struct function *fun)
22723 {
22724 if (fun && fun->used_types_hash)
22725 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22726 }
22727
22728 /* Mark all members of types_used_by_vars_hash as perennial.  */
22729
22730 static void
22731 premark_types_used_by_global_vars (void)
22732 {
22733 if (types_used_by_vars_hash)
22734 types_used_by_vars_hash
22735 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22736 }
22737
22738 /* Mark all variables used by the symtab as perennial. */
22739
22740 static void
22741 premark_used_variables (void)
22742 {
22743 /* Mark DIEs in the symtab as used. */
22744 varpool_node *var;
22745 FOR_EACH_VARIABLE (var)
22746 {
22747 dw_die_ref die = lookup_decl_die (var->decl);
22748 if (die)
22749 die->die_perennial_p = 1;
22750 }
22751 }
22752
22753 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22754 for CA_LOC call arg loc node. */
22755
22756 static dw_die_ref
22757 gen_call_site_die (tree decl, dw_die_ref subr_die,
22758 struct call_arg_loc_node *ca_loc)
22759 {
22760 dw_die_ref stmt_die = NULL, die;
22761 tree block = ca_loc->block;
22762
22763 while (block
22764 && block != DECL_INITIAL (decl)
22765 && TREE_CODE (block) == BLOCK)
22766 {
22767 stmt_die = lookup_block_die (block);
22768 if (stmt_die)
22769 break;
22770 block = BLOCK_SUPERCONTEXT (block);
22771 }
22772 if (stmt_die == NULL)
22773 stmt_die = subr_die;
22774 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22775 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22776 if (ca_loc->tail_call_p)
22777 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22778 if (ca_loc->symbol_ref)
22779 {
22780 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22781 if (tdie)
22782 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22783 else
22784 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22785 false);
22786 }
22787 return die;
22788 }
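/* Editor's illustration (a hedged sketch, not part of the original source):
   for a plain call such as

	foo (42);

   the DIE built above is a DW_TAG_call_site child of the innermost enclosing
   block DIE (falling back to the subprogram DIE), carrying
   DW_AT_call_return_pc for the return-address label and, when the callee is
   statically known, DW_AT_call_origin referring to foo's DIE; if no DIE is
   found for the callee, DW_AT_call_origin holds the callee's address.  */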
22789
22790 /* Generate a DIE to represent a declared function (either file-scope or
22791 block-local). */
22792
22793 static void
22794 gen_subprogram_die (tree decl, dw_die_ref context_die)
22795 {
22796 tree origin = decl_ultimate_origin (decl);
22797 dw_die_ref subr_die;
22798 dw_die_ref old_die = lookup_decl_die (decl);
22799
22800 /* This function gets called multiple times for different stages of
22801 the debug process. For example, for func() in this code:
22802
22803 namespace S
22804 {
22805 void func() { ... }
22806 }
22807
22808 ...we get called 4 times. Twice in early debug and twice in
22809 late debug:
22810
22811 Early debug
22812 -----------
22813
22814 1. Once while generating func() within the namespace. This is
22815 the declaration. The declaration bit below is set, as the
22816 context is the namespace.
22817
22818 A new DIE will be generated with DW_AT_declaration set.
22819
22820 2. Once for func() itself. This is the specification. The
22821 declaration bit below is clear as the context is the CU.
22822
22823 We will use the cached DIE from (1) to create a new DIE with
22824 DW_AT_specification pointing to the declaration in (1).
22825
22826 Late debug via rest_of_handle_final()
22827 -------------------------------------
22828
22829      3. Once while generating func() within the namespace.  This is also the
22830 declaration, as in (1), but this time we will early exit below
22831 as we have a cached DIE and a declaration needs no additional
22832 annotations (no locations), as the source declaration line
22833 info is enough.
22834
22835 4. Once for func() itself. As in (2), this is the specification,
22836 but this time we will re-use the cached DIE, and just annotate
22837 it with the location information that should now be available.
22838
22839 For something without namespaces, but with abstract instances, we
22840    are also called multiple times:
22841
22842 class Base
22843 {
22844 public:
22845 Base (); // constructor declaration (1)
22846 };
22847
22848 Base::Base () { } // constructor specification (2)
22849
22850 Early debug
22851 -----------
22852
22853 1. Once for the Base() constructor by virtue of it being a
22854 member of the Base class. This is done via
22855 rest_of_type_compilation.
22856
22857 This is a declaration, so a new DIE will be created with
22858 DW_AT_declaration.
22859
22860 2. Once for the Base() constructor definition, but this time
22861 while generating the abstract instance of the base
22862 constructor (__base_ctor) which is being generated via early
22863 debug of reachable functions.
22864
22865 Even though we have a cached version of the declaration (1),
22866 we will create a DW_AT_specification of the declaration DIE
22867 in (1).
22868
22869 3. Once for the __base_ctor itself, but this time, we generate
22870       a DW_AT_abstract_origin version of the DW_AT_specification in
22871 (2).
22872
22873 Late debug via rest_of_handle_final
22874 -----------------------------------
22875
22876 4. One final time for the __base_ctor (which will have a cached
22877      DIE with DW_AT_abstract_origin created in (3)).  This time,
22878 we will just annotate the location information now
22879 available.
22880 */
22881 int declaration = (current_function_decl != decl
22882 || class_or_namespace_scope_p (context_die));
22883
22884 /* A declaration that has been previously dumped needs no
22885 additional information. */
22886 if (old_die && declaration)
22887 return;
22888
22889 /* Now that the C++ front end lazily declares artificial member fns, we
22890 might need to retrofit the declaration into its class. */
22891 if (!declaration && !origin && !old_die
22892 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22893 && !class_or_namespace_scope_p (context_die)
22894 && debug_info_level > DINFO_LEVEL_TERSE)
22895 old_die = force_decl_die (decl);
22896
22897 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22898 if (origin != NULL)
22899 {
22900 gcc_assert (!declaration || local_scope_p (context_die));
22901
22902 /* Fixup die_parent for the abstract instance of a nested
22903 inline function. */
22904 if (old_die && old_die->die_parent == NULL)
22905 add_child_die (context_die, old_die);
22906
22907 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22908 {
22909 /* If we have a DW_AT_abstract_origin we have a working
22910 cached version. */
22911 subr_die = old_die;
22912 }
22913 else
22914 {
22915 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22916 add_abstract_origin_attribute (subr_die, origin);
22917 /* This is where the actual code for a cloned function is.
22918 Let's emit linkage name attribute for it. This helps
22919 	     debuggers to, e.g., set breakpoints into
22920 constructors/destructors when the user asks "break
22921 K::K". */
22922 add_linkage_name (subr_die, decl);
22923 }
22924 }
22925 /* A cached copy, possibly from early dwarf generation. Reuse as
22926 much as possible. */
22927 else if (old_die)
22928 {
22929 if (!get_AT_flag (old_die, DW_AT_declaration)
22930 /* We can have a normal definition following an inline one in the
22931 case of redefinition of GNU C extern inlines.
22932 It seems reasonable to use AT_specification in this case. */
22933 && !get_AT (old_die, DW_AT_inline))
22934 {
22935 /* Detect and ignore this case, where we are trying to output
22936 something we have already output. */
22937 if (get_AT (old_die, DW_AT_low_pc)
22938 || get_AT (old_die, DW_AT_ranges))
22939 return;
22940
22941 /* If we have no location information, this must be a
22942 partially generated DIE from early dwarf generation.
22943 Fall through and generate it. */
22944 }
22945
22946 /* If the definition comes from the same place as the declaration,
22947 maybe use the old DIE. We always want the DIE for this function
22948 that has the *_pc attributes to be under comp_unit_die so the
22949 debugger can find it. We also need to do this for abstract
22950 instances of inlines, since the spec requires the out-of-line copy
22951 to have the same parent. For local class methods, this doesn't
22952 apply; we just use the old DIE. */
22953 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22954 struct dwarf_file_data * file_index = lookup_filename (s.file);
22955 if (((is_unit_die (old_die->die_parent)
22956 /* This condition fixes the inconsistency/ICE with the
22957 following Fortran test (or some derivative thereof) while
22958 building libgfortran:
22959
22960 module some_m
22961 contains
22962 logical function funky (FLAG)
22963 funky = .true.
22964 end function
22965 end module
22966 */
22967 || (old_die->die_parent
22968 && old_die->die_parent->die_tag == DW_TAG_module)
22969 || local_scope_p (old_die->die_parent)
22970 || context_die == NULL)
22971 && (DECL_ARTIFICIAL (decl)
22972 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22973 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22974 == (unsigned) s.line)
22975 && (!debug_column_info
22976 || s.column == 0
22977 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22978 == (unsigned) s.column)))))
22979 /* With LTO if there's an abstract instance for
22980 the old DIE, this is a concrete instance and
22981 thus re-use the DIE. */
22982 || get_AT (old_die, DW_AT_abstract_origin))
22983 {
22984 subr_die = old_die;
22985
22986 /* Clear out the declaration attribute, but leave the
22987 parameters so they can be augmented with location
22988 information later. Unless this was a declaration, in
22989 which case, wipe out the nameless parameters and recreate
22990 them further down. */
22991 if (remove_AT (subr_die, DW_AT_declaration))
22992 {
22993
22994 remove_AT (subr_die, DW_AT_object_pointer);
22995 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22996 }
22997 }
22998 /* Make a specification pointing to the previously built
22999 declaration. */
23000 else
23001 {
23002 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
23003 add_AT_specification (subr_die, old_die);
23004 add_pubname (decl, subr_die);
23005 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23006 add_AT_file (subr_die, DW_AT_decl_file, file_index);
23007 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23008 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
23009 if (debug_column_info
23010 && s.column
23011 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23012 != (unsigned) s.column))
23013 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
23014
23015 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
23016 emit the real type on the definition die. */
23017 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
23018 {
23019 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
23020 if (die == auto_die || die == decltype_auto_die)
23021 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
23022 TYPE_UNQUALIFIED, false, context_die);
23023 }
23024
23025 /* When we process the method declaration, we haven't seen
23026 the out-of-class defaulted definition yet, so we have to
23027 recheck now. */
23028 if ((dwarf_version >= 5 || ! dwarf_strict)
23029 && !get_AT (subr_die, DW_AT_defaulted))
23030 {
23031 int defaulted
23032 = lang_hooks.decls.decl_dwarf_attribute (decl,
23033 DW_AT_defaulted);
23034 if (defaulted != -1)
23035 {
23036 /* Other values must have been handled before. */
23037 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
23038 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
23039 }
23040 }
23041 }
23042 }
23043 /* Create a fresh DIE for anything else. */
23044 else
23045 {
23046 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
23047
23048 if (TREE_PUBLIC (decl))
23049 add_AT_flag (subr_die, DW_AT_external, 1);
23050
23051 add_name_and_src_coords_attributes (subr_die, decl);
23052 add_pubname (decl, subr_die);
23053 if (debug_info_level > DINFO_LEVEL_TERSE)
23054 {
23055 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
23056 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
23057 TYPE_UNQUALIFIED, false, context_die);
23058 }
23059
23060 add_pure_or_virtual_attribute (subr_die, decl);
23061 if (DECL_ARTIFICIAL (decl))
23062 add_AT_flag (subr_die, DW_AT_artificial, 1);
23063
23064 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
23065 add_AT_flag (subr_die, DW_AT_noreturn, 1);
23066
23067 add_alignment_attribute (subr_die, decl);
23068
23069 add_accessibility_attribute (subr_die, decl);
23070 }
23071
23072   /* Unless we have an existing non-declaration DIE, equate DECL to the
23073      new DIE.  */
23074 if (!old_die || is_declaration_die (old_die))
23075 equate_decl_number_to_die (decl, subr_die);
23076
23077 if (declaration)
23078 {
23079 if (!old_die || !get_AT (old_die, DW_AT_inline))
23080 {
23081 add_AT_flag (subr_die, DW_AT_declaration, 1);
23082
23083 /* If this is an explicit function declaration then generate
23084 a DW_AT_explicit attribute. */
23085 if ((dwarf_version >= 3 || !dwarf_strict)
23086 && lang_hooks.decls.decl_dwarf_attribute (decl,
23087 DW_AT_explicit) == 1)
23088 add_AT_flag (subr_die, DW_AT_explicit, 1);
23089
23090 /* If this is a C++11 deleted special function member then generate
23091 a DW_AT_deleted attribute. */
23092 if ((dwarf_version >= 5 || !dwarf_strict)
23093 && lang_hooks.decls.decl_dwarf_attribute (decl,
23094 DW_AT_deleted) == 1)
23095 add_AT_flag (subr_die, DW_AT_deleted, 1);
23096
23097 /* If this is a C++11 defaulted special function member then
23098 generate a DW_AT_defaulted attribute. */
23099 if (dwarf_version >= 5 || !dwarf_strict)
23100 {
23101 int defaulted
23102 = lang_hooks.decls.decl_dwarf_attribute (decl,
23103 DW_AT_defaulted);
23104 if (defaulted != -1)
23105 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
23106 }
23107
23108 /* If this is a C++11 non-static member function with & ref-qualifier
23109 then generate a DW_AT_reference attribute. */
23110 if ((dwarf_version >= 5 || !dwarf_strict)
23111 && lang_hooks.decls.decl_dwarf_attribute (decl,
23112 DW_AT_reference) == 1)
23113 add_AT_flag (subr_die, DW_AT_reference, 1);
23114
23115 /* If this is a C++11 non-static member function with &&
23116 ref-qualifier then generate a DW_AT_reference attribute. */
23117 if ((dwarf_version >= 5 || !dwarf_strict)
23118 && lang_hooks.decls.decl_dwarf_attribute (decl,
23119 DW_AT_rvalue_reference)
23120 == 1)
23121 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
23122 }
23123 }
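	  /* Editor's illustration (a hedged sketch, not part of the original
	     source): C++11 member declarations such as

		struct S {
		  S (const S &) = delete;		/* DW_AT_deleted */
		  S &operator= (const S &) = default;	/* DW_AT_defaulted */
		  void f () &;				/* DW_AT_reference */
		  void g () &&;				/* DW_AT_rvalue_reference */
		};

	     make the lang hook report the corresponding attribute, and the
	     flags above are attached to the declaration DIE (for DWARF 5, or
	     for earlier versions when not in strict mode).  */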
23124   /* For non-DECL_EXTERNAL decls, if range information is available, fill
23125 the DIE with it. */
23126 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
23127 {
23128 HOST_WIDE_INT cfa_fb_offset;
23129
23130 struct function *fun = DECL_STRUCT_FUNCTION (decl);
23131
23132 if (!crtl->has_bb_partition)
23133 {
23134 dw_fde_ref fde = fun->fde;
23135 if (fde->dw_fde_begin)
23136 {
23137 /* We have already generated the labels. */
23138 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23139 fde->dw_fde_end, false);
23140 }
23141 else
23142 {
23143 /* Create start/end labels and add the range. */
23144 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
23145 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
23146 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
23147 current_function_funcdef_no);
23148 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
23149 current_function_funcdef_no);
23150 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
23151 false);
23152 }
23153
23154 #if VMS_DEBUGGING_INFO
23155 /* HP OpenVMS Industry Standard 64: DWARF Extensions
23156 Section 2.3 Prologue and Epilogue Attributes:
23157 When a breakpoint is set on entry to a function, it is generally
23158 desirable for execution to be suspended, not on the very first
23159 instruction of the function, but rather at a point after the
23160 function's frame has been set up, after any language defined local
23161 declaration processing has been completed, and before execution of
23162 the first statement of the function begins. Debuggers generally
23163 cannot properly determine where this point is. Similarly for a
23164 breakpoint set on exit from a function. The prologue and epilogue
23165 attributes allow a compiler to communicate the location(s) to use. */
23166
23167 {
23168 if (fde->dw_fde_vms_end_prologue)
23169 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
23170 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
23171
23172 if (fde->dw_fde_vms_begin_epilogue)
23173 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
23174 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
23175 }
23176 #endif
23177
23178 }
23179 else
23180 {
23181 /* Generate pubnames entries for the split function code ranges. */
23182 dw_fde_ref fde = fun->fde;
23183
23184 if (fde->dw_fde_second_begin)
23185 {
23186 if (dwarf_version >= 3 || !dwarf_strict)
23187 {
23188 /* We should use ranges for non-contiguous code section
23189 addresses. Use the actual code range for the initial
23190 section, since the HOT/COLD labels might precede an
23191 alignment offset. */
23192 bool range_list_added = false;
23193 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
23194 fde->dw_fde_end, &range_list_added,
23195 false);
23196 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
23197 fde->dw_fde_second_end,
23198 &range_list_added, false);
23199 if (range_list_added)
23200 add_ranges (NULL);
23201 }
23202 else
23203 {
23204 		  /* There is no real support in DWARF 2 for this, so we make
23205 		     a work-around.  First, emit the pub name for the segment
23206 		     containing the function label.  Then make and emit a
23207 		     simplified subprogram DIE for the second segment with the
23208 		     name prefixed by __second_sect_of_.  We use the same
23209 		     linkage name for the second DIE so that gdb will find both
23210 		     sections when given "b foo".  */
23211 const char *name = NULL;
23212 tree decl_name = DECL_NAME (decl);
23213 dw_die_ref seg_die;
23214
23215 /* Do the 'primary' section. */
23216 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23217 fde->dw_fde_end, false);
23218
23219 /* Build a minimal DIE for the secondary section. */
23220 seg_die = new_die (DW_TAG_subprogram,
23221 subr_die->die_parent, decl);
23222
23223 if (TREE_PUBLIC (decl))
23224 add_AT_flag (seg_die, DW_AT_external, 1);
23225
23226 if (decl_name != NULL
23227 && IDENTIFIER_POINTER (decl_name) != NULL)
23228 {
23229 name = dwarf2_name (decl, 1);
23230 if (! DECL_ARTIFICIAL (decl))
23231 add_src_coords_attributes (seg_die, decl);
23232
23233 add_linkage_name (seg_die, decl);
23234 }
23235 gcc_assert (name != NULL);
23236 add_pure_or_virtual_attribute (seg_die, decl);
23237 if (DECL_ARTIFICIAL (decl))
23238 add_AT_flag (seg_die, DW_AT_artificial, 1);
23239
23240 name = concat ("__second_sect_of_", name, NULL);
23241 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
23242 fde->dw_fde_second_end, false);
23243 add_name_attribute (seg_die, name);
23244 if (want_pubnames ())
23245 add_pubname_string (name, seg_die);
23246 }
23247 }
23248 else
23249 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
23250 false);
23251 }
23252
23253 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
23254
23255 /* We define the "frame base" as the function's CFA. This is more
23256 convenient for several reasons: (1) It's stable across the prologue
23257 and epilogue, which makes it better than just a frame pointer,
23258 (2) With dwarf3, there exists a one-byte encoding that allows us
23259 to reference the .debug_frame data by proxy, but failing that,
23260 (3) We can at least reuse the code inspection and interpretation
23261 code that determines the CFA position at various points in the
23262 function. */
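	  /* Editor's illustration (a hedged sketch, not part of the original
	     source): the DWARF 3 branch below emits the one-byte expression

		DW_AT_frame_base: DW_OP_call_frame_cfa

	     while the fallback branch builds a location list (or a single
	     expression) that spells out the register-plus-offset rule for the
	     CFA at each point in the function.  */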
23263 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
23264 {
23265 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
23266 add_AT_loc (subr_die, DW_AT_frame_base, op);
23267 }
23268 else
23269 {
23270 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
23271 if (list->dw_loc_next)
23272 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
23273 else
23274 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
23275 }
23276
23277 /* Compute a displacement from the "steady-state frame pointer" to
23278 the CFA. The former is what all stack slots and argument slots
23279 will reference in the rtl; the latter is what we've told the
23280 debugger about. We'll need to adjust all frame_base references
23281 by this displacement. */
23282 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
23283
23284 if (fun->static_chain_decl)
23285 {
23286 /* DWARF requires here a location expression that computes the
23287 address of the enclosing subprogram's frame base. The machinery
23288 in tree-nested.c is supposed to store this specific address in the
23289 last field of the FRAME record. */
23290 const tree frame_type
23291 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
23292 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
23293
23294 tree fb_expr
23295 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
23296 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
23297 fb_expr, fb_decl, NULL_TREE);
23298
23299 add_AT_location_description (subr_die, DW_AT_static_link,
23300 loc_list_from_tree (fb_expr, 0, NULL));
23301 }
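	  /* Editor's illustration (a hedged sketch, not part of the original
	     source): for a GNU C nested function

		int outer (int x)
		{
		  int inner (void) { return x; }
		  return inner ();
		}

	     the DW_AT_static_link added above lets the debugger compute the
	     frame base of the enclosing activation of outer, read from the
	     last field of the FRAME record built by tree-nested.c.  */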
23302
23303 resolve_variable_values ();
23304 }
23305
23306   /* Generate child DIEs for template parameters.  */
23307 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
23308 gen_generic_params_dies (decl);
23309
23310 /* Now output descriptions of the arguments for this function. This gets
23311      (unnecessarily?) complex because the DECL_ARGUMENTS list for a
23312      FUNCTION_DECL doesn't indicate cases where there was a trailing
23313 `...' at the end of the formal parameter list. In order to find out if
23314 there was a trailing ellipsis or not, we must instead look at the type
23315 associated with the FUNCTION_DECL. This will be a node of type
23316 FUNCTION_TYPE. If the chain of type nodes hanging off of this
23317 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
23318 an ellipsis at the end. */
23319
23320 /* In the case where we are describing a mere function declaration, all we
23321 need to do here (and all we *can* do here) is to describe the *types* of
23322 its formal parameters. */
23323 if (debug_info_level <= DINFO_LEVEL_TERSE)
23324 ;
23325 else if (declaration)
23326 gen_formal_types_die (decl, subr_die);
23327 else
23328 {
23329 /* Generate DIEs to represent all known formal parameters. */
23330 tree parm = DECL_ARGUMENTS (decl);
23331 tree generic_decl = early_dwarf
23332 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23333 tree generic_decl_parm = generic_decl
23334 ? DECL_ARGUMENTS (generic_decl)
23335 : NULL;
23336
23337 /* Now we want to walk the list of parameters of the function and
23338 emit their relevant DIEs.
23339
23340 We consider the case of DECL being an instance of a generic function
23341 as well as it being a normal function.
23342
23343 If DECL is an instance of a generic function we walk the
23344 parameters of the generic function declaration _and_ the parameters of
23345 DECL itself. This is useful because we want to emit specific DIEs for
23346 function parameter packs and those are declared as part of the
23347 generic function declaration. In that particular case,
23348 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23349 That DIE has children DIEs representing the set of arguments
23350 of the pack. Note that the set of pack arguments can be empty.
23351 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
23352 	 child DIEs.
23353
23354 Otherwise, we just consider the parameters of DECL. */
23355 while (generic_decl_parm || parm)
23356 {
23357 if (generic_decl_parm
23358 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23359 gen_formal_parameter_pack_die (generic_decl_parm,
23360 parm, subr_die,
23361 &parm);
23362 else if (parm)
23363 {
23364 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23365
23366 if (early_dwarf
23367 && parm == DECL_ARGUMENTS (decl)
23368 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23369 && parm_die
23370 && (dwarf_version >= 3 || !dwarf_strict))
23371 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23372
23373 parm = DECL_CHAIN (parm);
23374 }
23375
23376 if (generic_decl_parm)
23377 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23378 }
23379
23380 /* Decide whether we need an unspecified_parameters DIE at the end.
23381 	 There are two more cases to do this for: 1) the ANSI ... declaration -
23382 this is detectable when the end of the arg list is not a
23383 void_type_node 2) an unprototyped function declaration (not a
23384 definition). This just means that we have no info about the
23385 parameters at all. */
23386 if (early_dwarf)
23387 {
23388 if (prototype_p (TREE_TYPE (decl)))
23389 {
23390 	      /* This is the prototyped case, check for a trailing `...'.  */
23391 if (stdarg_p (TREE_TYPE (decl)))
23392 gen_unspecified_parameters_die (decl, subr_die);
23393 }
23394 else if (DECL_INITIAL (decl) == NULL_TREE)
23395 gen_unspecified_parameters_die (decl, subr_die);
23396 }
23397 }
23398
23399 if (subr_die != old_die)
23400 /* Add the calling convention attribute if requested. */
23401 add_calling_convention_attribute (subr_die, decl);
23402
23403 /* Output Dwarf info for all of the stuff within the body of the function
23404 (if it has one - it may be just a declaration).
23405
23406 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23407 a function. This BLOCK actually represents the outermost binding contour
23408 for the function, i.e. the contour in which the function's formal
23409 parameters and labels get declared. Curiously, it appears that the front
23410 end doesn't actually put the PARM_DECL nodes for the current function onto
23411 the BLOCK_VARS list for this outer scope, but are strung off of the
23412 DECL_ARGUMENTS list for the function instead.
23413
23414 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
23415 the LABEL_DECL nodes for the function however, and we output DWARF info
23416 for those in decls_for_scope. Just within the `outer_scope' there will be
23417 a BLOCK node representing the function's outermost pair of curly braces,
23418 and any blocks used for the base and member initializers of a C++
23419 constructor function. */
23420 tree outer_scope = DECL_INITIAL (decl);
23421 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23422 {
23423 int call_site_note_count = 0;
23424 int tail_call_site_note_count = 0;
23425
23426 /* Emit a DW_TAG_variable DIE for a named return value. */
23427 if (DECL_NAME (DECL_RESULT (decl)))
23428 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23429
23430 /* The first time through decls_for_scope we will generate the
23431 DIEs for the locals. The second time, we fill in the
23432 location info. */
23433 decls_for_scope (outer_scope, subr_die);
23434
23435 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23436 {
23437 struct call_arg_loc_node *ca_loc;
23438 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23439 {
23440 dw_die_ref die = NULL;
23441 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23442 rtx arg, next_arg;
23443 tree arg_decl = NULL_TREE;
23444
23445 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23446 ? XEXP (ca_loc->call_arg_loc_note, 0)
23447 : NULL_RTX);
23448 arg; arg = next_arg)
23449 {
23450 dw_loc_descr_ref reg, val;
23451 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23452 dw_die_ref cdie, tdie = NULL;
23453
23454 next_arg = XEXP (arg, 1);
23455 if (REG_P (XEXP (XEXP (arg, 0), 0))
23456 && next_arg
23457 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23458 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23459 && REGNO (XEXP (XEXP (arg, 0), 0))
23460 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23461 next_arg = XEXP (next_arg, 1);
23462 if (mode == VOIDmode)
23463 {
23464 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23465 if (mode == VOIDmode)
23466 mode = GET_MODE (XEXP (arg, 0));
23467 }
23468 if (mode == VOIDmode || mode == BLKmode)
23469 continue;
23470 /* Get dynamic information about call target only if we
23471 have no static information: we cannot generate both
23472 DW_AT_call_origin and DW_AT_call_target
23473 attributes. */
23474 if (ca_loc->symbol_ref == NULL_RTX)
23475 {
23476 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23477 {
23478 tloc = XEXP (XEXP (arg, 0), 1);
23479 continue;
23480 }
23481 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23482 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23483 {
23484 tlocc = XEXP (XEXP (arg, 0), 1);
23485 continue;
23486 }
23487 }
23488 reg = NULL;
23489 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23490 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23491 VAR_INIT_STATUS_INITIALIZED);
23492 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23493 {
23494 rtx mem = XEXP (XEXP (arg, 0), 0);
23495 reg = mem_loc_descriptor (XEXP (mem, 0),
23496 get_address_mode (mem),
23497 GET_MODE (mem),
23498 VAR_INIT_STATUS_INITIALIZED);
23499 }
23500 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23501 == DEBUG_PARAMETER_REF)
23502 {
23503 tree tdecl
23504 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23505 tdie = lookup_decl_die (tdecl);
23506 if (tdie == NULL)
23507 continue;
23508 arg_decl = tdecl;
23509 }
23510 else
23511 continue;
23512 if (reg == NULL
23513 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23514 != DEBUG_PARAMETER_REF)
23515 continue;
23516 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23517 VOIDmode,
23518 VAR_INIT_STATUS_INITIALIZED);
23519 if (val == NULL)
23520 continue;
23521 if (die == NULL)
23522 die = gen_call_site_die (decl, subr_die, ca_loc);
23523 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23524 NULL_TREE);
23525 add_desc_attribute (cdie, arg_decl);
23526 if (reg != NULL)
23527 add_AT_loc (cdie, DW_AT_location, reg);
23528 else if (tdie != NULL)
23529 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23530 tdie);
23531 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23532 if (next_arg != XEXP (arg, 1))
23533 {
23534 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23535 if (mode == VOIDmode)
23536 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23537 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23538 0), 1),
23539 mode, VOIDmode,
23540 VAR_INIT_STATUS_INITIALIZED);
23541 if (val != NULL)
23542 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23543 val);
23544 }
23545 }
23546 if (die == NULL
23547 && (ca_loc->symbol_ref || tloc))
23548 die = gen_call_site_die (decl, subr_die, ca_loc);
23549 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23550 {
23551 dw_loc_descr_ref tval = NULL;
23552
23553 if (tloc != NULL_RTX)
23554 tval = mem_loc_descriptor (tloc,
23555 GET_MODE (tloc) == VOIDmode
23556 ? Pmode : GET_MODE (tloc),
23557 VOIDmode,
23558 VAR_INIT_STATUS_INITIALIZED);
23559 if (tval)
23560 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23561 else if (tlocc != NULL_RTX)
23562 {
23563 tval = mem_loc_descriptor (tlocc,
23564 GET_MODE (tlocc) == VOIDmode
23565 ? Pmode : GET_MODE (tlocc),
23566 VOIDmode,
23567 VAR_INIT_STATUS_INITIALIZED);
23568 if (tval)
23569 add_AT_loc (die,
23570 dwarf_AT (DW_AT_call_target_clobbered),
23571 tval);
23572 }
23573 }
23574 if (die != NULL)
23575 {
23576 call_site_note_count++;
23577 if (ca_loc->tail_call_p)
23578 tail_call_site_note_count++;
23579 }
23580 }
23581 }
23582 call_arg_locations = NULL;
23583 call_arg_loc_last = NULL;
23584 if (tail_call_site_count >= 0
23585 && tail_call_site_count == tail_call_site_note_count
23586 && (!dwarf_strict || dwarf_version >= 5))
23587 {
23588 if (call_site_count >= 0
23589 && call_site_count == call_site_note_count)
23590 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23591 else
23592 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23593 }
23594 call_site_count = -1;
23595 tail_call_site_count = -1;
23596 }
23597
23598   /* Mark used types after we have created DIEs for the function's scopes.  */
23599 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23600 }
23601
23602 /* Returns a hash value for X (which really is a die_struct). */
23603
23604 hashval_t
23605 block_die_hasher::hash (die_struct *d)
23606 {
23607 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23608 }
23609
23610 /* Return nonzero if the decl_id and die_parent of die_struct X are the same
23611 as decl_id and die_parent of die_struct Y. */
23612
23613 bool
23614 block_die_hasher::equal (die_struct *x, die_struct *y)
23615 {
23616 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23617 }
23618
23619 /* Hold information about markers for inlined entry points. */
23620 struct GTY ((for_user)) inline_entry_data
23621 {
23622 /* The block that's the inlined_function_outer_scope for an inlined
23623 function. */
23624 tree block;
23625
23626 /* The label at the inlined entry point. */
23627 const char *label_pfx;
23628 unsigned int label_num;
23629
23630 /* The view number to be used as the inlined entry point. */
23631 var_loc_view view;
23632 };
23633
23634 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23635 {
23636 typedef tree compare_type;
23637 static inline hashval_t hash (const inline_entry_data *);
23638 static inline bool equal (const inline_entry_data *, const_tree);
23639 };
23640
23641 /* Hash table routines for inline_entry_data. */
23642
23643 inline hashval_t
23644 inline_entry_data_hasher::hash (const inline_entry_data *data)
23645 {
23646 return htab_hash_pointer (data->block);
23647 }
23648
23649 inline bool
23650 inline_entry_data_hasher::equal (const inline_entry_data *data,
23651 const_tree block)
23652 {
23653 return data->block == block;
23654 }
23655
23656 /* Inlined entry points pending DIE creation in this compilation unit. */
23657
23658 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
23659
23660
23661 /* Return TRUE if DECL, which may have been previously generated as
23662 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23663 true if decl (or its origin) is either an extern declaration or a
23664 class/namespace scoped declaration.
23665
23666 The declare_in_namespace support causes us to get two DIEs for one
23667 variable, both of which are declarations. We want to avoid
23668 considering one to be a specification, so we must test for
23669 DECLARATION and DW_AT_declaration. */
23670 static inline bool
23671 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23672 {
23673 return (old_die && TREE_STATIC (decl) && !declaration
23674 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23675 }
23676
23677 /* Return true if DECL is a local static. */
23678
23679 static inline bool
23680 local_function_static (tree decl)
23681 {
23682 gcc_assert (VAR_P (decl));
23683 return TREE_STATIC (decl)
23684 && DECL_CONTEXT (decl)
23685 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23686 }
23687
23688 /* Generate a DIE to represent a declared data object.
23689 Either DECL or ORIGIN must be non-null. */
23690
23691 static void
23692 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23693 {
23694 HOST_WIDE_INT off = 0;
23695 tree com_decl;
23696 tree decl_or_origin = decl ? decl : origin;
23697 tree ultimate_origin;
23698 dw_die_ref var_die;
23699 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23700 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23701 || class_or_namespace_scope_p (context_die));
23702 bool specialization_p = false;
23703 bool no_linkage_name = false;
23704
23705   /* While C++ inline static data members have definitions inside the
23706      class, force the first DIE to be a declaration, then let gen_member_die
23707      reparent it to the class context and call gen_variable_die again
23708      to create the outside-of-class DIE for the definition.  */
23709 if (!declaration
23710 && old_die == NULL
23711 && decl
23712 && DECL_CONTEXT (decl)
23713 && TYPE_P (DECL_CONTEXT (decl))
23714 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23715 {
23716 declaration = true;
23717 if (dwarf_version < 5)
23718 no_linkage_name = true;
23719 }
23720
23721 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23722 if (decl || ultimate_origin)
23723 origin = ultimate_origin;
23724 com_decl = fortran_common (decl_or_origin, &off);
23725
23726 /* Symbol in common gets emitted as a child of the common block, in the form
23727 of a data member. */
23728 if (com_decl)
23729 {
23730 dw_die_ref com_die;
23731 dw_loc_list_ref loc = NULL;
23732 die_node com_die_arg;
23733
23734 var_die = lookup_decl_die (decl_or_origin);
23735 if (var_die)
23736 {
23737 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23738 {
23739 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23740 if (loc)
23741 {
23742 if (off)
23743 {
23744 /* Optimize the common case. */
23745 if (single_element_loc_list_p (loc)
23746 && loc->expr->dw_loc_opc == DW_OP_addr
23747 && loc->expr->dw_loc_next == NULL
23748 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23749 == SYMBOL_REF)
23750 {
23751 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23752 loc->expr->dw_loc_oprnd1.v.val_addr
23753 = plus_constant (GET_MODE (x), x , off);
23754 }
23755 else
23756 loc_list_plus_const (loc, off);
23757 }
23758 add_AT_location_description (var_die, DW_AT_location, loc);
23759 remove_AT (var_die, DW_AT_declaration);
23760 }
23761 }
23762 return;
23763 }
23764
23765 if (common_block_die_table == NULL)
23766 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23767
23768 com_die_arg.decl_id = DECL_UID (com_decl);
23769 com_die_arg.die_parent = context_die;
23770 com_die = common_block_die_table->find (&com_die_arg);
23771 if (! early_dwarf)
23772 loc = loc_list_from_tree (com_decl, 2, NULL);
23773 if (com_die == NULL)
23774 {
23775 const char *cnam
23776 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23777 die_node **slot;
23778
23779 com_die = new_die (DW_TAG_common_block, context_die, decl);
23780 add_name_and_src_coords_attributes (com_die, com_decl);
23781 if (loc)
23782 {
23783 add_AT_location_description (com_die, DW_AT_location, loc);
23784 /* Avoid sharing the same loc descriptor between
23785 DW_TAG_common_block and DW_TAG_variable. */
23786 loc = loc_list_from_tree (com_decl, 2, NULL);
23787 }
23788 else if (DECL_EXTERNAL (decl_or_origin))
23789 add_AT_flag (com_die, DW_AT_declaration, 1);
23790 if (want_pubnames ())
23791 add_pubname_string (cnam, com_die); /* ??? needed? */
23792 com_die->decl_id = DECL_UID (com_decl);
23793 slot = common_block_die_table->find_slot (com_die, INSERT);
23794 *slot = com_die;
23795 }
23796 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23797 {
23798 add_AT_location_description (com_die, DW_AT_location, loc);
23799 loc = loc_list_from_tree (com_decl, 2, NULL);
23800 remove_AT (com_die, DW_AT_declaration);
23801 }
23802 var_die = new_die (DW_TAG_variable, com_die, decl);
23803 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23804 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23805 decl_quals (decl_or_origin), false,
23806 context_die);
23807 add_alignment_attribute (var_die, decl);
23808 add_AT_flag (var_die, DW_AT_external, 1);
23809 if (loc)
23810 {
23811 if (off)
23812 {
23813 /* Optimize the common case. */
23814 if (single_element_loc_list_p (loc)
23815 && loc->expr->dw_loc_opc == DW_OP_addr
23816 && loc->expr->dw_loc_next == NULL
23817 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23818 {
23819 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23820 loc->expr->dw_loc_oprnd1.v.val_addr
23821 = plus_constant (GET_MODE (x), x, off);
23822 }
23823 else
23824 loc_list_plus_const (loc, off);
23825 }
23826 add_AT_location_description (var_die, DW_AT_location, loc);
23827 }
23828 else if (DECL_EXTERNAL (decl_or_origin))
23829 add_AT_flag (var_die, DW_AT_declaration, 1);
23830 if (decl)
23831 equate_decl_number_to_die (decl, var_die);
23832 return;
23833 }
23834
23835 if (old_die)
23836 {
23837 if (declaration)
23838 {
23839 	  /* A declaration that has been previously dumped needs no
23840 	     further annotations, since it doesn't need a location on
23841 	     the second pass.  */
23842 return;
23843 }
23844 else if (decl_will_get_specification_p (old_die, decl, declaration)
23845 && !get_AT (old_die, DW_AT_specification))
23846 {
23847 /* Fall-thru so we can make a new variable die along with a
23848 DW_AT_specification. */
23849 }
23850 else if (origin && old_die->die_parent != context_die)
23851 {
23852 /* If we will be creating an inlined instance, we need a
23853 new DIE that will get annotated with
23854 DW_AT_abstract_origin. */
23855 gcc_assert (!DECL_ABSTRACT_P (decl));
23856 }
23857 else
23858 {
23859 /* If a DIE was dumped early, it still needs location info.
23860 Skip to where we fill the location bits. */
23861 var_die = old_die;
23862
23863 /* ??? In LTRANS we cannot annotate early created variably
23864 modified type DIEs without copying them and adjusting all
23865 references to them. Thus we dumped them again. Also add a
23866 	     reference to them, but beware of a -g0 compile and -g link,
23867 	     in which case the reference will already be present.  */
23868 tree type = TREE_TYPE (decl_or_origin);
23869 if (in_lto_p
23870 && ! get_AT (var_die, DW_AT_type)
23871 && variably_modified_type_p
23872 (type, decl_function_context (decl_or_origin)))
23873 {
23874 if (decl_by_reference_p (decl_or_origin))
23875 add_type_attribute (var_die, TREE_TYPE (type),
23876 TYPE_UNQUALIFIED, false, context_die);
23877 else
23878 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23879 false, context_die);
23880 }
23881
23882 goto gen_variable_die_location;
23883 }
23884 }
23885
23886 /* For static data members, the declaration in the class is supposed
23887 to have DW_TAG_member tag in DWARF{3,4} and we emit it for compatibility
23888 also in DWARF2; the specification should still be DW_TAG_variable
23889 referencing the DW_TAG_member DIE. */
23890 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23891 var_die = new_die (DW_TAG_member, context_die, decl);
23892 else
23893 var_die = new_die (DW_TAG_variable, context_die, decl);
23894
23895 if (origin != NULL)
23896 add_abstract_origin_attribute (var_die, origin);
23897
23898 /* Loop unrolling can create multiple blocks that refer to the same
23899 static variable, so we must test for the DW_AT_declaration flag.
23900
23901 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23902 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23903 sharing them.
23904
23905 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23906 else if (decl_will_get_specification_p (old_die, decl, declaration))
23907 {
23908 /* This is a definition of a C++ class level static. */
23909 add_AT_specification (var_die, old_die);
23910 specialization_p = true;
23911 if (DECL_NAME (decl))
23912 {
23913 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23914 struct dwarf_file_data * file_index = lookup_filename (s.file);
23915
23916 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23917 add_AT_file (var_die, DW_AT_decl_file, file_index);
23918
23919 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23920 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23921
23922 if (debug_column_info
23923 && s.column
23924 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23925 != (unsigned) s.column))
23926 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23927
23928 if (old_die->die_tag == DW_TAG_member)
23929 add_linkage_name (var_die, decl);
23930 }
23931 }
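  /* Editor's illustration (a hedged sketch, not part of the original source):
     for a C++ static data member

	struct A { static int counter; };
	int A::counter;

     the in-class declaration gets a DW_TAG_member (pre-DWARF 5) or a
     DW_TAG_variable declaration DIE, and the DIE for the out-of-class
     definition built here refers back to it through DW_AT_specification,
     supplementing the source coordinates only where they differ.  */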
23932 else
23933 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23934
23935 if ((origin == NULL && !specialization_p)
23936 || (origin != NULL
23937 && !DECL_ABSTRACT_P (decl_or_origin)
23938 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23939 decl_function_context
23940 (decl_or_origin))))
23941 {
23942 tree type = TREE_TYPE (decl_or_origin);
23943
23944 if (decl_by_reference_p (decl_or_origin))
23945 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23946 context_die);
23947 else
23948 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23949 context_die);
23950 }
23951
23952 if (origin == NULL && !specialization_p)
23953 {
23954 if (TREE_PUBLIC (decl))
23955 add_AT_flag (var_die, DW_AT_external, 1);
23956
23957 if (DECL_ARTIFICIAL (decl))
23958 add_AT_flag (var_die, DW_AT_artificial, 1);
23959
23960 add_alignment_attribute (var_die, decl);
23961
23962 add_accessibility_attribute (var_die, decl);
23963 }
23964
23965 if (declaration)
23966 add_AT_flag (var_die, DW_AT_declaration, 1);
23967
23968 if (decl && (DECL_ABSTRACT_P (decl)
23969 || !old_die || is_declaration_die (old_die)))
23970 equate_decl_number_to_die (decl, var_die);
23971
23972 gen_variable_die_location:
23973 if (! declaration
23974 && (! DECL_ABSTRACT_P (decl_or_origin)
23975 /* Local static vars are shared between all clones/inlines,
23976 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23977 already set. */
23978 || (VAR_P (decl_or_origin)
23979 && TREE_STATIC (decl_or_origin)
23980 && DECL_RTL_SET_P (decl_or_origin))))
23981 {
23982 if (early_dwarf)
23983 add_pubname (decl_or_origin, var_die);
23984 else
23985 add_location_or_const_value_attribute (var_die, decl_or_origin,
23986 decl == NULL);
23987 }
23988 else
23989 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23990
23991 if ((dwarf_version >= 4 || !dwarf_strict)
23992 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23993 DW_AT_const_expr) == 1
23994 && !get_AT (var_die, DW_AT_const_expr)
23995 && !specialization_p)
23996 add_AT_flag (var_die, DW_AT_const_expr, 1);
23997
23998 if (!dwarf_strict)
23999 {
24000 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
24001 DW_AT_inline);
24002 if (inl != -1
24003 && !get_AT (var_die, DW_AT_inline)
24004 && !specialization_p)
24005 add_AT_unsigned (var_die, DW_AT_inline, inl);
24006 }
24007 }
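/* Editor's illustration (a hedged sketch, not part of the original source):
   a Fortran common block such as

	integer i, j
	common /blk/ i, j

   is represented by a DW_TAG_common_block DIE named `blk' whose children are
   DW_TAG_variable DIEs for `i' and `j'; each member's DW_AT_location is the
   common block's address plus the member's byte offset, which is what the
   `off' adjustment in gen_variable_die implements.  */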
24008
24009 /* Generate a DIE to represent a named constant. */
24010
24011 static void
24012 gen_const_die (tree decl, dw_die_ref context_die)
24013 {
24014 dw_die_ref const_die;
24015 tree type = TREE_TYPE (decl);
24016
24017 const_die = lookup_decl_die (decl);
24018 if (const_die)
24019 return;
24020
24021 const_die = new_die (DW_TAG_constant, context_die, decl);
24022 equate_decl_number_to_die (decl, const_die);
24023 add_name_and_src_coords_attributes (const_die, decl);
24024 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
24025 if (TREE_PUBLIC (decl))
24026 add_AT_flag (const_die, DW_AT_external, 1);
24027 if (DECL_ARTIFICIAL (decl))
24028 add_AT_flag (const_die, DW_AT_artificial, 1);
24029 tree_add_const_value_attribute_for_decl (const_die, decl);
24030 }
24031
24032 /* Generate a DIE to represent a label identifier. */
24033
24034 static void
24035 gen_label_die (tree decl, dw_die_ref context_die)
24036 {
24037 tree origin = decl_ultimate_origin (decl);
24038 dw_die_ref lbl_die = lookup_decl_die (decl);
24039 rtx insn;
24040 char label[MAX_ARTIFICIAL_LABEL_BYTES];
24041
24042 if (!lbl_die)
24043 {
24044 lbl_die = new_die (DW_TAG_label, context_die, decl);
24045 equate_decl_number_to_die (decl, lbl_die);
24046
24047 if (origin != NULL)
24048 add_abstract_origin_attribute (lbl_die, origin);
24049 else
24050 add_name_and_src_coords_attributes (lbl_die, decl);
24051 }
24052
24053 if (DECL_ABSTRACT_P (decl))
24054 equate_decl_number_to_die (decl, lbl_die);
24055 else if (! early_dwarf)
24056 {
24057 insn = DECL_RTL_IF_SET (decl);
24058
24059 	  /* Deleted labels are programmer-specified labels which have been
24060 eliminated because of various optimizations. We still emit them
24061 here so that it is possible to put breakpoints on them. */
24062 if (insn
24063 && (LABEL_P (insn)
24064 || ((NOTE_P (insn)
24065 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
24066 {
24067 /* When optimization is enabled (via -O) some parts of the compiler
24068 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
24069 represent source-level labels which were explicitly declared by
24070 the user. This really shouldn't be happening though, so catch
24071 it if it ever does happen. */
24072 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
24073
24074 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
24075 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
24076 }
24077 else if (insn
24078 && NOTE_P (insn)
24079 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
24080 && CODE_LABEL_NUMBER (insn) != -1)
24081 {
24082 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
24083 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
24084 }
24085 }
24086 }
24087
24088 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
24089 attributes to the DIE for a block STMT, to describe where the inlined
24090 function was called from. This is similar to add_src_coords_attributes. */
24091
24092 static inline void
24093 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
24094 {
24095 /* We can end up with BUILTINS_LOCATION here. */
24096 if (RESERVED_LOCATION_P (BLOCK_SOURCE_LOCATION (stmt)))
24097 return;
24098
24099 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
24100
24101 if (dwarf_version >= 3 || !dwarf_strict)
24102 {
24103 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
24104 add_AT_unsigned (die, DW_AT_call_line, s.line);
24105 if (debug_column_info && s.column)
24106 add_AT_unsigned (die, DW_AT_call_column, s.column);
24107 }
24108 }
24109
24110
24111 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
24112 Add low_pc and high_pc attributes to the DIE for a block STMT. */
24113
24114 static inline void
24115 add_high_low_attributes (tree stmt, dw_die_ref die)
24116 {
24117 char label[MAX_ARTIFICIAL_LABEL_BYTES];
24118
24119 if (inline_entry_data **iedp
24120 = !inline_entry_data_table ? NULL
24121 : inline_entry_data_table->find_slot_with_hash (stmt,
24122 htab_hash_pointer (stmt),
24123 NO_INSERT))
24124 {
24125 inline_entry_data *ied = *iedp;
24126 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
24127 gcc_assert (debug_inline_points);
24128 gcc_assert (inlined_function_outer_scope_p (stmt));
24129
24130 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
24131 add_AT_lbl_id (die, DW_AT_entry_pc, label);
24132
24133 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
24134 && !dwarf_strict)
24135 {
24136 if (!output_asm_line_debug_info ())
24137 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
24138 else
24139 {
24140 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
24141 /* FIXME: this will resolve to a small number. Could we
24142 possibly emit smaller data? Ideally we'd emit a
24143 uleb128, but that would make the size of DIEs
24144 impossible for the compiler to compute, since it's
24145 the assembler that computes the value of the view
24146 label in this case. Ideally, we'd have a single form
24147 encompassing both the address and the view, and
24148 indirecting them through a table might make things
24149 easier, but even that would be more wasteful,
24150 space-wise, than what we have now. */
24151 add_AT_symview (die, DW_AT_GNU_entry_view, label);
24152 }
24153 }
24154
24155 inline_entry_data_table->clear_slot (iedp);
24156 }
24157
24158 if (BLOCK_FRAGMENT_CHAIN (stmt)
24159 && (dwarf_version >= 3 || !dwarf_strict))
24160 {
24161 tree chain, superblock = NULL_TREE;
24162 dw_die_ref pdie;
24163 dw_attr_node *attr = NULL;
24164
24165 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
24166 {
24167 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24168 BLOCK_NUMBER (stmt));
24169 add_AT_lbl_id (die, DW_AT_entry_pc, label);
24170 }
24171
24172 /* Optimize duplicate .debug_ranges lists or even tails of
24173 lists. If this BLOCK has the same ranges as its supercontext,
24174 look up the DW_AT_ranges attribute in the supercontext (and
24175 recursively so), verify that the ranges_table contains the
24176 right values and use it instead of adding a new .debug_ranges entry. */
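/* Added illustrative note: if, say, the supercontext's .debug_ranges list
   covers fragments [A B C D] and this BLOCK consists only of the tail
   fragments [C D], the code below points DW_AT_ranges at the existing list
   (offset + 4 - 2) instead of emitting a duplicate list.  */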
24177 for (chain = stmt, pdie = die;
24178 BLOCK_SAME_RANGE (chain);
24179 chain = BLOCK_SUPERCONTEXT (chain))
24180 {
24181 dw_attr_node *new_attr;
24182
24183 pdie = pdie->die_parent;
24184 if (pdie == NULL)
24185 break;
24186 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
24187 break;
24188 new_attr = get_AT (pdie, DW_AT_ranges);
24189 if (new_attr == NULL
24190 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
24191 break;
24192 attr = new_attr;
24193 superblock = BLOCK_SUPERCONTEXT (chain);
24194 }
24195 if (attr != NULL
24196 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
24197 == (int)BLOCK_NUMBER (superblock))
24198 && BLOCK_FRAGMENT_CHAIN (superblock))
24199 {
24200 unsigned long off = attr->dw_attr_val.v.val_offset;
24201 unsigned long supercnt = 0, thiscnt = 0;
24202 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
24203 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24204 {
24205 ++supercnt;
24206 gcc_checking_assert ((*ranges_table)[off + supercnt].num
24207 == (int)BLOCK_NUMBER (chain));
24208 }
24209 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
24210 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
24211 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24212 ++thiscnt;
24213 gcc_assert (supercnt >= thiscnt);
24214 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
24215 false);
24216 note_rnglist_head (off + supercnt - thiscnt);
24217 return;
24218 }
24219
24220 unsigned int offset = add_ranges (stmt, true);
24221 add_AT_range_list (die, DW_AT_ranges, offset, false);
24222 note_rnglist_head (offset);
24223
24224 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
24225 chain = BLOCK_FRAGMENT_CHAIN (stmt);
24226 do
24227 {
24228 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
24229 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
24230 chain = BLOCK_FRAGMENT_CHAIN (chain);
24231 }
24232 while (chain);
24233 add_ranges (NULL);
24234 }
24235 else
24236 {
24237 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
24238 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24239 BLOCK_NUMBER (stmt));
24240 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
24241 BLOCK_NUMBER (stmt));
24242 add_AT_low_high_pc (die, label, label_high, false);
24243 }
24244 }
24245
24246 /* Generate a DIE for a lexical block. */
24247
24248 static void
24249 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
24250 {
24251 dw_die_ref old_die = lookup_block_die (stmt);
24252 dw_die_ref stmt_die = NULL;
24253 if (!old_die)
24254 {
24255 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24256 equate_block_to_die (stmt, stmt_die);
24257 }
24258
24259 if (BLOCK_ABSTRACT_ORIGIN (stmt))
24260 {
24261 /* If this is an inlined or concrete instance, create a new lexical
24262 block DIE for anything below to attach DW_AT_abstract_origin to. */
24263 if (old_die)
24264 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24265
24266 tree origin = block_ultimate_origin (stmt);
24267 if (origin != NULL_TREE && (origin != stmt || old_die))
24268 add_abstract_origin_attribute (stmt_die, origin);
24269
24270 old_die = NULL;
24271 }
24272
24273 if (old_die)
24274 stmt_die = old_die;
24275
24276 /* A non-abstract block whose blocks have already been reordered
24277 should have the instruction range for this block. If so, set the
24278 high/low attributes. */
24279 if (!early_dwarf && TREE_ASM_WRITTEN (stmt))
24280 {
24281 gcc_assert (stmt_die);
24282 add_high_low_attributes (stmt, stmt_die);
24283 }
24284
24285 decls_for_scope (stmt, stmt_die);
24286 }
24287
24288 /* Generate a DIE for an inlined subprogram. */
24289
24290 static void
24291 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
24292 {
24293 tree decl = block_ultimate_origin (stmt);
24294
24295 /* Make sure any inlined functions are known to be inlineable. */
24296 gcc_checking_assert (DECL_ABSTRACT_P (decl)
24297 || cgraph_function_possibly_inlined_p (decl));
24298
24299 dw_die_ref subr_die = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
24300
24301 if (call_arg_locations || debug_inline_points)
24302 equate_block_to_die (stmt, subr_die);
24303 add_abstract_origin_attribute (subr_die, decl);
24304 if (TREE_ASM_WRITTEN (stmt))
24305 add_high_low_attributes (stmt, subr_die);
24306 add_call_src_coords_attributes (stmt, subr_die);
24307
24308 /* The inliner creates an extra BLOCK for the parameter setup,
24309 we want to merge that with the actual outermost BLOCK of the
24310 inlined function to avoid duplicate locals in consumers.
24311 Do that by doing the recursion to subblocks on the single subblock
24312 of STMT. */
24313 bool unwrap_one = false;
24314 if (BLOCK_SUBBLOCKS (stmt) && !BLOCK_CHAIN (BLOCK_SUBBLOCKS (stmt)))
24315 {
24316 tree origin = block_ultimate_origin (BLOCK_SUBBLOCKS (stmt));
24317 if (origin
24318 && TREE_CODE (origin) == BLOCK
24319 && BLOCK_SUPERCONTEXT (origin) == decl)
24320 unwrap_one = true;
24321 }
24322 decls_for_scope (stmt, subr_die, !unwrap_one);
24323 if (unwrap_one)
24324 decls_for_scope (BLOCK_SUBBLOCKS (stmt), subr_die);
24325 }
24326
24327 /* Generate a DIE for a field in a record, or structure. CTX is required: see
24328 the comment for VLR_CONTEXT. */
24329
24330 static void
24331 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24332 {
24333 dw_die_ref decl_die;
24334
24335 if (TREE_TYPE (decl) == error_mark_node)
24336 return;
24337
24338 decl_die = new_die (DW_TAG_member, context_die, decl);
24339 add_name_and_src_coords_attributes (decl_die, decl);
24340 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24341 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24342 context_die);
24343
24344 if (DECL_BIT_FIELD_TYPE (decl))
24345 {
24346 add_byte_size_attribute (decl_die, decl);
24347 add_bit_size_attribute (decl_die, decl);
24348 add_bit_offset_attribute (decl_die, decl, ctx);
24349 }
24350
24351 add_alignment_attribute (decl_die, decl);
24352
24353 /* If we have a variant part offset, then we are supposed to process a member
24354 of a QUAL_UNION_TYPE, which is how we represent variant parts in
24355 trees. */
24356 gcc_assert (ctx->variant_part_offset == NULL_TREE
24357 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
24358 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24359 add_data_member_location_attribute (decl_die, decl, ctx);
24360
24361 if (DECL_ARTIFICIAL (decl))
24362 add_AT_flag (decl_die, DW_AT_artificial, 1);
24363
24364 add_accessibility_attribute (decl_die, decl);
24365
24366 /* Equate decl number to die, so that we can look up this decl later on. */
24367 equate_decl_number_to_die (decl, decl_die);
24368 }
24369
24370 /* Generate a DIE for a pointer to a member type. TYPE can be an
24371 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24372 pointer to member function. */
24373
24374 static void
24375 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24376 {
24377 if (lookup_type_die (type))
24378 return;
24379
24380 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24381 scope_die_for (type, context_die), type);
24382
24383 equate_type_number_to_die (type, ptr_die);
24384 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24385 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24386 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24387 context_die);
24388 add_alignment_attribute (ptr_die, type);
24389
24390 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24391 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24392 {
24393 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24394 add_AT_loc (ptr_die, DW_AT_use_location, op);
24395 }
24396 }
24397
24398 static char *producer_string;
24399
24400 /* Return a heap-allocated producer string that includes the command-line
24401 options when -grecord-gcc-switches is in effect. */
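/* Added illustrative note: the language name, version_string and the
   retained switches are joined with single spaces, so a hypothetical result
   with -grecord-gcc-switches could look like
   "GNU C17 9.2.1 -march=x86-64 -O2 -g".  */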
24402
24403 static char *
24404 gen_producer_string (void)
24405 {
24406 size_t j;
24407 auto_vec<const char *> switches;
24408 const char *language_string = lang_hooks.name;
24409 char *producer, *tail;
24410 const char *p;
24411 size_t len = dwarf_record_gcc_switches ? 0 : 3;
24412 size_t plen = strlen (language_string) + 1 + strlen (version_string);
24413
24414 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
24415 switch (save_decoded_options[j].opt_index)
24416 {
24417 case OPT_o:
24418 case OPT_d:
24419 case OPT_dumpbase:
24420 case OPT_dumpdir:
24421 case OPT_auxbase:
24422 case OPT_auxbase_strip:
24423 case OPT_quiet:
24424 case OPT_version:
24425 case OPT_v:
24426 case OPT_w:
24427 case OPT_L:
24428 case OPT_D:
24429 case OPT_I:
24430 case OPT_U:
24431 case OPT_SPECIAL_unknown:
24432 case OPT_SPECIAL_ignore:
24433 case OPT_SPECIAL_deprecated:
24434 case OPT_SPECIAL_program_name:
24435 case OPT_SPECIAL_input_file:
24436 case OPT_grecord_gcc_switches:
24437 case OPT__output_pch_:
24438 case OPT_fdiagnostics_show_location_:
24439 case OPT_fdiagnostics_show_option:
24440 case OPT_fdiagnostics_show_caret:
24441 case OPT_fdiagnostics_show_labels:
24442 case OPT_fdiagnostics_show_line_numbers:
24443 case OPT_fdiagnostics_color_:
24444 case OPT_fdiagnostics_format_:
24445 case OPT_fverbose_asm:
24446 case OPT____:
24447 case OPT__sysroot_:
24448 case OPT_nostdinc:
24449 case OPT_nostdinc__:
24450 case OPT_fpreprocessed:
24451 case OPT_fltrans_output_list_:
24452 case OPT_fresolution_:
24453 case OPT_fdebug_prefix_map_:
24454 case OPT_fmacro_prefix_map_:
24455 case OPT_ffile_prefix_map_:
24456 case OPT_fcompare_debug:
24457 case OPT_fchecking:
24458 case OPT_fchecking_:
24459 /* Ignore these. */
24460 continue;
24461 default:
24462 if (cl_options[save_decoded_options[j].opt_index].flags
24463 & CL_NO_DWARF_RECORD)
24464 continue;
24465 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
24466 == '-');
24467 switch (save_decoded_options[j].canonical_option[0][1])
24468 {
24469 case 'M':
24470 case 'i':
24471 case 'W':
24472 continue;
24473 case 'f':
24474 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
24475 "dump", 4) == 0)
24476 continue;
24477 break;
24478 default:
24479 break;
24480 }
24481 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
24482 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
24483 break;
24484 }
24485
24486 producer = XNEWVEC (char, plen + 1 + len + 1);
24487 tail = producer;
24488 sprintf (tail, "%s %s", language_string, version_string);
24489 tail += plen;
24490
24491 FOR_EACH_VEC_ELT (switches, j, p)
24492 {
24493 len = strlen (p);
24494 *tail = ' ';
24495 memcpy (tail + 1, p, len);
24496 tail += len + 1;
24497 }
24498
24499 *tail = '\0';
24500 return producer;
24501 }
24502
24503 /* Given a C and/or C++ language/version string return the "highest".
24504 C++ is assumed to be "higher" than C in this case. Used for merging
24505 LTO translation unit languages. */
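/* Added illustrative note: for example, highest_c_language ("GNU C11",
   "GNU C++14") returns "GNU C++14" and highest_c_language ("GNU C99",
   "GNU C89") returns "GNU C99"; both arguments must be among the strings
   tested below, otherwise gcc_unreachable is hit.  */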
24506 static const char *
24507 highest_c_language (const char *lang1, const char *lang2)
24508 {
24509 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24510 return "GNU C++17";
24511 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24512 return "GNU C++14";
24513 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24514 return "GNU C++11";
24515 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24516 return "GNU C++98";
24517
24518 if (strcmp ("GNU C2X", lang1) == 0 || strcmp ("GNU C2X", lang2) == 0)
24519 return "GNU C2X";
24520 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24521 return "GNU C17";
24522 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24523 return "GNU C11";
24524 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24525 return "GNU C99";
24526 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24527 return "GNU C89";
24528
24529 gcc_unreachable ();
24530 }
24531
24532
24533 /* Generate the DIE for the compilation unit. */
24534
24535 static dw_die_ref
24536 gen_compile_unit_die (const char *filename)
24537 {
24538 dw_die_ref die;
24539 const char *language_string = lang_hooks.name;
24540 int language;
24541
24542 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24543
24544 if (filename)
24545 {
24546 add_name_attribute (die, filename);
24547 /* Don't add cwd for <built-in>. */
24548 if (filename[0] != '<')
24549 add_comp_dir_attribute (die);
24550 }
24551
24552 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24553
24554 /* If our producer is LTO, try to figure out a common language to use
24555 from the global list of translation units. */
24556 if (strcmp (language_string, "GNU GIMPLE") == 0)
24557 {
24558 unsigned i;
24559 tree t;
24560 const char *common_lang = NULL;
24561
24562 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24563 {
24564 if (!TRANSLATION_UNIT_LANGUAGE (t))
24565 continue;
24566 if (!common_lang)
24567 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24568 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24569 ;
24570 else if (strncmp (common_lang, "GNU C", 5) == 0
24571 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24572 /* Mixing C and C++ is ok, use C++ in that case. */
24573 common_lang = highest_c_language (common_lang,
24574 TRANSLATION_UNIT_LANGUAGE (t));
24575 else
24576 {
24577 /* Fall back to C. */
24578 common_lang = NULL;
24579 break;
24580 }
24581 }
24582
24583 if (common_lang)
24584 language_string = common_lang;
24585 }
24586
24587 language = DW_LANG_C;
24588 if (strncmp (language_string, "GNU C", 5) == 0
24589 && ISDIGIT (language_string[5]))
24590 {
24591 language = DW_LANG_C89;
24592 if (dwarf_version >= 3 || !dwarf_strict)
24593 {
24594 if (strcmp (language_string, "GNU C89") != 0)
24595 language = DW_LANG_C99;
24596
24597 if (dwarf_version >= 5 /* || !dwarf_strict */)
24598 if (strcmp (language_string, "GNU C11") == 0
24599 || strcmp (language_string, "GNU C17") == 0
24600 || strcmp (language_string, "GNU C2X") == 0)
24601 language = DW_LANG_C11;
24602 }
24603 }
24604 else if (strncmp (language_string, "GNU C++", 7) == 0)
24605 {
24606 language = DW_LANG_C_plus_plus;
24607 if (dwarf_version >= 5 /* || !dwarf_strict */)
24608 {
24609 if (strcmp (language_string, "GNU C++11") == 0)
24610 language = DW_LANG_C_plus_plus_11;
24611 else if (strcmp (language_string, "GNU C++14") == 0)
24612 language = DW_LANG_C_plus_plus_14;
24613 else if (strcmp (language_string, "GNU C++17") == 0)
24614 /* For now. */
24615 language = DW_LANG_C_plus_plus_14;
24616 }
24617 }
24618 else if (strcmp (language_string, "GNU F77") == 0)
24619 language = DW_LANG_Fortran77;
24620 else if (dwarf_version >= 3 || !dwarf_strict)
24621 {
24622 if (strcmp (language_string, "GNU Ada") == 0)
24623 language = DW_LANG_Ada95;
24624 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24625 {
24626 language = DW_LANG_Fortran95;
24627 if (dwarf_version >= 5 /* || !dwarf_strict */)
24628 {
24629 if (strcmp (language_string, "GNU Fortran2003") == 0)
24630 language = DW_LANG_Fortran03;
24631 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24632 language = DW_LANG_Fortran08;
24633 }
24634 }
24635 else if (strcmp (language_string, "GNU Objective-C") == 0)
24636 language = DW_LANG_ObjC;
24637 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24638 language = DW_LANG_ObjC_plus_plus;
24639 else if (strcmp (language_string, "GNU D") == 0)
24640 language = DW_LANG_D;
24641 else if (dwarf_version >= 5 || !dwarf_strict)
24642 {
24643 if (strcmp (language_string, "GNU Go") == 0)
24644 language = DW_LANG_Go;
24645 }
24646 }
24647 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24648 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24649 language = DW_LANG_Fortran90;
24650 /* Likewise for Ada. */
24651 else if (strcmp (language_string, "GNU Ada") == 0)
24652 language = DW_LANG_Ada83;
24653
24654 add_AT_unsigned (die, DW_AT_language, language);
24655
24656 switch (language)
24657 {
24658 case DW_LANG_Fortran77:
24659 case DW_LANG_Fortran90:
24660 case DW_LANG_Fortran95:
24661 case DW_LANG_Fortran03:
24662 case DW_LANG_Fortran08:
24663 /* Fortran has case insensitive identifiers and the front-end
24664 lowercases everything. */
24665 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24666 break;
24667 default:
24668 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24669 break;
24670 }
24671 return die;
24672 }
24673
24674 /* Generate the DIE for a base class. */
24675
24676 static void
24677 gen_inheritance_die (tree binfo, tree access, tree type,
24678 dw_die_ref context_die)
24679 {
24680 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24681 struct vlr_context ctx = { type, NULL };
24682
24683 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24684 context_die);
24685 add_data_member_location_attribute (die, binfo, &ctx);
24686
24687 if (BINFO_VIRTUAL_P (binfo))
24688 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24689
24690 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24691 children, otherwise the default is DW_ACCESS_public. In DWARF2
24692 the default has always been DW_ACCESS_private. */
24693 if (access == access_public_node)
24694 {
24695 if (dwarf_version == 2
24696 || context_die->die_tag == DW_TAG_class_type)
24697 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24698 }
24699 else if (access == access_protected_node)
24700 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24701 else if (dwarf_version > 2
24702 && context_die->die_tag != DW_TAG_class_type)
24703 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24704 }
24705
24706 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24707 structure. */
24708
24709 static bool
24710 is_variant_part (tree decl)
24711 {
24712 return (TREE_CODE (decl) == FIELD_DECL
24713 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24714 }
24715
24716 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24717 return the FIELD_DECL. Return NULL_TREE otherwise. */
24718
24719 static tree
24720 analyze_discr_in_predicate (tree operand, tree struct_type)
24721 {
24722 while (CONVERT_EXPR_P (operand))
24723 operand = TREE_OPERAND (operand, 0);
24724
24725 /* Match field access to members of struct_type only. */
24726 if (TREE_CODE (operand) == COMPONENT_REF
24727 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24728 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24729 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24730 return TREE_OPERAND (operand, 1);
24731 else
24732 return NULL_TREE;
24733 }
24734
24735 /* Check that SRC is a constant integer that can be represented as a native
24736 integer constant (either signed or unsigned). If so, store it into DEST and
24737 return true. Return false otherwise. */
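/* Added illustrative note (hypothetical values): for an Ada character
   discriminant whose code-generation type is a signed 8-bit type but whose
   debug type is unsigned, a source value of -1 is folded to the debug type
   first, so we end up with dest->pos = 1 and dest->v.uval = 255.  */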
24738
24739 static bool
24740 get_discr_value (tree src, dw_discr_value *dest)
24741 {
24742 tree discr_type = TREE_TYPE (src);
24743
24744 if (lang_hooks.types.get_debug_type)
24745 {
24746 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24747 if (debug_type != NULL)
24748 discr_type = debug_type;
24749 }
24750
24751 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24752 return false;
24753
24754 /* Signedness can vary between the original type and the debug type. This
24755 can happen for character types in Ada for instance: the character type
24756 used for code generation can be signed, to be compatible with the C one,
24757 but from a debugger point of view, it must be unsigned. */
24758 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24759 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24760
24761 if (is_orig_unsigned != is_debug_unsigned)
24762 src = fold_convert (discr_type, src);
24763
24764 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24765 return false;
24766
24767 dest->pos = is_debug_unsigned;
24768 if (is_debug_unsigned)
24769 dest->v.uval = tree_to_uhwi (src);
24770 else
24771 dest->v.sval = tree_to_shwi (src);
24772
24773 return true;
24774 }
24775
24776 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24777 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24778 store NULL_TREE in DISCR_DECL. Otherwise:
24779
24780 - store the discriminant field in STRUCT_TYPE that controls the variant
24781 part to *DISCR_DECL
24782
24783 - put in *DISCR_LISTS_P an array where for each variant, the item
24784 represents the corresponding matching list of discriminant values.
24785
24786 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24787 the above array.
24788
24789 Note that when the array is allocated (i.e. when the analysis is
24790 successful), it is up to the caller to free the array. */
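/* Added illustrative sketch: for an Ada-like choice list "when 1 | 5 .. 9 =>"
   the front end builds a DECL_QUALIFIER roughly of the form

       <discr> == 1 || (<discr> >= 5 && <discr> <= 9)

   i.e. nested TRUTH_ORIF_EXPRs whose alternatives are EQ_EXPR nodes (single
   values) or TRUTH_ANDIF_EXPRs of GE/GT and LE/LT comparisons (ranges), with
   <discr> a COMPONENT_REF of a PLACEHOLDER_EXPR.  The loop below decomposes
   each such predicate into one dw_discr_list per variant.  */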
24791
24792 static void
24793 analyze_variants_discr (tree variant_part_decl,
24794 tree struct_type,
24795 tree *discr_decl,
24796 dw_discr_list_ref **discr_lists_p,
24797 unsigned *discr_lists_length)
24798 {
24799 tree variant_part_type = TREE_TYPE (variant_part_decl);
24800 tree variant;
24801 dw_discr_list_ref *discr_lists;
24802 unsigned i;
24803
24804 /* Compute how many variants there are in this variant part. */
24805 *discr_lists_length = 0;
24806 for (variant = TYPE_FIELDS (variant_part_type);
24807 variant != NULL_TREE;
24808 variant = DECL_CHAIN (variant))
24809 ++*discr_lists_length;
24810
24811 *discr_decl = NULL_TREE;
24812 *discr_lists_p
24813 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24814 sizeof (**discr_lists_p));
24815 discr_lists = *discr_lists_p;
24816
24817 /* And then analyze all variants to extract discriminant information for all
24818 of them. This analysis is conservative: as soon as we detect something we
24819 do not support, abort everything and pretend we found nothing. */
24820 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24821 variant != NULL_TREE;
24822 variant = DECL_CHAIN (variant), ++i)
24823 {
24824 tree match_expr = DECL_QUALIFIER (variant);
24825
24826 /* Now, try to analyze the predicate and deduce a discriminant for
24827 it. */
24828 if (match_expr == boolean_true_node)
24829 /* Typically happens for the default variant: it matches all cases that
24830 previous variants rejected. Don't output any matching value for
24831 this one. */
24832 continue;
24833
24834 /* The following loop tries to iterate over each discriminant
24835 possibility: single values or ranges. */
24836 while (match_expr != NULL_TREE)
24837 {
24838 tree next_round_match_expr;
24839 tree candidate_discr = NULL_TREE;
24840 dw_discr_list_ref new_node = NULL;
24841
24842 /* Possibilities are matched one after the other by nested
24843 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24844 continue with the rest at next iteration. */
24845 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24846 {
24847 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24848 match_expr = TREE_OPERAND (match_expr, 1);
24849 }
24850 else
24851 next_round_match_expr = NULL_TREE;
24852
24853 if (match_expr == boolean_false_node)
24854 /* This sub-expression matches nothing: just wait for the next
24855 one. */
24856 ;
24857
24858 else if (TREE_CODE (match_expr) == EQ_EXPR)
24859 {
24860 /* We are matching: <discr_field> == <integer_cst>
24861 This sub-expression matches a single value. */
24862 tree integer_cst = TREE_OPERAND (match_expr, 1);
24863
24864 candidate_discr
24865 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24866 struct_type);
24867
24868 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24869 if (!get_discr_value (integer_cst,
24870 &new_node->dw_discr_lower_bound))
24871 goto abort;
24872 new_node->dw_discr_range = false;
24873 }
24874
24875 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24876 {
24877 /* We are matching:
24878 <discr_field> > <integer_cst>
24879 && <discr_field> < <integer_cst>.
24880 This sub-expression matches the range of values between the
24881 two matched integer constants. Note that comparisons can be
24882 inclusive or exclusive. */
24883 tree candidate_discr_1, candidate_discr_2;
24884 tree lower_cst, upper_cst;
24885 bool lower_cst_included, upper_cst_included;
24886 tree lower_op = TREE_OPERAND (match_expr, 0);
24887 tree upper_op = TREE_OPERAND (match_expr, 1);
24888
24889 /* When the comparison is exclusive, the integer constant is not
24890 the discriminant range bound we are looking for: we will have
24891 to increment or decrement it. */
24892 if (TREE_CODE (lower_op) == GE_EXPR)
24893 lower_cst_included = true;
24894 else if (TREE_CODE (lower_op) == GT_EXPR)
24895 lower_cst_included = false;
24896 else
24897 goto abort;
24898
24899 if (TREE_CODE (upper_op) == LE_EXPR)
24900 upper_cst_included = true;
24901 else if (TREE_CODE (upper_op) == LT_EXPR)
24902 upper_cst_included = false;
24903 else
24904 goto abort;
24905
24906 /* Extract the discriminant from the first operand and check it
24907 is consistent with the same analysis in the second
24908 operand. */
24909 candidate_discr_1
24910 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24911 struct_type);
24912 candidate_discr_2
24913 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24914 struct_type);
24915 if (candidate_discr_1 == candidate_discr_2)
24916 candidate_discr = candidate_discr_1;
24917 else
24918 goto abort;
24919
24920 /* Extract bounds from both. */
24921 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24922 lower_cst = TREE_OPERAND (lower_op, 1);
24923 upper_cst = TREE_OPERAND (upper_op, 1);
24924
24925 if (!lower_cst_included)
24926 lower_cst
24927 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24928 build_int_cst (TREE_TYPE (lower_cst), 1));
24929 if (!upper_cst_included)
24930 upper_cst
24931 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24932 build_int_cst (TREE_TYPE (upper_cst), 1));
24933
24934 if (!get_discr_value (lower_cst,
24935 &new_node->dw_discr_lower_bound)
24936 || !get_discr_value (upper_cst,
24937 &new_node->dw_discr_upper_bound))
24938 goto abort;
24939
24940 new_node->dw_discr_range = true;
24941 }
24942
24943 else if ((candidate_discr
24944 = analyze_discr_in_predicate (match_expr, struct_type))
24945 && TREE_TYPE (candidate_discr) == boolean_type_node)
24946 {
24947 /* We are matching: <discr_field> for a boolean discriminant.
24948 This sub-expression matches boolean_true_node. */
24949 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24950 if (!get_discr_value (boolean_true_node,
24951 &new_node->dw_discr_lower_bound))
24952 goto abort;
24953 new_node->dw_discr_range = false;
24954 }
24955
24956 else
24957 /* Unsupported sub-expression: we cannot determine the set of
24958 matching discriminant values. Abort everything. */
24959 goto abort;
24960
24961 /* If the discriminant info is not consistent with what we saw so
24962 far, consider the analysis failed and abort everything. */
24963 if (candidate_discr == NULL_TREE
24964 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24965 goto abort;
24966 else
24967 *discr_decl = candidate_discr;
24968
24969 if (new_node != NULL)
24970 {
24971 new_node->dw_discr_next = discr_lists[i];
24972 discr_lists[i] = new_node;
24973 }
24974 match_expr = next_round_match_expr;
24975 }
24976 }
24977
24978 /* If we reach this point, we were able to match everything we were
24979 interested in. */
24980 return;
24981
24982 abort:
24983 /* Clean all data structure and return no result. */
24984 free (*discr_lists_p);
24985 *discr_lists_p = NULL;
24986 *discr_decl = NULL_TREE;
24987 }
24988
24989 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24990 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24991 under CONTEXT_DIE.
24992
24993 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24994 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
24995 this type, which are record types, represent the available variants and each
24996 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24997 values are inferred from these attributes.
24998
24999 In trees, the offsets for the fields inside these sub-records are relative
25000 to the variant part itself, whereas the corresponding DIEs should have
25001 offset attributes that are relative to the embedding record base address.
25002 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
25003 must be an expression that computes the offset of the variant part to
25004 describe in DWARF. */
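/* Added illustrative sketch of the resulting DIE tree (not from the original
   source): for a record with a discriminant field D and two variants this
   emits roughly

       DW_TAG_variant_part
         DW_AT_discr  -> DIE of D            (if a DIE for D exists)
         DW_TAG_variant
           DW_AT_discr_value or DW_AT_discr_list  (values matching variant 1)
           <member DIEs of variant 1>
         DW_TAG_variant                      (e.g. the default variant,
           <member DIEs of variant 2>         with no discr attribute)

   with member offsets rewritten to be relative to the embedding record via
   VARIANT_PART_OFFSET, possibly through a DWARF procedure.  */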
25005
25006 static void
25007 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
25008 dw_die_ref context_die)
25009 {
25010 const tree variant_part_type = TREE_TYPE (variant_part_decl);
25011 tree variant_part_offset = vlr_ctx->variant_part_offset;
25012 struct loc_descr_context ctx = {
25013 vlr_ctx->struct_type, /* context_type */
25014 NULL_TREE, /* base_decl */
25015 NULL, /* dpi */
25016 false, /* placeholder_arg */
25017 false /* placeholder_seen */
25018 };
25019
25020 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
25021 NULL_TREE if there is no such field. */
25022 tree discr_decl = NULL_TREE;
25023 dw_discr_list_ref *discr_lists;
25024 unsigned discr_lists_length = 0;
25025 unsigned i;
25026
25027 dw_die_ref dwarf_proc_die = NULL;
25028 dw_die_ref variant_part_die
25029 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
25030
25031 equate_decl_number_to_die (variant_part_decl, variant_part_die);
25032
25033 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
25034 &discr_decl, &discr_lists, &discr_lists_length);
25035
25036 if (discr_decl != NULL_TREE)
25037 {
25038 dw_die_ref discr_die = lookup_decl_die (discr_decl);
25039
25040 if (discr_die)
25041 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
25042 else
25043 /* We have no DIE for the discriminant, so just discard all
25044 discriminant information in the output. */
25045 discr_decl = NULL_TREE;
25046 }
25047
25048 /* If the offset for this variant part is more complex than a constant,
25049 create a DWARF procedure for it so that we will not have to generate DWARF
25050 expressions for it for each member. */
25051 if (TREE_CODE (variant_part_offset) != INTEGER_CST
25052 && (dwarf_version >= 3 || !dwarf_strict))
25053 {
25054 const tree dwarf_proc_fndecl
25055 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
25056 build_function_type (TREE_TYPE (variant_part_offset),
25057 NULL_TREE));
25058 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
25059 const dw_loc_descr_ref dwarf_proc_body
25060 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
25061
25062 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
25063 dwarf_proc_fndecl, context_die);
25064 if (dwarf_proc_die != NULL)
25065 variant_part_offset = dwarf_proc_call;
25066 }
25067
25068 /* Output DIEs for all variants. */
25069 i = 0;
25070 for (tree variant = TYPE_FIELDS (variant_part_type);
25071 variant != NULL_TREE;
25072 variant = DECL_CHAIN (variant), ++i)
25073 {
25074 tree variant_type = TREE_TYPE (variant);
25075 dw_die_ref variant_die;
25076
25077 /* All variants (i.e. members of a variant part) are supposed to be
25078 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
25079 under these records. */
25080 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
25081
25082 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
25083 equate_decl_number_to_die (variant, variant_die);
25084
25085 /* Output discriminant values this variant matches, if any. */
25086 if (discr_decl == NULL || discr_lists[i] == NULL)
25087 /* Either we have no discriminant information at all, or this is
25088 probably the default variant: as the standard says, don't
25089 output any discriminant value/list attribute. */
25090 ;
25091 else if (discr_lists[i]->dw_discr_next == NULL
25092 && !discr_lists[i]->dw_discr_range)
25093 /* If there is only one accepted value, don't bother outputting a
25094 list. */
25095 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
25096 else
25097 add_discr_list (variant_die, discr_lists[i]);
25098
25099 for (tree member = TYPE_FIELDS (variant_type);
25100 member != NULL_TREE;
25101 member = DECL_CHAIN (member))
25102 {
25103 struct vlr_context vlr_sub_ctx = {
25104 vlr_ctx->struct_type, /* struct_type */
25105 NULL /* variant_part_offset */
25106 };
25107 if (is_variant_part (member))
25108 {
25109 /* All offsets for fields inside variant parts are relative to
25110 the top-level embedding RECORD_TYPE's base address. On the
25111 other hand, offsets in GCC's types are relative to the
25112 nested-most variant part. So we have to sum offsets each time
25113 we recurse. */
25114
25115 vlr_sub_ctx.variant_part_offset
25116 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
25117 variant_part_offset, byte_position (member));
25118 gen_variant_part (member, &vlr_sub_ctx, variant_die);
25119 }
25120 else
25121 {
25122 vlr_sub_ctx.variant_part_offset = variant_part_offset;
25123 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
25124 }
25125 }
25126 }
25127
25128 free (discr_lists);
25129 }
25130
25131 /* Generate a DIE for a class member. */
25132
25133 static void
25134 gen_member_die (tree type, dw_die_ref context_die)
25135 {
25136 tree member;
25137 tree binfo = TYPE_BINFO (type);
25138
25139 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
25140
25141 /* If this is not an incomplete type, output descriptions of each of its
25142 members. Note that as we output the DIEs necessary to represent the
25143 members of this record or union type, we will also be trying to output
25144 DIEs to represent the *types* of those members. However the `type'
25145 function (above) will specifically avoid generating type DIEs for member
25146 types *within* the list of member DIEs for this (containing) type except
25147 for those types (of members) which are explicitly marked as also being
25148 members of this (containing) type themselves. The g++ front end can
25149 force any given type to be treated as a member of some other (containing)
25150 type by setting the TYPE_CONTEXT of the given (member) type to point to
25151 the TREE node representing the appropriate (containing) type. */
25152
25153 /* First output info about the base classes. */
25154 if (binfo && early_dwarf)
25155 {
25156 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
25157 int i;
25158 tree base;
25159
25160 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
25161 gen_inheritance_die (base,
25162 (accesses ? (*accesses)[i] : access_public_node),
25163 type,
25164 context_die);
25165 }
25166
25167 /* Now output info about the members. */
25168 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
25169 {
25170 /* Ignore clones. */
25171 if (DECL_ABSTRACT_ORIGIN (member))
25172 continue;
25173
25174 struct vlr_context vlr_ctx = { type, NULL_TREE };
25175 bool static_inline_p
25176 = (VAR_P (member)
25177 && TREE_STATIC (member)
25178 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
25179 != -1));
25180
25181 /* If we thought we were generating minimal debug info for TYPE
25182 and then changed our minds, some of the member declarations
25183 may have already been defined. Don't define them again, but
25184 do put them in the right order. */
25185
25186 if (dw_die_ref child = lookup_decl_die (member))
25187 {
25188 /* Handle inline static data members, which only have in-class
25189 declarations. */
25190 bool splice = true;
25191
25192 dw_die_ref ref = NULL;
25193 if (child->die_tag == DW_TAG_variable
25194 && child->die_parent == comp_unit_die ())
25195 {
25196 ref = get_AT_ref (child, DW_AT_specification);
25197
25198 /* For C++17 inline static data members followed by redundant
25199 out of class redeclaration, we might get here with
25200 child being the DIE created for the out of class
25201 redeclaration and with its DW_AT_specification being
25202 the DIE created for in-class definition. We want to
25203 reparent the latter, and don't want to create another
25204 DIE with DW_AT_specification in that case, because
25205 we already have one. */
25206 if (ref
25207 && static_inline_p
25208 && ref->die_tag == DW_TAG_variable
25209 && ref->die_parent == comp_unit_die ()
25210 && get_AT (ref, DW_AT_specification) == NULL)
25211 {
25212 child = ref;
25213 ref = NULL;
25214 static_inline_p = false;
25215 }
25216
25217 if (!ref)
25218 {
25219 reparent_child (child, context_die);
25220 if (dwarf_version < 5)
25221 child->die_tag = DW_TAG_member;
25222 splice = false;
25223 }
25224 }
25225
25226 if (splice)
25227 splice_child_die (context_die, child);
25228 }
25229
25230 /* Do not generate standard DWARF for variant parts if we are generating
25231 the corresponding GNAT encodings: DIEs generated for both would
25232 conflict in our mappings. */
25233 else if (is_variant_part (member)
25234 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
25235 {
25236 vlr_ctx.variant_part_offset = byte_position (member);
25237 gen_variant_part (member, &vlr_ctx, context_die);
25238 }
25239 else
25240 {
25241 vlr_ctx.variant_part_offset = NULL_TREE;
25242 gen_decl_die (member, NULL, &vlr_ctx, context_die);
25243 }
25244
25245 /* For C++ inline static data members emit immediately a DW_TAG_variable
25246 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
25247 DW_AT_specification. */
25248 if (static_inline_p)
25249 {
25250 int old_extern = DECL_EXTERNAL (member);
25251 DECL_EXTERNAL (member) = 0;
25252 gen_decl_die (member, NULL, NULL, comp_unit_die ());
25253 DECL_EXTERNAL (member) = old_extern;
25254 }
25255 }
25256 }
25257
25258 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
25259 is set, we pretend that the type was never defined, so we only get the
25260 member DIEs needed by later specification DIEs. */
25261
25262 static void
25263 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
25264 enum debug_info_usage usage)
25265 {
25266 if (TREE_ASM_WRITTEN (type))
25267 {
25268 /* Fill in the bounds of variable-length fields in late DWARF if
25269 still incomplete. */
25270 if (!early_dwarf && variably_modified_type_p (type, NULL))
25271 for (tree member = TYPE_FIELDS (type);
25272 member;
25273 member = DECL_CHAIN (member))
25274 fill_variable_array_bounds (TREE_TYPE (member));
25275 return;
25276 }
25277
25278 dw_die_ref type_die = lookup_type_die (type);
25279 dw_die_ref scope_die = 0;
25280 int nested = 0;
25281 int complete = (TYPE_SIZE (type)
25282 && (! TYPE_STUB_DECL (type)
25283 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
25284 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
25285 complete = complete && should_emit_struct_debug (type, usage);
25286
25287 if (type_die && ! complete)
25288 return;
25289
25290 if (TYPE_CONTEXT (type) != NULL_TREE
25291 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25292 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
25293 nested = 1;
25294
25295 scope_die = scope_die_for (type, context_die);
25296
25297 /* Generate child DIEs for template parameters. */
25298 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
25299 schedule_generic_params_dies_gen (type);
25300
25301 if (! type_die || (nested && is_cu_die (scope_die)))
25302 /* First occurrence of type or toplevel definition of nested class. */
25303 {
25304 dw_die_ref old_die = type_die;
25305
25306 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
25307 ? record_type_tag (type) : DW_TAG_union_type,
25308 scope_die, type);
25309 equate_type_number_to_die (type, type_die);
25310 if (old_die)
25311 add_AT_specification (type_die, old_die);
25312 else
25313 add_name_attribute (type_die, type_tag (type));
25314 }
25315 else
25316 remove_AT (type_die, DW_AT_declaration);
25317
25318 /* If this type has been completed, then give it a byte_size attribute and
25319 then give a list of members. */
25320 if (complete && !ns_decl)
25321 {
25322 /* Prevent infinite recursion in cases where the type of some member of
25323 this type is expressed in terms of this type itself. */
25324 TREE_ASM_WRITTEN (type) = 1;
25325 add_byte_size_attribute (type_die, type);
25326 add_alignment_attribute (type_die, type);
25327 if (TYPE_STUB_DECL (type) != NULL_TREE)
25328 {
25329 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
25330 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
25331 }
25332
25333 /* If the first reference to this type was as the return type of an
25334 inline function, then it may not have a parent. Fix this now. */
25335 if (type_die->die_parent == NULL)
25336 add_child_die (scope_die, type_die);
25337
25338 gen_member_die (type, type_die);
25339
25340 add_gnat_descriptive_type_attribute (type_die, type, context_die);
25341 if (TYPE_ARTIFICIAL (type))
25342 add_AT_flag (type_die, DW_AT_artificial, 1);
25343
25344 /* GNU extension: Record what type our vtable lives in. */
25345 if (TYPE_VFIELD (type))
25346 {
25347 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25348
25349 gen_type_die (vtype, context_die);
25350 add_AT_die_ref (type_die, DW_AT_containing_type,
25351 lookup_type_die (vtype));
25352 }
25353 }
25354 else
25355 {
25356 add_AT_flag (type_die, DW_AT_declaration, 1);
25357
25358 /* We don't need to do this for function-local types. */
25359 if (TYPE_STUB_DECL (type)
25360 && ! decl_function_context (TYPE_STUB_DECL (type)))
25361 vec_safe_push (incomplete_types, type);
25362 }
25363
25364 if (get_AT (type_die, DW_AT_name))
25365 add_pubtype (type, type_die);
25366 }
25367
25368 /* Generate a DIE for a subroutine _type_. */
25369
25370 static void
25371 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25372 {
25373 tree return_type = TREE_TYPE (type);
25374 dw_die_ref subr_die
25375 = new_die (DW_TAG_subroutine_type,
25376 scope_die_for (type, context_die), type);
25377
25378 equate_type_number_to_die (type, subr_die);
25379 add_prototyped_attribute (subr_die, type);
25380 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25381 context_die);
25382 add_alignment_attribute (subr_die, type);
25383 gen_formal_types_die (type, subr_die);
25384
25385 if (get_AT (subr_die, DW_AT_name))
25386 add_pubtype (type, subr_die);
25387 if ((dwarf_version >= 5 || !dwarf_strict)
25388 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25389 add_AT_flag (subr_die, DW_AT_reference, 1);
25390 if ((dwarf_version >= 5 || !dwarf_strict)
25391 && lang_hooks.types.type_dwarf_attribute (type,
25392 DW_AT_rvalue_reference) != -1)
25393 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25394 }
25395
25396 /* Generate a DIE for a type definition. */
25397
25398 static void
25399 gen_typedef_die (tree decl, dw_die_ref context_die)
25400 {
25401 dw_die_ref type_die;
25402 tree type;
25403
25404 if (TREE_ASM_WRITTEN (decl))
25405 {
25406 if (DECL_ORIGINAL_TYPE (decl))
25407 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25408 return;
25409 }
25410
25411 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25412 checks in process_scope_var and modified_type_die), this should be called
25413 only for original types. */
25414 gcc_assert (decl_ultimate_origin (decl) == NULL
25415 || decl_ultimate_origin (decl) == decl);
25416
25417 TREE_ASM_WRITTEN (decl) = 1;
25418 type_die = new_die (DW_TAG_typedef, context_die, decl);
25419
25420 add_name_and_src_coords_attributes (type_die, decl);
25421 if (DECL_ORIGINAL_TYPE (decl))
25422 {
25423 type = DECL_ORIGINAL_TYPE (decl);
25424 if (type == error_mark_node)
25425 return;
25426
25427 gcc_assert (type != TREE_TYPE (decl));
25428 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25429 }
25430 else
25431 {
25432 type = TREE_TYPE (decl);
25433 if (type == error_mark_node)
25434 return;
25435
25436 if (is_naming_typedef_decl (TYPE_NAME (type)))
25437 {
25438 /* Here, we are in the case of decl being a typedef naming
25439 an anonymous type, e.g:
25440 typedef struct {...} foo;
25441 In that case TREE_TYPE (decl) is not a typedef variant
25442 type and TYPE_NAME of the anonymous type is set to the
25443 TYPE_DECL of the typedef. This construct is emitted by
25444 the C++ FE.
25445
25446 TYPE is the anonymous struct named by the typedef
25447 DECL. As we need the DW_AT_type attribute of the
25448 DW_TAG_typedef to point to the DIE of TYPE, let's
25449 generate that DIE right away. add_type_attribute
25450 called below will then pick (via lookup_type_die) that
25451 anonymous struct DIE. */
25452 if (!TREE_ASM_WRITTEN (type))
25453 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25454
25455 /* This is a GNU Extension. We are adding a
25456 DW_AT_linkage_name attribute to the DIE of the
25457 anonymous struct TYPE. The value of that attribute
25458 is the name of the typedef decl naming the anonymous
25459 struct. This greatly eases the work of consumers of
25460 this debug info. */
25461 add_linkage_name_raw (lookup_type_die (type), decl);
25462 }
25463 }
25464
25465 add_type_attribute (type_die, type, decl_quals (decl), false,
25466 context_die);
25467
25468 if (is_naming_typedef_decl (decl))
25469 /* We want that all subsequent calls to lookup_type_die with
25470 TYPE in argument yield the DW_TAG_typedef we have just
25471 created. */
25472 equate_type_number_to_die (type, type_die);
25473
25474 add_alignment_attribute (type_die, TREE_TYPE (decl));
25475
25476 add_accessibility_attribute (type_die, decl);
25477
25478 if (DECL_ABSTRACT_P (decl))
25479 equate_decl_number_to_die (decl, type_die);
25480
25481 if (get_AT (type_die, DW_AT_name))
25482 add_pubtype (decl, type_die);
25483 }
25484
25485 /* Generate a DIE for a struct, class, enum or union type. */
25486
25487 static void
25488 gen_tagged_type_die (tree type,
25489 dw_die_ref context_die,
25490 enum debug_info_usage usage)
25491 {
25492 if (type == NULL_TREE
25493 || !is_tagged_type (type))
25494 return;
25495
25496 if (TREE_ASM_WRITTEN (type))
25497 ;
25498 /* If this is a nested type whose containing class hasn't been written
25499 out yet, writing it out will cover this one, too. This does not apply
25500 to instantiations of member class templates; they need to be added to
25501 the containing class as they are generated. FIXME: This hurts the
25502 idea of combining type decls from multiple TUs, since we can't predict
25503 what set of template instantiations we'll get. */
25504 else if (TYPE_CONTEXT (type)
25505 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25506 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25507 {
25508 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25509
25510 if (TREE_ASM_WRITTEN (type))
25511 return;
25512
25513 /* If that failed, attach ourselves to the stub. */
25514 context_die = lookup_type_die (TYPE_CONTEXT (type));
25515 }
25516 else if (TYPE_CONTEXT (type) != NULL_TREE
25517 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25518 {
25519 /* If this type is local to a function that hasn't been written
25520 out yet, use a NULL context for now; it will be fixed up in
25521 decls_for_scope. */
25522 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25523 /* A declaration DIE doesn't count; nested types need to go in the
25524 specification. */
25525 if (context_die && is_declaration_die (context_die))
25526 context_die = NULL;
25527 }
25528 else
25529 context_die = declare_in_namespace (type, context_die);
25530
25531 if (TREE_CODE (type) == ENUMERAL_TYPE)
25532 {
25533 /* This might have been written out by the call to
25534 declare_in_namespace. */
25535 if (!TREE_ASM_WRITTEN (type))
25536 gen_enumeration_type_die (type, context_die);
25537 }
25538 else
25539 gen_struct_or_union_type_die (type, context_die, usage);
25540
25541 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25542 it up if it is ever completed. gen_*_type_die will set it for us
25543 when appropriate. */
25544 }
25545
25546 /* Generate a type description DIE. */
25547
25548 static void
25549 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25550 enum debug_info_usage usage)
25551 {
25552 struct array_descr_info info;
25553
25554 if (type == NULL_TREE || type == error_mark_node)
25555 return;
25556
25557 if (flag_checking && type)
25558 verify_type (type);
25559
25560 if (TYPE_NAME (type) != NULL_TREE
25561 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25562 && is_redundant_typedef (TYPE_NAME (type))
25563 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25564 /* The DECL of this type is a typedef we don't want to emit debug
25565 info for but we want debug info for its underlying typedef.
25566 This can happen for e.g, the injected-class-name of a C++
25567 type. */
25568 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25569
25570 /* If TYPE is a typedef type variant, let's generate debug info
25571 for the parent typedef which TYPE is a type of. */
25572 if (typedef_variant_p (type))
25573 {
25574 if (TREE_ASM_WRITTEN (type))
25575 return;
25576
25577 tree name = TYPE_NAME (type);
25578 tree origin = decl_ultimate_origin (name);
25579 if (origin != NULL && origin != name)
25580 {
25581 gen_decl_die (origin, NULL, NULL, context_die);
25582 return;
25583 }
25584
25585 /* Prevent broken recursion; we can't hand off to the same type. */
25586 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25587
25588 /* Give typedefs the right scope. */
25589 context_die = scope_die_for (type, context_die);
25590
25591 TREE_ASM_WRITTEN (type) = 1;
25592
25593 gen_decl_die (name, NULL, NULL, context_die);
25594 return;
25595 }
25596
25597 /* If type is an anonymous tagged type named by a typedef, let's
25598 generate debug info for the typedef. */
25599 if (is_naming_typedef_decl (TYPE_NAME (type)))
25600 {
25601 /* Give typedefs the right scope. */
25602 context_die = scope_die_for (type, context_die);
25603
25604 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25605 return;
25606 }
25607
25608 if (lang_hooks.types.get_debug_type)
25609 {
25610 tree debug_type = lang_hooks.types.get_debug_type (type);
25611
25612 if (debug_type != NULL_TREE && debug_type != type)
25613 {
25614 gen_type_die_with_usage (debug_type, context_die, usage);
25615 return;
25616 }
25617 }
25618
25619 /* We are going to output a DIE to represent the unqualified version
25620 of this type (i.e. without any const or volatile qualifiers) so
25621 get the main variant (i.e. the unqualified version) of this type
25622 now. (Vectors and arrays are special because the debugging info is in the
25623 cloned type itself. Similarly function/method types can contain extra
25624 ref-qualification). */
25625 if (TREE_CODE (type) == FUNCTION_TYPE
25626 || TREE_CODE (type) == METHOD_TYPE)
25627 {
25628 /* For function/method types, can't use type_main_variant here,
25629 because that can have different ref-qualifiers for C++,
25630 but try to canonicalize. */
25631 tree main = TYPE_MAIN_VARIANT (type);
25632 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25633 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25634 && check_base_type (t, main)
25635 && check_lang_type (t, type))
25636 {
25637 type = t;
25638 break;
25639 }
25640 }
25641 else if (TREE_CODE (type) != VECTOR_TYPE
25642 && TREE_CODE (type) != ARRAY_TYPE)
25643 type = type_main_variant (type);
25644
25645 /* If this is an array type with hidden descriptor, handle it first. */
25646 if (!TREE_ASM_WRITTEN (type)
25647 && lang_hooks.types.get_array_descr_info)
25648 {
25649 memset (&info, 0, sizeof (info));
25650 if (lang_hooks.types.get_array_descr_info (type, &info))
25651 {
25652 /* Fortran sometimes emits array types with no dimension. */
25653 gcc_assert (info.ndimensions >= 0
25654 && (info.ndimensions
25655 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25656 gen_descr_array_type_die (type, &info, context_die);
25657 TREE_ASM_WRITTEN (type) = 1;
25658 return;
25659 }
25660 }
25661
25662 if (TREE_ASM_WRITTEN (type))
25663 {
25664 /* Variable-length types may be incomplete even if
25665 TREE_ASM_WRITTEN. For such types, fall through to
25666 gen_array_type_die() and possibly fill in
25667 DW_AT_{upper,lower}_bound attributes. */
25668 if ((TREE_CODE (type) != ARRAY_TYPE
25669 && TREE_CODE (type) != RECORD_TYPE
25670 && TREE_CODE (type) != UNION_TYPE
25671 && TREE_CODE (type) != QUAL_UNION_TYPE)
25672 || !variably_modified_type_p (type, NULL))
25673 return;
25674 }
25675
25676 switch (TREE_CODE (type))
25677 {
25678 case ERROR_MARK:
25679 break;
25680
25681 case POINTER_TYPE:
25682 case REFERENCE_TYPE:
25683 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25684 ensures that the gen_type_die recursion will terminate even if the
25685 type is recursive. Recursive types are possible in Ada. */
25686 /* ??? We could perhaps do this for all types before the switch
25687 statement. */
25688 TREE_ASM_WRITTEN (type) = 1;
25689
25690 /* For these types, all that is required is that we output a DIE (or a
25691 set of DIEs) to represent the "basis" type. */
25692 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25693 DINFO_USAGE_IND_USE);
25694 break;
25695
25696 case OFFSET_TYPE:
25697 /* This code is used for C++ pointer-to-data-member types.
25698 Output a description of the relevant class type. */
25699 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25700 DINFO_USAGE_IND_USE);
25701
25702 /* Output a description of the type of the object pointed to. */
25703 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25704 DINFO_USAGE_IND_USE);
25705
25706 /* Now output a DIE to represent this pointer-to-data-member type
25707 itself. */
25708 gen_ptr_to_mbr_type_die (type, context_die);
25709 break;
25710
25711 case FUNCTION_TYPE:
25712 /* Force out return type (in case it wasn't forced out already). */
25713 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25714 DINFO_USAGE_DIR_USE);
25715 gen_subroutine_type_die (type, context_die);
25716 break;
25717
25718 case METHOD_TYPE:
25719 /* Force out return type (in case it wasn't forced out already). */
25720 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25721 DINFO_USAGE_DIR_USE);
25722 gen_subroutine_type_die (type, context_die);
25723 break;
25724
25725 case ARRAY_TYPE:
25726 case VECTOR_TYPE:
25727 gen_array_type_die (type, context_die);
25728 break;
25729
25730 case ENUMERAL_TYPE:
25731 case RECORD_TYPE:
25732 case UNION_TYPE:
25733 case QUAL_UNION_TYPE:
25734 gen_tagged_type_die (type, context_die, usage);
25735 return;
25736
25737 case VOID_TYPE:
25738 case INTEGER_TYPE:
25739 case REAL_TYPE:
25740 case FIXED_POINT_TYPE:
25741 case COMPLEX_TYPE:
25742 case BOOLEAN_TYPE:
25743 /* No DIEs needed for fundamental types. */
25744 break;
25745
25746 case NULLPTR_TYPE:
25747 case LANG_TYPE:
25748 /* Just use DW_TAG_unspecified_type. */
25749 {
25750 dw_die_ref type_die = lookup_type_die (type);
25751 if (type_die == NULL)
25752 {
25753 tree name = TYPE_IDENTIFIER (type);
25754 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25755 type);
25756 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25757 equate_type_number_to_die (type, type_die);
25758 }
25759 }
25760 break;
25761
25762 default:
25763 if (is_cxx_auto (type))
25764 {
25765 tree name = TYPE_IDENTIFIER (type);
25766 dw_die_ref *die = (name == get_identifier ("auto")
25767 ? &auto_die : &decltype_auto_die);
25768 if (!*die)
25769 {
25770 *die = new_die (DW_TAG_unspecified_type,
25771 comp_unit_die (), NULL_TREE);
25772 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25773 }
25774 equate_type_number_to_die (type, *die);
25775 break;
25776 }
25777 gcc_unreachable ();
25778 }
25779
25780 TREE_ASM_WRITTEN (type) = 1;
25781 }
25782
25783 static void
25784 gen_type_die (tree type, dw_die_ref context_die)
25785 {
25786 if (type != error_mark_node)
25787 {
25788 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25789 if (flag_checking)
25790 {
25791 dw_die_ref die = lookup_type_die (type);
25792 if (die)
25793 check_die (die);
25794 }
25795 }
25796 }
25797
25798 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25799 things which are local to the given block. */
25800
25801 static void
25802 gen_block_die (tree stmt, dw_die_ref context_die)
25803 {
25804 int must_output_die = 0;
25805 bool inlined_func;
25806
25807 /* Ignore blocks that are NULL. */
25808 if (stmt == NULL_TREE)
25809 return;
25810
25811 inlined_func = inlined_function_outer_scope_p (stmt);
25812
25813 /* If the block is one fragment of a non-contiguous block, do not
25814 process the variables, since they will have been done by the
25815 origin block. Do process subblocks. */
25816 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25817 {
25818 tree sub;
25819
25820 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25821 gen_block_die (sub, context_die);
25822
25823 return;
25824 }
25825
25826 /* Determine if we need to output any Dwarf DIEs at all to represent this
25827 block. */
25828 if (inlined_func)
25829 /* The outer scopes for inlinings *must* always be represented. We
25830 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25831 must_output_die = 1;
25832 else if (lookup_block_die (stmt))
25833 /* If we already have a DIE then it was filled early. Meanwhile
25834 we might have pruned all BLOCK_VARS as optimized out, but we
25835 still want to generate high/low PC attributes, so output it. */
25836 must_output_die = 1;
25837 else if (TREE_USED (stmt)
25838 || TREE_ASM_WRITTEN (stmt))
25839 {
25840 /* Determine if this block directly contains any "significant"
25841 local declarations which we will need to output DIEs for. */
25842 if (debug_info_level > DINFO_LEVEL_TERSE)
25843 {
25844 /* We are not in terse mode so any local declaration that
25845 is not ignored for debug purposes counts as being a
25846 "significant" one. */
25847 if (BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25848 must_output_die = 1;
25849 else
25850 for (tree var = BLOCK_VARS (stmt); var; var = DECL_CHAIN (var))
25851 if (!DECL_IGNORED_P (var))
25852 {
25853 must_output_die = 1;
25854 break;
25855 }
25856 }
25857 else if (!dwarf2out_ignore_block (stmt))
25858 must_output_die = 1;
25859 }
25860
25861 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25862 DIE for any block which contains no significant local declarations at
25863 all. Rather, in such cases we just call `decls_for_scope' so that any
25864 needed Dwarf info for any sub-blocks will get properly generated. Note
25865 that in terse mode, our definition of what constitutes a "significant"
25866 local declaration gets restricted to include only inlined function
25867 instances and local (nested) function definitions. */
25868 if (must_output_die)
25869 {
25870 if (inlined_func)
25871 gen_inlined_subroutine_die (stmt, context_die);
25872 else
25873 gen_lexical_block_die (stmt, context_die);
25874 }
25875 else
25876 decls_for_scope (stmt, context_die);
25877 }
25878
25879 /* Process variable DECL (or variable with origin ORIGIN) within
25880 block STMT and add it to CONTEXT_DIE. */
25881 static void
25882 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25883 {
25884 dw_die_ref die;
25885 tree decl_or_origin = decl ? decl : origin;
25886
25887 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25888 die = lookup_decl_die (decl_or_origin);
25889 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25890 {
25891 if (TYPE_DECL_IS_STUB (decl_or_origin))
25892 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25893 else
25894 die = lookup_decl_die (decl_or_origin);
25895 /* Avoid re-creating the DIE late if it was optimized as unused early. */
25896 if (! die && ! early_dwarf)
25897 return;
25898 }
25899 else
25900 die = NULL;
25901
25902 /* Avoid creating DIEs for local typedefs and concrete static variables that
25903 will only be pruned later. */
25904 if ((origin || decl_ultimate_origin (decl))
25905 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25906 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25907 {
25908 origin = decl_ultimate_origin (decl_or_origin);
25909 if (decl && VAR_P (decl) && die != NULL)
25910 {
25911 die = lookup_decl_die (origin);
25912 if (die != NULL)
25913 equate_decl_number_to_die (decl, die);
25914 }
25915 return;
25916 }
25917
25918 if (die != NULL && die->die_parent == NULL)
25919 add_child_die (context_die, die);
25920 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25921 {
25922 if (early_dwarf)
25923 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25924 stmt, context_die);
25925 }
25926 else
25927 {
25928 if (decl && DECL_P (decl))
25929 {
25930 die = lookup_decl_die (decl);
25931
25932 /* Early created DIEs do not have a parent as the decls refer
25933 to the function as DECL_CONTEXT rather than the BLOCK. */
25934 if (die && die->die_parent == NULL)
25935 {
25936 gcc_assert (in_lto_p);
25937 add_child_die (context_die, die);
25938 }
25939 }
25940
25941 gen_decl_die (decl, origin, NULL, context_die);
25942 }
25943 }
25944
25945 /* Generate all of the decls declared within a given scope and (recursively)
25946 all of its sub-blocks. */
25947
25948 static void
25949 decls_for_scope (tree stmt, dw_die_ref context_die, bool recurse)
25950 {
25951 tree decl;
25952 unsigned int i;
25953 tree subblocks;
25954
25955 /* Ignore NULL blocks. */
25956 if (stmt == NULL_TREE)
25957 return;
25958
25959 /* Output the DIEs to represent all of the data objects and typedefs
25960 declared directly within this block but not within any nested
25961 sub-blocks. Also, nested function and tag DIEs have been
25962 generated with a parent of NULL; fix that up now. We don't
25963 have to do this if we're at -g1. */
25964 if (debug_info_level > DINFO_LEVEL_TERSE)
25965 {
25966 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25967 process_scope_var (stmt, decl, NULL_TREE, context_die);
25968 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with an abstract
25969 origin; avoid doing this twice, as we have no good way to see
25970 if we've done it once already. */
25971 if (! early_dwarf)
25972 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25973 {
25974 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25975 if (decl == current_function_decl)
25976 /* Ignore declarations of the current function: although they
25977 are declarations, gen_subprogram_die would treat them as
25978 definitions again, because they are equal to
25979 current_function_decl, and endlessly recurse. */;
25980 else if (TREE_CODE (decl) == FUNCTION_DECL)
25981 process_scope_var (stmt, decl, NULL_TREE, context_die);
25982 else
25983 process_scope_var (stmt, NULL_TREE, decl, context_die);
25984 }
25985 }
25986
25987 /* Even if we're at -g1, we need to process the subblocks in order to get
25988 inlined call information. */
25989
25990 /* Output the DIEs to represent all sub-blocks (and the items declared
25991 therein) of this block. */
25992 if (recurse)
25993 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25994 subblocks != NULL;
25995 subblocks = BLOCK_CHAIN (subblocks))
25996 gen_block_die (subblocks, context_die);
25997 }
25998
25999 /* Is this a typedef we can avoid emitting? */
26000
26001 static bool
26002 is_redundant_typedef (const_tree decl)
26003 {
26004 if (TYPE_DECL_IS_STUB (decl))
26005 return true;
26006
26007 if (DECL_ARTIFICIAL (decl)
26008 && DECL_CONTEXT (decl)
26009 && is_tagged_type (DECL_CONTEXT (decl))
26010 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
26011 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
26012 /* Also ignore the artificial member typedef for the class name. */
26013 return true;
26014
26015 return false;
26016 }
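
/* For example (an illustrative sketch, not a construct taken from this
   file): for the C++ class

     struct S { };

   the front end creates an artificial member TYPE_DECL "S" inside
   "struct S" itself (the injected-class-name).  Such a decl satisfies
   the second test above, so no separate DW_TAG_typedef is emitted
   for it.  */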
26017
26018 /* Return TRUE if TYPE is a typedef that names a type for linkage
26019 purposes. This kind of typedefs is produced by the C++ FE for
26020 constructs like:
26021
26022 typedef struct {...} foo;
26023
26024 In that case, there is no typedef variant type produced for foo.
26025 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
26026 struct type. */
26027
26028 static bool
26029 is_naming_typedef_decl (const_tree decl)
26030 {
26031 if (decl == NULL_TREE
26032 || TREE_CODE (decl) != TYPE_DECL
26033 || DECL_NAMELESS (decl)
26034 || !is_tagged_type (TREE_TYPE (decl))
26035 || DECL_IS_BUILTIN (decl)
26036 || is_redundant_typedef (decl)
26037 /* It looks like Ada produces TYPE_DECLs that are very similar
26038 to C++ naming typedefs but that have different
26039 semantics. Let's be specific to C++ for now. */
26040 || !is_cxx (decl))
26041 return FALSE;
26042
26043 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
26044 && TYPE_NAME (TREE_TYPE (decl)) == decl
26045 && (TYPE_STUB_DECL (TREE_TYPE (decl))
26046 != TYPE_NAME (TREE_TYPE (decl))));
26047 }
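
/* An illustrative contrast (hedged sketch):

     typedef struct { int i; } foo;      (naming typedef: foo itself names
                                          the anonymous tag type)
     typedef struct bar { int i; } baz;  (ordinary typedef: baz has
                                          DECL_ORIGINAL_TYPE set)

   Only the first kind makes is_naming_typedef_decl return true, and only
   when the C++ front end is in use.  */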
26048
26049 /* Looks up the DIE for a context. */
26050
26051 static inline dw_die_ref
26052 lookup_context_die (tree context)
26053 {
26054 if (context)
26055 {
26056 /* Find die that represents this context. */
26057 if (TYPE_P (context))
26058 {
26059 context = TYPE_MAIN_VARIANT (context);
26060 dw_die_ref ctx = lookup_type_die (context);
26061 if (!ctx)
26062 return NULL;
26063 return strip_naming_typedef (context, ctx);
26064 }
26065 else
26066 return lookup_decl_die (context);
26067 }
26068 return comp_unit_die ();
26069 }
26070
26071 /* Returns the DIE for a context. */
26072
26073 static inline dw_die_ref
26074 get_context_die (tree context)
26075 {
26076 if (context)
26077 {
26078 /* Find die that represents this context. */
26079 if (TYPE_P (context))
26080 {
26081 context = TYPE_MAIN_VARIANT (context);
26082 return strip_naming_typedef (context, force_type_die (context));
26083 }
26084 else
26085 return force_decl_die (context);
26086 }
26087 return comp_unit_die ();
26088 }
26089
26090 /* Returns the DIE for decl. A DIE will always be returned. */
26091
26092 static dw_die_ref
26093 force_decl_die (tree decl)
26094 {
26095 dw_die_ref decl_die;
26096 unsigned saved_external_flag;
26097 tree save_fn = NULL_TREE;
26098 decl_die = lookup_decl_die (decl);
26099 if (!decl_die)
26100 {
26101 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
26102
26103 decl_die = lookup_decl_die (decl);
26104 if (decl_die)
26105 return decl_die;
26106
26107 switch (TREE_CODE (decl))
26108 {
26109 case FUNCTION_DECL:
26110 /* Clear current_function_decl, so that gen_subprogram_die thinks
26111 that this is a declaration. At this point, we just want to force
26112 declaration die. */
26113 save_fn = current_function_decl;
26114 current_function_decl = NULL_TREE;
26115 gen_subprogram_die (decl, context_die);
26116 current_function_decl = save_fn;
26117 break;
26118
26119 case VAR_DECL:
26120 /* Set external flag to force declaration die. Restore it after
26121 gen_decl_die() call. */
26122 saved_external_flag = DECL_EXTERNAL (decl);
26123 DECL_EXTERNAL (decl) = 1;
26124 gen_decl_die (decl, NULL, NULL, context_die);
26125 DECL_EXTERNAL (decl) = saved_external_flag;
26126 break;
26127
26128 case NAMESPACE_DECL:
26129 if (dwarf_version >= 3 || !dwarf_strict)
26130 dwarf2out_decl (decl);
26131 else
26132 /* DWARF2 has neither DW_TAG_module nor DW_TAG_namespace. */
26133 decl_die = comp_unit_die ();
26134 break;
26135
26136 case TRANSLATION_UNIT_DECL:
26137 decl_die = comp_unit_die ();
26138 break;
26139
26140 default:
26141 gcc_unreachable ();
26142 }
26143
26144 /* We should be able to find the DIE now. */
26145 if (!decl_die)
26146 decl_die = lookup_decl_die (decl);
26147 gcc_assert (decl_die);
26148 }
26149
26150 return decl_die;
26151 }
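
/* Usage sketch (hedged, hypothetical caller): to reference a namespace
   decl NS from another DIE even if NS has not been emitted yet:

     dw_die_ref ns_die = force_decl_die (NS);
     add_AT_die_ref (some_die, DW_AT_import, ns_die);
*/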
26152
26153 /* Returns the DIE for TYPE, which must not be a base type. A DIE is
26154 always returned. */
26155
26156 static dw_die_ref
26157 force_type_die (tree type)
26158 {
26159 dw_die_ref type_die;
26160
26161 type_die = lookup_type_die (type);
26162 if (!type_die)
26163 {
26164 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
26165
26166 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
26167 false, context_die);
26168 gcc_assert (type_die);
26169 }
26170 return type_die;
26171 }
26172
26173 /* Force out any required namespaces to be able to output DECL,
26174 and return the new context_die for it, if it's changed. */
26175
26176 static dw_die_ref
26177 setup_namespace_context (tree thing, dw_die_ref context_die)
26178 {
26179 tree context = (DECL_P (thing)
26180 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
26181 if (context && TREE_CODE (context) == NAMESPACE_DECL)
26182 /* Force out the namespace. */
26183 context_die = force_decl_die (context);
26184
26185 return context_die;
26186 }
26187
26188 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
26189 type) within its namespace, if appropriate.
26190
26191 For compatibility with older debuggers, namespace DIEs only contain
26192 declarations; all definitions are emitted at CU scope, with
26193 DW_AT_specification pointing to the declaration (like with class
26194 members). */
26195
26196 static dw_die_ref
26197 declare_in_namespace (tree thing, dw_die_ref context_die)
26198 {
26199 dw_die_ref ns_context;
26200
26201 if (debug_info_level <= DINFO_LEVEL_TERSE)
26202 return context_die;
26203
26204 /* External declarations in the local scope only need to be emitted
26205 once, not once in the namespace and once in the scope.
26206
26207 This avoids declaring the `extern' below in the
26208 namespace DIE as well as in the innermost scope:
26209
26210 namespace S
26211 {
26212 int i=5;
26213 int foo()
26214 {
26215 int i=8;
26216 extern int i;
26217 return i;
26218 }
26219 }
26220 */
26221 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
26222 return context_die;
26223
26224 /* If this decl is from an inlined function, then don't try to emit it in its
26225 namespace, as we will get confused. It would have already been emitted
26226 when the abstract instance of the inline function was emitted anyway. */
26227 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
26228 return context_die;
26229
26230 ns_context = setup_namespace_context (thing, context_die);
26231
26232 if (ns_context != context_die)
26233 {
26234 if (is_fortran () || is_dlang ())
26235 return ns_context;
26236 if (DECL_P (thing))
26237 gen_decl_die (thing, NULL, NULL, ns_context);
26238 else
26239 gen_type_die (thing, ns_context);
26240 }
26241 return context_die;
26242 }
26243
26244 /* Generate a DIE for a namespace or namespace alias. */
26245
26246 static void
26247 gen_namespace_die (tree decl, dw_die_ref context_die)
26248 {
26249 dw_die_ref namespace_die;
26250
26251 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
26252 they are an alias of. */
26253 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
26254 {
26255 /* Output a real namespace or module. */
26256 context_die = setup_namespace_context (decl, comp_unit_die ());
26257 namespace_die = new_die (is_fortran () || is_dlang ()
26258 ? DW_TAG_module : DW_TAG_namespace,
26259 context_die, decl);
26260 /* For Fortran modules defined in a different CU, don't add src coords. */
26261 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
26262 {
26263 const char *name = dwarf2_name (decl, 0);
26264 if (name)
26265 add_name_attribute (namespace_die, name);
26266 }
26267 else
26268 add_name_and_src_coords_attributes (namespace_die, decl);
26269 if (DECL_EXTERNAL (decl))
26270 add_AT_flag (namespace_die, DW_AT_declaration, 1);
26271 equate_decl_number_to_die (decl, namespace_die);
26272 }
26273 else
26274 {
26275 /* Output a namespace alias. */
26276
26277 /* Force out the namespace we are an alias of, if necessary. */
26278 dw_die_ref origin_die
26279 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
26280
26281 if (DECL_FILE_SCOPE_P (decl)
26282 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
26283 context_die = setup_namespace_context (decl, comp_unit_die ());
26284 /* Now create the namespace alias DIE. */
26285 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
26286 add_name_and_src_coords_attributes (namespace_die, decl);
26287 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
26288 equate_decl_number_to_die (decl, namespace_die);
26289 }
26290 if ((dwarf_version >= 5 || !dwarf_strict)
26291 && lang_hooks.decls.decl_dwarf_attribute (decl,
26292 DW_AT_export_symbols) == 1)
26293 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
26294
26295 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
26296 if (want_pubnames ())
26297 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
26298 }
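
/* For example (a hedged C++ sketch):

     namespace A { int x; }
     namespace B = A;

   "A" gets a DW_TAG_namespace DIE at CU scope, while the alias "B" gets
   a DW_TAG_imported_declaration whose DW_AT_import refers to A's DIE.  */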
26299
26300 /* Generate Dwarf debug information for a decl described by DECL.
26301 The return value is currently only meaningful for PARM_DECLs;
26302 for all other decls it is NULL.
26303
26304 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
26305 It can be NULL otherwise. */
26306
26307 static dw_die_ref
26308 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
26309 dw_die_ref context_die)
26310 {
26311 tree decl_or_origin = decl ? decl : origin;
26312 tree class_origin = NULL, ultimate_origin;
26313
26314 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
26315 return NULL;
26316
26317 switch (TREE_CODE (decl_or_origin))
26318 {
26319 case ERROR_MARK:
26320 break;
26321
26322 case CONST_DECL:
26323 if (!is_fortran () && !is_ada () && !is_dlang ())
26324 {
26325 /* The individual enumerators of an enum type get output when we output
26326 the Dwarf representation of the relevant enum type itself. */
26327 break;
26328 }
26329
26330 /* Emit its type. */
26331 gen_type_die (TREE_TYPE (decl), context_die);
26332
26333 /* And its containing namespace. */
26334 context_die = declare_in_namespace (decl, context_die);
26335
26336 gen_const_die (decl, context_die);
26337 break;
26338
26339 case FUNCTION_DECL:
26340 #if 0
26341 /* FIXME */
26342 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26343 on local redeclarations of global functions. That seems broken. */
26344 if (current_function_decl != decl)
26345 /* This is only a declaration. */;
26346 #endif
26347
26348 /* We should have abstract copies already and should not generate
26349 stray type DIEs in late LTO dumping. */
26350 if (! early_dwarf)
26351 ;
26352
26353 /* If we're emitting a clone, emit info for the abstract instance. */
26354 else if (origin || DECL_ORIGIN (decl) != decl)
26355 dwarf2out_abstract_function (origin
26356 ? DECL_ORIGIN (origin)
26357 : DECL_ABSTRACT_ORIGIN (decl));
26358
26359 /* If we're emitting a possibly inlined function emit it as
26360 abstract instance. */
26361 else if (cgraph_function_possibly_inlined_p (decl)
26362 && ! DECL_ABSTRACT_P (decl)
26363 && ! class_or_namespace_scope_p (context_die)
26364 /* dwarf2out_abstract_function won't emit a die if this is just
26365 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26366 that case, because that works only if we have a die. */
26367 && DECL_INITIAL (decl) != NULL_TREE)
26368 dwarf2out_abstract_function (decl);
26369
26370 /* Otherwise we're emitting the primary DIE for this decl. */
26371 else if (debug_info_level > DINFO_LEVEL_TERSE)
26372 {
26373 /* Before we describe the FUNCTION_DECL itself, make sure that we
26374 have its containing type. */
26375 if (!origin)
26376 origin = decl_class_context (decl);
26377 if (origin != NULL_TREE)
26378 gen_type_die (origin, context_die);
26379
26380 /* And its return type. */
26381 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26382
26383 /* And its virtual context. */
26384 if (DECL_VINDEX (decl) != NULL_TREE)
26385 gen_type_die (DECL_CONTEXT (decl), context_die);
26386
26387 /* Make sure we have a member DIE for decl. */
26388 if (origin != NULL_TREE)
26389 gen_type_die_for_member (origin, decl, context_die);
26390
26391 /* And its containing namespace. */
26392 context_die = declare_in_namespace (decl, context_die);
26393 }
26394
26395 /* Now output a DIE to represent the function itself. */
26396 if (decl)
26397 gen_subprogram_die (decl, context_die);
26398 break;
26399
26400 case TYPE_DECL:
26401 /* If we are in terse mode, don't generate any DIEs to represent any
26402 actual typedefs. */
26403 if (debug_info_level <= DINFO_LEVEL_TERSE)
26404 break;
26405
26406 /* In the special case of a TYPE_DECL node representing the declaration
26407 of some type tag, if the given TYPE_DECL is marked as having been
26408 instantiated from some other (original) TYPE_DECL node (e.g. one which
26409 was generated within the original definition of an inline function) we
26410 used to generate a special (abbreviated) DW_TAG_structure_type,
26411 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26412 should actually be referencing those DIEs, as variable DIEs with that
26413 type would already be emitted in the abstract origin, so it was always
26414 removed during unused type pruning. Don't add anything in this
26415 case. */
26416 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26417 break;
26418
26419 if (is_redundant_typedef (decl))
26420 gen_type_die (TREE_TYPE (decl), context_die);
26421 else
26422 /* Output a DIE to represent the typedef itself. */
26423 gen_typedef_die (decl, context_die);
26424 break;
26425
26426 case LABEL_DECL:
26427 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26428 gen_label_die (decl, context_die);
26429 break;
26430
26431 case VAR_DECL:
26432 case RESULT_DECL:
26433 /* If we are in terse mode, don't generate any DIEs to represent any
26434 variable declarations or definitions. */
26435 if (debug_info_level <= DINFO_LEVEL_TERSE)
26436 break;
26437
26438 /* Avoid generating stray type DIEs during late dwarf dumping.
26439 All types have been dumped early. */
26440 if (early_dwarf
26441 /* ??? But in LTRANS we cannot annotate early created variably
26442 modified type DIEs without copying them and adjusting all
26443 references to them. Dump them again as happens for inlining
26444 which copies both the decl and the types. */
26445 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26446 in VLA bound information for example. */
26447 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26448 current_function_decl)))
26449 {
26450 /* Output any DIEs that are needed to specify the type of this data
26451 object. */
26452 if (decl_by_reference_p (decl_or_origin))
26453 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26454 else
26455 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26456 }
26457
26458 if (early_dwarf)
26459 {
26460 /* And its containing type. */
26461 class_origin = decl_class_context (decl_or_origin);
26462 if (class_origin != NULL_TREE)
26463 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26464
26465 /* And its containing namespace. */
26466 context_die = declare_in_namespace (decl_or_origin, context_die);
26467 }
26468
26469 /* Now output the DIE to represent the data object itself. This gets
26470 complicated because of the possibility that the VAR_DECL really
26471 represents an inlined instance of a formal parameter for an inline
26472 function. */
26473 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26474 if (ultimate_origin != NULL_TREE
26475 && TREE_CODE (ultimate_origin) == PARM_DECL)
26476 gen_formal_parameter_die (decl, origin,
26477 true /* Emit name attribute. */,
26478 context_die);
26479 else
26480 gen_variable_die (decl, origin, context_die);
26481 break;
26482
26483 case FIELD_DECL:
26484 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26485 /* Ignore the nameless fields that are used to skip bits but handle C++
26486 anonymous unions and structs. */
26487 if (DECL_NAME (decl) != NULL_TREE
26488 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26489 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26490 {
26491 gen_type_die (member_declared_type (decl), context_die);
26492 gen_field_die (decl, ctx, context_die);
26493 }
26494 break;
26495
26496 case PARM_DECL:
26497 /* Avoid generating stray type DIEs during late dwarf dumping.
26498 All types have been dumped early. */
26499 if (early_dwarf
26500 /* ??? But in LTRANS we cannot annotate early created variably
26501 modified type DIEs without copying them and adjusting all
26502 references to them. Dump them again as happens for inlining
26503 which copies both the decl and the types. */
26504 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26505 in VLA bound information for example. */
26506 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26507 current_function_decl)))
26508 {
26509 if (DECL_BY_REFERENCE (decl_or_origin))
26510 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26511 else
26512 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26513 }
26514 return gen_formal_parameter_die (decl, origin,
26515 true /* Emit name attribute. */,
26516 context_die);
26517
26518 case NAMESPACE_DECL:
26519 if (dwarf_version >= 3 || !dwarf_strict)
26520 gen_namespace_die (decl, context_die);
26521 break;
26522
26523 case IMPORTED_DECL:
26524 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26525 DECL_CONTEXT (decl), context_die);
26526 break;
26527
26528 case NAMELIST_DECL:
26529 gen_namelist_decl (DECL_NAME (decl), context_die,
26530 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26531 break;
26532
26533 default:
26534 /* Probably some frontend-internal decl. Assume we don't care. */
26535 gcc_assert ((int) TREE_CODE (decl) > NUM_TREE_CODES);
26536 break;
26537 }
26538
26539 return NULL;
26540 }
26541 \f
26542 /* Output initial debug information for global DECL. Called at the
26543 end of the parsing process.
26544
26545 This is the initial debug generation process. As such, the DIEs
26546 generated may be incomplete. A later debug generation pass
26547 (dwarf2out_late_global_decl) will augment the information generated
26548 in this pass (e.g., with complete location info). */
26549
26550 static void
26551 dwarf2out_early_global_decl (tree decl)
26552 {
26553 set_early_dwarf s;
26554
26555 /* gen_decl_die() will set DECL_ABSTRACT because
26556 cgraph_function_possibly_inlined_p() returns true. This in
26557 turn will cause DW_AT_inline attributes to be set.
26558
26559 This happens because at early dwarf generation, there is no
26560 cgraph information, causing cgraph_function_possibly_inlined_p()
26561 to return true. Trick cgraph_function_possibly_inlined_p()
26562 while we generate dwarf early. */
26563 bool save = symtab->global_info_ready;
26564 symtab->global_info_ready = true;
26565
26566 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26567 other DECLs and they can point to template types or other things
26568 that dwarf2out can't handle when done via dwarf2out_decl. */
26569 if (TREE_CODE (decl) != TYPE_DECL
26570 && TREE_CODE (decl) != PARM_DECL)
26571 {
26572 if (TREE_CODE (decl) == FUNCTION_DECL)
26573 {
26574 tree save_fndecl = current_function_decl;
26575
26576 /* For nested functions, make sure we have DIEs for the parents first
26577 so that all nested DIEs are generated at the proper scope in the
26578 first shot. */
26579 tree context = decl_function_context (decl);
26580 if (context != NULL)
26581 {
26582 dw_die_ref context_die = lookup_decl_die (context);
26583 current_function_decl = context;
26584
26585 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26586 enough so that it lands in its own context. This avoids type
26587 pruning issues later on. */
26588 if (context_die == NULL || is_declaration_die (context_die))
26589 dwarf2out_early_global_decl (context);
26590 }
26591
26592 /* Emit an abstract origin of a function first. This happens
26593 with C++ constructor clones, for example, and makes
26594 dwarf2out_abstract_function happy, which requires the early
26595 DIE of the abstract instance to be present. */
26596 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26597 dw_die_ref origin_die;
26598 if (origin != NULL
26599 /* Do not emit the DIE multiple times but make sure to
26600 process it fully here in case we just saw a declaration. */
26601 && ((origin_die = lookup_decl_die (origin)) == NULL
26602 || is_declaration_die (origin_die)))
26603 {
26604 current_function_decl = origin;
26605 dwarf2out_decl (origin);
26606 }
26607
26608 /* Emit the DIE for decl but avoid doing that multiple times. */
26609 dw_die_ref old_die;
26610 if ((old_die = lookup_decl_die (decl)) == NULL
26611 || is_declaration_die (old_die))
26612 {
26613 current_function_decl = decl;
26614 dwarf2out_decl (decl);
26615 }
26616
26617 current_function_decl = save_fndecl;
26618 }
26619 else
26620 dwarf2out_decl (decl);
26621 }
26622 symtab->global_info_ready = save;
26623 }
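
/* For example (a hedged GNU C sketch):

     void outer (void)
     {
       void inner (void) { }
       inner ();
     }

   if early debug for "inner" is requested before "outer" has a DIE, the
   recursion above on decl_function_context (inner) == outer makes sure
   outer's DIE exists first, so inner's DIE is created in the proper
   scope.  */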
26624
26625 /* Return whether EXPR is an expression with the following pattern:
26626 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
26627
26628 static bool
26629 is_trivial_indirect_ref (tree expr)
26630 {
26631 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26632 return false;
26633
26634 tree nop = TREE_OPERAND (expr, 0);
26635 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26636 return false;
26637
26638 tree int_cst = TREE_OPERAND (nop, 0);
26639 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26640 }
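
/* For instance (a hedged sketch of a hypothetical DECL_VALUE_EXPR): the
   source-level expression

     *(volatile int *) 0x1234

   is represented as INDIRECT_REF (NOP_EXPR (INTEGER_CST 0x1234)) and
   therefore matches; such an expression needs no relocation against a
   text symbol, so it is safe to emit a location for it even in LTO.  */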
26641
26642 /* Output debug information for global decl DECL. Called from
26643 toplev.c after compilation proper has finished. */
26644
26645 static void
26646 dwarf2out_late_global_decl (tree decl)
26647 {
26648 /* Fill in any location information we were unable to determine
26649 on the first pass. */
26650 if (VAR_P (decl))
26651 {
26652 dw_die_ref die = lookup_decl_die (decl);
26653
26654 /* We may have to generate early debug late for LTO in case debug
26655 was not enabled at compile-time or the target doesn't support
26656 the LTO early debug scheme. */
26657 if (! die && in_lto_p)
26658 {
26659 dwarf2out_decl (decl);
26660 die = lookup_decl_die (decl);
26661 }
26662
26663 if (die)
26664 {
26665 /* We get called via the symtab code invoking late_global_decl
26666 for symbols that are optimized out.
26667
26668 Do not add locations for those, except if they have a
26669 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26670 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26671 INDIRECT_REF expression, as this could generate relocations to
26672 text symbols in LTO object files, which is invalid. */
26673 varpool_node *node = varpool_node::get (decl);
26674 if ((! node || ! node->definition)
26675 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26676 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26677 tree_add_const_value_attribute_for_decl (die, decl);
26678 else
26679 add_location_or_const_value_attribute (die, decl, false);
26680 }
26681 }
26682 }
26683
26684 /* Output debug information for type decl DECL. Called from toplev.c
26685 and from language front ends (to record built-in types). */
26686 static void
26687 dwarf2out_type_decl (tree decl, int local)
26688 {
26689 if (!local)
26690 {
26691 set_early_dwarf s;
26692 dwarf2out_decl (decl);
26693 }
26694 }
26695
26696 /* Output debug information for imported module or decl DECL.
26697 NAME is the non-NULL name in the lexical block if the decl has been renamed.
26698 LEXICAL_BLOCK is the lexical block (whose TREE_CODE is BLOCK)
26699 that DECL belongs to.
26700 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
26701 static void
26702 dwarf2out_imported_module_or_decl_1 (tree decl,
26703 tree name,
26704 tree lexical_block,
26705 dw_die_ref lexical_block_die)
26706 {
26707 expanded_location xloc;
26708 dw_die_ref imported_die = NULL;
26709 dw_die_ref at_import_die;
26710
26711 if (TREE_CODE (decl) == IMPORTED_DECL)
26712 {
26713 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26714 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26715 gcc_assert (decl);
26716 }
26717 else
26718 xloc = expand_location (input_location);
26719
26720 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
26721 {
26722 at_import_die = force_type_die (TREE_TYPE (decl));
26723 /* For namespace N { typedef void T; } using N::T; base_type_die
26724 returns NULL, but DW_TAG_imported_declaration requires
26725 the DW_AT_import tag. Force creation of DW_TAG_typedef. */
26726 if (!at_import_die)
26727 {
26728 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26729 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26730 at_import_die = lookup_type_die (TREE_TYPE (decl));
26731 gcc_assert (at_import_die);
26732 }
26733 }
26734 else
26735 {
26736 at_import_die = lookup_decl_die (decl);
26737 if (!at_import_die)
26738 {
26739 /* If we're trying to avoid duplicate debug info, we may not have
26740 emitted the member decl for this field. Emit it now. */
26741 if (TREE_CODE (decl) == FIELD_DECL)
26742 {
26743 tree type = DECL_CONTEXT (decl);
26744
26745 if (TYPE_CONTEXT (type)
26746 && TYPE_P (TYPE_CONTEXT (type))
26747 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26748 DINFO_USAGE_DIR_USE))
26749 return;
26750 gen_type_die_for_member (type, decl,
26751 get_context_die (TYPE_CONTEXT (type)));
26752 }
26753 if (TREE_CODE (decl) == NAMELIST_DECL)
26754 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26755 get_context_die (DECL_CONTEXT (decl)),
26756 NULL_TREE);
26757 else
26758 at_import_die = force_decl_die (decl);
26759 }
26760 }
26761
26762 if (TREE_CODE (decl) == NAMESPACE_DECL)
26763 {
26764 if (dwarf_version >= 3 || !dwarf_strict)
26765 imported_die = new_die (DW_TAG_imported_module,
26766 lexical_block_die,
26767 lexical_block);
26768 else
26769 return;
26770 }
26771 else
26772 imported_die = new_die (DW_TAG_imported_declaration,
26773 lexical_block_die,
26774 lexical_block);
26775
26776 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26777 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26778 if (debug_column_info && xloc.column)
26779 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26780 if (name)
26781 add_AT_string (imported_die, DW_AT_name,
26782 IDENTIFIER_POINTER (name));
26783 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26784 }
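
/* For instance (hedged sketch, mirroring the case mentioned above):

     namespace N { typedef int T; }
     using N::T;

   emits a DW_TAG_imported_declaration whose DW_AT_import points at the
   DW_TAG_typedef DIE created for N::T.  */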
26785
26786 /* Output debug information for imported module or decl DECL.
26787 NAME is the non-NULL name in the context if the decl has been renamed.
26788 CHILD is true if decl is one of the renamed decls emitted as part of
26789 importing a whole module.
26790 IMPLICIT is set if this hook is called for an implicit import
26791 such as inline namespace. */
26792
26793 static void
26794 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26795 bool child, bool implicit)
26796 {
26797 /* dw_die_ref at_import_die; */
26798 dw_die_ref scope_die;
26799
26800 if (debug_info_level <= DINFO_LEVEL_TERSE)
26801 return;
26802
26803 gcc_assert (decl);
26804
26805 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
26806 should be enough; for DWARF4 and older, even if we emit
26807 DW_AT_export_symbols as an extension, add the implicit DW_TAG_imported_module
26808 anyway for the benefit of consumers unaware of DW_AT_export_symbols. */
26809 if (implicit
26810 && dwarf_version >= 5
26811 && lang_hooks.decls.decl_dwarf_attribute (decl,
26812 DW_AT_export_symbols) == 1)
26813 return;
26814
26815 set_early_dwarf s;
26816
26817 /* To emit DW_TAG_imported_module or DW_TAG_imported_decl, we need two DIEs:
26818 the DIE for the imported decl itself (for the DW_AT_import reference)
26819 and the scope DIE it is imported into. */
26820
26821 /* Get the scope die for the decl context. Use comp_unit_die for a global
26822 module or decl. If no DIE is found for non-globals, force a new one. */
26823 if (context
26824 && TYPE_P (context)
26825 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26826 return;
26827
26828 scope_die = get_context_die (context);
26829
26830 if (child)
26831 {
26832 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26833 there is nothing we can do here. */
26834 if (dwarf_version < 3 && dwarf_strict)
26835 return;
26836
26837 gcc_assert (scope_die->die_child);
26838 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26839 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26840 scope_die = scope_die->die_child;
26841 }
26842
26843 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
26844 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26845 }
26846
26847 /* Output debug information for namelists. */
26848
26849 static dw_die_ref
26850 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26851 {
26852 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26853 tree value;
26854 unsigned i;
26855
26856 if (debug_info_level <= DINFO_LEVEL_TERSE)
26857 return NULL;
26858
26859 gcc_assert (scope_die != NULL);
26860 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26861 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26862
26863 /* If there are no item_decls, we have a nondefining namelist, e.g.
26864 with USE association; hence, set DW_AT_declaration. */
26865 if (item_decls == NULL_TREE)
26866 {
26867 add_AT_flag (nml_die, DW_AT_declaration, 1);
26868 return nml_die;
26869 }
26870
26871 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26872 {
26873 nml_item_ref_die = lookup_decl_die (value);
26874 if (!nml_item_ref_die)
26875 nml_item_ref_die = force_decl_die (value);
26876
26877 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26878 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26879 }
26880 return nml_die;
26881 }
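
/* For example (a hypothetical Fortran sketch):

     INTEGER :: i
     REAL    :: r
     NAMELIST /nml/ i, r

   produces a DW_TAG_namelist DIE named "nml" whose two
   DW_TAG_namelist_item children carry DW_AT_namelist_items references
   to the DIEs of I and R.  */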
26882
26883
26884 /* Write the debugging output for DECL. */
26885
26886 static void
26887 dwarf2out_decl (tree decl)
26888 {
26889 dw_die_ref context_die = comp_unit_die ();
26890
26891 switch (TREE_CODE (decl))
26892 {
26893 case ERROR_MARK:
26894 return;
26895
26896 case FUNCTION_DECL:
26897 /* If we're a nested function, initially use a parent of NULL; if we're
26898 a plain function, this will be fixed up in decls_for_scope. If
26899 we're a method, it will be ignored, since we already have a DIE.
26900 Avoid doing this late though since clones of class methods may
26901 otherwise end up in limbo and create type DIEs late. */
26902 if (early_dwarf
26903 && decl_function_context (decl)
26904 /* But if we're in terse mode, we don't care about scope. */
26905 && debug_info_level > DINFO_LEVEL_TERSE)
26906 context_die = NULL;
26907 break;
26908
26909 case VAR_DECL:
26910 /* For local statics lookup proper context die. */
26911 if (local_function_static (decl))
26912 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26913
26914 /* If we are in terse mode, don't generate any DIEs to represent any
26915 variable declarations or definitions. */
26916 if (debug_info_level <= DINFO_LEVEL_TERSE)
26917 return;
26918 break;
26919
26920 case CONST_DECL:
26921 if (debug_info_level <= DINFO_LEVEL_TERSE)
26922 return;
26923 if (!is_fortran () && !is_ada () && !is_dlang ())
26924 return;
26925 if (TREE_STATIC (decl) && decl_function_context (decl))
26926 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26927 break;
26928
26929 case NAMESPACE_DECL:
26930 case IMPORTED_DECL:
26931 if (debug_info_level <= DINFO_LEVEL_TERSE)
26932 return;
26933 if (lookup_decl_die (decl) != NULL)
26934 return;
26935 break;
26936
26937 case TYPE_DECL:
26938 /* Don't emit stubs for types unless they are needed by other DIEs. */
26939 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26940 return;
26941
26942 /* Don't bother trying to generate any DIEs to represent any of the
26943 normal built-in types for the language we are compiling. */
26944 if (DECL_IS_BUILTIN (decl))
26945 return;
26946
26947 /* If we are in terse mode, don't generate any DIEs for types. */
26948 if (debug_info_level <= DINFO_LEVEL_TERSE)
26949 return;
26950
26951 /* If we're a function-scope tag, initially use a parent of NULL;
26952 this will be fixed up in decls_for_scope. */
26953 if (decl_function_context (decl))
26954 context_die = NULL;
26955
26956 break;
26957
26958 case NAMELIST_DECL:
26959 break;
26960
26961 default:
26962 return;
26963 }
26964
26965 gen_decl_die (decl, NULL, NULL, context_die);
26966
26967 if (flag_checking)
26968 {
26969 dw_die_ref die = lookup_decl_die (decl);
26970 if (die)
26971 check_die (die);
26972 }
26973 }
26974
26975 /* Write the debugging output for DECL. */
26976
26977 static void
26978 dwarf2out_function_decl (tree decl)
26979 {
26980 dwarf2out_decl (decl);
26981 call_arg_locations = NULL;
26982 call_arg_loc_last = NULL;
26983 call_site_count = -1;
26984 tail_call_site_count = -1;
26985 decl_loc_table->empty ();
26986 cached_dw_loc_list_table->empty ();
26987 }
26988
26989 /* Output a marker (i.e. a label) for the beginning of the generated code for
26990 a lexical block. */
26991
26992 static void
26993 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26994 unsigned int blocknum)
26995 {
26996 switch_to_section (current_function_section ());
26997 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
26998 }
26999
27000 /* Output a marker (i.e. a label) for the end of the generated code for a
27001 lexical block. */
27002
27003 static void
27004 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
27005 {
27006 switch_to_section (current_function_section ());
27007 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
27008 }
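
/* E.g. for block number 2 these two hooks emit labels that look like
   (a sketch, assuming the default BLOCK_BEGIN_LABEL "LBB" and
   BLOCK_END_LABEL "LBE" definitions and ELF-style local labels):

     .LBB2:
       ... generated code for the block ...
     .LBE2:

   which the lexical block DIE later references for its low/high PC
   range.  */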
27009
27010 /* Returns nonzero if it is appropriate not to emit any debugging
27011 information for BLOCK, because it doesn't contain any instructions.
27012
27013 Don't allow this for blocks with nested functions or local classes
27014 as we would end up with orphans, and in the presence of scheduling
27015 we may end up calling them anyway. */
27016
27017 static bool
27018 dwarf2out_ignore_block (const_tree block)
27019 {
27020 tree decl;
27021 unsigned int i;
27022
27023 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
27024 if (TREE_CODE (decl) == FUNCTION_DECL
27025 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
27026 return 0;
27027 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
27028 {
27029 decl = BLOCK_NONLOCALIZED_VAR (block, i);
27030 if (TREE_CODE (decl) == FUNCTION_DECL
27031 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
27032 return 0;
27033 }
27034
27035 return 1;
27036 }
27037
27038 /* Hash table routines for file_hash. */
27039
27040 bool
27041 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
27042 {
27043 return filename_cmp (p1->filename, p2) == 0;
27044 }
27045
27046 hashval_t
27047 dwarf_file_hasher::hash (dwarf_file_data *p)
27048 {
27049 return htab_hash_string (p->filename);
27050 }
27051
27052 /* Lookup FILE_NAME (in the list of filenames that we know about here in
27053 dwarf2out.c) and return its "index". The index of each (known) filename is
27054 just a unique number which is associated with only that one filename. We
27055 need such numbers for the sake of generating labels (in the .debug_sfnames
27056 section) and references to those files numbers (in the .debug_srcinfo
27057 and .debug_macinfo sections). If the filename given as an argument is not
27058 found in our current list, add it to the list and assign it the next
27059 available unique index number. */
27060
27061 static struct dwarf_file_data *
27062 lookup_filename (const char *file_name)
27063 {
27064 struct dwarf_file_data * created;
27065
27066 if (!file_name)
27067 return NULL;
27068
27069 dwarf_file_data **slot
27070 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
27071 INSERT);
27072 if (*slot)
27073 return *slot;
27074
27075 created = ggc_alloc<dwarf_file_data> ();
27076 created->filename = file_name;
27077 created->emitted_number = 0;
27078 *slot = created;
27079 return created;
27080 }
27081
27082 /* If the assembler will construct the file table, then translate the compiler
27083 internal file table number into the assembler file table number, and emit
27084 a .file directive if we haven't already emitted one yet. The file table
27085 numbers are different because we prune debug info for unused variables and
27086 types, which may include filenames. */
27087
27088 static int
27089 maybe_emit_file (struct dwarf_file_data * fd)
27090 {
27091 if (! fd->emitted_number)
27092 {
27093 if (last_emitted_file)
27094 fd->emitted_number = last_emitted_file->emitted_number + 1;
27095 else
27096 fd->emitted_number = 1;
27097 last_emitted_file = fd;
27098
27099 if (output_asm_line_debug_info ())
27100 {
27101 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
27102 output_quoted_string (asm_out_file,
27103 remap_debug_filename (fd->filename));
27104 fputc ('\n', asm_out_file);
27105 }
27106 }
27107
27108 return fd->emitted_number;
27109 }
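
/* E.g. the first filename emitted this way produces roughly (a sketch;
   the exact quoting/escaping is done by output_quoted_string):

	.file 1 "foo.c"
*/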
27110
27111 /* Schedule generation of a DW_AT_const_value attribute to DIE.
27112 That generation should happen after function debug info has been
27113 generated. The value of the attribute is the constant value of ARG. */
27114
27115 static void
27116 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
27117 {
27118 die_arg_entry entry;
27119
27120 if (!die || !arg)
27121 return;
27122
27123 gcc_assert (early_dwarf);
27124
27125 if (!tmpl_value_parm_die_table)
27126 vec_alloc (tmpl_value_parm_die_table, 32);
27127
27128 entry.die = die;
27129 entry.arg = arg;
27130 vec_safe_push (tmpl_value_parm_die_table, entry);
27131 }
27132
27133 /* Return TRUE if T is an instance of a generic type, FALSE
27134 otherwise. */
27135
27136 static bool
27137 generic_type_p (tree t)
27138 {
27139 if (t == NULL_TREE || !TYPE_P (t))
27140 return false;
27141 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
27142 }
27143
27144 /* Schedule the generation of the generic parameter dies for the
27145 instance of generic type T. The proper generation itself is later
27146 done by gen_scheduled_generic_parms_dies. */
27147
27148 static void
27149 schedule_generic_params_dies_gen (tree t)
27150 {
27151 if (!generic_type_p (t))
27152 return;
27153
27154 gcc_assert (early_dwarf);
27155
27156 if (!generic_type_instances)
27157 vec_alloc (generic_type_instances, 256);
27158
27159 vec_safe_push (generic_type_instances, t);
27160 }
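
/* For example (a hedged C++ sketch): an instantiation such as

     template <typename T, int N> struct buf { T data[N]; };
     buf<char, 16> b;

   makes "buf<char, 16>" a generic type instance; it is queued here and
   its template parameter DIEs are added later by
   gen_scheduled_generic_parms_dies once the type is complete.  */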
27161
27162 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
27163 by append_entry_to_tmpl_value_parm_die_table. This function must
27164 be called after function DIEs have been generated. */
27165
27166 static void
27167 gen_remaining_tmpl_value_param_die_attribute (void)
27168 {
27169 if (tmpl_value_parm_die_table)
27170 {
27171 unsigned i, j;
27172 die_arg_entry *e;
27173
27174 /* We do this in two phases: first get the cases we can
27175 handle during early-finish, preserving those we cannot
27176 (containing symbolic constants where we don't yet know
27177 whether we are going to output the referenced symbols).
27178 For those we try again at late-finish. */
27179 j = 0;
27180 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
27181 {
27182 if (!e->die->removed
27183 && !tree_add_const_value_attribute (e->die, e->arg))
27184 {
27185 dw_loc_descr_ref loc = NULL;
27186 if (! early_dwarf
27187 && (dwarf_version >= 5 || !dwarf_strict))
27188 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
27189 if (loc)
27190 add_AT_loc (e->die, DW_AT_location, loc);
27191 else
27192 (*tmpl_value_parm_die_table)[j++] = *e;
27193 }
27194 }
27195 tmpl_value_parm_die_table->truncate (j);
27196 }
27197 }
27198
27199 /* Generate generic parameters DIEs for instances of generic types
27200 that have been previously scheduled by
27201 schedule_generic_params_dies_gen. This function must be called
27202 after all the types of the CU have been laid out. */
27203
27204 static void
27205 gen_scheduled_generic_parms_dies (void)
27206 {
27207 unsigned i;
27208 tree t;
27209
27210 if (!generic_type_instances)
27211 return;
27212
27213 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
27214 if (COMPLETE_TYPE_P (t))
27215 gen_generic_params_dies (t);
27216
27217 generic_type_instances = NULL;
27218 }
27219
27220
27221 /* Replace DW_AT_name for the decl with name. */
27222
27223 static void
27224 dwarf2out_set_name (tree decl, tree name)
27225 {
27226 dw_die_ref die;
27227 dw_attr_node *attr;
27228 const char *dname;
27229
27230 die = TYPE_SYMTAB_DIE (decl);
27231 if (!die)
27232 return;
27233
27234 dname = dwarf2_name (name, 0);
27235 if (!dname)
27236 return;
27237
27238 attr = get_AT (die, DW_AT_name);
27239 if (attr)
27240 {
27241 struct indirect_string_node *node;
27242
27243 node = find_AT_string (dname);
27244 /* Replace the string. */
27245 attr->dw_attr_val.v.val_str = node;
27246 }
27247
27248 else
27249 add_name_attribute (die, dname);
27250 }
27251
27252 /* True if before or during processing of the first function being emitted. */
27253 static bool in_first_function_p = true;
27254 /* True if loc_note during dwarf2out_var_location call might still be
27255 before first real instruction at address equal to .Ltext0. */
27256 static bool maybe_at_text_label_p = true;
27257 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
27258 static unsigned int first_loclabel_num_not_at_text_label;
27259
27260 /* Look ahead for a real insn, or for a begin stmt marker. */
27261
27262 static rtx_insn *
27263 dwarf2out_next_real_insn (rtx_insn *loc_note)
27264 {
27265 rtx_insn *next_real = NEXT_INSN (loc_note);
27266
27267 while (next_real)
27268 if (INSN_P (next_real))
27269 break;
27270 else
27271 next_real = NEXT_INSN (next_real);
27272
27273 return next_real;
27274 }
27275
27276 /* Called by the final INSN scan whenever we see a var location. We
27277 use it to drop labels in the right places, and throw the location in
27278 our lookup table. */
27279
27280 static void
27281 dwarf2out_var_location (rtx_insn *loc_note)
27282 {
27283 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
27284 struct var_loc_node *newloc;
27285 rtx_insn *next_real, *next_note;
27286 rtx_insn *call_insn = NULL;
27287 static const char *last_label;
27288 static const char *last_postcall_label;
27289 static bool last_in_cold_section_p;
27290 static rtx_insn *expected_next_loc_note;
27291 tree decl;
27292 bool var_loc_p;
27293 var_loc_view view = 0;
27294
27295 if (!NOTE_P (loc_note))
27296 {
27297 if (CALL_P (loc_note))
27298 {
27299 maybe_reset_location_view (loc_note, cur_line_info_table);
27300 call_site_count++;
27301 if (SIBLING_CALL_P (loc_note))
27302 tail_call_site_count++;
27303 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
27304 {
27305 call_insn = loc_note;
27306 loc_note = NULL;
27307 var_loc_p = false;
27308
27309 next_real = dwarf2out_next_real_insn (call_insn);
27310 next_note = NULL;
27311 cached_next_real_insn = NULL;
27312 goto create_label;
27313 }
27314 if (optimize == 0 && !flag_var_tracking)
27315 {
27316 /* When the var-tracking pass is not running, there is no note
27317 for indirect calls whose target is compile-time known. In this
27318 case, process such calls specifically so that we generate call
27319 sites for them anyway. */
27320 rtx x = PATTERN (loc_note);
27321 if (GET_CODE (x) == PARALLEL)
27322 x = XVECEXP (x, 0, 0);
27323 if (GET_CODE (x) == SET)
27324 x = SET_SRC (x);
27325 if (GET_CODE (x) == CALL)
27326 x = XEXP (x, 0);
27327 if (!MEM_P (x)
27328 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27329 || !SYMBOL_REF_DECL (XEXP (x, 0))
27330 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27331 != FUNCTION_DECL))
27332 {
27333 call_insn = loc_note;
27334 loc_note = NULL;
27335 var_loc_p = false;
27336
27337 next_real = dwarf2out_next_real_insn (call_insn);
27338 next_note = NULL;
27339 cached_next_real_insn = NULL;
27340 goto create_label;
27341 }
27342 }
27343 }
27344 else if (!debug_variable_location_views)
27345 gcc_unreachable ();
27346 else
27347 maybe_reset_location_view (loc_note, cur_line_info_table);
27348
27349 return;
27350 }
27351
27352 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27353 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27354 return;
27355
27356 /* Optimize processing a large consecutive sequence of location
27357 notes so we don't spend too much time in next_real_insn. If the
27358 next insn is another location note, remember the next_real_insn
27359 calculation for next time. */
27360 next_real = cached_next_real_insn;
27361 if (next_real)
27362 {
27363 if (expected_next_loc_note != loc_note)
27364 next_real = NULL;
27365 }
27366
27367 next_note = NEXT_INSN (loc_note);
27368 if (! next_note
27369 || next_note->deleted ()
27370 || ! NOTE_P (next_note)
27371 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
27372 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
27373 && NOTE_KIND (next_note) != NOTE_INSN_INLINE_ENTRY))
27374 next_note = NULL;
27375
27376 if (! next_real)
27377 next_real = dwarf2out_next_real_insn (loc_note);
27378
27379 if (next_note)
27380 {
27381 expected_next_loc_note = next_note;
27382 cached_next_real_insn = next_real;
27383 }
27384 else
27385 cached_next_real_insn = NULL;
27386
27387 /* If there are no instructions which would be affected by this note,
27388 don't do anything. */
27389 if (var_loc_p
27390 && next_real == NULL_RTX
27391 && !NOTE_DURING_CALL_P (loc_note))
27392 return;
27393
27394 create_label:
27395
27396 if (next_real == NULL_RTX)
27397 next_real = get_last_insn ();
27398
27399 /* If there were any real insns between the note we processed last time
27400 and this note (or if this is the first note), clear
27401 last_{,postcall_}label so that they are not reused this time. */
27402 if (last_var_location_insn == NULL_RTX
27403 || last_var_location_insn != next_real
27404 || last_in_cold_section_p != in_cold_section_p)
27405 {
27406 last_label = NULL;
27407 last_postcall_label = NULL;
27408 }
27409
27410 if (var_loc_p)
27411 {
27412 const char *label
27413 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27414 view = cur_line_info_table->view;
27415 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27416 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27417 if (newloc == NULL)
27418 return;
27419 }
27420 else
27421 {
27422 decl = NULL_TREE;
27423 newloc = NULL;
27424 }
27425
27426 /* If there were no real insns between the note we processed last time
27427 and this note, use the label we emitted last time. Otherwise
27428 create a new label and emit it. */
27429 if (last_label == NULL)
27430 {
27431 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27432 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27433 loclabel_num++;
27434 last_label = ggc_strdup (loclabel);
27435 /* See if loclabel might be equal to .Ltext0. If yes,
27436 bump first_loclabel_num_not_at_text_label. */
27437 if (!have_multiple_function_sections
27438 && in_first_function_p
27439 && maybe_at_text_label_p)
27440 {
27441 static rtx_insn *last_start;
27442 rtx_insn *insn;
27443 for (insn = loc_note; insn; insn = previous_insn (insn))
27444 if (insn == last_start)
27445 break;
27446 else if (!NONDEBUG_INSN_P (insn))
27447 continue;
27448 else
27449 {
27450 rtx body = PATTERN (insn);
27451 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27452 continue;
27453 /* Inline asm could occupy zero bytes. */
27454 else if (GET_CODE (body) == ASM_INPUT
27455 || asm_noperands (body) >= 0)
27456 continue;
27457 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27458 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27459 continue;
27460 #endif
27461 else
27462 {
27463 /* Assume insn has non-zero length. */
27464 maybe_at_text_label_p = false;
27465 break;
27466 }
27467 }
27468 if (maybe_at_text_label_p)
27469 {
27470 last_start = loc_note;
27471 first_loclabel_num_not_at_text_label = loclabel_num;
27472 }
27473 }
27474 }
27475
27476 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27477 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27478
27479 if (!var_loc_p)
27480 {
27481 struct call_arg_loc_node *ca_loc
27482 = ggc_cleared_alloc<call_arg_loc_node> ();
27483 rtx_insn *prev = call_insn;
27484
27485 ca_loc->call_arg_loc_note
27486 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27487 ca_loc->next = NULL;
27488 ca_loc->label = last_label;
27489 gcc_assert (prev
27490 && (CALL_P (prev)
27491 || (NONJUMP_INSN_P (prev)
27492 && GET_CODE (PATTERN (prev)) == SEQUENCE
27493 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27494 if (!CALL_P (prev))
27495 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27496 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27497
27498 /* Look for a SYMBOL_REF in the "prev" instruction. */
27499 rtx x = get_call_rtx_from (PATTERN (prev));
27500 if (x)
27501 {
27502 /* Try to get the call symbol, if any. */
27503 if (MEM_P (XEXP (x, 0)))
27504 x = XEXP (x, 0);
27505 /* First, look for a memory access to a symbol_ref. */
27506 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27507 && SYMBOL_REF_DECL (XEXP (x, 0))
27508 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27509 ca_loc->symbol_ref = XEXP (x, 0);
27510 /* Otherwise, look at a compile-time known user-level function
27511 declaration. */
27512 else if (MEM_P (x)
27513 && MEM_EXPR (x)
27514 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27515 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27516 }
27517
27518 ca_loc->block = insn_scope (prev);
27519 if (call_arg_locations)
27520 call_arg_loc_last->next = ca_loc;
27521 else
27522 call_arg_locations = ca_loc;
27523 call_arg_loc_last = ca_loc;
27524 }
27525 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27526 {
27527 newloc->label = last_label;
27528 newloc->view = view;
27529 }
27530 else
27531 {
27532 if (!last_postcall_label)
27533 {
27534 sprintf (loclabel, "%s-1", last_label);
27535 last_postcall_label = ggc_strdup (loclabel);
27536 }
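/* For example (purely illustrative): if last_label happens to be ".LVL5",
   the post-call label built above is the assembler expression ".LVL5-1",
   i.e. the address one byte before .LVL5; as the comment below explains,
   that address falls inside the call insn itself.  */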
27537 newloc->label = last_postcall_label;
27538 /* ??? This view is at last_label, not last_label-1, but we
27539 could only assume view at last_label-1 is zero if we could
27540 assume calls always have length greater than one. This is
27541 probably true in general, though there might be a rare
27542 exception to this rule, e.g. if a call insn is optimized out
27543 by target magic. Then, even the -1 in the label will be
27544 wrong, which might invalidate the range. Anyway, using view,
27545 though technically possibly incorrect, will work as far as
27546 ranges go: since L-1 is in the middle of the call insn,
27547 (L-1).0 and (L-1).V shouldn't make any difference, and having
27548 the loclist entry refer to the .loc entry might be useful, so
27549 leave it like this. */
27550 newloc->view = view;
27551 }
27552
27553 if (var_loc_p && flag_debug_asm)
27554 {
27555 const char *name, *sep, *patstr;
27556 if (decl && DECL_NAME (decl))
27557 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27558 else
27559 name = "";
27560 if (NOTE_VAR_LOCATION_LOC (loc_note))
27561 {
27562 sep = " => ";
27563 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27564 }
27565 else
27566 {
27567 sep = " ";
27568 patstr = "RESET";
27569 }
27570 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27571 name, sep, patstr);
27572 }
27573
27574 last_var_location_insn = next_real;
27575 last_in_cold_section_p = in_cold_section_p;
27576 }
27577
27578 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27579 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27580 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27581 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27582 BLOCK_FRAGMENT_ORIGIN links. */
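/* For instance, the flag_checking assertion in dwarf2out_inline_entry below,
   block_within_block_p (block, DECL_INITIAL (current_function_decl), true),
   verifies that BLOCK is still linked, in both directions, into the block
   tree of the current function.  */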
27583 static bool
27584 block_within_block_p (tree block, tree outer, bool bothways)
27585 {
27586 if (block == outer)
27587 return true;
27588
27589 /* Quickly check that OUTER is up BLOCK's supercontext chain. */
27590 for (tree context = BLOCK_SUPERCONTEXT (block);
27591 context != outer;
27592 context = BLOCK_SUPERCONTEXT (context))
27593 if (!context || TREE_CODE (context) != BLOCK)
27594 return false;
27595
27596 if (!bothways)
27597 return true;
27598
27599 /* Now check that each block is actually referenced by its
27600 parent. */
27601 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27602 context = BLOCK_SUPERCONTEXT (context))
27603 {
27604 if (BLOCK_FRAGMENT_ORIGIN (context))
27605 {
27606 gcc_assert (!BLOCK_SUBBLOCKS (context));
27607 context = BLOCK_FRAGMENT_ORIGIN (context);
27608 }
27609 for (tree sub = BLOCK_SUBBLOCKS (context);
27610 sub != block;
27611 sub = BLOCK_CHAIN (sub))
27612 if (!sub)
27613 return false;
27614 if (context == outer)
27615 return true;
27616 else
27617 block = context;
27618 }
27619 }
27620
27621 /* Called during final while assembling the marker of the entry point
27622 for an inlined function. */
27623
27624 static void
27625 dwarf2out_inline_entry (tree block)
27626 {
27627 gcc_assert (debug_inline_points);
27628
27629 /* If we can't represent it, don't bother. */
27630 if (!(dwarf_version >= 3 || !dwarf_strict))
27631 return;
27632
27633 gcc_assert (DECL_P (block_ultimate_origin (block)));
27634
27635 /* Sanity check the block tree. This would catch a case in which
27636 BLOCK got removed from the tree reachable from the outermost
27637 lexical block, but got retained in markers. It would still link
27638 back to its parents, but some ancestor would be missing a link
27639 down the path to the sub BLOCK. If the block got removed, its
27640 BLOCK_NUMBER will not be a usable value. */
27641 if (flag_checking)
27642 gcc_assert (block_within_block_p (block,
27643 DECL_INITIAL (current_function_decl),
27644 true));
27645
27646 gcc_assert (inlined_function_outer_scope_p (block));
27647 gcc_assert (!lookup_block_die (block));
27648
27649 if (BLOCK_FRAGMENT_ORIGIN (block))
27650 block = BLOCK_FRAGMENT_ORIGIN (block);
27651 /* Can the entry point ever not be at the beginning of an
27652 unfragmented lexical block? */
27653 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27654 || (cur_line_info_table
27655 && !ZERO_VIEW_P (cur_line_info_table->view))))
27656 return;
27657
27658 if (!inline_entry_data_table)
27659 inline_entry_data_table
27660 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27661
27662
27663 inline_entry_data **iedp
27664 = inline_entry_data_table->find_slot_with_hash (block,
27665 htab_hash_pointer (block),
27666 INSERT);
27667 if (*iedp)
27668 /* ??? Ideally, we'd record all entry points for the same inlined
27669 function (some may have been duplicated by e.g. unrolling), but
27670 we have no way to represent that ATM. */
27671 return;
27672
27673 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27674 ied->block = block;
27675 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27676 ied->label_num = BLOCK_NUMBER (block);
27677 if (cur_line_info_table)
27678 ied->view = cur_line_info_table->view;
27679
27680 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_INLINE_ENTRY_LABEL,
27681 BLOCK_NUMBER (block));
27682 }
27683
27684 /* Called from finalize_size_functions for size functions so that their body
27685 can be encoded in the debug info to describe the layout of variable-length
27686 structures. */
27687
27688 static void
27689 dwarf2out_size_function (tree decl)
27690 {
27691 function_to_dwarf_procedure (decl);
27692 }
27693
27694 /* Note in one location list that text section has changed. */
27695
27696 int
27697 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27698 {
27699 var_loc_list *list = *slot;
27700 if (list->first)
27701 list->last_before_switch
27702 = list->last->next ? list->last->next : list->last;
27703 return 1;
27704 }
27705
27706 /* Note in all location lists that text section has changed. */
27707
27708 static void
27709 var_location_switch_text_section (void)
27710 {
27711 if (decl_loc_table == NULL)
27712 return;
27713
27714 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27715 }
27716
27717 /* Create a new line number table. */
27718
27719 static dw_line_info_table *
27720 new_line_info_table (void)
27721 {
27722 dw_line_info_table *table;
27723
27724 table = ggc_cleared_alloc<dw_line_info_table> ();
27725 table->file_num = 1;
27726 table->line_num = 1;
27727 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
27728 FORCE_RESET_NEXT_VIEW (table->view);
27729 table->symviews_since_reset = 0;
27730
27731 return table;
27732 }
27733
27734 /* Look up the "current" table into which we emit line info, so
27735 that we don't have to do it for every source line. */
27736
27737 static void
27738 set_cur_line_info_table (section *sec)
27739 {
27740 dw_line_info_table *table;
27741
27742 if (sec == text_section)
27743 table = text_section_line_info;
27744 else if (sec == cold_text_section)
27745 {
27746 table = cold_text_section_line_info;
27747 if (!table)
27748 {
27749 cold_text_section_line_info = table = new_line_info_table ();
27750 table->end_label = cold_end_label;
27751 }
27752 }
27753 else
27754 {
27755 const char *end_label;
27756
27757 if (crtl->has_bb_partition)
27758 {
27759 if (in_cold_section_p)
27760 end_label = crtl->subsections.cold_section_end_label;
27761 else
27762 end_label = crtl->subsections.hot_section_end_label;
27763 }
27764 else
27765 {
27766 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27767 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27768 current_function_funcdef_no);
27769 end_label = ggc_strdup (label);
27770 }
27771
27772 table = new_line_info_table ();
27773 table->end_label = end_label;
27774
27775 vec_safe_push (separate_line_info, table);
27776 }
27777
27778 if (output_asm_line_debug_info ())
27779 table->is_stmt = (cur_line_info_table
27780 ? cur_line_info_table->is_stmt
27781 : DWARF_LINE_DEFAULT_IS_STMT_START);
27782 cur_line_info_table = table;
27783 }
27784
27785
27786 /* We need to reset the locations at the beginning of each
27787 function. We can't do this in the end_function hook, because the
27788 declarations that use the locations won't have been output when
27789 that hook is called. Also compute have_multiple_function_sections here. */
27790
27791 static void
27792 dwarf2out_begin_function (tree fun)
27793 {
27794 section *sec = function_section (fun);
27795
27796 if (sec != text_section)
27797 have_multiple_function_sections = true;
27798
27799 if (crtl->has_bb_partition && !cold_text_section)
27800 {
27801 gcc_assert (current_function_decl == fun);
27802 cold_text_section = unlikely_text_section ();
27803 switch_to_section (cold_text_section);
27804 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27805 switch_to_section (sec);
27806 }
27807
27808 dwarf2out_note_section_used ();
27809 call_site_count = 0;
27810 tail_call_site_count = 0;
27811
27812 set_cur_line_info_table (sec);
27813 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27814 }
27815
27816 /* Helper function of dwarf2out_end_function, called only after emitting
27817 the very first function into assembly. Check if some .debug_loc range
27818 might end with a .LVL* label that could be equal to .Ltext0.
27819 In that case we must force using absolute addresses in .debug_loc ranges,
27820 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27821 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27822 list terminator.
27823 Set have_multiple_function_sections to true in that case and
27824 terminate htab traversal. */
27825
27826 int
27827 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27828 {
27829 var_loc_list *entry = *slot;
27830 struct var_loc_node *node;
27831
27832 node = entry->first;
27833 if (node && node->next && node->next->label)
27834 {
27835 unsigned int i;
27836 const char *label = node->next->label;
27837 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27838
27839 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27840 {
27841 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27842 if (strcmp (label, loclabel) == 0)
27843 {
27844 have_multiple_function_sections = true;
27845 return 0;
27846 }
27847 }
27848 }
27849 return 1;
27850 }
27851
27852 /* Hook called after emitting a function into assembly.
27853 This does something only for the very first function emitted. */
27854
27855 static void
27856 dwarf2out_end_function (unsigned int)
27857 {
27858 if (in_first_function_p
27859 && !have_multiple_function_sections
27860 && first_loclabel_num_not_at_text_label
27861 && decl_loc_table)
27862 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27863 in_first_function_p = false;
27864 maybe_at_text_label_p = false;
27865 }
27866
27867 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27868 front-ends register a translation unit even before dwarf2out_init is
27869 called. */
27870 static tree main_translation_unit = NULL_TREE;
27871
27872 /* Hook called by front-ends after they have built their main translation
27873 unit. Associate comp_unit_die with UNIT. */
27874
27875 static void
27876 dwarf2out_register_main_translation_unit (tree unit)
27877 {
27878 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27879 && main_translation_unit == NULL_TREE);
27880 main_translation_unit = unit;
27881 /* If dwarf2out_init has not been called yet, it will perform the association
27882 itself looking at main_translation_unit. */
27883 if (decl_die_table != NULL)
27884 equate_decl_number_to_die (unit, comp_unit_die ());
27885 }
27886
27887 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27888
27889 static void
27890 push_dw_line_info_entry (dw_line_info_table *table,
27891 enum dw_line_info_opcode opcode, unsigned int val)
27892 {
27893 dw_line_info_entry e;
27894 e.opcode = opcode;
27895 e.val = val;
27896 vec_safe_push (table->entries, e);
27897 }
27898
27899 /* Output a label to mark the beginning of a source code line entry
27900 and record information relating to this source line, in
27901 'line_info_table' for later output of the .debug_line section. */
27902 /* ??? The discriminator parameter ought to be unsigned. */
27903
27904 static void
27905 dwarf2out_source_line (unsigned int line, unsigned int column,
27906 const char *filename,
27907 int discriminator, bool is_stmt)
27908 {
27909 unsigned int file_num;
27910 dw_line_info_table *table;
27911 static var_loc_view lvugid;
27912
27913 if (debug_info_level < DINFO_LEVEL_TERSE)
27914 return;
27915
27916 table = cur_line_info_table;
27917
27918 if (line == 0)
27919 {
27920 if (debug_variable_location_views
27921 && output_asm_line_debug_info ()
27922 && table && !RESETTING_VIEW_P (table->view))
27923 {
27924 /* If we're using the assembler to compute view numbers, we
27925 can't issue a .loc directive for line zero, so we can't
27926 get a view number at this point. We might attempt to
27927 compute it from the previous view, or equate it to a
27928 subsequent view (though it might not be there!), but
27929 since we're omitting the line number entry, we might as
27930 well omit the view number as well. That means pretending
27931 it's a view number zero, which might very well turn out
27932 to be correct. ??? Extend the assembler so that the
27933 compiler could emit e.g. ".locview .LVU#", to output a
27934 view without changing line number information. We'd then
27935 have to count it in symviews_since_reset; when it's omitted,
27936 it doesn't count. */
27937 if (!zero_view_p)
27938 zero_view_p = BITMAP_GGC_ALLOC ();
27939 bitmap_set_bit (zero_view_p, table->view);
27940 if (flag_debug_asm)
27941 {
27942 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27943 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27944 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27945 ASM_COMMENT_START);
27946 assemble_name (asm_out_file, label);
27947 putc ('\n', asm_out_file);
27948 }
27949 table->view = ++lvugid;
27950 }
27951 return;
27952 }
27953
27954 /* The discriminator column was added in DWARF 4. Simplify the code
27955 below by clearing the discriminator if we're not supposed to output it. */
27956 if (dwarf_version < 4 && dwarf_strict)
27957 discriminator = 0;
27958
27959 if (!debug_column_info)
27960 column = 0;
27961
27962 file_num = maybe_emit_file (lookup_filename (filename));
27963
27964 /* ??? TODO: Elide duplicate line number entries. Traditionally,
27965 the debugger has used the second (possibly duplicate) line number
27966 at the beginning of the function to mark the end of the prologue.
27967 We could eliminate any other duplicates within the function. For
27968 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
27969 that second line number entry. */
27970 /* Recall that this end-of-prologue indication is *not* the same thing
27971 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
27972 to which the hook corresponds, follows the last insn that was
27973 emitted by gen_prologue. What we need is to precede the first insn
27974 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
27975 insn that corresponds to something the user wrote. These may be
27976 very different locations once scheduling is enabled. */
27977
27978 if (0 && file_num == table->file_num
27979 && line == table->line_num
27980 && column == table->column_num
27981 && discriminator == table->discrim_num
27982 && is_stmt == table->is_stmt)
27983 return;
27984
27985 switch_to_section (current_function_section ());
27986
27987 /* If requested, emit something human-readable. */
27988 if (flag_debug_asm)
27989 {
27990 if (debug_column_info)
27991 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
27992 filename, line, column);
27993 else
27994 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
27995 filename, line);
27996 }
27997
27998 if (output_asm_line_debug_info ())
27999 {
28000 /* Emit the .loc directive understood by GNU as. */
28001 /* "\t.loc %u %u %u is_stmt %u discriminator %u",
28002 file_num, line, column, is_stmt, discriminator */
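/* A purely hypothetical example of what the code below emits for
   file 1, line 42, column 7, with location views enabled:
     .loc 1 42 7 is_stmt 0 discriminator 3 view .LVU5
   where the is_stmt, discriminator and view operands appear only when the
   corresponding conditions below hold.  */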
28003 fputs ("\t.loc ", asm_out_file);
28004 fprint_ul (asm_out_file, file_num);
28005 putc (' ', asm_out_file);
28006 fprint_ul (asm_out_file, line);
28007 putc (' ', asm_out_file);
28008 fprint_ul (asm_out_file, column);
28009
28010 if (is_stmt != table->is_stmt)
28011 {
28012 #if HAVE_GAS_LOC_STMT
28013 fputs (" is_stmt ", asm_out_file);
28014 putc (is_stmt ? '1' : '0', asm_out_file);
28015 #endif
28016 }
28017 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
28018 {
28019 gcc_assert (discriminator > 0);
28020 fputs (" discriminator ", asm_out_file);
28021 fprint_ul (asm_out_file, (unsigned long) discriminator);
28022 }
28023 if (debug_variable_location_views)
28024 {
28025 if (!RESETTING_VIEW_P (table->view))
28026 {
28027 table->symviews_since_reset++;
28028 if (table->symviews_since_reset > symview_upper_bound)
28029 symview_upper_bound = table->symviews_since_reset;
28030 /* When we're using the assembler to compute view
28031 numbers, we output symbolic labels after "view" in
28032 .loc directives, and the assembler will set them for
28033 us, so that we can refer to the view numbers in
28034 location lists. The only exceptions are when we know
28035 a view will be zero: "-0" is a forced reset, used
28036 e.g. in the beginning of functions, whereas "0" tells
28037 the assembler to check that there was a PC change
28038 since the previous view, in a way that implicitly
28039 resets the next view. */
28040 fputs (" view ", asm_out_file);
28041 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28042 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
28043 assemble_name (asm_out_file, label);
28044 table->view = ++lvugid;
28045 }
28046 else
28047 {
28048 table->symviews_since_reset = 0;
28049 if (FORCE_RESETTING_VIEW_P (table->view))
28050 fputs (" view -0", asm_out_file);
28051 else
28052 fputs (" view 0", asm_out_file);
28053 /* Mark the present view as a zero view. Earlier debug
28054 binds may have already added its id to loclists to be
28055 emitted later, so we can't reuse the id for something
28056 else. However, it's good to know whether a view is
28057 known to be zero, because then we may be able to
28058 optimize out locviews that are all zeros, so take
28059 note of it in zero_view_p. */
28060 if (!zero_view_p)
28061 zero_view_p = BITMAP_GGC_ALLOC ();
28062 bitmap_set_bit (zero_view_p, lvugid);
28063 table->view = ++lvugid;
28064 }
28065 }
28066 putc ('\n', asm_out_file);
28067 }
28068 else
28069 {
28070 unsigned int label_num = ++line_info_label_num;
28071
28072 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
28073
28074 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
28075 push_dw_line_info_entry (table, LI_adv_address, label_num);
28076 else
28077 push_dw_line_info_entry (table, LI_set_address, label_num);
28078 if (debug_variable_location_views)
28079 {
28080 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
28081 if (resetting)
28082 table->view = 0;
28083
28084 if (flag_debug_asm)
28085 fprintf (asm_out_file, "\t%s view %s%d\n",
28086 ASM_COMMENT_START,
28087 resetting ? "-" : "",
28088 table->view);
28089
28090 table->view++;
28091 }
28092 if (file_num != table->file_num)
28093 push_dw_line_info_entry (table, LI_set_file, file_num);
28094 if (discriminator != table->discrim_num)
28095 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
28096 if (is_stmt != table->is_stmt)
28097 push_dw_line_info_entry (table, LI_negate_stmt, 0);
28098 push_dw_line_info_entry (table, LI_set_line, line);
28099 if (debug_column_info)
28100 push_dw_line_info_entry (table, LI_set_column, column);
28101 }
28102
28103 table->file_num = file_num;
28104 table->line_num = line;
28105 table->column_num = column;
28106 table->discrim_num = discriminator;
28107 table->is_stmt = is_stmt;
28108 table->in_use = true;
28109 }
28110
28111 /* Record the beginning of a new source file. */
28112
28113 static void
28114 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
28115 {
28116 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28117 {
28118 macinfo_entry e;
28119 e.code = DW_MACINFO_start_file;
28120 e.lineno = lineno;
28121 e.info = ggc_strdup (filename);
28122 vec_safe_push (macinfo_table, e);
28123 }
28124 }
28125
28126 /* Record the end of a source file. */
28127
28128 static void
28129 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
28130 {
28131 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28132 {
28133 macinfo_entry e;
28134 e.code = DW_MACINFO_end_file;
28135 e.lineno = lineno;
28136 e.info = NULL;
28137 vec_safe_push (macinfo_table, e);
28138 }
28139 }
28140
28141 /* Called from debug_define in toplev.c. The `buffer' parameter contains
28142 the tail part of the directive line, i.e. the part past the initial
28143 whitespace, '#', whitespace, directive name and following whitespace. */
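/* Illustrative example (assumed preprocessor spelling): for the source line
   "#define PI 3.14" the buffer would contain "PI 3.14", i.e. the macro name
   followed by a space and its definition.  */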
28144
28145 static void
28146 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
28147 const char *buffer ATTRIBUTE_UNUSED)
28148 {
28149 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28150 {
28151 macinfo_entry e;
28152 /* Insert a dummy first entry to be able to optimize the whole
28153 predefined macro block using DW_MACRO_import. */
28154 if (macinfo_table->is_empty () && lineno <= 1)
28155 {
28156 e.code = 0;
28157 e.lineno = 0;
28158 e.info = NULL;
28159 vec_safe_push (macinfo_table, e);
28160 }
28161 e.code = DW_MACINFO_define;
28162 e.lineno = lineno;
28163 e.info = ggc_strdup (buffer);
28164 vec_safe_push (macinfo_table, e);
28165 }
28166 }
28167
28168 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
28169 the tail part of the directive line, i.e. the part past the initial
28170 whitespace, '#', whitespace, directive name and following whitespace. */
28171
28172 static void
28173 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
28174 const char *buffer ATTRIBUTE_UNUSED)
28175 {
28176 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28177 {
28178 macinfo_entry e;
28179 /* Insert a dummy first entry to be able to optimize the whole
28180 predefined macro block using DW_MACRO_import. */
28181 if (macinfo_table->is_empty () && lineno <= 1)
28182 {
28183 e.code = 0;
28184 e.lineno = 0;
28185 e.info = NULL;
28186 vec_safe_push (macinfo_table, e);
28187 }
28188 e.code = DW_MACINFO_undef;
28189 e.lineno = lineno;
28190 e.info = ggc_strdup (buffer);
28191 vec_safe_push (macinfo_table, e);
28192 }
28193 }
28194
28195 /* Helpers to manipulate the hash table of macinfo entries. */
28196
28197 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
28198 {
28199 static inline hashval_t hash (const macinfo_entry *);
28200 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
28201 };
28202
28203 inline hashval_t
28204 macinfo_entry_hasher::hash (const macinfo_entry *entry)
28205 {
28206 return htab_hash_string (entry->info);
28207 }
28208
28209 inline bool
28210 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
28211 const macinfo_entry *entry2)
28212 {
28213 return !strcmp (entry1->info, entry2->info);
28214 }
28215
28216 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
28217
28218 /* Output a single .debug_macinfo entry. */
28219
28220 static void
28221 output_macinfo_op (macinfo_entry *ref)
28222 {
28223 int file_num;
28224 size_t len;
28225 struct indirect_string_node *node;
28226 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28227 struct dwarf_file_data *fd;
28228
28229 switch (ref->code)
28230 {
28231 case DW_MACINFO_start_file:
28232 fd = lookup_filename (ref->info);
28233 file_num = maybe_emit_file (fd);
28234 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
28235 dw2_asm_output_data_uleb128 (ref->lineno,
28236 "Included from line number %lu",
28237 (unsigned long) ref->lineno);
28238 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
28239 break;
28240 case DW_MACINFO_end_file:
28241 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
28242 break;
28243 case DW_MACINFO_define:
28244 case DW_MACINFO_undef:
28245 len = strlen (ref->info) + 1;
28246 if (!dwarf_strict
28247 && len > DWARF_OFFSET_SIZE
28248 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28249 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28250 {
28251 ref->code = ref->code == DW_MACINFO_define
28252 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
28253 output_macinfo_op (ref);
28254 return;
28255 }
28256 dw2_asm_output_data (1, ref->code,
28257 ref->code == DW_MACINFO_define
28258 ? "Define macro" : "Undefine macro");
28259 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28260 (unsigned long) ref->lineno);
28261 dw2_asm_output_nstring (ref->info, -1, "The macro");
28262 break;
28263 case DW_MACRO_define_strp:
28264 case DW_MACRO_undef_strp:
28265 /* NB: dwarf2out_finish performs:
28266 1. save_macinfo_strings
28267 2. hash table traverse of index_string
28268 3. output_macinfo -> output_macinfo_op
28269 4. output_indirect_strings
28270 -> hash table traverse of output_index_string
28271
28272 When output_macinfo_op is called, all index strings have already
28273 been added to the hash table by save_macinfo_strings, so we must not
28274 pass INSERT to find_slot_with_hash: INSERT may expand the hash table
28275 even when no insertion is needed, which would change the traversal
28276 order between index_string and output_index_string. */
28277 node = find_AT_string (ref->info, NO_INSERT);
28278 gcc_assert (node
28279 && (node->form == DW_FORM_strp
28280 || node->form == dwarf_FORM (DW_FORM_strx)));
28281 dw2_asm_output_data (1, ref->code,
28282 ref->code == DW_MACRO_define_strp
28283 ? "Define macro strp"
28284 : "Undefine macro strp");
28285 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28286 (unsigned long) ref->lineno);
28287 if (node->form == DW_FORM_strp)
28288 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
28289 debug_str_section, "The macro: \"%s\"",
28290 ref->info);
28291 else
28292 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
28293 ref->info);
28294 break;
28295 case DW_MACRO_import:
28296 dw2_asm_output_data (1, ref->code, "Import");
28297 ASM_GENERATE_INTERNAL_LABEL (label,
28298 DEBUG_MACRO_SECTION_LABEL,
28299 ref->lineno + macinfo_label_base);
28300 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
28301 break;
28302 default:
28303 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
28304 ASM_COMMENT_START, (unsigned long) ref->code);
28305 break;
28306 }
28307 }
28308
28309 /* Attempt to make a sequence of define/undef macinfo ops shareable with
28310 other compilation units' .debug_macinfo sections. IDX is the index of
28311 the first define/undef op; return the number of ops that should be
28312 emitted in a comdat .debug_macinfo section and emit
28313 a DW_MACRO_import entry referencing it.
28314 If the define/undef entries should be emitted normally, return 0. */
28315
28316 static unsigned
28317 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
28318 macinfo_hash_type **macinfo_htab)
28319 {
28320 macinfo_entry *first, *second, *cur, *inc;
28321 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
28322 unsigned char checksum[16];
28323 struct md5_ctx ctx;
28324 char *grp_name, *tail;
28325 const char *base;
28326 unsigned int i, count, encoded_filename_len, linebuf_len;
28327 macinfo_entry **slot;
28328
28329 first = &(*macinfo_table)[idx];
28330 second = &(*macinfo_table)[idx + 1];
28331
28332 /* Optimize only if there are at least two consecutive define/undef ops,
28333 and either all of them are before the first DW_MACINFO_start_file
28334 with lineno {0,1} (i.e. the predefined macro block), or all of them are
28335 in some included header file. */
28336 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
28337 return 0;
28338 if (vec_safe_is_empty (files))
28339 {
28340 if (first->lineno > 1 || second->lineno > 1)
28341 return 0;
28342 }
28343 else if (first->lineno == 0)
28344 return 0;
28345
28346 /* Find the last define/undef entry that can be grouped together
28347 with the first one, and at the same time compute the MD5 checksum of
28348 their codes, line numbers and strings. */
28349 md5_init_ctx (&ctx);
28350 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28351 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28352 break;
28353 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28354 break;
28355 else
28356 {
28357 unsigned char code = cur->code;
28358 md5_process_bytes (&code, 1, &ctx);
28359 checksum_uleb128 (cur->lineno, &ctx);
28360 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28361 }
28362 md5_finish_ctx (&ctx, checksum);
28363 count = i - idx;
28364
28365 /* From the containing include filename (if any) pick up just
28366 usable characters from its basename. */
28367 if (vec_safe_is_empty (files))
28368 base = "";
28369 else
28370 base = lbasename (files->last ().info);
28371 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28372 if (ISIDNUM (base[i]) || base[i] == '.')
28373 encoded_filename_len++;
28374 /* Count . at the end. */
28375 if (encoded_filename_len)
28376 encoded_filename_len++;
28377
28378 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28379 linebuf_len = strlen (linebuf);
28380
28381 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
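/* A hypothetical instance: a define/undef run coming from a header named
   my_header.h and starting at line 7, with DWARF_OFFSET_SIZE == 4, yields
   a group name of the form "wm4.my_header.h.7." followed by the 32 hex
   digits of the MD5 checksum computed above.  */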
28382 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28383 + 16 * 2 + 1);
28384 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
28385 tail = grp_name + 4;
28386 if (encoded_filename_len)
28387 {
28388 for (i = 0; base[i]; i++)
28389 if (ISIDNUM (base[i]) || base[i] == '.')
28390 *tail++ = base[i];
28391 *tail++ = '.';
28392 }
28393 memcpy (tail, linebuf, linebuf_len);
28394 tail += linebuf_len;
28395 *tail++ = '.';
28396 for (i = 0; i < 16; i++)
28397 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
28398
28399 /* Construct a macinfo_entry for DW_MACRO_import
28400 in the empty vector entry before the first define/undef. */
28401 inc = &(*macinfo_table)[idx - 1];
28402 inc->code = DW_MACRO_import;
28403 inc->lineno = 0;
28404 inc->info = ggc_strdup (grp_name);
28405 if (!*macinfo_htab)
28406 *macinfo_htab = new macinfo_hash_type (10);
28407 /* Avoid emitting duplicates. */
28408 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28409 if (*slot != NULL)
28410 {
28411 inc->code = 0;
28412 inc->info = NULL;
28413 /* If such an entry has been used before, just emit
28414 a DW_MACRO_import op. */
28415 inc = *slot;
28416 output_macinfo_op (inc);
28417 /* And clear all macinfo_entry in the range to avoid emitting them
28418 in the second pass. */
28419 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28420 {
28421 cur->code = 0;
28422 cur->info = NULL;
28423 }
28424 }
28425 else
28426 {
28427 *slot = inc;
28428 inc->lineno = (*macinfo_htab)->elements ();
28429 output_macinfo_op (inc);
28430 }
28431 return count;
28432 }
28433
28434 /* Save any strings needed by the macinfo table in the debug str
28435 table. All strings must be collected into the table by the time
28436 index_string is called. */
28437
28438 static void
28439 save_macinfo_strings (void)
28440 {
28441 unsigned len;
28442 unsigned i;
28443 macinfo_entry *ref;
28444
28445 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28446 {
28447 switch (ref->code)
28448 {
28449 /* Match the logic in output_macinfo_op to decide on
28450 indirect strings. */
28451 case DW_MACINFO_define:
28452 case DW_MACINFO_undef:
28453 len = strlen (ref->info) + 1;
28454 if (!dwarf_strict
28455 && len > DWARF_OFFSET_SIZE
28456 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28457 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28458 set_indirect_string (find_AT_string (ref->info));
28459 break;
28460 case DW_MACINFO_start_file:
28461 /* -gsplit-dwarf -g3 will also output filename as indirect
28462 string. */
28463 if (!dwarf_split_debug_info)
28464 break;
28465 /* Fall through. */
28466 case DW_MACRO_define_strp:
28467 case DW_MACRO_undef_strp:
28468 set_indirect_string (find_AT_string (ref->info));
28469 break;
28470 default:
28471 break;
28472 }
28473 }
28474 }
28475
28476 /* Output macinfo section(s). */
28477
28478 static void
28479 output_macinfo (const char *debug_line_label, bool early_lto_debug)
28480 {
28481 unsigned i;
28482 unsigned long length = vec_safe_length (macinfo_table);
28483 macinfo_entry *ref;
28484 vec<macinfo_entry, va_gc> *files = NULL;
28485 macinfo_hash_type *macinfo_htab = NULL;
28486 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28487
28488 if (! length)
28489 return;
28490
28491 /* output_macinfo* uses these interchangeably. */
28492 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28493 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28494 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28495 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28496
28497 /* AIX Assembler inserts the length, so adjust the reference to match the
28498 offset expected by debuggers. */
28499 strcpy (dl_section_ref, debug_line_label);
28500 if (XCOFF_DEBUGGING_INFO)
28501 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28502
28503 /* For .debug_macro emit the section header. */
28504 if (!dwarf_strict || dwarf_version >= 5)
28505 {
28506 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28507 "DWARF macro version number");
28508 if (DWARF_OFFSET_SIZE == 8)
28509 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28510 else
28511 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
28512 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
28513 debug_line_section, NULL);
28514 }
28515
28516 /* This first loop emits the primary .debug_macinfo section,
28517 clearing each macinfo_entry after its op has been emitted.
28518 If a longer range of define/undef ops can be optimized using
28519 DW_MACRO_import, the DW_MACRO_import op is emitted and kept in
28520 the vector entry before the first define/undef in the range, and the
28521 whole range of define/undef ops is kept but not emitted. */
28522 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28523 {
28524 switch (ref->code)
28525 {
28526 case DW_MACINFO_start_file:
28527 vec_safe_push (files, *ref);
28528 break;
28529 case DW_MACINFO_end_file:
28530 if (!vec_safe_is_empty (files))
28531 files->pop ();
28532 break;
28533 case DW_MACINFO_define:
28534 case DW_MACINFO_undef:
28535 if ((!dwarf_strict || dwarf_version >= 5)
28536 && HAVE_COMDAT_GROUP
28537 && vec_safe_length (files) != 1
28538 && i > 0
28539 && i + 1 < length
28540 && (*macinfo_table)[i - 1].code == 0)
28541 {
28542 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28543 if (count)
28544 {
28545 i += count - 1;
28546 continue;
28547 }
28548 }
28549 break;
28550 case 0:
28551 /* A dummy entry may be inserted at the beginning to be able
28552 to optimize the whole block of predefined macros. */
28553 if (i == 0)
28554 continue;
28555 default:
28556 break;
28557 }
28558 output_macinfo_op (ref);
28559 ref->info = NULL;
28560 ref->code = 0;
28561 }
28562
28563 if (!macinfo_htab)
28564 return;
28565
28566 /* Save the number of transparent includes so we can adjust the
28567 label number for the fat LTO object DWARF. */
28568 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28569
28570 delete macinfo_htab;
28571 macinfo_htab = NULL;
28572
28573 /* If any DW_MACRO_import ops were used, then at each such entry
28574 terminate the current chain, switch to a new comdat .debug_macinfo
28575 section and emit the define/undef entries within it. */
28576 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28577 switch (ref->code)
28578 {
28579 case 0:
28580 continue;
28581 case DW_MACRO_import:
28582 {
28583 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28584 tree comdat_key = get_identifier (ref->info);
28585 /* Terminate the previous .debug_macinfo section. */
28586 dw2_asm_output_data (1, 0, "End compilation unit");
28587 targetm.asm_out.named_section (debug_macinfo_section_name,
28588 SECTION_DEBUG
28589 | SECTION_LINKONCE
28590 | (early_lto_debug
28591 ? SECTION_EXCLUDE : 0),
28592 comdat_key);
28593 ASM_GENERATE_INTERNAL_LABEL (label,
28594 DEBUG_MACRO_SECTION_LABEL,
28595 ref->lineno + macinfo_label_base);
28596 ASM_OUTPUT_LABEL (asm_out_file, label);
28597 ref->code = 0;
28598 ref->info = NULL;
28599 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28600 "DWARF macro version number");
28601 if (DWARF_OFFSET_SIZE == 8)
28602 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28603 else
28604 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28605 }
28606 break;
28607 case DW_MACINFO_define:
28608 case DW_MACINFO_undef:
28609 output_macinfo_op (ref);
28610 ref->code = 0;
28611 ref->info = NULL;
28612 break;
28613 default:
28614 gcc_unreachable ();
28615 }
28616
28617 macinfo_label_base += macinfo_label_base_adj;
28618 }
28619
28620 /* Initialize the various sections and labels for dwarf output, using the
28621 early LTO debug variants if EARLY_LTO_DEBUG. Returns the generation
28622 (the zero-based number of times this function has been called). */
28623
28624 static unsigned
28625 init_sections_and_labels (bool early_lto_debug)
28626 {
28627 /* As we may get called multiple times have a generation count for
28628 labels. */
28629 static unsigned generation = 0;
28630
28631 if (early_lto_debug)
28632 {
28633 if (!dwarf_split_debug_info)
28634 {
28635 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28636 SECTION_DEBUG | SECTION_EXCLUDE,
28637 NULL);
28638 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28639 SECTION_DEBUG | SECTION_EXCLUDE,
28640 NULL);
28641 debug_macinfo_section_name
28642 = ((dwarf_strict && dwarf_version < 5)
28643 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28644 debug_macinfo_section = get_section (debug_macinfo_section_name,
28645 SECTION_DEBUG
28646 | SECTION_EXCLUDE, NULL);
28647 }
28648 else
28649 {
28650 /* ??? Which of the following do we need early? */
28651 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28652 SECTION_DEBUG | SECTION_EXCLUDE,
28653 NULL);
28654 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28655 SECTION_DEBUG | SECTION_EXCLUDE,
28656 NULL);
28657 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28658 SECTION_DEBUG
28659 | SECTION_EXCLUDE, NULL);
28660 debug_skeleton_abbrev_section
28661 = get_section (DEBUG_LTO_ABBREV_SECTION,
28662 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28663 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28664 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28665 generation);
28666
28667 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28668 stay in the main .o, but the skeleton_line goes into the split
28669 off dwo. */
28670 debug_skeleton_line_section
28671 = get_section (DEBUG_LTO_LINE_SECTION,
28672 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28673 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28674 DEBUG_SKELETON_LINE_SECTION_LABEL,
28675 generation);
28676 debug_str_offsets_section
28677 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28678 SECTION_DEBUG | SECTION_EXCLUDE,
28679 NULL);
28680 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28681 DEBUG_SKELETON_INFO_SECTION_LABEL,
28682 generation);
28683 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28684 DEBUG_STR_DWO_SECTION_FLAGS,
28685 NULL);
28686 debug_macinfo_section_name
28687 = ((dwarf_strict && dwarf_version < 5)
28688 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28689 debug_macinfo_section = get_section (debug_macinfo_section_name,
28690 SECTION_DEBUG | SECTION_EXCLUDE,
28691 NULL);
28692 }
28693 /* For macro info and the file table we have to refer to a
28694 debug_line section. */
28695 debug_line_section = get_section (DEBUG_LTO_LINE_SECTION,
28696 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28697 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28698 DEBUG_LINE_SECTION_LABEL, generation);
28699
28700 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28701 DEBUG_STR_SECTION_FLAGS
28702 | SECTION_EXCLUDE, NULL);
28703 if (!dwarf_split_debug_info)
28704 debug_line_str_section
28705 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28706 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28707 }
28708 else
28709 {
28710 if (!dwarf_split_debug_info)
28711 {
28712 debug_info_section = get_section (DEBUG_INFO_SECTION,
28713 SECTION_DEBUG, NULL);
28714 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28715 SECTION_DEBUG, NULL);
28716 debug_loc_section = get_section (dwarf_version >= 5
28717 ? DEBUG_LOCLISTS_SECTION
28718 : DEBUG_LOC_SECTION,
28719 SECTION_DEBUG, NULL);
28720 debug_macinfo_section_name
28721 = ((dwarf_strict && dwarf_version < 5)
28722 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28723 debug_macinfo_section = get_section (debug_macinfo_section_name,
28724 SECTION_DEBUG, NULL);
28725 }
28726 else
28727 {
28728 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28729 SECTION_DEBUG | SECTION_EXCLUDE,
28730 NULL);
28731 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28732 SECTION_DEBUG | SECTION_EXCLUDE,
28733 NULL);
28734 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28735 SECTION_DEBUG, NULL);
28736 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28737 SECTION_DEBUG, NULL);
28738 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28739 SECTION_DEBUG, NULL);
28740 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28741 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28742 generation);
28743
28744 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28745 stay in the main .o, but the skeleton_line goes into the
28746 split off dwo. */
28747 debug_skeleton_line_section
28748 = get_section (DEBUG_DWO_LINE_SECTION,
28749 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28750 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28751 DEBUG_SKELETON_LINE_SECTION_LABEL,
28752 generation);
28753 debug_str_offsets_section
28754 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28755 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28756 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28757 DEBUG_SKELETON_INFO_SECTION_LABEL,
28758 generation);
28759 debug_loc_section = get_section (dwarf_version >= 5
28760 ? DEBUG_DWO_LOCLISTS_SECTION
28761 : DEBUG_DWO_LOC_SECTION,
28762 SECTION_DEBUG | SECTION_EXCLUDE,
28763 NULL);
28764 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28765 DEBUG_STR_DWO_SECTION_FLAGS,
28766 NULL);
28767 debug_macinfo_section_name
28768 = ((dwarf_strict && dwarf_version < 5)
28769 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28770 debug_macinfo_section = get_section (debug_macinfo_section_name,
28771 SECTION_DEBUG | SECTION_EXCLUDE,
28772 NULL);
28773 }
28774 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28775 SECTION_DEBUG, NULL);
28776 debug_line_section = get_section (DEBUG_LINE_SECTION,
28777 SECTION_DEBUG, NULL);
28778 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28779 SECTION_DEBUG, NULL);
28780 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28781 SECTION_DEBUG, NULL);
28782 debug_str_section = get_section (DEBUG_STR_SECTION,
28783 DEBUG_STR_SECTION_FLAGS, NULL);
28784 if (!dwarf_split_debug_info && !output_asm_line_debug_info ())
28785 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28786 DEBUG_STR_SECTION_FLAGS, NULL);
28787
28788 debug_ranges_section = get_section (dwarf_version >= 5
28789 ? DEBUG_RNGLISTS_SECTION
28790 : DEBUG_RANGES_SECTION,
28791 SECTION_DEBUG, NULL);
28792 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28793 SECTION_DEBUG, NULL);
28794 }
28795
28796 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28797 DEBUG_ABBREV_SECTION_LABEL, generation);
28798 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28799 DEBUG_INFO_SECTION_LABEL, generation);
28800 info_section_emitted = false;
28801 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28802 DEBUG_LINE_SECTION_LABEL, generation);
28803 /* There are up to 4 unique ranges labels per generation.
28804 See also output_rnglists. */
28805 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28806 DEBUG_RANGES_SECTION_LABEL, generation * 4);
28807 if (dwarf_version >= 5 && dwarf_split_debug_info)
28808 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28809 DEBUG_RANGES_SECTION_LABEL,
28810 1 + generation * 4);
28811 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28812 DEBUG_ADDR_SECTION_LABEL, generation);
28813 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28814 (dwarf_strict && dwarf_version < 5)
28815 ? DEBUG_MACINFO_SECTION_LABEL
28816 : DEBUG_MACRO_SECTION_LABEL, generation);
28817 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28818 generation);
28819
28820 ++generation;
28821 return generation - 1;
28822 }
28823
28824 /* Set up for Dwarf output at the start of compilation. */
28825
28826 static void
28827 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28828 {
28829 /* Allocate the file_table. */
28830 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28831
28832 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28833 /* Allocate the decl_die_table. */
28834 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28835
28836 /* Allocate the decl_loc_table. */
28837 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28838
28839 /* Allocate the cached_dw_loc_list_table. */
28840 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28841
28842 /* Allocate the initial hunk of the abbrev_die_table. */
28843 vec_alloc (abbrev_die_table, 256);
28844 /* Zero-th entry is allocated, but unused. */
28845 abbrev_die_table->quick_push (NULL);
28846
28847 /* Allocate the dwarf_proc_stack_usage_map. */
28848 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28849
28850 /* Allocate the pubtypes and pubnames vectors. */
28851 vec_alloc (pubname_table, 32);
28852 vec_alloc (pubtype_table, 32);
28853
28854 vec_alloc (incomplete_types, 64);
28855
28856 vec_alloc (used_rtx_array, 32);
28857
28858 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28859 vec_alloc (macinfo_table, 64);
28860 #endif
28861
28862 /* If front-ends already registered a main translation unit but we were not
28863 ready to perform the association, do this now. */
28864 if (main_translation_unit != NULL_TREE)
28865 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28866 }
28867
28868 /* Called before compile () starts outputting functions, variables
28869 and toplevel asms into assembly. */
28870
28871 static void
28872 dwarf2out_assembly_start (void)
28873 {
28874 if (text_section_line_info)
28875 return;
28876
28877 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28878 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28879 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28880 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28881 COLD_TEXT_SECTION_LABEL, 0);
28882 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28883
28884 switch_to_section (text_section);
28885 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28886 #endif
28887
28888 /* Make sure the line number table for .text always exists. */
28889 text_section_line_info = new_line_info_table ();
28890 text_section_line_info->end_label = text_end_label;
28891
28892 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28893 cur_line_info_table = text_section_line_info;
28894 #endif
28895
28896 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28897 && dwarf2out_do_cfi_asm ()
28898 && !dwarf2out_do_eh_frame ())
28899 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
28900 }
28901
28902 /* A helper function for dwarf2out_finish called through
28903 htab_traverse. Assign a string its index. All strings must be
28904 collected into the table by the time index_string is called,
28905 because the indexing code relies on htab_traverse to traverse nodes
28906 in the same order for each run. */
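/* A minimal sketch of how this helper is meant to be driven (the actual
   call sits in dwarf2out_finish, which is not part of this excerpt), using
   the same traversal pattern as count_index_strings further below:

     unsigned int index = 0;
     debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
*/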
28907
28908 int
28909 index_string (indirect_string_node **h, unsigned int *index)
28910 {
28911 indirect_string_node *node = *h;
28912
28913 find_string_form (node);
28914 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28915 {
28916 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28917 node->index = *index;
28918 *index += 1;
28919 }
28920 return 1;
28921 }
28922
28923 /* A helper function for output_indirect_strings called through
28924 htab_traverse. Output the offset to a string and update the
28925 current offset. */
28926
28927 int
28928 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28929 {
28930 indirect_string_node *node = *h;
28931
28932 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28933 {
28934 /* Assert that this node has been assigned an index. */
28935 gcc_assert (node->index != NO_INDEX_ASSIGNED
28936 && node->index != NOT_INDEXED);
28937 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
28938 "indexed string 0x%x: %s", node->index, node->str);
28939 *offset += strlen (node->str) + 1;
28940 }
28941 return 1;
28942 }
28943
28944 /* A helper function for dwarf2out_finish called through
28945 htab_traverse. Output the indexed string. */
28946
28947 int
28948 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28949 {
28950 struct indirect_string_node *node = *h;
28951
28952 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28953 {
28954 /* Assert that the strings are output in the same order as their
28955 indexes were assigned. */
28956 gcc_assert (*cur_idx == node->index);
28957 assemble_string (node->str, strlen (node->str) + 1);
28958 *cur_idx += 1;
28959 }
28960 return 1;
28961 }
28962
28963 /* A helper function for output_indirect_strings. Counts the number
28964 of indexed string offsets. Must match the logic of the functions
28965 output_index_string[_offset] above. */
28966 int
28967 count_index_strings (indirect_string_node **h, unsigned int *last_idx)
28968 {
28969 struct indirect_string_node *node = *h;
28970
28971 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28972 *last_idx += 1;
28973 return 1;
28974 }
28975
28976 /* A helper function for dwarf2out_finish called through
28977 htab_traverse. Emit one queued .debug_str string. */
28978
28979 int
28980 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
28981 {
28982 struct indirect_string_node *node = *h;
28983
28984 node->form = find_string_form (node);
28985 if (node->form == form && node->refcount > 0)
28986 {
28987 ASM_OUTPUT_LABEL (asm_out_file, node->label);
28988 assemble_string (node->str, strlen (node->str) + 1);
28989 }
28990
28991 return 1;
28992 }
28993
28994 /* Output the indexed string table. */
28995
28996 static void
28997 output_indirect_strings (void)
28998 {
28999 switch_to_section (debug_str_section);
29000 if (!dwarf_split_debug_info)
29001 debug_str_hash->traverse<enum dwarf_form,
29002 output_indirect_string> (DW_FORM_strp);
29003 else
29004 {
29005 unsigned int offset = 0;
29006 unsigned int cur_idx = 0;
29007
29008 if (skeleton_debug_str_hash)
29009 skeleton_debug_str_hash->traverse<enum dwarf_form,
29010 output_indirect_string> (DW_FORM_strp);
29011
29012 switch_to_section (debug_str_offsets_section);
29013 /* For DWARF5 the .debug_str_offsets[.dwo] section needs a unit
29014 header. Note that we don't need to generate a label for the
29015 actual index table following the header here, because this is
29016 for the split dwarf case only. In a .dwo file there is only
29017 one string offsets table (and one debug info section). But
29018 if we were to start using string offset tables for the main (or
29019 skeleton) unit, we would have to add a DW_AT_str_offsets_base
29020 pointing to the actual index after the header. Split dwarf
29021 units will never have a string offsets base attribute. When
29022 a split unit is moved into a .dwp file the string offsets can
29023 be found through the .debug_cu_index section table. */
29024 if (dwarf_version >= 5)
29025 {
29026 unsigned int last_idx = 0;
29027 unsigned long str_offsets_length;
29028
29029 debug_str_hash->traverse_noresize
29030 <unsigned int *, count_index_strings> (&last_idx);
29031 str_offsets_length = last_idx * DWARF_OFFSET_SIZE + 4;
29032 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
29033 dw2_asm_output_data (4, 0xffffffff,
29034 "Escape value for 64-bit DWARF extension");
29035 dw2_asm_output_data (DWARF_OFFSET_SIZE, str_offsets_length,
29036 "Length of string offsets unit");
29037 dw2_asm_output_data (2, 5, "DWARF string offsets version");
29038 dw2_asm_output_data (2, 0, "Header zero padding");
29039 }
29040 debug_str_hash->traverse_noresize
29041 <unsigned int *, output_index_string_offset> (&offset);
29042 switch_to_section (debug_str_dwo_section);
29043 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
29044 (&cur_idx);
29045 }
29046 }
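/* An illustrative sketch only (the directives and offsets are assumptions,
   not what dw2_asm_output_data literally prints on every target): for
   32-bit DWARF with three indexed strings, the DWARF5 header and table
   emitted above would look roughly like

	.long	0x10	# Length of string offsets unit (4 + 3 * 4)
	.value	0x5	# DWARF string offsets version
	.value	0x0	# Header zero padding
	.long	<offset of indexed string 0x0 in .debug_str.dwo>
	.long	<offset of indexed string 0x1 in .debug_str.dwo>
	.long	<offset of indexed string 0x2 in .debug_str.dwo>

   where, per DWARF5, the unit length excludes the length field
   itself.  */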
29047
29048 /* Callback for htab_traverse to verify the index assigned to an entry
29049 in the table, and to write that entry to the .debug_addr section. */
29050
29051 int
29052 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
29053 {
29054 addr_table_entry *entry = *slot;
29055
29056 if (entry->refcount == 0)
29057 {
29058 gcc_assert (entry->index == NO_INDEX_ASSIGNED
29059 || entry->index == NOT_INDEXED);
29060 return 1;
29061 }
29062
29063 gcc_assert (entry->index == *cur_index);
29064 (*cur_index)++;
29065
29066 switch (entry->kind)
29067 {
29068 case ate_kind_rtx:
29069 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
29070 "0x%x", entry->index);
29071 break;
29072 case ate_kind_rtx_dtprel:
29073 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
29074 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
29075 DWARF2_ADDR_SIZE,
29076 entry->addr.rtl);
29077 fputc ('\n', asm_out_file);
29078 break;
29079 case ate_kind_label:
29080 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
29081 "0x%x", entry->index);
29082 break;
29083 default:
29084 gcc_unreachable ();
29085 }
29086 return 1;
29087 }
29088
29089 /* A helper function for dwarf2out_finish. Counts the number
29090 of indexed addresses. Must match the logic of the function
29091 output_addr_table_entry above. */
29092 int
29093 count_index_addrs (addr_table_entry **slot, unsigned int *last_idx)
29094 {
29095 addr_table_entry *entry = *slot;
29096
29097 if (entry->refcount > 0)
29098 *last_idx += 1;
29099 return 1;
29100 }
29101
29102 /* Produce the .debug_addr section. */
29103
29104 static void
29105 output_addr_table (void)
29106 {
29107 unsigned int index = 0;
29108 if (addr_index_table == NULL || addr_index_table->size () == 0)
29109 return;
29110
29111 switch_to_section (debug_addr_section);
29112 addr_index_table
29113 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
29114 }
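/* For orientation only: each surviving addr_table_entry becomes one
   address-sized slot, and consumers reference the slots by their
   zero-based index (e.g. via DW_OP_addrx / DW_FORM_addrx or the GNU
   split-dwarf equivalents).  On a 64-bit target the output might look
   roughly like (symbol names purely illustrative)

	.quad	some_symbol	# 0x0
	.quad	.LVL3		# 0x1  */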
29115
29116 #if ENABLE_ASSERT_CHECKING
29117 /* Verify that all marks are clear. */
29118
29119 static void
29120 verify_marks_clear (dw_die_ref die)
29121 {
29122 dw_die_ref c;
29123
29124 gcc_assert (! die->die_mark);
29125 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
29126 }
29127 #endif /* ENABLE_ASSERT_CHECKING */
29128
29129 /* Clear the marks for a die and its children.
29130 Don't complain if the mark isn't set. */
29131
29132 static void
29133 prune_unmark_dies (dw_die_ref die)
29134 {
29135 dw_die_ref c;
29136
29137 if (die->die_mark)
29138 die->die_mark = 0;
29139 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
29140 }
29141
29142 /* Given LOC that is referenced by a DIE we're marking as used, find all
29143 DWARF procedures it references and mark them as used. */
29144
29145 static void
29146 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
29147 {
29148 for (; loc != NULL; loc = loc->dw_loc_next)
29149 switch (loc->dw_loc_opc)
29150 {
29151 case DW_OP_implicit_pointer:
29152 case DW_OP_convert:
29153 case DW_OP_reinterpret:
29154 case DW_OP_GNU_implicit_pointer:
29155 case DW_OP_GNU_convert:
29156 case DW_OP_GNU_reinterpret:
29157 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
29158 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
29159 break;
29160 case DW_OP_GNU_variable_value:
29161 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29162 {
29163 dw_die_ref ref
29164 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29165 if (ref == NULL)
29166 break;
29167 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29168 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29169 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29170 }
29171 /* FALLTHRU */
29172 case DW_OP_call2:
29173 case DW_OP_call4:
29174 case DW_OP_call_ref:
29175 case DW_OP_const_type:
29176 case DW_OP_GNU_const_type:
29177 case DW_OP_GNU_parameter_ref:
29178 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
29179 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
29180 break;
29181 case DW_OP_regval_type:
29182 case DW_OP_deref_type:
29183 case DW_OP_GNU_regval_type:
29184 case DW_OP_GNU_deref_type:
29185 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
29186 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
29187 break;
29188 case DW_OP_entry_value:
29189 case DW_OP_GNU_entry_value:
29190 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
29191 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
29192 break;
29193 default:
29194 break;
29195 }
29196 }
29197
29198 /* Given DIE that we're marking as used, find any other dies
29199 it references as attributes and mark them as used. */
29200
29201 static void
29202 prune_unused_types_walk_attribs (dw_die_ref die)
29203 {
29204 dw_attr_node *a;
29205 unsigned ix;
29206
29207 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29208 {
29209 switch (AT_class (a))
29210 {
29211 /* Make sure DWARF procedures referenced by location descriptions will
29212 get emitted. */
29213 case dw_val_class_loc:
29214 prune_unused_types_walk_loc_descr (AT_loc (a));
29215 break;
29216 case dw_val_class_loc_list:
29217 for (dw_loc_list_ref list = AT_loc_list (a);
29218 list != NULL;
29219 list = list->dw_loc_next)
29220 prune_unused_types_walk_loc_descr (list->expr);
29221 break;
29222
29223 case dw_val_class_view_list:
29224 /* This points to a loc_list in another attribute, so it's
29225 already covered. */
29226 break;
29227
29228 case dw_val_class_die_ref:
29229 /* A reference to another DIE.
29230 Make sure that it will get emitted.
29231 If it was broken out into a comdat group, don't follow it. */
29232 if (! AT_ref (a)->comdat_type_p
29233 || a->dw_attr == DW_AT_specification)
29234 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
29235 break;
29236
29237 case dw_val_class_str:
29238 /* Set the string's refcount to 0 so that prune_unused_types_mark
29239 accounts properly for it. */
29240 a->dw_attr_val.v.val_str->refcount = 0;
29241 break;
29242
29243 default:
29244 break;
29245 }
29246 }
29247 }
29248
29249 /* Mark the children DIEs of DIE that describe its generic parameters and arguments. */
29250
29251 static void
29252 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
29253 {
29254 dw_die_ref c;
29255
29256 if (die == NULL || die->die_child == NULL)
29257 return;
29258 c = die->die_child;
29259 do
29260 {
29261 if (is_template_parameter (c))
29262 prune_unused_types_mark (c, 1);
29263 c = c->die_sib;
29264 } while (c && c != die->die_child);
29265 }
29266
29267 /* Mark DIE as being used. If DOKIDS is true, then walk down
29268 to DIE's children. */
29269
29270 static void
29271 prune_unused_types_mark (dw_die_ref die, int dokids)
29272 {
29273 dw_die_ref c;
29274
29275 if (die->die_mark == 0)
29276 {
29277 /* We haven't done this node yet. Mark it as used. */
29278 die->die_mark = 1;
29279 /* If this is the DIE of a generic type instantiation,
29280 mark the children DIEs that describe its generic parms and
29281 args. */
29282 prune_unused_types_mark_generic_parms_dies (die);
29283
29284 /* We also have to mark its parents as used.
29285 (But we don't want to mark our parent's kids due to this,
29286 unless it is a class.) */
29287 if (die->die_parent)
29288 prune_unused_types_mark (die->die_parent,
29289 class_scope_p (die->die_parent));
29290
29291 /* Mark any referenced nodes. */
29292 prune_unused_types_walk_attribs (die);
29293
29294 /* If this node is a specification,
29295 also mark the definition, if it exists. */
29296 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
29297 prune_unused_types_mark (die->die_definition, 1);
29298 }
29299
29300 if (dokids && die->die_mark != 2)
29301 {
29302 /* We need to walk the children, but haven't done so yet.
29303 Remember that we've walked the kids. */
29304 die->die_mark = 2;
29305
29306 /* If this is an array type, we need to make sure our
29307 kids get marked, even if they're types. If we're
29308 breaking out types into comdat sections, do this
29309 for all type definitions. */
29310 if (die->die_tag == DW_TAG_array_type
29311 || (use_debug_types
29312 && is_type_die (die) && ! is_declaration_die (die)))
29313 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
29314 else
29315 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29316 }
29317 }
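/* A summary of the die_mark values used by this pruning walk, inferred
   from the code above rather than stated anywhere centrally:
   0 - the DIE has not been found to be used yet,
   1 - the DIE itself is used, but its children have not been walked,
   2 - the DIE is used and its children have already been walked.  */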
29318
29319 /* For local classes, check whether any static member functions were
29320 emitted and, if so, mark them. */
29321
29322 static void
29323 prune_unused_types_walk_local_classes (dw_die_ref die)
29324 {
29325 dw_die_ref c;
29326
29327 if (die->die_mark == 2)
29328 return;
29329
29330 switch (die->die_tag)
29331 {
29332 case DW_TAG_structure_type:
29333 case DW_TAG_union_type:
29334 case DW_TAG_class_type:
29335 case DW_TAG_interface_type:
29336 break;
29337
29338 case DW_TAG_subprogram:
29339 if (!get_AT_flag (die, DW_AT_declaration)
29340 || die->die_definition != NULL)
29341 prune_unused_types_mark (die, 1);
29342 return;
29343
29344 default:
29345 return;
29346 }
29347
29348 /* Mark children. */
29349 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
29350 }
29351
29352 /* Walk the tree DIE and mark types that we actually use. */
29353
29354 static void
29355 prune_unused_types_walk (dw_die_ref die)
29356 {
29357 dw_die_ref c;
29358
29359 /* Don't do anything if this node is already marked and
29360 children have been marked as well. */
29361 if (die->die_mark == 2)
29362 return;
29363
29364 switch (die->die_tag)
29365 {
29366 case DW_TAG_structure_type:
29367 case DW_TAG_union_type:
29368 case DW_TAG_class_type:
29369 case DW_TAG_interface_type:
29370 if (die->die_perennial_p)
29371 break;
29372
29373 for (c = die->die_parent; c; c = c->die_parent)
29374 if (c->die_tag == DW_TAG_subprogram)
29375 break;
29376
29377 /* Finding used static member functions inside of classes
29378 is needed just for local classes, because for other classes
29379 static member function DIEs with DW_AT_specification
29380 are emitted outside of the DW_TAG_*_type. If we ever change
29381 it, we'd need to call this even for non-local classes. */
29382 if (c)
29383 prune_unused_types_walk_local_classes (die);
29384
29385 /* It's a type node --- don't mark it. */
29386 return;
29387
29388 case DW_TAG_const_type:
29389 case DW_TAG_packed_type:
29390 case DW_TAG_pointer_type:
29391 case DW_TAG_reference_type:
29392 case DW_TAG_rvalue_reference_type:
29393 case DW_TAG_volatile_type:
29394 case DW_TAG_typedef:
29395 case DW_TAG_array_type:
29396 case DW_TAG_friend:
29397 case DW_TAG_enumeration_type:
29398 case DW_TAG_subroutine_type:
29399 case DW_TAG_string_type:
29400 case DW_TAG_set_type:
29401 case DW_TAG_subrange_type:
29402 case DW_TAG_ptr_to_member_type:
29403 case DW_TAG_file_type:
29404 /* Type nodes are useful only when other DIEs reference them --- don't
29405 mark them. */
29406 /* FALLTHROUGH */
29407
29408 case DW_TAG_dwarf_procedure:
29409 /* Likewise for DWARF procedures. */
29410
29411 if (die->die_perennial_p)
29412 break;
29413
29414 return;
29415
29416 case DW_TAG_variable:
29417 if (flag_debug_only_used_symbols)
29418 {
29419 if (die->die_perennial_p)
29420 break;
29421
29422 /* premark_used_variables marks external variables --- don't mark
29423 them here. But function-local externals are always considered
29424 used. */
29425 if (get_AT (die, DW_AT_external))
29426 {
29427 for (c = die->die_parent; c; c = c->die_parent)
29428 if (c->die_tag == DW_TAG_subprogram)
29429 break;
29430 if (!c)
29431 return;
29432 }
29433 }
29434 /* FALLTHROUGH */
29435
29436 default:
29437 /* Mark everything else. */
29438 break;
29439 }
29440
29441 if (die->die_mark == 0)
29442 {
29443 die->die_mark = 1;
29444
29445 /* Now, mark any dies referenced from here. */
29446 prune_unused_types_walk_attribs (die);
29447 }
29448
29449 die->die_mark = 2;
29450
29451 /* Mark children. */
29452 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29453 }
29454
29455 /* Increment the string counts on strings referred to from DIE's
29456 attributes. */
29457
29458 static void
29459 prune_unused_types_update_strings (dw_die_ref die)
29460 {
29461 dw_attr_node *a;
29462 unsigned ix;
29463
29464 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29465 if (AT_class (a) == dw_val_class_str)
29466 {
29467 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29468 s->refcount++;
29469 /* Avoid unnecessarily putting strings that are used less than
29470 twice in the hash table. */
29471 if (s->refcount
29472 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
29473 {
29474 indirect_string_node **slot
29475 = debug_str_hash->find_slot_with_hash (s->str,
29476 htab_hash_string (s->str),
29477 INSERT);
29478 gcc_assert (*slot == NULL);
29479 *slot = s;
29480 }
29481 }
29482 }
29483
29484 /* Mark DIE and its children as removed. */
29485
29486 static void
29487 mark_removed (dw_die_ref die)
29488 {
29489 dw_die_ref c;
29490 die->removed = true;
29491 FOR_EACH_CHILD (die, c, mark_removed (c));
29492 }
29493
29494 /* Remove from the tree DIE any dies that aren't marked. */
29495
29496 static void
29497 prune_unused_types_prune (dw_die_ref die)
29498 {
29499 dw_die_ref c;
29500
29501 gcc_assert (die->die_mark);
29502 prune_unused_types_update_strings (die);
29503
29504 if (! die->die_child)
29505 return;
29506
29507 c = die->die_child;
29508 do {
29509 dw_die_ref prev = c, next;
29510 for (c = c->die_sib; ! c->die_mark; c = next)
29511 if (c == die->die_child)
29512 {
29513 /* No marked children between 'prev' and the end of the list. */
29514 if (prev == c)
29515 /* No marked children at all. */
29516 die->die_child = NULL;
29517 else
29518 {
29519 prev->die_sib = c->die_sib;
29520 die->die_child = prev;
29521 }
29522 c->die_sib = NULL;
29523 mark_removed (c);
29524 return;
29525 }
29526 else
29527 {
29528 next = c->die_sib;
29529 c->die_sib = NULL;
29530 mark_removed (c);
29531 }
29532
29533 if (c != prev->die_sib)
29534 prev->die_sib = c;
29535 prune_unused_types_prune (c);
29536 } while (c != die->die_child);
29537 }
29538
29539 /* Remove dies representing declarations that we never use. */
29540
29541 static void
29542 prune_unused_types (void)
29543 {
29544 unsigned int i;
29545 limbo_die_node *node;
29546 comdat_type_node *ctnode;
29547 pubname_entry *pub;
29548 dw_die_ref base_type;
29549
29550 #if ENABLE_ASSERT_CHECKING
29551 /* All the marks should already be clear. */
29552 verify_marks_clear (comp_unit_die ());
29553 for (node = limbo_die_list; node; node = node->next)
29554 verify_marks_clear (node->die);
29555 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29556 verify_marks_clear (ctnode->root_die);
29557 #endif /* ENABLE_ASSERT_CHECKING */
29558
29559 /* Mark types that are used in global variables. */
29560 premark_types_used_by_global_vars ();
29561
29562 /* Mark variables used in the symtab. */
29563 if (flag_debug_only_used_symbols)
29564 premark_used_variables ();
29565
29566 /* Set the mark on nodes that are actually used. */
29567 prune_unused_types_walk (comp_unit_die ());
29568 for (node = limbo_die_list; node; node = node->next)
29569 prune_unused_types_walk (node->die);
29570 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29571 {
29572 prune_unused_types_walk (ctnode->root_die);
29573 prune_unused_types_mark (ctnode->type_die, 1);
29574 }
29575
29576 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29577 are unusual in that they are pubnames that are the children of pubtypes.
29578 They should only be marked via their parent DW_TAG_enumeration_type die,
29579 not as roots in themselves. */
29580 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29581 if (pub->die->die_tag != DW_TAG_enumerator)
29582 prune_unused_types_mark (pub->die, 1);
29583 for (i = 0; base_types.iterate (i, &base_type); i++)
29584 prune_unused_types_mark (base_type, 1);
29585
29586 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
29587 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
29588 callees). */
29589 cgraph_node *cnode;
29590 FOR_EACH_FUNCTION (cnode)
29591 if (cnode->referred_to_p (false))
29592 {
29593 dw_die_ref die = lookup_decl_die (cnode->decl);
29594 if (die == NULL || die->die_mark)
29595 continue;
29596 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29597 if (e->caller != cnode
29598 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
29599 {
29600 prune_unused_types_mark (die, 1);
29601 break;
29602 }
29603 }
29604
29605 if (debug_str_hash)
29606 debug_str_hash->empty ();
29607 if (skeleton_debug_str_hash)
29608 skeleton_debug_str_hash->empty ();
29609 prune_unused_types_prune (comp_unit_die ());
29610 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29611 {
29612 node = *pnode;
29613 if (!node->die->die_mark)
29614 *pnode = node->next;
29615 else
29616 {
29617 prune_unused_types_prune (node->die);
29618 pnode = &node->next;
29619 }
29620 }
29621 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29622 prune_unused_types_prune (ctnode->root_die);
29623
29624 /* Leave the marks clear. */
29625 prune_unmark_dies (comp_unit_die ());
29626 for (node = limbo_die_list; node; node = node->next)
29627 prune_unmark_dies (node->die);
29628 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29629 prune_unmark_dies (ctnode->root_die);
29630 }
29631
29632 /* Helpers to manipulate hash table of comdat type units. */
29633
29634 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29635 {
29636 static inline hashval_t hash (const comdat_type_node *);
29637 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29638 };
29639
29640 inline hashval_t
29641 comdat_type_hasher::hash (const comdat_type_node *type_node)
29642 {
29643 hashval_t h;
29644 memcpy (&h, type_node->signature, sizeof (h));
29645 return h;
29646 }
29647
29648 inline bool
29649 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
29650 const comdat_type_node *type_node_2)
29651 {
29652 return (! memcmp (type_node_1->signature, type_node_2->signature,
29653 DWARF_TYPE_SIGNATURE_SIZE));
29654 }
29655
29656 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
29657 to the location it would have been added at, had we known its
29658 DECL_ASSEMBLER_NAME when we added the other attributes. This will
29659 probably improve the compactness of debug info by removing equivalent
29660 abbrevs, and hide any differences caused by deferring the
29661 computation of the assembler name, triggered by e.g. PCH. */
29662
29663 static inline void
29664 move_linkage_attr (dw_die_ref die)
29665 {
29666 unsigned ix = vec_safe_length (die->die_attr);
29667 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29668
29669 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29670 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29671
29672 while (--ix > 0)
29673 {
29674 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29675
29676 if (prev->dw_attr == DW_AT_decl_line
29677 || prev->dw_attr == DW_AT_decl_column
29678 || prev->dw_attr == DW_AT_name)
29679 break;
29680 }
29681
29682 if (ix != vec_safe_length (die->die_attr) - 1)
29683 {
29684 die->die_attr->pop ();
29685 die->die_attr->quick_insert (ix, linkage);
29686 }
29687 }
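/* An illustrative example of the reordering above (attribute names
   only, not tied to any particular DIE): if the attribute vector was

	DW_AT_name, DW_AT_decl_file, DW_AT_decl_line, DW_AT_type,
	DW_AT_external, DW_AT_linkage_name

   with DW_AT_linkage_name just appended at the end, it becomes

	DW_AT_name, DW_AT_decl_file, DW_AT_decl_line, DW_AT_linkage_name,
	DW_AT_type, DW_AT_external

   i.e. the linkage name moves to just after the last of DW_AT_name,
   DW_AT_decl_line or DW_AT_decl_column.  */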
29688
29689 /* Helper function for resolve_addr; mark DW_TAG_base_type nodes
29690 referenced from typed stack ops and count how often they are used. */
29691
29692 static void
29693 mark_base_types (dw_loc_descr_ref loc)
29694 {
29695 dw_die_ref base_type = NULL;
29696
29697 for (; loc; loc = loc->dw_loc_next)
29698 {
29699 switch (loc->dw_loc_opc)
29700 {
29701 case DW_OP_regval_type:
29702 case DW_OP_deref_type:
29703 case DW_OP_GNU_regval_type:
29704 case DW_OP_GNU_deref_type:
29705 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29706 break;
29707 case DW_OP_convert:
29708 case DW_OP_reinterpret:
29709 case DW_OP_GNU_convert:
29710 case DW_OP_GNU_reinterpret:
29711 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29712 continue;
29713 /* FALLTHRU */
29714 case DW_OP_const_type:
29715 case DW_OP_GNU_const_type:
29716 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29717 break;
29718 case DW_OP_entry_value:
29719 case DW_OP_GNU_entry_value:
29720 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29721 continue;
29722 default:
29723 continue;
29724 }
29725 gcc_assert (base_type->die_parent == comp_unit_die ());
29726 if (base_type->die_mark)
29727 base_type->die_mark++;
29728 else
29729 {
29730 base_types.safe_push (base_type);
29731 base_type->die_mark = 1;
29732 }
29733 }
29734 }
29735
29736 /* Comparison function for sorting marked base types. */
29737
29738 static int
29739 base_type_cmp (const void *x, const void *y)
29740 {
29741 dw_die_ref dx = *(const dw_die_ref *) x;
29742 dw_die_ref dy = *(const dw_die_ref *) y;
29743 unsigned int byte_size1, byte_size2;
29744 unsigned int encoding1, encoding2;
29745 unsigned int align1, align2;
29746 if (dx->die_mark > dy->die_mark)
29747 return -1;
29748 if (dx->die_mark < dy->die_mark)
29749 return 1;
29750 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29751 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29752 if (byte_size1 < byte_size2)
29753 return 1;
29754 if (byte_size1 > byte_size2)
29755 return -1;
29756 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29757 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29758 if (encoding1 < encoding2)
29759 return 1;
29760 if (encoding1 > encoding2)
29761 return -1;
29762 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29763 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29764 if (align1 < align2)
29765 return 1;
29766 if (align1 > align2)
29767 return -1;
29768 return 0;
29769 }
29770
29771 /* Move base types marked by mark_base_types as early as possible
29772 in the CU, sorted by decreasing usage count, both to make the
29773 uleb128 references as small as possible and to make sure they
29774 will have die_offset already computed by calc_die_sizes when
29775 the sizes of typed stack loc ops are computed. */
29776
29777 static void
29778 move_marked_base_types (void)
29779 {
29780 unsigned int i;
29781 dw_die_ref base_type, die, c;
29782
29783 if (base_types.is_empty ())
29784 return;
29785
29786 /* Sort by decreasing usage count, they will be added again in that
29787 order later on. */
29788 base_types.qsort (base_type_cmp);
29789 die = comp_unit_die ();
29790 c = die->die_child;
29791 do
29792 {
29793 dw_die_ref prev = c;
29794 c = c->die_sib;
29795 while (c->die_mark)
29796 {
29797 remove_child_with_prev (c, prev);
29798 /* As base types got marked, there must be at least
29799 one node other than DW_TAG_base_type. */
29800 gcc_assert (die->die_child != NULL);
29801 c = prev->die_sib;
29802 }
29803 }
29804 while (c != die->die_child);
29805 gcc_assert (die->die_child);
29806 c = die->die_child;
29807 for (i = 0; base_types.iterate (i, &base_type); i++)
29808 {
29809 base_type->die_mark = 0;
29810 base_type->die_sib = c->die_sib;
29811 c->die_sib = base_type;
29812 c = base_type;
29813 }
29814 }
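/* For orientation: after this pass the children of the CU DIE begin
   with the marked DW_TAG_base_type DIEs, most frequently referenced
   first, e.g. (type names and counts purely illustrative)

	DW_TAG_base_type "int"		(referenced 7 times)
	DW_TAG_base_type "long int"	(referenced 3 times)
	DW_TAG_base_type "double"	(referenced once)
	...remaining children in their original order...  */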
29815
29816 /* Helper function for resolve_addr; attempt to resolve
29817 one CONST_STRING and return true if successful. Similarly verify that
29818 SYMBOL_REFs refer to variables emitted in the current CU. */
29819
29820 static bool
29821 resolve_one_addr (rtx *addr)
29822 {
29823 rtx rtl = *addr;
29824
29825 if (GET_CODE (rtl) == CONST_STRING)
29826 {
29827 size_t len = strlen (XSTR (rtl, 0)) + 1;
29828 tree t = build_string (len, XSTR (rtl, 0));
29829 tree tlen = size_int (len - 1);
29830 TREE_TYPE (t)
29831 = build_array_type (char_type_node, build_index_type (tlen));
29832 rtl = lookup_constant_def (t);
29833 if (!rtl || !MEM_P (rtl))
29834 return false;
29835 rtl = XEXP (rtl, 0);
29836 if (GET_CODE (rtl) == SYMBOL_REF
29837 && SYMBOL_REF_DECL (rtl)
29838 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29839 return false;
29840 vec_safe_push (used_rtx_array, rtl);
29841 *addr = rtl;
29842 return true;
29843 }
29844
29845 if (GET_CODE (rtl) == SYMBOL_REF
29846 && SYMBOL_REF_DECL (rtl))
29847 {
29848 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29849 {
29850 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29851 return false;
29852 }
29853 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29854 return false;
29855 }
29856
29857 if (GET_CODE (rtl) == CONST)
29858 {
29859 subrtx_ptr_iterator::array_type array;
29860 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29861 if (!resolve_one_addr (*iter))
29862 return false;
29863 }
29864
29865 return true;
29866 }
29867
29868 /* For STRING_CST, return the SYMBOL_REF of its constant pool entry,
29869 if possible, and create a DW_TAG_dwarf_procedure that can be referenced
29870 from DW_OP_implicit_pointer if the string hasn't been seen yet. */
29871
29872 static rtx
29873 string_cst_pool_decl (tree t)
29874 {
29875 rtx rtl = output_constant_def (t, 1);
29876 unsigned char *array;
29877 dw_loc_descr_ref l;
29878 tree decl;
29879 size_t len;
29880 dw_die_ref ref;
29881
29882 if (!rtl || !MEM_P (rtl))
29883 return NULL_RTX;
29884 rtl = XEXP (rtl, 0);
29885 if (GET_CODE (rtl) != SYMBOL_REF
29886 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29887 return NULL_RTX;
29888
29889 decl = SYMBOL_REF_DECL (rtl);
29890 if (!lookup_decl_die (decl))
29891 {
29892 len = TREE_STRING_LENGTH (t);
29893 vec_safe_push (used_rtx_array, rtl);
29894 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29895 array = ggc_vec_alloc<unsigned char> (len);
29896 memcpy (array, TREE_STRING_POINTER (t), len);
29897 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29898 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29899 l->dw_loc_oprnd2.v.val_vec.length = len;
29900 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29901 l->dw_loc_oprnd2.v.val_vec.array = array;
29902 add_AT_loc (ref, DW_AT_location, l);
29903 equate_decl_number_to_die (decl, ref);
29904 }
29905 return rtl;
29906 }
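/* Purely illustrative: for a STRING_CST "hi" (TREE_STRING_LENGTH 3,
   counting the terminating NUL) the DIE created above would look like

	DW_TAG_dwarf_procedure
	  DW_AT_location: DW_OP_implicit_value 3 { 'h', 'i', '\0' }

   which DW_OP_implicit_pointer can then reference instead of an
   address in .rodata.  */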
29907
29908 /* Helper function of resolve_addr_in_expr. LOC is
29909 a DW_OP_addr followed by DW_OP_stack_value, either at the start
29910 of the exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
29911 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
29912 with DW_OP_implicit_pointer if possible
29913 and return true; if unsuccessful, return false. */
29914
29915 static bool
29916 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29917 {
29918 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29919 HOST_WIDE_INT offset = 0;
29920 dw_die_ref ref = NULL;
29921 tree decl;
29922
29923 if (GET_CODE (rtl) == CONST
29924 && GET_CODE (XEXP (rtl, 0)) == PLUS
29925 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29926 {
29927 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29928 rtl = XEXP (XEXP (rtl, 0), 0);
29929 }
29930 if (GET_CODE (rtl) == CONST_STRING)
29931 {
29932 size_t len = strlen (XSTR (rtl, 0)) + 1;
29933 tree t = build_string (len, XSTR (rtl, 0));
29934 tree tlen = size_int (len - 1);
29935
29936 TREE_TYPE (t)
29937 = build_array_type (char_type_node, build_index_type (tlen));
29938 rtl = string_cst_pool_decl (t);
29939 if (!rtl)
29940 return false;
29941 }
29942 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
29943 {
29944 decl = SYMBOL_REF_DECL (rtl);
29945 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
29946 {
29947 ref = lookup_decl_die (decl);
29948 if (ref && (get_AT (ref, DW_AT_location)
29949 || get_AT (ref, DW_AT_const_value)))
29950 {
29951 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
29952 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29953 loc->dw_loc_oprnd1.val_entry = NULL;
29954 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29955 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29956 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29957 loc->dw_loc_oprnd2.v.val_int = offset;
29958 return true;
29959 }
29960 }
29961 }
29962 return false;
29963 }
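/* A sketch of the rewrite performed above, with illustrative names:
   an expression such as

	DW_OP_addr <unresolvable symbol of variable 'v' + 8> DW_OP_stack_value

   becomes

	DW_OP_implicit_pointer <DIE of 'v'> 8

   provided 'v' has a DIE carrying DW_AT_location or
   DW_AT_const_value.  */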
29964
29965 /* Helper function for resolve_addr; handle one location
29966 expression and return false if at least one CONST_STRING or SYMBOL_REF in
29967 the location list couldn't be resolved. */
29968
29969 static bool
29970 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
29971 {
29972 dw_loc_descr_ref keep = NULL;
29973 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
29974 switch (loc->dw_loc_opc)
29975 {
29976 case DW_OP_addr:
29977 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29978 {
29979 if ((prev == NULL
29980 || prev->dw_loc_opc == DW_OP_piece
29981 || prev->dw_loc_opc == DW_OP_bit_piece)
29982 && loc->dw_loc_next
29983 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
29984 && (!dwarf_strict || dwarf_version >= 5)
29985 && optimize_one_addr_into_implicit_ptr (loc))
29986 break;
29987 return false;
29988 }
29989 break;
29990 case DW_OP_GNU_addr_index:
29991 case DW_OP_addrx:
29992 case DW_OP_GNU_const_index:
29993 case DW_OP_constx:
29994 if ((loc->dw_loc_opc == DW_OP_GNU_addr_index
29995 || loc->dw_loc_opc == DW_OP_addrx)
29996 || ((loc->dw_loc_opc == DW_OP_GNU_const_index
29997 || loc->dw_loc_opc == DW_OP_constx)
29998 && loc->dtprel))
29999 {
30000 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
30001 if (!resolve_one_addr (&rtl))
30002 return false;
30003 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
30004 loc->dw_loc_oprnd1.val_entry
30005 = add_addr_table_entry (rtl, ate_kind_rtx);
30006 }
30007 break;
30008 case DW_OP_const4u:
30009 case DW_OP_const8u:
30010 if (loc->dtprel
30011 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
30012 return false;
30013 break;
30014 case DW_OP_plus_uconst:
30015 if (size_of_loc_descr (loc)
30016 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
30017 + 1
30018 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
30019 {
30020 dw_loc_descr_ref repl
30021 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
30022 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
30023 add_loc_descr (&repl, loc->dw_loc_next);
30024 *loc = *repl;
30025 }
30026 break;
30027 case DW_OP_implicit_value:
30028 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
30029 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
30030 return false;
30031 break;
30032 case DW_OP_implicit_pointer:
30033 case DW_OP_GNU_implicit_pointer:
30034 case DW_OP_GNU_parameter_ref:
30035 case DW_OP_GNU_variable_value:
30036 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30037 {
30038 dw_die_ref ref
30039 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
30040 if (ref == NULL)
30041 return false;
30042 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30043 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
30044 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
30045 }
30046 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
30047 {
30048 if (prev == NULL
30049 && loc->dw_loc_next == NULL
30050 && AT_class (a) == dw_val_class_loc)
30051 switch (a->dw_attr)
30052 {
30053 /* The following attributes allow both exprloc and reference,
30054 so if the whole expression is DW_OP_GNU_variable_value
30055 alone we can transform it into a reference. */
30056 case DW_AT_byte_size:
30057 case DW_AT_bit_size:
30058 case DW_AT_lower_bound:
30059 case DW_AT_upper_bound:
30060 case DW_AT_bit_stride:
30061 case DW_AT_count:
30062 case DW_AT_allocated:
30063 case DW_AT_associated:
30064 case DW_AT_byte_stride:
30065 a->dw_attr_val.val_class = dw_val_class_die_ref;
30066 a->dw_attr_val.val_entry = NULL;
30067 a->dw_attr_val.v.val_die_ref.die
30068 = loc->dw_loc_oprnd1.v.val_die_ref.die;
30069 a->dw_attr_val.v.val_die_ref.external = 0;
30070 return true;
30071 default:
30072 break;
30073 }
30074 if (dwarf_strict)
30075 return false;
30076 }
30077 break;
30078 case DW_OP_const_type:
30079 case DW_OP_regval_type:
30080 case DW_OP_deref_type:
30081 case DW_OP_convert:
30082 case DW_OP_reinterpret:
30083 case DW_OP_GNU_const_type:
30084 case DW_OP_GNU_regval_type:
30085 case DW_OP_GNU_deref_type:
30086 case DW_OP_GNU_convert:
30087 case DW_OP_GNU_reinterpret:
30088 while (loc->dw_loc_next
30089 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
30090 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
30091 {
30092 dw_die_ref base1, base2;
30093 unsigned enc1, enc2, size1, size2;
30094 if (loc->dw_loc_opc == DW_OP_regval_type
30095 || loc->dw_loc_opc == DW_OP_deref_type
30096 || loc->dw_loc_opc == DW_OP_GNU_regval_type
30097 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
30098 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
30099 else if (loc->dw_loc_oprnd1.val_class
30100 == dw_val_class_unsigned_const)
30101 break;
30102 else
30103 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
30104 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
30105 == dw_val_class_unsigned_const)
30106 break;
30107 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
30108 gcc_assert (base1->die_tag == DW_TAG_base_type
30109 && base2->die_tag == DW_TAG_base_type);
30110 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
30111 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
30112 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
30113 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
30114 if (size1 == size2
30115 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
30116 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
30117 && loc != keep)
30118 || enc1 == enc2))
30119 {
30120 /* Optimize away next DW_OP_convert after
30121 adjusting LOC's base type die reference. */
30122 if (loc->dw_loc_opc == DW_OP_regval_type
30123 || loc->dw_loc_opc == DW_OP_deref_type
30124 || loc->dw_loc_opc == DW_OP_GNU_regval_type
30125 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
30126 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
30127 else
30128 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
30129 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
30130 continue;
30131 }
30132 /* Don't change integer DW_OP_convert after e.g. floating
30133 point typed stack entry. */
30134 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
30135 keep = loc->dw_loc_next;
30136 break;
30137 }
30138 break;
30139 default:
30140 break;
30141 }
30142 return true;
30143 }
30144
30145 /* Helper function of resolve_addr. DIE had a DW_AT_location of
30146 DW_OP_addr alone, which referred to DECL in DW_OP_addr's operand,
30147 and the DW_OP_addr couldn't be resolved. resolve_addr has already
30148 removed the DW_AT_location attribute. This function attempts to
30149 add to it either a new DW_AT_location attribute with
30150 DW_OP_implicit_pointer or a DW_AT_const_value attribute, if possible. */
30151
30152 static void
30153 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
30154 {
30155 if (!VAR_P (decl)
30156 || lookup_decl_die (decl) != die
30157 || DECL_EXTERNAL (decl)
30158 || !TREE_STATIC (decl)
30159 || DECL_INITIAL (decl) == NULL_TREE
30160 || DECL_P (DECL_INITIAL (decl))
30161 || get_AT (die, DW_AT_const_value))
30162 return;
30163
30164 tree init = DECL_INITIAL (decl);
30165 HOST_WIDE_INT offset = 0;
30166 /* For variables that have been optimized away and thus
30167 don't have a memory location, see if we can emit
30168 DW_AT_const_value instead. */
30169 if (tree_add_const_value_attribute (die, init))
30170 return;
30171 if (dwarf_strict && dwarf_version < 5)
30172 return;
30173 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
30174 and ADDR_EXPR refers to a decl that has DW_AT_location or
30175 DW_AT_const_value (but isn't addressable, otherwise
30176 resolving the original DW_OP_addr wouldn't fail), see if
30177 we can add DW_OP_implicit_pointer. */
30178 STRIP_NOPS (init);
30179 if (TREE_CODE (init) == POINTER_PLUS_EXPR
30180 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
30181 {
30182 offset = tree_to_shwi (TREE_OPERAND (init, 1));
30183 init = TREE_OPERAND (init, 0);
30184 STRIP_NOPS (init);
30185 }
30186 if (TREE_CODE (init) != ADDR_EXPR)
30187 return;
30188 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
30189 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
30190 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
30191 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
30192 && TREE_OPERAND (init, 0) != decl))
30193 {
30194 dw_die_ref ref;
30195 dw_loc_descr_ref l;
30196
30197 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
30198 {
30199 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
30200 if (!rtl)
30201 return;
30202 decl = SYMBOL_REF_DECL (rtl);
30203 }
30204 else
30205 decl = TREE_OPERAND (init, 0);
30206 ref = lookup_decl_die (decl);
30207 if (ref == NULL
30208 || (!get_AT (ref, DW_AT_location)
30209 && !get_AT (ref, DW_AT_const_value)))
30210 return;
30211 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
30212 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30213 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
30214 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30215 add_AT_loc (die, DW_AT_location, l);
30216 }
30217 }
30218
30219 /* Return NULL if L is a valid DWARF expression, or otherwise the
30220 first op that is not a valid DWARF expression op. */
30221
30222 static dw_loc_descr_ref
30223 non_dwarf_expression (dw_loc_descr_ref l)
30224 {
30225 while (l)
30226 {
30227 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30228 return l;
30229 switch (l->dw_loc_opc)
30230 {
30231 case DW_OP_regx:
30232 case DW_OP_implicit_value:
30233 case DW_OP_stack_value:
30234 case DW_OP_implicit_pointer:
30235 case DW_OP_GNU_implicit_pointer:
30236 case DW_OP_GNU_parameter_ref:
30237 case DW_OP_piece:
30238 case DW_OP_bit_piece:
30239 return l;
30240 default:
30241 break;
30242 }
30243 l = l->dw_loc_next;
30244 }
30245 return NULL;
30246 }
30247
30248 /* Return an adjusted copy of EXPR:
30249 If it is an empty DWARF expression, return it.
30250 If it is a valid non-empty DWARF expression,
30251 return a copy of EXPR with DW_OP_deref appended to it.
30252 If it is a DWARF expression followed by DW_OP_reg{N,x}, return a
30253 copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
30254 If it is a DWARF expression followed by DW_OP_stack_value, return a
30255 copy of the DWARF expression without anything appended.
30256 Otherwise, return NULL. */
30257
30258 static dw_loc_descr_ref
30259 copy_deref_exprloc (dw_loc_descr_ref expr)
30260 {
30261 dw_loc_descr_ref tail = NULL;
30262
30263 if (expr == NULL)
30264 return NULL;
30265
30266 dw_loc_descr_ref l = non_dwarf_expression (expr);
30267 if (l && l->dw_loc_next)
30268 return NULL;
30269
30270 if (l)
30271 {
30272 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30273 tail = new_loc_descr ((enum dwarf_location_atom)
30274 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
30275 0, 0);
30276 else
30277 switch (l->dw_loc_opc)
30278 {
30279 case DW_OP_regx:
30280 tail = new_loc_descr (DW_OP_bregx,
30281 l->dw_loc_oprnd1.v.val_unsigned, 0);
30282 break;
30283 case DW_OP_stack_value:
30284 break;
30285 default:
30286 return NULL;
30287 }
30288 }
30289 else
30290 tail = new_loc_descr (DW_OP_deref, 0, 0);
30291
30292 dw_loc_descr_ref ret = NULL, *p = &ret;
30293 while (expr != l)
30294 {
30295 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
30296 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
30297 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
30298 p = &(*p)->dw_loc_next;
30299 expr = expr->dw_loc_next;
30300 }
30301 *p = tail;
30302 return ret;
30303 }
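/* A few illustrative instances of the rules above:

	DW_OP_fbreg -16               -> DW_OP_fbreg -16 DW_OP_deref
	DW_OP_reg3                    -> DW_OP_breg3 0
	DW_OP_regx 36                 -> DW_OP_bregx 36 0
	DW_OP_lit4 DW_OP_stack_value  -> DW_OP_lit4

   An expression ending in e.g. DW_OP_piece or DW_OP_implicit_value
   yields NULL instead.  */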
30304
30305 /* For a DW_AT_string_length attribute with a DW_OP_GNU_variable_value
30306 reference to a variable or argument, adjust it if needed and return:
30307 -1 if the DW_AT_string_length attribute and, if present, the
30308 DW_AT_{string_length_,}byte_size attribute should be removed,
30309 0 if the attribute should be kept, perhaps with minor modifications (no rescan needed),
30310 1 if the attribute has been successfully adjusted. */
30311
30312 static int
30313 optimize_string_length (dw_attr_node *a)
30314 {
30315 dw_loc_descr_ref l = AT_loc (a), lv;
30316 dw_die_ref die;
30317 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30318 {
30319 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
30320 die = lookup_decl_die (decl);
30321 if (die)
30322 {
30323 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30324 l->dw_loc_oprnd1.v.val_die_ref.die = die;
30325 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30326 }
30327 else
30328 return -1;
30329 }
30330 else
30331 die = l->dw_loc_oprnd1.v.val_die_ref.die;
30332
30333 /* DWARF5 allows reference class, so we can then reference the DIE.
30334 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
30335 if (l->dw_loc_next != NULL && dwarf_version >= 5)
30336 {
30337 a->dw_attr_val.val_class = dw_val_class_die_ref;
30338 a->dw_attr_val.val_entry = NULL;
30339 a->dw_attr_val.v.val_die_ref.die = die;
30340 a->dw_attr_val.v.val_die_ref.external = 0;
30341 return 0;
30342 }
30343
30344 dw_attr_node *av = get_AT (die, DW_AT_location);
30345 dw_loc_list_ref d;
30346 bool non_dwarf_expr = false;
30347
30348 if (av == NULL)
30349 return dwarf_strict ? -1 : 0;
30350 switch (AT_class (av))
30351 {
30352 case dw_val_class_loc_list:
30353 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30354 if (d->expr && non_dwarf_expression (d->expr))
30355 non_dwarf_expr = true;
30356 break;
30357 case dw_val_class_view_list:
30358 gcc_unreachable ();
30359 case dw_val_class_loc:
30360 lv = AT_loc (av);
30361 if (lv == NULL)
30362 return dwarf_strict ? -1 : 0;
30363 if (non_dwarf_expression (lv))
30364 non_dwarf_expr = true;
30365 break;
30366 default:
30367 return dwarf_strict ? -1 : 0;
30368 }
30369
30370 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
30371 into DW_OP_call4 or DW_OP_GNU_variable_value into
30372 DW_OP_call4 DW_OP_deref, do so. */
30373 if (!non_dwarf_expr
30374 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
30375 {
30376 l->dw_loc_opc = DW_OP_call4;
30377 if (l->dw_loc_next)
30378 l->dw_loc_next = NULL;
30379 else
30380 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
30381 return 0;
30382 }
30383
30384 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
30385 copy over the DW_AT_location attribute from die to a. */
30386 if (l->dw_loc_next != NULL)
30387 {
30388 a->dw_attr_val = av->dw_attr_val;
30389 return 1;
30390 }
30391
30392 dw_loc_list_ref list, *p;
30393 switch (AT_class (av))
30394 {
30395 case dw_val_class_loc_list:
30396 p = &list;
30397 list = NULL;
30398 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30399 {
30400 lv = copy_deref_exprloc (d->expr);
30401 if (lv)
30402 {
30403 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
30404 p = &(*p)->dw_loc_next;
30405 }
30406 else if (!dwarf_strict && d->expr)
30407 return 0;
30408 }
30409 if (list == NULL)
30410 return dwarf_strict ? -1 : 0;
30411 a->dw_attr_val.val_class = dw_val_class_loc_list;
30412 gen_llsym (list);
30413 *AT_loc_list_ptr (a) = list;
30414 return 1;
30415 case dw_val_class_loc:
30416 lv = copy_deref_exprloc (AT_loc (av));
30417 if (lv == NULL)
30418 return dwarf_strict ? -1 : 0;
30419 a->dw_attr_val.v.val_loc = lv;
30420 return 1;
30421 default:
30422 gcc_unreachable ();
30423 }
30424 }
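/* For orientation only, assuming a character length held in an
   artificial variable whose DIE has a simple DW_AT_location: the
   attribute

	DW_AT_string_length: DW_OP_GNU_variable_value <ref to that DIE>

   is typically rewritten above into

	DW_AT_string_length: DW_OP_call4 <ref> DW_OP_deref

   or, for DWARF5 when the expression was DW_OP_GNU_variable_value
   DW_OP_stack_value, into a plain reference-class attribute pointing
   at that DIE.  */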
30425
30426 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30427 an address in the .rodata section if the string literal is emitted there,
30428 or, if it isn't found in .rodata, remove the containing location list
30429 or replace DW_AT_const_value with DW_AT_location and an empty location
30430 expression. Similarly for SYMBOL_REFs, keep only those that refer
30431 to something that has been emitted in the current CU. */
30432
30433 static void
30434 resolve_addr (dw_die_ref die)
30435 {
30436 dw_die_ref c;
30437 dw_attr_node *a;
30438 dw_loc_list_ref *curr, *start, loc;
30439 unsigned ix;
30440 bool remove_AT_byte_size = false;
30441
30442 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30443 switch (AT_class (a))
30444 {
30445 case dw_val_class_loc_list:
30446 start = curr = AT_loc_list_ptr (a);
30447 loc = *curr;
30448 gcc_assert (loc);
30449 /* The same list can be referenced more than once. See if we have
30450 already recorded the result from a previous pass. */
30451 if (loc->replaced)
30452 *curr = loc->dw_loc_next;
30453 else if (!loc->resolved_addr)
30454 {
30455 /* As things stand, we do not expect or allow one die to
30456 reference a suffix of another die's location list chain.
30457 References must be identical or completely separate.
30458 There is therefore no need to cache the result of this
30459 pass on any list other than the first; doing so
30460 would lead to unnecessary writes. */
30461 while (*curr)
30462 {
30463 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30464 if (!resolve_addr_in_expr (a, (*curr)->expr))
30465 {
30466 dw_loc_list_ref next = (*curr)->dw_loc_next;
30467 dw_loc_descr_ref l = (*curr)->expr;
30468
30469 if (next && (*curr)->ll_symbol)
30470 {
30471 gcc_assert (!next->ll_symbol);
30472 next->ll_symbol = (*curr)->ll_symbol;
30473 next->vl_symbol = (*curr)->vl_symbol;
30474 }
30475 if (dwarf_split_debug_info)
30476 remove_loc_list_addr_table_entries (l);
30477 *curr = next;
30478 }
30479 else
30480 {
30481 mark_base_types ((*curr)->expr);
30482 curr = &(*curr)->dw_loc_next;
30483 }
30484 }
30485 if (loc == *start)
30486 loc->resolved_addr = 1;
30487 else
30488 {
30489 loc->replaced = 1;
30490 loc->dw_loc_next = *start;
30491 }
30492 }
30493 if (!*start)
30494 {
30495 remove_AT (die, a->dw_attr);
30496 ix--;
30497 }
30498 break;
30499 case dw_val_class_view_list:
30500 {
30501 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30502 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30503 dw_val_node *llnode
30504 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30505 /* If we no longer have a loclist, or it no longer needs
30506 views, drop this attribute. */
30507 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30508 {
30509 remove_AT (die, a->dw_attr);
30510 ix--;
30511 }
30512 break;
30513 }
30514 case dw_val_class_loc:
30515 {
30516 dw_loc_descr_ref l = AT_loc (a);
30517 /* DW_OP_GNU_variable_value DW_OP_stack_value or
30518 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30519 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which, unlike
30520 DW_OP_GNU_variable_value, is standard DWARF4. Or, for DWARF5,
30521 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30522 with a DW_FORM_ref referencing the same DIE that
30523 DW_OP_GNU_variable_value used to reference. */
30524 if (a->dw_attr == DW_AT_string_length
30525 && l
30526 && l->dw_loc_opc == DW_OP_GNU_variable_value
30527 && (l->dw_loc_next == NULL
30528 || (l->dw_loc_next->dw_loc_next == NULL
30529 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30530 {
30531 switch (optimize_string_length (a))
30532 {
30533 case -1:
30534 remove_AT (die, a->dw_attr);
30535 ix--;
30536 /* If we drop DW_AT_string_length, we need to drop also
30537 DW_AT_{string_length_,}byte_size. */
30538 remove_AT_byte_size = true;
30539 continue;
30540 default:
30541 break;
30542 case 1:
30543 /* Even if we keep the optimized DW_AT_string_length,
30544 it might have changed AT_class, so process it again. */
30545 ix--;
30546 continue;
30547 }
30548 }
30549 /* For -gdwarf-2 don't attempt to optimize
30550 DW_AT_data_member_location containing
30551 DW_OP_plus_uconst - older consumers might
30552 rely on it being that op instead of a more complex,
30553 but shorter, location description. */
30554 if ((dwarf_version > 2
30555 || a->dw_attr != DW_AT_data_member_location
30556 || l == NULL
30557 || l->dw_loc_opc != DW_OP_plus_uconst
30558 || l->dw_loc_next != NULL)
30559 && !resolve_addr_in_expr (a, l))
30560 {
30561 if (dwarf_split_debug_info)
30562 remove_loc_list_addr_table_entries (l);
30563 if (l != NULL
30564 && l->dw_loc_next == NULL
30565 && l->dw_loc_opc == DW_OP_addr
30566 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30567 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30568 && a->dw_attr == DW_AT_location)
30569 {
30570 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30571 remove_AT (die, a->dw_attr);
30572 ix--;
30573 optimize_location_into_implicit_ptr (die, decl);
30574 break;
30575 }
30576 if (a->dw_attr == DW_AT_string_length)
30577 /* If we drop DW_AT_string_length, we need to drop also
30578 DW_AT_{string_length_,}byte_size. */
30579 remove_AT_byte_size = true;
30580 remove_AT (die, a->dw_attr);
30581 ix--;
30582 }
30583 else
30584 mark_base_types (l);
30585 }
30586 break;
30587 case dw_val_class_addr:
30588 if (a->dw_attr == DW_AT_const_value
30589 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30590 {
30591 if (AT_index (a) != NOT_INDEXED)
30592 remove_addr_table_entry (a->dw_attr_val.val_entry);
30593 remove_AT (die, a->dw_attr);
30594 ix--;
30595 }
30596 if ((die->die_tag == DW_TAG_call_site
30597 && a->dw_attr == DW_AT_call_origin)
30598 || (die->die_tag == DW_TAG_GNU_call_site
30599 && a->dw_attr == DW_AT_abstract_origin))
30600 {
30601 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30602 dw_die_ref tdie = lookup_decl_die (tdecl);
30603 dw_die_ref cdie;
30604 if (tdie == NULL
30605 && DECL_EXTERNAL (tdecl)
30606 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30607 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30608 {
30609 dw_die_ref pdie = cdie;
30610 /* Make sure we don't add these DIEs into type units.
30611 We could emit skeleton DIEs for context (namespaces,
30612 outer structs/classes) and a skeleton DIE for the
30613 innermost context with DW_AT_signature pointing to the
30614 type unit. See PR78835. */
30615 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30616 pdie = pdie->die_parent;
30617 if (pdie == NULL)
30618 {
30619 /* Creating a full DIE for tdecl is overly expensive and,
30620 at this point in the LTO phase, even wrong,
30621 as it can end up generating new type DIEs we didn't
30622 output, and thus optimize_external_refs will crash. */
30623 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30624 add_AT_flag (tdie, DW_AT_external, 1);
30625 add_AT_flag (tdie, DW_AT_declaration, 1);
30626 add_linkage_attr (tdie, tdecl);
30627 add_name_and_src_coords_attributes (tdie, tdecl, true);
30628 equate_decl_number_to_die (tdecl, tdie);
30629 }
30630 }
30631 if (tdie)
30632 {
30633 a->dw_attr_val.val_class = dw_val_class_die_ref;
30634 a->dw_attr_val.v.val_die_ref.die = tdie;
30635 a->dw_attr_val.v.val_die_ref.external = 0;
30636 }
30637 else
30638 {
30639 if (AT_index (a) != NOT_INDEXED)
30640 remove_addr_table_entry (a->dw_attr_val.val_entry);
30641 remove_AT (die, a->dw_attr);
30642 ix--;
30643 }
30644 }
30645 break;
30646 default:
30647 break;
30648 }
30649
30650 if (remove_AT_byte_size)
30651 remove_AT (die, dwarf_version >= 5
30652 ? DW_AT_string_length_byte_size
30653 : DW_AT_byte_size);
30654
30655 FOR_EACH_CHILD (die, c, resolve_addr (c));
30656 }
30657 \f
30658 /* Helper routines for optimize_location_lists.
30659 This pass tries to share identical location lists in the .debug_loc
30660 section. */
30661
30662 /* Iteratively hash operands of LOC opcode into HSTATE. */
30663
30664 static void
30665 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30666 {
30667 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30668 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30669
30670 switch (loc->dw_loc_opc)
30671 {
30672 case DW_OP_const4u:
30673 case DW_OP_const8u:
30674 if (loc->dtprel)
30675 goto hash_addr;
30676 /* FALLTHRU */
30677 case DW_OP_const1u:
30678 case DW_OP_const1s:
30679 case DW_OP_const2u:
30680 case DW_OP_const2s:
30681 case DW_OP_const4s:
30682 case DW_OP_const8s:
30683 case DW_OP_constu:
30684 case DW_OP_consts:
30685 case DW_OP_pick:
30686 case DW_OP_plus_uconst:
30687 case DW_OP_breg0:
30688 case DW_OP_breg1:
30689 case DW_OP_breg2:
30690 case DW_OP_breg3:
30691 case DW_OP_breg4:
30692 case DW_OP_breg5:
30693 case DW_OP_breg6:
30694 case DW_OP_breg7:
30695 case DW_OP_breg8:
30696 case DW_OP_breg9:
30697 case DW_OP_breg10:
30698 case DW_OP_breg11:
30699 case DW_OP_breg12:
30700 case DW_OP_breg13:
30701 case DW_OP_breg14:
30702 case DW_OP_breg15:
30703 case DW_OP_breg16:
30704 case DW_OP_breg17:
30705 case DW_OP_breg18:
30706 case DW_OP_breg19:
30707 case DW_OP_breg20:
30708 case DW_OP_breg21:
30709 case DW_OP_breg22:
30710 case DW_OP_breg23:
30711 case DW_OP_breg24:
30712 case DW_OP_breg25:
30713 case DW_OP_breg26:
30714 case DW_OP_breg27:
30715 case DW_OP_breg28:
30716 case DW_OP_breg29:
30717 case DW_OP_breg30:
30718 case DW_OP_breg31:
30719 case DW_OP_regx:
30720 case DW_OP_fbreg:
30721 case DW_OP_piece:
30722 case DW_OP_deref_size:
30723 case DW_OP_xderef_size:
30724 hstate.add_object (val1->v.val_int);
30725 break;
30726 case DW_OP_skip:
30727 case DW_OP_bra:
30728 {
30729 int offset;
30730
30731 gcc_assert (val1->val_class == dw_val_class_loc);
30732 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30733 hstate.add_object (offset);
30734 }
30735 break;
30736 case DW_OP_implicit_value:
30737 hstate.add_object (val1->v.val_unsigned);
30738 switch (val2->val_class)
30739 {
30740 case dw_val_class_const:
30741 hstate.add_object (val2->v.val_int);
30742 break;
30743 case dw_val_class_vec:
30744 {
30745 unsigned int elt_size = val2->v.val_vec.elt_size;
30746 unsigned int len = val2->v.val_vec.length;
30747
30748 hstate.add_int (elt_size);
30749 hstate.add_int (len);
30750 hstate.add (val2->v.val_vec.array, len * elt_size);
30751 }
30752 break;
30753 case dw_val_class_const_double:
30754 hstate.add_object (val2->v.val_double.low);
30755 hstate.add_object (val2->v.val_double.high);
30756 break;
30757 case dw_val_class_wide_int:
30758 hstate.add (val2->v.val_wide->get_val (),
30759 get_full_len (*val2->v.val_wide)
30760 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30761 break;
30762 case dw_val_class_addr:
30763 inchash::add_rtx (val2->v.val_addr, hstate);
30764 break;
30765 default:
30766 gcc_unreachable ();
30767 }
30768 break;
30769 case DW_OP_bregx:
30770 case DW_OP_bit_piece:
30771 hstate.add_object (val1->v.val_int);
30772 hstate.add_object (val2->v.val_int);
30773 break;
30774 case DW_OP_addr:
30775 hash_addr:
30776 if (loc->dtprel)
30777 {
30778 unsigned char dtprel = 0xd1;
30779 hstate.add_object (dtprel);
30780 }
30781 inchash::add_rtx (val1->v.val_addr, hstate);
30782 break;
30783 case DW_OP_GNU_addr_index:
30784 case DW_OP_addrx:
30785 case DW_OP_GNU_const_index:
30786 case DW_OP_constx:
30787 {
30788 if (loc->dtprel)
30789 {
30790 unsigned char dtprel = 0xd1;
30791 hstate.add_object (dtprel);
30792 }
30793 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30794 }
30795 break;
30796 case DW_OP_implicit_pointer:
30797 case DW_OP_GNU_implicit_pointer:
30798 hstate.add_int (val2->v.val_int);
30799 break;
30800 case DW_OP_entry_value:
30801 case DW_OP_GNU_entry_value:
30802 hstate.add_object (val1->v.val_loc);
30803 break;
30804 case DW_OP_regval_type:
30805 case DW_OP_deref_type:
30806 case DW_OP_GNU_regval_type:
30807 case DW_OP_GNU_deref_type:
30808 {
30809 unsigned int byte_size
30810 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30811 unsigned int encoding
30812 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30813 hstate.add_object (val1->v.val_int);
30814 hstate.add_object (byte_size);
30815 hstate.add_object (encoding);
30816 }
30817 break;
30818 case DW_OP_convert:
30819 case DW_OP_reinterpret:
30820 case DW_OP_GNU_convert:
30821 case DW_OP_GNU_reinterpret:
30822 if (val1->val_class == dw_val_class_unsigned_const)
30823 {
30824 hstate.add_object (val1->v.val_unsigned);
30825 break;
30826 }
30827 /* FALLTHRU */
30828 case DW_OP_const_type:
30829 case DW_OP_GNU_const_type:
30830 {
30831 unsigned int byte_size
30832 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30833 unsigned int encoding
30834 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30835 hstate.add_object (byte_size);
30836 hstate.add_object (encoding);
30837 if (loc->dw_loc_opc != DW_OP_const_type
30838 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30839 break;
30840 hstate.add_object (val2->val_class);
30841 switch (val2->val_class)
30842 {
30843 case dw_val_class_const:
30844 hstate.add_object (val2->v.val_int);
30845 break;
30846 case dw_val_class_vec:
30847 {
30848 unsigned int elt_size = val2->v.val_vec.elt_size;
30849 unsigned int len = val2->v.val_vec.length;
30850
30851 hstate.add_object (elt_size);
30852 hstate.add_object (len);
30853 hstate.add (val2->v.val_vec.array, len * elt_size);
30854 }
30855 break;
30856 case dw_val_class_const_double:
30857 hstate.add_object (val2->v.val_double.low);
30858 hstate.add_object (val2->v.val_double.high);
30859 break;
30860 case dw_val_class_wide_int:
30861 hstate.add (val2->v.val_wide->get_val (),
30862 get_full_len (*val2->v.val_wide)
30863 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30864 break;
30865 default:
30866 gcc_unreachable ();
30867 }
30868 }
30869 break;
30870
30871 default:
30872 /* Other codes have no operands. */
30873 break;
30874 }
30875 }
30876
30877 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30878
30879 static inline void
30880 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30881 {
30882 dw_loc_descr_ref l;
30883 bool sizes_computed = false;
30884 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
30885 size_of_locs (loc);
30886
30887 for (l = loc; l != NULL; l = l->dw_loc_next)
30888 {
30889 enum dwarf_location_atom opc = l->dw_loc_opc;
30890 hstate.add_object (opc);
30891 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30892 {
30893 size_of_locs (loc);
30894 sizes_computed = true;
30895 }
30896 hash_loc_operands (l, hstate);
30897 }
30898 }
30899
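/* hash_loc_operands and hash_locs feed the checksum that hash_loc_list
   below caches in LIST_HEAD->hash; loc_list_hasher::hash then simply
   returns that cached value, so each list is only walked once for
   hashing.  */
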
30900 /* Compute hash of the whole location list LIST_HEAD. */
30901
30902 static inline void
30903 hash_loc_list (dw_loc_list_ref list_head)
30904 {
30905 dw_loc_list_ref curr = list_head;
30906 inchash::hash hstate;
30907
30908 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30909 {
30910 hstate.add (curr->begin, strlen (curr->begin) + 1);
30911 hstate.add (curr->end, strlen (curr->end) + 1);
30912 hstate.add_object (curr->vbegin);
30913 hstate.add_object (curr->vend);
30914 if (curr->section)
30915 hstate.add (curr->section, strlen (curr->section) + 1);
30916 hash_locs (curr->expr, hstate);
30917 }
30918 list_head->hash = hstate.end ();
30919 }
30920
30921 /* Return true if X and Y opcodes have the same operands. */
30922
30923 static inline bool
30924 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30925 {
30926 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30927 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30928 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30929 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30930
30931 switch (x->dw_loc_opc)
30932 {
30933 case DW_OP_const4u:
30934 case DW_OP_const8u:
30935 if (x->dtprel)
30936 goto hash_addr;
30937 /* FALLTHRU */
30938 case DW_OP_const1u:
30939 case DW_OP_const1s:
30940 case DW_OP_const2u:
30941 case DW_OP_const2s:
30942 case DW_OP_const4s:
30943 case DW_OP_const8s:
30944 case DW_OP_constu:
30945 case DW_OP_consts:
30946 case DW_OP_pick:
30947 case DW_OP_plus_uconst:
30948 case DW_OP_breg0:
30949 case DW_OP_breg1:
30950 case DW_OP_breg2:
30951 case DW_OP_breg3:
30952 case DW_OP_breg4:
30953 case DW_OP_breg5:
30954 case DW_OP_breg6:
30955 case DW_OP_breg7:
30956 case DW_OP_breg8:
30957 case DW_OP_breg9:
30958 case DW_OP_breg10:
30959 case DW_OP_breg11:
30960 case DW_OP_breg12:
30961 case DW_OP_breg13:
30962 case DW_OP_breg14:
30963 case DW_OP_breg15:
30964 case DW_OP_breg16:
30965 case DW_OP_breg17:
30966 case DW_OP_breg18:
30967 case DW_OP_breg19:
30968 case DW_OP_breg20:
30969 case DW_OP_breg21:
30970 case DW_OP_breg22:
30971 case DW_OP_breg23:
30972 case DW_OP_breg24:
30973 case DW_OP_breg25:
30974 case DW_OP_breg26:
30975 case DW_OP_breg27:
30976 case DW_OP_breg28:
30977 case DW_OP_breg29:
30978 case DW_OP_breg30:
30979 case DW_OP_breg31:
30980 case DW_OP_regx:
30981 case DW_OP_fbreg:
30982 case DW_OP_piece:
30983 case DW_OP_deref_size:
30984 case DW_OP_xderef_size:
30985 return valx1->v.val_int == valy1->v.val_int;
30986 case DW_OP_skip:
30987 case DW_OP_bra:
30988 /* If splitting debug info, the use of DW_OP_GNU_addr_index
30989 can cause irrelevant differences in dw_loc_addr. */
30990 gcc_assert (valx1->val_class == dw_val_class_loc
30991 && valy1->val_class == dw_val_class_loc
30992 && (dwarf_split_debug_info
30993 || x->dw_loc_addr == y->dw_loc_addr));
30994 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
30995 case DW_OP_implicit_value:
30996 if (valx1->v.val_unsigned != valy1->v.val_unsigned
30997 || valx2->val_class != valy2->val_class)
30998 return false;
30999 switch (valx2->val_class)
31000 {
31001 case dw_val_class_const:
31002 return valx2->v.val_int == valy2->v.val_int;
31003 case dw_val_class_vec:
31004 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
31005 && valx2->v.val_vec.length == valy2->v.val_vec.length
31006 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
31007 valx2->v.val_vec.elt_size
31008 * valx2->v.val_vec.length) == 0;
31009 case dw_val_class_const_double:
31010 return valx2->v.val_double.low == valy2->v.val_double.low
31011 && valx2->v.val_double.high == valy2->v.val_double.high;
31012 case dw_val_class_wide_int:
31013 return *valx2->v.val_wide == *valy2->v.val_wide;
31014 case dw_val_class_addr:
31015 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
31016 default:
31017 gcc_unreachable ();
31018 }
31019 case DW_OP_bregx:
31020 case DW_OP_bit_piece:
31021 return valx1->v.val_int == valy1->v.val_int
31022 && valx2->v.val_int == valy2->v.val_int;
31023 case DW_OP_addr:
31024 hash_addr:
31025 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
31026 case DW_OP_GNU_addr_index:
31027 case DW_OP_addrx:
31028 case DW_OP_GNU_const_index:
31029 case DW_OP_constx:
31030 {
31031 rtx ax1 = valx1->val_entry->addr.rtl;
31032 rtx ay1 = valy1->val_entry->addr.rtl;
31033 return rtx_equal_p (ax1, ay1);
31034 }
31035 case DW_OP_implicit_pointer:
31036 case DW_OP_GNU_implicit_pointer:
31037 return valx1->val_class == dw_val_class_die_ref
31038 && valx1->val_class == valy1->val_class
31039 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
31040 && valx2->v.val_int == valy2->v.val_int;
31041 case DW_OP_entry_value:
31042 case DW_OP_GNU_entry_value:
31043 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
31044 case DW_OP_const_type:
31045 case DW_OP_GNU_const_type:
31046 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
31047 || valx2->val_class != valy2->val_class)
31048 return false;
31049 switch (valx2->val_class)
31050 {
31051 case dw_val_class_const:
31052 return valx2->v.val_int == valy2->v.val_int;
31053 case dw_val_class_vec:
31054 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
31055 && valx2->v.val_vec.length == valy2->v.val_vec.length
31056 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
31057 valx2->v.val_vec.elt_size
31058 * valx2->v.val_vec.length) == 0;
31059 case dw_val_class_const_double:
31060 return valx2->v.val_double.low == valy2->v.val_double.low
31061 && valx2->v.val_double.high == valy2->v.val_double.high;
31062 case dw_val_class_wide_int:
31063 return *valx2->v.val_wide == *valy2->v.val_wide;
31064 default:
31065 gcc_unreachable ();
31066 }
31067 case DW_OP_regval_type:
31068 case DW_OP_deref_type:
31069 case DW_OP_GNU_regval_type:
31070 case DW_OP_GNU_deref_type:
31071 return valx1->v.val_int == valy1->v.val_int
31072 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
31073 case DW_OP_convert:
31074 case DW_OP_reinterpret:
31075 case DW_OP_GNU_convert:
31076 case DW_OP_GNU_reinterpret:
31077 if (valx1->val_class != valy1->val_class)
31078 return false;
31079 if (valx1->val_class == dw_val_class_unsigned_const)
31080 return valx1->v.val_unsigned == valy1->v.val_unsigned;
31081 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
31082 case DW_OP_GNU_parameter_ref:
31083 return valx1->val_class == dw_val_class_die_ref
31084 && valx1->val_class == valy1->val_class
31085 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
31086 default:
31087 /* Other codes have no operands. */
31088 return true;
31089 }
31090 }
31091
31092 /* Return true if DWARF location expressions X and Y are the same. */
31093
31094 static inline bool
31095 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
31096 {
31097 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
31098 if (x->dw_loc_opc != y->dw_loc_opc
31099 || x->dtprel != y->dtprel
31100 || !compare_loc_operands (x, y))
31101 break;
31102 return x == NULL && y == NULL;
31103 }
31104
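/* loc_list_hasher below provides the hash/equality pair for the table
   used by optimize_location_lists_1: hashing relies on the checksum
   precomputed by hash_loc_list, while equality walks the lists and
   compares their expressions via compare_locs.  */
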
31105 /* Hashtable helpers. */
31106
31107 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
31108 {
31109 static inline hashval_t hash (const dw_loc_list_struct *);
31110 static inline bool equal (const dw_loc_list_struct *,
31111 const dw_loc_list_struct *);
31112 };
31113
31114 /* Return precomputed hash of location list X. */
31115
31116 inline hashval_t
31117 loc_list_hasher::hash (const dw_loc_list_struct *x)
31118 {
31119 return x->hash;
31120 }
31121
31122 /* Return true if location lists A and B are the same. */
31123
31124 inline bool
31125 loc_list_hasher::equal (const dw_loc_list_struct *a,
31126 const dw_loc_list_struct *b)
31127 {
31128 if (a == b)
31129 return true;
31130 if (a->hash != b->hash)
31131 return false;
31132 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
31133 if (strcmp (a->begin, b->begin) != 0
31134 || strcmp (a->end, b->end) != 0
31135 || (a->section == NULL) != (b->section == NULL)
31136 || (a->section && strcmp (a->section, b->section) != 0)
31137 || a->vbegin != b->vbegin || a->vend != b->vend
31138 || !compare_locs (a->expr, b->expr))
31139 break;
31140 return a == NULL && b == NULL;
31141 }
31142
31143 typedef hash_table<loc_list_hasher> loc_list_hash_type;
31144
31145
31146 /* Recursively optimize location lists referenced from DIE
31147 children and share them whenever possible. */
31148
31149 static void
31150 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
31151 {
31152 dw_die_ref c;
31153 dw_attr_node *a;
31154 unsigned ix;
31155 dw_loc_list_struct **slot;
31156 bool drop_locviews = false;
31157 bool has_locviews = false;
31158
31159 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31160 if (AT_class (a) == dw_val_class_loc_list)
31161 {
31162 dw_loc_list_ref list = AT_loc_list (a);
31163 /* TODO: perform some optimizations here, before hashing
31164 it and storing into the hash table. */
31165 hash_loc_list (list);
31166 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
31167 if (*slot == NULL)
31168 {
31169 *slot = list;
31170 if (loc_list_has_views (list))
31171 gcc_assert (list->vl_symbol);
31172 else if (list->vl_symbol)
31173 {
31174 drop_locviews = true;
31175 list->vl_symbol = NULL;
31176 }
31177 }
31178 else
31179 {
31180 if (list->vl_symbol && !(*slot)->vl_symbol)
31181 drop_locviews = true;
31182 a->dw_attr_val.v.val_loc_list = *slot;
31183 }
31184 }
31185 else if (AT_class (a) == dw_val_class_view_list)
31186 {
31187 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
31188 has_locviews = true;
31189 }
31190
31191
31192 if (drop_locviews && has_locviews)
31193 remove_AT (die, DW_AT_GNU_locviews);
31194
31195 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
31196 }
31197
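/* For example, if two variables in a function end up with identical
   DW_AT_location lists (same begin/end labels, views, section and
   expressions), the second attribute is simply redirected to the list
   already in the hash table, so the shared list only needs to be
   emitted once.  */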
31198
31199 /* Recursively assign each location list a unique index into the debug_addr
31200 section. */
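/* More precisely, each location list entry's begin label gets an
   ate_kind_label entry in the address table, so that under
   -gsplit-dwarf the entry's start address can be referred to by its
   index into .debug_addr instead of needing a relocation.  */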
31201
31202 static void
31203 index_location_lists (dw_die_ref die)
31204 {
31205 dw_die_ref c;
31206 dw_attr_node *a;
31207 unsigned ix;
31208
31209 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31210 if (AT_class (a) == dw_val_class_loc_list)
31211 {
31212 dw_loc_list_ref list = AT_loc_list (a);
31213 dw_loc_list_ref curr;
31214 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
31215 {
31216 /* Don't index an entry that has already been indexed
31217 or won't be output. Make sure skip_loc_list_entry doesn't
31218 call size_of_locs here, because that could create a circular
31219 dependency: index_location_lists would then require the address
31220 table indexes to be computed, yet it is still adding new indexes
31221 through add_addr_table_entry, and the index computation requires
31222 that no new entries be added to the hash table. In the rare case
31223 of a DWARF 2/3/4 location expression of 64KB or more, we'll just
31224 waste an unused address table entry for it. */
31225 if (curr->begin_entry != NULL
31226 || skip_loc_list_entry (curr))
31227 continue;
31228
31229 curr->begin_entry
31230 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
31231 }
31232 }
31233
31234 FOR_EACH_CHILD (die, c, index_location_lists (c));
31235 }
31236
31237 /* Optimize location lists referenced from DIE
31238 children and share them whenever possible. */
31239
31240 static void
31241 optimize_location_lists (dw_die_ref die)
31242 {
31243 loc_list_hash_type htab (500);
31244 optimize_location_lists_1 (die, &htab);
31245 }
31246 \f
31247 /* Traverse the limbo die list, and add parent/child links. The only
31248 dies without parents that should be here are concrete instances of
31249 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
31250 For concrete instances, we can get the parent die from the abstract
31251 instance. */
31252
31253 static void
31254 flush_limbo_die_list (void)
31255 {
31256 limbo_die_node *node;
31257
31258 /* get_context_die calls force_decl_die, which can put new DIEs on the
31259 limbo list in LTO mode when nested functions are put in a different
31260 partition than that of their parent function. */
31261 while ((node = limbo_die_list))
31262 {
31263 dw_die_ref die = node->die;
31264 limbo_die_list = node->next;
31265
31266 if (die->die_parent == NULL)
31267 {
31268 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
31269
31270 if (origin && origin->die_parent)
31271 add_child_die (origin->die_parent, die);
31272 else if (is_cu_die (die))
31273 ;
31274 else if (seen_error ())
31275 /* It's OK to be confused by errors in the input. */
31276 add_child_die (comp_unit_die (), die);
31277 else
31278 {
31279 /* In certain situations, the lexical block containing a
31280 nested function can be optimized away, which results
31281 in the nested function die being orphaned. Likewise
31282 with the return type of that nested function. Force
31283 this to be a child of the containing function.
31284
31285 It may happen that even the containing function got fully
31286 inlined and optimized out. In that case we are lost and
31287 assign the empty child. This should not be a big issue, as
31288 the function is likely unreachable too. */
31289 gcc_assert (node->created_for);
31290
31291 if (DECL_P (node->created_for))
31292 origin = get_context_die (DECL_CONTEXT (node->created_for));
31293 else if (TYPE_P (node->created_for))
31294 origin = scope_die_for (node->created_for, comp_unit_die ());
31295 else
31296 origin = comp_unit_die ();
31297
31298 add_child_die (origin, die);
31299 }
31300 }
31301 }
31302 }
31303
31304 /* Reset DIEs so we can output them again. */
31305
31306 static void
31307 reset_dies (dw_die_ref die)
31308 {
31309 dw_die_ref c;
31310
31311 /* Remove stuff we re-generate. */
31312 die->die_mark = 0;
31313 die->die_offset = 0;
31314 die->die_abbrev = 0;
31315 remove_AT (die, DW_AT_sibling);
31316
31317 FOR_EACH_CHILD (die, c, reset_dies (c));
31318 }
31319
31320 /* Output stuff that DWARF requires at the end of every file,
31321 and generate the DWARF-2 debugging info. */
31322
31323 static void
31324 dwarf2out_finish (const char *filename)
31325 {
31326 comdat_type_node *ctnode;
31327 dw_die_ref main_comp_unit_die;
31328 unsigned char checksum[16];
31329 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31330
31331 /* Flush out any latecomers to the limbo party. */
31332 flush_limbo_die_list ();
31333
31334 if (inline_entry_data_table)
31335 gcc_assert (inline_entry_data_table->is_empty ());
31336
31337 if (flag_checking)
31338 {
31339 verify_die (comp_unit_die ());
31340 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31341 verify_die (node->die);
31342 }
31343
31344 /* We shouldn't have any symbols with delayed asm names for
31345 DIEs generated after early finish. */
31346 gcc_assert (deferred_asm_name == NULL);
31347
31348 gen_remaining_tmpl_value_param_die_attribute ();
31349
31350 if (flag_generate_lto || flag_generate_offload)
31351 {
31352 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
31353
31354 /* Prune stuff so that dwarf2out_finish runs successfully
31355 for the fat part of the object. */
31356 reset_dies (comp_unit_die ());
31357 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31358 reset_dies (node->die);
31359
31360 hash_table<comdat_type_hasher> comdat_type_table (100);
31361 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31362 {
31363 comdat_type_node **slot
31364 = comdat_type_table.find_slot (ctnode, INSERT);
31365
31366 /* Don't reset types twice. */
31367 if (*slot != HTAB_EMPTY_ENTRY)
31368 continue;
31369
31370 /* Remove the pointer to the line table. */
31371 remove_AT (ctnode->root_die, DW_AT_stmt_list);
31372
31373 if (debug_info_level >= DINFO_LEVEL_TERSE)
31374 reset_dies (ctnode->root_die);
31375
31376 *slot = ctnode;
31377 }
31378
31379 /* Reset die CU symbol so we don't output it twice. */
31380 comp_unit_die ()->die_id.die_symbol = NULL;
31381
31382 /* Remove DW_AT_macro and DW_AT_stmt_list from the early output. */
31383 remove_AT (comp_unit_die (), DW_AT_stmt_list);
31384 if (have_macinfo)
31385 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
31386
31387 /* Remove indirect string decisions. */
31388 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
31389 if (debug_line_str_hash)
31390 {
31391 debug_line_str_hash->traverse<void *, reset_indirect_string> (NULL);
31392 debug_line_str_hash = NULL;
31393 }
31394 }
31395
31396 #if ENABLE_ASSERT_CHECKING
31397 {
31398 dw_die_ref die = comp_unit_die (), c;
31399 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
31400 }
31401 #endif
31402 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31403 resolve_addr (ctnode->root_die);
31404 resolve_addr (comp_unit_die ());
31405 move_marked_base_types ();
31406
31407 if (dump_file)
31408 {
31409 fprintf (dump_file, "DWARF for %s\n", filename);
31410 print_die (comp_unit_die (), dump_file);
31411 }
31412
31413 /* Initialize sections and labels used for actual assembler output. */
31414 unsigned generation = init_sections_and_labels (false);
31415
31416 /* Traverse the DIE's and add sibling attributes to those DIE's that
31417 have children. */
31418 add_sibling_attributes (comp_unit_die ());
31419 limbo_die_node *node;
31420 for (node = cu_die_list; node; node = node->next)
31421 add_sibling_attributes (node->die);
31422 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31423 add_sibling_attributes (ctnode->root_die);
31424
31425 /* When splitting DWARF info, we put some attributes in the
31426 skeleton compile_unit DIE that remains in the .o, while
31427 most attributes go in the DWO compile_unit_die. */
31428 if (dwarf_split_debug_info)
31429 {
31430 limbo_die_node *cu;
31431 main_comp_unit_die = gen_compile_unit_die (NULL);
31432 if (dwarf_version >= 5)
31433 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
31434 cu = limbo_die_list;
31435 gcc_assert (cu->die == main_comp_unit_die);
31436 limbo_die_list = limbo_die_list->next;
31437 cu->next = cu_die_list;
31438 cu_die_list = cu;
31439 }
31440 else
31441 main_comp_unit_die = comp_unit_die ();
31442
31443 /* Output a terminator label for the .text section. */
31444 switch_to_section (text_section);
31445 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
31446 if (cold_text_section)
31447 {
31448 switch_to_section (cold_text_section);
31449 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
31450 }
31451
31452 /* We can only use the low/high_pc attributes if all of the code was
31453 in .text. */
31454 if (!have_multiple_function_sections
31455 || (dwarf_version < 3 && dwarf_strict))
31456 {
31457 /* Don't add if the CU has no associated code. */
31458 if (text_section_used)
31459 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31460 text_end_label, true);
31461 }
31462 else
31463 {
31464 unsigned fde_idx;
31465 dw_fde_ref fde;
31466 bool range_list_added = false;
31467
31468 if (text_section_used)
31469 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31470 text_end_label, &range_list_added, true);
31471 if (cold_text_section_used)
31472 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31473 cold_end_label, &range_list_added, true);
31474
31475 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31476 {
31477 if (DECL_IGNORED_P (fde->decl))
31478 continue;
31479 if (!fde->in_std_section)
31480 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31481 fde->dw_fde_end, &range_list_added,
31482 true);
31483 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31484 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31485 fde->dw_fde_second_end, &range_list_added,
31486 true);
31487 }
31488
31489 if (range_list_added)
31490 {
31491 /* We need to give .debug_loc and .debug_ranges an appropriate
31492 "base address". Use zero so that these addresses become
31493 absolute. Historically, we've emitted the unexpected
31494 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31495 Emit both to give time for other tools to adapt. */
31496 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31497 if (! dwarf_strict && dwarf_version < 4)
31498 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31499
31500 add_ranges (NULL);
31501 }
31502 }
31503
31504 /* AIX Assembler inserts the length, so adjust the reference to match the
31505 offset expected by debuggers. */
31506 strcpy (dl_section_ref, debug_line_section_label);
31507 if (XCOFF_DEBUGGING_INFO)
31508 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31509
31510 if (debug_info_level >= DINFO_LEVEL_TERSE)
31511 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31512 dl_section_ref);
31513
31514 if (have_macinfo)
31515 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31516 macinfo_section_label);
31517
31518 if (dwarf_split_debug_info)
31519 {
31520 if (have_location_lists)
31521 {
31522 /* Since we generate the loclists in the split DWARF .dwo
31523 file itself, we don't need to generate a loclists_base
31524 attribute for the split compile unit DIE. That attribute
31525 (and using relocatable sec_offset FORMs) isn't allowed
31526 for a split compile unit. Only if the .debug_loclists
31527 section was in the main file, would we need to generate a
31528 loclists_base attribute here (for the full or skeleton
31529 unit DIE). */
31530
31531 /* optimize_location_lists calculates the size of the lists,
31532 so index them first, and assign indices to the entries.
31533 Although optimize_location_lists will remove entries from
31534 the table, it only does so for duplicates, and therefore
31535 only reduces ref_counts to 1. */
31536 index_location_lists (comp_unit_die ());
31537 }
31538
31539 if (addr_index_table != NULL)
31540 {
31541 unsigned int index = 0;
31542 addr_index_table
31543 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31544 (&index);
31545 }
31546 }
31547
31548 loc_list_idx = 0;
31549 if (have_location_lists)
31550 {
31551 optimize_location_lists (comp_unit_die ());
31552 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31553 if (dwarf_version >= 5 && dwarf_split_debug_info)
31554 assign_location_list_indexes (comp_unit_die ());
31555 }
31556
31557 save_macinfo_strings ();
31558
31559 if (dwarf_split_debug_info)
31560 {
31561 unsigned int index = 0;
31562
31563 /* Add attributes common to skeleton compile_units and
31564 type_units. Because these attributes include strings, it
31565 must be done before freezing the string table. Top-level
31566 skeleton die attrs are added when the skeleton type unit is
31567 created, so ensure it is created by this point. */
31568 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31569 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31570 }
31571
31572 /* Output all of the compilation units. We put the main one last so that
31573 the offsets are available to output_pubnames. */
31574 for (node = cu_die_list; node; node = node->next)
31575 output_comp_unit (node->die, 0, NULL);
31576
31577 hash_table<comdat_type_hasher> comdat_type_table (100);
31578 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31579 {
31580 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31581
31582 /* Don't output duplicate types. */
31583 if (*slot != HTAB_EMPTY_ENTRY)
31584 continue;
31585
31586 /* Add a pointer to the line table for the main compilation unit
31587 so that the debugger can make sense of DW_AT_decl_file
31588 attributes. */
31589 if (debug_info_level >= DINFO_LEVEL_TERSE)
31590 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31591 (!dwarf_split_debug_info
31592 ? dl_section_ref
31593 : debug_skeleton_line_section_label));
31594
31595 output_comdat_type_unit (ctnode, false);
31596 *slot = ctnode;
31597 }
31598
31599 if (dwarf_split_debug_info)
31600 {
31601 int mark;
31602 struct md5_ctx ctx;
31603
31604 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31605 index_rnglists ();
31606
31607 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31608 md5_init_ctx (&ctx);
31609 mark = 0;
31610 die_checksum (comp_unit_die (), &ctx, &mark);
31611 unmark_all_dies (comp_unit_die ());
31612 md5_finish_ctx (&ctx, checksum);
31613
31614 if (dwarf_version < 5)
31615 {
31616 /* Use the first 8 bytes of the checksum as the dwo_id,
31617 and add it to both comp-unit DIEs. */
31618 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31619 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31620 }
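/* For DWARF 5 the checksum is instead passed to output_comp_unit and
   output_skeleton_debug_sections below, which place the dwo_id in the
   split and skeleton unit headers rather than in a DW_AT_GNU_dwo_id
   attribute.  */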
31621
31622 /* Add the base offset of the ranges table to the skeleton
31623 comp-unit DIE. */
31624 if (!vec_safe_is_empty (ranges_table))
31625 {
31626 if (dwarf_version >= 5)
31627 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31628 ranges_base_label);
31629 else
31630 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31631 ranges_section_label);
31632 }
31633
31634 switch_to_section (debug_addr_section);
31635 /* GNU DebugFission (https://gcc.gnu.org/wiki/DebugFission),
31636 which GCC uses to implement -gsplit-dwarf as a GNU DWARF extension
31637 before DWARF5, didn't have a header for .debug_addr units.
31638 DWARF5 specifies a small header when address tables are used. */
31639 if (dwarf_version >= 5)
31640 {
31641 unsigned int last_idx = 0;
31642 unsigned long addrs_length;
31643
31644 addr_index_table->traverse_noresize
31645 <unsigned int *, count_index_addrs> (&last_idx);
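/* The unit length field excludes itself; the extra 4 bytes cover the
   2-byte version, 1-byte address size and 1-byte segment selector size
   emitted below, which precede the address entries themselves.  */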
31646 addrs_length = last_idx * DWARF2_ADDR_SIZE + 4;
31647
31648 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31649 dw2_asm_output_data (4, 0xffffffff,
31650 "Escape value for 64-bit DWARF extension");
31651 dw2_asm_output_data (DWARF_OFFSET_SIZE, addrs_length,
31652 "Length of Address Unit");
31653 dw2_asm_output_data (2, 5, "DWARF addr version");
31654 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
31655 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
31656 }
31657 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
31658 output_addr_table ();
31659 }
31660
31661 /* Output the main compilation unit if non-empty or if .debug_macinfo
31662 or .debug_macro will be emitted. */
31663 output_comp_unit (comp_unit_die (), have_macinfo,
31664 dwarf_split_debug_info ? checksum : NULL);
31665
31666 if (dwarf_split_debug_info && info_section_emitted)
31667 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31668
31669 /* Output the abbreviation table. */
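/* Slot 0 of abbrev_die_table appears to be a dummy so that abbreviation
   codes can start at 1; a length of 1 therefore means no real
   abbreviations were created and the section can be omitted.  */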
31670 if (vec_safe_length (abbrev_die_table) != 1)
31671 {
31672 switch_to_section (debug_abbrev_section);
31673 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31674 output_abbrev_section ();
31675 }
31676
31677 /* Output location list section if necessary. */
31678 if (have_location_lists)
31679 {
31680 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31681 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31682 /* Output the location lists info. */
31683 switch_to_section (debug_loc_section);
31684 if (dwarf_version >= 5)
31685 {
31686 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 2);
31687 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 3);
31688 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31689 dw2_asm_output_data (4, 0xffffffff,
31690 "Initial length escape value indicating "
31691 "64-bit DWARF extension");
31692 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
31693 "Length of Location Lists");
31694 ASM_OUTPUT_LABEL (asm_out_file, l1);
31695 output_dwarf_version ();
31696 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31697 dw2_asm_output_data (1, 0, "Segment Size");
31698 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31699 "Offset Entry Count");
31700 }
31701 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
31702 if (dwarf_version >= 5 && dwarf_split_debug_info)
31703 {
31704 unsigned int save_loc_list_idx = loc_list_idx;
31705 loc_list_idx = 0;
31706 output_loclists_offsets (comp_unit_die ());
31707 gcc_assert (save_loc_list_idx == loc_list_idx);
31708 }
31709 output_location_lists (comp_unit_die ());
31710 if (dwarf_version >= 5)
31711 ASM_OUTPUT_LABEL (asm_out_file, l2);
31712 }
31713
31714 output_pubtables ();
31715
31716 /* Output the address range information if a CU (.debug_info section)
31717 was emitted. We output an empty table even if we had no functions
31718 to put in it. This is because the consumer has no way to tell the
31719 difference between an empty table that we omitted and failure to
31720 generate a table that would have contained data. */
31721 if (info_section_emitted)
31722 {
31723 switch_to_section (debug_aranges_section);
31724 output_aranges ();
31725 }
31726
31727 /* Output ranges section if necessary. */
31728 if (!vec_safe_is_empty (ranges_table))
31729 {
31730 if (dwarf_version >= 5)
31731 output_rnglists (generation);
31732 else
31733 output_ranges ();
31734 }
31735
31736 /* Have to end the macro section. */
31737 if (have_macinfo)
31738 {
31739 switch_to_section (debug_macinfo_section);
31740 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31741 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31742 : debug_skeleton_line_section_label, false);
31743 dw2_asm_output_data (1, 0, "End compilation unit");
31744 }
31745
31746 /* Output the source line correspondence table. We must do this
31747 even if there is no line information. Otherwise, on an empty
31748 translation unit, we will generate a present, but empty,
31749 .debug_info section. IRIX 6.5 `nm' will then complain when
31750 examining the file. This is done late so that any filenames
31751 used by the debug_info section are marked as 'used'. */
31752 switch_to_section (debug_line_section);
31753 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31754 if (! output_asm_line_debug_info ())
31755 output_line_info (false);
31756
31757 if (dwarf_split_debug_info && info_section_emitted)
31758 {
31759 switch_to_section (debug_skeleton_line_section);
31760 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31761 output_line_info (true);
31762 }
31763
31764 /* If we emitted any indirect strings, output the string table too. */
31765 if (debug_str_hash || skeleton_debug_str_hash)
31766 output_indirect_strings ();
31767 if (debug_line_str_hash)
31768 {
31769 switch_to_section (debug_line_str_section);
31770 const enum dwarf_form form = DW_FORM_line_strp;
31771 debug_line_str_hash->traverse<enum dwarf_form,
31772 output_indirect_string> (form);
31773 }
31774
31775 /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */
31776 symview_upper_bound = 0;
31777 if (zero_view_p)
31778 bitmap_clear (zero_view_p);
31779 }
31780
31781 /* Returns a hash value for X (which really is a variable_value_struct). */
31782
31783 inline hashval_t
31784 variable_value_hasher::hash (variable_value_struct *x)
31785 {
31786 return (hashval_t) x->decl_id;
31787 }
31788
31789 /* Return nonzero if decl_id of variable_value_struct X is the same as
31790 UID of decl Y. */
31791
31792 inline bool
31793 variable_value_hasher::equal (variable_value_struct *x, tree y)
31794 {
31795 return x->decl_id == DECL_UID (y);
31796 }
31797
31798 /* Helper function for resolve_variable_value, handle
31799 DW_OP_GNU_variable_value in one location expression.
31800 Return true if exprloc has been changed into loclist. */
31801
31802 static bool
31803 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31804 {
31805 dw_loc_descr_ref next;
31806 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31807 {
31808 next = loc->dw_loc_next;
31809 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31810 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31811 continue;
31812
31813 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31814 if (DECL_CONTEXT (decl) != current_function_decl)
31815 continue;
31816
31817 dw_die_ref ref = lookup_decl_die (decl);
31818 if (ref)
31819 {
31820 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31821 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31822 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31823 continue;
31824 }
31825 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31826 if (l == NULL)
31827 continue;
31828 if (l->dw_loc_next)
31829 {
31830 if (AT_class (a) != dw_val_class_loc)
31831 continue;
31832 switch (a->dw_attr)
31833 {
31834 /* The following attributes allow both exprloc and loclist
31835 classes, so we can change them into a loclist. */
31836 case DW_AT_location:
31837 case DW_AT_string_length:
31838 case DW_AT_return_addr:
31839 case DW_AT_data_member_location:
31840 case DW_AT_frame_base:
31841 case DW_AT_segment:
31842 case DW_AT_static_link:
31843 case DW_AT_use_location:
31844 case DW_AT_vtable_elem_location:
31845 if (prev)
31846 {
31847 prev->dw_loc_next = NULL;
31848 prepend_loc_descr_to_each (l, AT_loc (a));
31849 }
31850 if (next)
31851 add_loc_descr_to_each (l, next);
31852 a->dw_attr_val.val_class = dw_val_class_loc_list;
31853 a->dw_attr_val.val_entry = NULL;
31854 a->dw_attr_val.v.val_loc_list = l;
31855 have_location_lists = true;
31856 return true;
31857 /* The following attributes allow both exprloc and reference,
31858 so if the whole expression is DW_OP_GNU_variable_value alone
31859 we could transform it into a reference. */
31860 case DW_AT_byte_size:
31861 case DW_AT_bit_size:
31862 case DW_AT_lower_bound:
31863 case DW_AT_upper_bound:
31864 case DW_AT_bit_stride:
31865 case DW_AT_count:
31866 case DW_AT_allocated:
31867 case DW_AT_associated:
31868 case DW_AT_byte_stride:
31869 if (prev == NULL && next == NULL)
31870 break;
31871 /* FALLTHRU */
31872 default:
31873 if (dwarf_strict)
31874 continue;
31875 break;
31876 }
31877 /* Create DW_TAG_variable that we can refer to. */
31878 gen_decl_die (decl, NULL_TREE, NULL,
31879 lookup_decl_die (current_function_decl));
31880 ref = lookup_decl_die (decl);
31881 if (ref)
31882 {
31883 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31884 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31885 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31886 }
31887 continue;
31888 }
31889 if (prev)
31890 {
31891 prev->dw_loc_next = l->expr;
31892 add_loc_descr (&prev->dw_loc_next, next);
31893 free_loc_descr (loc, NULL);
31894 next = prev->dw_loc_next;
31895 }
31896 else
31897 {
31898 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31899 add_loc_descr (&loc, next);
31900 next = loc;
31901 }
31902 loc = prev;
31903 }
31904 return false;
31905 }
31906
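/* Overall flow for DW_OP_GNU_variable_value (typically used for bounds
   of variable-length types kept in artificial temporaries): during early
   finish, note_variable_value below records which function's DIEs still
   refer to a decl; later, with that function as current_function_decl,
   resolve_variable_values uses the routines above to turn the decl
   reference into a DIE reference, an inline expression or a location
   list.  */
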
31907 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31908
31909 static void
31910 resolve_variable_value (dw_die_ref die)
31911 {
31912 dw_attr_node *a;
31913 dw_loc_list_ref loc;
31914 unsigned ix;
31915
31916 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31917 switch (AT_class (a))
31918 {
31919 case dw_val_class_loc:
31920 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
31921 break;
31922 /* FALLTHRU */
31923 case dw_val_class_loc_list:
31924 loc = AT_loc_list (a);
31925 gcc_assert (loc);
31926 for (; loc; loc = loc->dw_loc_next)
31927 resolve_variable_value_in_expr (a, loc->expr);
31928 break;
31929 default:
31930 break;
31931 }
31932 }
31933
31934 /* Attempt to optimize DW_OP_GNU_variable_value referring to
31935 temporaries in the current function. */
31936
31937 static void
31938 resolve_variable_values (void)
31939 {
31940 if (!variable_value_hash || !current_function_decl)
31941 return;
31942
31943 struct variable_value_struct *node
31944 = variable_value_hash->find_with_hash (current_function_decl,
31945 DECL_UID (current_function_decl));
31946
31947 if (node == NULL)
31948 return;
31949
31950 unsigned int i;
31951 dw_die_ref die;
31952 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
31953 resolve_variable_value (die);
31954 }
31955
31956 /* Helper function for note_variable_value, handle one location
31957 expression. */
31958
31959 static void
31960 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
31961 {
31962 for (; loc; loc = loc->dw_loc_next)
31963 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
31964 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
31965 {
31966 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31967 dw_die_ref ref = lookup_decl_die (decl);
31968 if (! ref && (flag_generate_lto || flag_generate_offload))
31969 {
31970 /* ??? This is somewhat of a hack: we do not create DIEs early
31971 for variables that are not in BLOCK trees, but when generating
31972 early LTO output we need the dw_val_class_decl_ref to be
31973 fully resolved. For fat LTO objects we'd also like to
31974 undo this after LTO DWARF output. */
31975 gcc_assert (DECL_CONTEXT (decl));
31976 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
31977 gcc_assert (ctx != NULL);
31978 gen_decl_die (decl, NULL_TREE, NULL, ctx);
31979 ref = lookup_decl_die (decl);
31980 gcc_assert (ref != NULL);
31981 }
31982 if (ref)
31983 {
31984 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31985 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31986 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31987 continue;
31988 }
31989 if (VAR_P (decl)
31990 && DECL_CONTEXT (decl)
31991 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
31992 && lookup_decl_die (DECL_CONTEXT (decl)))
31993 {
31994 if (!variable_value_hash)
31995 variable_value_hash
31996 = hash_table<variable_value_hasher>::create_ggc (10);
31997
31998 tree fndecl = DECL_CONTEXT (decl);
31999 struct variable_value_struct *node;
32000 struct variable_value_struct **slot
32001 = variable_value_hash->find_slot_with_hash (fndecl,
32002 DECL_UID (fndecl),
32003 INSERT);
32004 if (*slot == NULL)
32005 {
32006 node = ggc_cleared_alloc<variable_value_struct> ();
32007 node->decl_id = DECL_UID (fndecl);
32008 *slot = node;
32009 }
32010 else
32011 node = *slot;
32012
32013 vec_safe_push (node->dies, die);
32014 }
32015 }
32016 }
32017
32018 /* Walk the tree DIE and note DIEs with DW_OP_GNU_variable_value still
32019 with dw_val_class_decl_ref operand. */
32020
32021 static void
32022 note_variable_value (dw_die_ref die)
32023 {
32024 dw_die_ref c;
32025 dw_attr_node *a;
32026 dw_loc_list_ref loc;
32027 unsigned ix;
32028
32029 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
32030 switch (AT_class (a))
32031 {
32032 case dw_val_class_loc_list:
32033 loc = AT_loc_list (a);
32034 gcc_assert (loc);
32035 if (!loc->noted_variable_value)
32036 {
32037 loc->noted_variable_value = 1;
32038 for (; loc; loc = loc->dw_loc_next)
32039 note_variable_value_in_expr (die, loc->expr);
32040 }
32041 break;
32042 case dw_val_class_loc:
32043 note_variable_value_in_expr (die, AT_loc (a));
32044 break;
32045 default:
32046 break;
32047 }
32048
32049 /* Mark children. */
32050 FOR_EACH_CHILD (die, c, note_variable_value (c));
32051 }
32052
32053 /* Perform any cleanups needed after the early debug generation pass
32054 has run. */
32055
32056 static void
32057 dwarf2out_early_finish (const char *filename)
32058 {
32059 set_early_dwarf s;
32060 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
32061
32062 /* PCH might result in the DW_AT_producer string being restored from the
32063 header compilation, so always fill it with an empty string initially
32064 and overwrite it only here. */
32065 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
32066 producer_string = gen_producer_string ();
32067 producer->dw_attr_val.v.val_str->refcount--;
32068 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
32069
32070 /* Add the name for the main input file now. We delayed this from
32071 dwarf2out_init to avoid complications with PCH. */
32072 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
32073 add_comp_dir_attribute (comp_unit_die ());
32074
32075 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
32076 DW_AT_comp_dir into .debug_line_str section. */
32077 if (!output_asm_line_debug_info ()
32078 && dwarf_version >= 5
32079 && DWARF5_USE_DEBUG_LINE_STR)
32080 {
32081 for (int i = 0; i < 2; i++)
32082 {
32083 dw_attr_node *a = get_AT (comp_unit_die (),
32084 i ? DW_AT_comp_dir : DW_AT_name);
32085 if (a == NULL
32086 || AT_class (a) != dw_val_class_str
32087 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
32088 continue;
32089
32090 if (! debug_line_str_hash)
32091 debug_line_str_hash
32092 = hash_table<indirect_string_hasher>::create_ggc (10);
32093
32094 struct indirect_string_node *node
32095 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
32096 set_indirect_string (node);
32097 node->form = DW_FORM_line_strp;
32098 a->dw_attr_val.v.val_str->refcount--;
32099 a->dw_attr_val.v.val_str = node;
32100 }
32101 }
32102
32103 /* With LTO early dwarf was really finished at compile-time, so make
32104 sure to adjust the phase after annotating the LTRANS CU DIE. */
32105 if (in_lto_p)
32106 {
32107 /* Force DW_TAG_imported_unit to be created now, otherwise
32108 we might end up without it or with it ordered after a
32109 DW_TAG_inlined_subroutine referencing DIEs from it. */
32110 if (! flag_wpa && flag_incremental_link != INCREMENTAL_LINK_LTO)
32111 {
32112 unsigned i;
32113 tree tu;
32114 if (external_die_map)
32115 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, tu)
32116 if (sym_off_pair *desc = external_die_map->get (tu))
32117 {
32118 dw_die_ref import = new_die (DW_TAG_imported_unit,
32119 comp_unit_die (), NULL_TREE);
32120 add_AT_external_die_ref (import, DW_AT_import,
32121 desc->sym, desc->off);
32122 }
32123 }
32124
32125 early_dwarf_finished = true;
32126 if (dump_file)
32127 {
32128 fprintf (dump_file, "LTO EARLY DWARF for %s\n", filename);
32129 print_die (comp_unit_die (), dump_file);
32130 }
32131 return;
32132 }
32133
32134 /* Walk through the list of incomplete types again, trying once more to
32135 emit full debugging info for them. */
32136 retry_incomplete_types ();
32137
32138 /* The point here is to flush out the limbo list so that it is empty
32139 and we don't need to stream it for LTO. */
32140 flush_limbo_die_list ();
32141
32142 gen_scheduled_generic_parms_dies ();
32143 gen_remaining_tmpl_value_param_die_attribute ();
32144
32145 /* Add DW_AT_linkage_name for all deferred DIEs. */
32146 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
32147 {
32148 tree decl = node->created_for;
32149 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
32150 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
32151 ended up in deferred_asm_name before we knew it was
32152 constant and never written to disk. */
32153 && DECL_ASSEMBLER_NAME (decl))
32154 {
32155 add_linkage_attr (node->die, decl);
32156 move_linkage_attr (node->die);
32157 }
32158 }
32159 deferred_asm_name = NULL;
32160
32161 if (flag_eliminate_unused_debug_types)
32162 prune_unused_types ();
32163
32164 /* Generate separate COMDAT sections for type DIEs. */
32165 if (use_debug_types)
32166 {
32167 break_out_comdat_types (comp_unit_die ());
32168
32169 /* Each new type_unit DIE was added to the limbo die list when created.
32170 Since these have all been added to comdat_type_list, clear the
32171 limbo die list. */
32172 limbo_die_list = NULL;
32173
32174 /* For each new comdat type unit, copy declarations for incomplete
32175 types to make the new unit self-contained (i.e., no direct
32176 references to the main compile unit). */
32177 for (comdat_type_node *ctnode = comdat_type_list;
32178 ctnode != NULL; ctnode = ctnode->next)
32179 copy_decls_for_unworthy_types (ctnode->root_die);
32180 copy_decls_for_unworthy_types (comp_unit_die ());
32181
32182 /* In the process of copying declarations from one unit to another,
32183 we may have left some declarations behind that are no longer
32184 referenced. Prune them. */
32185 prune_unused_types ();
32186 }
32187
32188 /* Traverse the DIE's and note DIEs with DW_OP_GNU_variable_value still
32189 with dw_val_class_decl_ref operand. */
32190 note_variable_value (comp_unit_die ());
32191 for (limbo_die_node *node = cu_die_list; node; node = node->next)
32192 note_variable_value (node->die);
32193 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
32194 ctnode = ctnode->next)
32195 note_variable_value (ctnode->root_die);
32196 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32197 note_variable_value (node->die);
32198
32199 /* The AT_pubnames attribute needs to go in all skeleton dies, including
32200 both the main_cu and all skeleton TUs. Making this call unconditional
32201 would end up either adding a second copy of the AT_pubnames attribute, or
32202 requiring a special case in add_top_level_skeleton_die_attrs. */
32203 if (!dwarf_split_debug_info)
32204 add_AT_pubnames (comp_unit_die ());
32205
32206 /* The early debug phase is now finished. */
32207 early_dwarf_finished = true;
32208 if (dump_file)
32209 {
32210 fprintf (dump_file, "EARLY DWARF for %s\n", filename);
32211 print_die (comp_unit_die (), dump_file);
32212 }
32213
32214 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
32215 if ((!flag_generate_lto && !flag_generate_offload)
32216 /* FIXME: Disable debug info generation for (PE-)COFF targets since the
32217 copy_lto_debug_sections operation of the simple object support in
32218 libiberty is not implemented for them yet. */
32219 || TARGET_PECOFF || TARGET_COFF)
32220 return;
32221
32222 /* Since we are now going to output for LTO, initialize sections and labels
32223 to the LTO variants. We don't need a random-seed postfix as for other
32224 LTO sections, because linking the LTO debug sections into one in a partial
32225 link is fine. */
32226 init_sections_and_labels (true);
32227
32228 /* The output below is modeled after dwarf2out_finish with all
32229 location related output removed and some LTO specific changes.
32230 Some refactoring might make both smaller and easier to match up. */
32231
32232 /* Traverse the DIE's and add sibling attributes to those DIE's
32233 that have children. */
32234 add_sibling_attributes (comp_unit_die ());
32235 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32236 add_sibling_attributes (node->die);
32237 for (comdat_type_node *ctnode = comdat_type_list;
32238 ctnode != NULL; ctnode = ctnode->next)
32239 add_sibling_attributes (ctnode->root_die);
32240
32241 /* AIX Assembler inserts the length, so adjust the reference to match the
32242 offset expected by debuggers. */
32243 strcpy (dl_section_ref, debug_line_section_label);
32244 if (XCOFF_DEBUGGING_INFO)
32245 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
32246
32247 if (debug_info_level >= DINFO_LEVEL_TERSE)
32248 add_AT_lineptr (comp_unit_die (), DW_AT_stmt_list, dl_section_ref);
32249
32250 if (have_macinfo)
32251 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
32252 macinfo_section_label);
32253
32254 save_macinfo_strings ();
32255
32256 if (dwarf_split_debug_info)
32257 {
32258 unsigned int index = 0;
32259 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
32260 }
32261
32262 /* Output all of the compilation units. We put the main one last so that
32263 the offsets are available to output_pubnames. */
32264 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32265 output_comp_unit (node->die, 0, NULL);
32266
32267 hash_table<comdat_type_hasher> comdat_type_table (100);
32268 for (comdat_type_node *ctnode = comdat_type_list;
32269 ctnode != NULL; ctnode = ctnode->next)
32270 {
32271 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
32272
32273 /* Don't output duplicate types. */
32274 if (*slot != HTAB_EMPTY_ENTRY)
32275 continue;
32276
32277 /* Add a pointer to the line table for the main compilation unit
32278 so that the debugger can make sense of DW_AT_decl_file
32279 attributes. */
32280 if (debug_info_level >= DINFO_LEVEL_TERSE)
32281 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
32282 (!dwarf_split_debug_info
32283 ? debug_line_section_label
32284 : debug_skeleton_line_section_label));
32285
32286 output_comdat_type_unit (ctnode, true);
32287 *slot = ctnode;
32288 }
32289
32290 /* Stick a unique symbol to the main debuginfo section. */
32291 compute_comp_unit_symbol (comp_unit_die ());
32292
32293 /* Output the main compilation unit. We always need it if only for
32294 the CU symbol. */
32295 output_comp_unit (comp_unit_die (), true, NULL);
32296
32297 /* Output the abbreviation table. */
32298 if (vec_safe_length (abbrev_die_table) != 1)
32299 {
32300 switch_to_section (debug_abbrev_section);
32301 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
32302 output_abbrev_section ();
32303 }
32304
32305 /* Have to end the macro section. */
32306 if (have_macinfo)
32307 {
32308 /* We have to save macinfo state if we need to output it again
32309 for the FAT part of the object. */
32310 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
32311 if (flag_fat_lto_objects)
32312 macinfo_table = macinfo_table->copy ();
32313
32314 switch_to_section (debug_macinfo_section);
32315 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
32316 output_macinfo (debug_line_section_label, true);
32317 dw2_asm_output_data (1, 0, "End compilation unit");
32318
32319 if (flag_fat_lto_objects)
32320 {
32321 vec_free (macinfo_table);
32322 macinfo_table = saved_macinfo_table;
32323 }
32324 }
32325
32326 /* Emit a skeleton debug_line section. */
32327 switch_to_section (debug_line_section);
32328 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
32329 output_line_info (true);
32330
32331 /* If we emitted any indirect strings, output the string table too. */
32332 if (debug_str_hash || skeleton_debug_str_hash)
32333 output_indirect_strings ();
32334 if (debug_line_str_hash)
32335 {
32336 switch_to_section (debug_line_str_section);
32337 const enum dwarf_form form = DW_FORM_line_strp;
32338 debug_line_str_hash->traverse<enum dwarf_form,
32339 output_indirect_string> (form);
32340 }
32341
32342 /* Switch back to the text section. */
32343 switch_to_section (text_section);
32344 }
32345
32346 /* Reset all state within dwarf2out.c so that we can rerun the compiler
32347 within the same process. For use by toplev::finalize. */
32348
32349 void
32350 dwarf2out_c_finalize (void)
32351 {
32352 last_var_location_insn = NULL;
32353 cached_next_real_insn = NULL;
32354 used_rtx_array = NULL;
32355 incomplete_types = NULL;
32356 debug_info_section = NULL;
32357 debug_skeleton_info_section = NULL;
32358 debug_abbrev_section = NULL;
32359 debug_skeleton_abbrev_section = NULL;
32360 debug_aranges_section = NULL;
32361 debug_addr_section = NULL;
32362 debug_macinfo_section = NULL;
32363 debug_line_section = NULL;
32364 debug_skeleton_line_section = NULL;
32365 debug_loc_section = NULL;
32366 debug_pubnames_section = NULL;
32367 debug_pubtypes_section = NULL;
32368 debug_str_section = NULL;
32369 debug_line_str_section = NULL;
32370 debug_str_dwo_section = NULL;
32371 debug_str_offsets_section = NULL;
32372 debug_ranges_section = NULL;
32373 debug_frame_section = NULL;
32374 fde_vec = NULL;
32375 debug_str_hash = NULL;
32376 debug_line_str_hash = NULL;
32377 skeleton_debug_str_hash = NULL;
32378 dw2_string_counter = 0;
32379 have_multiple_function_sections = false;
32380 text_section_used = false;
32381 cold_text_section_used = false;
32382 cold_text_section = NULL;
32383 current_unit_personality = NULL;
32384
32385 early_dwarf = false;
32386 early_dwarf_finished = false;
32387
32388 next_die_offset = 0;
32389 single_comp_unit_die = NULL;
32390 comdat_type_list = NULL;
32391 limbo_die_list = NULL;
32392 file_table = NULL;
32393 decl_die_table = NULL;
32394 common_block_die_table = NULL;
32395 decl_loc_table = NULL;
32396 call_arg_locations = NULL;
32397 call_arg_loc_last = NULL;
32398 call_site_count = -1;
32399 tail_call_site_count = -1;
32400 cached_dw_loc_list_table = NULL;
32401 abbrev_die_table = NULL;
32402 delete dwarf_proc_stack_usage_map;
32403 dwarf_proc_stack_usage_map = NULL;
32404 line_info_label_num = 0;
32405 cur_line_info_table = NULL;
32406 text_section_line_info = NULL;
32407 cold_text_section_line_info = NULL;
32408 separate_line_info = NULL;
32409 info_section_emitted = false;
32410 pubname_table = NULL;
32411 pubtype_table = NULL;
32412 macinfo_table = NULL;
32413 ranges_table = NULL;
32414 ranges_by_label = NULL;
32415 rnglist_idx = 0;
32416 have_location_lists = false;
32417 loclabel_num = 0;
32418 poc_label_num = 0;
32419 last_emitted_file = NULL;
32420 label_num = 0;
32421 tmpl_value_parm_die_table = NULL;
32422 generic_type_instances = NULL;
32423 frame_pointer_fb_offset = 0;
32424 frame_pointer_fb_offset_valid = false;
32425 base_types.release ();
32426 XDELETEVEC (producer_string);
32427 producer_string = NULL;
32428 }
32429
32430 #include "gt-dwarf2out.h"