1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2019 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternatively, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47 information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
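
/* Editorial illustration of the CFA definition above, using the x86-64
   DWARF register numbering (r7 = rsp, r6 = rbp); the exact rules are of
   course target specific. A standard frame-pointer prologue would emit
   CFI along the lines of:

     DW_CFA_def_cfa: r7 (rsp) ofs 8       <- at entry, CFA = rsp + 8
     DW_CFA_def_cfa_offset: 16            <- after "push %rbp"
     DW_CFA_offset: r6 (rbp) at cfa-16
     DW_CFA_def_cfa_register: r6 (rbp)    <- after "mov %rsp, %rbp"  */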
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105 static bool is_redundant_typedef (const_tree);
106
107 #ifndef XCOFF_DEBUGGING_INFO
108 #define XCOFF_DEBUGGING_INFO 0
109 #endif
110
111 #ifndef HAVE_XCOFF_DWARF_EXTRAS
112 #define HAVE_XCOFF_DWARF_EXTRAS 0
113 #endif
114
115 #ifdef VMS_DEBUGGING_INFO
116 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
117
118 /* Define this macro to be a nonzero value if the directory specifications
119 which are output in the debug info should end with a separator. */
120 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
121 /* Define this macro to evaluate to a nonzero value if GCC should refrain
122 from generating indirect strings in DWARF2 debug information, for instance
123 if your target is stuck with an old version of GDB that is unable to
124 process them properly or uses VMS Debug. */
125 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
126 #else
127 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
128 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
129 #endif
130
131 /* ??? Poison these here until it can be done generically. They've been
132 totally replaced in this file; make sure it stays that way. */
133 #undef DWARF2_UNWIND_INFO
134 #undef DWARF2_FRAME_INFO
135 #if (GCC_VERSION >= 3000)
136 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
137 #endif
138
139 /* The size of the target's pointer type. */
140 #ifndef PTR_SIZE
141 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
142 #endif
143
144 /* Array of RTXes referenced by the debugging information, which therefore
145 must be kept around forever. */
146 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
147
148 /* A pointer to the base of a list of incomplete types which might be
149 completed at some later time. incomplete_types needs to be a
150 vec<tree, va_gc> * because we want to tell the garbage collector about
151 it. */
152 static GTY(()) vec<tree, va_gc> *incomplete_types;
153
154 /* Pointers to various DWARF2 sections. */
155 static GTY(()) section *debug_info_section;
156 static GTY(()) section *debug_skeleton_info_section;
157 static GTY(()) section *debug_abbrev_section;
158 static GTY(()) section *debug_skeleton_abbrev_section;
159 static GTY(()) section *debug_aranges_section;
160 static GTY(()) section *debug_addr_section;
161 static GTY(()) section *debug_macinfo_section;
162 static const char *debug_macinfo_section_name;
163 static unsigned macinfo_label_base = 1;
164 static GTY(()) section *debug_line_section;
165 static GTY(()) section *debug_skeleton_line_section;
166 static GTY(()) section *debug_loc_section;
167 static GTY(()) section *debug_pubnames_section;
168 static GTY(()) section *debug_pubtypes_section;
169 static GTY(()) section *debug_str_section;
170 static GTY(()) section *debug_line_str_section;
171 static GTY(()) section *debug_str_dwo_section;
172 static GTY(()) section *debug_str_offsets_section;
173 static GTY(()) section *debug_ranges_section;
174 static GTY(()) section *debug_frame_section;
175
176 /* Maximum size (in bytes) of an artificially generated label. */
177 #define MAX_ARTIFICIAL_LABEL_BYTES 40
178
179 /* According to the (draft) DWARF 3 specification, the initial length
180 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
181 bytes are 0xffffffff, followed by the length stored in the next 8
182 bytes.
183
184 However, the SGI/MIPS ABI uses an initial length which is equal to
185 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
186
187 #ifndef DWARF_INITIAL_LENGTH_SIZE
188 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
189 #endif
190
191 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
192 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
193 #endif
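
/* Byte layout implied by the macros above (editorial sketch):
     32-bit DWARF (DWARF_OFFSET_SIZE == 4): a 4-byte length.
     64-bit DWARF: a 4-byte escape value 0xffffffff followed by an
                   8-byte length, i.e. 12 bytes in total.  */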
194
195 /* Round SIZE up to the nearest multiple of BOUNDARY. */
196 #define DWARF_ROUND(SIZE,BOUNDARY) \
197 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
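
/* Worked examples (editorial addition):
     DWARF_ROUND (5, 4)  => (((5 + 3) / 4) * 4) == 8
     DWARF_ROUND (8, 4)  => (((8 + 3) / 4) * 4) == 8
     DWARF_ROUND (9, 8)  => (((9 + 7) / 8) * 8) == 16  */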
198
199 /* CIE identifier. */
200 #if HOST_BITS_PER_WIDE_INT >= 64
201 #define DWARF_CIE_ID \
202 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
203 #else
204 #define DWARF_CIE_ID DW_CIE_ID
205 #endif
206
207
208 /* A vector for a table that contains frame description
209 information for each routine. */
210 #define NOT_INDEXED (-1U)
211 #define NO_INDEX_ASSIGNED (-2U)
212
213 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
214
215 struct GTY((for_user)) indirect_string_node {
216 const char *str;
217 unsigned int refcount;
218 enum dwarf_form form;
219 char *label;
220 unsigned int index;
221 };
222
223 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
224 {
225 typedef const char *compare_type;
226
227 static hashval_t hash (indirect_string_node *);
228 static bool equal (indirect_string_node *, const char *);
229 };
230
231 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
232
233 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
234
235 /* With split_debug_info, both the comp_dir and dwo_name go in the
236 main object file, rather than the dwo, similar to the force_direct
237 parameter elsewhere but with additional complications:
238
239 1) The string is needed in both the main object file and the dwo.
240 That is, the comp_dir and dwo_name will appear in both places.
241
242 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
243 DW_FORM_line_strp or DW_FORM_strx/GNU_str_index.
244
245 3) GCC chooses the form to use late, depending on the size and
246 reference count.
247
248 Rather than forcing all the debug string handling functions and
249 callers to deal with these complications, simply use a separate,
250 special-cased string table for any attribute that should go in the
251 main object file. This limits the complexity to just the places
252 that need it. */
253
254 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
255
256 static GTY(()) int dw2_string_counter;
257
258 /* True if the compilation unit places functions in more than one section. */
259 static GTY(()) bool have_multiple_function_sections = false;
260
261 /* Whether the default text and cold text sections have been used at all. */
262 static GTY(()) bool text_section_used = false;
263 static GTY(()) bool cold_text_section_used = false;
264
265 /* The default cold text section. */
266 static GTY(()) section *cold_text_section;
267
268 /* The DIE for C++14 'auto' in a function return type. */
269 static GTY(()) dw_die_ref auto_die;
270
271 /* The DIE for C++14 'decltype(auto)' in a function return type. */
272 static GTY(()) dw_die_ref decltype_auto_die;
273
274 /* Forward declarations for functions defined in this file. */
275
276 static void output_call_frame_info (int);
277 static void dwarf2out_note_section_used (void);
278
279 /* Personality decl of current unit. Used only when assembler does not support
280 personality CFI. */
281 static GTY(()) rtx current_unit_personality;
282
283 /* Whether an eh_frame section is required. */
284 static GTY(()) bool do_eh_frame = false;
285
286 /* .debug_rnglists next index. */
287 static unsigned int rnglist_idx;
288
289 /* Data and reference forms for relocatable data. */
290 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
291 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
292
293 #ifndef DEBUG_FRAME_SECTION
294 #define DEBUG_FRAME_SECTION ".debug_frame"
295 #endif
296
297 #ifndef FUNC_BEGIN_LABEL
298 #define FUNC_BEGIN_LABEL "LFB"
299 #endif
300
301 #ifndef FUNC_SECOND_SECT_LABEL
302 #define FUNC_SECOND_SECT_LABEL "LFSB"
303 #endif
304
305 #ifndef FUNC_END_LABEL
306 #define FUNC_END_LABEL "LFE"
307 #endif
308
309 #ifndef PROLOGUE_END_LABEL
310 #define PROLOGUE_END_LABEL "LPE"
311 #endif
312
313 #ifndef EPILOGUE_BEGIN_LABEL
314 #define EPILOGUE_BEGIN_LABEL "LEB"
315 #endif
316
317 #ifndef FRAME_BEGIN_LABEL
318 #define FRAME_BEGIN_LABEL "Lframe"
319 #endif
320 #define CIE_AFTER_SIZE_LABEL "LSCIE"
321 #define CIE_END_LABEL "LECIE"
322 #define FDE_LABEL "LSFDE"
323 #define FDE_AFTER_SIZE_LABEL "LASFDE"
324 #define FDE_END_LABEL "LEFDE"
325 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
326 #define LINE_NUMBER_END_LABEL "LELT"
327 #define LN_PROLOG_AS_LABEL "LASLTP"
328 #define LN_PROLOG_END_LABEL "LELTP"
329 #define DIE_LABEL_PREFIX "DW"
330 \f
331 /* Match the base name of a file to the base name of a compilation unit. */
332
333 static int
334 matches_main_base (const char *path)
335 {
336 /* Cache the last query. */
337 static const char *last_path = NULL;
338 static int last_match = 0;
339 if (path != last_path)
340 {
341 const char *base;
342 int length = base_of_path (path, &base);
343 last_path = path;
344 last_match = (length == main_input_baselength
345 && memcmp (base, main_input_basename, length) == 0);
346 }
347 return last_match;
348 }
349
350 #ifdef DEBUG_DEBUG_STRUCT
351
352 static int
353 dump_struct_debug (tree type, enum debug_info_usage usage,
354 enum debug_struct_file criterion, int generic,
355 int matches, int result)
356 {
357 /* Find the type name. */
358 tree type_decl = TYPE_STUB_DECL (type);
359 tree t = type_decl;
360 const char *name = 0;
361 if (TREE_CODE (t) == TYPE_DECL)
362 t = DECL_NAME (t);
363 if (t)
364 name = IDENTIFIER_POINTER (t);
365
366 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
367 criterion,
368 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
369 matches ? "bas" : "hdr",
370 generic ? "gen" : "ord",
371 usage == DINFO_USAGE_DFN ? ";" :
372 usage == DINFO_USAGE_DIR_USE ? "." : "*",
373 result,
374 (void*) type_decl, name);
375 return result;
376 }
377 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
378 dump_struct_debug (type, usage, criterion, generic, matches, result)
379
380 #else
381
382 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
383 (result)
384
385 #endif
386
387 /* Get the number of HOST_WIDE_INTs needed to represent the value
388 of the number. Some constants have a large uniform precision, so
389 we use only the precision needed for the actual value. */
390
391 static unsigned int
392 get_full_len (const wide_int &op)
393 {
394 int prec = wi::min_precision (op, UNSIGNED);
395 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
396 / HOST_BITS_PER_WIDE_INT);
397 }
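
/* Illustrative examples (editorial addition), assuming 64-bit
   HOST_WIDE_INT: a wide_int whose value needs only 3 bits of precision
   yields 1, while a value needing 70 bits yields 2, regardless of the
   (possibly much larger) precision the wide_int was created with.  */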
398
399 static bool
400 should_emit_struct_debug (tree type, enum debug_info_usage usage)
401 {
402 enum debug_struct_file criterion;
403 tree type_decl;
404 bool generic = lang_hooks.types.generic_p (type);
405
406 if (generic)
407 criterion = debug_struct_generic[usage];
408 else
409 criterion = debug_struct_ordinary[usage];
410
411 if (criterion == DINFO_STRUCT_FILE_NONE)
412 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
413 if (criterion == DINFO_STRUCT_FILE_ANY)
414 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
415
416 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
417
418 if (type_decl != NULL)
419 {
420 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
421 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
422
423 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
424 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
425 }
426
427 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
428 }
429 \f
430 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
431 switch to the data section instead, and write out a synthetic start label
432 for collect2 the first time around. */
433
434 static void
435 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
436 {
437 if (eh_frame_section == 0)
438 {
439 int flags;
440
441 if (EH_TABLES_CAN_BE_READ_ONLY)
442 {
443 int fde_encoding;
444 int per_encoding;
445 int lsda_encoding;
446
447 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
448 /*global=*/0);
449 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
450 /*global=*/1);
451 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
452 /*global=*/0);
453 flags = ((! flag_pic
454 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
455 && (fde_encoding & 0x70) != DW_EH_PE_aligned
456 && (per_encoding & 0x70) != DW_EH_PE_absptr
457 && (per_encoding & 0x70) != DW_EH_PE_aligned
458 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
459 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
460 ? 0 : SECTION_WRITE);
461 }
462 else
463 flags = SECTION_WRITE;
464
465 #ifdef EH_FRAME_SECTION_NAME
466 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
467 #else
468 eh_frame_section = ((flags == SECTION_WRITE)
469 ? data_section : readonly_data_section);
470 #endif /* EH_FRAME_SECTION_NAME */
471 }
472
473 switch_to_section (eh_frame_section);
474
475 #ifdef EH_FRAME_THROUGH_COLLECT2
476 /* We have no special eh_frame section. Emit special labels to guide
477 collect2. */
478 if (!back)
479 {
480 tree label = get_file_function_name ("F");
481 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
482 targetm.asm_out.globalize_label (asm_out_file,
483 IDENTIFIER_POINTER (label));
484 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
485 }
486 #endif
487 }
488
489 /* Switch [BACK] to the eh or debug frame table section, depending on
490 FOR_EH. */
491
492 static void
493 switch_to_frame_table_section (int for_eh, bool back)
494 {
495 if (for_eh)
496 switch_to_eh_frame_section (back);
497 else
498 {
499 if (!debug_frame_section)
500 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
501 SECTION_DEBUG, NULL);
502 switch_to_section (debug_frame_section);
503 }
504 }
505
506 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
507
508 enum dw_cfi_oprnd_type
509 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
510 {
511 switch (cfi)
512 {
513 case DW_CFA_nop:
514 case DW_CFA_GNU_window_save:
515 case DW_CFA_remember_state:
516 case DW_CFA_restore_state:
517 return dw_cfi_oprnd_unused;
518
519 case DW_CFA_set_loc:
520 case DW_CFA_advance_loc1:
521 case DW_CFA_advance_loc2:
522 case DW_CFA_advance_loc4:
523 case DW_CFA_MIPS_advance_loc8:
524 return dw_cfi_oprnd_addr;
525
526 case DW_CFA_offset:
527 case DW_CFA_offset_extended:
528 case DW_CFA_def_cfa:
529 case DW_CFA_offset_extended_sf:
530 case DW_CFA_def_cfa_sf:
531 case DW_CFA_restore:
532 case DW_CFA_restore_extended:
533 case DW_CFA_undefined:
534 case DW_CFA_same_value:
535 case DW_CFA_def_cfa_register:
536 case DW_CFA_register:
537 case DW_CFA_expression:
538 case DW_CFA_val_expression:
539 return dw_cfi_oprnd_reg_num;
540
541 case DW_CFA_def_cfa_offset:
542 case DW_CFA_GNU_args_size:
543 case DW_CFA_def_cfa_offset_sf:
544 return dw_cfi_oprnd_offset;
545
546 case DW_CFA_def_cfa_expression:
547 return dw_cfi_oprnd_loc;
548
549 default:
550 gcc_unreachable ();
551 }
552 }
553
554 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
555
556 enum dw_cfi_oprnd_type
557 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
558 {
559 switch (cfi)
560 {
561 case DW_CFA_def_cfa:
562 case DW_CFA_def_cfa_sf:
563 case DW_CFA_offset:
564 case DW_CFA_offset_extended_sf:
565 case DW_CFA_offset_extended:
566 return dw_cfi_oprnd_offset;
567
568 case DW_CFA_register:
569 return dw_cfi_oprnd_reg_num;
570
571 case DW_CFA_expression:
572 case DW_CFA_val_expression:
573 return dw_cfi_oprnd_loc;
574
575 case DW_CFA_def_cfa_expression:
576 return dw_cfi_oprnd_cfa_loc;
577
578 default:
579 return dw_cfi_oprnd_unused;
580 }
581 }
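
/* For example (editorial note): a "DW_CFA_offset: r6 at cfa-16"
   instruction keeps the register number in dw_cfi_oprnd1
   (dw_cfi_oprnd_reg_num) and the data-alignment-factored offset in
   dw_cfi_oprnd2 (dw_cfi_oprnd_offset), which is what the two functions
   above report.  */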
582
583 /* Output one FDE. */
584
585 static void
586 output_fde (dw_fde_ref fde, bool for_eh, bool second,
587 char *section_start_label, int fde_encoding, char *augmentation,
588 bool any_lsda_needed, int lsda_encoding)
589 {
590 const char *begin, *end;
591 static unsigned int j;
592 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
593
594 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
595 /* empty */ 0);
596 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
597 for_eh + j);
598 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
599 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
600 if (!XCOFF_DEBUGGING_INFO || for_eh)
601 {
602 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
603 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
604 " indicating 64-bit DWARF extension");
605 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
606 "FDE Length");
607 }
608 ASM_OUTPUT_LABEL (asm_out_file, l1);
609
610 if (for_eh)
611 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
612 else
613 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
614 debug_frame_section, "FDE CIE offset");
615
616 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
617 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
618
619 if (for_eh)
620 {
621 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
622 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
623 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
624 "FDE initial location");
625 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
626 end, begin, "FDE address range");
627 }
628 else
629 {
630 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
631 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
632 }
633
634 if (augmentation[0])
635 {
636 if (any_lsda_needed)
637 {
638 int size = size_of_encoded_value (lsda_encoding);
639
640 if (lsda_encoding == DW_EH_PE_aligned)
641 {
642 int offset = ( 4 /* Length */
643 + 4 /* CIE offset */
644 + 2 * size_of_encoded_value (fde_encoding)
645 + 1 /* Augmentation size */ );
646 int pad = -offset & (PTR_SIZE - 1);
647
648 size += pad;
649 gcc_assert (size_of_uleb128 (size) == 1);
650 }
651
652 dw2_asm_output_data_uleb128 (size, "Augmentation size");
653
654 if (fde->uses_eh_lsda)
655 {
656 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
657 fde->funcdef_number);
658 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
659 gen_rtx_SYMBOL_REF (Pmode, l1),
660 false,
661 "Language Specific Data Area");
662 }
663 else
664 {
665 if (lsda_encoding == DW_EH_PE_aligned)
666 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
667 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
668 "Language Specific Data Area (none)");
669 }
670 }
671 else
672 dw2_asm_output_data_uleb128 (0, "Augmentation size");
673 }
674
675 /* Loop through the Call Frame Instructions associated with this FDE. */
676 fde->dw_fde_current_label = begin;
677 {
678 size_t from, until, i;
679
680 from = 0;
681 until = vec_safe_length (fde->dw_fde_cfi);
682
683 if (fde->dw_fde_second_begin == NULL)
684 ;
685 else if (!second)
686 until = fde->dw_fde_switch_cfi_index;
687 else
688 from = fde->dw_fde_switch_cfi_index;
689
690 for (i = from; i < until; i++)
691 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
692 }
693
694 /* If we are to emit a ref/link from function bodies to their frame tables,
695 do it now. This is typically performed to make sure that tables
696 associated with functions are dragged with them and not discarded when the
697 linker garbage-collects unused sections. We need to do this per function to
698 cope with -ffunction-sections. */
699
700 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
701 /* Switch to the function section, emit the ref to the tables, and
702 switch *back* into the table section. */
703 switch_to_section (function_section (fde->decl));
704 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
705 switch_to_frame_table_section (for_eh, true);
706 #endif
707
708 /* Pad the FDE out to an address sized boundary. */
709 ASM_OUTPUT_ALIGN (asm_out_file,
710 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
711 ASM_OUTPUT_LABEL (asm_out_file, l2);
712
713 j += 2;
714 }
715
716 /* Return true if frame description entry FDE is needed for EH. */
717
718 static bool
719 fde_needed_for_eh_p (dw_fde_ref fde)
720 {
721 if (flag_asynchronous_unwind_tables)
722 return true;
723
724 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
725 return true;
726
727 if (fde->uses_eh_lsda)
728 return true;
729
730 /* If exceptions are enabled, we have collected nothrow info. */
731 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
732 return false;
733
734 return true;
735 }
736
737 /* Output the call frame information used to record information
738 that relates to calculating the frame pointer, and records the
739 location of saved registers. */
740
741 static void
742 output_call_frame_info (int for_eh)
743 {
744 unsigned int i;
745 dw_fde_ref fde;
746 dw_cfi_ref cfi;
747 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
748 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
749 bool any_lsda_needed = false;
750 char augmentation[6];
751 int augmentation_size;
752 int fde_encoding = DW_EH_PE_absptr;
753 int per_encoding = DW_EH_PE_absptr;
754 int lsda_encoding = DW_EH_PE_absptr;
755 int return_reg;
756 rtx personality = NULL;
757 int dw_cie_version;
758
759 /* Don't emit a CIE if there won't be any FDEs. */
760 if (!fde_vec)
761 return;
762
763 /* Nothing to do if the assembler's doing it all. */
764 if (dwarf2out_do_cfi_asm ())
765 return;
766
767 /* If we don't have any functions we'll want to unwind out of, don't emit
768 any EH unwind information. If we make FDEs linkonce, we may have to
769 emit an empty label for an FDE that wouldn't otherwise be emitted. We
770 want to avoid having an FDE kept around when the function it refers to
771 is discarded. Example where this matters: a primary function template
772 in C++ requires EH information, an explicit specialization doesn't. */
773 if (for_eh)
774 {
775 bool any_eh_needed = false;
776
777 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
778 {
779 if (fde->uses_eh_lsda)
780 any_eh_needed = any_lsda_needed = true;
781 else if (fde_needed_for_eh_p (fde))
782 any_eh_needed = true;
783 else if (TARGET_USES_WEAK_UNWIND_INFO)
784 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
785 }
786
787 if (!any_eh_needed)
788 return;
789 }
790
791 /* We're going to be generating comments, so turn on app. */
792 if (flag_debug_asm)
793 app_enable ();
794
795 /* Switch to the proper frame section, first time. */
796 switch_to_frame_table_section (for_eh, false);
797
798 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
799 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
800
801 /* Output the CIE. */
802 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
803 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
804 if (!XCOFF_DEBUGGING_INFO || for_eh)
805 {
806 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
807 dw2_asm_output_data (4, 0xffffffff,
808 "Initial length escape value indicating 64-bit DWARF extension");
809 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
810 "Length of Common Information Entry");
811 }
812 ASM_OUTPUT_LABEL (asm_out_file, l1);
813
814 /* Now that the CIE pointer is PC-relative for EH,
815 use 0 to identify the CIE. */
816 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
817 (for_eh ? 0 : DWARF_CIE_ID),
818 "CIE Identifier Tag");
819
820 /* Use CIE version 3 for DWARF 3; allow DWARF 2 to continue to
821 use CIE version 1, unless that would produce incorrect results
822 due to overflowing the return register column. */
823 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
824 dw_cie_version = 1;
825 if (return_reg >= 256 || dwarf_version > 2)
826 dw_cie_version = 3;
827 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
828
829 augmentation[0] = 0;
830 augmentation_size = 0;
831
832 personality = current_unit_personality;
833 if (for_eh)
834 {
835 char *p;
836
837 /* Augmentation:
838 z Indicates that a uleb128 is present to size the
839 augmentation section.
840 L Indicates the encoding (and thus presence) of
841 an LSDA pointer in the FDE augmentation.
842 R Indicates a non-default pointer encoding for
843 FDE code pointers.
844 P Indicates the presence of an encoding + language
845 personality routine in the CIE augmentation. */
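
     /* Illustrative examples (editorial addition): with the ordering used
        below, a unit that has a personality routine, LSDA-using FDEs and a
        non-absptr FDE encoding ends up with the augmentation string "zPLR",
        while a unit needing only a non-default FDE encoding gets "zR".  */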
846
847 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
848 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
849 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
850
851 p = augmentation + 1;
852 if (personality)
853 {
854 *p++ = 'P';
855 augmentation_size += 1 + size_of_encoded_value (per_encoding);
856 assemble_external_libcall (personality);
857 }
858 if (any_lsda_needed)
859 {
860 *p++ = 'L';
861 augmentation_size += 1;
862 }
863 if (fde_encoding != DW_EH_PE_absptr)
864 {
865 *p++ = 'R';
866 augmentation_size += 1;
867 }
868 if (p > augmentation + 1)
869 {
870 augmentation[0] = 'z';
871 *p = '\0';
872 }
873
874 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
875 if (personality && per_encoding == DW_EH_PE_aligned)
876 {
877 int offset = ( 4 /* Length */
878 + 4 /* CIE Id */
879 + 1 /* CIE version */
880 + strlen (augmentation) + 1 /* Augmentation */
881 + size_of_uleb128 (1) /* Code alignment */
882 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
883 + 1 /* RA column */
884 + 1 /* Augmentation size */
885 + 1 /* Personality encoding */ );
886 int pad = -offset & (PTR_SIZE - 1);
887
888 augmentation_size += pad;
889
890 /* Augmentations should be small, so there's scarce need to
891 iterate for a solution. Die if we exceed one uleb128 byte. */
892 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
893 }
894 }
895
896 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
897 if (dw_cie_version >= 4)
898 {
899 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
900 dw2_asm_output_data (1, 0, "CIE Segment Size");
901 }
902 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
903 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
904 "CIE Data Alignment Factor");
905
906 if (dw_cie_version == 1)
907 dw2_asm_output_data (1, return_reg, "CIE RA Column");
908 else
909 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
910
911 if (augmentation[0])
912 {
913 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
914 if (personality)
915 {
916 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
917 eh_data_format_name (per_encoding));
918 dw2_asm_output_encoded_addr_rtx (per_encoding,
919 personality,
920 true, NULL);
921 }
922
923 if (any_lsda_needed)
924 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
925 eh_data_format_name (lsda_encoding));
926
927 if (fde_encoding != DW_EH_PE_absptr)
928 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
929 eh_data_format_name (fde_encoding));
930 }
931
932 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
933 output_cfi (cfi, NULL, for_eh);
934
935 /* Pad the CIE out to an address sized boundary. */
936 ASM_OUTPUT_ALIGN (asm_out_file,
937 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
938 ASM_OUTPUT_LABEL (asm_out_file, l2);
939
940 /* Loop through all of the FDE's. */
941 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
942 {
943 unsigned int k;
944
945 /* Don't emit EH unwind info for leaf functions that don't need it. */
946 if (for_eh && !fde_needed_for_eh_p (fde))
947 continue;
948
949 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
950 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
951 augmentation, any_lsda_needed, lsda_encoding);
952 }
953
954 if (for_eh && targetm.terminate_dw2_eh_frame_info)
955 dw2_asm_output_data (4, 0, "End of Table");
956
957 /* Turn off app to make assembly quicker. */
958 if (flag_debug_asm)
959 app_disable ();
960 }
961
962 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
963
964 static void
965 dwarf2out_do_cfi_startproc (bool second)
966 {
967 int enc;
968 rtx ref;
969
970 fprintf (asm_out_file, "\t.cfi_startproc\n");
971
972 targetm.asm_out.post_cfi_startproc (asm_out_file, current_function_decl);
973
974 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
975 eh unwinders. */
976 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
977 return;
978
979 rtx personality = get_personality_function (current_function_decl);
980
981 if (personality)
982 {
983 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
984 ref = personality;
985
986 /* ??? The GAS support isn't entirely consistent. We have to
987 handle indirect support ourselves, but PC-relative is done
988 in the assembler. Further, the assembler can't handle any
989 of the weirder relocation types. */
990 if (enc & DW_EH_PE_indirect)
991 ref = dw2_force_const_mem (ref, true);
992
993 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
994 output_addr_const (asm_out_file, ref);
995 fputc ('\n', asm_out_file);
996 }
997
998 if (crtl->uses_eh_lsda)
999 {
1000 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
1001
1002 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
1003 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
1004 current_function_funcdef_no);
1005 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
1006 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1007
1008 if (enc & DW_EH_PE_indirect)
1009 ref = dw2_force_const_mem (ref, true);
1010
1011 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1012 output_addr_const (asm_out_file, ref);
1013 fputc ('\n', asm_out_file);
1014 }
1015 }
1016
1017 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1018 this allocation may be done before pass_final. */
1019
1020 dw_fde_ref
1021 dwarf2out_alloc_current_fde (void)
1022 {
1023 dw_fde_ref fde;
1024
1025 fde = ggc_cleared_alloc<dw_fde_node> ();
1026 fde->decl = current_function_decl;
1027 fde->funcdef_number = current_function_funcdef_no;
1028 fde->fde_index = vec_safe_length (fde_vec);
1029 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1030 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1031 fde->nothrow = crtl->nothrow;
1032 fde->drap_reg = INVALID_REGNUM;
1033 fde->vdrap_reg = INVALID_REGNUM;
1034
1035 /* Record the FDE associated with this function. */
1036 cfun->fde = fde;
1037 vec_safe_push (fde_vec, fde);
1038
1039 return fde;
1040 }
1041
1042 /* Output a marker (i.e. a label) for the beginning of a function, before
1043 the prologue. */
1044
1045 void
1046 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1047 unsigned int column ATTRIBUTE_UNUSED,
1048 const char *file ATTRIBUTE_UNUSED)
1049 {
1050 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1051 char * dup_label;
1052 dw_fde_ref fde;
1053 section *fnsec;
1054 bool do_frame;
1055
1056 current_function_func_begin_label = NULL;
1057
1058 do_frame = dwarf2out_do_frame ();
1059
1060 /* ??? current_function_func_begin_label is also used by except.c for
1061 call-site information. We must emit this label if it might be used. */
1062 if (!do_frame
1063 && (!flag_exceptions
1064 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1065 return;
1066
1067 fnsec = function_section (current_function_decl);
1068 switch_to_section (fnsec);
1069 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1070 current_function_funcdef_no);
1071 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1072 current_function_funcdef_no);
1073 dup_label = xstrdup (label);
1074 current_function_func_begin_label = dup_label;
1075
1076 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1077 if (!do_frame)
1078 return;
1079
1080 /* Unlike the debug version, the EH version of frame unwind info is a per-
1081 function setting so we need to record whether we need it for the unit. */
1082 do_eh_frame |= dwarf2out_do_eh_frame ();
1083
1084 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1085 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1086 would include pass_dwarf2_frame. If we've not created the FDE yet,
1087 do so now. */
1088 fde = cfun->fde;
1089 if (fde == NULL)
1090 fde = dwarf2out_alloc_current_fde ();
1091
1092 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1093 fde->dw_fde_begin = dup_label;
1094 fde->dw_fde_current_label = dup_label;
1095 fde->in_std_section = (fnsec == text_section
1096 || (cold_text_section && fnsec == cold_text_section));
1097
1098 /* We only want to output line number information for the genuine dwarf2
1099 prologue case, not the eh frame case. */
1100 #ifdef DWARF2_DEBUGGING_INFO
1101 if (file)
1102 dwarf2out_source_line (line, column, file, 0, true);
1103 #endif
1104
1105 if (dwarf2out_do_cfi_asm ())
1106 dwarf2out_do_cfi_startproc (false);
1107 else
1108 {
1109 rtx personality = get_personality_function (current_function_decl);
1110 if (!current_unit_personality)
1111 current_unit_personality = personality;
1112
1113 /* We cannot keep a current personality per function as without CFI
1114 asm, at the point where we emit the CFI data, there is no current
1115 function anymore. */
1116 if (personality && current_unit_personality != personality)
1117 sorry ("multiple EH personalities are supported only with assemblers "
1118 "supporting .cfi_personality directive");
1119 }
1120 }
1121
1122 /* Output a marker (i.e. a label) for the end of the generated code
1123 for a function prologue. This gets called *after* the prologue code has
1124 been generated. */
1125
1126 void
1127 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1128 const char *file ATTRIBUTE_UNUSED)
1129 {
1130 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1131
1132 /* Output a label to mark the end of the prologue code generated for this
1133 function. */
1134 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1135 current_function_funcdef_no);
1136 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1137 current_function_funcdef_no);
1138 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1139 }
1140
1141 /* Output a marker (i.e. a label) for the beginning of the generated code
1142 for a function epilogue. This gets called *before* the epilogue code has
1143 been generated. */
1144
1145 void
1146 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1147 const char *file ATTRIBUTE_UNUSED)
1148 {
1149 dw_fde_ref fde = cfun->fde;
1150 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1151
1152 if (fde->dw_fde_vms_begin_epilogue)
1153 return;
1154
1155 /* Output a label to mark the beginning of the epilogue code generated for
1156 this function. */
1157 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1158 current_function_funcdef_no);
1159 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1160 current_function_funcdef_no);
1161 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1162 }
1163
1164 /* Output a marker (i.e. a label) for the absolute end of the generated code
1165 for a function definition. This gets called *after* the epilogue code has
1166 been generated. */
1167
1168 void
1169 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1170 const char *file ATTRIBUTE_UNUSED)
1171 {
1172 dw_fde_ref fde;
1173 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1174
1175 last_var_location_insn = NULL;
1176 cached_next_real_insn = NULL;
1177
1178 if (dwarf2out_do_cfi_asm ())
1179 fprintf (asm_out_file, "\t.cfi_endproc\n");
1180
1181 /* Output a label to mark the endpoint of the code generated for this
1182 function. */
1183 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1184 current_function_funcdef_no);
1185 ASM_OUTPUT_LABEL (asm_out_file, label);
1186 fde = cfun->fde;
1187 gcc_assert (fde != NULL);
1188 if (fde->dw_fde_second_begin == NULL)
1189 fde->dw_fde_end = xstrdup (label);
1190 }
1191
1192 void
1193 dwarf2out_frame_finish (void)
1194 {
1195 /* Output call frame information. */
1196 if (targetm.debug_unwind_info () == UI_DWARF2)
1197 output_call_frame_info (0);
1198
1199 /* Output another copy for the unwinder. */
1200 if (do_eh_frame)
1201 output_call_frame_info (1);
1202 }
1203
1204 /* Note that the current function section is being used for code. */
1205
1206 static void
1207 dwarf2out_note_section_used (void)
1208 {
1209 section *sec = current_function_section ();
1210 if (sec == text_section)
1211 text_section_used = true;
1212 else if (sec == cold_text_section)
1213 cold_text_section_used = true;
1214 }
1215
1216 static void var_location_switch_text_section (void);
1217 static void set_cur_line_info_table (section *);
1218
1219 void
1220 dwarf2out_switch_text_section (void)
1221 {
1222 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1223 section *sect;
1224 dw_fde_ref fde = cfun->fde;
1225
1226 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1227
1228 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_SECOND_SECT_LABEL,
1229 current_function_funcdef_no);
1230
1231 fde->dw_fde_second_begin = ggc_strdup (label);
1232 if (!in_cold_section_p)
1233 {
1234 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1235 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1236 }
1237 else
1238 {
1239 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1240 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1241 }
1242 have_multiple_function_sections = true;
1243
1244 /* There is no need to mark used sections when not debugging. */
1245 if (cold_text_section != NULL)
1246 dwarf2out_note_section_used ();
1247
1248 if (dwarf2out_do_cfi_asm ())
1249 fprintf (asm_out_file, "\t.cfi_endproc\n");
1250
1251 /* Now do the real section switch. */
1252 sect = current_function_section ();
1253 switch_to_section (sect);
1254
1255 fde->second_in_std_section
1256 = (sect == text_section
1257 || (cold_text_section && sect == cold_text_section));
1258
1259 if (dwarf2out_do_cfi_asm ())
1260 dwarf2out_do_cfi_startproc (true);
1261
1262 var_location_switch_text_section ();
1263
1264 if (cold_text_section != NULL)
1265 set_cur_line_info_table (sect);
1266 }
1267 \f
1268 /* And now, the subset of the debugging information support code necessary
1269 for emitting location expressions. */
1270
1271 /* Data about a single source file. */
1272 struct GTY((for_user)) dwarf_file_data {
1273 const char * filename;
1274 int emitted_number;
1275 };
1276
1277 /* Describe an entry into the .debug_addr section. */
1278
1279 enum ate_kind {
1280 ate_kind_rtx,
1281 ate_kind_rtx_dtprel,
1282 ate_kind_label
1283 };
1284
1285 struct GTY((for_user)) addr_table_entry {
1286 enum ate_kind kind;
1287 unsigned int refcount;
1288 unsigned int index;
1289 union addr_table_entry_struct_union
1290 {
1291 rtx GTY ((tag ("0"))) rtl;
1292 char * GTY ((tag ("1"))) label;
1293 }
1294 GTY ((desc ("%1.kind"))) addr;
1295 };
1296
1297 typedef unsigned int var_loc_view;
1298
1299 /* Location lists are ranges + location descriptions for that range,
1300 so you can track variables that are in different places over
1301 their entire life. */
1302 typedef struct GTY(()) dw_loc_list_struct {
1303 dw_loc_list_ref dw_loc_next;
1304 const char *begin; /* Label and addr_entry for start of range */
1305 addr_table_entry *begin_entry;
1306 const char *end; /* Label for end of range */
1307 char *ll_symbol; /* Label for beginning of location list.
1308 Only on head of list. */
1309 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1310 const char *section; /* Section this loclist is relative to */
1311 dw_loc_descr_ref expr;
1312 var_loc_view vbegin, vend;
1313 hashval_t hash;
1314 /* True if all addresses in this and subsequent lists are known to be
1315 resolved. */
1316 bool resolved_addr;
1317 /* True if this list has been replaced by dw_loc_next. */
1318 bool replaced;
1319 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1320 section. */
1321 unsigned char emitted : 1;
1322 /* True if hash field is index rather than hash value. */
1323 unsigned char num_assigned : 1;
1324 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1325 unsigned char offset_emitted : 1;
1326 /* True if note_variable_value_in_expr has been called on it. */
1327 unsigned char noted_variable_value : 1;
1328 /* True if the range should be emitted even if begin and end
1329 are the same. */
1330 bool force;
1331 } dw_loc_list_node;
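
/* Illustrative example (editorial addition): a variable that lives in a
   register early in a function and is later spilled to the stack would be
   described by a two-node list, conceptually

     [.LVL0, .LVL1)  DW_OP_reg3
     [.LVL1, .LVL7)  DW_OP_fbreg -24

   where BEGIN/END hold the range labels and EXPR holds the location
   description for that range.  */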
1332
1333 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1334 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1335
1336 /* Convert a DWARF stack opcode into its string name. */
1337
1338 static const char *
1339 dwarf_stack_op_name (unsigned int op)
1340 {
1341 const char *name = get_DW_OP_name (op);
1342
1343 if (name != NULL)
1344 return name;
1345
1346 return "OP_<unknown>";
1347 }
1348
1349 /* Return TRUE iff we're to output location view lists as a separate
1350 attribute next to the location lists, as an extension compatible
1351 with DWARF 2 and above. */
1352
1353 static inline bool
1354 dwarf2out_locviews_in_attribute ()
1355 {
1356 return debug_variable_location_views == 1;
1357 }
1358
1359 /* Return TRUE iff we're to output location view lists as part of the
1360 location lists, as proposed for standardization after DWARF 5. */
1361
1362 static inline bool
1363 dwarf2out_locviews_in_loclist ()
1364 {
1365 #ifndef DW_LLE_view_pair
1366 return false;
1367 #else
1368 return debug_variable_location_views == -1;
1369 #endif
1370 }
1371
1372 /* Return a pointer to a newly allocated location description. Location
1373 descriptions are simple expression terms that can be strung
1374 together to form more complicated location (address) descriptions. */
1375
1376 static inline dw_loc_descr_ref
1377 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1378 unsigned HOST_WIDE_INT oprnd2)
1379 {
1380 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1381
1382 descr->dw_loc_opc = op;
1383 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1384 descr->dw_loc_oprnd1.val_entry = NULL;
1385 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1386 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1387 descr->dw_loc_oprnd2.val_entry = NULL;
1388 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1389
1390 return descr;
1391 }
1392
1393 /* Add a location description term to a location description expression. */
1394
1395 static inline void
1396 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1397 {
1398 dw_loc_descr_ref *d;
1399
1400 /* Find the end of the chain. */
1401 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1402 ;
1403
1404 *d = descr;
1405 }
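
/* Illustrative usage sketch (editorial addition, not called anywhere in
   this file): string two terms together to describe the address that is
   16 bytes past the contents of DWARF register 6, i.e. the expression
   "DW_OP_breg6 0; DW_OP_plus_uconst 16". The helper name is hypothetical. */

static inline dw_loc_descr_ref
example_breg6_plus_16 (void)
{
  dw_loc_descr_ref head = new_loc_descr (DW_OP_breg6, 0, 0);
  add_loc_descr (&head, new_loc_descr (DW_OP_plus_uconst, 16, 0));
  return head;
}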
1406
1407 /* Compare two location operands for exact equality. */
1408
1409 static bool
1410 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1411 {
1412 if (a->val_class != b->val_class)
1413 return false;
1414 switch (a->val_class)
1415 {
1416 case dw_val_class_none:
1417 return true;
1418 case dw_val_class_addr:
1419 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1420
1421 case dw_val_class_offset:
1422 case dw_val_class_unsigned_const:
1423 case dw_val_class_const:
1424 case dw_val_class_unsigned_const_implicit:
1425 case dw_val_class_const_implicit:
1426 case dw_val_class_range_list:
1427 /* These are all HOST_WIDE_INT, signed or unsigned. */
1428 return a->v.val_unsigned == b->v.val_unsigned;
1429
1430 case dw_val_class_loc:
1431 return a->v.val_loc == b->v.val_loc;
1432 case dw_val_class_loc_list:
1433 return a->v.val_loc_list == b->v.val_loc_list;
1434 case dw_val_class_view_list:
1435 return a->v.val_view_list == b->v.val_view_list;
1436 case dw_val_class_die_ref:
1437 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1438 case dw_val_class_fde_ref:
1439 return a->v.val_fde_index == b->v.val_fde_index;
1440 case dw_val_class_symview:
1441 return strcmp (a->v.val_symbolic_view, b->v.val_symbolic_view) == 0;
1442 case dw_val_class_lbl_id:
1443 case dw_val_class_lineptr:
1444 case dw_val_class_macptr:
1445 case dw_val_class_loclistsptr:
1446 case dw_val_class_high_pc:
1447 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1448 case dw_val_class_str:
1449 return a->v.val_str == b->v.val_str;
1450 case dw_val_class_flag:
1451 return a->v.val_flag == b->v.val_flag;
1452 case dw_val_class_file:
1453 case dw_val_class_file_implicit:
1454 return a->v.val_file == b->v.val_file;
1455 case dw_val_class_decl_ref:
1456 return a->v.val_decl_ref == b->v.val_decl_ref;
1457
1458 case dw_val_class_const_double:
1459 return (a->v.val_double.high == b->v.val_double.high
1460 && a->v.val_double.low == b->v.val_double.low);
1461
1462 case dw_val_class_wide_int:
1463 return *a->v.val_wide == *b->v.val_wide;
1464
1465 case dw_val_class_vec:
1466 {
1467 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1468 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1469
1470 return (a_len == b_len
1471 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1472 }
1473
1474 case dw_val_class_data8:
1475 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1476
1477 case dw_val_class_vms_delta:
1478 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1479 && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1480
1481 case dw_val_class_discr_value:
1482 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1483 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1484 case dw_val_class_discr_list:
1485 /* It makes no sense to compare two discriminant value lists. */
1486 return false;
1487 }
1488 gcc_unreachable ();
1489 }
1490
1491 /* Compare two location atoms for exact equality. */
1492
1493 static bool
1494 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1495 {
1496 if (a->dw_loc_opc != b->dw_loc_opc)
1497 return false;
1498
1499 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1500 address size, but since we always allocate cleared storage it
1501 should be zero for other types of locations. */
1502 if (a->dtprel != b->dtprel)
1503 return false;
1504
1505 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1506 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1507 }
1508
1509 /* Compare two complete location expressions for exact equality. */
1510
1511 bool
1512 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1513 {
1514 while (1)
1515 {
1516 if (a == b)
1517 return true;
1518 if (a == NULL || b == NULL)
1519 return false;
1520 if (!loc_descr_equal_p_1 (a, b))
1521 return false;
1522
1523 a = a->dw_loc_next;
1524 b = b->dw_loc_next;
1525 }
1526 }
1527
1528
1529 /* Add a constant POLY_OFFSET to a location expression. */
1530
1531 static void
1532 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1533 {
1534 dw_loc_descr_ref loc;
1535 HOST_WIDE_INT *p;
1536
1537 gcc_assert (*list_head != NULL);
1538
1539 if (known_eq (poly_offset, 0))
1540 return;
1541
1542 /* Find the end of the chain. */
1543 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1544 ;
1545
1546 HOST_WIDE_INT offset;
1547 if (!poly_offset.is_constant (&offset))
1548 {
1549 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1550 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1551 return;
1552 }
1553
1554 p = NULL;
1555 if (loc->dw_loc_opc == DW_OP_fbreg
1556 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1557 p = &loc->dw_loc_oprnd1.v.val_int;
1558 else if (loc->dw_loc_opc == DW_OP_bregx)
1559 p = &loc->dw_loc_oprnd2.v.val_int;
1560
1561 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1562 offset. Don't optimize if a signed integer overflow would happen. */
1563 if (p != NULL
1564 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1565 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1566 *p += offset;
1567
1568 else if (offset > 0)
1569 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1570
1571 else
1572 {
1573 loc->dw_loc_next
1574 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1575 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1576 }
1577 }
1578
1579 /* Return a pointer to a newly allocated location description for
1580 REG and OFFSET. */
1581
1582 static inline dw_loc_descr_ref
1583 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1584 {
1585 HOST_WIDE_INT const_offset;
1586 if (offset.is_constant (&const_offset))
1587 {
1588 if (reg <= 31)
1589 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1590 const_offset, 0);
1591 else
1592 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1593 }
1594 else
1595 {
1596 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1597 loc_descr_plus_const (&ret, offset);
1598 return ret;
1599 }
1600 }
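
/* For instance (editorial addition): new_reg_loc_descr (3, 8) produces the
   single op "DW_OP_breg3 8", whereas new_reg_loc_descr (40, 8) must use
   "DW_OP_bregx 40, 8" because only registers 0-31 have dedicated
   DW_OP_bregN opcodes.  */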
1601
1602 /* Add a constant OFFSET to a location list. */
1603
1604 static void
1605 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1606 {
1607 dw_loc_list_ref d;
1608 for (d = list_head; d != NULL; d = d->dw_loc_next)
1609 loc_descr_plus_const (&d->expr, offset);
1610 }
1611
1612 #define DWARF_REF_SIZE \
1613 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1614
1615 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1616 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1617 DW_FORM_data16 with 128 bits. */
1618 #define DWARF_LARGEST_DATA_FORM_BITS \
1619 (dwarf_version >= 5 ? 128 : 64)
1620
1621 /* Utility inline function for construction of ops that were GNU extensions
1622 before DWARF 5. */
1623 static inline enum dwarf_location_atom
1624 dwarf_OP (enum dwarf_location_atom op)
1625 {
1626 switch (op)
1627 {
1628 case DW_OP_implicit_pointer:
1629 if (dwarf_version < 5)
1630 return DW_OP_GNU_implicit_pointer;
1631 break;
1632
1633 case DW_OP_entry_value:
1634 if (dwarf_version < 5)
1635 return DW_OP_GNU_entry_value;
1636 break;
1637
1638 case DW_OP_const_type:
1639 if (dwarf_version < 5)
1640 return DW_OP_GNU_const_type;
1641 break;
1642
1643 case DW_OP_regval_type:
1644 if (dwarf_version < 5)
1645 return DW_OP_GNU_regval_type;
1646 break;
1647
1648 case DW_OP_deref_type:
1649 if (dwarf_version < 5)
1650 return DW_OP_GNU_deref_type;
1651 break;
1652
1653 case DW_OP_convert:
1654 if (dwarf_version < 5)
1655 return DW_OP_GNU_convert;
1656 break;
1657
1658 case DW_OP_reinterpret:
1659 if (dwarf_version < 5)
1660 return DW_OP_GNU_reinterpret;
1661 break;
1662
1663 case DW_OP_addrx:
1664 if (dwarf_version < 5)
1665 return DW_OP_GNU_addr_index;
1666 break;
1667
1668 case DW_OP_constx:
1669 if (dwarf_version < 5)
1670 return DW_OP_GNU_const_index;
1671 break;
1672
1673 default:
1674 break;
1675 }
1676 return op;
1677 }
1678
1679 /* Similarly for attributes. */
1680 static inline enum dwarf_attribute
1681 dwarf_AT (enum dwarf_attribute at)
1682 {
1683 switch (at)
1684 {
1685 case DW_AT_call_return_pc:
1686 if (dwarf_version < 5)
1687 return DW_AT_low_pc;
1688 break;
1689
1690 case DW_AT_call_tail_call:
1691 if (dwarf_version < 5)
1692 return DW_AT_GNU_tail_call;
1693 break;
1694
1695 case DW_AT_call_origin:
1696 if (dwarf_version < 5)
1697 return DW_AT_abstract_origin;
1698 break;
1699
1700 case DW_AT_call_target:
1701 if (dwarf_version < 5)
1702 return DW_AT_GNU_call_site_target;
1703 break;
1704
1705 case DW_AT_call_target_clobbered:
1706 if (dwarf_version < 5)
1707 return DW_AT_GNU_call_site_target_clobbered;
1708 break;
1709
1710 case DW_AT_call_parameter:
1711 if (dwarf_version < 5)
1712 return DW_AT_abstract_origin;
1713 break;
1714
1715 case DW_AT_call_value:
1716 if (dwarf_version < 5)
1717 return DW_AT_GNU_call_site_value;
1718 break;
1719
1720 case DW_AT_call_data_value:
1721 if (dwarf_version < 5)
1722 return DW_AT_GNU_call_site_data_value;
1723 break;
1724
1725 case DW_AT_call_all_calls:
1726 if (dwarf_version < 5)
1727 return DW_AT_GNU_all_call_sites;
1728 break;
1729
1730 case DW_AT_call_all_tail_calls:
1731 if (dwarf_version < 5)
1732 return DW_AT_GNU_all_tail_call_sites;
1733 break;
1734
1735 case DW_AT_dwo_name:
1736 if (dwarf_version < 5)
1737 return DW_AT_GNU_dwo_name;
1738 break;
1739
1740 case DW_AT_addr_base:
1741 if (dwarf_version < 5)
1742 return DW_AT_GNU_addr_base;
1743 break;
1744
1745 default:
1746 break;
1747 }
1748 return at;
1749 }
1750
1751 /* And similarly for tags. */
1752 static inline enum dwarf_tag
1753 dwarf_TAG (enum dwarf_tag tag)
1754 {
1755 switch (tag)
1756 {
1757 case DW_TAG_call_site:
1758 if (dwarf_version < 5)
1759 return DW_TAG_GNU_call_site;
1760 break;
1761
1762 case DW_TAG_call_site_parameter:
1763 if (dwarf_version < 5)
1764 return DW_TAG_GNU_call_site_parameter;
1765 break;
1766
1767 default:
1768 break;
1769 }
1770 return tag;
1771 }
1772
1773 /* And similarly for forms. */
1774 static inline enum dwarf_form
1775 dwarf_FORM (enum dwarf_form form)
1776 {
1777 switch (form)
1778 {
1779 case DW_FORM_addrx:
1780 if (dwarf_version < 5)
1781 return DW_FORM_GNU_addr_index;
1782 break;
1783
1784 case DW_FORM_strx:
1785 if (dwarf_version < 5)
1786 return DW_FORM_GNU_str_index;
1787 break;
1788
1789 default:
1790 break;
1791 }
1792 return form;
1793 }
1794
1795 static unsigned long int get_base_type_offset (dw_die_ref);
1796
1797 /* Return the size of a location descriptor. */
1798
1799 static unsigned long
1800 size_of_loc_descr (dw_loc_descr_ref loc)
1801 {
1802 unsigned long size = 1;
1803
1804 switch (loc->dw_loc_opc)
1805 {
1806 case DW_OP_addr:
1807 size += DWARF2_ADDR_SIZE;
1808 break;
1809 case DW_OP_GNU_addr_index:
1810 case DW_OP_addrx:
1811 case DW_OP_GNU_const_index:
1812 case DW_OP_constx:
1813 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1814 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1815 break;
1816 case DW_OP_const1u:
1817 case DW_OP_const1s:
1818 size += 1;
1819 break;
1820 case DW_OP_const2u:
1821 case DW_OP_const2s:
1822 size += 2;
1823 break;
1824 case DW_OP_const4u:
1825 case DW_OP_const4s:
1826 size += 4;
1827 break;
1828 case DW_OP_const8u:
1829 case DW_OP_const8s:
1830 size += 8;
1831 break;
1832 case DW_OP_constu:
1833 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1834 break;
1835 case DW_OP_consts:
1836 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1837 break;
1838 case DW_OP_pick:
1839 size += 1;
1840 break;
1841 case DW_OP_plus_uconst:
1842 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1843 break;
1844 case DW_OP_skip:
1845 case DW_OP_bra:
1846 size += 2;
1847 break;
1848 case DW_OP_breg0:
1849 case DW_OP_breg1:
1850 case DW_OP_breg2:
1851 case DW_OP_breg3:
1852 case DW_OP_breg4:
1853 case DW_OP_breg5:
1854 case DW_OP_breg6:
1855 case DW_OP_breg7:
1856 case DW_OP_breg8:
1857 case DW_OP_breg9:
1858 case DW_OP_breg10:
1859 case DW_OP_breg11:
1860 case DW_OP_breg12:
1861 case DW_OP_breg13:
1862 case DW_OP_breg14:
1863 case DW_OP_breg15:
1864 case DW_OP_breg16:
1865 case DW_OP_breg17:
1866 case DW_OP_breg18:
1867 case DW_OP_breg19:
1868 case DW_OP_breg20:
1869 case DW_OP_breg21:
1870 case DW_OP_breg22:
1871 case DW_OP_breg23:
1872 case DW_OP_breg24:
1873 case DW_OP_breg25:
1874 case DW_OP_breg26:
1875 case DW_OP_breg27:
1876 case DW_OP_breg28:
1877 case DW_OP_breg29:
1878 case DW_OP_breg30:
1879 case DW_OP_breg31:
1880 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1881 break;
1882 case DW_OP_regx:
1883 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1884 break;
1885 case DW_OP_fbreg:
1886 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1887 break;
1888 case DW_OP_bregx:
1889 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1890 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1891 break;
1892 case DW_OP_piece:
1893 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1894 break;
1895 case DW_OP_bit_piece:
1896 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1897 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1898 break;
1899 case DW_OP_deref_size:
1900 case DW_OP_xderef_size:
1901 size += 1;
1902 break;
1903 case DW_OP_call2:
1904 size += 2;
1905 break;
1906 case DW_OP_call4:
1907 size += 4;
1908 break;
1909 case DW_OP_call_ref:
1910 case DW_OP_GNU_variable_value:
1911 size += DWARF_REF_SIZE;
1912 break;
1913 case DW_OP_implicit_value:
1914 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1915 + loc->dw_loc_oprnd1.v.val_unsigned;
1916 break;
1917 case DW_OP_implicit_pointer:
1918 case DW_OP_GNU_implicit_pointer:
1919 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1920 break;
1921 case DW_OP_entry_value:
1922 case DW_OP_GNU_entry_value:
1923 {
1924 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1925 size += size_of_uleb128 (op_size) + op_size;
1926 break;
1927 }
1928 case DW_OP_const_type:
1929 case DW_OP_GNU_const_type:
1930 {
1931 unsigned long o
1932 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1933 size += size_of_uleb128 (o) + 1;
1934 switch (loc->dw_loc_oprnd2.val_class)
1935 {
1936 case dw_val_class_vec:
1937 size += loc->dw_loc_oprnd2.v.val_vec.length
1938 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1939 break;
1940 case dw_val_class_const:
1941 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1942 break;
1943 case dw_val_class_const_double:
1944 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1945 break;
1946 case dw_val_class_wide_int:
1947 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1948 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1949 break;
1950 default:
1951 gcc_unreachable ();
1952 }
1953 break;
1954 }
1955 case DW_OP_regval_type:
1956 case DW_OP_GNU_regval_type:
1957 {
1958 unsigned long o
1959 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1960 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1961 + size_of_uleb128 (o);
1962 }
1963 break;
1964 case DW_OP_deref_type:
1965 case DW_OP_GNU_deref_type:
1966 {
1967 unsigned long o
1968 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1969 size += 1 + size_of_uleb128 (o);
1970 }
1971 break;
1972 case DW_OP_convert:
1973 case DW_OP_reinterpret:
1974 case DW_OP_GNU_convert:
1975 case DW_OP_GNU_reinterpret:
1976 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1977 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1978 else
1979 {
1980 unsigned long o
1981 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1982 size += size_of_uleb128 (o);
1983 }
1984 break;
1985 case DW_OP_GNU_parameter_ref:
1986 size += 4;
1987 break;
1988 default:
1989 break;
1990 }
1991
1992 return size;
1993 }
1994
1995 /* Return the size of a series of location descriptors. */
1996
1997 unsigned long
1998 size_of_locs (dw_loc_descr_ref loc)
1999 {
2000 dw_loc_descr_ref l;
2001 unsigned long size;
2002
2003 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
2004 field, to avoid writing to a PCH file. */
2005 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2006 {
2007 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
2008 break;
2009 size += size_of_loc_descr (l);
2010 }
2011 if (! l)
2012 return size;
2013
2014 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2015 {
2016 l->dw_loc_addr = size;
2017 size += size_of_loc_descr (l);
2018 }
2019
2020 return size;
2021 }
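/* For example, the two-operation expression DW_OP_breg6 -8; DW_OP_deref
   is 1 + size_of_sleb128 (-8) + 1 = 3 bytes, since -8 fits in a single
   signed LEB128 byte.  When a DW_OP_skip or DW_OP_bra is present, the
   second loop above also records each operation's byte offset in
   dw_loc_addr so that output_loc_operands can later resolve the 2-byte
   branch operands.  */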
2022
2023 /* Return the size of the value in a DW_AT_discr_value attribute. */
2024
2025 static int
2026 size_of_discr_value (dw_discr_value *discr_value)
2027 {
2028 if (discr_value->pos)
2029 return size_of_uleb128 (discr_value->v.uval);
2030 else
2031 return size_of_sleb128 (discr_value->v.sval);
2032 }
2033
2034 /* Return the size of the value in a DW_AT_discr_list attribute. */
2035
2036 static int
2037 size_of_discr_list (dw_discr_list_ref discr_list)
2038 {
2039 int size = 0;
2040
2041 for (dw_discr_list_ref list = discr_list;
2042 list != NULL;
2043 list = list->dw_discr_next)
2044 {
2045 /* One byte for the discriminant value descriptor, and then one or two
2046 LEB128 numbers, depending on whether it's a single case label or a
2047 range label. */
2048 size += 1;
2049 size += size_of_discr_value (&list->dw_discr_lower_bound);
2050 if (list->dw_discr_range != 0)
2051 size += size_of_discr_value (&list->dw_discr_upper_bound);
2052 }
2053 return size;
2054 }
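/* For example, a discriminant list holding the single case label 3 and
   the range 10 .. 20 (all unsigned) takes
   (1 + size_of_uleb128 (3)) + (1 + size_of_uleb128 (10)
    + size_of_uleb128 (20)) = 2 + 3 = 5 bytes.  */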
2055
2056 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2057 static void get_ref_die_offset_label (char *, dw_die_ref);
2058 static unsigned long int get_ref_die_offset (dw_die_ref);
2059
2060 /* Output location description stack opcode's operands (if any).
2061 The for_eh_or_skip parameter controls whether register numbers are
2062 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2063 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2064 info). This should be suppressed for the cases that have not been converted
2065 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2066
2067 static void
2068 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2069 {
2070 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2071 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2072
2073 switch (loc->dw_loc_opc)
2074 {
2075 #ifdef DWARF2_DEBUGGING_INFO
2076 case DW_OP_const2u:
2077 case DW_OP_const2s:
2078 dw2_asm_output_data (2, val1->v.val_int, NULL);
2079 break;
2080 case DW_OP_const4u:
2081 if (loc->dtprel)
2082 {
2083 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2084 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2085 val1->v.val_addr);
2086 fputc ('\n', asm_out_file);
2087 break;
2088 }
2089 /* FALLTHRU */
2090 case DW_OP_const4s:
2091 dw2_asm_output_data (4, val1->v.val_int, NULL);
2092 break;
2093 case DW_OP_const8u:
2094 if (loc->dtprel)
2095 {
2096 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2097 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2098 val1->v.val_addr);
2099 fputc ('\n', asm_out_file);
2100 break;
2101 }
2102 /* FALLTHRU */
2103 case DW_OP_const8s:
2104 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2105 dw2_asm_output_data (8, val1->v.val_int, NULL);
2106 break;
2107 case DW_OP_skip:
2108 case DW_OP_bra:
2109 {
2110 int offset;
2111
2112 gcc_assert (val1->val_class == dw_val_class_loc);
2113 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2114
2115 dw2_asm_output_data (2, offset, NULL);
2116 }
2117 break;
2118 case DW_OP_implicit_value:
2119 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2120 switch (val2->val_class)
2121 {
2122 case dw_val_class_const:
2123 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2124 break;
2125 case dw_val_class_vec:
2126 {
2127 unsigned int elt_size = val2->v.val_vec.elt_size;
2128 unsigned int len = val2->v.val_vec.length;
2129 unsigned int i;
2130 unsigned char *p;
2131
2132 if (elt_size > sizeof (HOST_WIDE_INT))
2133 {
2134 elt_size /= 2;
2135 len *= 2;
2136 }
2137 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2138 i < len;
2139 i++, p += elt_size)
2140 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2141 "fp or vector constant word %u", i);
2142 }
2143 break;
2144 case dw_val_class_const_double:
2145 {
2146 unsigned HOST_WIDE_INT first, second;
2147
2148 if (WORDS_BIG_ENDIAN)
2149 {
2150 first = val2->v.val_double.high;
2151 second = val2->v.val_double.low;
2152 }
2153 else
2154 {
2155 first = val2->v.val_double.low;
2156 second = val2->v.val_double.high;
2157 }
2158 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2159 first, NULL);
2160 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2161 second, NULL);
2162 }
2163 break;
2164 case dw_val_class_wide_int:
2165 {
2166 int i;
2167 int len = get_full_len (*val2->v.val_wide);
2168 if (WORDS_BIG_ENDIAN)
2169 for (i = len - 1; i >= 0; --i)
2170 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2171 val2->v.val_wide->elt (i), NULL);
2172 else
2173 for (i = 0; i < len; ++i)
2174 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2175 val2->v.val_wide->elt (i), NULL);
2176 }
2177 break;
2178 case dw_val_class_addr:
2179 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2180 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2181 break;
2182 default:
2183 gcc_unreachable ();
2184 }
2185 break;
2186 #else
2187 case DW_OP_const2u:
2188 case DW_OP_const2s:
2189 case DW_OP_const4u:
2190 case DW_OP_const4s:
2191 case DW_OP_const8u:
2192 case DW_OP_const8s:
2193 case DW_OP_skip:
2194 case DW_OP_bra:
2195 case DW_OP_implicit_value:
2196 /* We currently don't make any attempt to make sure these are
2197 aligned properly like we do for the main unwind info, so
2198 don't support emitting things larger than a byte if we're
2199 only doing unwinding. */
2200 gcc_unreachable ();
2201 #endif
2202 case DW_OP_const1u:
2203 case DW_OP_const1s:
2204 dw2_asm_output_data (1, val1->v.val_int, NULL);
2205 break;
2206 case DW_OP_constu:
2207 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2208 break;
2209 case DW_OP_consts:
2210 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2211 break;
2212 case DW_OP_pick:
2213 dw2_asm_output_data (1, val1->v.val_int, NULL);
2214 break;
2215 case DW_OP_plus_uconst:
2216 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2217 break;
2218 case DW_OP_breg0:
2219 case DW_OP_breg1:
2220 case DW_OP_breg2:
2221 case DW_OP_breg3:
2222 case DW_OP_breg4:
2223 case DW_OP_breg5:
2224 case DW_OP_breg6:
2225 case DW_OP_breg7:
2226 case DW_OP_breg8:
2227 case DW_OP_breg9:
2228 case DW_OP_breg10:
2229 case DW_OP_breg11:
2230 case DW_OP_breg12:
2231 case DW_OP_breg13:
2232 case DW_OP_breg14:
2233 case DW_OP_breg15:
2234 case DW_OP_breg16:
2235 case DW_OP_breg17:
2236 case DW_OP_breg18:
2237 case DW_OP_breg19:
2238 case DW_OP_breg20:
2239 case DW_OP_breg21:
2240 case DW_OP_breg22:
2241 case DW_OP_breg23:
2242 case DW_OP_breg24:
2243 case DW_OP_breg25:
2244 case DW_OP_breg26:
2245 case DW_OP_breg27:
2246 case DW_OP_breg28:
2247 case DW_OP_breg29:
2248 case DW_OP_breg30:
2249 case DW_OP_breg31:
2250 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2251 break;
2252 case DW_OP_regx:
2253 {
2254 unsigned r = val1->v.val_unsigned;
2255 if (for_eh_or_skip >= 0)
2256 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2257 gcc_assert (size_of_uleb128 (r)
2258 == size_of_uleb128 (val1->v.val_unsigned));
2259 dw2_asm_output_data_uleb128 (r, NULL);
2260 }
2261 break;
2262 case DW_OP_fbreg:
2263 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2264 break;
2265 case DW_OP_bregx:
2266 {
2267 unsigned r = val1->v.val_unsigned;
2268 if (for_eh_or_skip >= 0)
2269 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2270 gcc_assert (size_of_uleb128 (r)
2271 == size_of_uleb128 (val1->v.val_unsigned));
2272 dw2_asm_output_data_uleb128 (r, NULL);
2273 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2274 }
2275 break;
2276 case DW_OP_piece:
2277 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2278 break;
2279 case DW_OP_bit_piece:
2280 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2281 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2282 break;
2283 case DW_OP_deref_size:
2284 case DW_OP_xderef_size:
2285 dw2_asm_output_data (1, val1->v.val_int, NULL);
2286 break;
2287
2288 case DW_OP_addr:
2289 if (loc->dtprel)
2290 {
2291 if (targetm.asm_out.output_dwarf_dtprel)
2292 {
2293 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2294 DWARF2_ADDR_SIZE,
2295 val1->v.val_addr);
2296 fputc ('\n', asm_out_file);
2297 }
2298 else
2299 gcc_unreachable ();
2300 }
2301 else
2302 {
2303 #ifdef DWARF2_DEBUGGING_INFO
2304 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2305 #else
2306 gcc_unreachable ();
2307 #endif
2308 }
2309 break;
2310
2311 case DW_OP_GNU_addr_index:
2312 case DW_OP_addrx:
2313 case DW_OP_GNU_const_index:
2314 case DW_OP_constx:
2315 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2316 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2317 "(index into .debug_addr)");
2318 break;
2319
2320 case DW_OP_call2:
2321 case DW_OP_call4:
2322 {
2323 unsigned long die_offset
2324 = get_ref_die_offset (val1->v.val_die_ref.die);
2325 /* Make sure the offset has been computed and that we can encode it as
2326 an operand. */
2327 gcc_assert (die_offset > 0
2328 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2329 ? 0xffff
2330 : 0xffffffff));
2331 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2332 die_offset, NULL);
2333 }
2334 break;
2335
2336 case DW_OP_call_ref:
2337 case DW_OP_GNU_variable_value:
2338 {
2339 char label[MAX_ARTIFICIAL_LABEL_BYTES
2340 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2341 gcc_assert (val1->val_class == dw_val_class_die_ref);
2342 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2343 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2344 }
2345 break;
2346
2347 case DW_OP_implicit_pointer:
2348 case DW_OP_GNU_implicit_pointer:
2349 {
2350 char label[MAX_ARTIFICIAL_LABEL_BYTES
2351 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2352 gcc_assert (val1->val_class == dw_val_class_die_ref);
2353 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2354 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2355 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2356 }
2357 break;
2358
2359 case DW_OP_entry_value:
2360 case DW_OP_GNU_entry_value:
2361 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2362 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2363 break;
2364
2365 case DW_OP_const_type:
2366 case DW_OP_GNU_const_type:
2367 {
2368 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2369 gcc_assert (o);
2370 dw2_asm_output_data_uleb128 (o, NULL);
2371 switch (val2->val_class)
2372 {
2373 case dw_val_class_const:
2374 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2375 dw2_asm_output_data (1, l, NULL);
2376 dw2_asm_output_data (l, val2->v.val_int, NULL);
2377 break;
2378 case dw_val_class_vec:
2379 {
2380 unsigned int elt_size = val2->v.val_vec.elt_size;
2381 unsigned int len = val2->v.val_vec.length;
2382 unsigned int i;
2383 unsigned char *p;
2384
2385 l = len * elt_size;
2386 dw2_asm_output_data (1, l, NULL);
2387 if (elt_size > sizeof (HOST_WIDE_INT))
2388 {
2389 elt_size /= 2;
2390 len *= 2;
2391 }
2392 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2393 i < len;
2394 i++, p += elt_size)
2395 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2396 "fp or vector constant word %u", i);
2397 }
2398 break;
2399 case dw_val_class_const_double:
2400 {
2401 unsigned HOST_WIDE_INT first, second;
2402 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2403
2404 dw2_asm_output_data (1, 2 * l, NULL);
2405 if (WORDS_BIG_ENDIAN)
2406 {
2407 first = val2->v.val_double.high;
2408 second = val2->v.val_double.low;
2409 }
2410 else
2411 {
2412 first = val2->v.val_double.low;
2413 second = val2->v.val_double.high;
2414 }
2415 dw2_asm_output_data (l, first, NULL);
2416 dw2_asm_output_data (l, second, NULL);
2417 }
2418 break;
2419 case dw_val_class_wide_int:
2420 {
2421 int i;
2422 int len = get_full_len (*val2->v.val_wide);
2423 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2424
2425 dw2_asm_output_data (1, len * l, NULL);
2426 if (WORDS_BIG_ENDIAN)
2427 for (i = len - 1; i >= 0; --i)
2428 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2429 else
2430 for (i = 0; i < len; ++i)
2431 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2432 }
2433 break;
2434 default:
2435 gcc_unreachable ();
2436 }
2437 }
2438 break;
2439 case DW_OP_regval_type:
2440 case DW_OP_GNU_regval_type:
2441 {
2442 unsigned r = val1->v.val_unsigned;
2443 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2444 gcc_assert (o);
2445 if (for_eh_or_skip >= 0)
2446 {
2447 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2448 gcc_assert (size_of_uleb128 (r)
2449 == size_of_uleb128 (val1->v.val_unsigned));
2450 }
2451 dw2_asm_output_data_uleb128 (r, NULL);
2452 dw2_asm_output_data_uleb128 (o, NULL);
2453 }
2454 break;
2455 case DW_OP_deref_type:
2456 case DW_OP_GNU_deref_type:
2457 {
2458 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2459 gcc_assert (o);
2460 dw2_asm_output_data (1, val1->v.val_int, NULL);
2461 dw2_asm_output_data_uleb128 (o, NULL);
2462 }
2463 break;
2464 case DW_OP_convert:
2465 case DW_OP_reinterpret:
2466 case DW_OP_GNU_convert:
2467 case DW_OP_GNU_reinterpret:
2468 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2469 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2470 else
2471 {
2472 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2473 gcc_assert (o);
2474 dw2_asm_output_data_uleb128 (o, NULL);
2475 }
2476 break;
2477
2478 case DW_OP_GNU_parameter_ref:
2479 {
2480 unsigned long o;
2481 gcc_assert (val1->val_class == dw_val_class_die_ref);
2482 o = get_ref_die_offset (val1->v.val_die_ref.die);
2483 dw2_asm_output_data (4, o, NULL);
2484 }
2485 break;
2486
2487 default:
2488 /* Other codes have no operands. */
2489 break;
2490 }
2491 }
2492
2493 /* Output a sequence of location operations.
2494 The for_eh_or_skip parameter controls whether register numbers are
2495 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2496 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2497 info). This should be suppressed for the cases that have not been converted
2498 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2499
2500 void
2501 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2502 {
2503 for (; loc != NULL; loc = loc->dw_loc_next)
2504 {
2505 enum dwarf_location_atom opc = loc->dw_loc_opc;
2506 /* Output the opcode. */
2507 if (for_eh_or_skip >= 0
2508 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2509 {
2510 unsigned r = (opc - DW_OP_breg0);
2511 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2512 gcc_assert (r <= 31);
2513 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2514 }
2515 else if (for_eh_or_skip >= 0
2516 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2517 {
2518 unsigned r = (opc - DW_OP_reg0);
2519 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2520 gcc_assert (r <= 31);
2521 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2522 }
2523
2524 dw2_asm_output_data (1, opc,
2525 "%s", dwarf_stack_op_name (opc));
2526
2527 /* Output the operand(s) (if any). */
2528 output_loc_operands (loc, for_eh_or_skip);
2529 }
2530 }
2531
2532 /* Output location description stack opcode's operands (if any).
2533 The output is single bytes on a line, suitable for .cfi_escape. */
2534
2535 static void
2536 output_loc_operands_raw (dw_loc_descr_ref loc)
2537 {
2538 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2539 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2540
2541 switch (loc->dw_loc_opc)
2542 {
2543 case DW_OP_addr:
2544 case DW_OP_GNU_addr_index:
2545 case DW_OP_addrx:
2546 case DW_OP_GNU_const_index:
2547 case DW_OP_constx:
2548 case DW_OP_implicit_value:
2549 /* We cannot output addresses in .cfi_escape, only bytes. */
2550 gcc_unreachable ();
2551
2552 case DW_OP_const1u:
2553 case DW_OP_const1s:
2554 case DW_OP_pick:
2555 case DW_OP_deref_size:
2556 case DW_OP_xderef_size:
2557 fputc (',', asm_out_file);
2558 dw2_asm_output_data_raw (1, val1->v.val_int);
2559 break;
2560
2561 case DW_OP_const2u:
2562 case DW_OP_const2s:
2563 fputc (',', asm_out_file);
2564 dw2_asm_output_data_raw (2, val1->v.val_int);
2565 break;
2566
2567 case DW_OP_const4u:
2568 case DW_OP_const4s:
2569 fputc (',', asm_out_file);
2570 dw2_asm_output_data_raw (4, val1->v.val_int);
2571 break;
2572
2573 case DW_OP_const8u:
2574 case DW_OP_const8s:
2575 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2576 fputc (',', asm_out_file);
2577 dw2_asm_output_data_raw (8, val1->v.val_int);
2578 break;
2579
2580 case DW_OP_skip:
2581 case DW_OP_bra:
2582 {
2583 int offset;
2584
2585 gcc_assert (val1->val_class == dw_val_class_loc);
2586 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2587
2588 fputc (',', asm_out_file);
2589 dw2_asm_output_data_raw (2, offset);
2590 }
2591 break;
2592
2593 case DW_OP_regx:
2594 {
2595 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2596 gcc_assert (size_of_uleb128 (r)
2597 == size_of_uleb128 (val1->v.val_unsigned));
2598 fputc (',', asm_out_file);
2599 dw2_asm_output_data_uleb128_raw (r);
2600 }
2601 break;
2602
2603 case DW_OP_constu:
2604 case DW_OP_plus_uconst:
2605 case DW_OP_piece:
2606 fputc (',', asm_out_file);
2607 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2608 break;
2609
2610 case DW_OP_bit_piece:
2611 fputc (',', asm_out_file);
2612 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2613 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2614 break;
2615
2616 case DW_OP_consts:
2617 case DW_OP_breg0:
2618 case DW_OP_breg1:
2619 case DW_OP_breg2:
2620 case DW_OP_breg3:
2621 case DW_OP_breg4:
2622 case DW_OP_breg5:
2623 case DW_OP_breg6:
2624 case DW_OP_breg7:
2625 case DW_OP_breg8:
2626 case DW_OP_breg9:
2627 case DW_OP_breg10:
2628 case DW_OP_breg11:
2629 case DW_OP_breg12:
2630 case DW_OP_breg13:
2631 case DW_OP_breg14:
2632 case DW_OP_breg15:
2633 case DW_OP_breg16:
2634 case DW_OP_breg17:
2635 case DW_OP_breg18:
2636 case DW_OP_breg19:
2637 case DW_OP_breg20:
2638 case DW_OP_breg21:
2639 case DW_OP_breg22:
2640 case DW_OP_breg23:
2641 case DW_OP_breg24:
2642 case DW_OP_breg25:
2643 case DW_OP_breg26:
2644 case DW_OP_breg27:
2645 case DW_OP_breg28:
2646 case DW_OP_breg29:
2647 case DW_OP_breg30:
2648 case DW_OP_breg31:
2649 case DW_OP_fbreg:
2650 fputc (',', asm_out_file);
2651 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2652 break;
2653
2654 case DW_OP_bregx:
2655 {
2656 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2657 gcc_assert (size_of_uleb128 (r)
2658 == size_of_uleb128 (val1->v.val_unsigned));
2659 fputc (',', asm_out_file);
2660 dw2_asm_output_data_uleb128_raw (r);
2661 fputc (',', asm_out_file);
2662 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2663 }
2664 break;
2665
2666 case DW_OP_implicit_pointer:
2667 case DW_OP_entry_value:
2668 case DW_OP_const_type:
2669 case DW_OP_regval_type:
2670 case DW_OP_deref_type:
2671 case DW_OP_convert:
2672 case DW_OP_reinterpret:
2673 case DW_OP_GNU_implicit_pointer:
2674 case DW_OP_GNU_entry_value:
2675 case DW_OP_GNU_const_type:
2676 case DW_OP_GNU_regval_type:
2677 case DW_OP_GNU_deref_type:
2678 case DW_OP_GNU_convert:
2679 case DW_OP_GNU_reinterpret:
2680 case DW_OP_GNU_parameter_ref:
2681 gcc_unreachable ();
2682 break;
2683
2684 default:
2685 /* Other codes have no operands. */
2686 break;
2687 }
2688 }
2689
2690 void
2691 output_loc_sequence_raw (dw_loc_descr_ref loc)
2692 {
2693 while (1)
2694 {
2695 enum dwarf_location_atom opc = loc->dw_loc_opc;
2696 /* Output the opcode. */
2697 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2698 {
2699 unsigned r = (opc - DW_OP_breg0);
2700 r = DWARF2_FRAME_REG_OUT (r, 1);
2701 gcc_assert (r <= 31);
2702 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2703 }
2704 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2705 {
2706 unsigned r = (opc - DW_OP_reg0);
2707 r = DWARF2_FRAME_REG_OUT (r, 1);
2708 gcc_assert (r <= 31);
2709 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2710 }
2711 /* Output the opcode. */
2712 fprintf (asm_out_file, "%#x", opc);
2713 output_loc_operands_raw (loc);
2714
2715 if (!loc->dw_loc_next)
2716 break;
2717 loc = loc->dw_loc_next;
2718
2719 fputc (',', asm_out_file);
2720 }
2721 }
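/* For instance, the one-operation expression DW_OP_breg7 16 is printed
   as the comma-separated bytes 0x77,0x10, the form expected by a
   .cfi_escape directive (a sketch; the exact byte formatting is done by
   the dw2_asm_output_*_raw helpers).  */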
2722
2723 /* This function builds a dwarf location descriptor sequence from a
2724 dw_cfa_location, adding the given OFFSET to the result of the
2725 expression. */
2726
2727 struct dw_loc_descr_node *
2728 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2729 {
2730 struct dw_loc_descr_node *head, *tmp;
2731
2732 offset += cfa->offset;
2733
2734 if (cfa->indirect)
2735 {
2736 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2737 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2738 head->dw_loc_oprnd1.val_entry = NULL;
2739 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2740 add_loc_descr (&head, tmp);
2741 loc_descr_plus_const (&head, offset);
2742 }
2743 else
2744 head = new_reg_loc_descr (cfa->reg, offset);
2745
2746 return head;
2747 }
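/* For example (a sketch): for a non-indirect CFA of register 7 plus 16
   and OFFSET 8, the result is the single operation DW_OP_breg7 24; for
   an indirect CFA the sequence becomes roughly
   DW_OP_breg<reg> <base_offset>; DW_OP_deref; DW_OP_plus_uconst 24.  */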
2748
2749 /* This function builds a dwarf location descriptor sequence for
2750 the address at OFFSET from the CFA when the stack is aligned to
2751 ALIGNMENT bytes. */
2752
2753 struct dw_loc_descr_node *
2754 build_cfa_aligned_loc (dw_cfa_location *cfa,
2755 poly_int64 offset, HOST_WIDE_INT alignment)
2756 {
2757 struct dw_loc_descr_node *head;
2758 unsigned int dwarf_fp
2759 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2760
2761 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2762 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2763 {
2764 head = new_reg_loc_descr (dwarf_fp, 0);
2765 add_loc_descr (&head, int_loc_descriptor (alignment));
2766 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2767 loc_descr_plus_const (&head, offset);
2768 }
2769 else
2770 head = new_reg_loc_descr (dwarf_fp, offset);
2771 return head;
2772 }
2773 \f
2774 /* And now, the support for symbolic debugging information. */
2775
2776 /* .debug_str support. */
2777
2778 static void dwarf2out_init (const char *);
2779 static void dwarf2out_finish (const char *);
2780 static void dwarf2out_early_finish (const char *);
2781 static void dwarf2out_assembly_start (void);
2782 static void dwarf2out_define (unsigned int, const char *);
2783 static void dwarf2out_undef (unsigned int, const char *);
2784 static void dwarf2out_start_source_file (unsigned, const char *);
2785 static void dwarf2out_end_source_file (unsigned);
2786 static void dwarf2out_function_decl (tree);
2787 static void dwarf2out_begin_block (unsigned, unsigned);
2788 static void dwarf2out_end_block (unsigned, unsigned);
2789 static bool dwarf2out_ignore_block (const_tree);
2790 static void dwarf2out_early_global_decl (tree);
2791 static void dwarf2out_late_global_decl (tree);
2792 static void dwarf2out_type_decl (tree, int);
2793 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2794 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2795 dw_die_ref);
2796 static void dwarf2out_abstract_function (tree);
2797 static void dwarf2out_var_location (rtx_insn *);
2798 static void dwarf2out_inline_entry (tree);
2799 static void dwarf2out_size_function (tree);
2800 static void dwarf2out_begin_function (tree);
2801 static void dwarf2out_end_function (unsigned int);
2802 static void dwarf2out_register_main_translation_unit (tree unit);
2803 static void dwarf2out_set_name (tree, tree);
2804 static void dwarf2out_register_external_die (tree decl, const char *sym,
2805 unsigned HOST_WIDE_INT off);
2806 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2807 unsigned HOST_WIDE_INT *off);
2808
2809 /* The debug hooks structure. */
2810
2811 const struct gcc_debug_hooks dwarf2_debug_hooks =
2812 {
2813 dwarf2out_init,
2814 dwarf2out_finish,
2815 dwarf2out_early_finish,
2816 dwarf2out_assembly_start,
2817 dwarf2out_define,
2818 dwarf2out_undef,
2819 dwarf2out_start_source_file,
2820 dwarf2out_end_source_file,
2821 dwarf2out_begin_block,
2822 dwarf2out_end_block,
2823 dwarf2out_ignore_block,
2824 dwarf2out_source_line,
2825 dwarf2out_begin_prologue,
2826 #if VMS_DEBUGGING_INFO
2827 dwarf2out_vms_end_prologue,
2828 dwarf2out_vms_begin_epilogue,
2829 #else
2830 debug_nothing_int_charstar,
2831 debug_nothing_int_charstar,
2832 #endif
2833 dwarf2out_end_epilogue,
2834 dwarf2out_begin_function,
2835 dwarf2out_end_function, /* end_function */
2836 dwarf2out_register_main_translation_unit,
2837 dwarf2out_function_decl, /* function_decl */
2838 dwarf2out_early_global_decl,
2839 dwarf2out_late_global_decl,
2840 dwarf2out_type_decl, /* type_decl */
2841 dwarf2out_imported_module_or_decl,
2842 dwarf2out_die_ref_for_decl,
2843 dwarf2out_register_external_die,
2844 debug_nothing_tree, /* deferred_inline_function */
2845 /* The DWARF 2 backend tries to reduce debugging bloat by not
2846 emitting the abstract description of inline functions until
2847 something tries to reference them. */
2848 dwarf2out_abstract_function, /* outlining_inline_function */
2849 debug_nothing_rtx_code_label, /* label */
2850 debug_nothing_int, /* handle_pch */
2851 dwarf2out_var_location,
2852 dwarf2out_inline_entry, /* inline_entry */
2853 dwarf2out_size_function, /* size_function */
2854 dwarf2out_switch_text_section,
2855 dwarf2out_set_name,
2856 1, /* start_end_main_source_file */
2857 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2858 };
2859
2860 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2861 {
2862 dwarf2out_init,
2863 debug_nothing_charstar,
2864 debug_nothing_charstar,
2865 dwarf2out_assembly_start,
2866 debug_nothing_int_charstar,
2867 debug_nothing_int_charstar,
2868 debug_nothing_int_charstar,
2869 debug_nothing_int,
2870 debug_nothing_int_int, /* begin_block */
2871 debug_nothing_int_int, /* end_block */
2872 debug_true_const_tree, /* ignore_block */
2873 dwarf2out_source_line, /* source_line */
2874 debug_nothing_int_int_charstar, /* begin_prologue */
2875 debug_nothing_int_charstar, /* end_prologue */
2876 debug_nothing_int_charstar, /* begin_epilogue */
2877 debug_nothing_int_charstar, /* end_epilogue */
2878 debug_nothing_tree, /* begin_function */
2879 debug_nothing_int, /* end_function */
2880 debug_nothing_tree, /* register_main_translation_unit */
2881 debug_nothing_tree, /* function_decl */
2882 debug_nothing_tree, /* early_global_decl */
2883 debug_nothing_tree, /* late_global_decl */
2884 debug_nothing_tree_int, /* type_decl */
2885 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2886 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2887 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2888 debug_nothing_tree, /* deferred_inline_function */
2889 debug_nothing_tree, /* outlining_inline_function */
2890 debug_nothing_rtx_code_label, /* label */
2891 debug_nothing_int, /* handle_pch */
2892 debug_nothing_rtx_insn, /* var_location */
2893 debug_nothing_tree, /* inline_entry */
2894 debug_nothing_tree, /* size_function */
2895 debug_nothing_void, /* switch_text_section */
2896 debug_nothing_tree_tree, /* set_name */
2897 0, /* start_end_main_source_file */
2898 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2899 };
2900 \f
2901 /* NOTE: In the comments in this file, many references are made to
2902 "Debugging Information Entries". This term is abbreviated as `DIE'
2903 throughout the remainder of this file. */
2904
2905 /* An internal representation of the DWARF output is built, and then
2906 walked to generate the DWARF debugging info. The walk of the internal
2907 representation is done after the entire program has been compiled.
2908 The types below are used to describe the internal representation. */
2909
2910 /* Whether to put type DIEs into their own section .debug_types instead
2911 of making them part of the .debug_info section. Only supported for
2912 Dwarf V4 or higher, and only if the user didn't disable them through
2913 -fno-debug-types-section. It is more efficient to put them in
2914 separate comdat sections since the linker will then be able to
2915 remove duplicates. But not all tools support .debug_types sections
2916 yet. For Dwarf V5 or higher .debug_types doesn't exist any more;
2917 type units use the DW_UT_type unit type in the .debug_info section. */
2918
2919 #define use_debug_types (dwarf_version >= 4 && flag_debug_types_section)
2920
2921 /* Various DIE's use offsets relative to the beginning of the
2922 .debug_info section to refer to each other. */
2923
2924 typedef long int dw_offset;
2925
2926 struct comdat_type_node;
2927
2928 /* The entries in the line_info table more-or-less mirror the opcodes
2929 that are used in the real dwarf line table. Arrays of these entries
2930 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2931 supported. */
2932
2933 enum dw_line_info_opcode {
2934 /* Emit DW_LNE_set_address; the operand is the label index. */
2935 LI_set_address,
2936
2937 /* Emit a row to the matrix with the given line. This may be done
2938 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2939 special opcodes. */
2940 LI_set_line,
2941
2942 /* Emit a DW_LNS_set_file. */
2943 LI_set_file,
2944
2945 /* Emit a DW_LNS_set_column. */
2946 LI_set_column,
2947
2948 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2949 LI_negate_stmt,
2950
2951 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2952 LI_set_prologue_end,
2953 LI_set_epilogue_begin,
2954
2955 /* Emit a DW_LNE_set_discriminator. */
2956 LI_set_discriminator,
2957
2958 /* Output a Fixed Advance PC; the target PC is the label index; the
2959 base PC is the previous LI_adv_address or LI_set_address entry.
2960 We only use this when emitting debug views without assembler
2961 support, at explicit user request. Ideally, we should only use
2962 it when the offset might be zero but we can't tell: it's the only
2963 way to maybe change the PC without resetting the view number. */
2964 LI_adv_address
2965 };
2966
2967 typedef struct GTY(()) dw_line_info_struct {
2968 enum dw_line_info_opcode opcode;
2969 unsigned int val;
2970 } dw_line_info_entry;
2971
2972
2973 struct GTY(()) dw_line_info_table {
2974 /* The label that marks the end of this section. */
2975 const char *end_label;
2976
2977 /* The values for the last row of the matrix, as collected in the table.
2978 These are used to minimize the changes to the next row. */
2979 unsigned int file_num;
2980 unsigned int line_num;
2981 unsigned int column_num;
2982 int discrim_num;
2983 bool is_stmt;
2984 bool in_use;
2985
2986 /* This denotes the NEXT view number.
2987
2988 If it is 0, it is known that the NEXT view will be the first view
2989 at the given PC.
2990
2991 If it is -1, we're forcing the view number to be reset, e.g. at a
2992 function entry.
2993
2994 The meaning of other nonzero values depends on whether we're
2995 computing views internally or leaving it for the assembler to do
2996 so. If we're emitting them internally, view denotes the view
2997 number since the last known advance of PC. If we're leaving it
2998 for the assembler, it denotes the LVU label number that we're
2999 going to ask the assembler to assign. */
3000 var_loc_view view;
3001
3002 /* This counts the number of symbolic views emitted in this table
3003 since the latest view reset. Its max value, over all tables,
3004 sets symview_upper_bound. */
3005 var_loc_view symviews_since_reset;
3006
3007 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
3008 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
3009 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
3010 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
3011
3012 vec<dw_line_info_entry, va_gc> *entries;
3013 };
3014
3015 /* This is an upper bound for view numbers that the assembler may
3016 assign to symbolic views output in this translation. It is used to
3017 decide how big a field to use to represent view numbers in
3018 symview-classed attributes. */
3019
3020 static var_loc_view symview_upper_bound;
3021
3022 /* If we're keeping track of location views and their reset points, and
3023 INSN is a reset point (i.e., it necessarily advances the PC), mark
3024 the next view in TABLE as reset. */
3025
3026 static void
3027 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
3028 {
3029 if (!debug_internal_reset_location_views)
3030 return;
3031
3032 /* Maybe turn (part of?) this test into a default target hook. */
3033 int reset = 0;
3034
3035 if (targetm.reset_location_view)
3036 reset = targetm.reset_location_view (insn);
3037
3038 if (reset)
3039 ;
3040 else if (JUMP_TABLE_DATA_P (insn))
3041 reset = 1;
3042 else if (GET_CODE (insn) == USE
3043 || GET_CODE (insn) == CLOBBER
3044 || GET_CODE (insn) == ASM_INPUT
3045 || asm_noperands (insn) >= 0)
3046 ;
3047 else if (get_attr_min_length (insn) > 0)
3048 reset = 1;
3049
3050 if (reset > 0 && !RESETTING_VIEW_P (table->view))
3051 RESET_NEXT_VIEW (table->view);
3052 }
3053
3054 /* Each DIE attribute has a field specifying the attribute kind,
3055 a link to the next attribute in the chain, and an attribute value.
3056 Attributes are typically linked below the DIE they modify. */
3057
3058 typedef struct GTY(()) dw_attr_struct {
3059 enum dwarf_attribute dw_attr;
3060 dw_val_node dw_attr_val;
3061 }
3062 dw_attr_node;
3063
3064
3065 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3066 The children of each node form a circular list linked by
3067 die_sib. die_child points to the node *before* the "first" child node. */
3068
3069 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3070 union die_symbol_or_type_node
3071 {
3072 const char * GTY ((tag ("0"))) die_symbol;
3073 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3074 }
3075 GTY ((desc ("%0.comdat_type_p"))) die_id;
3076 vec<dw_attr_node, va_gc> *die_attr;
3077 dw_die_ref die_parent;
3078 dw_die_ref die_child;
3079 dw_die_ref die_sib;
3080 dw_die_ref die_definition; /* ref from a specification to its definition */
3081 dw_offset die_offset;
3082 unsigned long die_abbrev;
3083 int die_mark;
3084 unsigned int decl_id;
3085 enum dwarf_tag die_tag;
3086 /* Die is used and must not be pruned as unused. */
3087 BOOL_BITFIELD die_perennial_p : 1;
3088 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3089 /* For an external ref to die_symbol if die_offset contains an extra
3090 offset to that symbol. */
3091 BOOL_BITFIELD with_offset : 1;
3092 /* Whether this DIE was removed from the DIE tree, for example via
3093 prune_unused_types. We don't consider those present from the
3094 DIE lookup routines. */
3095 BOOL_BITFIELD removed : 1;
3096 /* Lots of spare bits. */
3097 }
3098 die_node;
3099
3100 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3101 static bool early_dwarf;
3102 static bool early_dwarf_finished;
3103 struct set_early_dwarf {
3104 bool saved;
3105 set_early_dwarf () : saved(early_dwarf)
3106 {
3107 gcc_assert (! early_dwarf_finished);
3108 early_dwarf = true;
3109 }
3110 ~set_early_dwarf () { early_dwarf = saved; }
3111 };
3112
3113 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3114 #define FOR_EACH_CHILD(die, c, expr) do { \
3115 c = die->die_child; \
3116 if (c) do { \
3117 c = c->die_sib; \
3118 expr; \
3119 } while (c != die->die_child); \
3120 } while (0)
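/* For example (a sketch), counting the direct children of a DIE:

     dw_die_ref c;
     int n = 0;
     FOR_EACH_CHILD (some_die, c, n++);

   where some_die is any dw_die_ref; each child is visited exactly once
   because the children form a circular list.  */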
3121
3122 /* The pubname structure */
3123
3124 typedef struct GTY(()) pubname_struct {
3125 dw_die_ref die;
3126 const char *name;
3127 }
3128 pubname_entry;
3129
3130
3131 struct GTY(()) dw_ranges {
3132 const char *label;
3133 /* If this is positive, it's a block number, otherwise it's a
3134 bitwise-negated index into dw_ranges_by_label. */
3135 int num;
3136 /* Index for the range list for DW_FORM_rnglistx. */
3137 unsigned int idx : 31;
3138 /* True if this range might be in a different section
3139 from the previous entry. */
3140 unsigned int maybe_new_sec : 1;
3141 };
3142
3143 /* A structure to hold a macinfo entry. */
3144
3145 typedef struct GTY(()) macinfo_struct {
3146 unsigned char code;
3147 unsigned HOST_WIDE_INT lineno;
3148 const char *info;
3149 }
3150 macinfo_entry;
3151
3152
3153 struct GTY(()) dw_ranges_by_label {
3154 const char *begin;
3155 const char *end;
3156 };
3157
3158 /* The comdat type node structure. */
3159 struct GTY(()) comdat_type_node
3160 {
3161 dw_die_ref root_die;
3162 dw_die_ref type_die;
3163 dw_die_ref skeleton_die;
3164 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3165 comdat_type_node *next;
3166 };
3167
3168 /* A list of DIEs for which we can't determine ancestry (parent_die
3169 field) just yet. Later in dwarf2out_finish we will fill in the
3170 missing bits. */
3171 typedef struct GTY(()) limbo_die_struct {
3172 dw_die_ref die;
3173 /* The tree for which this DIE was created. We use this to
3174 determine ancestry later. */
3175 tree created_for;
3176 struct limbo_die_struct *next;
3177 }
3178 limbo_die_node;
3179
3180 typedef struct skeleton_chain_struct
3181 {
3182 dw_die_ref old_die;
3183 dw_die_ref new_die;
3184 struct skeleton_chain_struct *parent;
3185 }
3186 skeleton_chain_node;
3187
3188 /* Define a macro which returns nonzero for a TYPE_DECL which was
3189 implicitly generated for a type.
3190
3191 Note that, unlike the C front-end (which generates a NULL named
3192 TYPE_DECL node for each complete tagged type, each array type,
3193 and each function type node created) the C++ front-end generates
3194 a _named_ TYPE_DECL node for each tagged type node created.
3195 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3196 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3197 front-end, but for each type, tagged or not. */
3198
3199 #define TYPE_DECL_IS_STUB(decl) \
3200 (DECL_NAME (decl) == NULL_TREE \
3201 || (DECL_ARTIFICIAL (decl) \
3202 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3203 /* This is necessary for stub decls that \
3204 appear in nested inline functions. */ \
3205 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3206 && (decl_ultimate_origin (decl) \
3207 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3208
3209 /* Information concerning the compilation unit's programming
3210 language, and compiler version. */
3211
3212 /* Fixed size portion of the DWARF compilation unit header. */
3213 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3214 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3215 + (dwarf_version >= 5 ? 4 : 3))
3216
3217 /* Fixed size portion of the DWARF comdat type unit header. */
3218 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3219 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3220 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3221
3222 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3223 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3224 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
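/* As a worked example, assuming 32-bit DWARF (DWARF_INITIAL_LENGTH_SIZE
   of 4 and DWARF_OFFSET_SIZE of 4): the fixed compilation unit header is
   4 + 4 + 4 = 12 bytes for DWARF 5 and 4 + 4 + 3 = 11 bytes for earlier
   versions, and the DWARF 5 skeleton header adds 8 more bytes for the
   dwo_id.  */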
3225
3226 /* Fixed size portion of public names info. */
3227 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3228
3229 /* Fixed size portion of the address range info. */
3230 #define DWARF_ARANGES_HEADER_SIZE \
3231 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3232 DWARF2_ADDR_SIZE * 2) \
3233 - DWARF_INITIAL_LENGTH_SIZE)
3234
3235 /* Size of padding portion in the address range info. It must be
3236 aligned to twice the pointer size. */
3237 #define DWARF_ARANGES_PAD_SIZE \
3238 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3239 DWARF2_ADDR_SIZE * 2) \
3240 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
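/* Worked example, again assuming 32-bit DWARF and an 8-byte address:
   the unpadded fixed part is 4 + 4 + 4 = 12 bytes, which DWARF_ROUND
   brings up to the required 2 * 8 = 16 byte boundary, so
   DWARF_ARANGES_HEADER_SIZE is 16 - 4 = 12 and DWARF_ARANGES_PAD_SIZE
   is 16 - 12 = 4 bytes of padding.  */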
3241
3242 /* Use assembler line directives if available. */
3243 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3244 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3245 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3246 #else
3247 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3248 #endif
3249 #endif
3250
3251 /* Use assembler views in line directives if available. */
3252 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3253 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3254 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3255 #else
3256 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3257 #endif
3258 #endif
3259
3260 /* Return true if GCC configure detected assembler support for .loc. */
3261
3262 bool
3263 dwarf2out_default_as_loc_support (void)
3264 {
3265 return DWARF2_ASM_LINE_DEBUG_INFO;
3266 #if (GCC_VERSION >= 3000)
3267 # undef DWARF2_ASM_LINE_DEBUG_INFO
3268 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3269 #endif
3270 }
3271
3272 /* Return true if GCC configure detected assembler support for views
3273 in .loc directives. */
3274
3275 bool
3276 dwarf2out_default_as_locview_support (void)
3277 {
3278 return DWARF2_ASM_VIEW_DEBUG_INFO;
3279 #if (GCC_VERSION >= 3000)
3280 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3281 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3282 #endif
3283 }
3284
3285 /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
3286 view computation, and it refers to a view identifier for which we
3287 will not emit a label because it is known to map to a view number
3288 zero. We won't allocate the bitmap if we're not using assembler
3289 support for location views, but we have to make the variable
3290 visible for GGC and for code that will be optimized out for lack of
3291 support but that's still parsed and compiled. We could abstract it
3292 out with macros, but it's not worth it. */
3293 static GTY(()) bitmap zero_view_p;
3294
3295 /* Evaluate to TRUE iff N is known to identify the first location view
3296 at its PC. When not using assembler location view computation,
3297 that must be view number zero. Otherwise, ZERO_VIEW_P is allocated
3298 and view label numbers recorded in it are the ones known to be
3299 zero. */
3300 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3301 || (N) == (var_loc_view)-1 \
3302 || (zero_view_p \
3303 && bitmap_bit_p (zero_view_p, (N))))
3304
3305 /* Return true iff we're to emit .loc directives for the assembler to
3306 generate line number sections.
3307
3308 When we're not emitting views, all we need from the assembler is
3309 support for .loc directives.
3310
3311 If we are emitting views, we can only use the assembler's .loc
3312 support if it also supports views.
3313
3314 When the compiler is emitting the line number programs and
3315 computing view numbers itself, it resets view numbers at known PC
3316 changes and counts from that, and then it emits view numbers as
3317 literal constants in locviewlists. There are cases in which the
3318 compiler is not sure about PC changes, e.g. when extra alignment is
3319 requested for a label. In these cases, the compiler may not reset
3320 the view counter, and the potential PC advance in the line number
3321 program will use an opcode that does not reset the view counter
3322 even if the PC actually changes, so that compiler and debug info
3323 consumer can keep view numbers in sync.
3324
3325 When the compiler defers view computation to the assembler, it
3326 emits symbolic view numbers in locviewlists, with the exception of
3327 views known to be zero (forced resets, or reset after
3328 compiler-visible PC changes): instead of emitting symbols for
3329 these, we emit literal zero and assert the assembler agrees with
3330 the compiler's assessment. We could use symbolic views everywhere,
3331 instead of special-casing zero views, but then we'd be unable to
3332 optimize out locviewlists that contain only zeros. */
3333
3334 static bool
3335 output_asm_line_debug_info (void)
3336 {
3337 return (dwarf2out_as_loc_support
3338 && (dwarf2out_as_locview_support
3339 || !debug_variable_location_views));
3340 }
3341
3342 /* Minimum line offset in a special line info. opcode.
3343 This value was chosen to give a reasonable range of values. */
3344 #define DWARF_LINE_BASE -10
3345
3346 /* First special line opcode - leave room for the standard opcodes. */
3347 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3348
3349 /* Range of line offsets in a special line info. opcode. */
3350 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
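/* With the standard opcode set, DW_LNS_set_isa is 12, so
   DWARF_LINE_OPCODE_BASE works out to 13 and DWARF_LINE_RANGE to
   254 - 13 + 1 = 242.  */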
3351
3352 /* Flag that indicates the initial value of the is_stmt_start flag.
3353 In the present implementation, we do not mark any lines as
3354 the beginning of a source statement, because that information
3355 is not made available by the GCC front-end. */
3356 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3357
3358 /* Maximum number of operations per instruction bundle. */
3359 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3360 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3361 #endif
3362
3363 /* This location is used by calc_die_sizes() to keep track of
3364 the offset of each DIE within the .debug_info section. */
3365 static unsigned long next_die_offset;
3366
3367 /* Record the root of the DIE's built for the current compilation unit. */
3368 static GTY(()) dw_die_ref single_comp_unit_die;
3369
3370 /* A list of type DIEs that have been separated into comdat sections. */
3371 static GTY(()) comdat_type_node *comdat_type_list;
3372
3373 /* A list of CU DIEs that have been separated. */
3374 static GTY(()) limbo_die_node *cu_die_list;
3375
3376 /* A list of DIEs with a NULL parent waiting to be relocated. */
3377 static GTY(()) limbo_die_node *limbo_die_list;
3378
3379 /* A list of DIEs for which we may have to generate
3380 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3381 static GTY(()) limbo_die_node *deferred_asm_name;
3382
3383 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3384 {
3385 typedef const char *compare_type;
3386
3387 static hashval_t hash (dwarf_file_data *);
3388 static bool equal (dwarf_file_data *, const char *);
3389 };
3390
3391 /* Filenames referenced by this compilation unit. */
3392 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3393
3394 struct decl_die_hasher : ggc_ptr_hash<die_node>
3395 {
3396 typedef tree compare_type;
3397
3398 static hashval_t hash (die_node *);
3399 static bool equal (die_node *, tree);
3400 };
3401 /* A hash table of references to DIE's that describe declarations.
3402 The key is a DECL_UID() which is a unique number identifying each decl. */
3403 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3404
3405 struct GTY ((for_user)) variable_value_struct {
3406 unsigned int decl_id;
3407 vec<dw_die_ref, va_gc> *dies;
3408 };
3409
3410 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3411 {
3412 typedef tree compare_type;
3413
3414 static hashval_t hash (variable_value_struct *);
3415 static bool equal (variable_value_struct *, tree);
3416 };
3417 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3418 dw_val_class_decl_ref class, indexed by the FUNCTION_DECLs that are
3419 the DECL_CONTEXT of the referenced VAR_DECLs. */
3420 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3421
3422 struct block_die_hasher : ggc_ptr_hash<die_struct>
3423 {
3424 static hashval_t hash (die_struct *);
3425 static bool equal (die_struct *, die_struct *);
3426 };
3427
3428 /* A hash table of references to DIE's that describe COMMON blocks.
3429 The key is DECL_UID() ^ die_parent. */
3430 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3431
3432 typedef struct GTY(()) die_arg_entry_struct {
3433 dw_die_ref die;
3434 tree arg;
3435 } die_arg_entry;
3436
3437
3438 /* Node of the variable location list. */
3439 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3440 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3441 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3442 in mode of the EXPR_LIST node and first EXPR_LIST operand
3443 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3444 location or NULL for padding. For larger bitsizes,
3445 mode is 0 and first operand is a CONCAT with bitsize
3446 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3447 NULL as second operand. */
3448 rtx GTY (()) loc;
3449 const char * GTY (()) label;
3450 struct var_loc_node * GTY (()) next;
3451 var_loc_view view;
3452 };
3453
3454 /* Variable location list. */
3455 struct GTY ((for_user)) var_loc_list_def {
3456 struct var_loc_node * GTY (()) first;
3457
3458 /* Pointer to the last but one or last element of the
3459 chained list. If the list is empty, both first and
3460 last are NULL; if the list contains just one node
3461 or the last node certainly is not redundant, it points
3462 to the last node; otherwise it points to the last but one.
3463 Do not mark it for GC because it is marked through the chain. */
3464 struct var_loc_node * GTY ((skip ("%h"))) last;
3465
3466 /* Pointer to the last element before section switch,
3467 if NULL, either sections weren't switched or first
3468 is after section switch. */
3469 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3470
3471 /* DECL_UID of the variable decl. */
3472 unsigned int decl_id;
3473 };
3474 typedef struct var_loc_list_def var_loc_list;
3475
3476 /* Call argument location list. */
3477 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3478 rtx GTY (()) call_arg_loc_note;
3479 const char * GTY (()) label;
3480 tree GTY (()) block;
3481 bool tail_call_p;
3482 rtx GTY (()) symbol_ref;
3483 struct call_arg_loc_node * GTY (()) next;
3484 };
3485
3486
3487 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3488 {
3489 typedef const_tree compare_type;
3490
3491 static hashval_t hash (var_loc_list *);
3492 static bool equal (var_loc_list *, const_tree);
3493 };
3494
3495 /* Table of decl location linked lists. */
3496 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3497
3498 /* Head and tail of call_arg_loc chain. */
3499 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3500 static struct call_arg_loc_node *call_arg_loc_last;
3501
3502 /* Number of call sites in the current function. */
3503 static int call_site_count = -1;
3504 /* Number of tail call sites in the current function. */
3505 static int tail_call_site_count = -1;
3506
3507 /* A cached location list. */
3508 struct GTY ((for_user)) cached_dw_loc_list_def {
3509 /* The DECL_UID of the decl that this entry describes. */
3510 unsigned int decl_id;
3511
3512 /* The cached location list. */
3513 dw_loc_list_ref loc_list;
3514 };
3515 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3516
3517 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3518 {
3519
3520 typedef const_tree compare_type;
3521
3522 static hashval_t hash (cached_dw_loc_list *);
3523 static bool equal (cached_dw_loc_list *, const_tree);
3524 };
3525
3526 /* Table of cached location lists. */
3527 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3528
3529 /* A vector of references to DIE's that are uniquely identified by their tag,
3530 presence/absence of children DIE's, and list of attribute/value pairs. */
3531 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3532
3533 /* A hash map to remember the stack usage for DWARF procedures. The value
3534 stored is the stack size difference between before the DWARF procedure
3535 invocation and after it returned.  In other words, for a DWARF procedure
3536 that consumes N stack slots and that pushes M ones, this stores M - N. */
3537 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
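/* For example, a DWARF procedure that pops its two arguments and pushes
   a single result would be recorded here as 1 - 2 = -1.  */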
3538
3539 /* A global counter for generating labels for line number data. */
3540 static unsigned int line_info_label_num;
3541
3542 /* The current table to which we should emit line number information
3543 for the current function. This will be set up at the beginning of
3544 assembly for the function. */
3545 static GTY(()) dw_line_info_table *cur_line_info_table;
3546
3547 /* The two default tables of line number info. */
3548 static GTY(()) dw_line_info_table *text_section_line_info;
3549 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3550
3551 /* The set of all non-default tables of line number info. */
3552 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3553
3554 /* A flag telling the pubnames/pubtypes export code whether there is an
3555 info section to refer to.  */
3556 static bool info_section_emitted;
3557
3558 /* A pointer to the base of a table that contains a list of publicly
3559 accessible names. */
3560 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3561
3562 /* A pointer to the base of a table that contains a list of publicly
3563 accessible types. */
3564 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3565
3566 /* A pointer to the base of a table that contains a list of macro
3567 defines/undefines (and file start/end markers). */
3568 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3569
3570 /* True if a .debug_macinfo or .debug_macro section is going to be
3571 emitted. */
3572 #define have_macinfo \
3573 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3574 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3575 && !macinfo_table->is_empty ())
3576
3577 /* Vector of dies for which we should generate .debug_ranges info. */
3578 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3579
3580 /* Vector of pairs of labels referenced in ranges_table. */
3581 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3582
3583 /* Whether we have location lists that need outputting.  */
3584 static GTY(()) bool have_location_lists;
3585
3586 /* Unique label counter. */
3587 static GTY(()) unsigned int loclabel_num;
3588
3589 /* Unique label counter for point-of-call tables. */
3590 static GTY(()) unsigned int poc_label_num;
3591
3592 /* The last file entry emitted by maybe_emit_file(). */
3593 static GTY(()) struct dwarf_file_data * last_emitted_file;
3594
3595 /* Number of internal labels generated by gen_internal_sym(). */
3596 static GTY(()) int label_num;
3597
3598 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3599
3600 /* Instances of generic types for which we need to generate debug
3601 info that describes their generic parameters and arguments.  That
3602 generation needs to happen once all types are properly laid out so
3603 we do it at the end of compilation. */
3604 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3605
3606 /* Offset from the "steady-state frame pointer" to the frame base,
3607 within the current function. */
3608 static poly_int64 frame_pointer_fb_offset;
3609 static bool frame_pointer_fb_offset_valid;
3610
3611 static vec<dw_die_ref> base_types;
3612
3613 /* Flags to represent a set of attribute classes for attributes that represent
3614 a scalar value (bounds, pointers, ...). */
3615 enum dw_scalar_form
3616 {
3617 dw_scalar_form_constant = 0x01,
3618 dw_scalar_form_exprloc = 0x02,
3619 dw_scalar_form_reference = 0x04
3620 };
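
/* The values are distinct bits, so callers can OR them together to
   describe the set of forms they accept; for instance
   (dw_scalar_form_constant | dw_scalar_form_exprloc) would allow either
   a constant or a DWARF expression.  */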
3621
3622 /* Forward declarations for functions defined in this file. */
3623
3624 static int is_pseudo_reg (const_rtx);
3625 static tree type_main_variant (tree);
3626 static int is_tagged_type (const_tree);
3627 static const char *dwarf_tag_name (unsigned);
3628 static const char *dwarf_attr_name (unsigned);
3629 static const char *dwarf_form_name (unsigned);
3630 static tree decl_ultimate_origin (const_tree);
3631 static tree decl_class_context (tree);
3632 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3633 static inline enum dw_val_class AT_class (dw_attr_node *);
3634 static inline unsigned int AT_index (dw_attr_node *);
3635 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3636 static inline unsigned AT_flag (dw_attr_node *);
3637 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3638 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3639 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3640 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3641 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3642 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3643 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3644 unsigned int, unsigned char *);
3645 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3646 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3647 static inline const char *AT_string (dw_attr_node *);
3648 static enum dwarf_form AT_string_form (dw_attr_node *);
3649 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3650 static void add_AT_specification (dw_die_ref, dw_die_ref);
3651 static inline dw_die_ref AT_ref (dw_attr_node *);
3652 static inline int AT_ref_external (dw_attr_node *);
3653 static inline void set_AT_ref_external (dw_attr_node *, int);
3654 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3655 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3656 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3657 dw_loc_list_ref);
3658 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3659 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3660 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3661 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3662 static void remove_addr_table_entry (addr_table_entry *);
3663 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3664 static inline rtx AT_addr (dw_attr_node *);
3665 static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *);
3666 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3667 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3668 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3669 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3670 unsigned long, bool);
3671 static inline const char *AT_lbl (dw_attr_node *);
3672 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3673 static const char *get_AT_low_pc (dw_die_ref);
3674 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3675 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3676 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3677 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3678 static bool is_c (void);
3679 static bool is_cxx (void);
3680 static bool is_cxx (const_tree);
3681 static bool is_fortran (void);
3682 static bool is_ada (void);
3683 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3684 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3685 static void add_child_die (dw_die_ref, dw_die_ref);
3686 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3687 static dw_die_ref lookup_type_die (tree);
3688 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3689 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3690 static void equate_type_number_to_die (tree, dw_die_ref);
3691 static dw_die_ref lookup_decl_die (tree);
3692 static var_loc_list *lookup_decl_loc (const_tree);
3693 static void equate_decl_number_to_die (tree, dw_die_ref);
3694 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3695 static void print_spaces (FILE *);
3696 static void print_die (dw_die_ref, FILE *);
3697 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3698 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3699 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3700 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3701 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3702 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3703 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3704 struct md5_ctx *, int *);
3705 struct checksum_attributes;
3706 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3707 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3708 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3709 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3710 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3711 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3712 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3713 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3714 static int is_type_die (dw_die_ref);
3715 static inline bool is_template_instantiation (dw_die_ref);
3716 static int is_declaration_die (dw_die_ref);
3717 static int should_move_die_to_comdat (dw_die_ref);
3718 static dw_die_ref clone_as_declaration (dw_die_ref);
3719 static dw_die_ref clone_die (dw_die_ref);
3720 static dw_die_ref clone_tree (dw_die_ref);
3721 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3722 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3723 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3724 static dw_die_ref generate_skeleton (dw_die_ref);
3725 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3726 dw_die_ref,
3727 dw_die_ref);
3728 static void break_out_comdat_types (dw_die_ref);
3729 static void copy_decls_for_unworthy_types (dw_die_ref);
3730
3731 static void add_sibling_attributes (dw_die_ref);
3732 static void output_location_lists (dw_die_ref);
3733 static int constant_size (unsigned HOST_WIDE_INT);
3734 static unsigned long size_of_die (dw_die_ref);
3735 static void calc_die_sizes (dw_die_ref);
3736 static void calc_base_type_die_sizes (void);
3737 static void mark_dies (dw_die_ref);
3738 static void unmark_dies (dw_die_ref);
3739 static void unmark_all_dies (dw_die_ref);
3740 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3741 static unsigned long size_of_aranges (void);
3742 static enum dwarf_form value_format (dw_attr_node *);
3743 static void output_value_format (dw_attr_node *);
3744 static void output_abbrev_section (void);
3745 static void output_die_abbrevs (unsigned long, dw_die_ref);
3746 static void output_die (dw_die_ref);
3747 static void output_compilation_unit_header (enum dwarf_unit_type);
3748 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3749 static void output_comdat_type_unit (comdat_type_node *);
3750 static const char *dwarf2_name (tree, int);
3751 static void add_pubname (tree, dw_die_ref);
3752 static void add_enumerator_pubname (const char *, dw_die_ref);
3753 static void add_pubname_string (const char *, dw_die_ref);
3754 static void add_pubtype (tree, dw_die_ref);
3755 static void output_pubnames (vec<pubname_entry, va_gc> *);
3756 static void output_aranges (void);
3757 static unsigned int add_ranges (const_tree, bool = false);
3758 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3759 bool *, bool);
3760 static void output_ranges (void);
3761 static dw_line_info_table *new_line_info_table (void);
3762 static void output_line_info (bool);
3763 static void output_file_names (void);
3764 static dw_die_ref base_type_die (tree, bool);
3765 static int is_base_type (tree);
3766 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3767 static int decl_quals (const_tree);
3768 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3769 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3770 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3771 static unsigned int dbx_reg_number (const_rtx);
3772 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3773 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3774 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3775 enum var_init_status);
3776 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3777 enum var_init_status);
3778 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3779 enum var_init_status);
3780 static int is_based_loc (const_rtx);
3781 static bool resolve_one_addr (rtx *);
3782 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3783 enum var_init_status);
3784 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3785 enum var_init_status);
3786 struct loc_descr_context;
3787 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3788 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3789 static dw_loc_list_ref loc_list_from_tree (tree, int,
3790 struct loc_descr_context *);
3791 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3792 struct loc_descr_context *);
3793 static tree field_type (const_tree);
3794 static unsigned int simple_type_align_in_bits (const_tree);
3795 static unsigned int simple_decl_align_in_bits (const_tree);
3796 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3797 struct vlr_context;
3798 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3799 HOST_WIDE_INT *);
3800 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3801 dw_loc_list_ref);
3802 static void add_data_member_location_attribute (dw_die_ref, tree,
3803 struct vlr_context *);
3804 static bool add_const_value_attribute (dw_die_ref, rtx);
3805 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3806 static void insert_wide_int (const wide_int &, unsigned char *, int);
3807 static void insert_float (const_rtx, unsigned char *);
3808 static rtx rtl_for_decl_location (tree);
3809 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3810 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3811 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3812 static void add_name_attribute (dw_die_ref, const char *);
3813 static void add_desc_attribute (dw_die_ref, tree);
3814 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3815 static void add_comp_dir_attribute (dw_die_ref);
3816 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3817 struct loc_descr_context *);
3818 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3819 struct loc_descr_context *);
3820 static void add_subscript_info (dw_die_ref, tree, bool);
3821 static void add_byte_size_attribute (dw_die_ref, tree);
3822 static void add_alignment_attribute (dw_die_ref, tree);
3823 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3824 struct vlr_context *);
3825 static void add_bit_size_attribute (dw_die_ref, tree);
3826 static void add_prototyped_attribute (dw_die_ref, tree);
3827 static void add_abstract_origin_attribute (dw_die_ref, tree);
3828 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3829 static void add_src_coords_attributes (dw_die_ref, tree);
3830 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3831 static void add_discr_value (dw_die_ref, dw_discr_value *);
3832 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3833 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3834 static dw_die_ref scope_die_for (tree, dw_die_ref);
3835 static inline int local_scope_p (dw_die_ref);
3836 static inline int class_scope_p (dw_die_ref);
3837 static inline int class_or_namespace_scope_p (dw_die_ref);
3838 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3839 static void add_calling_convention_attribute (dw_die_ref, tree);
3840 static const char *type_tag (const_tree);
3841 static tree member_declared_type (const_tree);
3842 #if 0
3843 static const char *decl_start_label (tree);
3844 #endif
3845 static void gen_array_type_die (tree, dw_die_ref);
3846 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3847 #if 0
3848 static void gen_entry_point_die (tree, dw_die_ref);
3849 #endif
3850 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3851 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3852 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3853 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3854 static void gen_formal_types_die (tree, dw_die_ref);
3855 static void gen_subprogram_die (tree, dw_die_ref);
3856 static void gen_variable_die (tree, tree, dw_die_ref);
3857 static void gen_const_die (tree, dw_die_ref);
3858 static void gen_label_die (tree, dw_die_ref);
3859 static void gen_lexical_block_die (tree, dw_die_ref);
3860 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3861 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3862 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3863 static dw_die_ref gen_compile_unit_die (const char *);
3864 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3865 static void gen_member_die (tree, dw_die_ref);
3866 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3867 enum debug_info_usage);
3868 static void gen_subroutine_type_die (tree, dw_die_ref);
3869 static void gen_typedef_die (tree, dw_die_ref);
3870 static void gen_type_die (tree, dw_die_ref);
3871 static void gen_block_die (tree, dw_die_ref);
3872 static void decls_for_scope (tree, dw_die_ref, bool = true);
3873 static bool is_naming_typedef_decl (const_tree);
3874 static inline dw_die_ref get_context_die (tree);
3875 static void gen_namespace_die (tree, dw_die_ref);
3876 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3877 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3878 static dw_die_ref force_decl_die (tree);
3879 static dw_die_ref force_type_die (tree);
3880 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3881 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3882 static struct dwarf_file_data * lookup_filename (const char *);
3883 static void retry_incomplete_types (void);
3884 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3885 static void gen_generic_params_dies (tree);
3886 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3887 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3888 static void splice_child_die (dw_die_ref, dw_die_ref);
3889 static int file_info_cmp (const void *, const void *);
3890 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3891 const char *, var_loc_view, const char *);
3892 static void output_loc_list (dw_loc_list_ref);
3893 static char *gen_internal_sym (const char *);
3894 static bool want_pubnames (void);
3895
3896 static void prune_unmark_dies (dw_die_ref);
3897 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3898 static void prune_unused_types_mark (dw_die_ref, int);
3899 static void prune_unused_types_walk (dw_die_ref);
3900 static void prune_unused_types_walk_attribs (dw_die_ref);
3901 static void prune_unused_types_prune (dw_die_ref);
3902 static void prune_unused_types (void);
3903 static int maybe_emit_file (struct dwarf_file_data *fd);
3904 static inline const char *AT_vms_delta1 (dw_attr_node *);
3905 static inline const char *AT_vms_delta2 (dw_attr_node *);
3906 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3907 static void gen_remaining_tmpl_value_param_die_attribute (void);
3908 static bool generic_type_p (tree);
3909 static void schedule_generic_params_dies_gen (tree t);
3910 static void gen_scheduled_generic_parms_dies (void);
3911 static void resolve_variable_values (void);
3912
3913 static const char *comp_dir_string (void);
3914
3915 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3916
3917 /* enum for tracking thread-local variables whose address is really an offset
3918 relative to the TLS pointer, which will need link-time relocation, but will
3919 not need relocation by the DWARF consumer. */
3920
3921 enum dtprel_bool
3922 {
3923 dtprel_false = 0,
3924 dtprel_true = 1
3925 };
3926
3927 /* Return the operator to use for an address of a variable. For dtprel_true, we
3928 use DW_OP_const*. For regular variables, which need both link-time
3929 relocation and consumer-level relocation (e.g., to account for shared objects
3930 loaded at a random address), we use DW_OP_addr*. */
3931
3932 static inline enum dwarf_location_atom
3933 dw_addr_op (enum dtprel_bool dtprel)
3934 {
3935 if (dtprel == dtprel_true)
3936 return (dwarf_split_debug_info ? dwarf_OP (DW_OP_constx)
3937 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3938 else
3939 return dwarf_split_debug_info ? dwarf_OP (DW_OP_addrx) : DW_OP_addr;
3940 }
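
/* Concretely: for dtprel_true this yields DW_OP_constx under split debug
   info and otherwise DW_OP_const4u or DW_OP_const8u depending on
   DWARF2_ADDR_SIZE; for ordinary addresses it yields DW_OP_addrx under
   split debug info and DW_OP_addr otherwise.  */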
3941
3942 /* Return a pointer to a newly allocated address location description. If
3943 dwarf_split_debug_info is true, then record the address with the appropriate
3944 relocation. */
3945 static inline dw_loc_descr_ref
3946 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3947 {
3948 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3949
3950 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3951 ref->dw_loc_oprnd1.v.val_addr = addr;
3952 ref->dtprel = dtprel;
3953 if (dwarf_split_debug_info)
3954 ref->dw_loc_oprnd1.val_entry
3955 = add_addr_table_entry (addr,
3956 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3957 else
3958 ref->dw_loc_oprnd1.val_entry = NULL;
3959
3960 return ref;
3961 }
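
/* In the non-split case the operand of the resulting DW_OP_addr (or
   DW_OP_const*u for dtprel) is the address itself, to be relocated at
   link time; in the split case DW_OP_addrx/DW_OP_constx instead refer,
   by index, to the .debug_addr entry recorded via add_addr_table_entry.  */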
3962
3963 /* Section names used to hold DWARF debugging information. */
3964
3965 #ifndef DEBUG_INFO_SECTION
3966 #define DEBUG_INFO_SECTION ".debug_info"
3967 #endif
3968 #ifndef DEBUG_DWO_INFO_SECTION
3969 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3970 #endif
3971 #ifndef DEBUG_LTO_INFO_SECTION
3972 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3973 #endif
3974 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3975 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3976 #endif
3977 #ifndef DEBUG_ABBREV_SECTION
3978 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3979 #endif
3980 #ifndef DEBUG_LTO_ABBREV_SECTION
3981 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3982 #endif
3983 #ifndef DEBUG_DWO_ABBREV_SECTION
3984 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3985 #endif
3986 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3987 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3988 #endif
3989 #ifndef DEBUG_ARANGES_SECTION
3990 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3991 #endif
3992 #ifndef DEBUG_ADDR_SECTION
3993 #define DEBUG_ADDR_SECTION ".debug_addr"
3994 #endif
3995 #ifndef DEBUG_MACINFO_SECTION
3996 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
3997 #endif
3998 #ifndef DEBUG_LTO_MACINFO_SECTION
3999 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
4000 #endif
4001 #ifndef DEBUG_DWO_MACINFO_SECTION
4002 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
4003 #endif
4004 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
4005 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
4006 #endif
4007 #ifndef DEBUG_MACRO_SECTION
4008 #define DEBUG_MACRO_SECTION ".debug_macro"
4009 #endif
4010 #ifndef DEBUG_LTO_MACRO_SECTION
4011 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
4012 #endif
4013 #ifndef DEBUG_DWO_MACRO_SECTION
4014 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
4015 #endif
4016 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
4017 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
4018 #endif
4019 #ifndef DEBUG_LINE_SECTION
4020 #define DEBUG_LINE_SECTION ".debug_line"
4021 #endif
4022 #ifndef DEBUG_LTO_LINE_SECTION
4023 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
4024 #endif
4025 #ifndef DEBUG_DWO_LINE_SECTION
4026 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
4027 #endif
4028 #ifndef DEBUG_LTO_DWO_LINE_SECTION
4029 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
4030 #endif
4031 #ifndef DEBUG_LOC_SECTION
4032 #define DEBUG_LOC_SECTION ".debug_loc"
4033 #endif
4034 #ifndef DEBUG_DWO_LOC_SECTION
4035 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
4036 #endif
4037 #ifndef DEBUG_LOCLISTS_SECTION
4038 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
4039 #endif
4040 #ifndef DEBUG_DWO_LOCLISTS_SECTION
4041 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
4042 #endif
4043 #ifndef DEBUG_PUBNAMES_SECTION
4044 #define DEBUG_PUBNAMES_SECTION \
4045 ((debug_generate_pub_sections == 2) \
4046 ? ".debug_gnu_pubnames" : ".debug_pubnames")
4047 #endif
4048 #ifndef DEBUG_PUBTYPES_SECTION
4049 #define DEBUG_PUBTYPES_SECTION \
4050 ((debug_generate_pub_sections == 2) \
4051 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4052 #endif
4053 #ifndef DEBUG_STR_OFFSETS_SECTION
4054 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4055 #endif
4056 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4057 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4058 #endif
4059 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4060 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4061 #endif
4062 #ifndef DEBUG_STR_SECTION
4063 #define DEBUG_STR_SECTION ".debug_str"
4064 #endif
4065 #ifndef DEBUG_LTO_STR_SECTION
4066 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4067 #endif
4068 #ifndef DEBUG_STR_DWO_SECTION
4069 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4070 #endif
4071 #ifndef DEBUG_LTO_STR_DWO_SECTION
4072 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4073 #endif
4074 #ifndef DEBUG_RANGES_SECTION
4075 #define DEBUG_RANGES_SECTION ".debug_ranges"
4076 #endif
4077 #ifndef DEBUG_RNGLISTS_SECTION
4078 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4079 #endif
4080 #ifndef DEBUG_LINE_STR_SECTION
4081 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4082 #endif
4083 #ifndef DEBUG_LTO_LINE_STR_SECTION
4084 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4085 #endif
4086
4087 /* Standard ELF section names for compiled code and data. */
4088 #ifndef TEXT_SECTION_NAME
4089 #define TEXT_SECTION_NAME ".text"
4090 #endif
4091
4092 /* Section flags for .debug_str section. */
4093 #define DEBUG_STR_SECTION_FLAGS \
4094 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4095 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4096 : SECTION_DEBUG)
4097
4098 /* Section flags for .debug_str.dwo section. */
4099 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4100
4101 /* Attribute used to refer to the macro section. */
4102 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4103 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
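
/* That is, DWARF 5 and later use the standard DW_AT_macros attribute,
   strict pre-5 DWARF falls back to DW_AT_macro_info, and otherwise the
   GNU extension DW_AT_GNU_macros is used.  */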
4104
4105 /* Labels we insert at the beginning of sections, which we can reference
4106 instead of the section names themselves.  */
4107
4108 #ifndef TEXT_SECTION_LABEL
4109 #define TEXT_SECTION_LABEL "Ltext"
4110 #endif
4111 #ifndef COLD_TEXT_SECTION_LABEL
4112 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4113 #endif
4114 #ifndef DEBUG_LINE_SECTION_LABEL
4115 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4116 #endif
4117 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4118 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4119 #endif
4120 #ifndef DEBUG_INFO_SECTION_LABEL
4121 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4122 #endif
4123 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4124 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4125 #endif
4126 #ifndef DEBUG_ABBREV_SECTION_LABEL
4127 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4128 #endif
4129 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4130 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4131 #endif
4132 #ifndef DEBUG_ADDR_SECTION_LABEL
4133 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4134 #endif
4135 #ifndef DEBUG_LOC_SECTION_LABEL
4136 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4137 #endif
4138 #ifndef DEBUG_RANGES_SECTION_LABEL
4139 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4140 #endif
4141 #ifndef DEBUG_MACINFO_SECTION_LABEL
4142 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4143 #endif
4144 #ifndef DEBUG_MACRO_SECTION_LABEL
4145 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4146 #endif
4147 #define SKELETON_COMP_DIE_ABBREV 1
4148 #define SKELETON_TYPE_DIE_ABBREV 2
4149
4150 /* Definitions of defaults for formats and names of various special
4151 (artificial) labels which may be generated within this file (when the -g
4152 option is used and DWARF2_DEBUGGING_INFO is in effect).
4153 If necessary, these may be overridden from within the tm.h file, but
4154 typically, overriding these defaults is unnecessary. */
4155
4156 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4157 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4158 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4159 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4160 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4161 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4162 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4163 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4164 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4165 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4166 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4167 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4168 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4169 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4170 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4171
4172 #ifndef TEXT_END_LABEL
4173 #define TEXT_END_LABEL "Letext"
4174 #endif
4175 #ifndef COLD_END_LABEL
4176 #define COLD_END_LABEL "Letext_cold"
4177 #endif
4178 #ifndef BLOCK_BEGIN_LABEL
4179 #define BLOCK_BEGIN_LABEL "LBB"
4180 #endif
4181 #ifndef BLOCK_INLINE_ENTRY_LABEL
4182 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4183 #endif
4184 #ifndef BLOCK_END_LABEL
4185 #define BLOCK_END_LABEL "LBE"
4186 #endif
4187 #ifndef LINE_CODE_LABEL
4188 #define LINE_CODE_LABEL "LM"
4189 #endif
4190
4191 \f
4192 /* Return the root of the DIE's built for the current compilation unit. */
4193 static dw_die_ref
4194 comp_unit_die (void)
4195 {
4196 if (!single_comp_unit_die)
4197 single_comp_unit_die = gen_compile_unit_die (NULL);
4198 return single_comp_unit_die;
4199 }
4200
4201 /* We allow a language front-end to designate a function that is to be
4202 called to "demangle" any name before it is put into a DIE. */
4203
4204 static const char *(*demangle_name_func) (const char *);
4205
4206 void
4207 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4208 {
4209 demangle_name_func = func;
4210 }
4211
4212 /* Test if rtl node points to a pseudo register. */
4213
4214 static inline int
4215 is_pseudo_reg (const_rtx rtl)
4216 {
4217 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4218 || (GET_CODE (rtl) == SUBREG
4219 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4220 }
4221
4222 /* Return a reference to a type, with its const and volatile qualifiers
4223 removed. */
4224
4225 static inline tree
4226 type_main_variant (tree type)
4227 {
4228 type = TYPE_MAIN_VARIANT (type);
4229
4230 /* ??? There really should be only one main variant among any group of
4231 variants of a given type (and all of the MAIN_VARIANT values for all
4232 members of the group should point to that one type) but sometimes the C
4233 front-end messes this up for array types, so we work around that bug
4234 here. */
4235 if (TREE_CODE (type) == ARRAY_TYPE)
4236 while (type != TYPE_MAIN_VARIANT (type))
4237 type = TYPE_MAIN_VARIANT (type);
4238
4239 return type;
4240 }
4241
4242 /* Return nonzero if the given type node represents a tagged type. */
4243
4244 static inline int
4245 is_tagged_type (const_tree type)
4246 {
4247 enum tree_code code = TREE_CODE (type);
4248
4249 return (code == RECORD_TYPE || code == UNION_TYPE
4250 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4251 }
4252
4253 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4254
4255 static void
4256 get_ref_die_offset_label (char *label, dw_die_ref ref)
4257 {
4258 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4259 }
4260
4261 /* Return die_offset of a DIE reference to a base type. */
4262
4263 static unsigned long int
4264 get_base_type_offset (dw_die_ref ref)
4265 {
4266 if (ref->die_offset)
4267 return ref->die_offset;
4268 if (comp_unit_die ()->die_abbrev)
4269 {
4270 calc_base_type_die_sizes ();
4271 gcc_assert (ref->die_offset);
4272 }
4273 return ref->die_offset;
4274 }
4275
4276 /* Return die_offset of a DIE reference other than base type. */
4277
4278 static unsigned long int
4279 get_ref_die_offset (dw_die_ref ref)
4280 {
4281 gcc_assert (ref->die_offset);
4282 return ref->die_offset;
4283 }
4284
4285 /* Convert a DIE tag into its string name. */
4286
4287 static const char *
4288 dwarf_tag_name (unsigned int tag)
4289 {
4290 const char *name = get_DW_TAG_name (tag);
4291
4292 if (name != NULL)
4293 return name;
4294
4295 return "DW_TAG_<unknown>";
4296 }
4297
4298 /* Convert a DWARF attribute code into its string name. */
4299
4300 static const char *
4301 dwarf_attr_name (unsigned int attr)
4302 {
4303 const char *name;
4304
4305 switch (attr)
4306 {
4307 #if VMS_DEBUGGING_INFO
4308 case DW_AT_HP_prologue:
4309 return "DW_AT_HP_prologue";
4310 #else
4311 case DW_AT_MIPS_loop_unroll_factor:
4312 return "DW_AT_MIPS_loop_unroll_factor";
4313 #endif
4314
4315 #if VMS_DEBUGGING_INFO
4316 case DW_AT_HP_epilogue:
4317 return "DW_AT_HP_epilogue";
4318 #else
4319 case DW_AT_MIPS_stride:
4320 return "DW_AT_MIPS_stride";
4321 #endif
4322 }
4323
4324 name = get_DW_AT_name (attr);
4325
4326 if (name != NULL)
4327 return name;
4328
4329 return "DW_AT_<unknown>";
4330 }
4331
4332 /* Convert a DWARF value form code into its string name. */
4333
4334 static const char *
4335 dwarf_form_name (unsigned int form)
4336 {
4337 const char *name = get_DW_FORM_name (form);
4338
4339 if (name != NULL)
4340 return name;
4341
4342 return "DW_FORM_<unknown>";
4343 }
4344 \f
4345 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4346 instance of an inlined instance of a decl which is local to an inline
4347 function, so we have to trace all of the way back through the origin chain
4348 to find out what sort of node actually served as the original seed for the
4349 given block. */
4350
4351 static tree
4352 decl_ultimate_origin (const_tree decl)
4353 {
4354 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4355 return NULL_TREE;
4356
4357 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4358 we're trying to output the abstract instance of this function. */
4359 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4360 return NULL_TREE;
4361
4362 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4363 most distant ancestor, this should never happen. */
4364 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4365
4366 return DECL_ABSTRACT_ORIGIN (decl);
4367 }
4368
4369 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4370 of a virtual function may refer to a base class, so we check the 'this'
4371 parameter. */
4372
4373 static tree
4374 decl_class_context (tree decl)
4375 {
4376 tree context = NULL_TREE;
4377
4378 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4379 context = DECL_CONTEXT (decl);
4380 else
4381 context = TYPE_MAIN_VARIANT
4382 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4383
4384 if (context && !TYPE_P (context))
4385 context = NULL_TREE;
4386
4387 return context;
4388 }
4389 \f
4390 /* Add an attribute/value pair to a DIE. */
4391
4392 static inline void
4393 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4394 {
4395 /* Maybe this should be an assert? */
4396 if (die == NULL)
4397 return;
4398
4399 if (flag_checking)
4400 {
4401 /* Check we do not add duplicate attrs. Can't use get_AT here
4402 because that recurses to the specification/abstract origin DIE. */
4403 dw_attr_node *a;
4404 unsigned ix;
4405 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4406 gcc_assert (a->dw_attr != attr->dw_attr);
4407 }
4408
4409 vec_safe_reserve (die->die_attr, 1);
4410 vec_safe_push (die->die_attr, *attr);
4411 }
4412
4413 static inline enum dw_val_class
4414 AT_class (dw_attr_node *a)
4415 {
4416 return a->dw_attr_val.val_class;
4417 }
4418
4419 /* Return the index for any attribute that will be referenced with a
4420 DW_FORM_addrx/GNU_addr_index or DW_FORM_strx/GNU_str_index. String
4421 indices are stored in dw_attr_val.v.val_str for reference counting
4422 pruning. */
4423
4424 static inline unsigned int
4425 AT_index (dw_attr_node *a)
4426 {
4427 if (AT_class (a) == dw_val_class_str)
4428 return a->dw_attr_val.v.val_str->index;
4429 else if (a->dw_attr_val.val_entry != NULL)
4430 return a->dw_attr_val.val_entry->index;
4431 return NOT_INDEXED;
4432 }
4433
4434 /* Add a flag value attribute to a DIE. */
4435
4436 static inline void
4437 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4438 {
4439 dw_attr_node attr;
4440
4441 attr.dw_attr = attr_kind;
4442 attr.dw_attr_val.val_class = dw_val_class_flag;
4443 attr.dw_attr_val.val_entry = NULL;
4444 attr.dw_attr_val.v.val_flag = flag;
4445 add_dwarf_attr (die, &attr);
4446 }
4447
4448 static inline unsigned
4449 AT_flag (dw_attr_node *a)
4450 {
4451 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4452 return a->dw_attr_val.v.val_flag;
4453 }
4454
4455 /* Add a signed integer attribute value to a DIE. */
4456
4457 static inline void
4458 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4459 {
4460 dw_attr_node attr;
4461
4462 attr.dw_attr = attr_kind;
4463 attr.dw_attr_val.val_class = dw_val_class_const;
4464 attr.dw_attr_val.val_entry = NULL;
4465 attr.dw_attr_val.v.val_int = int_val;
4466 add_dwarf_attr (die, &attr);
4467 }
4468
4469 static inline HOST_WIDE_INT
4470 AT_int (dw_attr_node *a)
4471 {
4472 gcc_assert (a && (AT_class (a) == dw_val_class_const
4473 || AT_class (a) == dw_val_class_const_implicit));
4474 return a->dw_attr_val.v.val_int;
4475 }
4476
4477 /* Add an unsigned integer attribute value to a DIE. */
4478
4479 static inline void
4480 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4481 unsigned HOST_WIDE_INT unsigned_val)
4482 {
4483 dw_attr_node attr;
4484
4485 attr.dw_attr = attr_kind;
4486 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4487 attr.dw_attr_val.val_entry = NULL;
4488 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4489 add_dwarf_attr (die, &attr);
4490 }
4491
4492 static inline unsigned HOST_WIDE_INT
4493 AT_unsigned (dw_attr_node *a)
4494 {
4495 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4496 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4497 return a->dw_attr_val.v.val_unsigned;
4498 }
4499
4500 /* Add an unsigned wide integer attribute value to a DIE. */
4501
4502 static inline void
4503 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4504 const wide_int& w)
4505 {
4506 dw_attr_node attr;
4507
4508 attr.dw_attr = attr_kind;
4509 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4510 attr.dw_attr_val.val_entry = NULL;
4511 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4512 *attr.dw_attr_val.v.val_wide = w;
4513 add_dwarf_attr (die, &attr);
4514 }
4515
4516 /* Add an unsigned double integer attribute value to a DIE. */
4517
4518 static inline void
4519 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4520 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4521 {
4522 dw_attr_node attr;
4523
4524 attr.dw_attr = attr_kind;
4525 attr.dw_attr_val.val_class = dw_val_class_const_double;
4526 attr.dw_attr_val.val_entry = NULL;
4527 attr.dw_attr_val.v.val_double.high = high;
4528 attr.dw_attr_val.v.val_double.low = low;
4529 add_dwarf_attr (die, &attr);
4530 }
4531
4532 /* Add a floating point attribute value to a DIE and return it. */
4533
4534 static inline void
4535 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4536 unsigned int length, unsigned int elt_size, unsigned char *array)
4537 {
4538 dw_attr_node attr;
4539
4540 attr.dw_attr = attr_kind;
4541 attr.dw_attr_val.val_class = dw_val_class_vec;
4542 attr.dw_attr_val.val_entry = NULL;
4543 attr.dw_attr_val.v.val_vec.length = length;
4544 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4545 attr.dw_attr_val.v.val_vec.array = array;
4546 add_dwarf_attr (die, &attr);
4547 }
4548
4549 /* Add an 8-byte data attribute value to a DIE. */
4550
4551 static inline void
4552 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4553 unsigned char data8[8])
4554 {
4555 dw_attr_node attr;
4556
4557 attr.dw_attr = attr_kind;
4558 attr.dw_attr_val.val_class = dw_val_class_data8;
4559 attr.dw_attr_val.val_entry = NULL;
4560 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4561 add_dwarf_attr (die, &attr);
4562 }
4563
4564 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4565 dwarf_split_debug_info, address attributes in dies destined for the
4566 final executable have force_direct set to avoid using indexed
4567 references. */
4568
4569 static inline void
4570 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4571 bool force_direct)
4572 {
4573 dw_attr_node attr;
4574 char * lbl_id;
4575
4576 lbl_id = xstrdup (lbl_low);
4577 attr.dw_attr = DW_AT_low_pc;
4578 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4579 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4580 if (dwarf_split_debug_info && !force_direct)
4581 attr.dw_attr_val.val_entry
4582 = add_addr_table_entry (lbl_id, ate_kind_label);
4583 else
4584 attr.dw_attr_val.val_entry = NULL;
4585 add_dwarf_attr (die, &attr);
4586
4587 attr.dw_attr = DW_AT_high_pc;
4588 if (dwarf_version < 4)
4589 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4590 else
4591 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4592 lbl_id = xstrdup (lbl_high);
4593 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4594 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4595 && dwarf_split_debug_info && !force_direct)
4596 attr.dw_attr_val.val_entry
4597 = add_addr_table_entry (lbl_id, ate_kind_label);
4598 else
4599 attr.dw_attr_val.val_entry = NULL;
4600 add_dwarf_attr (die, &attr);
4601 }
4602
4603 /* Hash and equality functions for debug_str_hash. */
4604
4605 hashval_t
4606 indirect_string_hasher::hash (indirect_string_node *x)
4607 {
4608 return htab_hash_string (x->str);
4609 }
4610
4611 bool
4612 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4613 {
4614 return strcmp (x1->str, x2) == 0;
4615 }
4616
4617 /* Add STR to the given string hash table. */
4618
4619 static struct indirect_string_node *
4620 find_AT_string_in_table (const char *str,
4621 hash_table<indirect_string_hasher> *table,
4622 enum insert_option insert = INSERT)
4623 {
4624 struct indirect_string_node *node;
4625
4626 indirect_string_node **slot
4627 = table->find_slot_with_hash (str, htab_hash_string (str), insert);
4628 if (*slot == NULL)
4629 {
4630 node = ggc_cleared_alloc<indirect_string_node> ();
4631 node->str = ggc_strdup (str);
4632 *slot = node;
4633 }
4634 else
4635 node = *slot;
4636
4637 node->refcount++;
4638 return node;
4639 }
4640
4641 /* Add STR to the indirect string hash table. */
4642
4643 static struct indirect_string_node *
4644 find_AT_string (const char *str, enum insert_option insert = INSERT)
4645 {
4646 if (! debug_str_hash)
4647 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4648
4649 return find_AT_string_in_table (str, debug_str_hash, insert);
4650 }
4651
4652 /* Add a string attribute value to a DIE. */
4653
4654 static inline void
4655 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4656 {
4657 dw_attr_node attr;
4658 struct indirect_string_node *node;
4659
4660 node = find_AT_string (str);
4661
4662 attr.dw_attr = attr_kind;
4663 attr.dw_attr_val.val_class = dw_val_class_str;
4664 attr.dw_attr_val.val_entry = NULL;
4665 attr.dw_attr_val.v.val_str = node;
4666 add_dwarf_attr (die, &attr);
4667 }
4668
4669 static inline const char *
4670 AT_string (dw_attr_node *a)
4671 {
4672 gcc_assert (a && AT_class (a) == dw_val_class_str);
4673 return a->dw_attr_val.v.val_str->str;
4674 }
4675
4676 /* Call this function directly to bypass AT_string_form's logic to put
4677 the string inline in the die. */
4678
4679 static void
4680 set_indirect_string (struct indirect_string_node *node)
4681 {
4682 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4683 /* Making an already indirect string indirect again is a no-op.  */
4684 if (node->form == DW_FORM_strp
4685 || node->form == DW_FORM_line_strp
4686 || node->form == dwarf_FORM (DW_FORM_strx))
4687 {
4688 gcc_assert (node->label);
4689 return;
4690 }
4691 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4692 ++dw2_string_counter;
4693 node->label = xstrdup (label);
4694
4695 if (!dwarf_split_debug_info)
4696 {
4697 node->form = DW_FORM_strp;
4698 node->index = NOT_INDEXED;
4699 }
4700 else
4701 {
4702 node->form = dwarf_FORM (DW_FORM_strx);
4703 node->index = NO_INDEX_ASSIGNED;
4704 }
4705 }
4706
4707 /* A helper function for dwarf2out_finish, called to reset indirect
4708 string decisions done for early LTO dwarf output before fat object
4709 dwarf output. */
4710
4711 int
4712 reset_indirect_string (indirect_string_node **h, void *)
4713 {
4714 struct indirect_string_node *node = *h;
4715 if (node->form == DW_FORM_strp || node->form == dwarf_FORM (DW_FORM_strx))
4716 {
4717 free (node->label);
4718 node->label = NULL;
4719 node->form = (dwarf_form) 0;
4720 node->index = 0;
4721 }
4722 return 1;
4723 }
4724
4725 /* Find out whether a string should be output inline in DIE
4726 or out-of-line in .debug_str section. */
4727
4728 static enum dwarf_form
4729 find_string_form (struct indirect_string_node *node)
4730 {
4731 unsigned int len;
4732
4733 if (node->form)
4734 return node->form;
4735
4736 len = strlen (node->str) + 1;
4737
4738 /* If the string is shorter than or equal to the size of the reference, it is
4739 always better to put it inline. */
4740 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4741 return node->form = DW_FORM_string;
4742
4743 /* If we cannot expect the linker to merge strings in .debug_str
4744 section, only put it into .debug_str if it is worth even in this
4745 single module. */
4746 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4747 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4748 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4749 return node->form = DW_FORM_string;
4750
4751 set_indirect_string (node);
4752
4753 return node->form;
4754 }
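
/* For instance, with a 4-byte DWARF offset size and no string merging,
   "abc" (4 bytes including the terminating NUL) is always emitted
   inline; a 10-byte string referenced once also stays inline because
   (10 - 4) * 1 <= 10, while the same string referenced twice goes to
   .debug_str because (10 - 4) * 2 > 10.  */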
4755
4756 /* Find out whether the string referenced from the attribute should be
4757 output inline in DIE or out-of-line in .debug_str section. */
4758
4759 static enum dwarf_form
4760 AT_string_form (dw_attr_node *a)
4761 {
4762 gcc_assert (a && AT_class (a) == dw_val_class_str);
4763 return find_string_form (a->dw_attr_val.v.val_str);
4764 }
4765
4766 /* Add a DIE reference attribute value to a DIE. */
4767
4768 static inline void
4769 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4770 {
4771 dw_attr_node attr;
4772 gcc_checking_assert (targ_die != NULL);
4773
4774 /* With LTO we can end up trying to reference something we didn't create
4775 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4776 if (targ_die == NULL)
4777 return;
4778
4779 attr.dw_attr = attr_kind;
4780 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4781 attr.dw_attr_val.val_entry = NULL;
4782 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4783 attr.dw_attr_val.v.val_die_ref.external = 0;
4784 add_dwarf_attr (die, &attr);
4785 }
4786
4787 /* Change DIE reference REF to point to NEW_DIE instead. */
4788
4789 static inline void
4790 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4791 {
4792 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4793 ref->dw_attr_val.v.val_die_ref.die = new_die;
4794 ref->dw_attr_val.v.val_die_ref.external = 0;
4795 }
4796
4797 /* Add an AT_specification attribute to a DIE, and also make the back
4798 pointer from the specification to the definition. */
4799
4800 static inline void
4801 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4802 {
4803 add_AT_die_ref (die, DW_AT_specification, targ_die);
4804 gcc_assert (!targ_die->die_definition);
4805 targ_die->die_definition = die;
4806 }
4807
4808 static inline dw_die_ref
4809 AT_ref (dw_attr_node *a)
4810 {
4811 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4812 return a->dw_attr_val.v.val_die_ref.die;
4813 }
4814
4815 static inline int
4816 AT_ref_external (dw_attr_node *a)
4817 {
4818 if (a && AT_class (a) == dw_val_class_die_ref)
4819 return a->dw_attr_val.v.val_die_ref.external;
4820
4821 return 0;
4822 }
4823
4824 static inline void
4825 set_AT_ref_external (dw_attr_node *a, int i)
4826 {
4827 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4828 a->dw_attr_val.v.val_die_ref.external = i;
4829 }
4830
4831 /* Add a location description attribute value to a DIE. */
4832
4833 static inline void
4834 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4835 {
4836 dw_attr_node attr;
4837
4838 attr.dw_attr = attr_kind;
4839 attr.dw_attr_val.val_class = dw_val_class_loc;
4840 attr.dw_attr_val.val_entry = NULL;
4841 attr.dw_attr_val.v.val_loc = loc;
4842 add_dwarf_attr (die, &attr);
4843 }
4844
4845 static inline dw_loc_descr_ref
4846 AT_loc (dw_attr_node *a)
4847 {
4848 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4849 return a->dw_attr_val.v.val_loc;
4850 }
4851
4852 static inline void
4853 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4854 {
4855 dw_attr_node attr;
4856
4857 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4858 return;
4859
4860 attr.dw_attr = attr_kind;
4861 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4862 attr.dw_attr_val.val_entry = NULL;
4863 attr.dw_attr_val.v.val_loc_list = loc_list;
4864 add_dwarf_attr (die, &attr);
4865 have_location_lists = true;
4866 }
4867
4868 static inline dw_loc_list_ref
4869 AT_loc_list (dw_attr_node *a)
4870 {
4871 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4872 return a->dw_attr_val.v.val_loc_list;
4873 }
4874
4875 /* Add a view list attribute to DIE. It must have a DW_AT_location
4876 attribute, because the view list complements the location list. */
4877
4878 static inline void
4879 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4880 {
4881 dw_attr_node attr;
4882
4883 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4884 return;
4885
4886 attr.dw_attr = attr_kind;
4887 attr.dw_attr_val.val_class = dw_val_class_view_list;
4888 attr.dw_attr_val.val_entry = NULL;
4889 attr.dw_attr_val.v.val_view_list = die;
4890 add_dwarf_attr (die, &attr);
4891 gcc_checking_assert (get_AT (die, DW_AT_location));
4892 gcc_assert (have_location_lists);
4893 }
4894
4895 /* Return a pointer to the location list referenced by the attribute.
4896 If the named attribute is a view list, look up the corresponding
4897 DW_AT_location attribute and return its location list. */
4898
4899 static inline dw_loc_list_ref *
4900 AT_loc_list_ptr (dw_attr_node *a)
4901 {
4902 gcc_assert (a);
4903 switch (AT_class (a))
4904 {
4905 case dw_val_class_loc_list:
4906 return &a->dw_attr_val.v.val_loc_list;
4907 case dw_val_class_view_list:
4908 {
4909 dw_attr_node *l;
4910 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4911 if (!l)
4912 return NULL;
4913 gcc_checking_assert (l + 1 == a);
4914 return AT_loc_list_ptr (l);
4915 }
4916 default:
4917 gcc_unreachable ();
4918 }
4919 }
4920
4921 /* Return the location attribute value associated with a view list
4922 attribute value. */
4923
4924 static inline dw_val_node *
4925 view_list_to_loc_list_val_node (dw_val_node *val)
4926 {
4927 gcc_assert (val->val_class == dw_val_class_view_list);
4928 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4929 if (!loc)
4930 return NULL;
4931 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4932 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4933 return &loc->dw_attr_val;
4934 }
4935
4936 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4937 {
4938 static hashval_t hash (addr_table_entry *);
4939 static bool equal (addr_table_entry *, addr_table_entry *);
4940 };
4941
4942 /* Table of entries into the .debug_addr section. */
4943
4944 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4945
4946 /* Hash an addr_table_entry.  */
4947
4948 hashval_t
4949 addr_hasher::hash (addr_table_entry *a)
4950 {
4951 inchash::hash hstate;
4952 switch (a->kind)
4953 {
4954 case ate_kind_rtx:
4955 hstate.add_int (0);
4956 break;
4957 case ate_kind_rtx_dtprel:
4958 hstate.add_int (1);
4959 break;
4960 case ate_kind_label:
4961 return htab_hash_string (a->addr.label);
4962 default:
4963 gcc_unreachable ();
4964 }
4965 inchash::add_rtx (a->addr.rtl, hstate);
4966 return hstate.end ();
4967 }
4968
4969 /* Determine equality for two addr_table_entries.  */
4970
4971 bool
4972 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4973 {
4974 if (a1->kind != a2->kind)
4975 return 0;
4976 switch (a1->kind)
4977 {
4978 case ate_kind_rtx:
4979 case ate_kind_rtx_dtprel:
4980 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4981 case ate_kind_label:
4982 return strcmp (a1->addr.label, a2->addr.label) == 0;
4983 default:
4984 gcc_unreachable ();
4985 }
4986 }
4987
4988 /* Initialize an addr_table_entry. */
4989
4990 void
4991 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
4992 {
4993 e->kind = kind;
4994 switch (kind)
4995 {
4996 case ate_kind_rtx:
4997 case ate_kind_rtx_dtprel:
4998 e->addr.rtl = (rtx) addr;
4999 break;
5000 case ate_kind_label:
5001 e->addr.label = (char *) addr;
5002 break;
5003 }
5004 e->refcount = 0;
5005 e->index = NO_INDEX_ASSIGNED;
5006 }
5007
5008 /* Add an address table entry for ADDR of kind KIND to the table,
5009 creating it if necessary.  Defer setting an index until output time.  */
5010
5011 static addr_table_entry *
5012 add_addr_table_entry (void *addr, enum ate_kind kind)
5013 {
5014 addr_table_entry *node;
5015 addr_table_entry finder;
5016
5017 gcc_assert (dwarf_split_debug_info);
5018 if (! addr_index_table)
5019 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
5020 init_addr_table_entry (&finder, kind, addr);
5021 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
5022
5023 if (*slot == HTAB_EMPTY_ENTRY)
5024 {
5025 node = ggc_cleared_alloc<addr_table_entry> ();
5026 init_addr_table_entry (node, kind, addr);
5027 *slot = node;
5028 }
5029 else
5030 node = *slot;
5031
5032 node->refcount++;
5033 return node;
5034 }
5035
5036 /* Remove an entry from the addr table by decrementing its refcount.
5037 Strictly, decrementing the refcount would be enough, but the
5038 assertion that the entry is actually in the table has found
5039 bugs. */
5040
5041 static void
5042 remove_addr_table_entry (addr_table_entry *entry)
5043 {
5044 gcc_assert (dwarf_split_debug_info && addr_index_table);
5045 /* After an index is assigned, the table is frozen. */
5046 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5047 entry->refcount--;
5048 }
5049
5050 /* Given a location list, remove all addresses it refers to from the
5051 address_table. */
5052
5053 static void
5054 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5055 {
5056 for (; descr; descr = descr->dw_loc_next)
5057 if (descr->dw_loc_oprnd1.val_entry != NULL)
5058 {
5059 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5060 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5061 }
5062 }
5063
5064 /* A helper function for dwarf2out_finish called through
5065 htab_traverse. Assign an addr_table_entry its index. All entries
5066 must be collected into the table when this function is called,
5067 because the indexing code relies on htab_traverse to traverse nodes
5068 in the same order for each run. */
5069
5070 int
5071 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5072 {
5073 addr_table_entry *node = *h;
5074
5075 /* Don't index unreferenced nodes. */
5076 if (node->refcount == 0)
5077 return 1;
5078
5079 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5080 node->index = *index;
5081 *index += 1;
5082
5083 return 1;
5084 }
5085
5086 /* Add an address constant attribute value to a DIE. When using
5087 dwarf_split_debug_info, address attributes in dies destined for the
5088 final executable should be direct references--setting the parameter
5089 force_direct ensures this behavior. */
5090
5091 static inline void
5092 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5093 bool force_direct)
5094 {
5095 dw_attr_node attr;
5096
5097 attr.dw_attr = attr_kind;
5098 attr.dw_attr_val.val_class = dw_val_class_addr;
5099 attr.dw_attr_val.v.val_addr = addr;
5100 if (dwarf_split_debug_info && !force_direct)
5101 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5102 else
5103 attr.dw_attr_val.val_entry = NULL;
5104 add_dwarf_attr (die, &attr);
5105 }
5106
5107 /* Get the RTX from an address DIE attribute. */
5108
5109 static inline rtx
5110 AT_addr (dw_attr_node *a)
5111 {
5112 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5113 return a->dw_attr_val.v.val_addr;
5114 }
5115
5116 /* Add a file attribute value to a DIE. */
5117
5118 static inline void
5119 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5120 struct dwarf_file_data *fd)
5121 {
5122 dw_attr_node attr;
5123
5124 attr.dw_attr = attr_kind;
5125 attr.dw_attr_val.val_class = dw_val_class_file;
5126 attr.dw_attr_val.val_entry = NULL;
5127 attr.dw_attr_val.v.val_file = fd;
5128 add_dwarf_attr (die, &attr);
5129 }
5130
5131 /* Get the dwarf_file_data from a file DIE attribute. */
5132
5133 static inline struct dwarf_file_data *
5134 AT_file (dw_attr_node *a)
5135 {
5136 gcc_assert (a && (AT_class (a) == dw_val_class_file
5137 || AT_class (a) == dw_val_class_file_implicit));
5138 return a->dw_attr_val.v.val_file;
5139 }
5140
5141 /* Add a symbolic view identifier attribute value to a DIE. */
5142
5143 static inline void
5144 add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
5145 const char *view_label)
5146 {
5147 dw_attr_node attr;
5148
5149 attr.dw_attr = attr_kind;
5150 attr.dw_attr_val.val_class = dw_val_class_symview;
5151 attr.dw_attr_val.val_entry = NULL;
5152 attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
5153 add_dwarf_attr (die, &attr);
5154 }
5155
5156 /* Add a label identifier attribute value to a DIE. */
5157
5158 static inline void
5159 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5160 const char *lbl_id)
5161 {
5162 dw_attr_node attr;
5163
5164 attr.dw_attr = attr_kind;
5165 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5166 attr.dw_attr_val.val_entry = NULL;
5167 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
5168 if (dwarf_split_debug_info)
5169 attr.dw_attr_val.val_entry
5170 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5171 ate_kind_label);
5172 add_dwarf_attr (die, &attr);
5173 }
5174
5175 /* Add a section offset attribute value to a DIE, an offset into the
5176 debug_line section. */
5177
5178 static inline void
5179 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5180 const char *label)
5181 {
5182 dw_attr_node attr;
5183
5184 attr.dw_attr = attr_kind;
5185 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5186 attr.dw_attr_val.val_entry = NULL;
5187 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5188 add_dwarf_attr (die, &attr);
5189 }
5190
5191 /* Add a section offset attribute value to a DIE, an offset into the
5192 debug_macinfo section. */
5193
5194 static inline void
5195 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5196 const char *label)
5197 {
5198 dw_attr_node attr;
5199
5200 attr.dw_attr = attr_kind;
5201 attr.dw_attr_val.val_class = dw_val_class_macptr;
5202 attr.dw_attr_val.val_entry = NULL;
5203 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5204 add_dwarf_attr (die, &attr);
5205 }
5206
5207 /* Add a range_list attribute value to a DIE. When using
5208 dwarf_split_debug_info, address attributes in dies destined for the
5209 final executable should be direct references--setting the parameter
5210 force_direct ensures this behavior. */
5211
5212 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5213 #define RELOCATED_OFFSET (NULL)
5214
5215 static void
5216 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5217 long unsigned int offset, bool force_direct)
5218 {
5219 dw_attr_node attr;
5220
5221 attr.dw_attr = attr_kind;
5222 attr.dw_attr_val.val_class = dw_val_class_range_list;
5223 /* For the range_list attribute, use val_entry to store whether the
5224 offset should follow split-debug-info or normal semantics. This
5225 value is read in output_range_list_offset. */
5226 if (dwarf_split_debug_info && !force_direct)
5227 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5228 else
5229 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5230 attr.dw_attr_val.v.val_offset = offset;
5231 add_dwarf_attr (die, &attr);
5232 }
5233
5234 /* Return the start label of a delta attribute. */
5235
5236 static inline const char *
5237 AT_vms_delta1 (dw_attr_node *a)
5238 {
5239 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5240 return a->dw_attr_val.v.val_vms_delta.lbl1;
5241 }
5242
5243 /* Return the end label of a delta attribute. */
5244
5245 static inline const char *
5246 AT_vms_delta2 (dw_attr_node *a)
5247 {
5248 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5249 return a->dw_attr_val.v.val_vms_delta.lbl2;
5250 }
5251
5252 static inline const char *
5253 AT_lbl (dw_attr_node *a)
5254 {
5255 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5256 || AT_class (a) == dw_val_class_lineptr
5257 || AT_class (a) == dw_val_class_macptr
5258 || AT_class (a) == dw_val_class_loclistsptr
5259 || AT_class (a) == dw_val_class_high_pc));
5260 return a->dw_attr_val.v.val_lbl_id;
5261 }
5262
5263 /* Get the attribute of type attr_kind. */
5264
5265 static dw_attr_node *
5266 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5267 {
5268 dw_attr_node *a;
5269 unsigned ix;
5270 dw_die_ref spec = NULL;
5271
5272 if (! die)
5273 return NULL;
5274
5275 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5276 if (a->dw_attr == attr_kind)
5277 return a;
5278 else if (a->dw_attr == DW_AT_specification
5279 || a->dw_attr == DW_AT_abstract_origin)
5280 spec = AT_ref (a);
5281
5282 if (spec)
5283 return get_AT (spec, attr_kind);
5284
5285 return NULL;
5286 }
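/* Worked example (hypothetical DIEs): if the DW_TAG_subprogram DIE for an
   out-of-class member function definition carries no DW_AT_name of its
   own but has DW_AT_specification pointing at the in-class declaration,
   then get_AT (def_die, DW_AT_name) falls through to the specification
   DIE and returns the name attribute stored there.  DW_AT_abstract_origin
   is followed the same way for concrete instances of inlined functions.  */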
5287
5288 /* Returns the parent of the declaration of DIE. */
5289
5290 static dw_die_ref
5291 get_die_parent (dw_die_ref die)
5292 {
5293 dw_die_ref t;
5294
5295 if (!die)
5296 return NULL;
5297
5298 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5299 || (t = get_AT_ref (die, DW_AT_specification)))
5300 die = t;
5301
5302 return die->die_parent;
5303 }
5304
5305 /* Return the "low pc" attribute value, typically associated with a subprogram
5306 DIE. Return null if the "low pc" attribute is either not present, or if it
5307 cannot be represented as an assembler label identifier. */
5308
5309 static inline const char *
5310 get_AT_low_pc (dw_die_ref die)
5311 {
5312 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5313
5314 return a ? AT_lbl (a) : NULL;
5315 }
5316
5317 /* Return the value of the string attribute designated by ATTR_KIND, or
5318 NULL if it is not present. */
5319
5320 static inline const char *
5321 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5322 {
5323 dw_attr_node *a = get_AT (die, attr_kind);
5324
5325 return a ? AT_string (a) : NULL;
5326 }
5327
5328 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5329 if it is not present. */
5330
5331 static inline int
5332 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5333 {
5334 dw_attr_node *a = get_AT (die, attr_kind);
5335
5336 return a ? AT_flag (a) : 0;
5337 }
5338
5339 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5340 if it is not present. */
5341
5342 static inline unsigned
5343 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5344 {
5345 dw_attr_node *a = get_AT (die, attr_kind);
5346
5347 return a ? AT_unsigned (a) : 0;
5348 }
5349
5350 static inline dw_die_ref
5351 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5352 {
5353 dw_attr_node *a = get_AT (die, attr_kind);
5354
5355 return a ? AT_ref (a) : NULL;
5356 }
5357
5358 static inline struct dwarf_file_data *
5359 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5360 {
5361 dw_attr_node *a = get_AT (die, attr_kind);
5362
5363 return a ? AT_file (a) : NULL;
5364 }
5365
5366 /* Return TRUE if the language is C. */
5367
5368 static inline bool
5369 is_c (void)
5370 {
5371 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5372
5373 return (lang == DW_LANG_C || lang == DW_LANG_C89 || lang == DW_LANG_C99
5374 || lang == DW_LANG_C11 || lang == DW_LANG_ObjC);
5375
5376
5377 }
5378
5379 /* Return TRUE if the language is C++. */
5380
5381 static inline bool
5382 is_cxx (void)
5383 {
5384 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5385
5386 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5387 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5388 }
5389
5390 /* Return TRUE if DECL was created by the C++ frontend. */
5391
5392 static bool
5393 is_cxx (const_tree decl)
5394 {
5395 if (in_lto_p)
5396 {
5397 const_tree context = get_ultimate_context (decl);
5398 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5399 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5400 }
5401 return is_cxx ();
5402 }
5403
5404 /* Return TRUE if the language is Fortran. */
5405
5406 static inline bool
5407 is_fortran (void)
5408 {
5409 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5410
5411 return (lang == DW_LANG_Fortran77
5412 || lang == DW_LANG_Fortran90
5413 || lang == DW_LANG_Fortran95
5414 || lang == DW_LANG_Fortran03
5415 || lang == DW_LANG_Fortran08);
5416 }
5417
5418 static inline bool
5419 is_fortran (const_tree decl)
5420 {
5421 if (in_lto_p)
5422 {
5423 const_tree context = get_ultimate_context (decl);
5424 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5425 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5426 "GNU Fortran", 11) == 0
5427 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5428 "GNU F77") == 0);
5429 }
5430 return is_fortran ();
5431 }
5432
5433 /* Return TRUE if the language is Ada. */
5434
5435 static inline bool
5436 is_ada (void)
5437 {
5438 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5439
5440 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5441 }
5442
5443 /* Return TRUE if the language is D. */
5444
5445 static inline bool
5446 is_dlang (void)
5447 {
5448 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5449
5450 return lang == DW_LANG_D;
5451 }
5452
5453 /* Remove the specified attribute if present. Return TRUE if removal
5454 was successful. */
5455
5456 static bool
5457 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5458 {
5459 dw_attr_node *a;
5460 unsigned ix;
5461
5462 if (! die)
5463 return false;
5464
5465 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5466 if (a->dw_attr == attr_kind)
5467 {
5468 if (AT_class (a) == dw_val_class_str)
5469 if (a->dw_attr_val.v.val_str->refcount)
5470 a->dw_attr_val.v.val_str->refcount--;
5471
5472 /* vec::ordered_remove should help reduce the number of abbrevs
5473 that are needed. */
5474 die->die_attr->ordered_remove (ix);
5475 return true;
5476 }
5477 return false;
5478 }
5479
5480 /* Remove CHILD from its parent. PREV must have the property that
5481 PREV->DIE_SIB == CHILD. Does not alter CHILD. */
5482
5483 static void
5484 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5485 {
5486 gcc_assert (child->die_parent == prev->die_parent);
5487 gcc_assert (prev->die_sib == child);
5488 if (prev == child)
5489 {
5490 gcc_assert (child->die_parent->die_child == child);
5491 prev = NULL;
5492 }
5493 else
5494 prev->die_sib = child->die_sib;
5495 if (child->die_parent->die_child == child)
5496 child->die_parent->die_child = prev;
5497 child->die_sib = NULL;
5498 }
5499
5500 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5501 PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */
5502
5503 static void
5504 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5505 {
5506 dw_die_ref parent = old_child->die_parent;
5507
5508 gcc_assert (parent == prev->die_parent);
5509 gcc_assert (prev->die_sib == old_child);
5510
5511 new_child->die_parent = parent;
5512 if (prev == old_child)
5513 {
5514 gcc_assert (parent->die_child == old_child);
5515 new_child->die_sib = new_child;
5516 }
5517 else
5518 {
5519 prev->die_sib = new_child;
5520 new_child->die_sib = old_child->die_sib;
5521 }
5522 if (old_child->die_parent->die_child == old_child)
5523 old_child->die_parent->die_child = new_child;
5524 old_child->die_sib = NULL;
5525 }
5526
5527 /* Move all children from OLD_PARENT to NEW_PARENT. */
5528
5529 static void
5530 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5531 {
5532 dw_die_ref c;
5533 new_parent->die_child = old_parent->die_child;
5534 old_parent->die_child = NULL;
5535 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5536 }
5537
5538 /* Remove child DIE whose die_tag is TAG. Do nothing if no child
5539 matches TAG. */
5540
5541 static void
5542 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5543 {
5544 dw_die_ref c;
5545
5546 c = die->die_child;
5547 if (c) do {
5548 dw_die_ref prev = c;
5549 c = c->die_sib;
5550 while (c->die_tag == tag)
5551 {
5552 remove_child_with_prev (c, prev);
5553 c->die_parent = NULL;
5554 /* Might have removed every child. */
5555 if (die->die_child == NULL)
5556 return;
5557 c = prev->die_sib;
5558 }
5559 } while (c != die->die_child);
5560 }
5561
5562 /* Add a CHILD_DIE as the last child of DIE. */
5563
5564 static void
5565 add_child_die (dw_die_ref die, dw_die_ref child_die)
5566 {
5567 /* FIXME this should probably be an assert. */
5568 if (! die || ! child_die)
5569 return;
5570 gcc_assert (die != child_die);
5571
5572 child_die->die_parent = die;
5573 if (die->die_child)
5574 {
5575 child_die->die_sib = die->die_child->die_sib;
5576 die->die_child->die_sib = child_die;
5577 }
5578 else
5579 child_die->die_sib = child_die;
5580 die->die_child = child_die;
5581 }
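/* The children of a DIE form a circular, singly linked list:
   DIE->die_child points at the *last* child and each child's die_sib
   points at the next one, wrapping around to the first.  For a parent P
   that already has children A and B, adding C above yields

     P->die_child == C,  A->die_sib == B,  B->die_sib == C,  C->die_sib == A

   which is why the new child is spliced in after the current last child
   and then becomes the new die_child.  */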
5582
5583 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5584
5585 static void
5586 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5587 dw_die_ref after_die)
5588 {
5589 gcc_assert (die
5590 && child_die
5591 && after_die
5592 && die->die_child
5593 && die != child_die);
5594
5595 child_die->die_parent = die;
5596 child_die->die_sib = after_die->die_sib;
5597 after_die->die_sib = child_die;
5598 if (die->die_child == after_die)
5599 die->die_child = child_die;
5600 }
5601
5602 /* Unassociate CHILD from its parent, and make its parent be
5603 NEW_PARENT. */
5604
5605 static void
5606 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5607 {
5608 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5609 if (p->die_sib == child)
5610 {
5611 remove_child_with_prev (child, p);
5612 break;
5613 }
5614 add_child_die (new_parent, child);
5615 }
5616
5617 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5618 is the specification, to the end of PARENT's list of children.
5619 This is done by removing and re-adding it. */
5620
5621 static void
5622 splice_child_die (dw_die_ref parent, dw_die_ref child)
5623 {
5624 /* We want the declaration DIE from inside the class, not the
5625 specification DIE at toplevel. */
5626 if (child->die_parent != parent)
5627 {
5628 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5629
5630 if (tmp)
5631 child = tmp;
5632 }
5633
5634 gcc_assert (child->die_parent == parent
5635 || (child->die_parent
5636 == get_AT_ref (parent, DW_AT_specification)));
5637
5638 reparent_child (child, parent);
5639 }
5640
5641 /* Create and return a new die with TAG_VALUE as tag. */
5642
5643 static inline dw_die_ref
5644 new_die_raw (enum dwarf_tag tag_value)
5645 {
5646 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5647 die->die_tag = tag_value;
5648 return die;
5649 }
5650
5651 /* Create and return a new die with a parent of PARENT_DIE. If
5652 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5653 associated tree T must be supplied to determine parenthood
5654 later. */
5655
5656 static inline dw_die_ref
5657 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5658 {
5659 dw_die_ref die = new_die_raw (tag_value);
5660
5661 if (parent_die != NULL)
5662 add_child_die (parent_die, die);
5663 else
5664 {
5665 limbo_die_node *limbo_node;
5666
5667 /* No DIEs created after early dwarf should end up in limbo,
5668 because the limbo list should not persist past LTO
5669 streaming. */
5670 if (tag_value != DW_TAG_compile_unit
5671 /* These are allowed because they're generated while
5672 breaking out COMDAT units late. */
5673 && tag_value != DW_TAG_type_unit
5674 && tag_value != DW_TAG_skeleton_unit
5675 && !early_dwarf
5676 /* Allow nested functions to live in limbo because they will
5677 only temporarily live there, as decls_for_scope will fix
5678 them up. */
5679 && (TREE_CODE (t) != FUNCTION_DECL
5680 || !decl_function_context (t))
5681 /* Same as nested functions above but for types. Types that
5682 are local to a function will be fixed in
5683 decls_for_scope. */
5684 && (!RECORD_OR_UNION_TYPE_P (t)
5685 || !TYPE_CONTEXT (t)
5686 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5687 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5688 especially in the ltrans stage, but once we implement LTO
5689 dwarf streaming, we should remove this exception. */
5690 && !in_lto_p)
5691 {
5692 fprintf (stderr, "symbol ended up in limbo too late:");
5693 debug_generic_stmt (t);
5694 gcc_unreachable ();
5695 }
5696
5697 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5698 limbo_node->die = die;
5699 limbo_node->created_for = t;
5700 limbo_node->next = limbo_die_list;
5701 limbo_die_list = limbo_node;
5702 }
5703
5704 return die;
5705 }
5706
5707 /* Return the DIE associated with the given type specifier. */
5708
5709 static inline dw_die_ref
5710 lookup_type_die (tree type)
5711 {
5712 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5713 if (die && die->removed)
5714 {
5715 TYPE_SYMTAB_DIE (type) = NULL;
5716 return NULL;
5717 }
5718 return die;
5719 }
5720
5721 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5722 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5723 anonymous type instead of the one of the naming typedef. */
5724
5725 static inline dw_die_ref
5726 strip_naming_typedef (tree type, dw_die_ref type_die)
5727 {
5728 if (type
5729 && TREE_CODE (type) == RECORD_TYPE
5730 && type_die
5731 && type_die->die_tag == DW_TAG_typedef
5732 && is_naming_typedef_decl (TYPE_NAME (type)))
5733 type_die = get_AT_ref (type_die, DW_AT_type);
5734 return type_die;
5735 }
5736
5737 /* Like lookup_type_die, but if type is an anonymous type named by a
5738 typedef[1], return the DIE of the anonymous type instead of the one
5739 of the naming typedef. This is because in gen_typedef_die, we
5740 equated the anonymous struct named by the typedef with the DIE of
5741 the naming typedef. So by default, lookup_type_die on an anonymous
5742 struct yields the DIE of the naming typedef.
5743
5744 [1]: Read the comment of is_naming_typedef_decl to learn about what
5745 a naming typedef is. */
5746
5747 static inline dw_die_ref
5748 lookup_type_die_strip_naming_typedef (tree type)
5749 {
5750 dw_die_ref die = lookup_type_die (type);
5751 return strip_naming_typedef (type, die);
5752 }
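/* Illustrative C example of a naming typedef:

     typedef struct { int x; int y; } point;

   The struct itself is anonymous and "point" is its naming typedef.
   Since gen_typedef_die equates the anonymous struct with the typedef's
   DIE, a plain lookup_type_die on the struct type returns the
   DW_TAG_typedef DIE; the wrapper above strips that and hands back the
   DIE of the underlying anonymous structure instead.  */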
5753
5754 /* Equate a DIE to a given type specifier. */
5755
5756 static inline void
5757 equate_type_number_to_die (tree type, dw_die_ref type_die)
5758 {
5759 TYPE_SYMTAB_DIE (type) = type_die;
5760 }
5761
5762 static dw_die_ref maybe_create_die_with_external_ref (tree);
5763 struct GTY(()) sym_off_pair
5764 {
5765 const char * GTY((skip)) sym;
5766 unsigned HOST_WIDE_INT off;
5767 };
5768 static GTY(()) hash_map<tree, sym_off_pair> *external_die_map;
5769
5770 /* Returns a hash value for X (which really is a die_struct). */
5771
5772 inline hashval_t
5773 decl_die_hasher::hash (die_node *x)
5774 {
5775 return (hashval_t) x->decl_id;
5776 }
5777
5778 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5779
5780 inline bool
5781 decl_die_hasher::equal (die_node *x, tree y)
5782 {
5783 return (x->decl_id == DECL_UID (y));
5784 }
5785
5786 /* Return the DIE associated with a given declaration. */
5787
5788 static inline dw_die_ref
5789 lookup_decl_die (tree decl)
5790 {
5791 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5792 NO_INSERT);
5793 if (!die)
5794 {
5795 if (in_lto_p)
5796 return maybe_create_die_with_external_ref (decl);
5797 return NULL;
5798 }
5799 if ((*die)->removed)
5800 {
5801 decl_die_table->clear_slot (die);
5802 return NULL;
5803 }
5804 return *die;
5805 }
5806
5807
5808 /* Return the DIE associated with BLOCK. */
5809
5810 static inline dw_die_ref
5811 lookup_block_die (tree block)
5812 {
5813 dw_die_ref die = BLOCK_DIE (block);
5814 if (!die && in_lto_p)
5815 return maybe_create_die_with_external_ref (block);
5816 return die;
5817 }
5818
5819 /* Associate DIE with BLOCK. */
5820
5821 static inline void
5822 equate_block_to_die (tree block, dw_die_ref die)
5823 {
5824 BLOCK_DIE (block) = die;
5825 }
5826 #undef BLOCK_DIE
5827
5828
5829 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5830 style reference. Return true if we found one referring to a DIE for
5831 DECL, otherwise return false. */
5832
5833 static bool
5834 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5835 unsigned HOST_WIDE_INT *off)
5836 {
5837 dw_die_ref die;
5838
5839 if (in_lto_p)
5840 {
5841 /* During WPA stage and incremental linking we use a hash-map
5842 to store the decl <-> label + offset map. */
5843 if (!external_die_map)
5844 return false;
5845 sym_off_pair *desc = external_die_map->get (decl);
5846 if (!desc)
5847 return false;
5848 *sym = desc->sym;
5849 *off = desc->off;
5850 return true;
5851 }
5852
5853 if (TREE_CODE (decl) == BLOCK)
5854 die = lookup_block_die (decl);
5855 else
5856 die = lookup_decl_die (decl);
5857 if (!die)
5858 return false;
5859
5860 /* Similar to get_ref_die_offset_label, but using the "correct"
5861 label. */
5862 *off = die->die_offset;
5863 while (die->die_parent)
5864 die = die->die_parent;
5865 /* For the containing CU DIE we compute a die_symbol in
5866 compute_comp_unit_symbol. */
5867 gcc_assert (die->die_tag == DW_TAG_compile_unit
5868 && die->die_id.die_symbol != NULL);
5869 *sym = die->die_id.die_symbol;
5870 return true;
5871 }
5872
5873 /* Add a reference of kind ATTR_KIND to a DIE at SYMBOL + OFFSET to DIE. */
5874
5875 static void
5876 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5877 const char *symbol, HOST_WIDE_INT offset)
5878 {
5879 /* Create a fake DIE that contains the reference. Don't use
5880 new_die because we don't want to end up in the limbo list. */
5881 /* ??? We probably want to share these, thus put a ref to the DIE
5882 we create here to the external_die_map entry. */
5883 dw_die_ref ref = new_die_raw (die->die_tag);
5884 ref->die_id.die_symbol = symbol;
5885 ref->die_offset = offset;
5886 ref->with_offset = 1;
5887 add_AT_die_ref (die, attr_kind, ref);
5888 }
5889
5890 /* Create a DIE for DECL if required and add a reference to a DIE
5891 at SYMBOL + OFFSET which contains attributes dumped early. */
5892
5893 static void
5894 dwarf2out_register_external_die (tree decl, const char *sym,
5895 unsigned HOST_WIDE_INT off)
5896 {
5897 if (debug_info_level == DINFO_LEVEL_NONE)
5898 return;
5899
5900 if (!external_die_map)
5901 external_die_map = hash_map<tree, sym_off_pair>::create_ggc (1000);
5902 gcc_checking_assert (!external_die_map->get (decl));
5903 sym_off_pair p = { IDENTIFIER_POINTER (get_identifier (sym)), off };
5904 external_die_map->put (decl, p);
5905 }
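/* Illustrative sketch (the symbol and offset here are hypothetical):
   during LTO the early debug info for a decl already lives in the
   compile-time object file, so the map records something like

     sym_off_pair p = { "some_cu_die_symbol", 0x2a };

   i.e. the assembler symbol of the early CU DIE plus the byte offset of
   the decl's DIE within it.  Late DIEs created in the LTRANS units then
   refer back to it via add_AT_external_die_ref instead of recreating the
   early attributes.  */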
5906
5907 /* If we have a registered external DIE for DECL return a new DIE for
5908 the concrete instance with an appropriate abstract origin. */
5909
5910 static dw_die_ref
5911 maybe_create_die_with_external_ref (tree decl)
5912 {
5913 if (!external_die_map)
5914 return NULL;
5915 sym_off_pair *desc = external_die_map->get (decl);
5916 if (!desc)
5917 return NULL;
5918
5919 const char *sym = desc->sym;
5920 unsigned HOST_WIDE_INT off = desc->off;
5921
5922 in_lto_p = false;
5923 dw_die_ref die = (TREE_CODE (decl) == BLOCK
5924 ? lookup_block_die (decl) : lookup_decl_die (decl));
5925 gcc_assert (!die);
5926 in_lto_p = true;
5927
5928 tree ctx;
5929 dw_die_ref parent = NULL;
5930 /* Need to lookup a DIE for the decls context - the containing
5931 function or translation unit. */
5932 if (TREE_CODE (decl) == BLOCK)
5933 {
5934 ctx = BLOCK_SUPERCONTEXT (decl);
5935 /* ??? We do not output DIEs for all scopes thus skip as
5936 many DIEs as needed. */
5937 while (TREE_CODE (ctx) == BLOCK
5938 && !lookup_block_die (ctx))
5939 ctx = BLOCK_SUPERCONTEXT (ctx);
5940 }
5941 else
5942 ctx = DECL_CONTEXT (decl);
5943 /* Peel types in the context stack. */
5944 while (ctx && TYPE_P (ctx))
5945 ctx = TYPE_CONTEXT (ctx);
5946 /* Likewise namespaces in case we do not want to emit DIEs for them. */
5947 if (debug_info_level <= DINFO_LEVEL_TERSE)
5948 while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
5949 ctx = DECL_CONTEXT (ctx);
5950 if (ctx)
5951 {
5952 if (TREE_CODE (ctx) == BLOCK)
5953 parent = lookup_block_die (ctx);
5954 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5955 /* Keep the 1:1 association during WPA. */
5956 && !flag_wpa
5957 && flag_incremental_link != INCREMENTAL_LINK_LTO)
5958 /* Otherwise all late annotations go to the main CU which
5959 imports the original CUs. */
5960 parent = comp_unit_die ();
5961 else if (TREE_CODE (ctx) == FUNCTION_DECL
5962 && TREE_CODE (decl) != FUNCTION_DECL
5963 && TREE_CODE (decl) != PARM_DECL
5964 && TREE_CODE (decl) != RESULT_DECL
5965 && TREE_CODE (decl) != BLOCK)
5966 /* Leave function local entities parent determination to when
5967 we process scope vars. */
5968 ;
5969 else
5970 parent = lookup_decl_die (ctx);
5971 }
5972 else
5973 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5974 Handle this case gracefully by globalizing stuff. */
5975 parent = comp_unit_die ();
5976 /* Create a DIE "stub". */
5977 switch (TREE_CODE (decl))
5978 {
5979 case TRANSLATION_UNIT_DECL:
5980 {
5981 die = comp_unit_die ();
5982 /* We re-target all CU decls to the LTRANS CU DIE, so no need
5983 to create a DIE for the original CUs. */
5984 return die;
5985 }
5986 case NAMESPACE_DECL:
5987 if (is_fortran (decl))
5988 die = new_die (DW_TAG_module, parent, decl);
5989 else
5990 die = new_die (DW_TAG_namespace, parent, decl);
5991 break;
5992 case FUNCTION_DECL:
5993 die = new_die (DW_TAG_subprogram, parent, decl);
5994 break;
5995 case VAR_DECL:
5996 die = new_die (DW_TAG_variable, parent, decl);
5997 break;
5998 case RESULT_DECL:
5999 die = new_die (DW_TAG_variable, parent, decl);
6000 break;
6001 case PARM_DECL:
6002 die = new_die (DW_TAG_formal_parameter, parent, decl);
6003 break;
6004 case CONST_DECL:
6005 die = new_die (DW_TAG_constant, parent, decl);
6006 break;
6007 case LABEL_DECL:
6008 die = new_die (DW_TAG_label, parent, decl);
6009 break;
6010 case BLOCK:
6011 die = new_die (DW_TAG_lexical_block, parent, decl);
6012 break;
6013 default:
6014 gcc_unreachable ();
6015 }
6016 if (TREE_CODE (decl) == BLOCK)
6017 equate_block_to_die (decl, die);
6018 else
6019 equate_decl_number_to_die (decl, die);
6020
6021 add_desc_attribute (die, decl);
6022
6023 /* Add a reference to the DIE providing early debug at $sym + off. */
6024 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
6025
6026 return die;
6027 }
6028
6029 /* Returns a hash value for X (which really is a var_loc_list). */
6030
6031 inline hashval_t
6032 decl_loc_hasher::hash (var_loc_list *x)
6033 {
6034 return (hashval_t) x->decl_id;
6035 }
6036
6037 /* Return nonzero if decl_id of var_loc_list X is the same as
6038 UID of decl *Y. */
6039
6040 inline bool
6041 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
6042 {
6043 return (x->decl_id == DECL_UID (y));
6044 }
6045
6046 /* Return the var_loc list associated with a given declaration. */
6047
6048 static inline var_loc_list *
6049 lookup_decl_loc (const_tree decl)
6050 {
6051 if (!decl_loc_table)
6052 return NULL;
6053 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
6054 }
6055
6056 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
6057
6058 inline hashval_t
6059 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
6060 {
6061 return (hashval_t) x->decl_id;
6062 }
6063
6064 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
6065 UID of decl *Y. */
6066
6067 inline bool
6068 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
6069 {
6070 return (x->decl_id == DECL_UID (y));
6071 }
6072
6073 /* Equate a DIE to a particular declaration. */
6074
6075 static void
6076 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6077 {
6078 unsigned int decl_id = DECL_UID (decl);
6079
6080 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6081 decl_die->decl_id = decl_id;
6082 }
6083
6084 /* Return how many bits the PIECE EXPR_LIST covers. */
6085
6086 static HOST_WIDE_INT
6087 decl_piece_bitsize (rtx piece)
6088 {
6089 int ret = (int) GET_MODE (piece);
6090 if (ret)
6091 return ret;
6092 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6093 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6094 return INTVAL (XEXP (XEXP (piece, 0), 0));
6095 }
6096
6097 /* Return pointer to the location of location note in PIECE EXPR_LIST. */
6098
6099 static rtx *
6100 decl_piece_varloc_ptr (rtx piece)
6101 {
6102 if ((int) GET_MODE (piece))
6103 return &XEXP (piece, 0);
6104 else
6105 return &XEXP (XEXP (piece, 0), 1);
6106 }
6107
6108 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6109 NEXT is the chain of following piece nodes. */
6110
6111 static rtx_expr_list *
6112 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6113 {
6114 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6115 return alloc_EXPR_LIST (bitsize, loc_note, next);
6116 else
6117 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6118 GEN_INT (bitsize),
6119 loc_note), next);
6120 }
6121
6122 /* Return rtx that should be stored into loc field for
6123 LOC_NOTE and BITPOS/BITSIZE. */
6124
6125 static rtx
6126 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6127 HOST_WIDE_INT bitsize)
6128 {
6129 if (bitsize != -1)
6130 {
6131 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6132 if (bitpos != 0)
6133 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6134 }
6135 return loc_note;
6136 }
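/* Worked example (hypothetical values): for a location note NOTE that
   describes bits [32, 64) of an SRA-decomposed variable,
   construct_piece_list (NOTE, 32, 32) builds the two-node list

     EXPR_LIST (bitsize 32, loc NULL_RTX)    <- 32 bits of leading padding
       -> EXPR_LIST (bitsize 32, loc NOTE)   <- the piece itself

   where each node's machine-mode field is reused to hold the piece's bit
   size, as decl_piece_bitsize reads back; bit sizes outside the
   (0, MAX_MACHINE_MODE] range are stored via a CONCAT of (bitsize, loc)
   instead.  */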
6137
6138 /* This function either modifies location piece list *DEST in
6139 place (if SRC and INNER are NULL), or copies location piece list
6140 *SRC to *DEST while modifying it. The location at BITPOS is modified
6141 to contain LOC_NOTE; any pieces overlapping it are removed (or, when
6142 copying, simply not copied), and padding around it is added if needed.
6143 When modifying in place, DEST should point to EXPR_LIST where
6144 earlier pieces cover PIECE_BITPOS bits, when copying SRC points
6145 to the start of the whole list and INNER points to the EXPR_LIST
6146 where earlier pieces cover PIECE_BITPOS bits. */
6147
6148 static void
6149 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6150 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6151 HOST_WIDE_INT bitsize, rtx loc_note)
6152 {
6153 HOST_WIDE_INT diff;
6154 bool copy = inner != NULL;
6155
6156 if (copy)
6157 {
6158 /* First copy all nodes preceding the current bitpos. */
6159 while (src != inner)
6160 {
6161 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6162 decl_piece_bitsize (*src), NULL_RTX);
6163 dest = &XEXP (*dest, 1);
6164 src = &XEXP (*src, 1);
6165 }
6166 }
6167 /* Add padding if needed. */
6168 if (bitpos != piece_bitpos)
6169 {
6170 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6171 copy ? NULL_RTX : *dest);
6172 dest = &XEXP (*dest, 1);
6173 }
6174 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6175 {
6176 gcc_assert (!copy);
6177 /* A piece with the correct bitpos and bitsize already exists;
6178 just update its location and return. */
6179 *decl_piece_varloc_ptr (*dest) = loc_note;
6180 return;
6181 }
6182 /* Add the piece that changed. */
6183 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6184 dest = &XEXP (*dest, 1);
6185 /* Skip over pieces that overlap it. */
6186 diff = bitpos - piece_bitpos + bitsize;
6187 if (!copy)
6188 src = dest;
6189 while (diff > 0 && *src)
6190 {
6191 rtx piece = *src;
6192 diff -= decl_piece_bitsize (piece);
6193 if (copy)
6194 src = &XEXP (piece, 1);
6195 else
6196 {
6197 *src = XEXP (piece, 1);
6198 free_EXPR_LIST_node (piece);
6199 }
6200 }
6201 /* Add padding if needed. */
6202 if (diff < 0 && *src)
6203 {
6204 if (!copy)
6205 dest = src;
6206 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6207 dest = &XEXP (*dest, 1);
6208 }
6209 if (!copy)
6210 return;
6211 /* Finally copy all nodes following it. */
6212 while (*src)
6213 {
6214 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6215 decl_piece_bitsize (*src), NULL_RTX);
6216 dest = &XEXP (*dest, 1);
6217 src = &XEXP (*src, 1);
6218 }
6219 }
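/* Worked example (hypothetical notes): suppose LAST->loc already holds
   three 32-bit pieces with notes N0, N1 and N2, and a new note M arrives
   for bits [32, 64).  In the in-place case the caller passes DEST
   pointing at the second node with PIECE_BITPOS == 32; an exactly
   matching 32-bit piece already exists there, so its location is simply
   overwritten with M.  Had M covered bits [32, 96) instead, the old
   second and third pieces would be unlinked and freed and one new 64-bit
   piece holding M put in their place.  */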
6220
6221 /* Add a variable location node to the linked list for DECL. */
6222
6223 static struct var_loc_node *
6224 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6225 {
6226 unsigned int decl_id;
6227 var_loc_list *temp;
6228 struct var_loc_node *loc = NULL;
6229 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6230
6231 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6232 {
6233 tree realdecl = DECL_DEBUG_EXPR (decl);
6234 if (handled_component_p (realdecl)
6235 || (TREE_CODE (realdecl) == MEM_REF
6236 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6237 {
6238 bool reverse;
6239 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6240 &bitsize, &reverse);
6241 if (!innerdecl
6242 || !DECL_P (innerdecl)
6243 || DECL_IGNORED_P (innerdecl)
6244 || TREE_STATIC (innerdecl)
6245 || bitsize == 0
6246 || bitpos + bitsize > 256)
6247 return NULL;
6248 decl = innerdecl;
6249 }
6250 }
6251
6252 decl_id = DECL_UID (decl);
6253 var_loc_list **slot
6254 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6255 if (*slot == NULL)
6256 {
6257 temp = ggc_cleared_alloc<var_loc_list> ();
6258 temp->decl_id = decl_id;
6259 *slot = temp;
6260 }
6261 else
6262 temp = *slot;
6263
6264 /* For PARM_DECLs try to keep around the original incoming value,
6265 even if that means we'll emit a zero-range .debug_loc entry. */
6266 if (temp->last
6267 && temp->first == temp->last
6268 && TREE_CODE (decl) == PARM_DECL
6269 && NOTE_P (temp->first->loc)
6270 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6271 && DECL_INCOMING_RTL (decl)
6272 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6273 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6274 == GET_CODE (DECL_INCOMING_RTL (decl))
6275 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6276 && (bitsize != -1
6277 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6278 NOTE_VAR_LOCATION_LOC (loc_note))
6279 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6280 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6281 {
6282 loc = ggc_cleared_alloc<var_loc_node> ();
6283 temp->first->next = loc;
6284 temp->last = loc;
6285 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6286 }
6287 else if (temp->last)
6288 {
6289 struct var_loc_node *last = temp->last, *unused = NULL;
6290 rtx *piece_loc = NULL, last_loc_note;
6291 HOST_WIDE_INT piece_bitpos = 0;
6292 if (last->next)
6293 {
6294 last = last->next;
6295 gcc_assert (last->next == NULL);
6296 }
6297 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6298 {
6299 piece_loc = &last->loc;
6300 do
6301 {
6302 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6303 if (piece_bitpos + cur_bitsize > bitpos)
6304 break;
6305 piece_bitpos += cur_bitsize;
6306 piece_loc = &XEXP (*piece_loc, 1);
6307 }
6308 while (*piece_loc);
6309 }
6310 /* TEMP->LAST here is a pointer either to the last-but-one or to the
6311 last element in the chained list; LAST is a pointer to the
6312 last element. */
6313 if (label && strcmp (last->label, label) == 0 && last->view == view)
6314 {
6315 /* For SRA optimized variables if there weren't any real
6316 insns since last note, just modify the last node. */
6317 if (piece_loc != NULL)
6318 {
6319 adjust_piece_list (piece_loc, NULL, NULL,
6320 bitpos, piece_bitpos, bitsize, loc_note);
6321 return NULL;
6322 }
6323 /* If the last note doesn't cover any instructions, remove it. */
6324 if (temp->last != last)
6325 {
6326 temp->last->next = NULL;
6327 unused = last;
6328 last = temp->last;
6329 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6330 }
6331 else
6332 {
6333 gcc_assert (temp->first == temp->last
6334 || (temp->first->next == temp->last
6335 && TREE_CODE (decl) == PARM_DECL));
6336 memset (temp->last, '\0', sizeof (*temp->last));
6337 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6338 return temp->last;
6339 }
6340 }
6341 if (bitsize == -1 && NOTE_P (last->loc))
6342 last_loc_note = last->loc;
6343 else if (piece_loc != NULL
6344 && *piece_loc != NULL_RTX
6345 && piece_bitpos == bitpos
6346 && decl_piece_bitsize (*piece_loc) == bitsize)
6347 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6348 else
6349 last_loc_note = NULL_RTX;
6350 /* If the current location is the same as the end of the list,
6351 and either both or neither of the locations is uninitialized,
6352 we have nothing to do. */
6353 if (last_loc_note == NULL_RTX
6354 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6355 NOTE_VAR_LOCATION_LOC (loc_note)))
6356 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6357 != NOTE_VAR_LOCATION_STATUS (loc_note))
6358 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6359 == VAR_INIT_STATUS_UNINITIALIZED)
6360 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6361 == VAR_INIT_STATUS_UNINITIALIZED))))
6362 {
6363 /* Add LOC to the end of list and update LAST. If the last
6364 element of the list has been removed above, reuse its
6365 memory for the new node, otherwise allocate a new one. */
6366 if (unused)
6367 {
6368 loc = unused;
6369 memset (loc, '\0', sizeof (*loc));
6370 }
6371 else
6372 loc = ggc_cleared_alloc<var_loc_node> ();
6373 if (bitsize == -1 || piece_loc == NULL)
6374 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6375 else
6376 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6377 bitpos, piece_bitpos, bitsize, loc_note);
6378 last->next = loc;
6379 /* Ensure TEMP->LAST will point either to the new last but one
6380 element of the chain, or to the last element in it. */
6381 if (last != temp->last)
6382 temp->last = last;
6383 }
6384 else if (unused)
6385 ggc_free (unused);
6386 }
6387 else
6388 {
6389 loc = ggc_cleared_alloc<var_loc_node> ();
6390 temp->first = loc;
6391 temp->last = loc;
6392 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6393 }
6394 return loc;
6395 }
6396 \f
6397 /* Keep track of the number of spaces used to indent the
6398 output of the debugging routines that print the structure of
6399 the DIE internal representation. */
6400 static int print_indent;
6401
6402 /* Indent the line by the number of spaces given by print_indent. */
6403
6404 static inline void
6405 print_spaces (FILE *outfile)
6406 {
6407 fprintf (outfile, "%*s", print_indent, "");
6408 }
6409
6410 /* Print a type signature in hex. */
6411
6412 static inline void
6413 print_signature (FILE *outfile, char *sig)
6414 {
6415 int i;
6416
6417 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6418 fprintf (outfile, "%02x", sig[i] & 0xff);
6419 }
6420
6421 static inline void
6422 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6423 {
6424 if (discr_value->pos)
6425 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6426 else
6427 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6428 }
6429
6430 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6431
6432 /* Print the value associated to the VAL DWARF value node to OUTFILE. If
6433 RECURSE, output location descriptor operations. */
6434
6435 static void
6436 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6437 {
6438 switch (val->val_class)
6439 {
6440 case dw_val_class_addr:
6441 fprintf (outfile, "address");
6442 break;
6443 case dw_val_class_offset:
6444 fprintf (outfile, "offset");
6445 break;
6446 case dw_val_class_loc:
6447 fprintf (outfile, "location descriptor");
6448 if (val->v.val_loc == NULL)
6449 fprintf (outfile, " -> <null>\n");
6450 else if (recurse)
6451 {
6452 fprintf (outfile, ":\n");
6453 print_indent += 4;
6454 print_loc_descr (val->v.val_loc, outfile);
6455 print_indent -= 4;
6456 }
6457 else
6458 {
6459 if (flag_dump_noaddr || flag_dump_unnumbered)
6460 fprintf (outfile, " #\n");
6461 else
6462 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6463 }
6464 break;
6465 case dw_val_class_loc_list:
6466 fprintf (outfile, "location list -> label:%s",
6467 val->v.val_loc_list->ll_symbol);
6468 break;
6469 case dw_val_class_view_list:
6470 val = view_list_to_loc_list_val_node (val);
6471 fprintf (outfile, "location list with views -> labels:%s and %s",
6472 val->v.val_loc_list->ll_symbol,
6473 val->v.val_loc_list->vl_symbol);
6474 break;
6475 case dw_val_class_range_list:
6476 fprintf (outfile, "range list");
6477 break;
6478 case dw_val_class_const:
6479 case dw_val_class_const_implicit:
6480 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6481 break;
6482 case dw_val_class_unsigned_const:
6483 case dw_val_class_unsigned_const_implicit:
6484 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6485 break;
6486 case dw_val_class_const_double:
6487 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6488 HOST_WIDE_INT_PRINT_UNSIGNED")",
6489 val->v.val_double.high,
6490 val->v.val_double.low);
6491 break;
6492 case dw_val_class_wide_int:
6493 {
6494 int i = val->v.val_wide->get_len ();
6495 fprintf (outfile, "constant (");
6496 gcc_assert (i > 0);
6497 if (val->v.val_wide->elt (i - 1) == 0)
6498 fprintf (outfile, "0x");
6499 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6500 val->v.val_wide->elt (--i));
6501 while (--i >= 0)
6502 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6503 val->v.val_wide->elt (i));
6504 fprintf (outfile, ")");
6505 break;
6506 }
6507 case dw_val_class_vec:
6508 fprintf (outfile, "floating-point or vector constant");
6509 break;
6510 case dw_val_class_flag:
6511 fprintf (outfile, "%u", val->v.val_flag);
6512 break;
6513 case dw_val_class_die_ref:
6514 if (val->v.val_die_ref.die != NULL)
6515 {
6516 dw_die_ref die = val->v.val_die_ref.die;
6517
6518 if (die->comdat_type_p)
6519 {
6520 fprintf (outfile, "die -> signature: ");
6521 print_signature (outfile,
6522 die->die_id.die_type_node->signature);
6523 }
6524 else if (die->die_id.die_symbol)
6525 {
6526 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6527 if (die->with_offset)
6528 fprintf (outfile, " + %ld", die->die_offset);
6529 }
6530 else
6531 fprintf (outfile, "die -> %ld", die->die_offset);
6532 if (flag_dump_noaddr || flag_dump_unnumbered)
6533 fprintf (outfile, " #");
6534 else
6535 fprintf (outfile, " (%p)", (void *) die);
6536 }
6537 else
6538 fprintf (outfile, "die -> <null>");
6539 break;
6540 case dw_val_class_vms_delta:
6541 fprintf (outfile, "delta: @slotcount(%s-%s)",
6542 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6543 break;
6544 case dw_val_class_symview:
6545 fprintf (outfile, "view: %s", val->v.val_symbolic_view);
6546 break;
6547 case dw_val_class_lbl_id:
6548 case dw_val_class_lineptr:
6549 case dw_val_class_macptr:
6550 case dw_val_class_loclistsptr:
6551 case dw_val_class_high_pc:
6552 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6553 break;
6554 case dw_val_class_str:
6555 if (val->v.val_str->str != NULL)
6556 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6557 else
6558 fprintf (outfile, "<null>");
6559 break;
6560 case dw_val_class_file:
6561 case dw_val_class_file_implicit:
6562 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6563 val->v.val_file->emitted_number);
6564 break;
6565 case dw_val_class_data8:
6566 {
6567 int i;
6568
6569 for (i = 0; i < 8; i++)
6570 fprintf (outfile, "%02x", val->v.val_data8[i]);
6571 break;
6572 }
6573 case dw_val_class_discr_value:
6574 print_discr_value (outfile, &val->v.val_discr_value);
6575 break;
6576 case dw_val_class_discr_list:
6577 for (dw_discr_list_ref node = val->v.val_discr_list;
6578 node != NULL;
6579 node = node->dw_discr_next)
6580 {
6581 if (node->dw_discr_range)
6582 {
6583 print_discr_value (outfile, &node->dw_discr_lower_bound);
6584 fprintf (outfile, " .. ");
6585 print_discr_value (outfile, &node->dw_discr_upper_bound);
6586 }
6587 else
6588 print_discr_value (outfile, &node->dw_discr_lower_bound);
6589
6590 if (node->dw_discr_next != NULL)
6591 fprintf (outfile, " | ");
6592 }
break;
6593 default:
6594 break;
6595 }
6596 }
6597
6598 /* Likewise, for a DIE attribute. */
6599
6600 static void
6601 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6602 {
6603 print_dw_val (&a->dw_attr_val, recurse, outfile);
6604 }
6605
6606
6607 /* Print the list of operands in the LOC location description to OUTFILE. This
6608 routine is a debugging aid only. */
6609
6610 static void
6611 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6612 {
6613 dw_loc_descr_ref l = loc;
6614
6615 if (loc == NULL)
6616 {
6617 print_spaces (outfile);
6618 fprintf (outfile, "<null>\n");
6619 return;
6620 }
6621
6622 for (l = loc; l != NULL; l = l->dw_loc_next)
6623 {
6624 print_spaces (outfile);
6625 if (flag_dump_noaddr || flag_dump_unnumbered)
6626 fprintf (outfile, "#");
6627 else
6628 fprintf (outfile, "(%p)", (void *) l);
6629 fprintf (outfile, " %s",
6630 dwarf_stack_op_name (l->dw_loc_opc));
6631 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6632 {
6633 fprintf (outfile, " ");
6634 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6635 }
6636 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6637 {
6638 fprintf (outfile, ", ");
6639 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6640 }
6641 fprintf (outfile, "\n");
6642 }
6643 }
6644
6645 /* Print the information associated with a given DIE, and its children.
6646 This routine is a debugging aid only. */
6647
6648 static void
6649 print_die (dw_die_ref die, FILE *outfile)
6650 {
6651 dw_attr_node *a;
6652 dw_die_ref c;
6653 unsigned ix;
6654
6655 print_spaces (outfile);
6656 fprintf (outfile, "DIE %4ld: %s ",
6657 die->die_offset, dwarf_tag_name (die->die_tag));
6658 if (flag_dump_noaddr || flag_dump_unnumbered)
6659 fprintf (outfile, "#\n");
6660 else
6661 fprintf (outfile, "(%p)\n", (void*) die);
6662 print_spaces (outfile);
6663 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6664 fprintf (outfile, " offset: %ld", die->die_offset);
6665 fprintf (outfile, " mark: %d\n", die->die_mark);
6666
6667 if (die->comdat_type_p)
6668 {
6669 print_spaces (outfile);
6670 fprintf (outfile, " signature: ");
6671 print_signature (outfile, die->die_id.die_type_node->signature);
6672 fprintf (outfile, "\n");
6673 }
6674
6675 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6676 {
6677 print_spaces (outfile);
6678 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6679
6680 print_attribute (a, true, outfile);
6681 fprintf (outfile, "\n");
6682 }
6683
6684 if (die->die_child != NULL)
6685 {
6686 print_indent += 4;
6687 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6688 print_indent -= 4;
6689 }
6690 if (print_indent == 0)
6691 fprintf (outfile, "\n");
6692 }
6693
6694 /* Print the list of operations in the LOC location description. */
6695
6696 DEBUG_FUNCTION void
6697 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6698 {
6699 print_loc_descr (loc, stderr);
6700 }
6701
6702 /* Print the information collected for a given DIE. */
6703
6704 DEBUG_FUNCTION void
6705 debug_dwarf_die (dw_die_ref die)
6706 {
6707 print_die (die, stderr);
6708 }
6709
6710 DEBUG_FUNCTION void
6711 debug (die_struct &ref)
6712 {
6713 print_die (&ref, stderr);
6714 }
6715
6716 DEBUG_FUNCTION void
6717 debug (die_struct *ptr)
6718 {
6719 if (ptr)
6720 debug (*ptr);
6721 else
6722 fprintf (stderr, "<nil>\n");
6723 }
6724
6725
6726 /* Print all DWARF information collected for the compilation unit.
6727 This routine is a debugging aid only. */
6728
6729 DEBUG_FUNCTION void
6730 debug_dwarf (void)
6731 {
6732 print_indent = 0;
6733 print_die (comp_unit_die (), stderr);
6734 }
6735
6736 /* Verify the DIE tree structure. */
6737
6738 DEBUG_FUNCTION void
6739 verify_die (dw_die_ref die)
6740 {
6741 gcc_assert (!die->die_mark);
6742 if (die->die_parent == NULL
6743 && die->die_sib == NULL)
6744 return;
6745 /* Verify the die_sib list is cyclic. */
6746 dw_die_ref x = die;
6747 do
6748 {
6749 x->die_mark = 1;
6750 x = x->die_sib;
6751 }
6752 while (x && !x->die_mark);
6753 gcc_assert (x == die);
6754 x = die;
6755 do
6756 {
6757 /* Verify all dies have the same parent. */
6758 gcc_assert (x->die_parent == die->die_parent);
6759 if (x->die_child)
6760 {
6761 /* Verify the child has the proper parent and recurse. */
6762 gcc_assert (x->die_child->die_parent == x);
6763 verify_die (x->die_child);
6764 }
6765 x->die_mark = 0;
6766 x = x->die_sib;
6767 }
6768 while (x && x->die_mark);
6769 }
6770
6771 /* Sanity checks on DIEs. */
6772
6773 static void
6774 check_die (dw_die_ref die)
6775 {
6776 unsigned ix;
6777 dw_attr_node *a;
6778 bool inline_found = false;
6779 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6780 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6781 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6782 {
6783 switch (a->dw_attr)
6784 {
6785 case DW_AT_inline:
6786 if (a->dw_attr_val.v.val_unsigned)
6787 inline_found = true;
6788 break;
6789 case DW_AT_location:
6790 ++n_location;
6791 break;
6792 case DW_AT_low_pc:
6793 ++n_low_pc;
6794 break;
6795 case DW_AT_high_pc:
6796 ++n_high_pc;
6797 break;
6798 case DW_AT_artificial:
6799 ++n_artificial;
6800 break;
6801 case DW_AT_decl_column:
6802 ++n_decl_column;
6803 break;
6804 case DW_AT_decl_line:
6805 ++n_decl_line;
6806 break;
6807 case DW_AT_decl_file:
6808 ++n_decl_file;
6809 break;
6810 default:
6811 break;
6812 }
6813 }
6814 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6815 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6816 {
6817 fprintf (stderr, "Duplicate attributes in DIE:\n");
6818 debug_dwarf_die (die);
6819 gcc_unreachable ();
6820 }
6821 if (inline_found)
6822 {
6823 /* A debugging information entry that is a member of an abstract
6824 instance tree [that has DW_AT_inline] should not contain any
6825 attributes which describe aspects of the subroutine which vary
6826 between distinct inlined expansions or distinct out-of-line
6827 expansions. */
6828 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6829 gcc_assert (a->dw_attr != DW_AT_low_pc
6830 && a->dw_attr != DW_AT_high_pc
6831 && a->dw_attr != DW_AT_location
6832 && a->dw_attr != DW_AT_frame_base
6833 && a->dw_attr != DW_AT_call_all_calls
6834 && a->dw_attr != DW_AT_GNU_all_call_sites);
6835 }
6836 }
6837 \f
6838 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6839 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6840 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6841
6842 /* Calculate the checksum of a location expression. */
6843
6844 static inline void
6845 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6846 {
6847 int tem;
6848 inchash::hash hstate;
6849 hashval_t hash;
6850
6851 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6852 CHECKSUM (tem);
6853 hash_loc_operands (loc, hstate);
6854 hash = hstate.end();
6855 CHECKSUM (hash);
6856 }
6857
6858 /* Calculate the checksum of an attribute. */
6859
6860 static void
6861 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6862 {
6863 dw_loc_descr_ref loc;
6864 rtx r;
6865
6866 CHECKSUM (at->dw_attr);
6867
6868 /* We don't care that this was compiled with a different compiler
6869 snapshot; if the output is the same, that's what matters. */
6870 if (at->dw_attr == DW_AT_producer)
6871 return;
6872
6873 switch (AT_class (at))
6874 {
6875 case dw_val_class_const:
6876 case dw_val_class_const_implicit:
6877 CHECKSUM (at->dw_attr_val.v.val_int);
6878 break;
6879 case dw_val_class_unsigned_const:
6880 case dw_val_class_unsigned_const_implicit:
6881 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6882 break;
6883 case dw_val_class_const_double:
6884 CHECKSUM (at->dw_attr_val.v.val_double);
6885 break;
6886 case dw_val_class_wide_int:
6887 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6888 get_full_len (*at->dw_attr_val.v.val_wide)
6889 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6890 break;
6891 case dw_val_class_vec:
6892 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6893 (at->dw_attr_val.v.val_vec.length
6894 * at->dw_attr_val.v.val_vec.elt_size));
6895 break;
6896 case dw_val_class_flag:
6897 CHECKSUM (at->dw_attr_val.v.val_flag);
6898 break;
6899 case dw_val_class_str:
6900 CHECKSUM_STRING (AT_string (at));
6901 break;
6902
6903 case dw_val_class_addr:
6904 r = AT_addr (at);
6905 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6906 CHECKSUM_STRING (XSTR (r, 0));
6907 break;
6908
6909 case dw_val_class_offset:
6910 CHECKSUM (at->dw_attr_val.v.val_offset);
6911 break;
6912
6913 case dw_val_class_loc:
6914 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6915 loc_checksum (loc, ctx);
6916 break;
6917
6918 case dw_val_class_die_ref:
6919 die_checksum (AT_ref (at), ctx, mark);
6920 break;
6921
6922 case dw_val_class_fde_ref:
6923 case dw_val_class_vms_delta:
6924 case dw_val_class_symview:
6925 case dw_val_class_lbl_id:
6926 case dw_val_class_lineptr:
6927 case dw_val_class_macptr:
6928 case dw_val_class_loclistsptr:
6929 case dw_val_class_high_pc:
6930 break;
6931
6932 case dw_val_class_file:
6933 case dw_val_class_file_implicit:
6934 CHECKSUM_STRING (AT_file (at)->filename);
6935 break;
6936
6937 case dw_val_class_data8:
6938 CHECKSUM (at->dw_attr_val.v.val_data8);
6939 break;
6940
6941 default:
6942 break;
6943 }
6944 }
6945
6946 /* Calculate the checksum of a DIE. */
6947
6948 static void
6949 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6950 {
6951 dw_die_ref c;
6952 dw_attr_node *a;
6953 unsigned ix;
6954
6955 /* To avoid infinite recursion. */
6956 if (die->die_mark)
6957 {
6958 CHECKSUM (die->die_mark);
6959 return;
6960 }
6961 die->die_mark = ++(*mark);
6962
6963 CHECKSUM (die->die_tag);
6964
6965 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6966 attr_checksum (a, ctx, mark);
6967
6968 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6969 }
6970
6971 #undef CHECKSUM
6972 #undef CHECKSUM_BLOCK
6973 #undef CHECKSUM_STRING
6974
6975 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6976 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6977 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6978 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
6979 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
6980 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
6981 #define CHECKSUM_ATTR(FOO) \
6982 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
6983
6984 /* Calculate the checksum of a number in signed LEB128 format. */
6985
6986 static void
6987 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
6988 {
6989 unsigned char byte;
6990 bool more;
6991
6992 while (1)
6993 {
6994 byte = (value & 0x7f);
6995 value >>= 7;
6996 more = !((value == 0 && (byte & 0x40) == 0)
6997 || (value == -1 && (byte & 0x40) != 0));
6998 if (more)
6999 byte |= 0x80;
7000 CHECKSUM (byte);
7001 if (!more)
7002 break;
7003 }
7004 }
7005
7006 /* Calculate the checksum of a number in unsigned LEB128 format. */
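/* For example, 127 encodes as the single byte 0x7f, while 300 (0x12c) becomes
   the two bytes 0xac 0x02: low seven bits first, with bit 0x80 set on every
   byte except the last. */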
7007
7008 static void
7009 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
7010 {
7011 while (1)
7012 {
7013 unsigned char byte = (value & 0x7f);
7014 value >>= 7;
7015 if (value != 0)
7016 /* More bytes to follow. */
7017 byte |= 0x80;
7018 CHECKSUM (byte);
7019 if (value == 0)
7020 break;
7021 }
7022 }
7023
7024 /* Checksum the context of the DIE. This adds the names of any
7025 surrounding namespaces or structures to the checksum. */
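/* For example, for a DIE nested in struct S which is itself inside
   namespace N, this contributes 'C' DW_TAG_namespace "N" followed by
   'C' DW_TAG_structure_type "S" to the hash, outermost scope first. */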
7026
7027 static void
7028 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
7029 {
7030 const char *name;
7031 dw_die_ref spec;
7032 int tag = die->die_tag;
7033
7034 if (tag != DW_TAG_namespace
7035 && tag != DW_TAG_structure_type
7036 && tag != DW_TAG_class_type)
7037 return;
7038
7039 name = get_AT_string (die, DW_AT_name);
7040
7041 spec = get_AT_ref (die, DW_AT_specification);
7042 if (spec != NULL)
7043 die = spec;
7044
7045 if (die->die_parent != NULL)
7046 checksum_die_context (die->die_parent, ctx);
7047
7048 CHECKSUM_ULEB128 ('C');
7049 CHECKSUM_ULEB128 (tag);
7050 if (name != NULL)
7051 CHECKSUM_STRING (name);
7052 }
7053
7054 /* Calculate the checksum of a location expression. */
7055
7056 static inline void
7057 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
7058 {
7059 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
7060 were emitted as a DW_FORM_sdata instead of a location expression. */
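/* This keeps the signature independent of whether such an offset is emitted
   as a bare constant or as an equivalent one-operation location expression,
   mirroring the way the DWARF 4 type signature rules treat these
   attributes. */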
7061 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
7062 {
7063 CHECKSUM_ULEB128 (DW_FORM_sdata);
7064 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
7065 return;
7066 }
7067
7068 /* Otherwise, just checksum the raw location expression. */
7069 while (loc != NULL)
7070 {
7071 inchash::hash hstate;
7072 hashval_t hash;
7073
7074 CHECKSUM_ULEB128 (loc->dtprel);
7075 CHECKSUM_ULEB128 (loc->dw_loc_opc);
7076 hash_loc_operands (loc, hstate);
7077 hash = hstate.end ();
7078 CHECKSUM (hash);
7079 loc = loc->dw_loc_next;
7080 }
7081 }
7082
7083 /* Calculate the checksum of an attribute. */
7084
7085 static void
7086 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
7087 struct md5_ctx *ctx, int *mark)
7088 {
7089 dw_loc_descr_ref loc;
7090 rtx r;
7091
7092 if (AT_class (at) == dw_val_class_die_ref)
7093 {
7094 dw_die_ref target_die = AT_ref (at);
7095
7096 /* For pointer and reference types, we checksum only the (qualified)
7097 name of the target type (if there is a name). For friend entries,
7098 we checksum only the (qualified) name of the target type or function.
7099 This allows the checksum to remain the same whether the target type
7100 is complete or not. */
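/* Concretely, a DW_TAG_pointer_type whose DW_AT_type names struct S is
   checksummed as 'N' DW_AT_type <context of S> 'E' "S", so a pointer to a
   forward-declared S and a pointer to the complete S hash the same. */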
7101 if ((at->dw_attr == DW_AT_type
7102 && (tag == DW_TAG_pointer_type
7103 || tag == DW_TAG_reference_type
7104 || tag == DW_TAG_rvalue_reference_type
7105 || tag == DW_TAG_ptr_to_member_type))
7106 || (at->dw_attr == DW_AT_friend
7107 && tag == DW_TAG_friend))
7108 {
7109 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7110
7111 if (name_attr != NULL)
7112 {
7113 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7114
7115 if (decl == NULL)
7116 decl = target_die;
7117 CHECKSUM_ULEB128 ('N');
7118 CHECKSUM_ULEB128 (at->dw_attr);
7119 if (decl->die_parent != NULL)
7120 checksum_die_context (decl->die_parent, ctx);
7121 CHECKSUM_ULEB128 ('E');
7122 CHECKSUM_STRING (AT_string (name_attr));
7123 return;
7124 }
7125 }
7126
7127 /* For all other references to another DIE, we check to see if the
7128 target DIE has already been visited. If it has, we emit a
7129 backward reference; if not, we descend recursively. */
7130 if (target_die->die_mark > 0)
7131 {
7132 CHECKSUM_ULEB128 ('R');
7133 CHECKSUM_ULEB128 (at->dw_attr);
7134 CHECKSUM_ULEB128 (target_die->die_mark);
7135 }
7136 else
7137 {
7138 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7139
7140 if (decl == NULL)
7141 decl = target_die;
7142 target_die->die_mark = ++(*mark);
7143 CHECKSUM_ULEB128 ('T');
7144 CHECKSUM_ULEB128 (at->dw_attr);
7145 if (decl->die_parent != NULL)
7146 checksum_die_context (decl->die_parent, ctx);
7147 die_checksum_ordered (target_die, ctx, mark);
7148 }
7149 return;
7150 }
7151
7152 CHECKSUM_ULEB128 ('A');
7153 CHECKSUM_ULEB128 (at->dw_attr);
7154
7155 switch (AT_class (at))
7156 {
7157 case dw_val_class_const:
7158 case dw_val_class_const_implicit:
7159 CHECKSUM_ULEB128 (DW_FORM_sdata);
7160 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7161 break;
7162
7163 case dw_val_class_unsigned_const:
7164 case dw_val_class_unsigned_const_implicit:
7165 CHECKSUM_ULEB128 (DW_FORM_sdata);
7166 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7167 break;
7168
7169 case dw_val_class_const_double:
7170 CHECKSUM_ULEB128 (DW_FORM_block);
7171 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7172 CHECKSUM (at->dw_attr_val.v.val_double);
7173 break;
7174
7175 case dw_val_class_wide_int:
7176 CHECKSUM_ULEB128 (DW_FORM_block);
7177 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7178 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7179 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7180 get_full_len (*at->dw_attr_val.v.val_wide)
7181 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7182 break;
7183
7184 case dw_val_class_vec:
7185 CHECKSUM_ULEB128 (DW_FORM_block);
7186 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7187 * at->dw_attr_val.v.val_vec.elt_size);
7188 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7189 (at->dw_attr_val.v.val_vec.length
7190 * at->dw_attr_val.v.val_vec.elt_size));
7191 break;
7192
7193 case dw_val_class_flag:
7194 CHECKSUM_ULEB128 (DW_FORM_flag);
7195 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7196 break;
7197
7198 case dw_val_class_str:
7199 CHECKSUM_ULEB128 (DW_FORM_string);
7200 CHECKSUM_STRING (AT_string (at));
7201 break;
7202
7203 case dw_val_class_addr:
7204 r = AT_addr (at);
7205 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7206 CHECKSUM_ULEB128 (DW_FORM_string);
7207 CHECKSUM_STRING (XSTR (r, 0));
7208 break;
7209
7210 case dw_val_class_offset:
7211 CHECKSUM_ULEB128 (DW_FORM_sdata);
7212 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7213 break;
7214
7215 case dw_val_class_loc:
7216 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7217 loc_checksum_ordered (loc, ctx);
7218 break;
7219
7220 case dw_val_class_fde_ref:
7221 case dw_val_class_symview:
7222 case dw_val_class_lbl_id:
7223 case dw_val_class_lineptr:
7224 case dw_val_class_macptr:
7225 case dw_val_class_loclistsptr:
7226 case dw_val_class_high_pc:
7227 break;
7228
7229 case dw_val_class_file:
7230 case dw_val_class_file_implicit:
7231 CHECKSUM_ULEB128 (DW_FORM_string);
7232 CHECKSUM_STRING (AT_file (at)->filename);
7233 break;
7234
7235 case dw_val_class_data8:
7236 CHECKSUM (at->dw_attr_val.v.val_data8);
7237 break;
7238
7239 default:
7240 break;
7241 }
7242 }
7243
7244 struct checksum_attributes
7245 {
7246 dw_attr_node *at_name;
7247 dw_attr_node *at_type;
7248 dw_attr_node *at_friend;
7249 dw_attr_node *at_accessibility;
7250 dw_attr_node *at_address_class;
7251 dw_attr_node *at_alignment;
7252 dw_attr_node *at_allocated;
7253 dw_attr_node *at_artificial;
7254 dw_attr_node *at_associated;
7255 dw_attr_node *at_binary_scale;
7256 dw_attr_node *at_bit_offset;
7257 dw_attr_node *at_bit_size;
7258 dw_attr_node *at_bit_stride;
7259 dw_attr_node *at_byte_size;
7260 dw_attr_node *at_byte_stride;
7261 dw_attr_node *at_const_value;
7262 dw_attr_node *at_containing_type;
7263 dw_attr_node *at_count;
7264 dw_attr_node *at_data_location;
7265 dw_attr_node *at_data_member_location;
7266 dw_attr_node *at_decimal_scale;
7267 dw_attr_node *at_decimal_sign;
7268 dw_attr_node *at_default_value;
7269 dw_attr_node *at_digit_count;
7270 dw_attr_node *at_discr;
7271 dw_attr_node *at_discr_list;
7272 dw_attr_node *at_discr_value;
7273 dw_attr_node *at_encoding;
7274 dw_attr_node *at_endianity;
7275 dw_attr_node *at_explicit;
7276 dw_attr_node *at_is_optional;
7277 dw_attr_node *at_location;
7278 dw_attr_node *at_lower_bound;
7279 dw_attr_node *at_mutable;
7280 dw_attr_node *at_ordering;
7281 dw_attr_node *at_picture_string;
7282 dw_attr_node *at_prototyped;
7283 dw_attr_node *at_small;
7284 dw_attr_node *at_segment;
7285 dw_attr_node *at_string_length;
7286 dw_attr_node *at_string_length_bit_size;
7287 dw_attr_node *at_string_length_byte_size;
7288 dw_attr_node *at_threads_scaled;
7289 dw_attr_node *at_upper_bound;
7290 dw_attr_node *at_use_location;
7291 dw_attr_node *at_use_UTF8;
7292 dw_attr_node *at_variable_parameter;
7293 dw_attr_node *at_virtuality;
7294 dw_attr_node *at_visibility;
7295 dw_attr_node *at_vtable_elem_location;
7296 };
7297
7298 /* Collect the attributes that we will want to use for the checksum. */
7299
7300 static void
7301 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7302 {
7303 dw_attr_node *a;
7304 unsigned ix;
7305
7306 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7307 {
7308 switch (a->dw_attr)
7309 {
7310 case DW_AT_name:
7311 attrs->at_name = a;
7312 break;
7313 case DW_AT_type:
7314 attrs->at_type = a;
7315 break;
7316 case DW_AT_friend:
7317 attrs->at_friend = a;
7318 break;
7319 case DW_AT_accessibility:
7320 attrs->at_accessibility = a;
7321 break;
7322 case DW_AT_address_class:
7323 attrs->at_address_class = a;
7324 break;
7325 case DW_AT_alignment:
7326 attrs->at_alignment = a;
7327 break;
7328 case DW_AT_allocated:
7329 attrs->at_allocated = a;
7330 break;
7331 case DW_AT_artificial:
7332 attrs->at_artificial = a;
7333 break;
7334 case DW_AT_associated:
7335 attrs->at_associated = a;
7336 break;
7337 case DW_AT_binary_scale:
7338 attrs->at_binary_scale = a;
7339 break;
7340 case DW_AT_bit_offset:
7341 attrs->at_bit_offset = a;
7342 break;
7343 case DW_AT_bit_size:
7344 attrs->at_bit_size = a;
7345 break;
7346 case DW_AT_bit_stride:
7347 attrs->at_bit_stride = a;
7348 break;
7349 case DW_AT_byte_size:
7350 attrs->at_byte_size = a;
7351 break;
7352 case DW_AT_byte_stride:
7353 attrs->at_byte_stride = a;
7354 break;
7355 case DW_AT_const_value:
7356 attrs->at_const_value = a;
7357 break;
7358 case DW_AT_containing_type:
7359 attrs->at_containing_type = a;
7360 break;
7361 case DW_AT_count:
7362 attrs->at_count = a;
7363 break;
7364 case DW_AT_data_location:
7365 attrs->at_data_location = a;
7366 break;
7367 case DW_AT_data_member_location:
7368 attrs->at_data_member_location = a;
7369 break;
7370 case DW_AT_decimal_scale:
7371 attrs->at_decimal_scale = a;
7372 break;
7373 case DW_AT_decimal_sign:
7374 attrs->at_decimal_sign = a;
7375 break;
7376 case DW_AT_default_value:
7377 attrs->at_default_value = a;
7378 break;
7379 case DW_AT_digit_count:
7380 attrs->at_digit_count = a;
7381 break;
7382 case DW_AT_discr:
7383 attrs->at_discr = a;
7384 break;
7385 case DW_AT_discr_list:
7386 attrs->at_discr_list = a;
7387 break;
7388 case DW_AT_discr_value:
7389 attrs->at_discr_value = a;
7390 break;
7391 case DW_AT_encoding:
7392 attrs->at_encoding = a;
7393 break;
7394 case DW_AT_endianity:
7395 attrs->at_endianity = a;
7396 break;
7397 case DW_AT_explicit:
7398 attrs->at_explicit = a;
7399 break;
7400 case DW_AT_is_optional:
7401 attrs->at_is_optional = a;
7402 break;
7403 case DW_AT_location:
7404 attrs->at_location = a;
7405 break;
7406 case DW_AT_lower_bound:
7407 attrs->at_lower_bound = a;
7408 break;
7409 case DW_AT_mutable:
7410 attrs->at_mutable = a;
7411 break;
7412 case DW_AT_ordering:
7413 attrs->at_ordering = a;
7414 break;
7415 case DW_AT_picture_string:
7416 attrs->at_picture_string = a;
7417 break;
7418 case DW_AT_prototyped:
7419 attrs->at_prototyped = a;
7420 break;
7421 case DW_AT_small:
7422 attrs->at_small = a;
7423 break;
7424 case DW_AT_segment:
7425 attrs->at_segment = a;
7426 break;
7427 case DW_AT_string_length:
7428 attrs->at_string_length = a;
7429 break;
7430 case DW_AT_string_length_bit_size:
7431 attrs->at_string_length_bit_size = a;
7432 break;
7433 case DW_AT_string_length_byte_size:
7434 attrs->at_string_length_byte_size = a;
7435 break;
7436 case DW_AT_threads_scaled:
7437 attrs->at_threads_scaled = a;
7438 break;
7439 case DW_AT_upper_bound:
7440 attrs->at_upper_bound = a;
7441 break;
7442 case DW_AT_use_location:
7443 attrs->at_use_location = a;
7444 break;
7445 case DW_AT_use_UTF8:
7446 attrs->at_use_UTF8 = a;
7447 break;
7448 case DW_AT_variable_parameter:
7449 attrs->at_variable_parameter = a;
7450 break;
7451 case DW_AT_virtuality:
7452 attrs->at_virtuality = a;
7453 break;
7454 case DW_AT_visibility:
7455 attrs->at_visibility = a;
7456 break;
7457 case DW_AT_vtable_elem_location:
7458 attrs->at_vtable_elem_location = a;
7459 break;
7460 default:
7461 break;
7462 }
7463 }
7464 }
7465
7466 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7467
7468 static void
7469 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7470 {
7471 dw_die_ref c;
7472 dw_die_ref decl;
7473 struct checksum_attributes attrs;
7474
7475 CHECKSUM_ULEB128 ('D');
7476 CHECKSUM_ULEB128 (die->die_tag);
7477
7478 memset (&attrs, 0, sizeof (attrs));
7479
7480 decl = get_AT_ref (die, DW_AT_specification);
7481 if (decl != NULL)
7482 collect_checksum_attributes (&attrs, decl);
7483 collect_checksum_attributes (&attrs, die);
7484
7485 CHECKSUM_ATTR (attrs.at_name);
7486 CHECKSUM_ATTR (attrs.at_accessibility);
7487 CHECKSUM_ATTR (attrs.at_address_class);
7488 CHECKSUM_ATTR (attrs.at_allocated);
7489 CHECKSUM_ATTR (attrs.at_artificial);
7490 CHECKSUM_ATTR (attrs.at_associated);
7491 CHECKSUM_ATTR (attrs.at_binary_scale);
7492 CHECKSUM_ATTR (attrs.at_bit_offset);
7493 CHECKSUM_ATTR (attrs.at_bit_size);
7494 CHECKSUM_ATTR (attrs.at_bit_stride);
7495 CHECKSUM_ATTR (attrs.at_byte_size);
7496 CHECKSUM_ATTR (attrs.at_byte_stride);
7497 CHECKSUM_ATTR (attrs.at_const_value);
7498 CHECKSUM_ATTR (attrs.at_containing_type);
7499 CHECKSUM_ATTR (attrs.at_count);
7500 CHECKSUM_ATTR (attrs.at_data_location);
7501 CHECKSUM_ATTR (attrs.at_data_member_location);
7502 CHECKSUM_ATTR (attrs.at_decimal_scale);
7503 CHECKSUM_ATTR (attrs.at_decimal_sign);
7504 CHECKSUM_ATTR (attrs.at_default_value);
7505 CHECKSUM_ATTR (attrs.at_digit_count);
7506 CHECKSUM_ATTR (attrs.at_discr);
7507 CHECKSUM_ATTR (attrs.at_discr_list);
7508 CHECKSUM_ATTR (attrs.at_discr_value);
7509 CHECKSUM_ATTR (attrs.at_encoding);
7510 CHECKSUM_ATTR (attrs.at_endianity);
7511 CHECKSUM_ATTR (attrs.at_explicit);
7512 CHECKSUM_ATTR (attrs.at_is_optional);
7513 CHECKSUM_ATTR (attrs.at_location);
7514 CHECKSUM_ATTR (attrs.at_lower_bound);
7515 CHECKSUM_ATTR (attrs.at_mutable);
7516 CHECKSUM_ATTR (attrs.at_ordering);
7517 CHECKSUM_ATTR (attrs.at_picture_string);
7518 CHECKSUM_ATTR (attrs.at_prototyped);
7519 CHECKSUM_ATTR (attrs.at_small);
7520 CHECKSUM_ATTR (attrs.at_segment);
7521 CHECKSUM_ATTR (attrs.at_string_length);
7522 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7523 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7524 CHECKSUM_ATTR (attrs.at_threads_scaled);
7525 CHECKSUM_ATTR (attrs.at_upper_bound);
7526 CHECKSUM_ATTR (attrs.at_use_location);
7527 CHECKSUM_ATTR (attrs.at_use_UTF8);
7528 CHECKSUM_ATTR (attrs.at_variable_parameter);
7529 CHECKSUM_ATTR (attrs.at_virtuality);
7530 CHECKSUM_ATTR (attrs.at_visibility);
7531 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7532 CHECKSUM_ATTR (attrs.at_type);
7533 CHECKSUM_ATTR (attrs.at_friend);
7534 CHECKSUM_ATTR (attrs.at_alignment);
7535
7536 /* Checksum the child DIEs. */
7537 c = die->die_child;
7538 if (c) do {
7539 dw_attr_node *name_attr;
7540
7541 c = c->die_sib;
7542 name_attr = get_AT (c, DW_AT_name);
7543 if (is_template_instantiation (c))
7544 {
7545 /* Ignore instantiations of member type and function templates. */
7546 }
7547 else if (name_attr != NULL
7548 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7549 {
7550 /* Use a shallow checksum for named nested types and member
7551 functions. */
7552 CHECKSUM_ULEB128 ('S');
7553 CHECKSUM_ULEB128 (c->die_tag);
7554 CHECKSUM_STRING (AT_string (name_attr));
7555 }
7556 else
7557 {
7558 /* Use a deep checksum for other children. */
7559 /* Mark this DIE so it gets processed when unmarking. */
7560 if (c->die_mark == 0)
7561 c->die_mark = -1;
7562 die_checksum_ordered (c, ctx, mark);
7563 }
7564 } while (c != die->die_child);
7565
7566 CHECKSUM_ULEB128 (0);
7567 }
7568
7569 /* Add a type name and tag to a hash. */
7570 static void
7571 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7572 {
7573 CHECKSUM_ULEB128 (tag);
7574 CHECKSUM_STRING (name);
7575 }
7576
7577 #undef CHECKSUM
7578 #undef CHECKSUM_STRING
7579 #undef CHECKSUM_ATTR
7580 #undef CHECKSUM_SLEB128
7581 #undef CHECKSUM_ULEB128
7582
7583 /* Generate the type signature for DIE. This is computed by generating an
7584 MD5 checksum over the DIE's tag, its relevant attributes, and its
7585 children. Attributes that are references to other DIEs are processed
7586 by recursion, using the MARK field to prevent infinite recursion.
7587 If the DIE is nested inside a namespace or another type, we also
7588 need to include that context in the signature. The lower 64 bits
7589 of the resulting MD5 checksum comprise the signature. */
7590
7591 static void
7592 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7593 {
7594 int mark;
7595 const char *name;
7596 unsigned char checksum[16];
7597 struct md5_ctx ctx;
7598 dw_die_ref decl;
7599 dw_die_ref parent;
7600
7601 name = get_AT_string (die, DW_AT_name);
7602 decl = get_AT_ref (die, DW_AT_specification);
7603 parent = get_die_parent (die);
7604
7605 /* First, compute a signature for just the type name (and its surrounding
7606 context, if any). This is stored in the type unit DIE for link-time
7607 ODR (one-definition rule) checking. */
7608
7609 if (is_cxx () && name != NULL)
7610 {
7611 md5_init_ctx (&ctx);
7612
7613 /* Checksum the names of surrounding namespaces and structures. */
7614 if (parent != NULL)
7615 checksum_die_context (parent, &ctx);
7616
7617 /* Checksum the current DIE. */
7618 die_odr_checksum (die->die_tag, name, &ctx);
7619 md5_finish_ctx (&ctx, checksum);
7620
7621 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7622 }
7623
7624 /* Next, compute the complete type signature. */
7625
7626 md5_init_ctx (&ctx);
7627 mark = 1;
7628 die->die_mark = mark;
7629
7630 /* Checksum the names of surrounding namespaces and structures. */
7631 if (parent != NULL)
7632 checksum_die_context (parent, &ctx);
7633
7634 /* Checksum the DIE and its children. */
7635 die_checksum_ordered (die, &ctx, &mark);
7636 unmark_all_dies (die);
7637 md5_finish_ctx (&ctx, checksum);
7638
7639 /* Store the signature in the type node and link the type DIE and the
7640 type node together. */
7641 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7642 DWARF_TYPE_SIGNATURE_SIZE);
7643 die->comdat_type_p = true;
7644 die->die_id.die_type_node = type_node;
7645 type_node->type_die = die;
7646
7647 /* If the DIE is a specification, link its declaration to the type node
7648 as well. */
7649 if (decl != NULL)
7650 {
7651 decl->comdat_type_p = true;
7652 decl->die_id.die_type_node = type_node;
7653 }
7654 }
7655
7656 /* Do the location expressions look the same? */
7657 static inline int
7658 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7659 {
7660 return loc1->dw_loc_opc == loc2->dw_loc_opc
7661 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7662 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7663 }
7664
7665 /* Do the values look the same? */
7666 static int
7667 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7668 {
7669 dw_loc_descr_ref loc1, loc2;
7670 rtx r1, r2;
7671
7672 if (v1->val_class != v2->val_class)
7673 return 0;
7674
7675 switch (v1->val_class)
7676 {
7677 case dw_val_class_const:
7678 case dw_val_class_const_implicit:
7679 return v1->v.val_int == v2->v.val_int;
7680 case dw_val_class_unsigned_const:
7681 case dw_val_class_unsigned_const_implicit:
7682 return v1->v.val_unsigned == v2->v.val_unsigned;
7683 case dw_val_class_const_double:
7684 return v1->v.val_double.high == v2->v.val_double.high
7685 && v1->v.val_double.low == v2->v.val_double.low;
7686 case dw_val_class_wide_int:
7687 return *v1->v.val_wide == *v2->v.val_wide;
7688 case dw_val_class_vec:
7689 if (v1->v.val_vec.length != v2->v.val_vec.length
7690 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7691 return 0;
7692 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7693 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7694 return 0;
7695 return 1;
7696 case dw_val_class_flag:
7697 return v1->v.val_flag == v2->v.val_flag;
7698 case dw_val_class_str:
7699 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7700
7701 case dw_val_class_addr:
7702 r1 = v1->v.val_addr;
7703 r2 = v2->v.val_addr;
7704 if (GET_CODE (r1) != GET_CODE (r2))
7705 return 0;
7706 return rtx_equal_p (r1, r2);
7707
7708 case dw_val_class_offset:
7709 return v1->v.val_offset == v2->v.val_offset;
7710
7711 case dw_val_class_loc:
7712 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7713 loc1 && loc2;
7714 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7715 if (!same_loc_p (loc1, loc2, mark))
7716 return 0;
7717 return !loc1 && !loc2;
7718
7719 case dw_val_class_die_ref:
7720 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7721
7722 case dw_val_class_symview:
7723 return strcmp (v1->v.val_symbolic_view, v2->v.val_symbolic_view) == 0;
7724
7725 case dw_val_class_fde_ref:
7726 case dw_val_class_vms_delta:
7727 case dw_val_class_lbl_id:
7728 case dw_val_class_lineptr:
7729 case dw_val_class_macptr:
7730 case dw_val_class_loclistsptr:
7731 case dw_val_class_high_pc:
7732 return 1;
7733
7734 case dw_val_class_file:
7735 case dw_val_class_file_implicit:
7736 return v1->v.val_file == v2->v.val_file;
7737
7738 case dw_val_class_data8:
7739 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7740
7741 default:
7742 return 1;
7743 }
7744 }
7745
7746 /* Do the attributes look the same? */
7747
7748 static int
7749 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7750 {
7751 if (at1->dw_attr != at2->dw_attr)
7752 return 0;
7753
7754 /* We don't care that this was compiled with a different compiler
7755 snapshot; if the output is the same, that's what matters. */
7756 if (at1->dw_attr == DW_AT_producer)
7757 return 1;
7758
7759 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7760 }
7761
7762 /* Do the DIEs look the same? */
7763
7764 static int
7765 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7766 {
7767 dw_die_ref c1, c2;
7768 dw_attr_node *a1;
7769 unsigned ix;
7770
7771 /* To avoid infinite recursion. */
7772 if (die1->die_mark)
7773 return die1->die_mark == die2->die_mark;
7774 die1->die_mark = die2->die_mark = ++(*mark);
7775
7776 if (die1->die_tag != die2->die_tag)
7777 return 0;
7778
7779 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7780 return 0;
7781
7782 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7783 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7784 return 0;
7785
7786 c1 = die1->die_child;
7787 c2 = die2->die_child;
7788 if (! c1)
7789 {
7790 if (c2)
7791 return 0;
7792 }
7793 else
7794 for (;;)
7795 {
7796 if (!same_die_p (c1, c2, mark))
7797 return 0;
7798 c1 = c1->die_sib;
7799 c2 = c2->die_sib;
7800 if (c1 == die1->die_child)
7801 {
7802 if (c2 == die2->die_child)
7803 break;
7804 else
7805 return 0;
7806 }
7807 }
7808
7809 return 1;
7810 }
7811
7812 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7813 children, and set die_symbol. */
7814
7815 static void
7816 compute_comp_unit_symbol (dw_die_ref unit_die)
7817 {
7818 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7819 const char *base = die_name ? lbasename (die_name) : "anonymous";
7820 char *name = XALLOCAVEC (char, strlen (base) + 64);
7821 char *p;
7822 int i, mark;
7823 unsigned char checksum[16];
7824 struct md5_ctx ctx;
7825
7826 /* Compute the checksum of the DIE, then append part of it as hex digits to
7827 the name (filename) of the unit. */
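/* For instance, if the first four checksum bytes were 0x12 0x34 0x56 0x78
   (purely illustrative values), the eight hex digits "12345678" end up
   appended to the cleaned-up base name. */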
7828
7829 md5_init_ctx (&ctx);
7830 mark = 0;
7831 die_checksum (unit_die, &ctx, &mark);
7832 unmark_all_dies (unit_die);
7833 md5_finish_ctx (&ctx, checksum);
7834
7835 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7836 not start with a letter but with anything valid for filenames and
7837 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7838 character is not a letter. */
7839 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7840 clean_symbol_name (name);
7841
7842 p = name + strlen (name);
7843 for (i = 0; i < 4; i++)
7844 {
7845 sprintf (p, "%.2x", checksum[i]);
7846 p += 2;
7847 }
7848
7849 unit_die->die_id.die_symbol = xstrdup (name);
7850 }
7851
7852 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7853
7854 static int
7855 is_type_die (dw_die_ref die)
7856 {
7857 switch (die->die_tag)
7858 {
7859 case DW_TAG_array_type:
7860 case DW_TAG_class_type:
7861 case DW_TAG_interface_type:
7862 case DW_TAG_enumeration_type:
7863 case DW_TAG_pointer_type:
7864 case DW_TAG_reference_type:
7865 case DW_TAG_rvalue_reference_type:
7866 case DW_TAG_string_type:
7867 case DW_TAG_structure_type:
7868 case DW_TAG_subroutine_type:
7869 case DW_TAG_union_type:
7870 case DW_TAG_ptr_to_member_type:
7871 case DW_TAG_set_type:
7872 case DW_TAG_subrange_type:
7873 case DW_TAG_base_type:
7874 case DW_TAG_const_type:
7875 case DW_TAG_file_type:
7876 case DW_TAG_packed_type:
7877 case DW_TAG_volatile_type:
7878 case DW_TAG_typedef:
7879 return 1;
7880 default:
7881 return 0;
7882 }
7883 }
7884
7885 /* Returns true iff C is a compile-unit DIE. */
7886
7887 static inline bool
7888 is_cu_die (dw_die_ref c)
7889 {
7890 return c && (c->die_tag == DW_TAG_compile_unit
7891 || c->die_tag == DW_TAG_skeleton_unit);
7892 }
7893
7894 /* Returns true iff C is a unit DIE of some sort. */
7895
7896 static inline bool
7897 is_unit_die (dw_die_ref c)
7898 {
7899 return c && (c->die_tag == DW_TAG_compile_unit
7900 || c->die_tag == DW_TAG_partial_unit
7901 || c->die_tag == DW_TAG_type_unit
7902 || c->die_tag == DW_TAG_skeleton_unit);
7903 }
7904
7905 /* Returns true iff C is a namespace DIE. */
7906
7907 static inline bool
7908 is_namespace_die (dw_die_ref c)
7909 {
7910 return c && c->die_tag == DW_TAG_namespace;
7911 }
7912
7913 /* Return non-zero if this DIE is a template parameter. */
7914
7915 static inline bool
7916 is_template_parameter (dw_die_ref die)
7917 {
7918 switch (die->die_tag)
7919 {
7920 case DW_TAG_template_type_param:
7921 case DW_TAG_template_value_param:
7922 case DW_TAG_GNU_template_template_param:
7923 case DW_TAG_GNU_template_parameter_pack:
7924 return true;
7925 default:
7926 return false;
7927 }
7928 }
7929
7930 /* Return non-zero if this DIE represents a template instantiation. */
7931
7932 static inline bool
7933 is_template_instantiation (dw_die_ref die)
7934 {
7935 dw_die_ref c;
7936
7937 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7938 return false;
7939 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7940 return false;
7941 }
7942
7943 static char *
7944 gen_internal_sym (const char *prefix)
7945 {
7946 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7947
7948 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7949 return xstrdup (buf);
7950 }
7951
7952 /* Return non-zero if this DIE is a declaration. */
7953
7954 static int
7955 is_declaration_die (dw_die_ref die)
7956 {
7957 dw_attr_node *a;
7958 unsigned ix;
7959
7960 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7961 if (a->dw_attr == DW_AT_declaration)
7962 return 1;
7963
7964 return 0;
7965 }
7966
7967 /* Return non-zero if this DIE is nested inside a subprogram. */
7968
7969 static int
7970 is_nested_in_subprogram (dw_die_ref die)
7971 {
7972 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
7973
7974 if (decl == NULL)
7975 decl = die;
7976 return local_scope_p (decl);
7977 }
7978
7979 /* Return non-zero if this DIE contains a defining declaration of a
7980 subprogram. */
7981
7982 static int
7983 contains_subprogram_definition (dw_die_ref die)
7984 {
7985 dw_die_ref c;
7986
7987 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
7988 return 1;
7989 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
7990 return 0;
7991 }
7992
7993 /* Return non-zero if this is a type DIE that should be moved to a
7994 COMDAT .debug_types section or .debug_info section with DW_UT_*type
7995 unit type. */
7996
7997 static int
7998 should_move_die_to_comdat (dw_die_ref die)
7999 {
8000 switch (die->die_tag)
8001 {
8002 case DW_TAG_class_type:
8003 case DW_TAG_structure_type:
8004 case DW_TAG_enumeration_type:
8005 case DW_TAG_union_type:
8006 /* Don't move declarations, inlined instances, types nested in a
8007 subprogram, or types that contain subprogram definitions. */
8008 if (is_declaration_die (die)
8009 || get_AT (die, DW_AT_abstract_origin)
8010 || is_nested_in_subprogram (die)
8011 || contains_subprogram_definition (die))
8012 return 0;
8013 return 1;
8014 case DW_TAG_array_type:
8015 case DW_TAG_interface_type:
8016 case DW_TAG_pointer_type:
8017 case DW_TAG_reference_type:
8018 case DW_TAG_rvalue_reference_type:
8019 case DW_TAG_string_type:
8020 case DW_TAG_subroutine_type:
8021 case DW_TAG_ptr_to_member_type:
8022 case DW_TAG_set_type:
8023 case DW_TAG_subrange_type:
8024 case DW_TAG_base_type:
8025 case DW_TAG_const_type:
8026 case DW_TAG_file_type:
8027 case DW_TAG_packed_type:
8028 case DW_TAG_volatile_type:
8029 case DW_TAG_typedef:
8030 default:
8031 return 0;
8032 }
8033 }
8034
8035 /* Make a clone of DIE. */
8036
8037 static dw_die_ref
8038 clone_die (dw_die_ref die)
8039 {
8040 dw_die_ref clone = new_die_raw (die->die_tag);
8041 dw_attr_node *a;
8042 unsigned ix;
8043
8044 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8045 add_dwarf_attr (clone, a);
8046
8047 return clone;
8048 }
8049
8050 /* Make a clone of the tree rooted at DIE. */
8051
8052 static dw_die_ref
8053 clone_tree (dw_die_ref die)
8054 {
8055 dw_die_ref c;
8056 dw_die_ref clone = clone_die (die);
8057
8058 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8059
8060 return clone;
8061 }
8062
8063 /* Make a clone of DIE as a declaration. */
8064
8065 static dw_die_ref
8066 clone_as_declaration (dw_die_ref die)
8067 {
8068 dw_die_ref clone;
8069 dw_die_ref decl;
8070 dw_attr_node *a;
8071 unsigned ix;
8072
8073 /* If the DIE is already a declaration, just clone it. */
8074 if (is_declaration_die (die))
8075 return clone_die (die);
8076
8077 /* If the DIE is a specification, just clone its declaration DIE. */
8078 decl = get_AT_ref (die, DW_AT_specification);
8079 if (decl != NULL)
8080 {
8081 clone = clone_die (decl);
8082 if (die->comdat_type_p)
8083 add_AT_die_ref (clone, DW_AT_signature, die);
8084 return clone;
8085 }
8086
8087 clone = new_die_raw (die->die_tag);
8088
8089 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8090 {
8091 /* We don't want to copy over all attributes.
8092 For example we don't want DW_AT_byte_size because otherwise we will no
8093 longer have a declaration and GDB will treat it as a definition. */
8094
8095 switch (a->dw_attr)
8096 {
8097 case DW_AT_abstract_origin:
8098 case DW_AT_artificial:
8099 case DW_AT_containing_type:
8100 case DW_AT_external:
8101 case DW_AT_name:
8102 case DW_AT_type:
8103 case DW_AT_virtuality:
8104 case DW_AT_linkage_name:
8105 case DW_AT_MIPS_linkage_name:
8106 add_dwarf_attr (clone, a);
8107 break;
8108 case DW_AT_byte_size:
8109 case DW_AT_alignment:
8110 default:
8111 break;
8112 }
8113 }
8114
8115 if (die->comdat_type_p)
8116 add_AT_die_ref (clone, DW_AT_signature, die);
8117
8118 add_AT_flag (clone, DW_AT_declaration, 1);
8119 return clone;
8120 }
8121
8122
8123 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8124
8125 struct decl_table_entry
8126 {
8127 dw_die_ref orig;
8128 dw_die_ref copy;
8129 };
8130
8131 /* Helpers to manipulate hash table of copied declarations. */
8132
8133 /* Hashtable helpers. */
8134
8135 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8136 {
8137 typedef die_struct *compare_type;
8138 static inline hashval_t hash (const decl_table_entry *);
8139 static inline bool equal (const decl_table_entry *, const die_struct *);
8140 };
8141
8142 inline hashval_t
8143 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8144 {
8145 return htab_hash_pointer (entry->orig);
8146 }
8147
8148 inline bool
8149 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8150 const die_struct *entry2)
8151 {
8152 return entry1->orig == entry2;
8153 }
8154
8155 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8156
8157 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8158 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8159 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8160 to check if the ancestor has already been copied into UNIT. */
8161
8162 static dw_die_ref
8163 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8164 decl_hash_type *decl_table)
8165 {
8166 dw_die_ref parent = die->die_parent;
8167 dw_die_ref new_parent = unit;
8168 dw_die_ref copy;
8169 decl_table_entry **slot = NULL;
8170 struct decl_table_entry *entry = NULL;
8171
8172 if (decl_table)
8173 {
8174 /* Check if the entry has already been copied to UNIT. */
8175 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8176 INSERT);
8177 if (*slot != HTAB_EMPTY_ENTRY)
8178 {
8179 entry = *slot;
8180 return entry->copy;
8181 }
8182
8183 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8184 entry = XCNEW (struct decl_table_entry);
8185 entry->orig = die;
8186 entry->copy = NULL;
8187 *slot = entry;
8188 }
8189
8190 if (parent != NULL)
8191 {
8192 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8193 if (spec != NULL)
8194 parent = spec;
8195 if (!is_unit_die (parent))
8196 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8197 }
8198
8199 copy = clone_as_declaration (die);
8200 add_child_die (new_parent, copy);
8201
8202 if (decl_table)
8203 {
8204 /* Record the pointer to the copy. */
8205 entry->copy = copy;
8206 }
8207
8208 return copy;
8209 }
8210 /* Copy the declaration context to the new type unit DIE. This includes
8211 any surrounding namespace or type declarations. If the DIE has an
8212 DW_AT_specification attribute, it also includes attributes and children
8213 attached to the specification, and returns a pointer to the original
8214 parent of the declaration DIE. Returns NULL otherwise. */
8215
8216 static dw_die_ref
8217 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8218 {
8219 dw_die_ref decl;
8220 dw_die_ref new_decl;
8221 dw_die_ref orig_parent = NULL;
8222
8223 decl = get_AT_ref (die, DW_AT_specification);
8224 if (decl == NULL)
8225 decl = die;
8226 else
8227 {
8228 unsigned ix;
8229 dw_die_ref c;
8230 dw_attr_node *a;
8231
8232 /* The original DIE will be changed to a declaration, and must
8233 be moved to be a child of the original declaration DIE. */
8234 orig_parent = decl->die_parent;
8235
8236 /* Copy the type node pointer from the new DIE to the original
8237 declaration DIE so we can forward references later. */
8238 decl->comdat_type_p = true;
8239 decl->die_id.die_type_node = die->die_id.die_type_node;
8240
8241 remove_AT (die, DW_AT_specification);
8242
8243 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8244 {
8245 if (a->dw_attr != DW_AT_name
8246 && a->dw_attr != DW_AT_declaration
8247 && a->dw_attr != DW_AT_external)
8248 add_dwarf_attr (die, a);
8249 }
8250
8251 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8252 }
8253
8254 if (decl->die_parent != NULL
8255 && !is_unit_die (decl->die_parent))
8256 {
8257 new_decl = copy_ancestor_tree (unit, decl, NULL);
8258 if (new_decl != NULL)
8259 {
8260 remove_AT (new_decl, DW_AT_signature);
8261 add_AT_specification (die, new_decl);
8262 }
8263 }
8264
8265 return orig_parent;
8266 }
8267
8268 /* Generate the skeleton ancestor tree for the given NODE, then clone
8269 the DIE and add the clone into the tree. */
8270
8271 static void
8272 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8273 {
8274 if (node->new_die != NULL)
8275 return;
8276
8277 node->new_die = clone_as_declaration (node->old_die);
8278
8279 if (node->parent != NULL)
8280 {
8281 generate_skeleton_ancestor_tree (node->parent);
8282 add_child_die (node->parent->new_die, node->new_die);
8283 }
8284 }
8285
8286 /* Generate a skeleton tree of DIEs containing any declarations that are
8287 found in the original tree. We traverse the tree looking for declaration
8288 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8289
8290 static void
8291 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8292 {
8293 skeleton_chain_node node;
8294 dw_die_ref c;
8295 dw_die_ref first;
8296 dw_die_ref prev = NULL;
8297 dw_die_ref next = NULL;
8298
8299 node.parent = parent;
8300
8301 first = c = parent->old_die->die_child;
8302 if (c)
8303 next = c->die_sib;
8304 if (c) do {
8305 if (prev == NULL || prev->die_sib == c)
8306 prev = c;
8307 c = next;
8308 next = (c == first ? NULL : c->die_sib);
8309 node.old_die = c;
8310 node.new_die = NULL;
8311 if (is_declaration_die (c))
8312 {
8313 if (is_template_instantiation (c))
8314 {
8315 /* Instantiated templates do not need to be cloned into the
8316 type unit. Just move the DIE and its children back to
8317 the skeleton tree (in the main CU). */
8318 remove_child_with_prev (c, prev);
8319 add_child_die (parent->new_die, c);
8320 c = prev;
8321 }
8322 else if (c->comdat_type_p)
8323 {
8324 /* This is the skeleton left by an earlier break_out_comdat_types
8325 pass. Clone the existing DIE, but keep the children
8326 under the original (which is in the main CU). */
8327 dw_die_ref clone = clone_die (c);
8328
8329 replace_child (c, clone, prev);
8330 generate_skeleton_ancestor_tree (parent);
8331 add_child_die (parent->new_die, c);
8332 c = clone;
8333 continue;
8334 }
8335 else
8336 {
8337 /* Clone the existing DIE, move the original to the skeleton
8338 tree (which is in the main CU), and put the clone, with
8339 all the original's children, where the original came from
8340 (which is about to be moved to the type unit). */
8341 dw_die_ref clone = clone_die (c);
8342 move_all_children (c, clone);
8343
8344 /* If the original has a DW_AT_object_pointer attribute,
8345 it would now point to a child DIE just moved to the
8346 cloned tree, so we need to remove that attribute from
8347 the original. */
8348 remove_AT (c, DW_AT_object_pointer);
8349
8350 replace_child (c, clone, prev);
8351 generate_skeleton_ancestor_tree (parent);
8352 add_child_die (parent->new_die, c);
8353 node.old_die = clone;
8354 node.new_die = c;
8355 c = clone;
8356 }
8357 }
8358 generate_skeleton_bottom_up (&node);
8359 } while (next != NULL);
8360 }
8361
8362 /* Wrapper function for generate_skeleton_bottom_up. */
8363
8364 static dw_die_ref
8365 generate_skeleton (dw_die_ref die)
8366 {
8367 skeleton_chain_node node;
8368
8369 node.old_die = die;
8370 node.new_die = NULL;
8371 node.parent = NULL;
8372
8373 /* If this type definition is nested inside another type,
8374 and is not an instantiation of a template, always leave
8375 at least a declaration in its place. */
8376 if (die->die_parent != NULL
8377 && is_type_die (die->die_parent)
8378 && !is_template_instantiation (die))
8379 node.new_die = clone_as_declaration (die);
8380
8381 generate_skeleton_bottom_up (&node);
8382 return node.new_die;
8383 }
8384
8385 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8386 declaration. The original DIE is moved to a new compile unit so that
8387 existing references to it follow it to the new location. If any of the
8388 original DIE's descendants is a declaration, we need to replace the
8389 original DIE with a skeleton tree and move the declarations back into the
8390 skeleton tree. */
8391
8392 static dw_die_ref
8393 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8394 dw_die_ref prev)
8395 {
8396 dw_die_ref skeleton, orig_parent;
8397
8398 /* Copy the declaration context to the type unit DIE. If the returned
8399 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8400 that DIE. */
8401 orig_parent = copy_declaration_context (unit, child);
8402
8403 skeleton = generate_skeleton (child);
8404 if (skeleton == NULL)
8405 remove_child_with_prev (child, prev);
8406 else
8407 {
8408 skeleton->comdat_type_p = true;
8409 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8410
8411 /* If the original DIE was a specification, we need to put
8412 the skeleton under the parent DIE of the declaration.
8413 This leaves the original declaration in the tree, but
8414 it will be pruned later since there are no longer any
8415 references to it. */
8416 if (orig_parent != NULL)
8417 {
8418 remove_child_with_prev (child, prev);
8419 add_child_die (orig_parent, skeleton);
8420 }
8421 else
8422 replace_child (child, skeleton, prev);
8423 }
8424
8425 return skeleton;
8426 }
8427
8428 static void
8429 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8430 comdat_type_node *type_node,
8431 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8432
8433 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8434 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8435 DWARF procedure references in the DW_AT_location attribute. */
8436
8437 static dw_die_ref
8438 copy_dwarf_procedure (dw_die_ref die,
8439 comdat_type_node *type_node,
8440 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8441 {
8442 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8443
8444 /* DWARF procedures are not supposed to have children... */
8445 gcc_assert (die->die_child == NULL);
8446
8447 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8448 gcc_assert (vec_safe_length (die->die_attr) == 1
8449 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8450
8451 /* Do not copy the same DWARF procedure more than once. */
8452 bool existed;
8453 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8454 if (existed)
8455 return die_copy;
8456
8457 die_copy = clone_die (die);
8458 add_child_die (type_node->root_die, die_copy);
8459 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8460 return die_copy;
8461 }
8462
8463 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8464 procedures in DIE's attributes. */
8465
8466 static void
8467 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8468 comdat_type_node *type_node,
8469 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8470 {
8471 dw_attr_node *a;
8472 unsigned i;
8473
8474 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8475 {
8476 dw_loc_descr_ref loc;
8477
8478 if (a->dw_attr_val.val_class != dw_val_class_loc)
8479 continue;
8480
8481 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8482 {
8483 switch (loc->dw_loc_opc)
8484 {
8485 case DW_OP_call2:
8486 case DW_OP_call4:
8487 case DW_OP_call_ref:
8488 gcc_assert (loc->dw_loc_oprnd1.val_class
8489 == dw_val_class_die_ref);
8490 loc->dw_loc_oprnd1.v.val_die_ref.die
8491 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8492 type_node,
8493 copied_dwarf_procs);
8494
8495 default:
8496 break;
8497 }
8498 }
8499 }
8500 }
8501
8502 /* Copy DWARF procedures that are referenced by the DIE tree to TYPE_NODE and
8503 rewrite references to point to the copies.
8504
8505 References are looked for in DIE's attributes and recursively in all its
8506 children's attributes that are location descriptions. COPIED_DWARF_PROCS is a
8507 mapping from old DWARF procedures to their copies. It is used to avoid copying
8508 the same DWARF procedure twice under TYPE_NODE. */
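/* A typical case is an array type whose DW_AT_byte_size or DW_AT_data_location
   is a DW_OP_call* into a DW_TAG_dwarf_procedure; because such calls cannot
   cross sections, the procedure must be copied into the same comdat section
   as the type that uses it. */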
8509
8510 static void
8511 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8512 comdat_type_node *type_node,
8513 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8514 {
8515 dw_die_ref c;
8516
8517 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8518 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8519 type_node,
8520 copied_dwarf_procs));
8521 }
8522
8523 /* Traverse the DIE and set up additional .debug_types or .debug_info
8524 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8525 section. */
8526
8527 static void
8528 break_out_comdat_types (dw_die_ref die)
8529 {
8530 dw_die_ref c;
8531 dw_die_ref first;
8532 dw_die_ref prev = NULL;
8533 dw_die_ref next = NULL;
8534 dw_die_ref unit = NULL;
8535
8536 first = c = die->die_child;
8537 if (c)
8538 next = c->die_sib;
8539 if (c) do {
8540 if (prev == NULL || prev->die_sib == c)
8541 prev = c;
8542 c = next;
8543 next = (c == first ? NULL : c->die_sib);
8544 if (should_move_die_to_comdat (c))
8545 {
8546 dw_die_ref replacement;
8547 comdat_type_node *type_node;
8548
8549 /* Break out nested types into their own type units. */
8550 break_out_comdat_types (c);
8551
8552 /* Create a new type unit DIE as the root for the new tree, and
8553 add it to the list of comdat types. */
8554 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8555 add_AT_unsigned (unit, DW_AT_language,
8556 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8557 type_node = ggc_cleared_alloc<comdat_type_node> ();
8558 type_node->root_die = unit;
8559 type_node->next = comdat_type_list;
8560 comdat_type_list = type_node;
8561
8562 /* Generate the type signature. */
8563 generate_type_signature (c, type_node);
8564
8565 /* Copy the declaration context, attributes, and children of the
8566 declaration into the new type unit DIE, then remove this DIE
8567 from the main CU (or replace it with a skeleton if necessary). */
8568 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8569 type_node->skeleton_die = replacement;
8570
8571 /* Add the DIE to the new compunit. */
8572 add_child_die (unit, c);
8573
8574 /* Types can reference DWARF procedures for type size or data location
8575 expressions. Calls in DWARF expressions cannot target procedures
8576 that are not in the same section. So we must copy DWARF procedures
8577 along with this type and then rewrite references to them. */
8578 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8579 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8580
8581 if (replacement != NULL)
8582 c = replacement;
8583 }
8584 else if (c->die_tag == DW_TAG_namespace
8585 || c->die_tag == DW_TAG_class_type
8586 || c->die_tag == DW_TAG_structure_type
8587 || c->die_tag == DW_TAG_union_type)
8588 {
8589 /* Look for nested types that can be broken out. */
8590 break_out_comdat_types (c);
8591 }
8592 } while (next != NULL);
8593 }
8594
8595 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8596 Enter all the cloned children into the hash table decl_table. */
8597
8598 static dw_die_ref
8599 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8600 {
8601 dw_die_ref c;
8602 dw_die_ref clone;
8603 struct decl_table_entry *entry;
8604 decl_table_entry **slot;
8605
8606 if (die->die_tag == DW_TAG_subprogram)
8607 clone = clone_as_declaration (die);
8608 else
8609 clone = clone_die (die);
8610
8611 slot = decl_table->find_slot_with_hash (die,
8612 htab_hash_pointer (die), INSERT);
8613
8614 /* Assert that DIE isn't in the hash table yet. If it were already there,
8615 its ancestors would necessarily be there as well, and therefore
8616 clone_tree_partial wouldn't have been called. */
8617 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8618
8619 entry = XCNEW (struct decl_table_entry);
8620 entry->orig = die;
8621 entry->copy = clone;
8622 *slot = entry;
8623
8624 if (die->die_tag != DW_TAG_subprogram)
8625 FOR_EACH_CHILD (die, c,
8626 add_child_die (clone, clone_tree_partial (c, decl_table)));
8627
8628 return clone;
8629 }
8630
8631 /* Walk the DIE and its children, looking for references to incomplete
8632 or trivial types that are unmarked (i.e., that are not in the current
8633 type_unit). */
8634
8635 static void
8636 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8637 {
8638 dw_die_ref c;
8639 dw_attr_node *a;
8640 unsigned ix;
8641
8642 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8643 {
8644 if (AT_class (a) == dw_val_class_die_ref)
8645 {
8646 dw_die_ref targ = AT_ref (a);
8647 decl_table_entry **slot;
8648 struct decl_table_entry *entry;
8649
8650 if (targ->die_mark != 0 || targ->comdat_type_p)
8651 continue;
8652
8653 slot = decl_table->find_slot_with_hash (targ,
8654 htab_hash_pointer (targ),
8655 INSERT);
8656
8657 if (*slot != HTAB_EMPTY_ENTRY)
8658 {
8659 /* TARG has already been copied, so we just need to
8660 modify the reference to point to the copy. */
8661 entry = *slot;
8662 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8663 }
8664 else
8665 {
8666 dw_die_ref parent = unit;
8667 dw_die_ref copy = clone_die (targ);
8668
8669 /* Record in DECL_TABLE that TARG has been copied.
8670 Need to do this now, before the recursive call,
8671 because DECL_TABLE may be expanded and SLOT
8672 would no longer be a valid pointer. */
8673 entry = XCNEW (struct decl_table_entry);
8674 entry->orig = targ;
8675 entry->copy = copy;
8676 *slot = entry;
8677
8678 /* If TARG is not a declaration DIE, we need to copy its
8679 children. */
8680 if (!is_declaration_die (targ))
8681 {
8682 FOR_EACH_CHILD (
8683 targ, c,
8684 add_child_die (copy,
8685 clone_tree_partial (c, decl_table)));
8686 }
8687
8688 /* Make sure the cloned tree is marked as part of the
8689 type unit. */
8690 mark_dies (copy);
8691
8692 /* If TARG has surrounding context, copy its ancestor tree
8693 into the new type unit. */
8694 if (targ->die_parent != NULL
8695 && !is_unit_die (targ->die_parent))
8696 parent = copy_ancestor_tree (unit, targ->die_parent,
8697 decl_table);
8698
8699 add_child_die (parent, copy);
8700 a->dw_attr_val.v.val_die_ref.die = copy;
8701
8702 /* Make sure the newly-copied DIE is walked. If it was
8703 installed in a previously-added context, it won't
8704 get visited otherwise. */
8705 if (parent != unit)
8706 {
8707 /* Find the highest point of the newly-added tree,
8708 mark each node along the way, and walk from there. */
8709 parent->die_mark = 1;
8710 while (parent->die_parent
8711 && parent->die_parent->die_mark == 0)
8712 {
8713 parent = parent->die_parent;
8714 parent->die_mark = 1;
8715 }
8716 copy_decls_walk (unit, parent, decl_table);
8717 }
8718 }
8719 }
8720 }
8721
8722 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8723 }
8724
8725 /* Copy declarations for "unworthy" types into the new comdat section.
8726 Incomplete types, modified types, and certain other types aren't broken
8727 out into comdat sections of their own, so they don't have a signature,
8728 and we need to copy the declaration into the same section so that we
8729 don't have an external reference. */
8730
8731 static void
8732 copy_decls_for_unworthy_types (dw_die_ref unit)
8733 {
8734 mark_dies (unit);
8735 decl_hash_type decl_table (10);
8736 copy_decls_walk (unit, unit, &decl_table);
8737 unmark_dies (unit);
8738 }
8739
8740 /* Traverse the DIE and add a sibling attribute if it may have the
8741 effect of speeding up access to siblings. To save some space,
8742 avoid generating sibling attributes for DIEs without children. */
8743
8744 static void
8745 add_sibling_attributes (dw_die_ref die)
8746 {
8747 dw_die_ref c;
8748
8749 if (! die->die_child)
8750 return;
8751
8752 if (die->die_parent && die != die->die_parent->die_child)
8753 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8754
8755 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8756 }
8757
8758 /* Output all location lists for the DIE and its children. */
8759
8760 static void
8761 output_location_lists (dw_die_ref die)
8762 {
8763 dw_die_ref c;
8764 dw_attr_node *a;
8765 unsigned ix;
8766
8767 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8768 if (AT_class (a) == dw_val_class_loc_list)
8769 output_loc_list (AT_loc_list (a));
8770
8771 FOR_EACH_CHILD (die, c, output_location_lists (c));
8772 }
8773
8774 /* During assign_location_list_indexes and output_loclists_offsets this is
8775 the current index; afterwards it is the number of assigned indexes (i.e. how
8776 large the .debug_loclists* offset table should be). */
8777 static unsigned int loc_list_idx;
8778
8779 /* Output all location list offsets for the DIE and its children. */
8780
8781 static void
8782 output_loclists_offsets (dw_die_ref die)
8783 {
8784 dw_die_ref c;
8785 dw_attr_node *a;
8786 unsigned ix;
8787
8788 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8789 if (AT_class (a) == dw_val_class_loc_list)
8790 {
8791 dw_loc_list_ref l = AT_loc_list (a);
8792 if (l->offset_emitted)
8793 continue;
8794 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8795 loc_section_label, NULL);
8796 gcc_assert (l->hash == loc_list_idx);
8797 loc_list_idx++;
8798 l->offset_emitted = true;
8799 }
8800
8801 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8802 }
8803
8804 /* Recursively set indexes of location lists. */
8805
8806 static void
8807 assign_location_list_indexes (dw_die_ref die)
8808 {
8809 dw_die_ref c;
8810 dw_attr_node *a;
8811 unsigned ix;
8812
8813 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8814 if (AT_class (a) == dw_val_class_loc_list)
8815 {
8816 dw_loc_list_ref list = AT_loc_list (a);
8817 if (!list->num_assigned)
8818 {
8819 list->num_assigned = true;
8820 list->hash = loc_list_idx++;
8821 }
8822 }
8823
8824 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8825 }
8826
8827 /* We want to limit the number of external references, because they are
8828 larger than local references: a relocation takes multiple words, and
8829 even a sig8 reference is always eight bytes, whereas a local reference
8830 can be as small as one byte (though DW_FORM_ref is usually 4 in GCC).
8831 So if we encounter multiple external references to the same type DIE, we
8832 make a local typedef stub for it and redirect all references there.
8833
8834 This is the element of the hash table for keeping track of these
8835 references. */
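/* For example, a CU that refers many times to a type placed in a separate
   comdat type unit gets one local stub DIE carrying DW_AT_signature (or an
   unnamed DW_TAG_typedef when the target has no signature), and the
   references are later redirected to that stub so they can use a short local
   DW_FORM_ref* form instead of a relocation or an 8-byte signature. */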
8836
8837 struct external_ref
8838 {
8839 dw_die_ref type;
8840 dw_die_ref stub;
8841 unsigned n_refs;
8842 };
8843
8844 /* Hashtable helpers. */
8845
8846 struct external_ref_hasher : free_ptr_hash <external_ref>
8847 {
8848 static inline hashval_t hash (const external_ref *);
8849 static inline bool equal (const external_ref *, const external_ref *);
8850 };
8851
8852 inline hashval_t
8853 external_ref_hasher::hash (const external_ref *r)
8854 {
8855 dw_die_ref die = r->type;
8856 hashval_t h = 0;
8857
8858 /* We can't use the address of the DIE for hashing, because
8859 that will make the order of the stub DIEs non-deterministic. */
8860 if (! die->comdat_type_p)
8861 /* We have a symbol; use it to compute a hash. */
8862 h = htab_hash_string (die->die_id.die_symbol);
8863 else
8864 {
8865 /* We have a type signature; use a subset of the bits as the hash.
8866 The 8-byte signature is at least as large as hashval_t. */
8867 comdat_type_node *type_node = die->die_id.die_type_node;
8868 memcpy (&h, type_node->signature, sizeof (h));
8869 }
8870 return h;
8871 }
8872
8873 inline bool
8874 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8875 {
8876 return r1->type == r2->type;
8877 }
8878
8879 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8880
8881 /* Return a pointer to the external_ref for references to DIE. */
8882
8883 static struct external_ref *
8884 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8885 {
8886 struct external_ref ref, *ref_p;
8887 external_ref **slot;
8888
8889 ref.type = die;
8890 slot = map->find_slot (&ref, INSERT);
8891 if (*slot != HTAB_EMPTY_ENTRY)
8892 return *slot;
8893
8894 ref_p = XCNEW (struct external_ref);
8895 ref_p->type = die;
8896 *slot = ref_p;
8897 return ref_p;
8898 }
8899
8900 /* Subroutine of optimize_external_refs, below.
8901
8902 If we see a type skeleton, record it as our stub. If we see external
8903 references, remember how many we've seen. */
8904
8905 static void
8906 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8907 {
8908 dw_die_ref c;
8909 dw_attr_node *a;
8910 unsigned ix;
8911 struct external_ref *ref_p;
8912
8913 if (is_type_die (die)
8914 && (c = get_AT_ref (die, DW_AT_signature)))
8915 {
8916 /* This is a local skeleton; use it for local references. */
8917 ref_p = lookup_external_ref (map, c);
8918 ref_p->stub = die;
8919 }
8920
8921 /* Scan the DIE references, and remember any that refer to DIEs from
8922 other CUs (i.e. those which are not marked). */
8923 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8924 if (AT_class (a) == dw_val_class_die_ref
8925 && (c = AT_ref (a))->die_mark == 0
8926 && is_type_die (c))
8927 {
8928 ref_p = lookup_external_ref (map, c);
8929 ref_p->n_refs++;
8930 }
8931
8932 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8933 }
8934
8935 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8936 points to an external_ref, DATA is the CU we're processing. If we don't
8937 already have a local stub, and we have multiple refs, build a stub. */
8938
8939 int
8940 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8941 {
8942 struct external_ref *ref_p = *slot;
8943
8944 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
8945 {
8946 /* We have multiple references to this type, so build a small stub.
8947 Both of these forms are a bit dodgy from the perspective of the
8948 DWARF standard, since technically they should have names. */
8949 dw_die_ref cu = data;
8950 dw_die_ref type = ref_p->type;
8951 dw_die_ref stub = NULL;
8952
8953 if (type->comdat_type_p)
8954 {
8955 /* If we refer to this type via sig8, use AT_signature. */
8956 stub = new_die (type->die_tag, cu, NULL_TREE);
8957 add_AT_die_ref (stub, DW_AT_signature, type);
8958 }
8959 else
8960 {
8961 /* Otherwise, use a typedef with no name. */
8962 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
8963 add_AT_die_ref (stub, DW_AT_type, type);
8964 }
8965
8966 stub->die_mark++;
8967 ref_p->stub = stub;
8968 }
8969 return 1;
8970 }
8971
8972 /* DIE is a unit; look through all the DIE references to see if there are
8973 any external references to types, and if so, create local stubs for
8974 them which will be applied in build_abbrev_table. This is useful because
8975 references to local DIEs are smaller. */
8976
8977 static external_ref_hash_type *
8978 optimize_external_refs (dw_die_ref die)
8979 {
8980 external_ref_hash_type *map = new external_ref_hash_type (10);
8981 optimize_external_refs_1 (die, map);
8982 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
8983 return map;
8984 }
8985
8986 /* The following 4 variables are temporaries that are computed only during the
8987 build_abbrev_table call and used and released during the following
8988 optimize_abbrev_table call. */
8989
8990 /* First abbrev_id that can be optimized based on usage. */
8991 static unsigned int abbrev_opt_start;
8992
8993 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
8994 abbrev_id smaller than this, because they must be already sized
8995 during build_abbrev_table). */
8996 static unsigned int abbrev_opt_base_type_end;
8997
8998 /* Vector of usage counts during build_abbrev_table. Indexed by
8999 abbrev_id - abbrev_opt_start. */
9000 static vec<unsigned int> abbrev_usage_count;
9001
9002 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
9003 static vec<dw_die_ref> sorted_abbrev_dies;
9004
9005 /* The format of each DIE (and its attribute value pairs) is encoded in an
9006 abbreviation table. This routine builds the abbreviation table and assigns
9007 a unique abbreviation id for each abbreviation entry. The children of each
9008 die are visited recursively. */
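/* For instance, a DW_TAG_variable DIE carrying DW_AT_name (DW_FORM_strp)
   and DW_AT_type (DW_FORM_ref4) with no children is described once in
   .debug_abbrev as

       <code>  DW_TAG_variable  DW_children_no
               DW_AT_name  DW_FORM_strp
               DW_AT_type  DW_FORM_ref4
               0  0

   and every DIE with the same shape then emits only <code> followed by its
   attribute values in .debug_info.  */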
9009
9010 static void
9011 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
9012 {
9013 unsigned int abbrev_id = 0;
9014 dw_die_ref c;
9015 dw_attr_node *a;
9016 unsigned ix;
9017 dw_die_ref abbrev;
9018
9019 /* Scan the DIE references, and replace any that refer to
9020 DIEs from other CUs (i.e. those which are not marked) with
9021 the local stubs we built in optimize_external_refs. */
9022 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9023 if (AT_class (a) == dw_val_class_die_ref
9024 && (c = AT_ref (a))->die_mark == 0)
9025 {
9026 struct external_ref *ref_p;
9027 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
9028
9029 if (is_type_die (c)
9030 && (ref_p = lookup_external_ref (extern_map, c))
9031 && ref_p->stub && ref_p->stub != die)
9032 change_AT_die_ref (a, ref_p->stub);
9033 else
9034 /* We aren't changing this reference, so mark it external. */
9035 set_AT_ref_external (a, 1);
9036 }
9037
9038 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9039 {
9040 dw_attr_node *die_a, *abbrev_a;
9041 unsigned ix;
9042 bool ok = true;
9043
9044 if (abbrev_id == 0)
9045 continue;
9046 if (abbrev->die_tag != die->die_tag)
9047 continue;
9048 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
9049 continue;
9050
9051 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
9052 continue;
9053
9054 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
9055 {
9056 abbrev_a = &(*abbrev->die_attr)[ix];
9057 if ((abbrev_a->dw_attr != die_a->dw_attr)
9058 || (value_format (abbrev_a) != value_format (die_a)))
9059 {
9060 ok = false;
9061 break;
9062 }
9063 }
9064 if (ok)
9065 break;
9066 }
9067
9068 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9069 {
9070 vec_safe_push (abbrev_die_table, die);
9071 if (abbrev_opt_start)
9072 abbrev_usage_count.safe_push (0);
9073 }
9074 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9075 {
9076 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9077 sorted_abbrev_dies.safe_push (die);
9078 }
9079
9080 die->die_abbrev = abbrev_id;
9081 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9082 }
9083
9084 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9085 by die_abbrev's usage count, from the most commonly used
9086 abbreviation to the least. */
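/* The abbrev code is emitted as a uleb128 at the start of every DIE, so
   giving the most frequently used abbreviations the lowest codes keeps that
   uleb128 down to a single byte (codes 1 through 127) for most DIEs.  */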
9087
9088 static int
9089 die_abbrev_cmp (const void *p1, const void *p2)
9090 {
9091 dw_die_ref die1 = *(const dw_die_ref *) p1;
9092 dw_die_ref die2 = *(const dw_die_ref *) p2;
9093
9094 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9095 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9096
9097 if (die1->die_abbrev >= abbrev_opt_base_type_end
9098 && die2->die_abbrev >= abbrev_opt_base_type_end)
9099 {
9100 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9101 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9102 return -1;
9103 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9104 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9105 return 1;
9106 }
9107
9108 /* Stabilize the sort. */
9109 if (die1->die_abbrev < die2->die_abbrev)
9110 return -1;
9111 if (die1->die_abbrev > die2->die_abbrev)
9112 return 1;
9113
9114 return 0;
9115 }
9116
9117 /* Convert dw_val_class_const and dw_val_class_unsigned_const class attributes
9118 of DIEs in between sorted_abbrev_dies[first_id] and sorted_abbrev_dies[end - 1]
9119 into dw_val_class_const_implicit or
9120 dw_val_class_unsigned_const_implicit. */
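/* With the DWARF 5 form DW_FORM_implicit_const the shared value is stored
   once, as a sleb128 in the abbreviation itself, so DIEs using that
   abbreviation carry no bytes at all for the attribute.  This only wins when
   every DIE sharing the abbreviation has the same value, which is what the
   implicit_consts vector maintained in optimize_abbrev_table tracks.  */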
9121
9122 static void
9123 optimize_implicit_const (unsigned int first_id, unsigned int end,
9124 vec<bool> &implicit_consts)
9125 {
9126 /* It never makes sense if there is just one DIE using the abbreviation. */
9127 if (end < first_id + 2)
9128 return;
9129
9130 dw_attr_node *a;
9131 unsigned ix, i;
9132 dw_die_ref die = sorted_abbrev_dies[first_id];
9133 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9134 if (implicit_consts[ix])
9135 {
9136 enum dw_val_class new_class = dw_val_class_none;
9137 switch (AT_class (a))
9138 {
9139 case dw_val_class_unsigned_const:
9140 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9141 continue;
9142
9143 /* The .debug_abbrev section will grow by
9144 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
9145 in all the DIEs using that abbreviation. */
9146 if (constant_size (AT_unsigned (a)) * (end - first_id)
9147 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9148 continue;
9149
9150 new_class = dw_val_class_unsigned_const_implicit;
9151 break;
9152
9153 case dw_val_class_const:
9154 new_class = dw_val_class_const_implicit;
9155 break;
9156
9157 case dw_val_class_file:
9158 new_class = dw_val_class_file_implicit;
9159 break;
9160
9161 default:
9162 continue;
9163 }
9164 for (i = first_id; i < end; i++)
9165 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9166 = new_class;
9167 }
9168 }
9169
9170 /* Attempt to optimize the abbreviation table for abbreviations numbered
9171 abbrev_opt_start and above. */
9172
9173 static void
9174 optimize_abbrev_table (void)
9175 {
9176 if (abbrev_opt_start
9177 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9178 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9179 {
9180 auto_vec<bool, 32> implicit_consts;
9181 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9182
9183 unsigned int abbrev_id = abbrev_opt_start - 1;
9184 unsigned int first_id = ~0U;
9185 unsigned int last_abbrev_id = 0;
9186 unsigned int i;
9187 dw_die_ref die;
9188 if (abbrev_opt_base_type_end > abbrev_opt_start)
9189 abbrev_id = abbrev_opt_base_type_end - 1;
9190 /* Reassign abbreviation ids from abbrev_opt_start above, so that
9191 most commonly used abbreviations come first. */
9192 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9193 {
9194 dw_attr_node *a;
9195 unsigned ix;
9196
9197 /* If calc_base_type_die_sizes has been called, the CU and
9198 base types after it can't be optimized, because we've already
9199 calculated their DIE offsets. We've sorted them first. */
9200 if (die->die_abbrev < abbrev_opt_base_type_end)
9201 continue;
9202 if (die->die_abbrev != last_abbrev_id)
9203 {
9204 last_abbrev_id = die->die_abbrev;
9205 if (dwarf_version >= 5 && first_id != ~0U)
9206 optimize_implicit_const (first_id, i, implicit_consts);
9207 abbrev_id++;
9208 (*abbrev_die_table)[abbrev_id] = die;
9209 if (dwarf_version >= 5)
9210 {
9211 first_id = i;
9212 implicit_consts.truncate (0);
9213
9214 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9215 switch (AT_class (a))
9216 {
9217 case dw_val_class_const:
9218 case dw_val_class_unsigned_const:
9219 case dw_val_class_file:
9220 implicit_consts.safe_push (true);
9221 break;
9222 default:
9223 implicit_consts.safe_push (false);
9224 break;
9225 }
9226 }
9227 }
9228 else if (dwarf_version >= 5)
9229 {
9230 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9231 if (!implicit_consts[ix])
9232 continue;
9233 else
9234 {
9235 dw_attr_node *other_a
9236 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9237 if (!dw_val_equal_p (&a->dw_attr_val,
9238 &other_a->dw_attr_val))
9239 implicit_consts[ix] = false;
9240 }
9241 }
9242 die->die_abbrev = abbrev_id;
9243 }
9244 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9245 if (dwarf_version >= 5 && first_id != ~0U)
9246 optimize_implicit_const (first_id, i, implicit_consts);
9247 }
9248
9249 abbrev_opt_start = 0;
9250 abbrev_opt_base_type_end = 0;
9251 abbrev_usage_count.release ();
9252 sorted_abbrev_dies.release ();
9253 }
9254 \f
9255 /* Return the power-of-two number of bytes necessary to represent VALUE. */
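/* For example, values up to 0xff need one byte, 0x100 through 0xffff need
   two, 0x10000 through 0xffffffff need four, and anything larger needs
   eight.  */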
9256
9257 static int
9258 constant_size (unsigned HOST_WIDE_INT value)
9259 {
9260 int log;
9261
9262 if (value == 0)
9263 log = 0;
9264 else
9265 log = floor_log2 (value);
9266
9267 log = log / 8;
9268 log = 1 << (floor_log2 (log) + 1);
9269
9270 return log;
9271 }
9272
9273 /* Return the size of a DIE as it is represented in the
9274 .debug_info section. */
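/* The sizes computed here must match, byte for byte, what output_die later
   emits; otherwise the die_offset values assigned by calc_die_sizes would
   not correspond to the actual positions of the DIEs in the section.  */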
9275
9276 static unsigned long
9277 size_of_die (dw_die_ref die)
9278 {
9279 unsigned long size = 0;
9280 dw_attr_node *a;
9281 unsigned ix;
9282 enum dwarf_form form;
9283
9284 size += size_of_uleb128 (die->die_abbrev);
9285 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9286 {
9287 switch (AT_class (a))
9288 {
9289 case dw_val_class_addr:
9290 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9291 {
9292 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9293 size += size_of_uleb128 (AT_index (a));
9294 }
9295 else
9296 size += DWARF2_ADDR_SIZE;
9297 break;
9298 case dw_val_class_offset:
9299 size += DWARF_OFFSET_SIZE;
9300 break;
9301 case dw_val_class_loc:
9302 {
9303 unsigned long lsize = size_of_locs (AT_loc (a));
9304
9305 /* Block length. */
9306 if (dwarf_version >= 4)
9307 size += size_of_uleb128 (lsize);
9308 else
9309 size += constant_size (lsize);
9310 size += lsize;
9311 }
9312 break;
9313 case dw_val_class_loc_list:
9314 case dw_val_class_view_list:
9315 if (dwarf_split_debug_info && dwarf_version >= 5)
9316 {
9317 gcc_assert (AT_loc_list (a)->num_assigned);
9318 size += size_of_uleb128 (AT_loc_list (a)->hash);
9319 }
9320 else
9321 size += DWARF_OFFSET_SIZE;
9322 break;
9323 case dw_val_class_range_list:
9324 if (value_format (a) == DW_FORM_rnglistx)
9325 {
9326 gcc_assert (rnglist_idx);
9327 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9328 size += size_of_uleb128 (r->idx);
9329 }
9330 else
9331 size += DWARF_OFFSET_SIZE;
9332 break;
9333 case dw_val_class_const:
9334 size += size_of_sleb128 (AT_int (a));
9335 break;
9336 case dw_val_class_unsigned_const:
9337 {
9338 int csize = constant_size (AT_unsigned (a));
9339 if (dwarf_version == 3
9340 && a->dw_attr == DW_AT_data_member_location
9341 && csize >= 4)
9342 size += size_of_uleb128 (AT_unsigned (a));
9343 else
9344 size += csize;
9345 }
9346 break;
9347 case dw_val_class_symview:
9348 if (symview_upper_bound <= 0xff)
9349 size += 1;
9350 else if (symview_upper_bound <= 0xffff)
9351 size += 2;
9352 else if (symview_upper_bound <= 0xffffffff)
9353 size += 4;
9354 else
9355 size += 8;
9356 break;
9357 case dw_val_class_const_implicit:
9358 case dw_val_class_unsigned_const_implicit:
9359 case dw_val_class_file_implicit:
9360 /* These occupy no size in the DIE, just an extra sleb128 in
9361 .debug_abbrev. */
9362 break;
9363 case dw_val_class_const_double:
9364 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9365 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9366 size++; /* block */
9367 break;
9368 case dw_val_class_wide_int:
9369 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9370 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9371 if (get_full_len (*a->dw_attr_val.v.val_wide)
9372 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9373 size++; /* block */
9374 break;
9375 case dw_val_class_vec:
9376 size += constant_size (a->dw_attr_val.v.val_vec.length
9377 * a->dw_attr_val.v.val_vec.elt_size)
9378 + a->dw_attr_val.v.val_vec.length
9379 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9380 break;
9381 case dw_val_class_flag:
9382 if (dwarf_version >= 4)
9383 /* Currently all add_AT_flag calls pass in 1 as last argument,
9384 so DW_FORM_flag_present can be used. If that ever changes,
9385 we'll need to use DW_FORM_flag and have some optimization
9386 in build_abbrev_table that will change those to
9387 DW_FORM_flag_present if it is set to 1 in all DIEs using
9388 the same abbrev entry. */
9389 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9390 else
9391 size += 1;
9392 break;
9393 case dw_val_class_die_ref:
9394 if (AT_ref_external (a))
9395 {
9396 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9397 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9398 is sized by target address length, whereas in DWARF3
9399 it's always sized as an offset. */
9400 if (use_debug_types)
9401 size += DWARF_TYPE_SIGNATURE_SIZE;
9402 else if (dwarf_version == 2)
9403 size += DWARF2_ADDR_SIZE;
9404 else
9405 size += DWARF_OFFSET_SIZE;
9406 }
9407 else
9408 size += DWARF_OFFSET_SIZE;
9409 break;
9410 case dw_val_class_fde_ref:
9411 size += DWARF_OFFSET_SIZE;
9412 break;
9413 case dw_val_class_lbl_id:
9414 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9415 {
9416 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9417 size += size_of_uleb128 (AT_index (a));
9418 }
9419 else
9420 size += DWARF2_ADDR_SIZE;
9421 break;
9422 case dw_val_class_lineptr:
9423 case dw_val_class_macptr:
9424 case dw_val_class_loclistsptr:
9425 size += DWARF_OFFSET_SIZE;
9426 break;
9427 case dw_val_class_str:
9428 form = AT_string_form (a);
9429 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9430 size += DWARF_OFFSET_SIZE;
9431 else if (form == dwarf_FORM (DW_FORM_strx))
9432 size += size_of_uleb128 (AT_index (a));
9433 else
9434 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9435 break;
9436 case dw_val_class_file:
9437 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9438 break;
9439 case dw_val_class_data8:
9440 size += 8;
9441 break;
9442 case dw_val_class_vms_delta:
9443 size += DWARF_OFFSET_SIZE;
9444 break;
9445 case dw_val_class_high_pc:
9446 size += DWARF2_ADDR_SIZE;
9447 break;
9448 case dw_val_class_discr_value:
9449 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9450 break;
9451 case dw_val_class_discr_list:
9452 {
9453 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9454
9455 /* This is a block, so we have the block length and then its
9456 data. */
9457 size += constant_size (block_size) + block_size;
9458 }
9459 break;
9460 default:
9461 gcc_unreachable ();
9462 }
9463 }
9464
9465 return size;
9466 }
9467
9468 /* Size the debugging information associated with a given DIE. Visits the
9469 DIE's children recursively. Updates the global variable next_die_offset on
9470 each visit. Uses the current value of next_die_offset to update the
9471 die_offset field in each DIE. */
9472
9473 static void
9474 calc_die_sizes (dw_die_ref die)
9475 {
9476 dw_die_ref c;
9477
9478 gcc_assert (die->die_offset == 0
9479 || (unsigned long int) die->die_offset == next_die_offset);
9480 die->die_offset = next_die_offset;
9481 next_die_offset += size_of_die (die);
9482
9483 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9484
9485 if (die->die_child != NULL)
9486 /* Count the null byte used to terminate sibling lists. */
9487 next_die_offset += 1;
9488 }
9489
9490 /* Size just the base type children at the start of the CU.
9491 This is needed because build_abbrev_table needs to size locs
9492 and sizing of type based stack ops needs to know die_offset
9493 values for the base types. */
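/* (The DWARF 5 typed stack operations such as DW_OP_convert and
   DW_OP_regval_type refer to a base type by its uleb128 DIE offset, so those
   offsets have to be known before location expressions can be sized.)  */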
9494
9495 static void
9496 calc_base_type_die_sizes (void)
9497 {
9498 unsigned long die_offset = (dwarf_split_debug_info
9499 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9500 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9501 unsigned int i;
9502 dw_die_ref base_type;
9503 #if ENABLE_ASSERT_CHECKING
9504 dw_die_ref prev = comp_unit_die ()->die_child;
9505 #endif
9506
9507 die_offset += size_of_die (comp_unit_die ());
9508 for (i = 0; base_types.iterate (i, &base_type); i++)
9509 {
9510 #if ENABLE_ASSERT_CHECKING
9511 gcc_assert (base_type->die_offset == 0
9512 && prev->die_sib == base_type
9513 && base_type->die_child == NULL
9514 && base_type->die_abbrev);
9515 prev = base_type;
9516 #endif
9517 if (abbrev_opt_start
9518 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9519 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9520 base_type->die_offset = die_offset;
9521 die_offset += size_of_die (base_type);
9522 }
9523 }
9524
9525 /* Set the marks for a die and its children. We do this so
9526 that we know whether or not a reference needs to use DW_FORM_ref_addr; only
9527 DIEs in the same CU will be marked. We used to clear out the offset
9528 and use that as the flag, but ran into ordering problems. */
9529
9530 static void
9531 mark_dies (dw_die_ref die)
9532 {
9533 dw_die_ref c;
9534
9535 gcc_assert (!die->die_mark);
9536
9537 die->die_mark = 1;
9538 FOR_EACH_CHILD (die, c, mark_dies (c));
9539 }
9540
9541 /* Clear the marks for a die and its children. */
9542
9543 static void
9544 unmark_dies (dw_die_ref die)
9545 {
9546 dw_die_ref c;
9547
9548 if (! use_debug_types)
9549 gcc_assert (die->die_mark);
9550
9551 die->die_mark = 0;
9552 FOR_EACH_CHILD (die, c, unmark_dies (c));
9553 }
9554
9555 /* Clear the marks for a die, its children and referred dies. */
9556
9557 static void
9558 unmark_all_dies (dw_die_ref die)
9559 {
9560 dw_die_ref c;
9561 dw_attr_node *a;
9562 unsigned ix;
9563
9564 if (!die->die_mark)
9565 return;
9566 die->die_mark = 0;
9567
9568 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9569
9570 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9571 if (AT_class (a) == dw_val_class_die_ref)
9572 unmark_all_dies (AT_ref (a));
9573 }
9574
9575 /* Calculate if the entry should appear in the final output file. It may be
9576 from a pruned type. */
9577
9578 static bool
9579 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9580 {
9581 /* By limiting gnu pubnames to definitions only, gold can generate a
9582 gdb index without entries for declarations, which don't include
9583 enough information to be useful. */
9584 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9585 return false;
9586
9587 if (table == pubname_table)
9588 {
9589 /* Enumerator names are part of the pubname table, but the
9590 parent DW_TAG_enumeration_type die may have been pruned.
9591 Don't output them if that is the case. */
9592 if (p->die->die_tag == DW_TAG_enumerator &&
9593 (p->die->die_parent == NULL
9594 || !p->die->die_parent->die_perennial_p))
9595 return false;
9596
9597 /* Everything else in the pubname table is included. */
9598 return true;
9599 }
9600
9601 /* The pubtypes table shouldn't include types that have been
9602 pruned. */
9603 return (p->die->die_offset != 0
9604 || !flag_eliminate_unused_debug_types);
9605 }
9606
9607 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9608 generated for the compilation unit. */
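/* Each included entry contributes a DWARF_OFFSET_SIZE DIE offset, the
   NUL-terminated name and, for the GNU-style tables, a one-byte flag; the
   trailing DWARF_OFFSET_SIZE accounts for the zero offset that terminates
   the table.  */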
9609
9610 static unsigned long
9611 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9612 {
9613 unsigned long size;
9614 unsigned i;
9615 pubname_entry *p;
9616 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9617
9618 size = DWARF_PUBNAMES_HEADER_SIZE;
9619 FOR_EACH_VEC_ELT (*names, i, p)
9620 if (include_pubname_in_output (names, p))
9621 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9622
9623 size += DWARF_OFFSET_SIZE;
9624 return size;
9625 }
9626
9627 /* Return the size of the information in the .debug_aranges section. */
9628
9629 static unsigned long
9630 size_of_aranges (void)
9631 {
9632 unsigned long size;
9633
9634 size = DWARF_ARANGES_HEADER_SIZE;
9635
9636 /* Count the address/length pair for this compilation unit. */
9637 if (text_section_used)
9638 size += 2 * DWARF2_ADDR_SIZE;
9639 if (cold_text_section_used)
9640 size += 2 * DWARF2_ADDR_SIZE;
9641 if (have_multiple_function_sections)
9642 {
9643 unsigned fde_idx;
9644 dw_fde_ref fde;
9645
9646 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9647 {
9648 if (DECL_IGNORED_P (fde->decl))
9649 continue;
9650 if (!fde->in_std_section)
9651 size += 2 * DWARF2_ADDR_SIZE;
9652 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9653 size += 2 * DWARF2_ADDR_SIZE;
9654 }
9655 }
9656
9657 /* Count the two zero words used to terminate the address range table. */
9658 size += 2 * DWARF2_ADDR_SIZE;
9659 return size;
9660 }
9661 \f
9662 /* Select the encoding of an attribute value. */
9663
9664 static enum dwarf_form
9665 value_format (dw_attr_node *a)
9666 {
9667 switch (AT_class (a))
9668 {
9669 case dw_val_class_addr:
9670 /* Only very few attributes allow DW_FORM_addr. */
9671 switch (a->dw_attr)
9672 {
9673 case DW_AT_low_pc:
9674 case DW_AT_high_pc:
9675 case DW_AT_entry_pc:
9676 case DW_AT_trampoline:
9677 return (AT_index (a) == NOT_INDEXED
9678 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9679 default:
9680 break;
9681 }
9682 switch (DWARF2_ADDR_SIZE)
9683 {
9684 case 1:
9685 return DW_FORM_data1;
9686 case 2:
9687 return DW_FORM_data2;
9688 case 4:
9689 return DW_FORM_data4;
9690 case 8:
9691 return DW_FORM_data8;
9692 default:
9693 gcc_unreachable ();
9694 }
9695 case dw_val_class_loc_list:
9696 case dw_val_class_view_list:
9697 if (dwarf_split_debug_info
9698 && dwarf_version >= 5
9699 && AT_loc_list (a)->num_assigned)
9700 return DW_FORM_loclistx;
9701 /* FALLTHRU */
9702 case dw_val_class_range_list:
9703 /* For range lists in DWARF 5, use DW_FORM_rnglistx from .debug_info.dwo
9704 but in .debug_info use DW_FORM_sec_offset, which is shorter if we
9705 care about sizes of .debug* sections in shared libraries and
9706 executables and don't take into account relocations that affect just
9707 relocatable objects - for DW_FORM_rnglistx we'd have to emit offset
9708 table in the .debug_rnglists section. */
9709 if (dwarf_split_debug_info
9710 && dwarf_version >= 5
9711 && AT_class (a) == dw_val_class_range_list
9712 && rnglist_idx
9713 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9714 return DW_FORM_rnglistx;
9715 if (dwarf_version >= 4)
9716 return DW_FORM_sec_offset;
9717 /* FALLTHRU */
9718 case dw_val_class_vms_delta:
9719 case dw_val_class_offset:
9720 switch (DWARF_OFFSET_SIZE)
9721 {
9722 case 4:
9723 return DW_FORM_data4;
9724 case 8:
9725 return DW_FORM_data8;
9726 default:
9727 gcc_unreachable ();
9728 }
9729 case dw_val_class_loc:
9730 if (dwarf_version >= 4)
9731 return DW_FORM_exprloc;
9732 switch (constant_size (size_of_locs (AT_loc (a))))
9733 {
9734 case 1:
9735 return DW_FORM_block1;
9736 case 2:
9737 return DW_FORM_block2;
9738 case 4:
9739 return DW_FORM_block4;
9740 default:
9741 gcc_unreachable ();
9742 }
9743 case dw_val_class_const:
9744 return DW_FORM_sdata;
9745 case dw_val_class_unsigned_const:
9746 switch (constant_size (AT_unsigned (a)))
9747 {
9748 case 1:
9749 return DW_FORM_data1;
9750 case 2:
9751 return DW_FORM_data2;
9752 case 4:
9753 /* In DWARF3 DW_AT_data_member_location with
9754 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9755 constant, so we need to use DW_FORM_udata if we need
9756 a large constant. */
9757 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9758 return DW_FORM_udata;
9759 return DW_FORM_data4;
9760 case 8:
9761 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9762 return DW_FORM_udata;
9763 return DW_FORM_data8;
9764 default:
9765 gcc_unreachable ();
9766 }
9767 case dw_val_class_const_implicit:
9768 case dw_val_class_unsigned_const_implicit:
9769 case dw_val_class_file_implicit:
9770 return DW_FORM_implicit_const;
9771 case dw_val_class_const_double:
9772 switch (HOST_BITS_PER_WIDE_INT)
9773 {
9774 case 8:
9775 return DW_FORM_data2;
9776 case 16:
9777 return DW_FORM_data4;
9778 case 32:
9779 return DW_FORM_data8;
9780 case 64:
9781 if (dwarf_version >= 5)
9782 return DW_FORM_data16;
9783 /* FALLTHRU */
9784 default:
9785 return DW_FORM_block1;
9786 }
9787 case dw_val_class_wide_int:
9788 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9789 {
9790 case 8:
9791 return DW_FORM_data1;
9792 case 16:
9793 return DW_FORM_data2;
9794 case 32:
9795 return DW_FORM_data4;
9796 case 64:
9797 return DW_FORM_data8;
9798 case 128:
9799 if (dwarf_version >= 5)
9800 return DW_FORM_data16;
9801 /* FALLTHRU */
9802 default:
9803 return DW_FORM_block1;
9804 }
9805 case dw_val_class_symview:
9806 /* ??? We might use uleb128, but then we'd have to compute
9807 .debug_info offsets in the assembler. */
9808 if (symview_upper_bound <= 0xff)
9809 return DW_FORM_data1;
9810 else if (symview_upper_bound <= 0xffff)
9811 return DW_FORM_data2;
9812 else if (symview_upper_bound <= 0xffffffff)
9813 return DW_FORM_data4;
9814 else
9815 return DW_FORM_data8;
9816 case dw_val_class_vec:
9817 switch (constant_size (a->dw_attr_val.v.val_vec.length
9818 * a->dw_attr_val.v.val_vec.elt_size))
9819 {
9820 case 1:
9821 return DW_FORM_block1;
9822 case 2:
9823 return DW_FORM_block2;
9824 case 4:
9825 return DW_FORM_block4;
9826 default:
9827 gcc_unreachable ();
9828 }
9829 case dw_val_class_flag:
9830 if (dwarf_version >= 4)
9831 {
9832 /* Currently all add_AT_flag calls pass in 1 as last argument,
9833 so DW_FORM_flag_present can be used. If that ever changes,
9834 we'll need to use DW_FORM_flag and have some optimization
9835 in build_abbrev_table that will change those to
9836 DW_FORM_flag_present if it is set to 1 in all DIEs using
9837 the same abbrev entry. */
9838 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9839 return DW_FORM_flag_present;
9840 }
9841 return DW_FORM_flag;
9842 case dw_val_class_die_ref:
9843 if (AT_ref_external (a))
9844 return use_debug_types ? DW_FORM_ref_sig8 : DW_FORM_ref_addr;
9845 else
9846 return DW_FORM_ref;
9847 case dw_val_class_fde_ref:
9848 return DW_FORM_data;
9849 case dw_val_class_lbl_id:
9850 return (AT_index (a) == NOT_INDEXED
9851 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9852 case dw_val_class_lineptr:
9853 case dw_val_class_macptr:
9854 case dw_val_class_loclistsptr:
9855 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9856 case dw_val_class_str:
9857 return AT_string_form (a);
9858 case dw_val_class_file:
9859 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9860 {
9861 case 1:
9862 return DW_FORM_data1;
9863 case 2:
9864 return DW_FORM_data2;
9865 case 4:
9866 return DW_FORM_data4;
9867 default:
9868 gcc_unreachable ();
9869 }
9870
9871 case dw_val_class_data8:
9872 return DW_FORM_data8;
9873
9874 case dw_val_class_high_pc:
9875 switch (DWARF2_ADDR_SIZE)
9876 {
9877 case 1:
9878 return DW_FORM_data1;
9879 case 2:
9880 return DW_FORM_data2;
9881 case 4:
9882 return DW_FORM_data4;
9883 case 8:
9884 return DW_FORM_data8;
9885 default:
9886 gcc_unreachable ();
9887 }
9888
9889 case dw_val_class_discr_value:
9890 return (a->dw_attr_val.v.val_discr_value.pos
9891 ? DW_FORM_udata
9892 : DW_FORM_sdata);
9893 case dw_val_class_discr_list:
9894 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9895 {
9896 case 1:
9897 return DW_FORM_block1;
9898 case 2:
9899 return DW_FORM_block2;
9900 case 4:
9901 return DW_FORM_block4;
9902 default:
9903 gcc_unreachable ();
9904 }
9905
9906 default:
9907 gcc_unreachable ();
9908 }
9909 }
9910
9911 /* Output the encoding of an attribute value. */
9912
9913 static void
9914 output_value_format (dw_attr_node *a)
9915 {
9916 enum dwarf_form form = value_format (a);
9917
9918 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9919 }
9920
9921 /* Given a die and id, produce the appropriate abbreviations. */
9922
9923 static void
9924 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9925 {
9926 unsigned ix;
9927 dw_attr_node *a_attr;
9928
9929 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9930 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9931 dwarf_tag_name (abbrev->die_tag));
9932
9933 if (abbrev->die_child != NULL)
9934 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
9935 else
9936 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
9937
9938 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
9939 {
9940 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
9941 dwarf_attr_name (a_attr->dw_attr));
9942 output_value_format (a_attr);
9943 if (value_format (a_attr) == DW_FORM_implicit_const)
9944 {
9945 if (AT_class (a_attr) == dw_val_class_file_implicit)
9946 {
9947 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
9948 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
9949 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
9950 }
9951 else
9952 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
9953 }
9954 }
9955
9956 dw2_asm_output_data (1, 0, NULL);
9957 dw2_asm_output_data (1, 0, NULL);
9958 }
9959
9960
9961 /* Output the .debug_abbrev section which defines the DIE abbreviation
9962 table. */
9963
9964 static void
9965 output_abbrev_section (void)
9966 {
9967 unsigned int abbrev_id;
9968 dw_die_ref abbrev;
9969
9970 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9971 if (abbrev_id != 0)
9972 output_die_abbrevs (abbrev_id, abbrev);
9973
9974 /* Terminate the table. */
9975 dw2_asm_output_data (1, 0, NULL);
9976 }
9977
9978 /* Return a new location list, given the begin and end range, and the
9979 expression. */
9980
9981 static inline dw_loc_list_ref
9982 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
9983 const char *end, var_loc_view vend,
9984 const char *section)
9985 {
9986 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
9987
9988 retlist->begin = begin;
9989 retlist->begin_entry = NULL;
9990 retlist->end = end;
9991 retlist->expr = expr;
9992 retlist->section = section;
9993 retlist->vbegin = vbegin;
9994 retlist->vend = vend;
9995
9996 return retlist;
9997 }
9998
9999 /* Return true iff there's any nonzero view number in the loc list.
10000
10001 ??? When views are not enabled, we'll often extend a single range
10002 to the entire function, so that we emit a single location
10003 expression rather than a location list. With views, even with a
10004 single range, we'll output a list if start or end have a nonzero
10005 view. If we change this, we may want to stop splitting a single
10006 range in dw_loc_list just because of a nonzero view, even if it
10007 straddles across hot/cold partitions. */
10008
10009 static bool
10010 loc_list_has_views (dw_loc_list_ref list)
10011 {
10012 if (!debug_variable_location_views)
10013 return false;
10014
10015 for (dw_loc_list_ref loc = list;
10016 loc != NULL; loc = loc->dw_loc_next)
10017 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
10018 return true;
10019
10020 return false;
10021 }
10022
10023 /* Generate a new internal symbol for this location list node, if it
10024 hasn't got one yet. */
10025
10026 static inline void
10027 gen_llsym (dw_loc_list_ref list)
10028 {
10029 gcc_assert (!list->ll_symbol);
10030 list->ll_symbol = gen_internal_sym ("LLST");
10031
10032 if (!loc_list_has_views (list))
10033 return;
10034
10035 if (dwarf2out_locviews_in_attribute ())
10036 {
10037 /* Use the same label_num for the view list. */
10038 label_num--;
10039 list->vl_symbol = gen_internal_sym ("LVUS");
10040 }
10041 else
10042 list->vl_symbol = list->ll_symbol;
10043 }
10044
10045 /* Generate a symbol for the list, but only if we really want to emit
10046 it as a list. */
10047
10048 static inline void
10049 maybe_gen_llsym (dw_loc_list_ref list)
10050 {
10051 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
10052 return;
10053
10054 gen_llsym (list);
10055 }
10056
10057 /* Determine whether or not to skip loc_list entry CURR. If SIZEP is
10058 NULL, don't consider size of the location expression. If we're not
10059 to skip it, and SIZEP is non-null, store the size of CURR->expr's
10060 representation in *SIZEP. */
10061
10062 static bool
10063 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL)
10064 {
10065 /* Don't output an entry that starts and ends at the same address. */
10066 if (strcmp (curr->begin, curr->end) == 0
10067 && curr->vbegin == curr->vend && !curr->force)
10068 return true;
10069
10070 if (!sizep)
10071 return false;
10072
10073 unsigned long size = size_of_locs (curr->expr);
10074
10075 /* If the expression is too large, drop it on the floor. We could
10076 perhaps put it into DW_TAG_dwarf_procedure and refer to that
10077 in the expression, but >= 64KB expressions for a single value
10078 in a single range are unlikely to be very useful. */
10079 if (dwarf_version < 5 && size > 0xffff)
10080 return true;
10081
10082 *sizep = size;
10083
10084 return false;
10085 }
10086
10087 /* Output a view pair loclist entry for CURR, if it requires one. */
10088
10089 static void
10090 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10091 {
10092 if (!dwarf2out_locviews_in_loclist ())
10093 return;
10094
10095 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10096 return;
10097
10098 #ifdef DW_LLE_view_pair
10099 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10100
10101 if (dwarf2out_as_locview_support)
10102 {
10103 if (ZERO_VIEW_P (curr->vbegin))
10104 dw2_asm_output_data_uleb128 (0, "Location view begin");
10105 else
10106 {
10107 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10108 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10109 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10110 }
10111
10112 if (ZERO_VIEW_P (curr->vend))
10113 dw2_asm_output_data_uleb128 (0, "Location view end");
10114 else
10115 {
10116 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10117 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10118 dw2_asm_output_symname_uleb128 (label, "Location view end");
10119 }
10120 }
10121 else
10122 {
10123 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10124 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10125 }
10126 #endif /* DW_LLE_view_pair */
10127
10128 return;
10129 }
10130
10131 /* Output the location list given to us. */
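/* For DWARF 5, each entry below starts with a one-byte DW_LLE_* opcode:
   DW_LLE_offset_pair is followed by two uleb128 offsets against the current
   base address, DW_LLE_startx_length by a uleb128 .debug_addr index and a
   uleb128 length, and DW_LLE_start_end by two absolute addresses; the
   location expression itself follows as a uleb128 length plus that many
   bytes.  Pre-DWARF-5 lists instead emit begin/end address pairs with a
   2-byte expression length.  */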
10132
10133 static void
10134 output_loc_list (dw_loc_list_ref list_head)
10135 {
10136 int vcount = 0, lcount = 0;
10137
10138 if (list_head->emitted)
10139 return;
10140 list_head->emitted = true;
10141
10142 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10143 {
10144 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10145
10146 for (dw_loc_list_ref curr = list_head; curr != NULL;
10147 curr = curr->dw_loc_next)
10148 {
10149 unsigned long size;
10150
10151 if (skip_loc_list_entry (curr, &size))
10152 continue;
10153
10154 vcount++;
10155
10156 /* ?? dwarf_split_debug_info? */
10157 if (dwarf2out_as_locview_support)
10158 {
10159 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10160
10161 if (!ZERO_VIEW_P (curr->vbegin))
10162 {
10163 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10164 dw2_asm_output_symname_uleb128 (label,
10165 "View list begin (%s)",
10166 list_head->vl_symbol);
10167 }
10168 else
10169 dw2_asm_output_data_uleb128 (0,
10170 "View list begin (%s)",
10171 list_head->vl_symbol);
10172
10173 if (!ZERO_VIEW_P (curr->vend))
10174 {
10175 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10176 dw2_asm_output_symname_uleb128 (label,
10177 "View list end (%s)",
10178 list_head->vl_symbol);
10179 }
10180 else
10181 dw2_asm_output_data_uleb128 (0,
10182 "View list end (%s)",
10183 list_head->vl_symbol);
10184 }
10185 else
10186 {
10187 dw2_asm_output_data_uleb128 (curr->vbegin,
10188 "View list begin (%s)",
10189 list_head->vl_symbol);
10190 dw2_asm_output_data_uleb128 (curr->vend,
10191 "View list end (%s)",
10192 list_head->vl_symbol);
10193 }
10194 }
10195 }
10196
10197 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10198
10199 const char *last_section = NULL;
10200 const char *base_label = NULL;
10201
10202 /* Walk the location list, and output each range + expression. */
10203 for (dw_loc_list_ref curr = list_head; curr != NULL;
10204 curr = curr->dw_loc_next)
10205 {
10206 unsigned long size;
10207
10208 /* Skip this entry? If we skip it here, we must skip it in the
10209 view list above as well. */
10210 if (skip_loc_list_entry (curr, &size))
10211 continue;
10212
10213 lcount++;
10214
10215 if (dwarf_version >= 5)
10216 {
10217 if (dwarf_split_debug_info)
10218 {
10219 dwarf2out_maybe_output_loclist_view_pair (curr);
10220 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has
10221 uleb128 index into .debug_addr and uleb128 length. */
10222 dw2_asm_output_data (1, DW_LLE_startx_length,
10223 "DW_LLE_startx_length (%s)",
10224 list_head->ll_symbol);
10225 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10226 "Location list range start index "
10227 "(%s)", curr->begin);
10228 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10229 For that case we probably need to emit DW_LLE_startx_endx,
10230 but we'd need 2 .debug_addr entries rather than just one. */
10231 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10232 "Location list length (%s)",
10233 list_head->ll_symbol);
10234 }
10235 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10236 {
10237 dwarf2out_maybe_output_loclist_view_pair (curr);
10238 /* If all code is in .text section, the base address is
10239 already provided by the CU attributes. Use
10240 DW_LLE_offset_pair where both addresses are uleb128 encoded
10241 offsets against that base. */
10242 dw2_asm_output_data (1, DW_LLE_offset_pair,
10243 "DW_LLE_offset_pair (%s)",
10244 list_head->ll_symbol);
10245 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10246 "Location list begin address (%s)",
10247 list_head->ll_symbol);
10248 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10249 "Location list end address (%s)",
10250 list_head->ll_symbol);
10251 }
10252 else if (HAVE_AS_LEB128)
10253 {
10254 /* Otherwise, find out how many consecutive entries could share
10255 the same base entry. If just one, emit DW_LLE_start_length,
10256 otherwise emit DW_LLE_base_address for the base address
10257 followed by a series of DW_LLE_offset_pair. */
10258 if (last_section == NULL || curr->section != last_section)
10259 {
10260 dw_loc_list_ref curr2;
10261 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10262 curr2 = curr2->dw_loc_next)
10263 {
10264 if (strcmp (curr2->begin, curr2->end) == 0
10265 && !curr2->force)
10266 continue;
10267 break;
10268 }
10269 if (curr2 == NULL || curr->section != curr2->section)
10270 last_section = NULL;
10271 else
10272 {
10273 last_section = curr->section;
10274 base_label = curr->begin;
10275 dw2_asm_output_data (1, DW_LLE_base_address,
10276 "DW_LLE_base_address (%s)",
10277 list_head->ll_symbol);
10278 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10279 "Base address (%s)",
10280 list_head->ll_symbol);
10281 }
10282 }
10283 /* Only one entry with the same base address. Use
10284 DW_LLE_start_length with absolute address and uleb128
10285 length. */
10286 if (last_section == NULL)
10287 {
10288 dwarf2out_maybe_output_loclist_view_pair (curr);
10289 dw2_asm_output_data (1, DW_LLE_start_length,
10290 "DW_LLE_start_length (%s)",
10291 list_head->ll_symbol);
10292 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10293 "Location list begin address (%s)",
10294 list_head->ll_symbol);
10295 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10296 "Location list length "
10297 "(%s)", list_head->ll_symbol);
10298 }
10299 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
10300 DW_LLE_base_address. */
10301 else
10302 {
10303 dwarf2out_maybe_output_loclist_view_pair (curr);
10304 dw2_asm_output_data (1, DW_LLE_offset_pair,
10305 "DW_LLE_offset_pair (%s)",
10306 list_head->ll_symbol);
10307 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10308 "Location list begin address "
10309 "(%s)", list_head->ll_symbol);
10310 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10311 "Location list end address "
10312 "(%s)", list_head->ll_symbol);
10313 }
10314 }
10315 /* The assembler does not support the .uleb128 directive. Emit
10316 DW_LLE_start_end with a pair of absolute addresses. */
10317 else
10318 {
10319 dwarf2out_maybe_output_loclist_view_pair (curr);
10320 dw2_asm_output_data (1, DW_LLE_start_end,
10321 "DW_LLE_start_end (%s)",
10322 list_head->ll_symbol);
10323 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10324 "Location list begin address (%s)",
10325 list_head->ll_symbol);
10326 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10327 "Location list end address (%s)",
10328 list_head->ll_symbol);
10329 }
10330 }
10331 else if (dwarf_split_debug_info)
10332 {
10333 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit index into .debug_addr
10334 and 4 byte length. */
10335 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10336 "Location list start/length entry (%s)",
10337 list_head->ll_symbol);
10338 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10339 "Location list range start index (%s)",
10340 curr->begin);
10341 /* The length field is 4 bytes. If we ever need to support
10342 an 8-byte length, we can add a new DW_LLE code or fall back
10343 to DW_LLE_GNU_start_end_entry. */
10344 dw2_asm_output_delta (4, curr->end, curr->begin,
10345 "Location list range length (%s)",
10346 list_head->ll_symbol);
10347 }
10348 else if (!have_multiple_function_sections)
10349 {
10350 /* Pair of relative addresses against start of text section. */
10351 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10352 "Location list begin address (%s)",
10353 list_head->ll_symbol);
10354 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10355 "Location list end address (%s)",
10356 list_head->ll_symbol);
10357 }
10358 else
10359 {
10360 /* Pair of absolute addresses. */
10361 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10362 "Location list begin address (%s)",
10363 list_head->ll_symbol);
10364 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10365 "Location list end address (%s)",
10366 list_head->ll_symbol);
10367 }
10368
10369 /* Output the block length for this list of location operations. */
10370 if (dwarf_version >= 5)
10371 dw2_asm_output_data_uleb128 (size, "Location expression size");
10372 else
10373 {
10374 gcc_assert (size <= 0xffff);
10375 dw2_asm_output_data (2, size, "Location expression size");
10376 }
10377
10378 output_loc_sequence (curr->expr, -1);
10379 }
10380
10381 /* And finally list termination. */
10382 if (dwarf_version >= 5)
10383 dw2_asm_output_data (1, DW_LLE_end_of_list,
10384 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10385 else if (dwarf_split_debug_info)
10386 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10387 "Location list terminator (%s)",
10388 list_head->ll_symbol);
10389 else
10390 {
10391 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10392 "Location list terminator begin (%s)",
10393 list_head->ll_symbol);
10394 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10395 "Location list terminator end (%s)",
10396 list_head->ll_symbol);
10397 }
10398
10399 gcc_assert (!list_head->vl_symbol
10400 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10401 }
10402
10403 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10404 section. Emit a relocated reference if val_entry is NULL, otherwise,
10405 emit an indirect reference. */
10406
10407 static void
10408 output_range_list_offset (dw_attr_node *a)
10409 {
10410 const char *name = dwarf_attr_name (a->dw_attr);
10411
10412 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10413 {
10414 if (dwarf_version >= 5)
10415 {
10416 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10417 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
10418 debug_ranges_section, "%s", name);
10419 }
10420 else
10421 {
10422 char *p = strchr (ranges_section_label, '\0');
10423 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10424 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10425 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
10426 debug_ranges_section, "%s", name);
10427 *p = '\0';
10428 }
10429 }
10430 else if (dwarf_version >= 5)
10431 {
10432 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10433 gcc_assert (rnglist_idx);
10434 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10435 }
10436 else
10437 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10438 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10439 "%s (offset from %s)", name, ranges_section_label);
10440 }
10441
10442 /* Output the offset into the debug_loc section. */
10443
10444 static void
10445 output_loc_list_offset (dw_attr_node *a)
10446 {
10447 char *sym = AT_loc_list (a)->ll_symbol;
10448
10449 gcc_assert (sym);
10450 if (!dwarf_split_debug_info)
10451 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10452 "%s", dwarf_attr_name (a->dw_attr));
10453 else if (dwarf_version >= 5)
10454 {
10455 gcc_assert (AT_loc_list (a)->num_assigned);
10456 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10457 dwarf_attr_name (a->dw_attr),
10458 sym);
10459 }
10460 else
10461 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10462 "%s", dwarf_attr_name (a->dw_attr));
10463 }
10464
10465 /* Output the offset into the debug_loc section. */
10466
10467 static void
10468 output_view_list_offset (dw_attr_node *a)
10469 {
10470 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10471
10472 gcc_assert (sym);
10473 if (dwarf_split_debug_info)
10474 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10475 "%s", dwarf_attr_name (a->dw_attr));
10476 else
10477 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10478 "%s", dwarf_attr_name (a->dw_attr));
10479 }
10480
10481 /* Output an attribute's index or value appropriately. */
10482
10483 static void
10484 output_attr_index_or_value (dw_attr_node *a)
10485 {
10486 const char *name = dwarf_attr_name (a->dw_attr);
10487
10488 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10489 {
10490 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10491 return;
10492 }
10493 switch (AT_class (a))
10494 {
10495 case dw_val_class_addr:
10496 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10497 break;
10498 case dw_val_class_high_pc:
10499 case dw_val_class_lbl_id:
10500 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10501 break;
10502 default:
10503 gcc_unreachable ();
10504 }
10505 }
10506
10507 /* Output a type signature. */
10508
10509 static inline void
10510 output_signature (const char *sig, const char *name)
10511 {
10512 int i;
10513
10514 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10515 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10516 }
10517
10518 /* Output a discriminant value. */
10519
10520 static inline void
10521 output_discr_value (dw_discr_value *discr_value, const char *name)
10522 {
10523 if (discr_value->pos)
10524 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10525 else
10526 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10527 }
10528
10529 /* Output the DIE and its attributes. Called recursively to generate
10530 the definitions of each child DIE. */
10531
10532 static void
10533 output_die (dw_die_ref die)
10534 {
10535 dw_attr_node *a;
10536 dw_die_ref c;
10537 unsigned long size;
10538 unsigned ix;
10539
10540 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10541 (unsigned long)die->die_offset,
10542 dwarf_tag_name (die->die_tag));
10543
10544 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10545 {
10546 const char *name = dwarf_attr_name (a->dw_attr);
10547
10548 switch (AT_class (a))
10549 {
10550 case dw_val_class_addr:
10551 output_attr_index_or_value (a);
10552 break;
10553
10554 case dw_val_class_offset:
10555 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10556 "%s", name);
10557 break;
10558
10559 case dw_val_class_range_list:
10560 output_range_list_offset (a);
10561 break;
10562
10563 case dw_val_class_loc:
10564 size = size_of_locs (AT_loc (a));
10565
10566 /* Output the block length for this list of location operations. */
10567 if (dwarf_version >= 4)
10568 dw2_asm_output_data_uleb128 (size, "%s", name);
10569 else
10570 dw2_asm_output_data (constant_size (size), size, "%s", name);
10571
10572 output_loc_sequence (AT_loc (a), -1);
10573 break;
10574
10575 case dw_val_class_const:
10576 /* ??? It would be slightly more efficient to use a scheme like the one
10577 used for unsigned constants below, but gdb 4.x does not sign
10578 extend. Gdb 5.x does sign extend. */
10579 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10580 break;
10581
10582 case dw_val_class_unsigned_const:
10583 {
10584 int csize = constant_size (AT_unsigned (a));
10585 if (dwarf_version == 3
10586 && a->dw_attr == DW_AT_data_member_location
10587 && csize >= 4)
10588 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10589 else
10590 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10591 }
10592 break;
10593
10594 case dw_val_class_symview:
10595 {
10596 int vsize;
10597 if (symview_upper_bound <= 0xff)
10598 vsize = 1;
10599 else if (symview_upper_bound <= 0xffff)
10600 vsize = 2;
10601 else if (symview_upper_bound <= 0xffffffff)
10602 vsize = 4;
10603 else
10604 vsize = 8;
10605 dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
10606 "%s", name);
10607 }
10608 break;
10609
10610 case dw_val_class_const_implicit:
10611 if (flag_debug_asm)
10612 fprintf (asm_out_file, "\t\t\t%s %s ("
10613 HOST_WIDE_INT_PRINT_DEC ")\n",
10614 ASM_COMMENT_START, name, AT_int (a));
10615 break;
10616
10617 case dw_val_class_unsigned_const_implicit:
10618 if (flag_debug_asm)
10619 fprintf (asm_out_file, "\t\t\t%s %s ("
10620 HOST_WIDE_INT_PRINT_HEX ")\n",
10621 ASM_COMMENT_START, name, AT_unsigned (a));
10622 break;
10623
10624 case dw_val_class_const_double:
10625 {
10626 unsigned HOST_WIDE_INT first, second;
10627
10628 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10629 dw2_asm_output_data (1,
10630 HOST_BITS_PER_DOUBLE_INT
10631 / HOST_BITS_PER_CHAR,
10632 NULL);
10633
10634 if (WORDS_BIG_ENDIAN)
10635 {
10636 first = a->dw_attr_val.v.val_double.high;
10637 second = a->dw_attr_val.v.val_double.low;
10638 }
10639 else
10640 {
10641 first = a->dw_attr_val.v.val_double.low;
10642 second = a->dw_attr_val.v.val_double.high;
10643 }
10644
10645 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10646 first, "%s", name);
10647 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10648 second, NULL);
10649 }
10650 break;
10651
10652 case dw_val_class_wide_int:
10653 {
10654 int i;
10655 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10656 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10657 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10658 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10659 * l, NULL);
10660
10661 if (WORDS_BIG_ENDIAN)
10662 for (i = len - 1; i >= 0; --i)
10663 {
10664 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10665 "%s", name);
10666 name = "";
10667 }
10668 else
10669 for (i = 0; i < len; ++i)
10670 {
10671 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10672 "%s", name);
10673 name = "";
10674 }
10675 }
10676 break;
10677
10678 case dw_val_class_vec:
10679 {
10680 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10681 unsigned int len = a->dw_attr_val.v.val_vec.length;
10682 unsigned int i;
10683 unsigned char *p;
10684
10685 dw2_asm_output_data (constant_size (len * elt_size),
10686 len * elt_size, "%s", name);
10687 if (elt_size > sizeof (HOST_WIDE_INT))
10688 {
10689 elt_size /= 2;
10690 len *= 2;
10691 }
10692 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10693 i < len;
10694 i++, p += elt_size)
10695 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10696 "fp or vector constant word %u", i);
10697 break;
10698 }
10699
10700 case dw_val_class_flag:
10701 if (dwarf_version >= 4)
10702 {
10703 /* Currently all add_AT_flag calls pass in 1 as last argument,
10704 so DW_FORM_flag_present can be used. If that ever changes,
10705 we'll need to use DW_FORM_flag and have some optimization
10706 in build_abbrev_table that will change those to
10707 DW_FORM_flag_present if it is set to 1 in all DIEs using
10708 the same abbrev entry. */
10709 gcc_assert (AT_flag (a) == 1);
10710 if (flag_debug_asm)
10711 fprintf (asm_out_file, "\t\t\t%s %s\n",
10712 ASM_COMMENT_START, name);
10713 break;
10714 }
10715 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10716 break;
10717
10718 case dw_val_class_loc_list:
10719 output_loc_list_offset (a);
10720 break;
10721
10722 case dw_val_class_view_list:
10723 output_view_list_offset (a);
10724 break;
10725
10726 case dw_val_class_die_ref:
10727 if (AT_ref_external (a))
10728 {
10729 if (AT_ref (a)->comdat_type_p)
10730 {
10731 comdat_type_node *type_node
10732 = AT_ref (a)->die_id.die_type_node;
10733
10734 gcc_assert (type_node);
10735 output_signature (type_node->signature, name);
10736 }
10737 else
10738 {
10739 const char *sym = AT_ref (a)->die_id.die_symbol;
10740 int size;
10741
10742 gcc_assert (sym);
10743 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10744 length, whereas in DWARF3 it's always sized as an
10745 offset. */
10746 if (dwarf_version == 2)
10747 size = DWARF2_ADDR_SIZE;
10748 else
10749 size = DWARF_OFFSET_SIZE;
10750 /* ??? We cannot unconditionally output die_offset if
10751 non-zero - others might create references to those
10752 DIEs via symbols.
10753 And we do not clear its DIE offset after outputting it
10754 (and the label refers to the actual DIE, not to the
10755 DWARF CU unit header, which is what the label would have to
10756 denote for emitting label + offset to be correct).
10757 ??? This is the reason for the with_offset flag. */
10758 if (AT_ref (a)->with_offset)
10759 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10760 debug_info_section, "%s", name);
10761 else
10762 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10763 name);
10764 }
10765 }
10766 else
10767 {
10768 gcc_assert (AT_ref (a)->die_offset);
10769 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10770 "%s", name);
10771 }
10772 break;
10773
10774 case dw_val_class_fde_ref:
10775 {
10776 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10777
10778 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10779 a->dw_attr_val.v.val_fde_index * 2);
10780 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10781 "%s", name);
10782 }
10783 break;
10784
10785 case dw_val_class_vms_delta:
10786 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10787 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10788 AT_vms_delta2 (a), AT_vms_delta1 (a),
10789 "%s", name);
10790 #else
10791 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10792 AT_vms_delta2 (a), AT_vms_delta1 (a),
10793 "%s", name);
10794 #endif
10795 break;
10796
10797 case dw_val_class_lbl_id:
10798 output_attr_index_or_value (a);
10799 break;
10800
10801 case dw_val_class_lineptr:
10802 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10803 debug_line_section, "%s", name);
10804 break;
10805
10806 case dw_val_class_macptr:
10807 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10808 debug_macinfo_section, "%s", name);
10809 break;
10810
10811 case dw_val_class_loclistsptr:
10812 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10813 debug_loc_section, "%s", name);
10814 break;
10815
10816 case dw_val_class_str:
10817 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10818 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10819 a->dw_attr_val.v.val_str->label,
10820 debug_str_section,
10821 "%s: \"%s\"", name, AT_string (a));
10822 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10823 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10824 a->dw_attr_val.v.val_str->label,
10825 debug_line_str_section,
10826 "%s: \"%s\"", name, AT_string (a));
10827 else if (a->dw_attr_val.v.val_str->form == dwarf_FORM (DW_FORM_strx))
10828 dw2_asm_output_data_uleb128 (AT_index (a),
10829 "%s: \"%s\"", name, AT_string (a));
10830 else
10831 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10832 break;
10833
10834 case dw_val_class_file:
10835 {
10836 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10837
10838 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10839 a->dw_attr_val.v.val_file->filename);
10840 break;
10841 }
10842
10843 case dw_val_class_file_implicit:
10844 if (flag_debug_asm)
10845 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10846 ASM_COMMENT_START, name,
10847 maybe_emit_file (a->dw_attr_val.v.val_file),
10848 a->dw_attr_val.v.val_file->filename);
10849 break;
10850
10851 case dw_val_class_data8:
10852 {
10853 int i;
10854
10855 for (i = 0; i < 8; i++)
10856 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10857 i == 0 ? "%s" : NULL, name);
10858 break;
10859 }
10860
10861 case dw_val_class_high_pc:
10862 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10863 get_AT_low_pc (die), "DW_AT_high_pc");
10864 break;
10865
10866 case dw_val_class_discr_value:
10867 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10868 break;
10869
10870 case dw_val_class_discr_list:
10871 {
10872 dw_discr_list_ref list = AT_discr_list (a);
10873 const int size = size_of_discr_list (list);
10874
10875 /* This is a block, so output its length first. */
10876 dw2_asm_output_data (constant_size (size), size,
10877 "%s: block size", name);
10878
10879 for (; list != NULL; list = list->dw_discr_next)
10880 {
10881 /* One byte for the discriminant value descriptor, and then as
10882 many LEB128 numbers as required. */
10883 if (list->dw_discr_range)
10884 dw2_asm_output_data (1, DW_DSC_range,
10885 "%s: DW_DSC_range", name);
10886 else
10887 dw2_asm_output_data (1, DW_DSC_label,
10888 "%s: DW_DSC_label", name);
10889
10890 output_discr_value (&list->dw_discr_lower_bound, name);
10891 if (list->dw_discr_range)
10892 output_discr_value (&list->dw_discr_upper_bound, name);
10893 }
10894 break;
10895 }
10896
10897 default:
10898 gcc_unreachable ();
10899 }
10900 }
10901
10902 FOR_EACH_CHILD (die, c, output_die (c));
10903
10904 /* Add null byte to terminate sibling list. */
10905 if (die->die_child != NULL)
10906 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10907 (unsigned long) die->die_offset);
10908 }
10909
10910 /* Output the dwarf version number. */
10911
10912 static void
10913 output_dwarf_version ()
10914 {
10915 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
10916 views in loclist. That will change eventually. */
10917 if (dwarf_version == 6)
10918 {
10919 static bool once;
10920 if (!once)
10921 {
10922 warning (0,
10923 "-gdwarf-6 is output as version 5 with incompatibilities");
10924 once = true;
10925 }
10926 dw2_asm_output_data (2, 5, "DWARF version number");
10927 }
10928 else
10929 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10930 }
10931
10932 /* Output the compilation unit that appears at the beginning of the
10933 .debug_info section, and precedes the DIE descriptions. */
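/* Note that the field order differs between versions: DWARF 5 headers are
   unit_length, version, unit_type, address_size, debug_abbrev_offset,
   while DWARF 2-4 headers are unit_length, version, debug_abbrev_offset,
   address_size - hence the pointer size is emitted in two different
   places below.  */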
10934
10935 static void
10936 output_compilation_unit_header (enum dwarf_unit_type ut)
10937 {
10938 if (!XCOFF_DEBUGGING_INFO)
10939 {
10940 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10941 dw2_asm_output_data (4, 0xffffffff,
10942 "Initial length escape value indicating 64-bit DWARF extension");
10943 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10944 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
10945 "Length of Compilation Unit Info");
10946 }
10947
10948 output_dwarf_version ();
10949 if (dwarf_version >= 5)
10950 {
10951 const char *name;
10952 switch (ut)
10953 {
10954 case DW_UT_compile: name = "DW_UT_compile"; break;
10955 case DW_UT_type: name = "DW_UT_type"; break;
10956 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
10957 case DW_UT_split_type: name = "DW_UT_split_type"; break;
10958 default: gcc_unreachable ();
10959 }
10960 dw2_asm_output_data (1, ut, "%s", name);
10961 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10962 }
10963 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
10964 debug_abbrev_section,
10965 "Offset Into Abbrev. Section");
10966 if (dwarf_version < 5)
10967 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10968 }
10969
10970 /* Output the compilation unit DIE and its children. */
10971
10972 static void
10973 output_comp_unit (dw_die_ref die, int output_if_empty,
10974 const unsigned char *dwo_id)
10975 {
10976 const char *secname, *oldsym;
10977 char *tmp;
10978
10979 /* Unless we are outputting the main CU, we may throw away empty ones. */
10980 if (!output_if_empty && die->die_child == NULL)
10981 return;
10982
10983 /* Even if there are no children of this DIE, we must output the information
10984 about the compilation unit. Otherwise, on an empty translation unit, we
10985 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
10986 will then complain when examining the file. First mark all the DIEs in
10987 this CU so we know which get local refs. */
10988 mark_dies (die);
10989
10990 external_ref_hash_type *extern_map = optimize_external_refs (die);
10991
10992 /* For now, optimize only the main CU; in order to optimize the rest
10993 we'd need to see all of them earlier. Leave the rest for post-linking
10994 tools like DWZ. */
10995 if (die == comp_unit_die ())
10996 abbrev_opt_start = vec_safe_length (abbrev_die_table);
10997
10998 build_abbrev_table (die, extern_map);
10999
11000 optimize_abbrev_table ();
11001
11002 delete extern_map;
11003
11004 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11005 next_die_offset = (dwo_id
11006 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11007 : DWARF_COMPILE_UNIT_HEADER_SIZE);
11008 calc_die_sizes (die);
11009
11010 oldsym = die->die_id.die_symbol;
11011 if (oldsym && die->comdat_type_p)
11012 {
11013 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
11014
11015 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
11016 secname = tmp;
11017 die->die_id.die_symbol = NULL;
11018 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11019 }
11020 else
11021 {
11022 switch_to_section (debug_info_section);
11023 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
11024 info_section_emitted = true;
11025 }
11026
11027 /* For LTO cross unit DIE refs we want a symbol at the start of the
11028 debug info section, not on the CU DIE. */
11029 if ((flag_generate_lto || flag_generate_offload) && oldsym)
11030 {
11031 /* ??? No way to get visibility assembled without a decl. */
11032 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
11033 get_identifier (oldsym), char_type_node);
11034 TREE_PUBLIC (decl) = true;
11035 TREE_STATIC (decl) = true;
11036 DECL_ARTIFICIAL (decl) = true;
11037 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
11038 DECL_VISIBILITY_SPECIFIED (decl) = true;
11039 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
11040 #ifdef ASM_WEAKEN_LABEL
11041 /* We prefer a .weak because that handles duplicates from duplicate
11042 archive members in a graceful way. */
11043 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
11044 #else
11045 targetm.asm_out.globalize_label (asm_out_file, oldsym);
11046 #endif
11047 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
11048 }
11049
11050 /* Output debugging information. */
11051 output_compilation_unit_header (dwo_id
11052 ? DW_UT_split_compile : DW_UT_compile);
11053 if (dwarf_version >= 5)
11054 {
11055 if (dwo_id != NULL)
11056 for (int i = 0; i < 8; i++)
11057 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11058 }
11059 output_die (die);
11060
11061 /* Leave the marks on the main CU, so we can check them in
11062 output_pubnames. */
11063 if (oldsym)
11064 {
11065 unmark_dies (die);
11066 die->die_id.die_symbol = oldsym;
11067 }
11068 }
11069
11070 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
11071 and .debug_pubtypes. This is configured per-target, but can be
11072 overridden by the -gpubnames or -gno-pubnames options. */
11073
11074 static inline bool
11075 want_pubnames (void)
11076 {
11077 if (debug_info_level <= DINFO_LEVEL_TERSE)
11078 return false;
11079 if (debug_generate_pub_sections != -1)
11080 return debug_generate_pub_sections;
11081 return targetm.want_debug_pub_sections;
11082 }
11083
11084 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
11085
11086 static void
11087 add_AT_pubnames (dw_die_ref die)
11088 {
11089 if (want_pubnames ())
11090 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
11091 }
11092
11093 /* Add a string attribute value to a skeleton DIE. */
11094
11095 static inline void
11096 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
11097 const char *str)
11098 {
11099 dw_attr_node attr;
11100 struct indirect_string_node *node;
11101
11102 if (! skeleton_debug_str_hash)
11103 skeleton_debug_str_hash
11104 = hash_table<indirect_string_hasher>::create_ggc (10);
11105
11106 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11107 find_string_form (node);
11108 if (node->form == dwarf_FORM (DW_FORM_strx))
11109 node->form = DW_FORM_strp;
11110
11111 attr.dw_attr = attr_kind;
11112 attr.dw_attr_val.val_class = dw_val_class_str;
11113 attr.dw_attr_val.val_entry = NULL;
11114 attr.dw_attr_val.v.val_str = node;
11115 add_dwarf_attr (die, &attr);
11116 }
11117
11118 /* Helper function to generate top-level dies for skeleton debug_info and
11119 debug_types. */
11120
11121 static void
11122 add_top_level_skeleton_die_attrs (dw_die_ref die)
11123 {
11124 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11125 const char *comp_dir = comp_dir_string ();
11126
11127 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11128 if (comp_dir != NULL)
11129 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11130 add_AT_pubnames (die);
11131 add_AT_lineptr (die, dwarf_AT (DW_AT_addr_base), debug_addr_section_label);
11132 }
11133
11134 /* Output skeleton debug sections that point to the dwo file. */
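/* A sketch of the split-DWARF layout this produces: the object file keeps
   only this skeleton CU (DW_AT_dwo_name, DW_AT_comp_dir, the pubnames flag
   and the address table base) while the full DIEs live in the .dwo file;
   the 8-byte DWO id (emitted in the header for DWARF 5) ties the two
   together.  */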
11135
11136 static void
11137 output_skeleton_debug_sections (dw_die_ref comp_unit,
11138 const unsigned char *dwo_id)
11139 {
11140 /* These attributes will be found in the full debug_info section. */
11141 remove_AT (comp_unit, DW_AT_producer);
11142 remove_AT (comp_unit, DW_AT_language);
11143
11144 switch_to_section (debug_skeleton_info_section);
11145 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11146
11147 /* Produce the skeleton compilation-unit header. This one differs enough from
11148 a normal CU header that it's better not to call
11149 output_compilation_unit_header. */
11150 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11151 dw2_asm_output_data (4, 0xffffffff,
11152 "Initial length escape value indicating 64-bit "
11153 "DWARF extension");
11154
11155 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11156 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11157 - DWARF_INITIAL_LENGTH_SIZE
11158 + size_of_die (comp_unit),
11159 "Length of Compilation Unit Info");
11160 output_dwarf_version ();
11161 if (dwarf_version >= 5)
11162 {
11163 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11164 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11165 }
11166 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
11167 debug_skeleton_abbrev_section,
11168 "Offset Into Abbrev. Section");
11169 if (dwarf_version < 5)
11170 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11171 else
11172 for (int i = 0; i < 8; i++)
11173 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11174
11175 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11176 output_die (comp_unit);
11177
11178 /* Build the skeleton debug_abbrev section. */
11179 switch_to_section (debug_skeleton_abbrev_section);
11180 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11181
11182 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11183
11184 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11185 }
11186
11187 /* Output a comdat type unit DIE and its children. */
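/* The type unit emitted below is a CU header followed by the 8-byte type
   signature and the offset of the type DIE within the unit; the whole unit
   goes into a COMDAT/linkonce section keyed on the signature so duplicate
   instantiations can be merged at link time.  */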
11188
11189 static void
11190 output_comdat_type_unit (comdat_type_node *node)
11191 {
11192 const char *secname;
11193 char *tmp;
11194 int i;
11195 #if defined (OBJECT_FORMAT_ELF)
11196 tree comdat_key;
11197 #endif
11198
11199 /* First mark all the DIEs in this CU so we know which get local refs. */
11200 mark_dies (node->root_die);
11201
11202 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11203
11204 build_abbrev_table (node->root_die, extern_map);
11205
11206 delete extern_map;
11207 extern_map = NULL;
11208
11209 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11210 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11211 calc_die_sizes (node->root_die);
11212
11213 #if defined (OBJECT_FORMAT_ELF)
11214 if (dwarf_version >= 5)
11215 {
11216 if (!dwarf_split_debug_info)
11217 secname = ".debug_info";
11218 else
11219 secname = ".debug_info.dwo";
11220 }
11221 else if (!dwarf_split_debug_info)
11222 secname = ".debug_types";
11223 else
11224 secname = ".debug_types.dwo";
11225
11226 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11227 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11228 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11229 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
11230 comdat_key = get_identifier (tmp);
11231 targetm.asm_out.named_section (secname,
11232 SECTION_DEBUG | SECTION_LINKONCE,
11233 comdat_key);
11234 #else
11235 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11236 sprintf (tmp, (dwarf_version >= 5
11237 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11238 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11239 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11240 secname = tmp;
11241 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11242 #endif
11243
11244 /* Output debugging information. */
11245 output_compilation_unit_header (dwarf_split_debug_info
11246 ? DW_UT_split_type : DW_UT_type);
11247 output_signature (node->signature, "Type Signature");
11248 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
11249 "Offset to Type DIE");
11250 output_die (node->root_die);
11251
11252 unmark_dies (node->root_die);
11253 }
11254
11255 /* Return the DWARF2/3 pubname associated with a decl. */
11256
11257 static const char *
11258 dwarf2_name (tree decl, int scope)
11259 {
11260 if (DECL_NAMELESS (decl))
11261 return NULL;
11262 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11263 }
11264
11265 /* Add a new entry to .debug_pubnames if appropriate. */
11266
11267 static void
11268 add_pubname_string (const char *str, dw_die_ref die)
11269 {
11270 pubname_entry e;
11271
11272 e.die = die;
11273 e.name = xstrdup (str);
11274 vec_safe_push (pubname_table, e);
11275 }
11276
11277 static void
11278 add_pubname (tree decl, dw_die_ref die)
11279 {
11280 if (!want_pubnames ())
11281 return;
11282
11283 /* Don't add items to the table when we expect that the consumer will have
11284 just read the enclosing die. For example, if the consumer is looking at a
11285 class_member, it will either be inside the class already, or will have just
11286 looked up the class to find the member. Either way, searching the class is
11287 faster than searching the index. */
11288 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11289 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11290 {
11291 const char *name = dwarf2_name (decl, 1);
11292
11293 if (name)
11294 add_pubname_string (name, die);
11295 }
11296 }
11297
11298 /* Add an enumerator to the pubnames section. */
11299
11300 static void
11301 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11302 {
11303 pubname_entry e;
11304
11305 gcc_assert (scope_name);
11306 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11307 e.die = die;
11308 vec_safe_push (pubname_table, e);
11309 }
11310
11311 /* Add a new entry to .debug_pubtypes if appropriate. */
11312
11313 static void
11314 add_pubtype (tree decl, dw_die_ref die)
11315 {
11316 pubname_entry e;
11317
11318 if (!want_pubnames ())
11319 return;
11320
11321 if ((TREE_PUBLIC (decl)
11322 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11323 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11324 {
11325 tree scope = NULL;
11326 const char *scope_name = "";
11327 const char *sep = is_cxx () ? "::" : ".";
11328 const char *name;
11329
11330 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11331 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11332 {
11333 scope_name = lang_hooks.dwarf_name (scope, 1);
11334 if (scope_name != NULL && scope_name[0] != '\0')
11335 scope_name = concat (scope_name, sep, NULL);
11336 else
11337 scope_name = "";
11338 }
11339
11340 if (TYPE_P (decl))
11341 name = type_tag (decl);
11342 else
11343 name = lang_hooks.dwarf_name (decl, 1);
11344
11345 /* If we don't have a name for the type, there's no point in adding
11346 it to the table. */
11347 if (name != NULL && name[0] != '\0')
11348 {
11349 e.die = die;
11350 e.name = concat (scope_name, name, NULL);
11351 vec_safe_push (pubtype_table, e);
11352 }
11353
11354 /* Although it might be more consistent to add the pubinfo for the
11355 enumerators as their dies are created, they should only be added if the
11356 enum type meets the criteria above. So rather than re-check the parent
11357 enum type whenever an enumerator die is created, just output them all
11358 here. This isn't protected by the name conditional because anonymous
11359 enums don't have names. */
11360 if (die->die_tag == DW_TAG_enumeration_type)
11361 {
11362 dw_die_ref c;
11363
11364 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11365 }
11366 }
11367 }
11368
11369 /* Output a single entry in the pubnames table. */
11370
11371 static void
11372 output_pubname (dw_offset die_offset, pubname_entry *entry)
11373 {
11374 dw_die_ref die = entry->die;
11375 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11376
11377 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
11378
11379 if (debug_generate_pub_sections == 2)
11380 {
11381 /* This logic follows gdb's method for determining the value of the flag
11382 byte. */
11383 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11384 switch (die->die_tag)
11385 {
11386 case DW_TAG_typedef:
11387 case DW_TAG_base_type:
11388 case DW_TAG_subrange_type:
11389 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11390 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11391 break;
11392 case DW_TAG_enumerator:
11393 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11394 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11395 if (!is_cxx ())
11396 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11397 break;
11398 case DW_TAG_subprogram:
11399 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11400 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11401 if (!is_ada ())
11402 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11403 break;
11404 case DW_TAG_constant:
11405 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11406 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11407 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11408 break;
11409 case DW_TAG_variable:
11410 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11411 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11412 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11413 break;
11414 case DW_TAG_namespace:
11415 case DW_TAG_imported_declaration:
11416 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11417 break;
11418 case DW_TAG_class_type:
11419 case DW_TAG_interface_type:
11420 case DW_TAG_structure_type:
11421 case DW_TAG_union_type:
11422 case DW_TAG_enumeration_type:
11423 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11424 if (!is_cxx ())
11425 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11426 break;
11427 default:
11428 /* An unusual tag. Leave the flag-byte empty. */
11429 break;
11430 }
11431 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11432 "GDB-index flags");
11433 }
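/* The byte just emitted is the top byte of the 32-bit gdb-index
   attribute word: shifting FLAGS right by GDB_INDEX_CU_BITSIZE keeps
   only the symbol kind and the static bit, which is what tools that
   build a .gdb_index from the pub sections expect to find here.  */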
11434
11435 dw2_asm_output_nstring (entry->name, -1, "external name");
11436 }
11437
11438
11439 /* Output the public names table used to speed up access to externally
11440 visible names; or the public types table used to find type definitions. */
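/* Roughly, the layout emitted below is: a length field, a 2-byte version
   (always 2), the offset and length of the referenced CU, then
   (DIE offset, name) pairs - optionally preceded by a GDB-index flag
   byte - terminated by a zero offset.  */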
11441
11442 static void
11443 output_pubnames (vec<pubname_entry, va_gc> *names)
11444 {
11445 unsigned i;
11446 unsigned long pubnames_length = size_of_pubnames (names);
11447 pubname_entry *pub;
11448
11449 if (!XCOFF_DEBUGGING_INFO)
11450 {
11451 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11452 dw2_asm_output_data (4, 0xffffffff,
11453 "Initial length escape value indicating 64-bit DWARF extension");
11454 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
11455 "Pub Info Length");
11456 }
11457
11458 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11459 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11460
11461 if (dwarf_split_debug_info)
11462 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11463 debug_skeleton_info_section,
11464 "Offset of Compilation Unit Info");
11465 else
11466 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11467 debug_info_section,
11468 "Offset of Compilation Unit Info");
11469 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
11470 "Compilation Unit Length");
11471
11472 FOR_EACH_VEC_ELT (*names, i, pub)
11473 {
11474 if (include_pubname_in_output (names, pub))
11475 {
11476 dw_offset die_offset = pub->die->die_offset;
11477
11478 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11479 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11480 gcc_assert (pub->die->die_mark);
11481
11482 /* If we're putting types in their own .debug_types sections,
11483 the .debug_pubtypes table will still point to the compile
11484 unit (not the type unit), so we want to use the offset of
11485 the skeleton DIE (if there is one). */
11486 if (pub->die->comdat_type_p && names == pubtype_table)
11487 {
11488 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11489
11490 if (type_node != NULL)
11491 die_offset = (type_node->skeleton_die != NULL
11492 ? type_node->skeleton_die->die_offset
11493 : comp_unit_die ()->die_offset);
11494 }
11495
11496 output_pubname (die_offset, pub);
11497 }
11498 }
11499
11500 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
11501 }
11502
11503 /* Output public names and types tables if necessary. */
11504
11505 static void
11506 output_pubtables (void)
11507 {
11508 if (!want_pubnames () || !info_section_emitted)
11509 return;
11510
11511 switch_to_section (debug_pubnames_section);
11512 output_pubnames (pubname_table);
11513 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11514 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11515 simply won't look for the section. */
11516 switch_to_section (debug_pubtypes_section);
11517 output_pubnames (pubtype_table);
11518 }
11519
11520
11521 /* Output the information that goes into the .debug_aranges table.
11522 Namely, define the beginning and ending address range of the
11523 text section generated for this compilation unit. */
11524
11525 static void
11526 output_aranges (void)
11527 {
11528 unsigned i;
11529 unsigned long aranges_length = size_of_aranges ();
11530
11531 if (!XCOFF_DEBUGGING_INFO)
11532 {
11533 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11534 dw2_asm_output_data (4, 0xffffffff,
11535 "Initial length escape value indicating 64-bit DWARF extension");
11536 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
11537 "Length of Address Ranges Info");
11538 }
11539
11540 /* Version number for aranges is still 2, even up to DWARF5. */
11541 dw2_asm_output_data (2, 2, "DWARF aranges version");
11542 if (dwarf_split_debug_info)
11543 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11544 debug_skeleton_info_section,
11545 "Offset of Compilation Unit Info");
11546 else
11547 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11548 debug_info_section,
11549 "Offset of Compilation Unit Info");
11550 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11551 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11552
11553 /* We need to align to twice the pointer size here. */
11554 if (DWARF_ARANGES_PAD_SIZE)
11555 {
11556 /* Pad using 2 byte words so that the padding is correct for any
11557 pointer size. */
11558 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11559 2 * DWARF2_ADDR_SIZE);
11560 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11561 dw2_asm_output_data (2, 0, NULL);
11562 }
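/* For example, with 32-bit DWARF and 8-byte addresses the header so far
   is 4 + 2 + 4 + 1 + 1 = 12 bytes, so four bytes of padding are emitted
   here to make the address pairs start on a 16-byte boundary.  */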
11563
11564 /* It is necessary not to output these entries if the sections were
11565 not used; in that case the length will be 0 and the address may
11566 end up as 0 if the section is discarded by ld --gc-sections,
11567 leaving an invalid (0, 0) entry that can be confused with the
11568 terminator. */
11569 if (text_section_used)
11570 {
11571 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11572 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11573 text_section_label, "Length");
11574 }
11575 if (cold_text_section_used)
11576 {
11577 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11578 "Address");
11579 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11580 cold_text_section_label, "Length");
11581 }
11582
11583 if (have_multiple_function_sections)
11584 {
11585 unsigned fde_idx;
11586 dw_fde_ref fde;
11587
11588 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11589 {
11590 if (DECL_IGNORED_P (fde->decl))
11591 continue;
11592 if (!fde->in_std_section)
11593 {
11594 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11595 "Address");
11596 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11597 fde->dw_fde_begin, "Length");
11598 }
11599 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11600 {
11601 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11602 "Address");
11603 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11604 fde->dw_fde_second_begin, "Length");
11605 }
11606 }
11607 }
11608
11609 /* Output the terminator words. */
11610 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11611 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11612 }
11613
11614 /* Add a new entry to .debug_ranges. Return its index into
11615 ranges_table vector. */
11616
11617 static unsigned int
11618 add_ranges_num (int num, bool maybe_new_sec)
11619 {
11620 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11621 vec_safe_push (ranges_table, r);
11622 return vec_safe_length (ranges_table) - 1;
11623 }
11624
11625 /* Add a new entry to .debug_ranges corresponding to a block, or a
11626 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11627 this entry might be in a different section from previous range. */
11628
11629 static unsigned int
11630 add_ranges (const_tree block, bool maybe_new_sec)
11631 {
11632 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11633 }
11634
11635 /* Note that (*ranges_table)[offset] is either the head of a rnglist
11636 chain, or a middle entry of a chain that will be directly referred to. */
11637
11638 static void
11639 note_rnglist_head (unsigned int offset)
11640 {
11641 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11642 return;
11643 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11644 }
11645
11646 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11647 When using dwarf_split_debug_info, address attributes in dies destined
11648 for the final executable should be direct references--setting the
11649 parameter force_direct ensures this behavior. */
11650
11651 static void
11652 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11653 bool *added, bool force_direct)
11654 {
11655 unsigned int in_use = vec_safe_length (ranges_by_label);
11656 unsigned int offset;
11657 dw_ranges_by_label rbl = { begin, end };
11658 vec_safe_push (ranges_by_label, rbl);
11659 offset = add_ranges_num (-(int)in_use - 1, true);
11660 if (!*added)
11661 {
11662 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11663 *added = true;
11664 note_rnglist_head (offset);
11665 }
11666 }
11667
11668 /* Emit .debug_ranges section. */
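/* In outline: each range list is a sequence of (begin, end) address pairs.
   With a single text section the pairs are emitted as deltas from the text
   section label (the CU base address, DW_AT_low_pc); otherwise absolute
   addresses are used.  A (0, 0) pair terminates each list.  */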
11669
11670 static void
11671 output_ranges (void)
11672 {
11673 unsigned i;
11674 static const char *const start_fmt = "Offset %#x";
11675 const char *fmt = start_fmt;
11676 dw_ranges *r;
11677
11678 switch_to_section (debug_ranges_section);
11679 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11680 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11681 {
11682 int block_num = r->num;
11683
11684 if (block_num > 0)
11685 {
11686 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11687 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11688
11689 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11690 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11691
11692 /* If all code is in the text section, then the compilation
11693 unit base address defaults to DW_AT_low_pc, which is the
11694 base of the text section. */
11695 if (!have_multiple_function_sections)
11696 {
11697 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11698 text_section_label,
11699 fmt, i * 2 * DWARF2_ADDR_SIZE);
11700 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11701 text_section_label, NULL);
11702 }
11703
11704 /* Otherwise, the compilation unit base address is zero,
11705 which allows us to use absolute addresses, and not worry
11706 about whether the target supports cross-section
11707 arithmetic. */
11708 else
11709 {
11710 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11711 fmt, i * 2 * DWARF2_ADDR_SIZE);
11712 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11713 }
11714
11715 fmt = NULL;
11716 }
11717
11718 /* Negative block_num stands for an index into ranges_by_label. */
11719 else if (block_num < 0)
11720 {
11721 int lab_idx = - block_num - 1;
11722
11723 if (!have_multiple_function_sections)
11724 {
11725 gcc_unreachable ();
11726 #if 0
11727 /* If we ever use add_ranges_by_labels () for a single
11728 function section, all we have to do is to take out
11729 the #if 0 above. */
11730 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11731 (*ranges_by_label)[lab_idx].begin,
11732 text_section_label,
11733 fmt, i * 2 * DWARF2_ADDR_SIZE);
11734 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11735 (*ranges_by_label)[lab_idx].end,
11736 text_section_label, NULL);
11737 #endif
11738 }
11739 else
11740 {
11741 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11742 (*ranges_by_label)[lab_idx].begin,
11743 fmt, i * 2 * DWARF2_ADDR_SIZE);
11744 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11745 (*ranges_by_label)[lab_idx].end,
11746 NULL);
11747 }
11748 }
11749 else
11750 {
11751 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11752 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11753 fmt = start_fmt;
11754 }
11755 }
11756 }
11757
11758 /* Non-zero if .debug_line_str should be used for .debug_line section
11759 strings or strings that are likely shareable with those. */
11760 #define DWARF5_USE_DEBUG_LINE_STR \
11761 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11762 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11763 /* FIXME: there is no .debug_line_str.dwo section, \
11764 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11765 && !dwarf_split_debug_info)
11766
11767 /* Assign .debug_rnglists indexes. */
11768
11769 static void
11770 index_rnglists (void)
11771 {
11772 unsigned i;
11773 dw_ranges *r;
11774
11775 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11776 if (r->label)
11777 r->idx = rnglist_idx++;
11778 }
11779
11780 /* Emit .debug_rnglists section. */
11781
11782 static void
11783 output_rnglists (unsigned generation)
11784 {
11785 unsigned i;
11786 dw_ranges *r;
11787 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11788 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11789 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11790
11791 switch_to_section (debug_ranges_section);
11792 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11793 /* There are up to 4 unique ranges labels per generation.
11794 See also init_sections_and_labels. */
11795 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11796 2 + generation * 4);
11797 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11798 3 + generation * 4);
11799 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11800 dw2_asm_output_data (4, 0xffffffff,
11801 "Initial length escape value indicating "
11802 "64-bit DWARF extension");
11803 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11804 "Length of Range Lists");
11805 ASM_OUTPUT_LABEL (asm_out_file, l1);
11806 output_dwarf_version ();
11807 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11808 dw2_asm_output_data (1, 0, "Segment Size");
11809 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11810 about relocation sizes and primarily care about the size of .debug*
11811 sections in linked shared libraries and executables, then
11812 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11813 into it are usually larger than just DW_FORM_sec_offset offsets
11814 into the .debug_rnglists section. */
11815 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11816 "Offset Entry Count");
11817 if (dwarf_split_debug_info)
11818 {
11819 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11820 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11821 if (r->label)
11822 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11823 ranges_base_label, NULL);
11824 }
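/* The loop below chooses an encoding per entry: DW_RLE_offset_pair
   relative to the text section when all code is in one section; otherwise,
   with LEB128 support, a DW_RLE_base_address followed by offset pairs or a
   DW_RLE_start_length; and DW_RLE_start_end with absolute addresses as the
   fallback.  Each list ends with DW_RLE_end_of_list.  */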
11825
11826 const char *lab = "";
11827 unsigned int len = vec_safe_length (ranges_table);
11828 const char *base = NULL;
11829 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11830 {
11831 int block_num = r->num;
11832
11833 if (r->label)
11834 {
11835 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11836 lab = r->label;
11837 }
11838 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11839 base = NULL;
11840 if (block_num > 0)
11841 {
11842 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11843 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11844
11845 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11846 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11847
11848 if (HAVE_AS_LEB128)
11849 {
11850 /* If all code is in the text section, then the compilation
11851 unit base address defaults to DW_AT_low_pc, which is the
11852 base of the text section. */
11853 if (!have_multiple_function_sections)
11854 {
11855 dw2_asm_output_data (1, DW_RLE_offset_pair,
11856 "DW_RLE_offset_pair (%s)", lab);
11857 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11858 "Range begin address (%s)", lab);
11859 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11860 "Range end address (%s)", lab);
11861 continue;
11862 }
11863 if (base == NULL)
11864 {
11865 dw_ranges *r2 = NULL;
11866 if (i < len - 1)
11867 r2 = &(*ranges_table)[i + 1];
11868 if (r2
11869 && r2->num != 0
11870 && r2->label == NULL
11871 && !r2->maybe_new_sec)
11872 {
11873 dw2_asm_output_data (1, DW_RLE_base_address,
11874 "DW_RLE_base_address (%s)", lab);
11875 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11876 "Base address (%s)", lab);
11877 strcpy (basebuf, blabel);
11878 base = basebuf;
11879 }
11880 }
11881 if (base)
11882 {
11883 dw2_asm_output_data (1, DW_RLE_offset_pair,
11884 "DW_RLE_offset_pair (%s)", lab);
11885 dw2_asm_output_delta_uleb128 (blabel, base,
11886 "Range begin address (%s)", lab);
11887 dw2_asm_output_delta_uleb128 (elabel, base,
11888 "Range end address (%s)", lab);
11889 continue;
11890 }
11891 dw2_asm_output_data (1, DW_RLE_start_length,
11892 "DW_RLE_start_length (%s)", lab);
11893 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11894 "Range begin address (%s)", lab);
11895 dw2_asm_output_delta_uleb128 (elabel, blabel,
11896 "Range length (%s)", lab);
11897 }
11898 else
11899 {
11900 dw2_asm_output_data (1, DW_RLE_start_end,
11901 "DW_RLE_start_end (%s)", lab);
11902 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11903 "Range begin address (%s)", lab);
11904 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11905 "Range end address (%s)", lab);
11906 }
11907 }
11908
11909 /* Negative block_num stands for an index into ranges_by_label. */
11910 else if (block_num < 0)
11911 {
11912 int lab_idx = - block_num - 1;
11913 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11914 const char *elabel = (*ranges_by_label)[lab_idx].end;
11915
11916 if (!have_multiple_function_sections)
11917 gcc_unreachable ();
11918 if (HAVE_AS_LEB128)
11919 {
11920 dw2_asm_output_data (1, DW_RLE_start_length,
11921 "DW_RLE_start_length (%s)", lab);
11922 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11923 "Range begin address (%s)", lab);
11924 dw2_asm_output_delta_uleb128 (elabel, blabel,
11925 "Range length (%s)", lab);
11926 }
11927 else
11928 {
11929 dw2_asm_output_data (1, DW_RLE_start_end,
11930 "DW_RLE_start_end (%s)", lab);
11931 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11932 "Range begin address (%s)", lab);
11933 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11934 "Range end address (%s)", lab);
11935 }
11936 }
11937 else
11938 dw2_asm_output_data (1, DW_RLE_end_of_list,
11939 "DW_RLE_end_of_list (%s)", lab);
11940 }
11941 ASM_OUTPUT_LABEL (asm_out_file, l2);
11942 }
11943
11944 /* Data structure containing information about input files. */
11945 struct file_info
11946 {
11947 const char *path; /* Complete file name. */
11948 const char *fname; /* File name part. */
11949 int length; /* Length of entire string. */
11950 struct dwarf_file_data * file_idx; /* Index in input file table. */
11951 int dir_idx; /* Index in directory table. */
11952 };
11953
11954 /* Data structure containing information about directories with source
11955 files. */
11956 struct dir_info
11957 {
11958 const char *path; /* Path including directory name. */
11959 int length; /* Path length. */
11960 int prefix; /* Index of directory entry which is a prefix. */
11961 int count; /* Number of files in this directory. */
11962 int dir_idx; /* Index of directory used as base. */
11963 };
11964
11965 /* Callback function for file_info comparison. We sort by looking at
11966 the directories in the path. */
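/* For example, "x.c" (no directory) sorts before "lib/y.c", and
   "lib/util/x.c" sorts before "lib/y.c" because the longer directory
   prefix wins once the shorter path runs out of directory characters.  */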
11967
11968 static int
11969 file_info_cmp (const void *p1, const void *p2)
11970 {
11971 const struct file_info *const s1 = (const struct file_info *) p1;
11972 const struct file_info *const s2 = (const struct file_info *) p2;
11973 const unsigned char *cp1;
11974 const unsigned char *cp2;
11975
11976 /* Take care of file names without directories. We need to make sure that
11977 we return consistent values to qsort since some implementations will get
11978 confused if we return the same value when identical operands are passed
11979 in opposite orders. So if neither has a directory, return 0 and otherwise
11980 return 1 or -1 depending on which one has the directory. We want the one
11981 with the directory to sort after the one without, so all files without a
11982 directory are at the start (normally only the compilation unit file). */
11983 if ((s1->path == s1->fname || s2->path == s2->fname))
11984 return (s2->path == s2->fname) - (s1->path == s1->fname);
11985
11986 cp1 = (const unsigned char *) s1->path;
11987 cp2 = (const unsigned char *) s2->path;
11988
11989 while (1)
11990 {
11991 ++cp1;
11992 ++cp2;
11993 /* Reached the end of the first path? If so, handle like above,
11994 but now we want longer directory prefixes before shorter ones. */
11995 if ((cp1 == (const unsigned char *) s1->fname)
11996 || (cp2 == (const unsigned char *) s2->fname))
11997 return ((cp1 == (const unsigned char *) s1->fname)
11998 - (cp2 == (const unsigned char *) s2->fname));
11999
12000 /* Character of current path component the same? */
12001 else if (*cp1 != *cp2)
12002 return *cp1 - *cp2;
12003 }
12004 }
12005
12006 struct file_name_acquire_data
12007 {
12008 struct file_info *files;
12009 int used_files;
12010 int max_files;
12011 };
12012
12013 /* Traversal function for the hash table. */
12014
12015 int
12016 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
12017 {
12018 struct dwarf_file_data *d = *slot;
12019 struct file_info *fi;
12020 const char *f;
12021
12022 gcc_assert (fnad->max_files >= d->emitted_number);
12023
12024 if (! d->emitted_number)
12025 return 1;
12026
12027 gcc_assert (fnad->max_files != fnad->used_files);
12028
12029 fi = fnad->files + fnad->used_files++;
12030
12031 /* Skip all leading "./". */
12032 f = d->filename;
12033 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
12034 f += 2;
12035
12036 /* Create a new array entry. */
12037 fi->path = f;
12038 fi->length = strlen (f);
12039 fi->file_idx = d;
12040
12041 /* Search for the file name part. */
12042 f = strrchr (f, DIR_SEPARATOR);
12043 #if defined (DIR_SEPARATOR_2)
12044 {
12045 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
12046
12047 if (g != NULL)
12048 {
12049 if (f == NULL || f < g)
12050 f = g;
12051 }
12052 }
12053 #endif
12054
12055 fi->fname = f == NULL ? fi->path : f + 1;
12056 return 1;
12057 }
12058
12059 /* Helper function for output_file_names. Emit a FORM encoded
12060 string STR, with assembly comment start ENTRY_KIND and
12061 index IDX. */
12062
12063 static void
12064 output_line_string (enum dwarf_form form, const char *str,
12065 const char *entry_kind, unsigned int idx)
12066 {
12067 switch (form)
12068 {
12069 case DW_FORM_string:
12070 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
12071 break;
12072 case DW_FORM_line_strp:
12073 if (!debug_line_str_hash)
12074 debug_line_str_hash
12075 = hash_table<indirect_string_hasher>::create_ggc (10);
12076
12077 struct indirect_string_node *node;
12078 node = find_AT_string_in_table (str, debug_line_str_hash);
12079 set_indirect_string (node);
12080 node->form = form;
12081 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
12082 debug_line_str_section, "%s: %#x: \"%s\"",
12083 entry_kind, 0, node->str);
12084 break;
12085 default:
12086 gcc_unreachable ();
12087 }
12088 }
12089
12090 /* Output the directory table and the file name table. We try to minimize
12091 the total amount of memory needed. A heuristic is used to avoid large
12092 slowdowns with many input files. */
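/* Roughly: for DWARF 5 each table is a list of entry-format descriptors
   (DW_LNCT_* / DW_FORM_* pairs), an entry count and then the entries
   themselves; for DWARF 2-4 each table is a simple sequence of
   NUL-terminated strings closed by a single 0 byte.  */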
12093
12094 static void
12095 output_file_names (void)
12096 {
12097 struct file_name_acquire_data fnad;
12098 int numfiles;
12099 struct file_info *files;
12100 struct dir_info *dirs;
12101 int *saved;
12102 int *savehere;
12103 int *backmap;
12104 int ndirs;
12105 int idx_offset;
12106 int i;
12107
12108 if (!last_emitted_file)
12109 {
12110 if (dwarf_version >= 5)
12111 {
12112 dw2_asm_output_data (1, 0, "Directory entry format count");
12113 dw2_asm_output_data_uleb128 (0, "Directories count");
12114 dw2_asm_output_data (1, 0, "File name entry format count");
12115 dw2_asm_output_data_uleb128 (0, "File names count");
12116 }
12117 else
12118 {
12119 dw2_asm_output_data (1, 0, "End directory table");
12120 dw2_asm_output_data (1, 0, "End file name table");
12121 }
12122 return;
12123 }
12124
12125 numfiles = last_emitted_file->emitted_number;
12126
12127 /* Allocate the various arrays we need. */
12128 files = XALLOCAVEC (struct file_info, numfiles);
12129 dirs = XALLOCAVEC (struct dir_info, numfiles);
12130
12131 fnad.files = files;
12132 fnad.used_files = 0;
12133 fnad.max_files = numfiles;
12134 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12135 gcc_assert (fnad.used_files == fnad.max_files);
12136
12137 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12138
12139 /* Find all the different directories used. */
12140 dirs[0].path = files[0].path;
12141 dirs[0].length = files[0].fname - files[0].path;
12142 dirs[0].prefix = -1;
12143 dirs[0].count = 1;
12144 dirs[0].dir_idx = 0;
12145 files[0].dir_idx = 0;
12146 ndirs = 1;
12147
12148 for (i = 1; i < numfiles; i++)
12149 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12150 && memcmp (dirs[ndirs - 1].path, files[i].path,
12151 dirs[ndirs - 1].length) == 0)
12152 {
12153 /* Same directory as last entry. */
12154 files[i].dir_idx = ndirs - 1;
12155 ++dirs[ndirs - 1].count;
12156 }
12157 else
12158 {
12159 int j;
12160
12161 /* This is a new directory. */
12162 dirs[ndirs].path = files[i].path;
12163 dirs[ndirs].length = files[i].fname - files[i].path;
12164 dirs[ndirs].count = 1;
12165 dirs[ndirs].dir_idx = ndirs;
12166 files[i].dir_idx = ndirs;
12167
12168 /* Search for a prefix. */
12169 dirs[ndirs].prefix = -1;
12170 for (j = 0; j < ndirs; j++)
12171 if (dirs[j].length < dirs[ndirs].length
12172 && dirs[j].length > 1
12173 && (dirs[ndirs].prefix == -1
12174 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12175 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12176 dirs[ndirs].prefix = j;
12177
12178 ++ndirs;
12179 }
12180
12181 /* Now to the actual work. We have to find a subset of the directories which
12182 allows expressing the file names using references to the directory table
12183 with the fewest characters. We do not do an exhaustive search
12184 where we would have to check every combination of every single
12185 possible prefix. Instead we use a heuristic which provides nearly optimal
12186 results in most cases and is never far off. */
12187 saved = XALLOCAVEC (int, ndirs);
12188 savehere = XALLOCAVEC (int, ndirs);
12189
12190 memset (saved, '\0', ndirs * sizeof (saved[0]));
12191 for (i = 0; i < ndirs; i++)
12192 {
12193 int j;
12194 int total;
12195
12196 /* We can always save some space for the current directory. But this
12197 does not mean it will be enough to justify adding the directory. */
12198 savehere[i] = dirs[i].length;
12199 total = (savehere[i] - saved[i]) * dirs[i].count;
12200
12201 for (j = i + 1; j < ndirs; j++)
12202 {
12203 savehere[j] = 0;
12204 if (saved[j] < dirs[i].length)
12205 {
12206 /* Determine whether the dirs[i] path is a prefix of the
12207 dirs[j] path. */
12208 int k;
12209
12210 k = dirs[j].prefix;
12211 while (k != -1 && k != (int) i)
12212 k = dirs[k].prefix;
12213
12214 if (k == (int) i)
12215 {
12216 /* Yes it is. We can possibly save some memory by
12217 writing the filenames in dirs[j] relative to
12218 dirs[i]. */
12219 savehere[j] = dirs[i].length;
12220 total += (savehere[j] - saved[j]) * dirs[j].count;
12221 }
12222 }
12223 }
12224
12225 /* Check whether we can save enough to justify adding the dirs[i]
12226 directory. */
12227 if (total > dirs[i].length + 1)
12228 {
12229 /* It's worthwhile adding. */
12230 for (j = i; j < ndirs; j++)
12231 if (savehere[j] > 0)
12232 {
12233 /* Remember how much we saved for this directory so far. */
12234 saved[j] = savehere[j];
12235
12236 /* Remember the prefix directory. */
12237 dirs[j].dir_idx = i;
12238 }
12239 }
12240 }
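/* A hypothetical example of the heuristic above: if "/usr/src/gcc/"
   (13 characters) is the directory of five files, emitting it once in the
   directory table saves about 5 * 13 characters in the file table at a
   cost of 14, so it is kept; a directory used by one short path would not
   pay for itself.  */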
12241
12242 /* Emit the directory name table. */
12243 idx_offset = dirs[0].length > 0 ? 1 : 0;
12244 enum dwarf_form str_form = DW_FORM_string;
12245 enum dwarf_form idx_form = DW_FORM_udata;
12246 if (dwarf_version >= 5)
12247 {
12248 const char *comp_dir = comp_dir_string ();
12249 if (comp_dir == NULL)
12250 comp_dir = "";
12251 dw2_asm_output_data (1, 1, "Directory entry format count");
12252 if (DWARF5_USE_DEBUG_LINE_STR)
12253 str_form = DW_FORM_line_strp;
12254 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12255 dw2_asm_output_data_uleb128 (str_form, "%s",
12256 get_DW_FORM_name (str_form));
12257 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12258 if (str_form == DW_FORM_string)
12259 {
12260 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12261 for (i = 1 - idx_offset; i < ndirs; i++)
12262 dw2_asm_output_nstring (dirs[i].path,
12263 dirs[i].length
12264 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12265 "Directory Entry: %#x", i + idx_offset);
12266 }
12267 else
12268 {
12269 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12270 for (i = 1 - idx_offset; i < ndirs; i++)
12271 {
12272 const char *str
12273 = ggc_alloc_string (dirs[i].path,
12274 dirs[i].length
12275 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12276 output_line_string (str_form, str, "Directory Entry",
12277 (unsigned) i + idx_offset);
12278 }
12279 }
12280 }
12281 else
12282 {
12283 for (i = 1 - idx_offset; i < ndirs; i++)
12284 dw2_asm_output_nstring (dirs[i].path,
12285 dirs[i].length
12286 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12287 "Directory Entry: %#x", i + idx_offset);
12288
12289 dw2_asm_output_data (1, 0, "End directory table");
12290 }
12291
12292 /* We have to emit them in the order of emitted_number since that's
12293 used in the debug info generation. To do this efficiently we
12294 generate a back-mapping of the indices first. */
12295 backmap = XALLOCAVEC (int, numfiles);
12296 for (i = 0; i < numfiles; i++)
12297 backmap[files[i].file_idx->emitted_number - 1] = i;
12298
12299 if (dwarf_version >= 5)
12300 {
12301 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12302 if (filename0 == NULL)
12303 filename0 = "";
12304 /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12305 DW_FORM_data2. Choose one based on the number of directories
12306 and how much space they would occupy in each encoding.
12307 If we have at most 256 directories, all indexes fit into
12308 a single byte, so DW_FORM_data1 is most compact (if there
12309 are at most 128 directories, DW_FORM_udata would be just as
12310 compact, but no shorter and slower to decode). */
12311 if (ndirs + idx_offset <= 256)
12312 idx_form = DW_FORM_data1;
12313 /* If there are more than 65536 directories, we have to use
12314 DW_FORM_udata, as DW_FORM_data2 can't refer to them.
12315 Otherwise, compute how much space the indexes would occupy if
12316 they all used DW_FORM_udata - sum - compare that to the size of
12317 the DW_FORM_data2 encoding, and pick the more efficient one. */
12318 else if (ndirs + idx_offset <= 65536)
12319 {
12320 unsigned HOST_WIDE_INT sum = 1;
12321 for (i = 0; i < numfiles; i++)
12322 {
12323 int file_idx = backmap[i];
12324 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12325 sum += size_of_uleb128 (dir_idx);
12326 }
12327 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12328 idx_form = DW_FORM_data2;
12329 }
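/* In practice DW_FORM_udata survives whenever every directory index fits
   in two uleb128 bytes (index below 16384), since the sum can then never
   reach the DW_FORM_data2 total; DW_FORM_data2 is picked only when enough
   files refer to directories whose index needs three or more uleb128
   bytes.  */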
12330 #ifdef VMS_DEBUGGING_INFO
12331 dw2_asm_output_data (1, 4, "File name entry format count");
12332 #else
12333 dw2_asm_output_data (1, 2, "File name entry format count");
12334 #endif
12335 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12336 dw2_asm_output_data_uleb128 (str_form, "%s",
12337 get_DW_FORM_name (str_form));
12338 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12339 "DW_LNCT_directory_index");
12340 dw2_asm_output_data_uleb128 (idx_form, "%s",
12341 get_DW_FORM_name (idx_form));
12342 #ifdef VMS_DEBUGGING_INFO
12343 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12344 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12345 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12346 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12347 #endif
12348 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12349
12350 output_line_string (str_form, filename0, "File Entry", 0);
12351
12352 /* Include directory index. */
12353 if (idx_form != DW_FORM_udata)
12354 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12355 0, NULL);
12356 else
12357 dw2_asm_output_data_uleb128 (0, NULL);
12358
12359 #ifdef VMS_DEBUGGING_INFO
12360 dw2_asm_output_data_uleb128 (0, NULL);
12361 dw2_asm_output_data_uleb128 (0, NULL);
12362 #endif
12363 }
12364
12365 /* Now write all the file names. */
12366 for (i = 0; i < numfiles; i++)
12367 {
12368 int file_idx = backmap[i];
12369 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12370
12371 #ifdef VMS_DEBUGGING_INFO
12372 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12373
12374 /* Setting these fields can lead to debugger miscomparisons,
12375 but VMS Debug requires them to be set correctly. */
12376
12377 int ver;
12378 long long cdt;
12379 long siz;
12380 int maxfilelen = (strlen (files[file_idx].path)
12381 + dirs[dir_idx].length
12382 + MAX_VMS_VERSION_LEN + 1);
12383 char *filebuf = XALLOCAVEC (char, maxfilelen);
12384
12385 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12386 snprintf (filebuf, maxfilelen, "%s;%d",
12387 files[file_idx].path + dirs[dir_idx].length, ver);
12388
12389 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12390
12391 /* Include directory index. */
12392 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12393 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12394 dir_idx + idx_offset, NULL);
12395 else
12396 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12397
12398 /* Modification time. */
12399 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12400 &cdt, 0, 0, 0) == 0)
12401 ? cdt : 0, NULL);
12402
12403 /* File length in bytes. */
12404 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12405 0, &siz, 0, 0) == 0)
12406 ? siz : 0, NULL);
12407 #else
12408 output_line_string (str_form,
12409 files[file_idx].path + dirs[dir_idx].length,
12410 "File Entry", (unsigned) i + 1);
12411
12412 /* Include directory index. */
12413 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12414 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12415 dir_idx + idx_offset, NULL);
12416 else
12417 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12418
12419 if (dwarf_version >= 5)
12420 continue;
12421
12422 /* Modification time. */
12423 dw2_asm_output_data_uleb128 (0, NULL);
12424
12425 /* File length in bytes. */
12426 dw2_asm_output_data_uleb128 (0, NULL);
12427 #endif /* VMS_DEBUGGING_INFO */
12428 }
12429
12430 if (dwarf_version < 5)
12431 dw2_asm_output_data (1, 0, "End file name table");
12432 }
12433
12434
12435 /* Output one line number table into the .debug_line section. */
12436
12437 static void
12438 output_one_line_info_table (dw_line_info_table *table)
12439 {
12440 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12441 unsigned int current_line = 1;
12442 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12443 dw_line_info_entry *ent, *prev_addr;
12444 size_t i;
12445 unsigned int view;
12446
12447 view = 0;
12448
12449 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12450 {
12451 switch (ent->opcode)
12452 {
12453 case LI_set_address:
12454 /* ??? Unfortunately, we have little choice here currently, and
12455 must always use the most general form. GCC does not know the
12456 address delta itself, so we can't use DW_LNS_advance_pc. Many
12457 ports do have length attributes which will give an upper bound
12458 on the address range. We could perhaps use length attributes
12459 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
12460 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12461
12462 view = 0;
12463
12464 /* This can handle any delta. This takes
12465 3 + DWARF2_ADDR_SIZE bytes. */
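	  /* What is emitted below is the standard encoding of a DWARF
	     extended opcode: a zero escape byte, a ULEB128 length covering
	     the sub-opcode and its operand (here 1 + DWARF2_ADDR_SIZE), the
	     DW_LNE_set_address sub-opcode, and finally the relocatable
	     label address itself.  */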
12466 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12467 debug_variable_location_views
12468 ? ", reset view to 0" : "");
12469 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12470 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12471 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
12472
12473 prev_addr = ent;
12474 break;
12475
12476 case LI_adv_address:
12477 {
12478 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12479 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12480 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12481
12482 view++;
12483
12484 dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
12485 dw2_asm_output_delta (2, line_label, prev_label,
12486 "from %s to %s", prev_label, line_label);
12487
12488 prev_addr = ent;
12489 break;
12490 }
12491
12492 case LI_set_line:
12493 if (ent->val == current_line)
12494 {
12495 /* We still need to start a new row, so output a copy insn. */
12496 dw2_asm_output_data (1, DW_LNS_copy,
12497 "copy line %u", current_line);
12498 }
12499 else
12500 {
12501 int line_offset = ent->val - current_line;
12502 int line_delta = line_offset - DWARF_LINE_BASE;
12503
12504 current_line = ent->val;
12505 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12506 {
12507 /* This can handle deltas from -10 to 234, using the current
12508 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12509 This takes 1 byte. */
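		  /* Worked example (assuming DWARF_LINE_BASE == -10 as noted
		     above): advancing the line by +2 gives line_delta == 12,
		     so the single opcode byte DWARF_LINE_OPCODE_BASE + 12 is
		     emitted; a consumer decodes it as a line increment of
		     DWARF_LINE_BASE + ((opcode - opcode_base) % DWARF_LINE_RANGE)
		     == 2 together with a zero address advance.  */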
12510 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12511 "line %u", current_line);
12512 }
12513 else
12514 {
12515 /* This can handle any delta. This takes at least 4 bytes,
12516 depending on the value being encoded. */
12517 dw2_asm_output_data (1, DW_LNS_advance_line,
12518 "advance to line %u", current_line);
12519 dw2_asm_output_data_sleb128 (line_offset, NULL);
12520 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12521 }
12522 }
12523 break;
12524
12525 case LI_set_file:
12526 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12527 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12528 break;
12529
12530 case LI_set_column:
12531 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12532 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12533 break;
12534
12535 case LI_negate_stmt:
12536 current_is_stmt = !current_is_stmt;
12537 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12538 "is_stmt %d", current_is_stmt);
12539 break;
12540
12541 case LI_set_prologue_end:
12542 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12543 "set prologue end");
12544 break;
12545
12546 case LI_set_epilogue_begin:
12547 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12548 "set epilogue begin");
12549 break;
12550
12551 case LI_set_discriminator:
12552 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12553 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12554 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12555 dw2_asm_output_data_uleb128 (ent->val, NULL);
12556 break;
12557 }
12558 }
12559
12560 /* Emit debug info for the address of the end of the table. */
12561 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12562 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12563 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12564 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12565
12566 dw2_asm_output_data (1, 0, "end sequence");
12567 dw2_asm_output_data_uleb128 (1, NULL);
12568 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
12569 }
12570
12571 /* Output the source line number correspondence information. This
12572 information goes into the .debug_line section. */
12573
12574 static void
12575 output_line_info (bool prologue_only)
12576 {
12577 static unsigned int generation;
12578 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12579 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12580 bool saw_one = false;
12581 int opc;
12582
12583 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
12584 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
12585 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
12586 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
12587
12588 if (!XCOFF_DEBUGGING_INFO)
12589 {
12590 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
12591 dw2_asm_output_data (4, 0xffffffff,
12592 "Initial length escape value indicating 64-bit DWARF extension");
12593 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
12594 "Length of Source Line Info");
12595 }
12596
12597 ASM_OUTPUT_LABEL (asm_out_file, l1);
12598
12599 output_dwarf_version ();
12600 if (dwarf_version >= 5)
12601 {
12602 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12603 dw2_asm_output_data (1, 0, "Segment Size");
12604 }
12605 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12606 ASM_OUTPUT_LABEL (asm_out_file, p1);
12607
12608 /* Define the architecture-dependent minimum instruction length (in bytes).
12609 In this implementation of DWARF, this field is used for information
12610 purposes only. Since GCC generates assembly language, we have no
12611 a priori knowledge of how many instruction bytes are generated for each
12612 source line, and therefore can use only the DW_LNE_set_address and
12613 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12614 this as '1', which is "correct enough" for all architectures,
12615 and don't let the target override. */
12616 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12617
12618 if (dwarf_version >= 4)
12619 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12620 "Maximum Operations Per Instruction");
12621 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12622 "Default is_stmt_start flag");
12623 dw2_asm_output_data (1, DWARF_LINE_BASE,
12624 "Line Base Value (Special Opcodes)");
12625 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12626 "Line Range Value (Special Opcodes)");
12627 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12628 "Special Opcode Base");
12629
12630 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12631 {
12632 int n_op_args;
12633 switch (opc)
12634 {
12635 case DW_LNS_advance_pc:
12636 case DW_LNS_advance_line:
12637 case DW_LNS_set_file:
12638 case DW_LNS_set_column:
12639 case DW_LNS_fixed_advance_pc:
12640 case DW_LNS_set_isa:
12641 n_op_args = 1;
12642 break;
12643 default:
12644 n_op_args = 0;
12645 break;
12646 }
12647
12648 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12649 opc, n_op_args);
12650 }
12651
12652 /* Write out the information about the files we use. */
12653 output_file_names ();
12654 ASM_OUTPUT_LABEL (asm_out_file, p2);
12655 if (prologue_only)
12656 {
12657 /* Output the marker for the end of the line number info. */
12658 ASM_OUTPUT_LABEL (asm_out_file, l2);
12659 return;
12660 }
12661
12662 if (separate_line_info)
12663 {
12664 dw_line_info_table *table;
12665 size_t i;
12666
12667 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12668 if (table->in_use)
12669 {
12670 output_one_line_info_table (table);
12671 saw_one = true;
12672 }
12673 }
12674 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12675 {
12676 output_one_line_info_table (cold_text_section_line_info);
12677 saw_one = true;
12678 }
12679
12680 /* ??? Some Darwin linkers crash on a .debug_line section with no
12681 sequences. Further, merely a DW_LNE_end_sequence entry is not
12682 sufficient -- the address column must also be initialized.
12683 Make sure to output at least one set_address/end_sequence pair,
12684 choosing .text since that section is always present. */
12685 if (text_section_line_info->in_use || !saw_one)
12686 output_one_line_info_table (text_section_line_info);
12687
12688 /* Output the marker for the end of the line number info. */
12689 ASM_OUTPUT_LABEL (asm_out_file, l2);
12690 }
12691 \f
12692 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12693
12694 static inline bool
12695 need_endianity_attribute_p (bool reverse)
12696 {
12697 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12698 }
12699
12700 /* Given a pointer to a tree node for some base type, return a pointer to
12701 a DIE that describes the given type. REVERSE is true if the type is
12702 to be interpreted in the reverse storage order wrt the target order.
12703
12704 This routine must only be called for GCC type nodes that correspond to
12705 Dwarf base (fundamental) types. */
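/* For instance, the C type 'unsigned char' is normally described by a
   DW_TAG_base_type DIE with DW_AT_encoding DW_ATE_unsigned_char and
   DW_AT_byte_size 1, while plain 'int' gets DW_ATE_signed.  */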
12706
12707 static dw_die_ref
12708 base_type_die (tree type, bool reverse)
12709 {
12710 dw_die_ref base_type_result;
12711 enum dwarf_type encoding;
12712 bool fpt_used = false;
12713 struct fixed_point_type_info fpt_info;
12714 tree type_bias = NULL_TREE;
12715
12716 /* If this is a subtype that should not be emitted as a subrange type,
12717 use the base type. See subrange_type_for_debug_p. */
12718 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12719 type = TREE_TYPE (type);
12720
12721 switch (TREE_CODE (type))
12722 {
12723 case INTEGER_TYPE:
12724 if ((dwarf_version >= 4 || !dwarf_strict)
12725 && TYPE_NAME (type)
12726 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12727 && DECL_IS_BUILTIN (TYPE_NAME (type))
12728 && DECL_NAME (TYPE_NAME (type)))
12729 {
12730 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12731 if (strcmp (name, "char16_t") == 0
12732 || strcmp (name, "char32_t") == 0)
12733 {
12734 encoding = DW_ATE_UTF;
12735 break;
12736 }
12737 }
12738 if ((dwarf_version >= 3 || !dwarf_strict)
12739 && lang_hooks.types.get_fixed_point_type_info)
12740 {
12741 memset (&fpt_info, 0, sizeof (fpt_info));
12742 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12743 {
12744 fpt_used = true;
12745 encoding = ((TYPE_UNSIGNED (type))
12746 ? DW_ATE_unsigned_fixed
12747 : DW_ATE_signed_fixed);
12748 break;
12749 }
12750 }
12751 if (TYPE_STRING_FLAG (type))
12752 {
12753 if (TYPE_UNSIGNED (type))
12754 encoding = DW_ATE_unsigned_char;
12755 else
12756 encoding = DW_ATE_signed_char;
12757 }
12758 else if (TYPE_UNSIGNED (type))
12759 encoding = DW_ATE_unsigned;
12760 else
12761 encoding = DW_ATE_signed;
12762
12763 if (!dwarf_strict
12764 && lang_hooks.types.get_type_bias)
12765 type_bias = lang_hooks.types.get_type_bias (type);
12766 break;
12767
12768 case REAL_TYPE:
12769 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12770 {
12771 if (dwarf_version >= 3 || !dwarf_strict)
12772 encoding = DW_ATE_decimal_float;
12773 else
12774 encoding = DW_ATE_lo_user;
12775 }
12776 else
12777 encoding = DW_ATE_float;
12778 break;
12779
12780 case FIXED_POINT_TYPE:
12781 if (!(dwarf_version >= 3 || !dwarf_strict))
12782 encoding = DW_ATE_lo_user;
12783 else if (TYPE_UNSIGNED (type))
12784 encoding = DW_ATE_unsigned_fixed;
12785 else
12786 encoding = DW_ATE_signed_fixed;
12787 break;
12788
12789 /* Dwarf2 doesn't know anything about complex ints, so use
12790 a user defined type for them. */
12791 case COMPLEX_TYPE:
12792 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12793 encoding = DW_ATE_complex_float;
12794 else
12795 encoding = DW_ATE_lo_user;
12796 break;
12797
12798 case BOOLEAN_TYPE:
12799 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12800 encoding = DW_ATE_boolean;
12801 break;
12802
12803 default:
12804 /* No other TREE_CODEs are Dwarf fundamental types. */
12805 gcc_unreachable ();
12806 }
12807
12808 base_type_result = new_die_raw (DW_TAG_base_type);
12809
12810 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12811 int_size_in_bytes (type));
12812 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12813
12814 if (need_endianity_attribute_p (reverse))
12815 add_AT_unsigned (base_type_result, DW_AT_endianity,
12816 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12817
12818 add_alignment_attribute (base_type_result, type);
12819
12820 if (fpt_used)
12821 {
12822 switch (fpt_info.scale_factor_kind)
12823 {
12824 case fixed_point_scale_factor_binary:
12825 add_AT_int (base_type_result, DW_AT_binary_scale,
12826 fpt_info.scale_factor.binary);
12827 break;
12828
12829 case fixed_point_scale_factor_decimal:
12830 add_AT_int (base_type_result, DW_AT_decimal_scale,
12831 fpt_info.scale_factor.decimal);
12832 break;
12833
12834 case fixed_point_scale_factor_arbitrary:
12835 /* Arbitrary scale factors cannot be described in standard DWARF,
12836 yet. */
12837 if (!dwarf_strict)
12838 {
12839 /* Describe the scale factor as a rational constant. */
12840 const dw_die_ref scale_factor
12841 = new_die (DW_TAG_constant, comp_unit_die (), type);
12842
12843 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12844 fpt_info.scale_factor.arbitrary.numerator);
12845 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12846 fpt_info.scale_factor.arbitrary.denominator);
12847
12848 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12849 }
12850 break;
12851
12852 default:
12853 gcc_unreachable ();
12854 }
12855 }
12856
12857 if (type_bias)
12858 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12859 dw_scalar_form_constant
12860 | dw_scalar_form_exprloc
12861 | dw_scalar_form_reference,
12862 NULL);
12863
12864 return base_type_result;
12865 }
12866
12867 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12868 named 'auto' in its type: return true for it, false otherwise. */
12869
12870 static inline bool
12871 is_cxx_auto (tree type)
12872 {
12873 if (is_cxx ())
12874 {
12875 tree name = TYPE_IDENTIFIER (type);
12876 if (name == get_identifier ("auto")
12877 || name == get_identifier ("decltype(auto)"))
12878 return true;
12879 }
12880 return false;
12881 }
12882
12883 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12884 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
12885
12886 static inline int
12887 is_base_type (tree type)
12888 {
12889 switch (TREE_CODE (type))
12890 {
12891 case INTEGER_TYPE:
12892 case REAL_TYPE:
12893 case FIXED_POINT_TYPE:
12894 case COMPLEX_TYPE:
12895 case BOOLEAN_TYPE:
12896 return 1;
12897
12898 case VOID_TYPE:
12899 case ARRAY_TYPE:
12900 case RECORD_TYPE:
12901 case UNION_TYPE:
12902 case QUAL_UNION_TYPE:
12903 case ENUMERAL_TYPE:
12904 case FUNCTION_TYPE:
12905 case METHOD_TYPE:
12906 case POINTER_TYPE:
12907 case REFERENCE_TYPE:
12908 case NULLPTR_TYPE:
12909 case OFFSET_TYPE:
12910 case LANG_TYPE:
12911 case VECTOR_TYPE:
12912 return 0;
12913
12914 default:
12915 if (is_cxx_auto (type))
12916 return 0;
12917 gcc_unreachable ();
12918 }
12919
12920 return 0;
12921 }
12922
12923 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12924 node, return the size in bits for the type if it is a constant, or else
12925 return the alignment for the type if the type's size is not constant, or
12926 else return BITS_PER_WORD if the type actually turns out to be an
12927 ERROR_MARK node. */
12928
12929 static inline unsigned HOST_WIDE_INT
12930 simple_type_size_in_bits (const_tree type)
12931 {
12932 if (TREE_CODE (type) == ERROR_MARK)
12933 return BITS_PER_WORD;
12934 else if (TYPE_SIZE (type) == NULL_TREE)
12935 return 0;
12936 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
12937 return tree_to_uhwi (TYPE_SIZE (type));
12938 else
12939 return TYPE_ALIGN (type);
12940 }
12941
12942 /* Similarly, but return an offset_int instead of UHWI. */
12943
12944 static inline offset_int
12945 offset_int_type_size_in_bits (const_tree type)
12946 {
12947 if (TREE_CODE (type) == ERROR_MARK)
12948 return BITS_PER_WORD;
12949 else if (TYPE_SIZE (type) == NULL_TREE)
12950 return 0;
12951 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
12952 return wi::to_offset (TYPE_SIZE (type));
12953 else
12954 return TYPE_ALIGN (type);
12955 }
12956
12957 /* Given a pointer to a tree node for a subrange type, return a pointer
12958 to a DIE that describes the given type. */
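/* For instance, an Ada declaration like 'subtype Small_Int is Integer
   range 1 .. 10' is roughly described by a DW_TAG_subrange_type DIE with
   DW_AT_lower_bound 1 and DW_AT_upper_bound 10; the DW_AT_type reference
   to the base type is added by the caller.  */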
12959
12960 static dw_die_ref
12961 subrange_type_die (tree type, tree low, tree high, tree bias,
12962 dw_die_ref context_die)
12963 {
12964 dw_die_ref subrange_die;
12965 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
12966
12967 if (context_die == NULL)
12968 context_die = comp_unit_die ();
12969
12970 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
12971
12972 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
12973 {
12974 /* The size of the subrange type and its base type do not match,
12975 so we need to generate a size attribute for the subrange type. */
12976 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
12977 }
12978
12979 add_alignment_attribute (subrange_die, type);
12980
12981 if (low)
12982 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
12983 if (high)
12984 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
12985 if (bias && !dwarf_strict)
12986 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
12987 dw_scalar_form_constant
12988 | dw_scalar_form_exprloc
12989 | dw_scalar_form_reference,
12990 NULL);
12991
12992 return subrange_die;
12993 }
12994
12995 /* Returns the (const and/or volatile) cv_qualifiers associated with
12996 the decl node. This will normally be augmented with the
12997 cv_qualifiers of the underlying type in add_type_attribute. */
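/* For example, 'const volatile int v;' yields
   TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE here, while a C++ reference such
   as 'int &r = i;' yields TYPE_UNQUALIFIED even though the front end
   marks it TREE_READONLY.  */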
12998
12999 static int
13000 decl_quals (const_tree decl)
13001 {
13002 return ((TREE_READONLY (decl)
13003 /* The C++ front-end correctly marks reference-typed
13004 variables as readonly, but from a language (and debug
13005 info) standpoint they are not const-qualified. */
13006 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
13007 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
13008 | (TREE_THIS_VOLATILE (decl)
13009 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
13010 }
13011
13012 /* Determine the TYPE whose qualifiers match the largest strict subset
13013 of the given TYPE_QUALS, and return its qualifiers. Ignore all
13014 qualifiers outside QUAL_MASK. */
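/* For instance, if TYPE_QUALS is const|volatile and only a const variant
   of TYPE exists, this returns TYPE_QUAL_CONST, so modified_type_die only
   has to wrap that variant's DIE in a DW_TAG_volatile_type.  */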
13015
13016 static int
13017 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
13018 {
13019 tree t;
13020 int best_rank = 0, best_qual = 0, max_rank;
13021
13022 type_quals &= qual_mask;
13023 max_rank = popcount_hwi (type_quals) - 1;
13024
13025 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
13026 t = TYPE_NEXT_VARIANT (t))
13027 {
13028 int q = TYPE_QUALS (t) & qual_mask;
13029
13030 if ((q & type_quals) == q && q != type_quals
13031 && check_base_type (t, type))
13032 {
13033 int rank = popcount_hwi (q);
13034
13035 if (rank > best_rank)
13036 {
13037 best_rank = rank;
13038 best_qual = q;
13039 }
13040 }
13041 }
13042
13043 return best_qual;
13044 }
13045
13046 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
13047 static const dwarf_qual_info_t dwarf_qual_info[] =
13048 {
13049 { TYPE_QUAL_CONST, DW_TAG_const_type },
13050 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
13051 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
13052 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
13053 };
13054 static const unsigned int dwarf_qual_info_size
13055 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
13056
13057 /* If DIE is a qualified DIE of some base DIE with the same parent,
13058 return the base DIE, otherwise return NULL. Set MASK to the
13059 qualifiers added compared to the returned DIE. */
13060
13061 static dw_die_ref
13062 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
13063 {
13064 unsigned int i;
13065 for (i = 0; i < dwarf_qual_info_size; i++)
13066 if (die->die_tag == dwarf_qual_info[i].t)
13067 break;
13068 if (i == dwarf_qual_info_size)
13069 return NULL;
13070 if (vec_safe_length (die->die_attr) != 1)
13071 return NULL;
13072 dw_die_ref type = get_AT_ref (die, DW_AT_type);
13073 if (type == NULL || type->die_parent != die->die_parent)
13074 return NULL;
13075 *mask |= dwarf_qual_info[i].q;
13076 if (depth)
13077 {
13078 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
13079 if (ret)
13080 return ret;
13081 }
13082 return type;
13083 }
13084
13085 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
13086 entry that chains the modifiers specified by CV_QUALS in front of the
13087 given type. REVERSE is true if the type is to be interpreted in the
13088 reverse storage order wrt the target order. */
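/* For example, 'const volatile int' typically ends up as a
   DW_TAG_volatile_type DIE whose DW_AT_type refers to a DW_TAG_const_type
   DIE, which in turn refers to the base type DIE for int; the exact chain
   depends on which qualified variants already exist and on the canonical
   ordering enforced when emitting type units.  */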
13089
13090 static dw_die_ref
13091 modified_type_die (tree type, int cv_quals, bool reverse,
13092 dw_die_ref context_die)
13093 {
13094 enum tree_code code = TREE_CODE (type);
13095 dw_die_ref mod_type_die;
13096 dw_die_ref sub_die = NULL;
13097 tree item_type = NULL;
13098 tree qualified_type;
13099 tree name, low, high;
13100 dw_die_ref mod_scope;
13101 /* Only these cv-qualifiers are currently handled. */
13102 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
13103 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
13104 ENCODE_QUAL_ADDR_SPACE(~0U));
13105 const bool reverse_base_type
13106 = need_endianity_attribute_p (reverse) && is_base_type (type);
13107
13108 if (code == ERROR_MARK)
13109 return NULL;
13110
13111 if (lang_hooks.types.get_debug_type)
13112 {
13113 tree debug_type = lang_hooks.types.get_debug_type (type);
13114
13115 if (debug_type != NULL_TREE && debug_type != type)
13116 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13117 }
13118
13119 cv_quals &= cv_qual_mask;
13120
13121 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
13122 tag modifier (and not an attribute) that old consumers won't be able
13123 to handle. */
13124 if (dwarf_version < 3)
13125 cv_quals &= ~TYPE_QUAL_RESTRICT;
13126
13127 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
13128 if (dwarf_version < 5)
13129 cv_quals &= ~TYPE_QUAL_ATOMIC;
13130
13131 /* See if we already have the appropriately qualified variant of
13132 this type. */
13133 qualified_type = get_qualified_type (type, cv_quals);
13134
13135 if (qualified_type == sizetype)
13136 {
13137 /* Try not to expose the internal sizetype type's name. */
13138 if (TYPE_NAME (qualified_type)
13139 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13140 {
13141 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13142
13143 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13144 && (TYPE_PRECISION (t)
13145 == TYPE_PRECISION (qualified_type))
13146 && (TYPE_UNSIGNED (t)
13147 == TYPE_UNSIGNED (qualified_type)));
13148 qualified_type = t;
13149 }
13150 else if (qualified_type == sizetype
13151 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13152 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13153 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13154 qualified_type = size_type_node;
13155 if (type == sizetype)
13156 type = qualified_type;
13157 }
13158
13159 /* If we do, then we can just use its DIE, if it exists. */
13160 if (qualified_type)
13161 {
13162 mod_type_die = lookup_type_die (qualified_type);
13163
13164 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13165 dealt with specially: the DIE with the attribute, if it exists, is
13166 placed immediately after the regular DIE for the same base type. */
13167 if (mod_type_die
13168 && (!reverse_base_type
13169 || ((mod_type_die = mod_type_die->die_sib) != NULL
13170 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13171 return mod_type_die;
13172 }
13173
13174 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13175
13176 /* Handle C typedef types. */
13177 if (name
13178 && TREE_CODE (name) == TYPE_DECL
13179 && DECL_ORIGINAL_TYPE (name)
13180 && !DECL_ARTIFICIAL (name))
13181 {
13182 tree dtype = TREE_TYPE (name);
13183
13184 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13185 if (qualified_type == dtype && !reverse_base_type)
13186 {
13187 tree origin = decl_ultimate_origin (name);
13188
13189 /* Typedef variants that have an abstract origin don't get their own
13190 type DIE (see gen_typedef_die), so fall back on the ultimate
13191 abstract origin instead. */
13192 if (origin != NULL && origin != name)
13193 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13194 context_die);
13195
13196 /* For a named type, use the typedef. */
13197 gen_type_die (qualified_type, context_die);
13198 return lookup_type_die (qualified_type);
13199 }
13200 else
13201 {
13202 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13203 dquals &= cv_qual_mask;
13204 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13205 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13206 /* cv-unqualified version of named type. Just use
13207 the unnamed type to which it refers. */
13208 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13209 reverse, context_die);
13210 /* Else cv-qualified version of named type; fall through. */
13211 }
13212 }
13213
13214 mod_scope = scope_die_for (type, context_die);
13215
13216 if (cv_quals)
13217 {
13218 int sub_quals = 0, first_quals = 0;
13219 unsigned i;
13220 dw_die_ref first = NULL, last = NULL;
13221
13222 /* Determine a lesser qualified type that most closely matches
13223 this one. Then generate DW_TAG_* entries for the remaining
13224 qualifiers. */
13225 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13226 cv_qual_mask);
13227 if (sub_quals && use_debug_types)
13228 {
13229 bool needed = false;
13230 /* If emitting type units, make sure the order of qualifiers
13231 is canonical. Thus, start from unqualified type if
13232 an earlier qualifier is missing in sub_quals, but some later
13233 one is present there. */
13234 for (i = 0; i < dwarf_qual_info_size; i++)
13235 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13236 needed = true;
13237 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13238 {
13239 sub_quals = 0;
13240 break;
13241 }
13242 }
13243 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13244 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13245 {
13246 /* As not all intermediate qualified DIEs have corresponding
13247 tree types, ensure that qualified DIEs in the same scope
13248 as their DW_AT_type are emitted after their DW_AT_type,
13249 only with other qualified DIEs for the same type possibly
13250 in between them. Determine the range of such qualified
13251 DIEs now (first being the base type, last being corresponding
13252 last qualified DIE for it). */
13253 unsigned int count = 0;
13254 first = qualified_die_p (mod_type_die, &first_quals,
13255 dwarf_qual_info_size);
13256 if (first == NULL)
13257 first = mod_type_die;
13258 gcc_assert ((first_quals & ~sub_quals) == 0);
13259 for (count = 0, last = first;
13260 count < (1U << dwarf_qual_info_size);
13261 count++, last = last->die_sib)
13262 {
13263 int quals = 0;
13264 if (last == mod_scope->die_child)
13265 break;
13266 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13267 != first)
13268 break;
13269 }
13270 }
13271
13272 for (i = 0; i < dwarf_qual_info_size; i++)
13273 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13274 {
13275 dw_die_ref d;
13276 if (first && first != last)
13277 {
13278 for (d = first->die_sib; ; d = d->die_sib)
13279 {
13280 int quals = 0;
13281 qualified_die_p (d, &quals, dwarf_qual_info_size);
13282 if (quals == (first_quals | dwarf_qual_info[i].q))
13283 break;
13284 if (d == last)
13285 {
13286 d = NULL;
13287 break;
13288 }
13289 }
13290 if (d)
13291 {
13292 mod_type_die = d;
13293 continue;
13294 }
13295 }
13296 if (first)
13297 {
13298 d = new_die_raw (dwarf_qual_info[i].t);
13299 add_child_die_after (mod_scope, d, last);
13300 last = d;
13301 }
13302 else
13303 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13304 if (mod_type_die)
13305 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13306 mod_type_die = d;
13307 first_quals |= dwarf_qual_info[i].q;
13308 }
13309 }
13310 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13311 {
13312 dwarf_tag tag = DW_TAG_pointer_type;
13313 if (code == REFERENCE_TYPE)
13314 {
13315 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13316 tag = DW_TAG_rvalue_reference_type;
13317 else
13318 tag = DW_TAG_reference_type;
13319 }
13320 mod_type_die = new_die (tag, mod_scope, type);
13321
13322 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13323 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13324 add_alignment_attribute (mod_type_die, type);
13325 item_type = TREE_TYPE (type);
13326
13327 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13328 if (!ADDR_SPACE_GENERIC_P (as))
13329 {
13330 int action = targetm.addr_space.debug (as);
13331 if (action >= 0)
13332 {
13333 /* Positive values indicate an address_class. */
13334 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13335 }
13336 else
13337 {
13338 /* Negative values indicate an (inverted) segment base reg. */
13339 dw_loc_descr_ref d
13340 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13341 add_AT_loc (mod_type_die, DW_AT_segment, d);
13342 }
13343 }
13344 }
13345 else if (code == INTEGER_TYPE
13346 && TREE_TYPE (type) != NULL_TREE
13347 && subrange_type_for_debug_p (type, &low, &high))
13348 {
13349 tree bias = NULL_TREE;
13350 if (lang_hooks.types.get_type_bias)
13351 bias = lang_hooks.types.get_type_bias (type);
13352 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13353 item_type = TREE_TYPE (type);
13354 }
13355 else if (is_base_type (type))
13356 {
13357 mod_type_die = base_type_die (type, reverse);
13358
13359 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13360 if (reverse_base_type)
13361 {
13362 dw_die_ref after_die
13363 = modified_type_die (type, cv_quals, false, context_die);
13364 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13365 }
13366 else
13367 add_child_die (comp_unit_die (), mod_type_die);
13368
13369 add_pubtype (type, mod_type_die);
13370 }
13371 else
13372 {
13373 gen_type_die (type, context_die);
13374
13375 /* We have to get the type_main_variant here (and pass that to the
13376 `lookup_type_die' routine) because the ..._TYPE node we have
13377 might simply be a *copy* of some original type node (where the
13378 copy was created to help us keep track of typedef names) and
13379 that copy might have a different TYPE_UID from the original
13380 ..._TYPE node. */
13381 if (TREE_CODE (type) == FUNCTION_TYPE
13382 || TREE_CODE (type) == METHOD_TYPE)
13383 {
13384 /* For function/method types, can't just use type_main_variant here,
13385 because that can have different ref-qualifiers for C++,
13386 but try to canonicalize. */
13387 tree main = TYPE_MAIN_VARIANT (type);
13388 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13389 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13390 && check_base_type (t, main)
13391 && check_lang_type (t, type))
13392 return lookup_type_die (t);
13393 return lookup_type_die (type);
13394 }
13395 else if (TREE_CODE (type) != VECTOR_TYPE
13396 && TREE_CODE (type) != ARRAY_TYPE)
13397 return lookup_type_die (type_main_variant (type));
13398 else
13399 /* Vectors have the debugging information in the type,
13400 not the main variant. */
13401 return lookup_type_die (type);
13402 }
13403
13404 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13405 don't output a DW_TAG_typedef, since there isn't one in the
13406 user's program; just attach a DW_AT_name to the type.
13407 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13408 if the base type already has the same name. */
13409 if (name
13410 && ((TREE_CODE (name) != TYPE_DECL
13411 && (qualified_type == TYPE_MAIN_VARIANT (type)
13412 || (cv_quals == TYPE_UNQUALIFIED)))
13413 || (TREE_CODE (name) == TYPE_DECL
13414 && TREE_TYPE (name) == qualified_type
13415 && DECL_NAME (name))))
13416 {
13417 if (TREE_CODE (name) == TYPE_DECL)
13418 /* Could just call add_name_and_src_coords_attributes here,
13419 but since this is a builtin type it doesn't have any
13420 useful source coordinates anyway. */
13421 name = DECL_NAME (name);
13422 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13423 }
13424 /* This probably indicates a bug. */
13425 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13426 {
13427 name = TYPE_IDENTIFIER (type);
13428 add_name_attribute (mod_type_die,
13429 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13430 }
13431
13432 if (qualified_type && !reverse_base_type)
13433 equate_type_number_to_die (qualified_type, mod_type_die);
13434
13435 if (item_type)
13436 /* We must do this after the equate_type_number_to_die call, in case
13437 this is a recursive type. This ensures that the modified_type_die
13438 recursion will terminate even if the type is recursive. Recursive
13439 types are possible in Ada. */
13440 sub_die = modified_type_die (item_type,
13441 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13442 reverse,
13443 context_die);
13444
13445 if (sub_die != NULL)
13446 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13447
13448 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13449 if (TYPE_ARTIFICIAL (type))
13450 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13451
13452 return mod_type_die;
13453 }
13454
13455 /* Generate DIEs for the generic parameters of T.
13456 T must be either a generic type or a generic function.
13457 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
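/* For instance, for the C++ template 'template<typename T, int N> struct S',
   the DIE of an instantiation of S gets a DW_TAG_template_type_param child
   describing T and a DW_TAG_template_value_param child describing N;
   parameter packs are wrapped in a DW_TAG_GNU_template_parameter_pack.  */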
13458
13459 static void
13460 gen_generic_params_dies (tree t)
13461 {
13462 tree parms, args;
13463 int parms_num, i;
13464 dw_die_ref die = NULL;
13465 int non_default;
13466
13467 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13468 return;
13469
13470 if (TYPE_P (t))
13471 die = lookup_type_die (t);
13472 else if (DECL_P (t))
13473 die = lookup_decl_die (t);
13474
13475 gcc_assert (die);
13476
13477 parms = lang_hooks.get_innermost_generic_parms (t);
13478 if (!parms)
13479 /* T has no generic parameter. It means T is neither a generic type
13480 nor a generic function. End of story. */
13481 return;
13482
13483 parms_num = TREE_VEC_LENGTH (parms);
13484 args = lang_hooks.get_innermost_generic_args (t);
13485 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13486 non_default = int_cst_value (TREE_CHAIN (args));
13487 else
13488 non_default = TREE_VEC_LENGTH (args);
13489 for (i = 0; i < parms_num; i++)
13490 {
13491 tree parm, arg, arg_pack_elems;
13492 dw_die_ref parm_die;
13493
13494 parm = TREE_VEC_ELT (parms, i);
13495 arg = TREE_VEC_ELT (args, i);
13496 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13497 gcc_assert (parm && TREE_VALUE (parm) && arg);
13498
13499 if (parm && TREE_VALUE (parm) && arg)
13500 {
13501 /* If PARM represents a template parameter pack,
13502 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13503 by DW_TAG_template_*_parameter DIEs for the argument
13504 pack elements of ARG. Note that ARG would then be
13505 an argument pack. */
13506 if (arg_pack_elems)
13507 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13508 arg_pack_elems,
13509 die);
13510 else
13511 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13512 true /* emit name */, die);
13513 if (i >= non_default)
13514 add_AT_flag (parm_die, DW_AT_default_value, 1);
13515 }
13516 }
13517 }
13518
13519 /* Create and return a DIE for PARM which should be
13520 the representation of a generic type parameter.
13521 For instance, in the C++ front end, PARM would be a template parameter.
13522 ARG is the argument to PARM.
13523 EMIT_NAME_P: if true, the DIE will have a DW_AT_name attribute set to the
13524 name of the PARM.
13525 PARENT_DIE is the parent DIE which the new created DIE should be added to,
13526 as a child node. */
13527
13528 static dw_die_ref
13529 generic_parameter_die (tree parm, tree arg,
13530 bool emit_name_p,
13531 dw_die_ref parent_die)
13532 {
13533 dw_die_ref tmpl_die = NULL;
13534 const char *name = NULL;
13535
13536 if (!parm || !DECL_NAME (parm) || !arg)
13537 return NULL;
13538
13539 /* We support non-type generic parameters and arguments,
13540 type generic parameters and arguments, as well as
13541 generic generic parameters (a.k.a. template template parameters in C++)
13542 and arguments. */
13543 if (TREE_CODE (parm) == PARM_DECL)
13544 /* PARM is a nontype generic parameter */
13545 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13546 else if (TREE_CODE (parm) == TYPE_DECL)
13547 /* PARM is a type generic parameter. */
13548 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13549 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13550 /* PARM is a generic generic parameter.
13551 Its DIE is a GNU extension. It shall have a
13552 DW_AT_name attribute to represent the name of the template template
13553 parameter, and a DW_AT_GNU_template_name attribute to represent the
13554 name of the template template argument. */
13555 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13556 parent_die, parm);
13557 else
13558 gcc_unreachable ();
13559
13560 if (tmpl_die)
13561 {
13562 tree tmpl_type;
13563
13564 /* If PARM is a generic parameter pack, it means we are
13565 emitting debug info for a template argument pack element.
13566 In other terms, ARG is a template argument pack element.
13567 In that case, we don't emit any DW_AT_name attribute for
13568 the die. */
13569 if (emit_name_p)
13570 {
13571 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13572 gcc_assert (name);
13573 add_AT_string (tmpl_die, DW_AT_name, name);
13574 }
13575
13576 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13577 {
13578 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
13579 TMPL_DIE should have a child DW_AT_type attribute that is set
13580 to the type of the argument to PARM, which is ARG.
13581 If PARM is a type generic parameter, TMPL_DIE should have a
13582 child DW_AT_type that is set to ARG. */
13583 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13584 add_type_attribute (tmpl_die, tmpl_type,
13585 (TREE_THIS_VOLATILE (tmpl_type)
13586 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13587 false, parent_die);
13588 }
13589 else
13590 {
13591 /* So TMPL_DIE is a DIE representing a generic generic template
13592 parameter, a.k.a. a template template parameter in C++, and ARG
13593 is a template. */
13594
13595 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13596 to the name of the argument. */
13597 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13598 if (name)
13599 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13600 }
13601
13602 if (TREE_CODE (parm) == PARM_DECL)
13603 /* So PARM is a non-type generic parameter.
13604 DWARF3 5.6.8 says we must set a DW_AT_const_value child
13605 attribute of TMPL_DIE whose value represents the value
13606 of ARG.
13607 We must be careful here:
13608 The value of ARG might reference some function decls.
13609 We might currently be emitting debug info for a generic
13610 type; since types are emitted before function decls, we don't
13611 know whether the function decls referenced by ARG will actually be
13612 emitted after cgraph computations.
13613 So we must defer the generation of the DW_AT_const_value until
13614 after cgraph is ready. */
13615 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13616 }
13617
13618 return tmpl_die;
13619 }
13620
13621 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
13622 PARM_PACK. PARM_PACK must be a template parameter pack. The returned DIE
13623 will be a child DIE of PARENT_DIE. */
13624
13625 static dw_die_ref
13626 template_parameter_pack_die (tree parm_pack,
13627 tree parm_pack_args,
13628 dw_die_ref parent_die)
13629 {
13630 dw_die_ref die;
13631 int j;
13632
13633 gcc_assert (parent_die && parm_pack);
13634
13635 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13636 add_name_and_src_coords_attributes (die, parm_pack);
13637 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13638 generic_parameter_die (parm_pack,
13639 TREE_VEC_ELT (parm_pack_args, j),
13640 false /* Don't emit DW_AT_name */,
13641 die);
13642 return die;
13643 }
13644
13645 /* Return the DBX register number described by a given RTL node. */
13646
13647 static unsigned int
13648 dbx_reg_number (const_rtx rtl)
13649 {
13650 unsigned regno = REGNO (rtl);
13651
13652 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13653
13654 #ifdef LEAF_REG_REMAP
13655 if (crtl->uses_only_leaf_regs)
13656 {
13657 int leaf_reg = LEAF_REG_REMAP (regno);
13658 if (leaf_reg != -1)
13659 regno = (unsigned) leaf_reg;
13660 }
13661 #endif
13662
13663 regno = DBX_REGISTER_NUMBER (regno);
13664 gcc_assert (regno != INVALID_REGNUM);
13665 return regno;
13666 }
13667
13668 /* Optionally add a DW_OP_piece term to a location description expression.
13669 DW_OP_piece is only added if the location description expression does
13670 not already end with DW_OP_piece. */
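/* For instance, a 64-bit value spread over two 32-bit registers ends up
   described as DW_OP_reg3 DW_OP_piece 4 DW_OP_reg4 DW_OP_piece 4
   (the register numbers here are purely illustrative).  */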
13671
13672 static void
13673 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13674 {
13675 dw_loc_descr_ref loc;
13676
13677 if (*list_head != NULL)
13678 {
13679 /* Find the end of the chain. */
13680 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13681 ;
13682
13683 if (loc->dw_loc_opc != DW_OP_piece)
13684 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13685 }
13686 }
13687
13688 /* Return a location descriptor that designates a machine register or
13689 zero if there is none. */
13690
13691 static dw_loc_descr_ref
13692 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13693 {
13694 rtx regs;
13695
13696 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13697 return 0;
13698
13699 /* We only use "frame base" when we're sure we're talking about the
13700 post-prologue local stack frame. We do this by *not* running
13701 register elimination until this point, and recognizing the special
13702 argument pointer and soft frame pointer rtx's.
13703 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13704 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13705 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13706 {
13707 dw_loc_descr_ref result = NULL;
13708
13709 if (dwarf_version >= 4 || !dwarf_strict)
13710 {
13711 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13712 initialized);
13713 if (result)
13714 add_loc_descr (&result,
13715 new_loc_descr (DW_OP_stack_value, 0, 0));
13716 }
13717 return result;
13718 }
13719
13720 regs = targetm.dwarf_register_span (rtl);
13721
13722 if (REG_NREGS (rtl) > 1 || regs)
13723 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13724 else
13725 {
13726 unsigned int dbx_regnum = dbx_reg_number (rtl);
13727 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13728 return 0;
13729 return one_reg_loc_descriptor (dbx_regnum, initialized);
13730 }
13731 }
13732
13733 /* Return a location descriptor that designates a machine register for
13734 a given hard register number. */
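/* For instance, DWARF register 5 is described by the single byte DW_OP_reg5,
   while register 40 needs DW_OP_regx followed by the ULEB128 value 40.  */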
13735
13736 static dw_loc_descr_ref
13737 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13738 {
13739 dw_loc_descr_ref reg_loc_descr;
13740
13741 if (regno <= 31)
13742 reg_loc_descr
13743 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13744 else
13745 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13746
13747 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13748 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13749
13750 return reg_loc_descr;
13751 }
13752
13753 /* Given an RTL of a register, return a location descriptor that
13754 designates a value that spans more than one register. */
13755
13756 static dw_loc_descr_ref
13757 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13758 enum var_init_status initialized)
13759 {
13760 int size, i;
13761 dw_loc_descr_ref loc_result = NULL;
13762
13763 /* Simple, contiguous registers. */
13764 if (regs == NULL_RTX)
13765 {
13766 unsigned reg = REGNO (rtl);
13767 int nregs;
13768
13769 #ifdef LEAF_REG_REMAP
13770 if (crtl->uses_only_leaf_regs)
13771 {
13772 int leaf_reg = LEAF_REG_REMAP (reg);
13773 if (leaf_reg != -1)
13774 reg = (unsigned) leaf_reg;
13775 }
13776 #endif
13777
13778 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13779 nregs = REG_NREGS (rtl);
13780
13781 /* At present we only track constant-sized pieces. */
13782 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13783 return NULL;
13784 size /= nregs;
13785
13786 loc_result = NULL;
13787 while (nregs--)
13788 {
13789 dw_loc_descr_ref t;
13790
13791 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13792 VAR_INIT_STATUS_INITIALIZED);
13793 add_loc_descr (&loc_result, t);
13794 add_loc_descr_op_piece (&loc_result, size);
13795 ++reg;
13796 }
13797 return loc_result;
13798 }
13799
13800 /* Now onto stupid register sets in non-contiguous locations. */
13801
13802 gcc_assert (GET_CODE (regs) == PARALLEL);
13803
13804 /* At present we only track constant-sized pieces. */
13805 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13806 return NULL;
13807 loc_result = NULL;
13808
13809 for (i = 0; i < XVECLEN (regs, 0); ++i)
13810 {
13811 dw_loc_descr_ref t;
13812
13813 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13814 VAR_INIT_STATUS_INITIALIZED);
13815 add_loc_descr (&loc_result, t);
13816 add_loc_descr_op_piece (&loc_result, size);
13817 }
13818
13819 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13820 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13821 return loc_result;
13822 }
13823
13824 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13825
13826 /* Return a location descriptor that designates a constant i,
13827 as a compound operation from constant (i >> shift), constant shift
13828 and DW_OP_shl. */
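/* For instance, 0x1200000000 (i.e. 18 << 32) can be emitted as
   DW_OP_lit18 DW_OP_const1u 32 DW_OP_shl, 4 bytes in total, where a plain
   DW_OP_constu would need a 6-byte LEB128 payload plus the opcode.  */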
13829
13830 static dw_loc_descr_ref
13831 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13832 {
13833 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13834 add_loc_descr (&ret, int_loc_descriptor (shift));
13835 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13836 return ret;
13837 }
13838
13839 /* Return a location descriptor that designates constant POLY_I. */
13840
13841 static dw_loc_descr_ref
13842 int_loc_descriptor (poly_int64 poly_i)
13843 {
13844 enum dwarf_location_atom op;
13845
13846 HOST_WIDE_INT i;
13847 if (!poly_i.is_constant (&i))
13848 {
13849 /* Create location descriptions for the non-constant part and
13850 add any constant offset at the end. */
13851 dw_loc_descr_ref ret = NULL;
13852 HOST_WIDE_INT constant = poly_i.coeffs[0];
13853 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13854 {
13855 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13856 if (coeff != 0)
13857 {
13858 dw_loc_descr_ref start = ret;
13859 unsigned int factor;
13860 int bias;
13861 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13862 (j, &factor, &bias);
13863
13864 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13865 add COEFF * (REGNO / FACTOR) now and subtract
13866 COEFF * BIAS from the final constant part. */
13867 constant -= coeff * bias;
13868 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13869 if (coeff % factor == 0)
13870 coeff /= factor;
13871 else
13872 {
13873 int amount = exact_log2 (factor);
13874 gcc_assert (amount >= 0);
13875 add_loc_descr (&ret, int_loc_descriptor (amount));
13876 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13877 }
13878 if (coeff != 1)
13879 {
13880 add_loc_descr (&ret, int_loc_descriptor (coeff));
13881 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13882 }
13883 if (start)
13884 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13885 }
13886 }
13887 loc_descr_plus_const (&ret, constant);
13888 return ret;
13889 }
13890
13891 /* Pick the smallest representation of a constant, rather than just
13892 defaulting to the LEB encoding. */
13893 if (i >= 0)
13894 {
13895 int clz = clz_hwi (i);
13896 int ctz = ctz_hwi (i);
13897 if (i <= 31)
13898 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13899 else if (i <= 0xff)
13900 op = DW_OP_const1u;
13901 else if (i <= 0xffff)
13902 op = DW_OP_const2u;
13903 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13904 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13905 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13906 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13907 while DW_OP_const4u is 5 bytes. */
13908 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13909 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13910 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13911 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13912 while DW_OP_const4u is 5 bytes. */
13913 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13914
13915 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13916 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13917 <= 4)
13918 {
13919 /* As i >= 2**31, the double cast above will yield a negative number.
13920 Since wrapping is defined in DWARF expressions we can output big
13921 positive integers as small negative ones, regardless of the size
13922 of host wide ints.
13923
13924 Here, since the evaluator will handle 32-bit values and since i >=
13925 2**31, we know it's going to be interpreted as a negative literal:
13926 store it this way if we can do better than 5 bytes this way. */
13927 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13928 }
13929 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13930 op = DW_OP_const4u;
13931
13932 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
13933 least 6 bytes: see if we can do better before falling back to it. */
13934 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13935 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13936 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
13937 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13938 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13939 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
13940 >= HOST_BITS_PER_WIDE_INT)
13941 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
13942 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
13943 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
13944 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13945 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13946 && size_of_uleb128 (i) > 6)
13947 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
13948 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
13949 else
13950 op = DW_OP_constu;
13951 }
13952 else
13953 {
13954 if (i >= -0x80)
13955 op = DW_OP_const1s;
13956 else if (i >= -0x8000)
13957 op = DW_OP_const2s;
13958 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
13959 {
13960 if (size_of_int_loc_descriptor (i) < 5)
13961 {
13962 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13963 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13964 return ret;
13965 }
13966 op = DW_OP_const4s;
13967 }
13968 else
13969 {
13970 if (size_of_int_loc_descriptor (i)
13971 < (unsigned long) 1 + size_of_sleb128 (i))
13972 {
13973 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13974 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13975 return ret;
13976 }
13977 op = DW_OP_consts;
13978 }
13979 }
13980
13981 return new_loc_descr (op, i, 0);
13982 }
13983
13984 /* Likewise, for unsigned constants. */
13985
13986 static dw_loc_descr_ref
13987 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
13988 {
13989 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
13990 const unsigned HOST_WIDE_INT max_uint
13991 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
13992
13993 /* If possible, use the clever signed constants handling. */
13994 if (i <= max_int)
13995 return int_loc_descriptor ((HOST_WIDE_INT) i);
13996
13997 /* Here, we are left with positive numbers that cannot be represented as
13998 HOST_WIDE_INT, i.e.:
13999 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
14000
14001 Using DW_OP_const4/8/./u operation to encode them consumes a lot of bytes
14002 whereas it may be better to output a negative integer: thanks to integer
14003 wrapping, we know that:
14004 x = x - 2 ** DWARF2_ADDR_SIZE
14005 = x - 2 * (max (HOST_WIDE_INT) + 1)
14006 So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
14007 small negative integers. Let's try that in cases it will clearly improve
14008 the encoding: there is no gain turning DW_OP_const4u into
14009 DW_OP_const4s. */
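  /* As an illustration, assuming a 64-bit HOST_WIDE_INT and
     DWARF2_ADDR_SIZE == 8, the value 0xfffffffffffffffe is emitted below
     as DW_OP_const1s -2 (2 bytes) rather than DW_OP_const8u plus 8 bytes
     of payload, since the DWARF evaluator wraps modulo 2**64.  */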
14010 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
14011 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
14012 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
14013 {
14014 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
14015
14016 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
14017 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
14018 const HOST_WIDE_INT second_shift
14019 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
14020
14021 /* So we finally have:
14022 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
14023 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
14024 return int_loc_descriptor (second_shift);
14025 }
14026
14027 /* Last chance: fall back to a simple constant operation. */
14028 return new_loc_descr
14029 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14030 ? DW_OP_const4u
14031 : DW_OP_const8u,
14032 i, 0);
14033 }
14034
14035 /* Generate and return a location description that computes the unsigned
14036 comparison of the two stack top entries (a OP b where b is the top-most
14037 entry and a is the second one). The KIND of comparison can be LT_EXPR,
14038 LE_EXPR, GT_EXPR or GE_EXPR. */
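/* Schematically, the generated expression is:
     DW_OP_over DW_OP_over DW_OP_xor DW_OP_lit0 DW_OP_lt DW_OP_bra <flip>
     <signed OP> DW_OP_skip <join> <flip:> <flipped OP> <join:> DW_OP_nop.  */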
14039
14040 static dw_loc_descr_ref
14041 uint_comparison_loc_list (enum tree_code kind)
14042 {
14043 enum dwarf_location_atom op, flip_op;
14044 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
14045
14046 switch (kind)
14047 {
14048 case LT_EXPR:
14049 op = DW_OP_lt;
14050 break;
14051 case LE_EXPR:
14052 op = DW_OP_le;
14053 break;
14054 case GT_EXPR:
14055 op = DW_OP_gt;
14056 break;
14057 case GE_EXPR:
14058 op = DW_OP_ge;
14059 break;
14060 default:
14061 gcc_unreachable ();
14062 }
14063
14064 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14065 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
14066
14067 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
14068 possible to perform unsigned comparisons: we just have to distinguish
14069 three cases:
14070
14071 1. when a and b have the same sign (as signed integers); then we should
14072 return: a OP(signed) b;
14073
14074 2. when a is a negative signed integer while b is a positive one, then a
14075 is a greater unsigned integer than b; likewise (the third case), when
14076 a and b's roles are flipped, a is the smaller unsigned integer.
14077
14078 So first, compare the sign of the two operands. */
14079 ret = new_loc_descr (DW_OP_over, 0, 0);
14080 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14081 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
14082 /* If they have different signs (i.e. they have different sign bits), then
14083 the stack top value now has the sign bit set and thus it's smaller than
14084 zero. */
14085 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
14086 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
14087 add_loc_descr (&ret, bra_node);
14088
14089 /* We are in case 1. At this point, we know both operands have the same
14090 sign, so it's safe to use the built-in signed comparison. */
14091 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14092 add_loc_descr (&ret, jmp_node);
14093
14094 /* We are in case 2. Here, we know both operands do not have the same sign,
14095 so we have to flip the signed comparison. */
14096 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
14097 tmp = new_loc_descr (flip_op, 0, 0);
14098 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14099 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
14100 add_loc_descr (&ret, tmp);
14101
14102 /* This dummy operation is necessary to make the two branches join. */
14103 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14104 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14105 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14106 add_loc_descr (&ret, tmp);
14107
14108 return ret;
14109 }
14110
14111 /* Likewise, but takes the location description lists (might be destructive on
14112 them). Return NULL if either is NULL or if concatenation fails. */
14113
14114 static dw_loc_list_ref
14115 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14116 enum tree_code kind)
14117 {
14118 if (left == NULL || right == NULL)
14119 return NULL;
14120
14121 add_loc_list (&left, right);
14122 if (left == NULL)
14123 return NULL;
14124
14125 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14126 return left;
14127 }
14128
14129 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14130 without actually allocating it. */
14131
14132 static unsigned long
14133 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14134 {
14135 return size_of_int_loc_descriptor (i >> shift)
14136 + size_of_int_loc_descriptor (shift)
14137 + 1;
14138 }
14139
14140 /* Return size_of_locs (int_loc_descriptor (i)) without
14141 actually allocating it. */
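/* Illustrative examples only (assuming a 64-bit HOST_WIDE_INT): i = 5 costs
   1 byte (DW_OP_lit5), i = 200 costs 2 (DW_OP_const1u), i = 70000 costs 5
   (DW_OP_const4u) and i = -3 costs 2 (DW_OP_const1s).  */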
14142
14143 static unsigned long
14144 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14145 {
14146 unsigned long s;
14147
14148 if (i >= 0)
14149 {
14150 int clz, ctz;
14151 if (i <= 31)
14152 return 1;
14153 else if (i <= 0xff)
14154 return 2;
14155 else if (i <= 0xffff)
14156 return 3;
14157 clz = clz_hwi (i);
14158 ctz = ctz_hwi (i);
14159 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14160 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14161 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14162 - clz - 5);
14163 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14164 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14165 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14166 - clz - 8);
14167 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14168 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14169 <= 4)
14170 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14171 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14172 return 5;
14173 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14174 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14175 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14176 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14177 - clz - 8);
14178 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14179 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14180 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14181 - clz - 16);
14182 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14183 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14184 && s > 6)
14185 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14186 - clz - 32);
14187 else
14188 return 1 + s;
14189 }
14190 else
14191 {
14192 if (i >= -0x80)
14193 return 2;
14194 else if (i >= -0x8000)
14195 return 3;
14196 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14197 {
14198 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14199 {
14200 s = size_of_int_loc_descriptor (-i) + 1;
14201 if (s < 5)
14202 return s;
14203 }
14204 return 5;
14205 }
14206 else
14207 {
14208 unsigned long r = 1 + size_of_sleb128 (i);
14209 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14210 {
14211 s = size_of_int_loc_descriptor (-i) + 1;
14212 if (s < r)
14213 return s;
14214 }
14215 return r;
14216 }
14217 }
14218 }
14219
14220 /* Return a loc description representing the "address" of an integer value.
14221    This can appear only as a toplevel expression.  */
14222
14223 static dw_loc_descr_ref
14224 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14225 {
14226 int litsize;
14227 dw_loc_descr_ref loc_result = NULL;
14228
14229 if (!(dwarf_version >= 4 || !dwarf_strict))
14230 return NULL;
14231
14232 litsize = size_of_int_loc_descriptor (i);
14233 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14234 is more compact. For DW_OP_stack_value we need:
14235 litsize + 1 (DW_OP_stack_value)
14236 and for DW_OP_implicit_value:
14237 1 (DW_OP_implicit_value) + 1 (length) + size. */
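  /* Illustration (assuming an 8-byte address size): for SIZE 8 and I = 5,
     litsize is 1, so DW_OP_lit5 DW_OP_stack_value (2 bytes) wins; for a
     full-width constant such as 0x0123456789abcdef, litsize is 10, so
     DW_OP_implicit_value 8 <8 data bytes> (10 bytes) is emitted instead.  */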
14238 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14239 {
14240 loc_result = int_loc_descriptor (i);
14241 add_loc_descr (&loc_result,
14242 new_loc_descr (DW_OP_stack_value, 0, 0));
14243 return loc_result;
14244 }
14245
14246 loc_result = new_loc_descr (DW_OP_implicit_value,
14247 size, 0);
14248 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14249 loc_result->dw_loc_oprnd2.v.val_int = i;
14250 return loc_result;
14251 }
14252
14253 /* Return a location descriptor that designates a base+offset location. */
14254
14255 static dw_loc_descr_ref
14256 based_loc_descr (rtx reg, poly_int64 offset,
14257 enum var_init_status initialized)
14258 {
14259 unsigned int regno;
14260 dw_loc_descr_ref result;
14261 dw_fde_ref fde = cfun->fde;
14262
14263 /* We only use "frame base" when we're sure we're talking about the
14264 post-prologue local stack frame. We do this by *not* running
14265 register elimination until this point, and recognizing the special
14266 argument pointer and soft frame pointer rtx's. */
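  /* Illustrative examples only (x86-64 DWARF register numbering assumed):
     a local at frame offset -24 typically ends up as DW_OP_fbreg -24, while
     a stack slot addressed off the stack pointer may come out as
     DW_OP_breg7 <offset>, register 7 being %rsp.  */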
14267 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14268 {
14269 rtx elim = (ira_use_lra_p
14270 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14271 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14272
14273 if (elim != reg)
14274 {
14275 /* Allow hard frame pointer here even if frame pointer
14276 isn't used since hard frame pointer is encoded with
14277 DW_OP_fbreg which uses the DW_AT_frame_base attribute,
14278 not hard frame pointer directly. */
14279 elim = strip_offset_and_add (elim, &offset);
14280 gcc_assert (elim == hard_frame_pointer_rtx
14281 || elim == stack_pointer_rtx);
14282
14283 /* If drap register is used to align stack, use frame
14284 pointer + offset to access stack variables. If stack
14285 is aligned without drap, use stack pointer + offset to
14286 access stack variables. */
14287 if (crtl->stack_realign_tried
14288 && reg == frame_pointer_rtx)
14289 {
14290 int base_reg
14291 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14292 ? HARD_FRAME_POINTER_REGNUM
14293 : REGNO (elim));
14294 return new_reg_loc_descr (base_reg, offset);
14295 }
14296
14297 gcc_assert (frame_pointer_fb_offset_valid);
14298 offset += frame_pointer_fb_offset;
14299 HOST_WIDE_INT const_offset;
14300 if (offset.is_constant (&const_offset))
14301 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14302 else
14303 {
14304 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14305 loc_descr_plus_const (&ret, offset);
14306 return ret;
14307 }
14308 }
14309 }
14310
14311 regno = REGNO (reg);
14312 #ifdef LEAF_REG_REMAP
14313 if (crtl->uses_only_leaf_regs)
14314 {
14315 int leaf_reg = LEAF_REG_REMAP (regno);
14316 if (leaf_reg != -1)
14317 regno = (unsigned) leaf_reg;
14318 }
14319 #endif
14320 regno = DWARF_FRAME_REGNUM (regno);
14321
14322 HOST_WIDE_INT const_offset;
14323 if (!optimize && fde
14324 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14325 && offset.is_constant (&const_offset))
14326 {
14327       /* Use cfa+offset to represent the location of arguments passed
14328 	 on the stack when drap is used to align the stack.
14329 	 Only do this when not optimizing; for optimized code var-tracking
14330 	 is supposed to track where the arguments live, and the register
14331 	 used as vdrap or drap in some spot might be used for something
14332 	 else in another part of the routine.  */
14333 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14334 }
14335
14336 result = new_reg_loc_descr (regno, offset);
14337
14338 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14339 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14340
14341 return result;
14342 }
14343
14344 /* Return true if this RTL expression describes a base+offset calculation. */
14345
14346 static inline int
14347 is_based_loc (const_rtx rtl)
14348 {
14349 return (GET_CODE (rtl) == PLUS
14350 && ((REG_P (XEXP (rtl, 0))
14351 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14352 && CONST_INT_P (XEXP (rtl, 1)))));
14353 }
14354
14355 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14356 failed. */
14357
14358 static dw_loc_descr_ref
14359 tls_mem_loc_descriptor (rtx mem)
14360 {
14361 tree base;
14362 dw_loc_descr_ref loc_result;
14363
14364 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14365 return NULL;
14366
14367 base = get_base_address (MEM_EXPR (mem));
14368 if (base == NULL
14369 || !VAR_P (base)
14370 || !DECL_THREAD_LOCAL_P (base))
14371 return NULL;
14372
14373 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14374 if (loc_result == NULL)
14375 return NULL;
14376
14377 if (maybe_ne (MEM_OFFSET (mem), 0))
14378 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14379
14380 return loc_result;
14381 }
14382
14383 /* Output debug info about the reason why we failed to expand an expression
14384    as a DWARF expression.  */
14385
14386 static void
14387 expansion_failed (tree expr, rtx rtl, char const *reason)
14388 {
14389 if (dump_file && (dump_flags & TDF_DETAILS))
14390 {
14391 fprintf (dump_file, "Failed to expand as dwarf: ");
14392 if (expr)
14393 print_generic_expr (dump_file, expr, dump_flags);
14394 if (rtl)
14395 {
14396 fprintf (dump_file, "\n");
14397 print_rtl (dump_file, rtl);
14398 }
14399 fprintf (dump_file, "\nReason: %s\n", reason);
14400 }
14401 }
14402
14403 /* Helper function for const_ok_for_output. */
14404
14405 static bool
14406 const_ok_for_output_1 (rtx rtl)
14407 {
14408 if (targetm.const_not_ok_for_debug_p (rtl))
14409 {
14410 if (GET_CODE (rtl) != UNSPEC)
14411 {
14412 expansion_failed (NULL_TREE, rtl,
14413 "Expression rejected for debug by the backend.\n");
14414 return false;
14415 }
14416
14417 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14418 the target hook doesn't explicitly allow it in debug info, assume
14419 we can't express it in the debug info. */
14420 /* Don't complain about TLS UNSPECs, those are just too hard to
14421 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14422 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14423 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14424 if (flag_checking
14425 && (XVECLEN (rtl, 0) == 0
14426 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14427 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14428 inform (current_function_decl
14429 ? DECL_SOURCE_LOCATION (current_function_decl)
14430 : UNKNOWN_LOCATION,
14431 #if NUM_UNSPEC_VALUES > 0
14432 "non-delegitimized UNSPEC %s (%d) found in variable location",
14433 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14434 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14435 XINT (rtl, 1));
14436 #else
14437 "non-delegitimized UNSPEC %d found in variable location",
14438 XINT (rtl, 1));
14439 #endif
14440 expansion_failed (NULL_TREE, rtl,
14441 "UNSPEC hasn't been delegitimized.\n");
14442 return false;
14443 }
14444
14445 if (CONST_POLY_INT_P (rtl))
14446 return false;
14447
14448 if (targetm.const_not_ok_for_debug_p (rtl))
14449 {
14450 expansion_failed (NULL_TREE, rtl,
14451 "Expression rejected for debug by the backend.\n");
14452 return false;
14453 }
14454
14455 /* FIXME: Refer to PR60655. It is possible for simplification
14456 of rtl expressions in var tracking to produce such expressions.
14457 We should really identify / validate expressions
14458 enclosed in CONST that can be handled by assemblers on various
14459 targets and only handle legitimate cases here. */
14460 switch (GET_CODE (rtl))
14461 {
14462 case SYMBOL_REF:
14463 break;
14464 case NOT:
14465 case NEG:
14466 return false;
14467 default:
14468 return true;
14469 }
14470
14471 if (CONSTANT_POOL_ADDRESS_P (rtl))
14472 {
14473 bool marked;
14474 get_pool_constant_mark (rtl, &marked);
14475 /* If all references to this pool constant were optimized away,
14476 it was not output and thus we can't represent it. */
14477 if (!marked)
14478 {
14479 expansion_failed (NULL_TREE, rtl,
14480 "Constant was removed from constant pool.\n");
14481 return false;
14482 }
14483 }
14484
14485 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14486 return false;
14487
14488   /* Avoid references to external symbols in debug info; on several targets
14489      the linker might even refuse to link when linking a shared library,
14490      and in many other cases the relocations for .debug_info/.debug_loc are
14491      dropped, so the address becomes zero anyway.  Hidden symbols, which are
14492      guaranteed to be defined within the same shared library or executable, are fine.  */
14493 if (SYMBOL_REF_EXTERNAL_P (rtl))
14494 {
14495 tree decl = SYMBOL_REF_DECL (rtl);
14496
14497 if (decl == NULL || !targetm.binds_local_p (decl))
14498 {
14499 expansion_failed (NULL_TREE, rtl,
14500 "Symbol not defined in current TU.\n");
14501 return false;
14502 }
14503 }
14504
14505 return true;
14506 }
14507
14508 /* Return true if constant RTL can be emitted in DW_OP_addr or
14509 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14510 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
14511
14512 static bool
14513 const_ok_for_output (rtx rtl)
14514 {
14515 if (GET_CODE (rtl) == SYMBOL_REF)
14516 return const_ok_for_output_1 (rtl);
14517
14518 if (GET_CODE (rtl) == CONST)
14519 {
14520 subrtx_var_iterator::array_type array;
14521 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14522 if (!const_ok_for_output_1 (*iter))
14523 return false;
14524 return true;
14525 }
14526
14527 return true;
14528 }
14529
14530 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14531 if possible, NULL otherwise. */
14532
14533 static dw_die_ref
14534 base_type_for_mode (machine_mode mode, bool unsignedp)
14535 {
14536 dw_die_ref type_die;
14537 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14538
14539 if (type == NULL)
14540 return NULL;
14541 switch (TREE_CODE (type))
14542 {
14543 case INTEGER_TYPE:
14544 case REAL_TYPE:
14545 break;
14546 default:
14547 return NULL;
14548 }
14549 type_die = lookup_type_die (type);
14550 if (!type_die)
14551 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14552 comp_unit_die ());
14553 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14554 return NULL;
14555 return type_die;
14556 }
14557
14558 /* For OP descriptor assumed to be in unsigned MODE, convert it to an unsigned
14559 type matching MODE, or, if MODE is narrower than or as wide as
14560 DWARF2_ADDR_SIZE, untyped. Return NULL if the conversion is not
14561 possible. */
14562
14563 static dw_loc_descr_ref
14564 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14565 {
14566 machine_mode outer_mode = mode;
14567 dw_die_ref type_die;
14568 dw_loc_descr_ref cvt;
14569
14570 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14571 {
14572 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14573 return op;
14574 }
14575 type_die = base_type_for_mode (outer_mode, 1);
14576 if (type_die == NULL)
14577 return NULL;
14578 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14579 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14580 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14581 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14582 add_loc_descr (&op, cvt);
14583 return op;
14584 }
14585
14586 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14587
14588 static dw_loc_descr_ref
14589 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14590 dw_loc_descr_ref op1)
14591 {
14592 dw_loc_descr_ref ret = op0;
14593 add_loc_descr (&ret, op1);
14594 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
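  /* DWARF comparisons push 0 or 1; if the target encodes a true comparison
     result as some other value (STORE_FLAG_VALUE, e.g. -1), scale the result
     below so the expression matches the target convention.  */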
14595 if (STORE_FLAG_VALUE != 1)
14596 {
14597 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14598 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14599 }
14600 return ret;
14601 }
14602
14603 /* Subroutine of scompare_loc_descriptor for the case in which we're
14604 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14605 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14606
14607 static dw_loc_descr_ref
14608 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14609 scalar_int_mode op_mode,
14610 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14611 {
14612 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14613 dw_loc_descr_ref cvt;
14614
14615 if (type_die == NULL)
14616 return NULL;
14617 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14618 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14619 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14620 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14621 add_loc_descr (&op0, cvt);
14622 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14623 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14624 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14625 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14626 add_loc_descr (&op1, cvt);
14627 return compare_loc_descriptor (op, op0, op1);
14628 }
14629
14630 /* Subroutine of scompare_loc_descriptor for the case in which we're
14631 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14632 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14633
14634 static dw_loc_descr_ref
14635 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14636 scalar_int_mode op_mode,
14637 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14638 {
14639 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14640 /* For eq/ne, if the operands are known to be zero-extended,
14641 there is no need to do the fancy shifting up. */
14642 if (op == DW_OP_eq || op == DW_OP_ne)
14643 {
14644 dw_loc_descr_ref last0, last1;
14645 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14646 ;
14647 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14648 ;
14649 /* deref_size zero extends, and for constants we can check
14650 whether they are zero extended or not. */
14651 if (((last0->dw_loc_opc == DW_OP_deref_size
14652 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14653 || (CONST_INT_P (XEXP (rtl, 0))
14654 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14655 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14656 && ((last1->dw_loc_opc == DW_OP_deref_size
14657 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14658 || (CONST_INT_P (XEXP (rtl, 1))
14659 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14660 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14661 return compare_loc_descriptor (op, op0, op1);
14662
14663 /* EQ/NE comparison against constant in narrower type than
14664 DWARF2_ADDR_SIZE can be performed either as
14665 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14666 DW_OP_{eq,ne}
14667 or
14668 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14669 DW_OP_{eq,ne}. Pick whatever is shorter. */
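      /* Illustration only (sizes assume an 8-byte address): for x == 5 in a
	 16-bit mode, the shift form needs a constant for 5 << 48 on top of
	 DW_OP_const1u 48 DW_OP_shl, while the mask form is just
	 DW_OP_const2u 0xffff DW_OP_and DW_OP_lit5, so the mask form wins.  */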
14670 if (CONST_INT_P (XEXP (rtl, 1))
14671 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14672 && (size_of_int_loc_descriptor (shift) + 1
14673 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14674 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14675 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14676 & GET_MODE_MASK (op_mode))))
14677 {
14678 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14679 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14680 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14681 & GET_MODE_MASK (op_mode));
14682 return compare_loc_descriptor (op, op0, op1);
14683 }
14684 }
14685 add_loc_descr (&op0, int_loc_descriptor (shift));
14686 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14687 if (CONST_INT_P (XEXP (rtl, 1)))
14688 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14689 else
14690 {
14691 add_loc_descr (&op1, int_loc_descriptor (shift));
14692 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14693 }
14694 return compare_loc_descriptor (op, op0, op1);
14695 }
14696
14697 /* Return location descriptor for signed comparison OP RTL.  */
14698
14699 static dw_loc_descr_ref
14700 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14701 machine_mode mem_mode)
14702 {
14703 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14704 dw_loc_descr_ref op0, op1;
14705
14706 if (op_mode == VOIDmode)
14707 op_mode = GET_MODE (XEXP (rtl, 1));
14708 if (op_mode == VOIDmode)
14709 return NULL;
14710
14711 scalar_int_mode int_op_mode;
14712 if (dwarf_strict
14713 && dwarf_version < 5
14714 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14715 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14716 return NULL;
14717
14718 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14719 VAR_INIT_STATUS_INITIALIZED);
14720 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14721 VAR_INIT_STATUS_INITIALIZED);
14722
14723 if (op0 == NULL || op1 == NULL)
14724 return NULL;
14725
14726 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14727 {
14728 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14729 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14730
14731 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14732 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14733 }
14734 return compare_loc_descriptor (op, op0, op1);
14735 }
14736
14737 /* Return location descriptor for unsigned comparison OP RTL. */
14738
14739 static dw_loc_descr_ref
14740 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14741 machine_mode mem_mode)
14742 {
14743 dw_loc_descr_ref op0, op1;
14744
14745 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14746 if (test_op_mode == VOIDmode)
14747 test_op_mode = GET_MODE (XEXP (rtl, 1));
14748
14749 scalar_int_mode op_mode;
14750 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14751 return NULL;
14752
14753 if (dwarf_strict
14754 && dwarf_version < 5
14755 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14756 return NULL;
14757
14758 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14759 VAR_INIT_STATUS_INITIALIZED);
14760 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14761 VAR_INIT_STATUS_INITIALIZED);
14762
14763 if (op0 == NULL || op1 == NULL)
14764 return NULL;
14765
14766 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14767 {
14768 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14769 dw_loc_descr_ref last0, last1;
14770 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14771 ;
14772 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14773 ;
14774 if (CONST_INT_P (XEXP (rtl, 0)))
14775 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14776 /* deref_size zero extends, so no need to mask it again. */
14777 else if (last0->dw_loc_opc != DW_OP_deref_size
14778 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14779 {
14780 add_loc_descr (&op0, int_loc_descriptor (mask));
14781 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14782 }
14783 if (CONST_INT_P (XEXP (rtl, 1)))
14784 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14785 /* deref_size zero extends, so no need to mask it again. */
14786 else if (last1->dw_loc_opc != DW_OP_deref_size
14787 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14788 {
14789 add_loc_descr (&op1, int_loc_descriptor (mask));
14790 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14791 }
14792 }
14793 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14794 {
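      /* Adding 2^(N-1) to both operands (modulo 2^N this just flips their
	 sign bits) maps unsigned order onto signed order, so the signed
	 DWARF comparison below yields the unsigned result.  */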
14795 HOST_WIDE_INT bias = 1;
14796 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14797 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14798 if (CONST_INT_P (XEXP (rtl, 1)))
14799 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14800 + INTVAL (XEXP (rtl, 1)));
14801 else
14802 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14803 bias, 0));
14804 }
14805 return compare_loc_descriptor (op, op0, op1);
14806 }
14807
14808 /* Return location descriptor for {U,S}{MIN,MAX}. */
14809
14810 static dw_loc_descr_ref
14811 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14812 machine_mode mem_mode)
14813 {
14814 enum dwarf_location_atom op;
14815 dw_loc_descr_ref op0, op1, ret;
14816 dw_loc_descr_ref bra_node, drop_node;
14817
14818 scalar_int_mode int_mode;
14819 if (dwarf_strict
14820 && dwarf_version < 5
14821 && (!is_a <scalar_int_mode> (mode, &int_mode)
14822 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14823 return NULL;
14824
14825 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14826 VAR_INIT_STATUS_INITIALIZED);
14827 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14828 VAR_INIT_STATUS_INITIALIZED);
14829
14830 if (op0 == NULL || op1 == NULL)
14831 return NULL;
14832
14833 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14834 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14835 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
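  /* The stack now holds A B followed by (possibly adjusted below) copies
     A' B', with B' on top; the comparison consumes the copies and the
     conditional branch then drops either B (keeping A) or, after a swap,
     A (keeping B).  */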
14836 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14837 {
14838 /* Checked by the caller. */
14839 int_mode = as_a <scalar_int_mode> (mode);
14840 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14841 {
14842 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14843 add_loc_descr (&op0, int_loc_descriptor (mask));
14844 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14845 add_loc_descr (&op1, int_loc_descriptor (mask));
14846 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14847 }
14848 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14849 {
14850 HOST_WIDE_INT bias = 1;
14851 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14852 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14853 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14854 }
14855 }
14856 else if (is_a <scalar_int_mode> (mode, &int_mode)
14857 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14858 {
14859 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14860 add_loc_descr (&op0, int_loc_descriptor (shift));
14861 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14862 add_loc_descr (&op1, int_loc_descriptor (shift));
14863 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14864 }
14865 else if (is_a <scalar_int_mode> (mode, &int_mode)
14866 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14867 {
14868 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14869 dw_loc_descr_ref cvt;
14870 if (type_die == NULL)
14871 return NULL;
14872 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14873 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14874 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14875 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14876 add_loc_descr (&op0, cvt);
14877 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14878 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14879 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14880 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14881 add_loc_descr (&op1, cvt);
14882 }
14883
14884 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14885 op = DW_OP_lt;
14886 else
14887 op = DW_OP_gt;
14888 ret = op0;
14889 add_loc_descr (&ret, op1);
14890 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14891 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14892 add_loc_descr (&ret, bra_node);
14893 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14894 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
14895 add_loc_descr (&ret, drop_node);
14896 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14897 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
14898 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
14899 && is_a <scalar_int_mode> (mode, &int_mode)
14900 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14901 ret = convert_descriptor_to_mode (int_mode, ret);
14902 return ret;
14903 }
14904
14905 /* Helper function for mem_loc_descriptor.  Perform the binary operation OP
14906    after first converting both arguments to type_die, then convert the
14907    result back to unsigned.  */
14908
14909 static dw_loc_descr_ref
14910 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
14911 scalar_int_mode mode, machine_mode mem_mode)
14912 {
14913 dw_loc_descr_ref cvt, op0, op1;
14914
14915 if (type_die == NULL)
14916 return NULL;
14917 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14918 VAR_INIT_STATUS_INITIALIZED);
14919 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14920 VAR_INIT_STATUS_INITIALIZED);
14921 if (op0 == NULL || op1 == NULL)
14922 return NULL;
14923 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14924 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14925 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14926 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14927 add_loc_descr (&op0, cvt);
14928 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14929 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14930 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14931 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14932 add_loc_descr (&op1, cvt);
14933 add_loc_descr (&op0, op1);
14934 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
14935 return convert_descriptor_to_mode (mode, op0);
14936 }
14937
14938 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
14939 const0 is DW_OP_lit0 or corresponding typed constant,
14940 const1 is DW_OP_lit1 or corresponding typed constant
14941 and constMSB is constant with just the MSB bit set
14942 for the mode):
14943 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14944 L1: const0 DW_OP_swap
14945 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
14946 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14947 L3: DW_OP_drop
14948 L4: DW_OP_nop
14949
14950 CTZ is similar:
14951 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14952 L1: const0 DW_OP_swap
14953 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14954 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14955 L3: DW_OP_drop
14956 L4: DW_OP_nop
14957
14958 FFS is similar:
14959 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
14960 L1: const1 DW_OP_swap
14961 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14962 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14963 L3: DW_OP_drop
14964 L4: DW_OP_nop */
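/* Illustrative trace, CLZ of the 8-bit value 0x20 (constMSB 0x80): the value
   is non-zero, so we branch to L1 with counter 0; 0x20 & 0x80 and 0x40 & 0x80
   are zero, so the value is shifted left twice and the counter reaches 2;
   0x80 & 0x80 is non-zero, so we branch to L3, drop the shifted value and
   leave the result 2 on the stack.  */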
14965
14966 static dw_loc_descr_ref
14967 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
14968 machine_mode mem_mode)
14969 {
14970 dw_loc_descr_ref op0, ret, tmp;
14971 HOST_WIDE_INT valv;
14972 dw_loc_descr_ref l1jump, l1label;
14973 dw_loc_descr_ref l2jump, l2label;
14974 dw_loc_descr_ref l3jump, l3label;
14975 dw_loc_descr_ref l4jump, l4label;
14976 rtx msb;
14977
14978 if (GET_MODE (XEXP (rtl, 0)) != mode)
14979 return NULL;
14980
14981 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14982 VAR_INIT_STATUS_INITIALIZED);
14983 if (op0 == NULL)
14984 return NULL;
14985 ret = op0;
14986 if (GET_CODE (rtl) == CLZ)
14987 {
14988 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14989 valv = GET_MODE_BITSIZE (mode);
14990 }
14991 else if (GET_CODE (rtl) == FFS)
14992 valv = 0;
14993 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14994 valv = GET_MODE_BITSIZE (mode);
14995 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
14996 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
14997 add_loc_descr (&ret, l1jump);
14998 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
14999 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
15000 VAR_INIT_STATUS_INITIALIZED);
15001 if (tmp == NULL)
15002 return NULL;
15003 add_loc_descr (&ret, tmp);
15004 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
15005 add_loc_descr (&ret, l4jump);
15006 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
15007 ? const1_rtx : const0_rtx,
15008 mode, mem_mode,
15009 VAR_INIT_STATUS_INITIALIZED);
15010 if (l1label == NULL)
15011 return NULL;
15012 add_loc_descr (&ret, l1label);
15013 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15014 l2label = new_loc_descr (DW_OP_dup, 0, 0);
15015 add_loc_descr (&ret, l2label);
15016 if (GET_CODE (rtl) != CLZ)
15017 msb = const1_rtx;
15018 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
15019 msb = GEN_INT (HOST_WIDE_INT_1U
15020 << (GET_MODE_BITSIZE (mode) - 1));
15021 else
15022 msb = immed_wide_int_const
15023 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
15024 GET_MODE_PRECISION (mode)), mode);
15025 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
15026 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15027 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
15028 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
15029 else
15030 tmp = mem_loc_descriptor (msb, mode, mem_mode,
15031 VAR_INIT_STATUS_INITIALIZED);
15032 if (tmp == NULL)
15033 return NULL;
15034 add_loc_descr (&ret, tmp);
15035 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15036 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
15037 add_loc_descr (&ret, l3jump);
15038 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15039 VAR_INIT_STATUS_INITIALIZED);
15040 if (tmp == NULL)
15041 return NULL;
15042 add_loc_descr (&ret, tmp);
15043 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
15044 ? DW_OP_shl : DW_OP_shr, 0, 0));
15045 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15046 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
15047 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15048 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
15049 add_loc_descr (&ret, l2jump);
15050 l3label = new_loc_descr (DW_OP_drop, 0, 0);
15051 add_loc_descr (&ret, l3label);
15052 l4label = new_loc_descr (DW_OP_nop, 0, 0);
15053 add_loc_descr (&ret, l4label);
15054 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15055 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15056 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15057 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15058 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15059 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
15060 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15061 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
15062 return ret;
15063 }
15064
15065 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
15066 const1 is DW_OP_lit1 or corresponding typed constant):
15067 const0 DW_OP_swap
15068 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15069 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15070 L2: DW_OP_drop
15071
15072 PARITY is similar:
15073 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15074 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15075 L2: DW_OP_drop */
15076
15077 static dw_loc_descr_ref
15078 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
15079 machine_mode mem_mode)
15080 {
15081 dw_loc_descr_ref op0, ret, tmp;
15082 dw_loc_descr_ref l1jump, l1label;
15083 dw_loc_descr_ref l2jump, l2label;
15084
15085 if (GET_MODE (XEXP (rtl, 0)) != mode)
15086 return NULL;
15087
15088 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15089 VAR_INIT_STATUS_INITIALIZED);
15090 if (op0 == NULL)
15091 return NULL;
15092 ret = op0;
15093 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15094 VAR_INIT_STATUS_INITIALIZED);
15095 if (tmp == NULL)
15096 return NULL;
15097 add_loc_descr (&ret, tmp);
15098 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15099 l1label = new_loc_descr (DW_OP_dup, 0, 0);
15100 add_loc_descr (&ret, l1label);
15101 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15102 add_loc_descr (&ret, l2jump);
15103 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15104 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15105 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15106 VAR_INIT_STATUS_INITIALIZED);
15107 if (tmp == NULL)
15108 return NULL;
15109 add_loc_descr (&ret, tmp);
15110 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15111 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15112 ? DW_OP_plus : DW_OP_xor, 0, 0));
15113 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15114 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15115 VAR_INIT_STATUS_INITIALIZED);
15116 add_loc_descr (&ret, tmp);
15117 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15118 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15119 add_loc_descr (&ret, l1jump);
15120 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15121 add_loc_descr (&ret, l2label);
15122 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15123 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15124 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15125 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15126 return ret;
15127 }
15128
15129 /* BSWAP (constS is initial shift count, either 56 or 24):
15130 constS const0
15131 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15132 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15133 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15134 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15135 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
15136
15137 static dw_loc_descr_ref
15138 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15139 machine_mode mem_mode)
15140 {
15141 dw_loc_descr_ref op0, ret, tmp;
15142 dw_loc_descr_ref l1jump, l1label;
15143 dw_loc_descr_ref l2jump, l2label;
15144
15145 if (BITS_PER_UNIT != 8
15146 || (GET_MODE_BITSIZE (mode) != 32
15147 && GET_MODE_BITSIZE (mode) != 64))
15148 return NULL;
15149
15150 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15151 VAR_INIT_STATUS_INITIALIZED);
15152 if (op0 == NULL)
15153 return NULL;
15154
15155 ret = op0;
15156 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15157 mode, mem_mode,
15158 VAR_INIT_STATUS_INITIALIZED);
15159 if (tmp == NULL)
15160 return NULL;
15161 add_loc_descr (&ret, tmp);
15162 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15163 VAR_INIT_STATUS_INITIALIZED);
15164 if (tmp == NULL)
15165 return NULL;
15166 add_loc_descr (&ret, tmp);
15167 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15168 add_loc_descr (&ret, l1label);
15169 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15170 mode, mem_mode,
15171 VAR_INIT_STATUS_INITIALIZED);
15172 add_loc_descr (&ret, tmp);
15173 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15174 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15175 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15176 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15177 VAR_INIT_STATUS_INITIALIZED);
15178 if (tmp == NULL)
15179 return NULL;
15180 add_loc_descr (&ret, tmp);
15181 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15182 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15183 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15184 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15185 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15186 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15187 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15188 VAR_INIT_STATUS_INITIALIZED);
15189 add_loc_descr (&ret, tmp);
15190 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15191 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15192 add_loc_descr (&ret, l2jump);
15193 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15194 VAR_INIT_STATUS_INITIALIZED);
15195 add_loc_descr (&ret, tmp);
15196 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15197 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15198 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15199 add_loc_descr (&ret, l1jump);
15200 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15201 add_loc_descr (&ret, l2label);
15202 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15203 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15204 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15205 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15206 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15207 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15208 return ret;
15209 }
15210
15211 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15212 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15213 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15214 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15215
15216 ROTATERT is similar:
15217 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15218 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15219 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
15220
15221 static dw_loc_descr_ref
15222 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15223 machine_mode mem_mode)
15224 {
15225 rtx rtlop1 = XEXP (rtl, 1);
15226 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15227 int i;
15228
15229 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15230 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15231 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15232 VAR_INIT_STATUS_INITIALIZED);
15233 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15234 VAR_INIT_STATUS_INITIALIZED);
15235 if (op0 == NULL || op1 == NULL)
15236 return NULL;
15237 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15238 for (i = 0; i < 2; i++)
15239 {
15240 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15241 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15242 mode, mem_mode,
15243 VAR_INIT_STATUS_INITIALIZED);
15244 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15245 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15246 ? DW_OP_const4u
15247 : HOST_BITS_PER_WIDE_INT == 64
15248 ? DW_OP_const8u : DW_OP_constu,
15249 GET_MODE_MASK (mode), 0);
15250 else
15251 mask[i] = NULL;
15252 if (mask[i] == NULL)
15253 return NULL;
15254 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15255 }
15256 ret = op0;
15257 add_loc_descr (&ret, op1);
15258 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15259 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15260 if (GET_CODE (rtl) == ROTATERT)
15261 {
15262 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15263 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15264 GET_MODE_BITSIZE (mode), 0));
15265 }
15266 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15267 if (mask[0] != NULL)
15268 add_loc_descr (&ret, mask[0]);
15269 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15270 if (mask[1] != NULL)
15271 {
15272 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15273 add_loc_descr (&ret, mask[1]);
15274 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15275 }
15276 if (GET_CODE (rtl) == ROTATE)
15277 {
15278 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15279 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15280 GET_MODE_BITSIZE (mode), 0));
15281 }
15282 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15283 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15284 return ret;
15285 }
15286
15287 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15288 for DEBUG_PARAMETER_REF RTL. */
15289
15290 static dw_loc_descr_ref
15291 parameter_ref_descriptor (rtx rtl)
15292 {
15293 dw_loc_descr_ref ret;
15294 dw_die_ref ref;
15295
15296 if (dwarf_strict)
15297 return NULL;
15298 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15299 /* With LTO during LTRANS we get the late DIE that refers to the early
15300 DIE, thus we add another indirection here. This seems to confuse
15301 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15302 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15303 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15304 if (ref)
15305 {
15306 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15307 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15308 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15309 }
15310 else
15311 {
15312 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15313 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15314 }
15315 return ret;
15316 }
15317
15318 /* The following routine converts the RTL for a variable or parameter
15319 (resident in memory) into an equivalent Dwarf representation of a
15320 mechanism for getting the address of that same variable onto the top of a
15321 hypothetical "address evaluation" stack.
15322
15323 When creating memory location descriptors, we are effectively transforming
15324 the RTL for a memory-resident object into its Dwarf postfix expression
15325 equivalent. This routine recursively descends an RTL tree, turning
15326 it into Dwarf postfix code as it goes.
15327
15328 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15329
15330 MEM_MODE is the mode of the memory reference, needed to handle some
15331 autoincrement addressing modes.
15332
15333 Return 0 if we can't represent the location. */
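/* For example (illustrative only), on x86-64 the address RTL
   (plus:DI (reg:DI 6 bp) (const_int -8)) typically becomes DW_OP_breg6 -8,
   or DW_OP_fbreg <offset> when the frame base applies.  */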
15334
15335 dw_loc_descr_ref
15336 mem_loc_descriptor (rtx rtl, machine_mode mode,
15337 machine_mode mem_mode,
15338 enum var_init_status initialized)
15339 {
15340 dw_loc_descr_ref mem_loc_result = NULL;
15341 enum dwarf_location_atom op;
15342 dw_loc_descr_ref op0, op1;
15343 rtx inner = NULL_RTX;
15344 poly_int64 offset;
15345
15346 if (mode == VOIDmode)
15347 mode = GET_MODE (rtl);
15348
15349 /* Note that for a dynamically sized array, the location we will generate a
15350 description of here will be the lowest numbered location which is
15351 actually within the array. That's *not* necessarily the same as the
15352 zeroth element of the array. */
15353
15354 rtl = targetm.delegitimize_address (rtl);
15355
15356 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15357 return NULL;
15358
15359 scalar_int_mode int_mode, inner_mode, op1_mode;
15360 switch (GET_CODE (rtl))
15361 {
15362 case POST_INC:
15363 case POST_DEC:
15364 case POST_MODIFY:
15365 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15366
15367 case SUBREG:
15368 /* The case of a subreg may arise when we have a local (register)
15369 variable or a formal (register) parameter which doesn't quite fill
15370 up an entire register. For now, just assume that it is
15371 legitimate to make the Dwarf info refer to the whole register which
15372 contains the given subreg. */
15373 if (!subreg_lowpart_p (rtl))
15374 break;
15375 inner = SUBREG_REG (rtl);
15376 /* FALLTHRU */
15377 case TRUNCATE:
15378 if (inner == NULL_RTX)
15379 inner = XEXP (rtl, 0);
15380 if (is_a <scalar_int_mode> (mode, &int_mode)
15381 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15382 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15383 #ifdef POINTERS_EXTEND_UNSIGNED
15384 || (int_mode == Pmode && mem_mode != VOIDmode)
15385 #endif
15386 )
15387 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15388 {
15389 mem_loc_result = mem_loc_descriptor (inner,
15390 inner_mode,
15391 mem_mode, initialized);
15392 break;
15393 }
15394 if (dwarf_strict && dwarf_version < 5)
15395 break;
15396 if (is_a <scalar_int_mode> (mode, &int_mode)
15397 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15398 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15399 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15400 {
15401 dw_die_ref type_die;
15402 dw_loc_descr_ref cvt;
15403
15404 mem_loc_result = mem_loc_descriptor (inner,
15405 GET_MODE (inner),
15406 mem_mode, initialized);
15407 if (mem_loc_result == NULL)
15408 break;
15409 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15410 if (type_die == NULL)
15411 {
15412 mem_loc_result = NULL;
15413 break;
15414 }
15415 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15416 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15417 else
15418 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15419 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15420 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15421 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15422 add_loc_descr (&mem_loc_result, cvt);
15423 if (is_a <scalar_int_mode> (mode, &int_mode)
15424 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15425 {
15426 /* Convert it to untyped afterwards. */
15427 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15428 add_loc_descr (&mem_loc_result, cvt);
15429 }
15430 }
15431 break;
15432
15433 case REG:
15434 if (!is_a <scalar_int_mode> (mode, &int_mode)
15435 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15436 && rtl != arg_pointer_rtx
15437 && rtl != frame_pointer_rtx
15438 #ifdef POINTERS_EXTEND_UNSIGNED
15439 && (int_mode != Pmode || mem_mode == VOIDmode)
15440 #endif
15441 ))
15442 {
15443 dw_die_ref type_die;
15444 unsigned int dbx_regnum;
15445
15446 if (dwarf_strict && dwarf_version < 5)
15447 break;
15448 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
15449 break;
15450 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15451 if (type_die == NULL)
15452 break;
15453
15454 dbx_regnum = dbx_reg_number (rtl);
15455 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15456 break;
15457 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15458 dbx_regnum, 0);
15459 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15460 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15461 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15462 break;
15463 }
15464 /* Whenever a register number forms a part of the description of the
15465 method for calculating the (dynamic) address of a memory resident
15466 object, DWARF rules require the register number be referred to as
15467 a "base register". This distinction is not based in any way upon
15468 what category of register the hardware believes the given register
15469 belongs to. This is strictly DWARF terminology we're dealing with
15470 here. Note that in cases where the location of a memory-resident
15471 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15472 OP_CONST (0)) the actual DWARF location descriptor that we generate
15473 may just be OP_BASEREG (basereg). This may look deceptively like
15474 the object in question was allocated to a register (rather than in
15475 memory) so DWARF consumers need to be aware of the subtle
15476 distinction between OP_REG and OP_BASEREG. */
15477 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15478 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15479 else if (stack_realign_drap
15480 && crtl->drap_reg
15481 && crtl->args.internal_arg_pointer == rtl
15482 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15483 {
15484 /* If RTL is internal_arg_pointer, which has been optimized
15485 out, use DRAP instead. */
15486 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15487 VAR_INIT_STATUS_INITIALIZED);
15488 }
15489 break;
15490
15491 case SIGN_EXTEND:
15492 case ZERO_EXTEND:
15493 if (!is_a <scalar_int_mode> (mode, &int_mode)
15494 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15495 break;
15496 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15497 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15498 if (op0 == 0)
15499 break;
15500 else if (GET_CODE (rtl) == ZERO_EXTEND
15501 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15502 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15503 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15504 to expand zero extend as two shifts instead of
15505 masking. */
15506 && GET_MODE_SIZE (inner_mode) <= 4)
15507 {
15508 mem_loc_result = op0;
15509 add_loc_descr (&mem_loc_result,
15510 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15511 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15512 }
15513 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15514 {
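	  /* Extend by shifting the value to the top of the address-sized
	     word and back down: DW_OP_shr for zero extension, DW_OP_shra
	     for sign extension.  Illustration: SImode to DImode with 8-byte
	     addresses uses a shift count of 32.  */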
15515 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15516 shift *= BITS_PER_UNIT;
15517 if (GET_CODE (rtl) == SIGN_EXTEND)
15518 op = DW_OP_shra;
15519 else
15520 op = DW_OP_shr;
15521 mem_loc_result = op0;
15522 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15523 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15524 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15525 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15526 }
15527 else if (!dwarf_strict || dwarf_version >= 5)
15528 {
15529 dw_die_ref type_die1, type_die2;
15530 dw_loc_descr_ref cvt;
15531
15532 type_die1 = base_type_for_mode (inner_mode,
15533 GET_CODE (rtl) == ZERO_EXTEND);
15534 if (type_die1 == NULL)
15535 break;
15536 type_die2 = base_type_for_mode (int_mode, 1);
15537 if (type_die2 == NULL)
15538 break;
15539 mem_loc_result = op0;
15540 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15541 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15542 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15543 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15544 add_loc_descr (&mem_loc_result, cvt);
15545 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15546 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15547 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15548 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15549 add_loc_descr (&mem_loc_result, cvt);
15550 }
15551 break;
15552
15553 case MEM:
15554 {
15555 rtx new_rtl = avoid_constant_pool_reference (rtl);
15556 if (new_rtl != rtl)
15557 {
15558 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15559 initialized);
15560 if (mem_loc_result != NULL)
15561 return mem_loc_result;
15562 }
15563 }
15564 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15565 get_address_mode (rtl), mode,
15566 VAR_INIT_STATUS_INITIALIZED);
15567 if (mem_loc_result == NULL)
15568 mem_loc_result = tls_mem_loc_descriptor (rtl);
15569 if (mem_loc_result != NULL)
15570 {
15571 if (!is_a <scalar_int_mode> (mode, &int_mode)
15572 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15573 {
15574 dw_die_ref type_die;
15575 dw_loc_descr_ref deref;
15576 HOST_WIDE_INT size;
15577
15578 if (dwarf_strict && dwarf_version < 5)
15579 return NULL;
15580 if (!GET_MODE_SIZE (mode).is_constant (&size))
15581 return NULL;
15582 type_die
15583 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15584 if (type_die == NULL)
15585 return NULL;
15586 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15587 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15588 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15589 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15590 add_loc_descr (&mem_loc_result, deref);
15591 }
15592 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15593 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15594 else
15595 add_loc_descr (&mem_loc_result,
15596 new_loc_descr (DW_OP_deref_size,
15597 GET_MODE_SIZE (int_mode), 0));
15598 }
15599 break;
15600
15601 case LO_SUM:
15602 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15603
15604 case LABEL_REF:
15605 /* Some ports can transform a symbol ref into a label ref, because
15606 the symbol ref is too far away and has to be dumped into a constant
15607 pool. */
15608 case CONST:
15609 case SYMBOL_REF:
15610 if (!is_a <scalar_int_mode> (mode, &int_mode)
15611 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15612 #ifdef POINTERS_EXTEND_UNSIGNED
15613 && (int_mode != Pmode || mem_mode == VOIDmode)
15614 #endif
15615 ))
15616 break;
15617 if (GET_CODE (rtl) == SYMBOL_REF
15618 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15619 {
15620 dw_loc_descr_ref temp;
15621
15622 /* If this is not defined, we have no way to emit the data. */
15623 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15624 break;
15625
15626 temp = new_addr_loc_descr (rtl, dtprel_true);
15627
15628 /* We check for DWARF 5 here because gdb did not implement
15629 DW_OP_form_tls_address until after 7.12. */
15630 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15631 ? DW_OP_form_tls_address
15632 : DW_OP_GNU_push_tls_address),
15633 0, 0);
15634 add_loc_descr (&mem_loc_result, temp);
15635
15636 break;
15637 }
15638
15639 if (!const_ok_for_output (rtl))
15640 {
15641 if (GET_CODE (rtl) == CONST)
15642 switch (GET_CODE (XEXP (rtl, 0)))
15643 {
15644 case NOT:
15645 op = DW_OP_not;
15646 goto try_const_unop;
15647 case NEG:
15648 op = DW_OP_neg;
15649 goto try_const_unop;
15650 try_const_unop:
15651 rtx arg;
15652 arg = XEXP (XEXP (rtl, 0), 0);
15653 if (!CONSTANT_P (arg))
15654 arg = gen_rtx_CONST (int_mode, arg);
15655 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15656 initialized);
15657 if (op0)
15658 {
15659 mem_loc_result = op0;
15660 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15661 }
15662 break;
15663 default:
15664 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15665 mem_mode, initialized);
15666 break;
15667 }
15668 break;
15669 }
15670
15671 symref:
15672 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15673 vec_safe_push (used_rtx_array, rtl);
15674 break;
15675
15676 case CONCAT:
15677 case CONCATN:
15678 case VAR_LOCATION:
15679 case DEBUG_IMPLICIT_PTR:
15680 expansion_failed (NULL_TREE, rtl,
15681 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15682 return 0;
15683
15684 case ENTRY_VALUE:
15685 if (dwarf_strict && dwarf_version < 5)
15686 return NULL;
15687 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15688 {
15689 if (!is_a <scalar_int_mode> (mode, &int_mode)
15690 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15691 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15692 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15693 else
15694 {
15695 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15696 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15697 return NULL;
15698 op0 = one_reg_loc_descriptor (dbx_regnum,
15699 VAR_INIT_STATUS_INITIALIZED);
15700 }
15701 }
15702 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15703 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15704 {
15705 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15706 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15707 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15708 return NULL;
15709 }
15710 else
15711 gcc_unreachable ();
15712 if (op0 == NULL)
15713 return NULL;
15714 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15715 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15716 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15717 break;
15718
15719 case DEBUG_PARAMETER_REF:
15720 mem_loc_result = parameter_ref_descriptor (rtl);
15721 break;
15722
15723 case PRE_MODIFY:
15724 /* Extract the PLUS expression nested inside and fall into
15725 PLUS code below. */
15726 rtl = XEXP (rtl, 1);
15727 goto plus;
15728
15729 case PRE_INC:
15730 case PRE_DEC:
15731 /* Turn these into a PLUS expression and fall into the PLUS code
15732 below. */
15733 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15734 gen_int_mode (GET_CODE (rtl) == PRE_INC
15735 ? GET_MODE_UNIT_SIZE (mem_mode)
15736 : -GET_MODE_UNIT_SIZE (mem_mode),
15737 mode));
15738
15739 /* fall through */
15740
15741 case PLUS:
15742 plus:
15743 if (is_based_loc (rtl)
15744 && is_a <scalar_int_mode> (mode, &int_mode)
15745 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15746 || XEXP (rtl, 0) == arg_pointer_rtx
15747 || XEXP (rtl, 0) == frame_pointer_rtx))
15748 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15749 INTVAL (XEXP (rtl, 1)),
15750 VAR_INIT_STATUS_INITIALIZED);
15751 else
15752 {
15753 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15754 VAR_INIT_STATUS_INITIALIZED);
15755 if (mem_loc_result == 0)
15756 break;
15757
15758 if (CONST_INT_P (XEXP (rtl, 1))
15759 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15760 <= DWARF2_ADDR_SIZE))
15761 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15762 else
15763 {
15764 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15765 VAR_INIT_STATUS_INITIALIZED);
15766 if (op1 == 0)
15767 return NULL;
15768 add_loc_descr (&mem_loc_result, op1);
15769 add_loc_descr (&mem_loc_result,
15770 new_loc_descr (DW_OP_plus, 0, 0));
15771 }
15772 }
15773 break;
15774
15775 /* If a pseudo-reg is optimized away, it is possible for it to
15776 be replaced with a MEM containing a multiply or shift. */
15777 case MINUS:
15778 op = DW_OP_minus;
15779 goto do_binop;
15780
15781 case MULT:
15782 op = DW_OP_mul;
15783 goto do_binop;
15784
15785 case DIV:
15786 if ((!dwarf_strict || dwarf_version >= 5)
15787 && is_a <scalar_int_mode> (mode, &int_mode)
15788 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15789 {
15790 mem_loc_result = typed_binop (DW_OP_div, rtl,
15791 base_type_for_mode (mode, 0),
15792 int_mode, mem_mode);
15793 break;
15794 }
15795 op = DW_OP_div;
15796 goto do_binop;
15797
15798 case UMOD:
15799 op = DW_OP_mod;
15800 goto do_binop;
15801
15802 case ASHIFT:
15803 op = DW_OP_shl;
15804 goto do_shift;
15805
15806 case ASHIFTRT:
15807 op = DW_OP_shra;
15808 goto do_shift;
15809
15810 case LSHIFTRT:
15811 op = DW_OP_shr;
15812 goto do_shift;
15813
15814 do_shift:
15815 if (!is_a <scalar_int_mode> (mode, &int_mode))
15816 break;
15817 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15818 VAR_INIT_STATUS_INITIALIZED);
15819 {
15820 rtx rtlop1 = XEXP (rtl, 1);
15821 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15822 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15823 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15824 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15825 VAR_INIT_STATUS_INITIALIZED);
15826 }
15827
15828 if (op0 == 0 || op1 == 0)
15829 break;
15830
15831 mem_loc_result = op0;
15832 add_loc_descr (&mem_loc_result, op1);
15833 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15834 break;
15835
15836 case AND:
15837 op = DW_OP_and;
15838 goto do_binop;
15839
15840 case IOR:
15841 op = DW_OP_or;
15842 goto do_binop;
15843
15844 case XOR:
15845 op = DW_OP_xor;
15846 goto do_binop;
15847
15848 do_binop:
15849 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15850 VAR_INIT_STATUS_INITIALIZED);
15851 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15852 VAR_INIT_STATUS_INITIALIZED);
15853
15854 if (op0 == 0 || op1 == 0)
15855 break;
15856
15857 mem_loc_result = op0;
15858 add_loc_descr (&mem_loc_result, op1);
15859 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15860 break;
15861
15862 case MOD:
15863 if ((!dwarf_strict || dwarf_version >= 5)
15864 && is_a <scalar_int_mode> (mode, &int_mode)
15865 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15866 {
15867 mem_loc_result = typed_binop (DW_OP_mod, rtl,
15868 base_type_for_mode (mode, 0),
15869 int_mode, mem_mode);
15870 break;
15871 }
15872
15873 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15874 VAR_INIT_STATUS_INITIALIZED);
15875 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15876 VAR_INIT_STATUS_INITIALIZED);
15877
15878 if (op0 == 0 || op1 == 0)
15879 break;
15880
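/* Signed modulus: DW_OP_mod is used for the unsigned UMOD case above, so
   compute op0 - (op0 / op1) * op1 with the signed DW_OP_div instead.
   With op0 and op1 on the stack, DW_OP_over DW_OP_over DW_OP_div
   DW_OP_mul DW_OP_minus leaves just the remainder.  */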
15881 mem_loc_result = op0;
15882 add_loc_descr (&mem_loc_result, op1);
15883 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15884 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15885 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
15886 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
15887 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
15888 break;
15889
15890 case UDIV:
15891 if ((!dwarf_strict || dwarf_version >= 5)
15892 && is_a <scalar_int_mode> (mode, &int_mode))
15893 {
15894 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15895 {
15896 op = DW_OP_div;
15897 goto do_binop;
15898 }
15899 mem_loc_result = typed_binop (DW_OP_div, rtl,
15900 base_type_for_mode (int_mode, 1),
15901 int_mode, mem_mode);
15902 }
15903 break;
15904
15905 case NOT:
15906 op = DW_OP_not;
15907 goto do_unop;
15908
15909 case ABS:
15910 op = DW_OP_abs;
15911 goto do_unop;
15912
15913 case NEG:
15914 op = DW_OP_neg;
15915 goto do_unop;
15916
15917 do_unop:
15918 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15919 VAR_INIT_STATUS_INITIALIZED);
15920
15921 if (op0 == 0)
15922 break;
15923
15924 mem_loc_result = op0;
15925 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15926 break;
15927
15928 case CONST_INT:
15929 if (!is_a <scalar_int_mode> (mode, &int_mode)
15930 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15931 #ifdef POINTERS_EXTEND_UNSIGNED
15932 || (int_mode == Pmode
15933 && mem_mode != VOIDmode
15934 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
15935 #endif
15936 )
15937 {
15938 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15939 break;
15940 }
15941 if ((!dwarf_strict || dwarf_version >= 5)
15942 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
15943 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
15944 {
15945 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
15946 scalar_int_mode amode;
15947 if (type_die == NULL)
15948 return NULL;
15949 if (INTVAL (rtl) >= 0
15950 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
15951 .exists (&amode))
15952 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
15953 /* const DW_OP_convert <XXX> vs.
15954 DW_OP_const_type <XXX, 1, const>. */
15955 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
15956 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
15957 {
15958 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15959 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15960 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15961 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15962 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
15963 add_loc_descr (&mem_loc_result, op0);
15964 return mem_loc_result;
15965 }
15966 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
15967 INTVAL (rtl));
15968 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15969 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15970 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15971 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
15972 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
15973 else
15974 {
15975 mem_loc_result->dw_loc_oprnd2.val_class
15976 = dw_val_class_const_double;
15977 mem_loc_result->dw_loc_oprnd2.v.val_double
15978 = double_int::from_shwi (INTVAL (rtl));
15979 }
15980 }
15981 break;
15982
15983 case CONST_DOUBLE:
15984 if (!dwarf_strict || dwarf_version >= 5)
15985 {
15986 dw_die_ref type_die;
15987
15988 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
15989 CONST_DOUBLE rtx could represent either a large integer
15990 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
15991 the value is always a floating point constant.
15992
15993 When it is an integer, a CONST_DOUBLE is used whenever
15994 the constant requires 2 HWIs to be adequately represented.
15995 We output CONST_DOUBLEs as blocks. */
15996 if (mode == VOIDmode
15997 || (GET_MODE (rtl) == VOIDmode
15998 && maybe_ne (GET_MODE_BITSIZE (mode),
15999 HOST_BITS_PER_DOUBLE_INT)))
16000 break;
16001 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16002 if (type_die == NULL)
16003 return NULL;
16004 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16005 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16006 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16007 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16008 #if TARGET_SUPPORTS_WIDE_INT == 0
16009 if (!SCALAR_FLOAT_MODE_P (mode))
16010 {
16011 mem_loc_result->dw_loc_oprnd2.val_class
16012 = dw_val_class_const_double;
16013 mem_loc_result->dw_loc_oprnd2.v.val_double
16014 = rtx_to_double_int (rtl);
16015 }
16016 else
16017 #endif
16018 {
16019 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
16020 unsigned int length = GET_MODE_SIZE (float_mode);
16021 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16022
16023 insert_float (rtl, array);
16024 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16025 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16026 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16027 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16028 }
16029 }
16030 break;
16031
16032 case CONST_WIDE_INT:
16033 if (!dwarf_strict || dwarf_version >= 5)
16034 {
16035 dw_die_ref type_die;
16036
16037 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16038 if (type_die == NULL)
16039 return NULL;
16040 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16041 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16042 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16043 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16044 mem_loc_result->dw_loc_oprnd2.val_class
16045 = dw_val_class_wide_int;
16046 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16047 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
16048 }
16049 break;
16050
16051 case CONST_POLY_INT:
16052 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
16053 break;
16054
16055 case EQ:
16056 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
16057 break;
16058
16059 case GE:
16060 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16061 break;
16062
16063 case GT:
16064 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16065 break;
16066
16067 case LE:
16068 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16069 break;
16070
16071 case LT:
16072 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16073 break;
16074
16075 case NE:
16076 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
16077 break;
16078
16079 case GEU:
16080 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16081 break;
16082
16083 case GTU:
16084 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16085 break;
16086
16087 case LEU:
16088 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16089 break;
16090
16091 case LTU:
16092 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16093 break;
16094
16095 case UMIN:
16096 case UMAX:
16097 if (!SCALAR_INT_MODE_P (mode))
16098 break;
16099 /* FALLTHRU */
16100 case SMIN:
16101 case SMAX:
16102 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16103 break;
16104
16105 case ZERO_EXTRACT:
16106 case SIGN_EXTRACT:
16107 if (CONST_INT_P (XEXP (rtl, 1))
16108 && CONST_INT_P (XEXP (rtl, 2))
16109 && is_a <scalar_int_mode> (mode, &int_mode)
16110 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16111 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16112 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16113 && ((unsigned) INTVAL (XEXP (rtl, 1))
16114 + (unsigned) INTVAL (XEXP (rtl, 2))
16115 <= GET_MODE_BITSIZE (int_mode)))
16116 {
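/* Extract the bit-field by shifting it up to the most significant end
   of the stack slot with DW_OP_shl and then back down with DW_OP_shr
   (ZERO_EXTRACT) or DW_OP_shra (SIGN_EXTRACT), which zero- or
   sign-extends the extracted value.  */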
16117 int shift, size;
16118 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16119 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16120 if (op0 == 0)
16121 break;
16122 if (GET_CODE (rtl) == SIGN_EXTRACT)
16123 op = DW_OP_shra;
16124 else
16125 op = DW_OP_shr;
16126 mem_loc_result = op0;
16127 size = INTVAL (XEXP (rtl, 1));
16128 shift = INTVAL (XEXP (rtl, 2));
16129 if (BITS_BIG_ENDIAN)
16130 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
16131 if (shift + size != (int) DWARF2_ADDR_SIZE)
16132 {
16133 add_loc_descr (&mem_loc_result,
16134 int_loc_descriptor (DWARF2_ADDR_SIZE
16135 - shift - size));
16136 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16137 }
16138 if (size != (int) DWARF2_ADDR_SIZE)
16139 {
16140 add_loc_descr (&mem_loc_result,
16141 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
16142 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16143 }
16144 }
16145 break;
16146
16147 case IF_THEN_ELSE:
16148 {
16149 dw_loc_descr_ref op2, bra_node, drop_node;
16150 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16151 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16152 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16153 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16154 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16155 VAR_INIT_STATUS_INITIALIZED);
16156 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16157 VAR_INIT_STATUS_INITIALIZED);
16158 if (op0 == NULL || op1 == NULL || op2 == NULL)
16159 break;
16160
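/* Emit <then-value> <else-value> <condition> DW_OP_bra: when the
   condition is nonzero, the branch skips to the final DW_OP_drop and
   discards the else-value; otherwise DW_OP_swap DW_OP_drop discards
   the then-value instead.  */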
16161 mem_loc_result = op1;
16162 add_loc_descr (&mem_loc_result, op2);
16163 add_loc_descr (&mem_loc_result, op0);
16164 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16165 add_loc_descr (&mem_loc_result, bra_node);
16166 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16167 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16168 add_loc_descr (&mem_loc_result, drop_node);
16169 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16170 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
16171 }
16172 break;
16173
16174 case FLOAT_EXTEND:
16175 case FLOAT_TRUNCATE:
16176 case FLOAT:
16177 case UNSIGNED_FLOAT:
16178 case FIX:
16179 case UNSIGNED_FIX:
16180 if (!dwarf_strict || dwarf_version >= 5)
16181 {
16182 dw_die_ref type_die;
16183 dw_loc_descr_ref cvt;
16184
16185 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16186 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16187 if (op0 == NULL)
16188 break;
16189 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16190 && (GET_CODE (rtl) == FLOAT
16191 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16192 {
16193 type_die = base_type_for_mode (int_mode,
16194 GET_CODE (rtl) == UNSIGNED_FLOAT);
16195 if (type_die == NULL)
16196 break;
16197 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16198 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16199 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16200 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16201 add_loc_descr (&op0, cvt);
16202 }
16203 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16204 if (type_die == NULL)
16205 break;
16206 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16207 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16208 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16209 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16210 add_loc_descr (&op0, cvt);
16211 if (is_a <scalar_int_mode> (mode, &int_mode)
16212 && (GET_CODE (rtl) == FIX
16213 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16214 {
16215 op0 = convert_descriptor_to_mode (int_mode, op0);
16216 if (op0 == NULL)
16217 break;
16218 }
16219 mem_loc_result = op0;
16220 }
16221 break;
16222
16223 case CLZ:
16224 case CTZ:
16225 case FFS:
16226 if (is_a <scalar_int_mode> (mode, &int_mode))
16227 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16228 break;
16229
16230 case POPCOUNT:
16231 case PARITY:
16232 if (is_a <scalar_int_mode> (mode, &int_mode))
16233 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16234 break;
16235
16236 case BSWAP:
16237 if (is_a <scalar_int_mode> (mode, &int_mode))
16238 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16239 break;
16240
16241 case ROTATE:
16242 case ROTATERT:
16243 if (is_a <scalar_int_mode> (mode, &int_mode))
16244 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16245 break;
16246
16247 case COMPARE:
16248 /* In theory, we could implement the above. */
16249 /* DWARF cannot represent the unsigned compare operations
16250 natively. */
16251 case SS_MULT:
16252 case US_MULT:
16253 case SS_DIV:
16254 case US_DIV:
16255 case SS_PLUS:
16256 case US_PLUS:
16257 case SS_MINUS:
16258 case US_MINUS:
16259 case SS_NEG:
16260 case US_NEG:
16261 case SS_ABS:
16262 case SS_ASHIFT:
16263 case US_ASHIFT:
16264 case SS_TRUNCATE:
16265 case US_TRUNCATE:
16266 case UNORDERED:
16267 case ORDERED:
16268 case UNEQ:
16269 case UNGE:
16270 case UNGT:
16271 case UNLE:
16272 case UNLT:
16273 case LTGT:
16274 case FRACT_CONVERT:
16275 case UNSIGNED_FRACT_CONVERT:
16276 case SAT_FRACT:
16277 case UNSIGNED_SAT_FRACT:
16278 case SQRT:
16279 case ASM_OPERANDS:
16280 case VEC_MERGE:
16281 case VEC_SELECT:
16282 case VEC_CONCAT:
16283 case VEC_DUPLICATE:
16284 case VEC_SERIES:
16285 case UNSPEC:
16286 case HIGH:
16287 case FMA:
16288 case STRICT_LOW_PART:
16289 case CONST_VECTOR:
16290 case CONST_FIXED:
16291 case CLRSB:
16292 case CLOBBER:
16293 case CLOBBER_HIGH:
16294 /* If delegitimize_address couldn't do anything with the UNSPEC, we
16295 can't express it in the debug info. This can happen e.g. with some
16296 TLS UNSPECs. */
16297 break;
16298
16299 case CONST_STRING:
16300 resolve_one_addr (&rtl);
16301 goto symref;
16302
16303 /* RTL sequences inside PARALLEL record a series of DWARF operations for
16304 the expression. An UNSPEC rtx represents a raw DWARF operation, so
16305 new_loc_descr is called for it to build the operation directly.
16306 Otherwise mem_loc_descriptor is called recursively. */
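/* For instance, an UNSPEC whose unspec number is the value of
   DW_OP_plus_uconst and whose operand vector is [(const_int 16)
   (const_int 0)] is emitted directly as DW_OP_plus_uconst 16.  */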
16307 case PARALLEL:
16308 {
16309 int index = 0;
16310 dw_loc_descr_ref exp_result = NULL;
16311
16312 for (; index < XVECLEN (rtl, 0); index++)
16313 {
16314 rtx elem = XVECEXP (rtl, 0, index);
16315 if (GET_CODE (elem) == UNSPEC)
16316 {
16317 /* Each DWARF operation UNSPEC contains two operands; if
16318 an operand is not used for the operation, const0_rtx is
16319 passed. */
16320 gcc_assert (XVECLEN (elem, 0) == 2);
16321
16322 HOST_WIDE_INT dw_op = XINT (elem, 1);
16323 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16324 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16325 exp_result
16326 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16327 oprnd2);
16328 }
16329 else
16330 exp_result
16331 = mem_loc_descriptor (elem, mode, mem_mode,
16332 VAR_INIT_STATUS_INITIALIZED);
16333
16334 if (!mem_loc_result)
16335 mem_loc_result = exp_result;
16336 else
16337 add_loc_descr (&mem_loc_result, exp_result);
16338 }
16339
16340 break;
16341 }
16342
16343 default:
16344 if (flag_checking)
16345 {
16346 print_rtl (stderr, rtl);
16347 gcc_unreachable ();
16348 }
16349 break;
16350 }
16351
16352 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16353 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16354
16355 return mem_loc_result;
16356 }
16357
16358 /* Return a descriptor that describes the concatenation of two locations.
16359 This is typically a complex variable. */
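/* For example, a _Complex double split across two 8-byte registers is
   described as <location of real part> DW_OP_piece 8 <location of
   imaginary part> DW_OP_piece 8.  */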
16360
16361 static dw_loc_descr_ref
16362 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16363 {
16364 /* At present we only track constant-sized pieces. */
16365 unsigned int size0, size1;
16366 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16367 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16368 return 0;
16369
16370 dw_loc_descr_ref cc_loc_result = NULL;
16371 dw_loc_descr_ref x0_ref
16372 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16373 dw_loc_descr_ref x1_ref
16374 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16375
16376 if (x0_ref == 0 || x1_ref == 0)
16377 return 0;
16378
16379 cc_loc_result = x0_ref;
16380 add_loc_descr_op_piece (&cc_loc_result, size0);
16381
16382 add_loc_descr (&cc_loc_result, x1_ref);
16383 add_loc_descr_op_piece (&cc_loc_result, size1);
16384
16385 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16386 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16387
16388 return cc_loc_result;
16389 }
16390
16391 /* Return a descriptor that describes the concatenation of N
16392 locations. */
16393
16394 static dw_loc_descr_ref
16395 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16396 {
16397 unsigned int i;
16398 dw_loc_descr_ref cc_loc_result = NULL;
16399 unsigned int n = XVECLEN (concatn, 0);
16400 unsigned int size;
16401
16402 for (i = 0; i < n; ++i)
16403 {
16404 dw_loc_descr_ref ref;
16405 rtx x = XVECEXP (concatn, 0, i);
16406
16407 /* At present we only track constant-sized pieces. */
16408 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16409 return NULL;
16410
16411 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16412 if (ref == NULL)
16413 return NULL;
16414
16415 add_loc_descr (&cc_loc_result, ref);
16416 add_loc_descr_op_piece (&cc_loc_result, size);
16417 }
16418
16419 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16420 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16421
16422 return cc_loc_result;
16423 }
16424
16425 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16426 for DEBUG_IMPLICIT_PTR RTL. */
16427
16428 static dw_loc_descr_ref
16429 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16430 {
16431 dw_loc_descr_ref ret;
16432 dw_die_ref ref;
16433
16434 if (dwarf_strict && dwarf_version < 5)
16435 return NULL;
16436 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16437 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16438 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16439 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16440 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16441 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16442 if (ref)
16443 {
16444 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16445 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16446 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16447 }
16448 else
16449 {
16450 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16451 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16452 }
16453 return ret;
16454 }
16455
16456 /* Output a proper Dwarf location descriptor for a variable or parameter
16457 which is either allocated in a register or in a memory location. For a
16458 register, we just generate an OP_REG and the register number. For a
16459 memory location we provide a Dwarf postfix expression describing how to
16460 generate the (dynamic) address of the object onto the address stack.
16461
16462 MODE is mode of the decl if this loc_descriptor is going to be used in
16463 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
16464 allowed, VOIDmode otherwise.
16465
16466 If we don't know how to describe it, return 0. */
16467
16468 static dw_loc_descr_ref
16469 loc_descriptor (rtx rtl, machine_mode mode,
16470 enum var_init_status initialized)
16471 {
16472 dw_loc_descr_ref loc_result = NULL;
16473 scalar_int_mode int_mode;
16474
16475 switch (GET_CODE (rtl))
16476 {
16477 case SUBREG:
16478 /* The case of a subreg may arise when we have a local (register)
16479 variable or a formal (register) parameter which doesn't quite fill
16480 up an entire register. For now, just assume that it is
16481 legitimate to make the Dwarf info refer to the whole register which
16482 contains the given subreg. */
16483 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16484 loc_result = loc_descriptor (SUBREG_REG (rtl),
16485 GET_MODE (SUBREG_REG (rtl)), initialized);
16486 else
16487 goto do_default;
16488 break;
16489
16490 case REG:
16491 loc_result = reg_loc_descriptor (rtl, initialized);
16492 break;
16493
16494 case MEM:
16495 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16496 GET_MODE (rtl), initialized);
16497 if (loc_result == NULL)
16498 loc_result = tls_mem_loc_descriptor (rtl);
16499 if (loc_result == NULL)
16500 {
16501 rtx new_rtl = avoid_constant_pool_reference (rtl);
16502 if (new_rtl != rtl)
16503 loc_result = loc_descriptor (new_rtl, mode, initialized);
16504 }
16505 break;
16506
16507 case CONCAT:
16508 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16509 initialized);
16510 break;
16511
16512 case CONCATN:
16513 loc_result = concatn_loc_descriptor (rtl, initialized);
16514 break;
16515
16516 case VAR_LOCATION:
16517 /* Single part. */
16518 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16519 {
16520 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16521 if (GET_CODE (loc) == EXPR_LIST)
16522 loc = XEXP (loc, 0);
16523 loc_result = loc_descriptor (loc, mode, initialized);
16524 break;
16525 }
16526
16527 rtl = XEXP (rtl, 1);
16528 /* FALLTHRU */
16529
16530 case PARALLEL:
16531 {
16532 rtvec par_elems = XVEC (rtl, 0);
16533 int num_elem = GET_NUM_ELEM (par_elems);
16534 machine_mode mode;
16535 int i, size;
16536
16537 /* Create the first one, so we have something to add to. */
16538 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16539 VOIDmode, initialized);
16540 if (loc_result == NULL)
16541 return NULL;
16542 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16543 /* At present we only track constant-sized pieces. */
16544 if (!GET_MODE_SIZE (mode).is_constant (&size))
16545 return NULL;
16546 add_loc_descr_op_piece (&loc_result, size);
16547 for (i = 1; i < num_elem; i++)
16548 {
16549 dw_loc_descr_ref temp;
16550
16551 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16552 VOIDmode, initialized);
16553 if (temp == NULL)
16554 return NULL;
16555 add_loc_descr (&loc_result, temp);
16556 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16557 /* At present we only track constant-sized pieces. */
16558 if (!GET_MODE_SIZE (mode).is_constant (&size))
16559 return NULL;
16560 add_loc_descr_op_piece (&loc_result, size);
16561 }
16562 }
16563 break;
16564
16565 case CONST_INT:
16566 if (mode != VOIDmode && mode != BLKmode)
16567 {
16568 int_mode = as_a <scalar_int_mode> (mode);
16569 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16570 INTVAL (rtl));
16571 }
16572 break;
16573
16574 case CONST_DOUBLE:
16575 if (mode == VOIDmode)
16576 mode = GET_MODE (rtl);
16577
16578 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16579 {
16580 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16581
16582 /* Note that a CONST_DOUBLE rtx could represent either an integer
16583 or a floating-point constant. A CONST_DOUBLE is used whenever
16584 the constant requires more than one word in order to be
16585 adequately represented. We output CONST_DOUBLEs as blocks. */
16586 scalar_mode smode = as_a <scalar_mode> (mode);
16587 loc_result = new_loc_descr (DW_OP_implicit_value,
16588 GET_MODE_SIZE (smode), 0);
16589 #if TARGET_SUPPORTS_WIDE_INT == 0
16590 if (!SCALAR_FLOAT_MODE_P (smode))
16591 {
16592 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16593 loc_result->dw_loc_oprnd2.v.val_double
16594 = rtx_to_double_int (rtl);
16595 }
16596 else
16597 #endif
16598 {
16599 unsigned int length = GET_MODE_SIZE (smode);
16600 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16601
16602 insert_float (rtl, array);
16603 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16604 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16605 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16606 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16607 }
16608 }
16609 break;
16610
16611 case CONST_WIDE_INT:
16612 if (mode == VOIDmode)
16613 mode = GET_MODE (rtl);
16614
16615 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16616 {
16617 int_mode = as_a <scalar_int_mode> (mode);
16618 loc_result = new_loc_descr (DW_OP_implicit_value,
16619 GET_MODE_SIZE (int_mode), 0);
16620 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16621 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16622 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16623 }
16624 break;
16625
16626 case CONST_VECTOR:
16627 if (mode == VOIDmode)
16628 mode = GET_MODE (rtl);
16629
16630 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16631 {
16632 unsigned int length;
16633 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16634 return NULL;
16635
16636 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16637 unsigned char *array
16638 = ggc_vec_alloc<unsigned char> (length * elt_size);
16639 unsigned int i;
16640 unsigned char *p;
16641 machine_mode imode = GET_MODE_INNER (mode);
16642
16643 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16644 switch (GET_MODE_CLASS (mode))
16645 {
16646 case MODE_VECTOR_INT:
16647 for (i = 0, p = array; i < length; i++, p += elt_size)
16648 {
16649 rtx elt = CONST_VECTOR_ELT (rtl, i);
16650 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16651 }
16652 break;
16653
16654 case MODE_VECTOR_FLOAT:
16655 for (i = 0, p = array; i < length; i++, p += elt_size)
16656 {
16657 rtx elt = CONST_VECTOR_ELT (rtl, i);
16658 insert_float (elt, p);
16659 }
16660 break;
16661
16662 default:
16663 gcc_unreachable ();
16664 }
16665
16666 loc_result = new_loc_descr (DW_OP_implicit_value,
16667 length * elt_size, 0);
16668 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16669 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16670 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16671 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16672 }
16673 break;
16674
16675 case CONST:
16676 if (mode == VOIDmode
16677 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16678 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16679 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16680 {
16681 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16682 break;
16683 }
16684 /* FALLTHROUGH */
16685 case SYMBOL_REF:
16686 if (!const_ok_for_output (rtl))
16687 break;
16688 /* FALLTHROUGH */
16689 case LABEL_REF:
16690 if (is_a <scalar_int_mode> (mode, &int_mode)
16691 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16692 && (dwarf_version >= 4 || !dwarf_strict))
16693 {
16694 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16695 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16696 vec_safe_push (used_rtx_array, rtl);
16697 }
16698 break;
16699
16700 case DEBUG_IMPLICIT_PTR:
16701 loc_result = implicit_ptr_descriptor (rtl, 0);
16702 break;
16703
16704 case PLUS:
16705 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16706 && CONST_INT_P (XEXP (rtl, 1)))
16707 {
16708 loc_result
16709 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16710 break;
16711 }
16712 /* FALLTHRU */
16713 do_default:
16714 default:
16715 if ((is_a <scalar_int_mode> (mode, &int_mode)
16716 && GET_MODE (rtl) == int_mode
16717 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16718 && dwarf_version >= 4)
16719 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16720 {
16721 /* Value expression. */
16722 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16723 if (loc_result)
16724 add_loc_descr (&loc_result,
16725 new_loc_descr (DW_OP_stack_value, 0, 0));
16726 }
16727 break;
16728 }
16729
16730 return loc_result;
16731 }
16732
16733 /* We need to figure out what section we should use as the base for the
16734 address ranges where a given location is valid.
16735 1. If this particular DECL has a section associated with it, use that.
16736 2. If this function has a section associated with it, use that.
16737 3. Otherwise, use the text section.
16738 XXX: If you split a variable across multiple sections, we won't notice. */
16739
16740 static const char *
16741 secname_for_decl (const_tree decl)
16742 {
16743 const char *secname;
16744
16745 if (VAR_OR_FUNCTION_DECL_P (decl)
16746 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16747 && DECL_SECTION_NAME (decl))
16748 secname = DECL_SECTION_NAME (decl);
16749 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16750 {
16751 if (in_cold_section_p)
16752 {
16753 section *sec = current_function_section ();
16754 if (sec->common.flags & SECTION_NAMED)
16755 return sec->named.name;
16756 }
16757 secname = DECL_SECTION_NAME (current_function_decl);
16758 }
16759 else if (cfun && in_cold_section_p)
16760 secname = crtl->subsections.cold_section_label;
16761 else
16762 secname = text_section_label;
16763
16764 return secname;
16765 }
16766
16767 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16768
16769 static bool
16770 decl_by_reference_p (tree decl)
16771 {
16772 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16773 || VAR_P (decl))
16774 && DECL_BY_REFERENCE (decl));
16775 }
16776
16777 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16778 for VARLOC. */
16779
16780 static dw_loc_descr_ref
16781 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16782 enum var_init_status initialized)
16783 {
16784 int have_address = 0;
16785 dw_loc_descr_ref descr;
16786 machine_mode mode;
16787
16788 if (want_address != 2)
16789 {
16790 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16791 /* Single part. */
16792 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16793 {
16794 varloc = PAT_VAR_LOCATION_LOC (varloc);
16795 if (GET_CODE (varloc) == EXPR_LIST)
16796 varloc = XEXP (varloc, 0);
16797 mode = GET_MODE (varloc);
16798 if (MEM_P (varloc))
16799 {
16800 rtx addr = XEXP (varloc, 0);
16801 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16802 mode, initialized);
16803 if (descr)
16804 have_address = 1;
16805 else
16806 {
16807 rtx x = avoid_constant_pool_reference (varloc);
16808 if (x != varloc)
16809 descr = mem_loc_descriptor (x, mode, VOIDmode,
16810 initialized);
16811 }
16812 }
16813 else
16814 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16815 }
16816 else
16817 return 0;
16818 }
16819 else
16820 {
16821 if (GET_CODE (varloc) == VAR_LOCATION)
16822 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16823 else
16824 mode = DECL_MODE (loc);
16825 descr = loc_descriptor (varloc, mode, initialized);
16826 have_address = 1;
16827 }
16828
16829 if (!descr)
16830 return 0;
16831
16832 if (want_address == 2 && !have_address
16833 && (dwarf_version >= 4 || !dwarf_strict))
16834 {
16835 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16836 {
16837 expansion_failed (loc, NULL_RTX,
16838 "DWARF address size mismatch");
16839 return 0;
16840 }
16841 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16842 have_address = 1;
16843 }
16844 /* Show if we can't fill the request for an address. */
16845 if (want_address && !have_address)
16846 {
16847 expansion_failed (loc, NULL_RTX,
16848 "Want address and only have value");
16849 return 0;
16850 }
16851
16852 /* If we've got an address and don't want one, dereference. */
16853 if (!want_address && have_address)
16854 {
16855 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16856 enum dwarf_location_atom op;
16857
16858 if (size > DWARF2_ADDR_SIZE || size == -1)
16859 {
16860 expansion_failed (loc, NULL_RTX,
16861 "DWARF address size mismatch");
16862 return 0;
16863 }
16864 else if (size == DWARF2_ADDR_SIZE)
16865 op = DW_OP_deref;
16866 else
16867 op = DW_OP_deref_size;
16868
16869 add_loc_descr (&descr, new_loc_descr (op, size, 0));
16870 }
16871
16872 return descr;
16873 }
16874
16875 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
16876 if it is not possible. */
16877
16878 static dw_loc_descr_ref
16879 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
16880 {
16881 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
16882 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
16883 else if (dwarf_version >= 3 || !dwarf_strict)
16884 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
16885 else
16886 return NULL;
16887 }
16888
16889 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16890 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
16891
16892 static dw_loc_descr_ref
16893 dw_sra_loc_expr (tree decl, rtx loc)
16894 {
16895 rtx p;
16896 unsigned HOST_WIDE_INT padsize = 0;
16897 dw_loc_descr_ref descr, *descr_tail;
16898 unsigned HOST_WIDE_INT decl_size;
16899 rtx varloc;
16900 enum var_init_status initialized;
16901
16902 if (DECL_SIZE (decl) == NULL
16903 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
16904 return NULL;
16905
16906 decl_size = tree_to_uhwi (DECL_SIZE (decl));
16907 descr = NULL;
16908 descr_tail = &descr;
16909
16910 for (p = loc; p; p = XEXP (p, 1))
16911 {
16912 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
16913 rtx loc_note = *decl_piece_varloc_ptr (p);
16914 dw_loc_descr_ref cur_descr;
16915 dw_loc_descr_ref *tail, last = NULL;
16916 unsigned HOST_WIDE_INT opsize = 0;
16917
16918 if (loc_note == NULL_RTX
16919 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
16920 {
16921 padsize += bitsize;
16922 continue;
16923 }
16924 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
16925 varloc = NOTE_VAR_LOCATION (loc_note);
16926 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
16927 if (cur_descr == NULL)
16928 {
16929 padsize += bitsize;
16930 continue;
16931 }
16932
16933 /* Check that cur_descr either doesn't use
16934 DW_OP_*piece operations, or their sum is equal
16935 to bitsize. Otherwise we can't embed it. */
16936 for (tail = &cur_descr; *tail != NULL;
16937 tail = &(*tail)->dw_loc_next)
16938 if ((*tail)->dw_loc_opc == DW_OP_piece)
16939 {
16940 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
16941 * BITS_PER_UNIT;
16942 last = *tail;
16943 }
16944 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
16945 {
16946 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
16947 last = *tail;
16948 }
16949
16950 if (last != NULL && opsize != bitsize)
16951 {
16952 padsize += bitsize;
16953 /* Discard the current piece of the descriptor and release any
16954 addr_table entries it uses. */
16955 remove_loc_list_addr_table_entries (cur_descr);
16956 continue;
16957 }
16958
16959 /* If there is a hole, add DW_OP_*piece after empty DWARF
16960 expression, which means that those bits are optimized out. */
16961 if (padsize)
16962 {
16963 if (padsize > decl_size)
16964 {
16965 remove_loc_list_addr_table_entries (cur_descr);
16966 goto discard_descr;
16967 }
16968 decl_size -= padsize;
16969 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
16970 if (*descr_tail == NULL)
16971 {
16972 remove_loc_list_addr_table_entries (cur_descr);
16973 goto discard_descr;
16974 }
16975 descr_tail = &(*descr_tail)->dw_loc_next;
16976 padsize = 0;
16977 }
16978 *descr_tail = cur_descr;
16979 descr_tail = tail;
16980 if (bitsize > decl_size)
16981 goto discard_descr;
16982 decl_size -= bitsize;
16983 if (last == NULL)
16984 {
16985 HOST_WIDE_INT offset = 0;
16986 if (GET_CODE (varloc) == VAR_LOCATION
16987 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16988 {
16989 varloc = PAT_VAR_LOCATION_LOC (varloc);
16990 if (GET_CODE (varloc) == EXPR_LIST)
16991 varloc = XEXP (varloc, 0);
16992 }
16993 do
16994 {
16995 if (GET_CODE (varloc) == CONST
16996 || GET_CODE (varloc) == SIGN_EXTEND
16997 || GET_CODE (varloc) == ZERO_EXTEND)
16998 varloc = XEXP (varloc, 0);
16999 else if (GET_CODE (varloc) == SUBREG)
17000 varloc = SUBREG_REG (varloc);
17001 else
17002 break;
17003 }
17004 while (1);
17005 /* The DW_OP_bit_piece offset should be zero for register
17006 or implicit location descriptions and for empty location
17007 descriptions, but for memory addresses it needs big-endian
17008 adjustment. */
17009 if (MEM_P (varloc))
17010 {
17011 unsigned HOST_WIDE_INT memsize;
17012 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
17013 goto discard_descr;
17014 memsize *= BITS_PER_UNIT;
17015 if (memsize != bitsize)
17016 {
17017 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
17018 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
17019 goto discard_descr;
17020 if (memsize < bitsize)
17021 goto discard_descr;
17022 if (BITS_BIG_ENDIAN)
17023 offset = memsize - bitsize;
17024 }
17025 }
17026
17027 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
17028 if (*descr_tail == NULL)
17029 goto discard_descr;
17030 descr_tail = &(*descr_tail)->dw_loc_next;
17031 }
17032 }
17033
17034 /* If there were any non-empty expressions, add padding till the end of
17035 the decl. */
17036 if (descr != NULL && decl_size != 0)
17037 {
17038 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
17039 if (*descr_tail == NULL)
17040 goto discard_descr;
17041 }
17042 return descr;
17043
17044 discard_descr:
17045 /* Discard the descriptor and release any addr_table entries it uses. */
17046 remove_loc_list_addr_table_entries (descr);
17047 return NULL;
17048 }
17049
17050 /* Return the dwarf representation of the location list LOC_LIST of
17051 DECL. WANT_ADDRESS has the same meaning as in loc_list_from_tree
17052 function. */
17053
17054 static dw_loc_list_ref
17055 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
17056 {
17057 const char *endname, *secname;
17058 var_loc_view endview;
17059 rtx varloc;
17060 enum var_init_status initialized;
17061 struct var_loc_node *node;
17062 dw_loc_descr_ref descr;
17063 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
17064 dw_loc_list_ref list = NULL;
17065 dw_loc_list_ref *listp = &list;
17066
17067 /* Now that we know what section we are using for a base,
17068 actually construct the list of locations.
17069 The first location information is what is passed to the
17070 function that creates the location list, and the remaining
17071 locations just get added on to that list.
17072 Note that we only know the start address for a location
17073 (i.e. location changes), so to build the range, we use
17074 the range [current location start, next location start].
17075 This means we have to special case the last node, and generate
17076 a range of [last location start, end of function label]. */
17077
17078 if (cfun && crtl->has_bb_partition)
17079 {
17080 bool save_in_cold_section_p = in_cold_section_p;
17081 in_cold_section_p = first_function_block_is_cold;
17082 if (loc_list->last_before_switch == NULL)
17083 in_cold_section_p = !in_cold_section_p;
17084 secname = secname_for_decl (decl);
17085 in_cold_section_p = save_in_cold_section_p;
17086 }
17087 else
17088 secname = secname_for_decl (decl);
17089
17090 for (node = loc_list->first; node; node = node->next)
17091 {
17092 bool range_across_switch = false;
17093 if (GET_CODE (node->loc) == EXPR_LIST
17094 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
17095 {
17096 if (GET_CODE (node->loc) == EXPR_LIST)
17097 {
17098 descr = NULL;
17099 /* This requires DW_OP_{,bit_}piece, which is not usable
17100 inside DWARF expressions. */
17101 if (want_address == 2)
17102 descr = dw_sra_loc_expr (decl, node->loc);
17103 }
17104 else
17105 {
17106 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17107 varloc = NOTE_VAR_LOCATION (node->loc);
17108 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
17109 }
17110 if (descr)
17111 {
17112 /* If section switch happens in between node->label
17113 and node->next->label (or end of function) and
17114 we can't emit it as a single entry list,
17115 emit two ranges, first one ending at the end
17116 of first partition and second one starting at the
17117 beginning of second partition. */
17118 if (node == loc_list->last_before_switch
17119 && (node != loc_list->first || loc_list->first->next
17120 /* If we are to emit a view number, we will emit
17121 a loclist rather than a single location
17122 expression for the entire function (see
17123 loc_list_has_views), so we have to split the
17124 range that straddles across partitions. */
17125 || !ZERO_VIEW_P (node->view))
17126 && current_function_decl)
17127 {
17128 endname = cfun->fde->dw_fde_end;
17129 endview = 0;
17130 range_across_switch = true;
17131 }
17132 /* The variable has a location between NODE->LABEL and
17133 NODE->NEXT->LABEL. */
17134 else if (node->next)
17135 endname = node->next->label, endview = node->next->view;
17136 /* If the variable has a location at the last label
17137 it keeps its location until the end of function. */
17138 else if (!current_function_decl)
17139 endname = text_end_label, endview = 0;
17140 else
17141 {
17142 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17143 current_function_funcdef_no);
17144 endname = ggc_strdup (label_id);
17145 endview = 0;
17146 }
17147
17148 *listp = new_loc_list (descr, node->label, node->view,
17149 endname, endview, secname);
17150 if (TREE_CODE (decl) == PARM_DECL
17151 && node == loc_list->first
17152 && NOTE_P (node->loc)
17153 && strcmp (node->label, endname) == 0)
17154 (*listp)->force = true;
17155 listp = &(*listp)->dw_loc_next;
17156 }
17157 }
17158
17159 if (cfun
17160 && crtl->has_bb_partition
17161 && node == loc_list->last_before_switch)
17162 {
17163 bool save_in_cold_section_p = in_cold_section_p;
17164 in_cold_section_p = !first_function_block_is_cold;
17165 secname = secname_for_decl (decl);
17166 in_cold_section_p = save_in_cold_section_p;
17167 }
17168
17169 if (range_across_switch)
17170 {
17171 if (GET_CODE (node->loc) == EXPR_LIST)
17172 descr = dw_sra_loc_expr (decl, node->loc);
17173 else
17174 {
17175 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17176 varloc = NOTE_VAR_LOCATION (node->loc);
17177 descr = dw_loc_list_1 (decl, varloc, want_address,
17178 initialized);
17179 }
17180 gcc_assert (descr);
17181 /* The variable has a location between NODE->LABEL and
17182 NODE->NEXT->LABEL. */
17183 if (node->next)
17184 endname = node->next->label, endview = node->next->view;
17185 else
17186 endname = cfun->fde->dw_fde_second_end, endview = 0;
17187 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17188 endname, endview, secname);
17189 listp = &(*listp)->dw_loc_next;
17190 }
17191 }
17192
17193 /* Try to avoid the overhead of a location list by emitting a location
17194 expression instead, but only if we didn't have more than one
17195 location entry in the first place. If some entries were not
17196 representable, we don't want to pretend that a single entry that was
17197 representable applies to the entire scope in which the variable is
17198 available. */
17199 if (list && loc_list->first->next)
17200 gen_llsym (list);
17201 else
17202 maybe_gen_llsym (list);
17203
17204 return list;
17205 }
17206
17207 /* Return true if the loc_list has only a single element and thus can be
17208 represented as a location description. */
17209
17210 static bool
17211 single_element_loc_list_p (dw_loc_list_ref list)
17212 {
17213 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17214 return !list->ll_symbol;
17215 }
17216
17217 /* Duplicate a single element of location list. */
17218
17219 static inline dw_loc_descr_ref
17220 copy_loc_descr (dw_loc_descr_ref ref)
17221 {
17222 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17223 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17224 return copy;
17225 }
17226
17227 /* To each location in list LIST append loc descr REF. */
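/* The first location reuses REF itself; every following location gets a
   deep copy of the whole REF chain so that each expression owns its own
   descriptor nodes.  */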
17228
17229 static void
17230 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17231 {
17232 dw_loc_descr_ref copy;
17233 add_loc_descr (&list->expr, ref);
17234 list = list->dw_loc_next;
17235 while (list)
17236 {
17237 copy = copy_loc_descr (ref);
17238 add_loc_descr (&list->expr, copy);
17239 while (copy->dw_loc_next)
17240 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17241 list = list->dw_loc_next;
17242 }
17243 }
17244
17245 /* To each location in list LIST prepend loc descr REF. */
17246
17247 static void
17248 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17249 {
17250 dw_loc_descr_ref copy;
17251 dw_loc_descr_ref ref_end = list->expr;
17252 add_loc_descr (&ref, list->expr);
17253 list->expr = ref;
17254 list = list->dw_loc_next;
17255 while (list)
17256 {
17257 dw_loc_descr_ref end = list->expr;
17258 list->expr = copy = copy_loc_descr (ref);
17259 while (copy->dw_loc_next != ref_end)
17260 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17261 copy->dw_loc_next = end;
17262 list = list->dw_loc_next;
17263 }
17264 }
17265
17266 /* Given two lists RET and LIST,
17267 produce the location list that results from adding the expression in LIST
17268 to the expression in RET at each position in the program.
17269 Might be destructive on both RET and LIST.
17270 
17271 TODO: We handle only the simple cases of RET or LIST having at most one
17272 element. The general case would involve sorting the lists in program order
17273 and merging them, which will need some additional work.
17274 Adding that will improve the quality of debug info, especially for SRA-ed
17275 structures. */
17276
17277 static void
17278 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17279 {
17280 if (!list)
17281 return;
17282 if (!*ret)
17283 {
17284 *ret = list;
17285 return;
17286 }
17287 if (!list->dw_loc_next)
17288 {
17289 add_loc_descr_to_each (*ret, list->expr);
17290 return;
17291 }
17292 if (!(*ret)->dw_loc_next)
17293 {
17294 prepend_loc_descr_to_each (list, (*ret)->expr);
17295 *ret = list;
17296 return;
17297 }
17298 expansion_failed (NULL_TREE, NULL_RTX,
17299 "Don't know how to merge two non-trivial"
17300 " location lists.\n");
17301 *ret = NULL;
17302 return;
17303 }
17304
17305 /* LOC is a constant expression. Try our luck: look it up in the constant
17306 pool and return a loc_descr for its address. */
17307
17308 static dw_loc_descr_ref
17309 cst_pool_loc_descr (tree loc)
17310 {
17311 /* Get an RTL for this, if something has been emitted. */
17312 rtx rtl = lookup_constant_def (loc);
17313
17314 if (!rtl || !MEM_P (rtl))
17315 {
17316 gcc_assert (!rtl);
17317 return 0;
17318 }
17319 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17320
17321 /* TODO: We might get more coverage if we were actually delaying expansion
17322 of all expressions until the end of compilation, when constant pools are
17323 fully populated. */
17324 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17325 {
17326 expansion_failed (loc, NULL_RTX,
17327 "CST value in constant pool but not marked.");
17328 return 0;
17329 }
17330 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17331 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17332 }
17333
17334 /* Return a dw_loc_list representing the address of addr_expr LOC
17335 by looking for an inner INDIRECT_REF expression and turning
17336 it into simple arithmetic.
17337
17338 See loc_list_from_tree for the meaning of CONTEXT. */
17339
17340 static dw_loc_list_ref
17341 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17342 loc_descr_context *context)
17343 {
17344 tree obj, offset;
17345 poly_int64 bitsize, bitpos, bytepos;
17346 machine_mode mode;
17347 int unsignedp, reversep, volatilep = 0;
17348 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17349
17350 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17351 &bitsize, &bitpos, &offset, &mode,
17352 &unsignedp, &reversep, &volatilep);
17353 STRIP_NOPS (obj);
17354 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17355 {
17356 expansion_failed (loc, NULL_RTX, "bitfield access");
17357 return 0;
17358 }
17359 if (!INDIRECT_REF_P (obj))
17360 {
17361 expansion_failed (obj,
17362 NULL_RTX, "no indirect ref in inner reference");
17363 return 0;
17364 }
17365 if (!offset && known_eq (bitpos, 0))
17366 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17367 context);
17368 else if (toplev
17369 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17370 && (dwarf_version >= 4 || !dwarf_strict))
17371 {
17372 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17373 if (!list_ret)
17374 return 0;
17375 if (offset)
17376 {
17377 /* Variable offset. */
17378 list_ret1 = loc_list_from_tree (offset, 0, context);
17379 if (list_ret1 == 0)
17380 return 0;
17381 add_loc_list (&list_ret, list_ret1);
17382 if (!list_ret)
17383 return 0;
17384 add_loc_descr_to_each (list_ret,
17385 new_loc_descr (DW_OP_plus, 0, 0));
17386 }
17387 HOST_WIDE_INT value;
17388 if (bytepos.is_constant (&value) && value > 0)
17389 add_loc_descr_to_each (list_ret,
17390 new_loc_descr (DW_OP_plus_uconst, value, 0));
17391 else if (maybe_ne (bytepos, 0))
17392 loc_list_plus_const (list_ret, bytepos);
17393 add_loc_descr_to_each (list_ret,
17394 new_loc_descr (DW_OP_stack_value, 0, 0));
17395 }
17396 return list_ret;
17397 }
17398
17399 /* Set LOC to the next operation that is not a DW_OP_nop operation. If
17400 all operations from LOC are nops, move to the last one. Insert into NOPS all
17401 operations that are skipped. */
17402
17403 static void
17404 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17405 hash_set<dw_loc_descr_ref> &nops)
17406 {
17407 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17408 {
17409 nops.add (loc);
17410 loc = loc->dw_loc_next;
17411 }
17412 }
17413
17414 /* Helper for loc_descr_without_nops: free the location description operation
17415 LOC. */
17416
17417 bool
17418 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17419 {
17420 ggc_free (loc);
17421 return true;
17422 }
17423
17424 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17425 finishes LOC. */
17426
17427 static void
17428 loc_descr_without_nops (dw_loc_descr_ref &loc)
17429 {
17430 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17431 return;
17432
17433 /* Set of all DW_OP_nop operations we remove. */
17434 hash_set<dw_loc_descr_ref> nops;
17435
17436 /* First, strip all prefix NOP operations in order to keep the head of the
17437 operations list. */
17438 loc_descr_to_next_no_nop (loc, nops);
17439
17440 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17441 {
17442 /* For control flow operations: strip "prefix" nops in destination
17443 labels. */
17444 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17445 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17446 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17447 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17448
17449 /* Do the same for the operations that follow, then move to the next
17450 iteration. */
17451 if (cur->dw_loc_next != NULL)
17452 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17453 cur = cur->dw_loc_next;
17454 }
17455
17456 nops.traverse<void *, free_loc_descr> (NULL);
17457 }
17458
17459
17460 struct dwarf_procedure_info;
17461
17462 /* Helper structure for location descriptions generation. */
17463 struct loc_descr_context
17464 {
17465 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17466 NULL_TREE if DW_OP_push_object_address is invalid for this location
17467 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17468 tree context_type;
17469 /* The ..._DECL node that should be translated as a
17470 DW_OP_push_object_address operation. */
17471 tree base_decl;
17472 /* Information about the DWARF procedure we are currently generating. NULL if
17473 we are not generating a DWARF procedure. */
17474 struct dwarf_procedure_info *dpi;
17475 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
17476 by the consumer. Used for DW_TAG_generic_subrange attributes.
17477 bool placeholder_arg;
17478 /* True if PLACEHOLDER_EXPR has been seen. */
17479 bool placeholder_seen;
17480 };
17481
17482 /* DWARF procedures generation
17483
17484 DWARF expressions (aka. location descriptions) are used to encode variable
17485 quantities such as sizes or offsets. Such computations can have redundant parts
17486 that can be factorized in order to reduce the size of the output debug
17487 information. This is the whole point of DWARF procedures.
17488
17489 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17490 already factorized into functions ("size functions") in order to handle very
17491 big and complex types. Such functions are quite simple: they have integral
17492 arguments, they return an integral result and their body contains only a
17493 return statement with arithmetic expressions. This is the only kind of
17494 function we are interested in translating into DWARF procedures, here.
17495
17496 DWARF expressions and DWARF procedures are executed using a stack, so we have
17497 to define some calling convention for them to interact. Let's say that:
17498
17499 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17500 all arguments in reverse order (right-to-left) so that when the DWARF
17501 procedure execution starts, the first argument is the top of the stack.
17502
17503 - Then, when returning, the DWARF procedure must have consumed all arguments
17504 on the stack, must have pushed the result and touched nothing else.
17505
17506 - Each argument and the result have an integral type that can be held in a
17507 single stack slot.
17508
17509 - We call "frame offset" the number of stack slots that are "under DWARF
17510 procedure control": it includes the argument slots, the temporaries and
17511 the result slot. Thus, it is equal to the number of arguments when the
17512 procedure execution starts and must be equal to one (the result) when it
17513 returns. */
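/* A minimal illustration (hypothetical; not part of the original source):
   for a size function "sz (a) { return a * 4; }", args_count is 1 and the
   generated DWARF procedure behaves like the sequence below, with the stack
   shown top-first after each operation:

       DW_OP_dup        a a
       DW_OP_lit4       4 a a
       DW_OP_mul        4*a a
       DW_OP_swap       a 4*a     (epilogue)
       DW_OP_drop       4*a       (epilogue)

   The frame offset is 1 (the argument) on entry and 1 (the result) on
   return, as required by the convention above.  */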
17514
17515 /* Helper structure used when generating operations for a DWARF procedure. */
17516 struct dwarf_procedure_info
17517 {
17518 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17519 currently translated. */
17520 tree fndecl;
17521 /* The number of arguments FNDECL takes. */
17522 unsigned args_count;
17523 };
17524
17525 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17526 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17527 equate it to this DIE. */
17528
17529 static dw_die_ref
17530 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17531 dw_die_ref parent_die)
17532 {
17533 dw_die_ref dwarf_proc_die;
17534
17535 if ((dwarf_version < 3 && dwarf_strict)
17536 || location == NULL)
17537 return NULL;
17538
17539 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17540 if (fndecl)
17541 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17542 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17543 return dwarf_proc_die;
17544 }
17545
17546 /* Return whether TYPE is a supported type as a DWARF procedure argument
17547 type or return type (we handle only scalar types and pointer types that
17548 aren't wider than the DWARF expression evaluation stack). */
17549
17550 static bool
17551 is_handled_procedure_type (tree type)
17552 {
17553 return ((INTEGRAL_TYPE_P (type)
17554 || TREE_CODE (type) == OFFSET_TYPE
17555 || TREE_CODE (type) == POINTER_TYPE)
17556 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17557 }
17558
17559 /* Helper for resolve_args_picking: do the same but stop when coming across
17560 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17561 offset *before* evaluating the corresponding operation. */
17562
17563 static bool
17564 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17565 struct dwarf_procedure_info *dpi,
17566 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17567 {
17568 /* The "frame_offset" identifier is already used to name a macro... */
17569 unsigned frame_offset_ = initial_frame_offset;
17570 dw_loc_descr_ref l;
17571
17572 for (l = loc; l != NULL;)
17573 {
17574 bool existed;
17575 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17576
17577 /* If we already met this node, there is nothing to compute anymore. */
17578 if (existed)
17579 {
17580 /* Make sure that the stack size is consistent wherever the execution
17581 flow comes from. */
17582 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17583 break;
17584 }
17585 l_frame_offset = frame_offset_;
17586
17587 /* If needed, relocate the picking offset with respect to the frame
17588 offset. */
17589 if (l->frame_offset_rel)
17590 {
17591 unsigned HOST_WIDE_INT off;
17592 switch (l->dw_loc_opc)
17593 {
17594 case DW_OP_pick:
17595 off = l->dw_loc_oprnd1.v.val_unsigned;
17596 break;
17597 case DW_OP_dup:
17598 off = 0;
17599 break;
17600 case DW_OP_over:
17601 off = 1;
17602 break;
17603 default:
17604 gcc_unreachable ();
17605 }
17606 /* frame_offset_ is the size of the current stack frame, including
17607 incoming arguments. Besides, the arguments are pushed
17608 right-to-left. Thus, in order to access the Nth argument from
17609 this operation node, the picking has to skip temporaries *plus*
17610 one stack slot per argument (0 for the first one, 1 for the second
17611 one, etc.).
17612
17613 The targeted argument number (N) is already set as the operand,
17614 and the number of temporaries can be computed with:
17615 frame_offset_ - dpi->args_count */
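/* Worked example (assumed values, for illustration only): with
   dpi->args_count == 2 and frame_offset_ == 3 (one temporary pushed on
   top of the two arguments), accessing the second argument (operand
   N == 1) gives off = 1 + 3 - 2 = 2, i.e. DW_OP_pick 2 skips the
   temporary and the first argument.  */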
17616 off += frame_offset_ - dpi->args_count;
17617
17618 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17619 if (off > 255)
17620 return false;
17621
17622 if (off == 0)
17623 {
17624 l->dw_loc_opc = DW_OP_dup;
17625 l->dw_loc_oprnd1.v.val_unsigned = 0;
17626 }
17627 else if (off == 1)
17628 {
17629 l->dw_loc_opc = DW_OP_over;
17630 l->dw_loc_oprnd1.v.val_unsigned = 0;
17631 }
17632 else
17633 {
17634 l->dw_loc_opc = DW_OP_pick;
17635 l->dw_loc_oprnd1.v.val_unsigned = off;
17636 }
17637 }
17638
17639 /* Update frame_offset according to the effect the current operation has
17640 on the stack. */
17641 switch (l->dw_loc_opc)
17642 {
17643 case DW_OP_deref:
17644 case DW_OP_swap:
17645 case DW_OP_rot:
17646 case DW_OP_abs:
17647 case DW_OP_neg:
17648 case DW_OP_not:
17649 case DW_OP_plus_uconst:
17650 case DW_OP_skip:
17651 case DW_OP_reg0:
17652 case DW_OP_reg1:
17653 case DW_OP_reg2:
17654 case DW_OP_reg3:
17655 case DW_OP_reg4:
17656 case DW_OP_reg5:
17657 case DW_OP_reg6:
17658 case DW_OP_reg7:
17659 case DW_OP_reg8:
17660 case DW_OP_reg9:
17661 case DW_OP_reg10:
17662 case DW_OP_reg11:
17663 case DW_OP_reg12:
17664 case DW_OP_reg13:
17665 case DW_OP_reg14:
17666 case DW_OP_reg15:
17667 case DW_OP_reg16:
17668 case DW_OP_reg17:
17669 case DW_OP_reg18:
17670 case DW_OP_reg19:
17671 case DW_OP_reg20:
17672 case DW_OP_reg21:
17673 case DW_OP_reg22:
17674 case DW_OP_reg23:
17675 case DW_OP_reg24:
17676 case DW_OP_reg25:
17677 case DW_OP_reg26:
17678 case DW_OP_reg27:
17679 case DW_OP_reg28:
17680 case DW_OP_reg29:
17681 case DW_OP_reg30:
17682 case DW_OP_reg31:
17683 case DW_OP_bregx:
17684 case DW_OP_piece:
17685 case DW_OP_deref_size:
17686 case DW_OP_nop:
17687 case DW_OP_bit_piece:
17688 case DW_OP_implicit_value:
17689 case DW_OP_stack_value:
17690 break;
17691
17692 case DW_OP_addr:
17693 case DW_OP_const1u:
17694 case DW_OP_const1s:
17695 case DW_OP_const2u:
17696 case DW_OP_const2s:
17697 case DW_OP_const4u:
17698 case DW_OP_const4s:
17699 case DW_OP_const8u:
17700 case DW_OP_const8s:
17701 case DW_OP_constu:
17702 case DW_OP_consts:
17703 case DW_OP_dup:
17704 case DW_OP_over:
17705 case DW_OP_pick:
17706 case DW_OP_lit0:
17707 case DW_OP_lit1:
17708 case DW_OP_lit2:
17709 case DW_OP_lit3:
17710 case DW_OP_lit4:
17711 case DW_OP_lit5:
17712 case DW_OP_lit6:
17713 case DW_OP_lit7:
17714 case DW_OP_lit8:
17715 case DW_OP_lit9:
17716 case DW_OP_lit10:
17717 case DW_OP_lit11:
17718 case DW_OP_lit12:
17719 case DW_OP_lit13:
17720 case DW_OP_lit14:
17721 case DW_OP_lit15:
17722 case DW_OP_lit16:
17723 case DW_OP_lit17:
17724 case DW_OP_lit18:
17725 case DW_OP_lit19:
17726 case DW_OP_lit20:
17727 case DW_OP_lit21:
17728 case DW_OP_lit22:
17729 case DW_OP_lit23:
17730 case DW_OP_lit24:
17731 case DW_OP_lit25:
17732 case DW_OP_lit26:
17733 case DW_OP_lit27:
17734 case DW_OP_lit28:
17735 case DW_OP_lit29:
17736 case DW_OP_lit30:
17737 case DW_OP_lit31:
17738 case DW_OP_breg0:
17739 case DW_OP_breg1:
17740 case DW_OP_breg2:
17741 case DW_OP_breg3:
17742 case DW_OP_breg4:
17743 case DW_OP_breg5:
17744 case DW_OP_breg6:
17745 case DW_OP_breg7:
17746 case DW_OP_breg8:
17747 case DW_OP_breg9:
17748 case DW_OP_breg10:
17749 case DW_OP_breg11:
17750 case DW_OP_breg12:
17751 case DW_OP_breg13:
17752 case DW_OP_breg14:
17753 case DW_OP_breg15:
17754 case DW_OP_breg16:
17755 case DW_OP_breg17:
17756 case DW_OP_breg18:
17757 case DW_OP_breg19:
17758 case DW_OP_breg20:
17759 case DW_OP_breg21:
17760 case DW_OP_breg22:
17761 case DW_OP_breg23:
17762 case DW_OP_breg24:
17763 case DW_OP_breg25:
17764 case DW_OP_breg26:
17765 case DW_OP_breg27:
17766 case DW_OP_breg28:
17767 case DW_OP_breg29:
17768 case DW_OP_breg30:
17769 case DW_OP_breg31:
17770 case DW_OP_fbreg:
17771 case DW_OP_push_object_address:
17772 case DW_OP_call_frame_cfa:
17773 case DW_OP_GNU_variable_value:
17774 ++frame_offset_;
17775 break;
17776
17777 case DW_OP_drop:
17778 case DW_OP_xderef:
17779 case DW_OP_and:
17780 case DW_OP_div:
17781 case DW_OP_minus:
17782 case DW_OP_mod:
17783 case DW_OP_mul:
17784 case DW_OP_or:
17785 case DW_OP_plus:
17786 case DW_OP_shl:
17787 case DW_OP_shr:
17788 case DW_OP_shra:
17789 case DW_OP_xor:
17790 case DW_OP_bra:
17791 case DW_OP_eq:
17792 case DW_OP_ge:
17793 case DW_OP_gt:
17794 case DW_OP_le:
17795 case DW_OP_lt:
17796 case DW_OP_ne:
17797 case DW_OP_regx:
17798 case DW_OP_xderef_size:
17799 --frame_offset_;
17800 break;
17801
17802 case DW_OP_call2:
17803 case DW_OP_call4:
17804 case DW_OP_call_ref:
17805 {
17806 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17807 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17808
17809 if (stack_usage == NULL)
17810 return false;
17811 frame_offset_ += *stack_usage;
17812 break;
17813 }
17814
17815 case DW_OP_implicit_pointer:
17816 case DW_OP_entry_value:
17817 case DW_OP_const_type:
17818 case DW_OP_regval_type:
17819 case DW_OP_deref_type:
17820 case DW_OP_convert:
17821 case DW_OP_reinterpret:
17822 case DW_OP_form_tls_address:
17823 case DW_OP_GNU_push_tls_address:
17824 case DW_OP_GNU_uninit:
17825 case DW_OP_GNU_encoded_addr:
17826 case DW_OP_GNU_implicit_pointer:
17827 case DW_OP_GNU_entry_value:
17828 case DW_OP_GNU_const_type:
17829 case DW_OP_GNU_regval_type:
17830 case DW_OP_GNU_deref_type:
17831 case DW_OP_GNU_convert:
17832 case DW_OP_GNU_reinterpret:
17833 case DW_OP_GNU_parameter_ref:
17834 /* loc_list_from_tree will probably not output these operations for
17835 size functions, so assume they will not appear here. */
17836 /* Fall through... */
17837
17838 default:
17839 gcc_unreachable ();
17840 }
17841
17842 /* Now, follow the control flow (except subroutine calls). */
17843 switch (l->dw_loc_opc)
17844 {
17845 case DW_OP_bra:
17846 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17847 frame_offsets))
17848 return false;
17849 /* Fall through. */
17850
17851 case DW_OP_skip:
17852 l = l->dw_loc_oprnd1.v.val_loc;
17853 break;
17854
17855 case DW_OP_stack_value:
17856 return true;
17857
17858 default:
17859 l = l->dw_loc_next;
17860 break;
17861 }
17862 }
17863
17864 return true;
17865 }
17866
17867 /* Make a DFS over operations reachable through LOC (i.e. follow branch
17868 operations) in order to resolve the operand of DW_OP_pick operations that
17869 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
17870 offset *before* LOC is executed. Return if all relocations were
17871 successful. */
17872
17873 static bool
17874 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17875 struct dwarf_procedure_info *dpi)
17876 {
17877 /* Associate to all visited operations the frame offset *before* evaluating
17878 this operation. */
17879 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
17880
17881 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
17882 frame_offsets);
17883 }
17884
17885 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
17886 Return NULL if it is not possible. */
17887
17888 static dw_die_ref
17889 function_to_dwarf_procedure (tree fndecl)
17890 {
17891 struct loc_descr_context ctx;
17892 struct dwarf_procedure_info dpi;
17893 dw_die_ref dwarf_proc_die;
17894 tree tree_body = DECL_SAVED_TREE (fndecl);
17895 dw_loc_descr_ref loc_body, epilogue;
17896
17897 tree cursor;
17898 unsigned i;
17899
17900 /* Do not generate multiple DWARF procedures for the same function
17901 declaration. */
17902 dwarf_proc_die = lookup_decl_die (fndecl);
17903 if (dwarf_proc_die != NULL)
17904 return dwarf_proc_die;
17905
17906 /* DWARF procedures are available starting with the DWARFv3 standard. */
17907 if (dwarf_version < 3 && dwarf_strict)
17908 return NULL;
17909
17910 /* We handle only functions for which we still have a body, that return a
17911 supported type and that take arguments with supported types. Note that
17912 there is no point translating functions that return nothing. */
17913 if (tree_body == NULL_TREE
17914 || DECL_RESULT (fndecl) == NULL_TREE
17915 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
17916 return NULL;
17917
17918 for (cursor = DECL_ARGUMENTS (fndecl);
17919 cursor != NULL_TREE;
17920 cursor = TREE_CHAIN (cursor))
17921 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
17922 return NULL;
17923
17924 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
17925 if (TREE_CODE (tree_body) != RETURN_EXPR)
17926 return NULL;
17927 tree_body = TREE_OPERAND (tree_body, 0);
17928 if (TREE_CODE (tree_body) != MODIFY_EXPR
17929 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
17930 return NULL;
17931 tree_body = TREE_OPERAND (tree_body, 1);
17932
17933 /* Try to translate the body expression itself. Note that this will probably
17934 cause an infinite recursion if its call graph has a cycle. This is very
17935 unlikely for size functions, however, so don't bother with such things at
17936 the moment. */
17937 ctx.context_type = NULL_TREE;
17938 ctx.base_decl = NULL_TREE;
17939 ctx.dpi = &dpi;
17940 ctx.placeholder_arg = false;
17941 ctx.placeholder_seen = false;
17942 dpi.fndecl = fndecl;
17943 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
17944 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
17945 if (!loc_body)
17946 return NULL;
17947
17948 /* After evaluating all operands in "loc_body", we should still have on the
17949 stack all arguments plus the desired function result (top of the stack).
17950 Generate code in order to keep only the result in our stack frame. */
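/* For instance (illustration only, assuming two arguments): the stack after
   the body is, top first: result, arg1, arg2.  Each DW_OP_swap/DW_OP_drop
   pair peels one argument away from under the result:
       swap -> arg1, result, arg2     drop -> result, arg2
       swap -> arg2, result           drop -> result  */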
17951 epilogue = NULL;
17952 for (i = 0; i < dpi.args_count; ++i)
17953 {
17954 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
17955 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
17956 op_couple->dw_loc_next->dw_loc_next = epilogue;
17957 epilogue = op_couple;
17958 }
17959 add_loc_descr (&loc_body, epilogue);
17960 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
17961 return NULL;
17962
17963 /* Trailing nops from loc_descriptor_from_tree (if any) cannot be removed
17964 because they are considered useful. Now that there is an epilogue, they
17965 are no longer useful, so give it another try. */
17966 loc_descr_without_nops (loc_body);
17967
17968 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
17969 a DW_TAG_dwarf_procedure, so we may have a conflict here. It's unlikely,
17970 though, given that size functions do not come from source, so they should
17971 not have a dedicated DW_TAG_subprogram DIE. */
17972 dwarf_proc_die
17973 = new_dwarf_proc_die (loc_body, fndecl,
17974 get_context_die (DECL_CONTEXT (fndecl)));
17975
17976 /* The called DWARF procedure consumes one stack slot per argument and
17977 returns one stack slot. */
17978 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
17979
17980 return dwarf_proc_die;
17981 }
17982
17983
17984 /* Generate Dwarf location list representing LOC.
17985 If WANT_ADDRESS is false, an expression computing LOC will be returned.
17986 If WANT_ADDRESS is 1, an expression computing the address of LOC will be returned.
17987 If WANT_ADDRESS is 2, an expression computing an address usable in a location
17988 will be returned (i.e. DW_OP_reg can be used
17989 to refer to register values).
17990
17991 CONTEXT provides information to customize the location descriptions
17992 generation. Its context_type field specifies what type is implicitly
17993 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
17994 will not be generated.
17995
17996 Its DPI field determines whether we are generating a DWARF expression for a
17997 DWARF procedure, so PARM_DECL references are processed specifically.
17998
17999 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
18000 and dpi fields were null. */
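/* Illustrative sketch (assumed example, not from the source): for a local
   variable whose home is a stack slot at frame offset -16, WANT_ADDRESS == 1
   would typically yield the address computation DW_OP_fbreg -16,
   WANT_ADDRESS == 0 would append a dereference (DW_OP_deref or
   DW_OP_deref_size) so that the value itself is produced, and
   WANT_ADDRESS == 2 additionally allows register locations such as
   DW_OP_reg3, which name a place rather than compute a value.  */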
18001
18002 static dw_loc_list_ref
18003 loc_list_from_tree_1 (tree loc, int want_address,
18004 struct loc_descr_context *context)
18005 {
18006 dw_loc_descr_ref ret = NULL, ret1 = NULL;
18007 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
18008 int have_address = 0;
18009 enum dwarf_location_atom op;
18010
18011 /* ??? Most of the time we do not take proper care of sign/zero
18012 extending the values. Hopefully this won't be a real
18013 problem... */
18014
18015 if (context != NULL
18016 && context->base_decl == loc
18017 && want_address == 0)
18018 {
18019 if (dwarf_version >= 3 || !dwarf_strict)
18020 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
18021 NULL, 0, NULL, 0, NULL);
18022 else
18023 return NULL;
18024 }
18025
18026 switch (TREE_CODE (loc))
18027 {
18028 case ERROR_MARK:
18029 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
18030 return 0;
18031
18032 case PLACEHOLDER_EXPR:
18033 /* This case involves extracting fields from an object to determine the
18034 position of other fields. It is supposed to appear only as the first
18035 operand of COMPONENT_REF nodes and to reference precisely the type
18036 that the context allows. */
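/* Illustration (an assumption about typical front-end usage, e.g. for
   self-referential record types with dynamic layout): a size or offset
   expression may contain a PLACEHOLDER_EXPR for the record being described;
   when its type matches the context type, it is translated below into
   DW_OP_push_object_address so that the consumer substitutes the actual
   object's address at evaluation time.  */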
18037 if (context != NULL
18038 && TREE_TYPE (loc) == context->context_type
18039 && want_address >= 1)
18040 {
18041 if (dwarf_version >= 3 || !dwarf_strict)
18042 {
18043 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
18044 have_address = 1;
18045 break;
18046 }
18047 else
18048 return NULL;
18049 }
18050 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
18051 the single argument passed by the consumer. */
18052 else if (context != NULL
18053 && context->placeholder_arg
18054 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
18055 && want_address == 0)
18056 {
18057 ret = new_loc_descr (DW_OP_pick, 0, 0);
18058 ret->frame_offset_rel = 1;
18059 context->placeholder_seen = true;
18060 break;
18061 }
18062 else
18063 expansion_failed (loc, NULL_RTX,
18064 "PLACEHOLDER_EXPR for an unexpected type");
18065 break;
18066
18067 case CALL_EXPR:
18068 {
18069 const int nargs = call_expr_nargs (loc);
18070 tree callee = get_callee_fndecl (loc);
18071 int i;
18072 dw_die_ref dwarf_proc;
18073
18074 if (callee == NULL_TREE)
18075 goto call_expansion_failed;
18076
18077 /* We handle only functions that return an integer. */
18078 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
18079 goto call_expansion_failed;
18080
18081 dwarf_proc = function_to_dwarf_procedure (callee);
18082 if (dwarf_proc == NULL)
18083 goto call_expansion_failed;
18084
18085 /* Evaluate arguments right-to-left so that the first argument will
18086 be the top-most one on the stack. */
18087 for (i = nargs - 1; i >= 0; --i)
18088 {
18089 dw_loc_descr_ref loc_descr
18090 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
18091 context);
18092
18093 if (loc_descr == NULL)
18094 goto call_expansion_failed;
18095
18096 add_loc_descr (&ret, loc_descr);
18097 }
18098
18099 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
18100 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18101 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
18102 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
18103 add_loc_descr (&ret, ret1);
18104 break;
18105
18106 call_expansion_failed:
18107 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
18108 /* There are no opcodes for these operations. */
18109 return 0;
18110 }
18111
18112 case PREINCREMENT_EXPR:
18113 case PREDECREMENT_EXPR:
18114 case POSTINCREMENT_EXPR:
18115 case POSTDECREMENT_EXPR:
18116 expansion_failed (loc, NULL_RTX, "PRE/POST INCREMENT/DECREMENT");
18117 /* There are no opcodes for these operations. */
18118 return 0;
18119
18120 case ADDR_EXPR:
18121 /* If we already want an address, see if there is an INDIRECT_REF inside,
18122 e.g. for &this->field. */
18123 if (want_address)
18124 {
18125 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18126 (loc, want_address == 2, context);
18127 if (list_ret)
18128 have_address = 1;
18129 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18130 && (ret = cst_pool_loc_descr (loc)))
18131 have_address = 1;
18132 }
18133 /* Otherwise, process the argument and look for the address. */
18134 if (!list_ret && !ret)
18135 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18136 else
18137 {
18138 if (want_address)
18139 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18140 return NULL;
18141 }
18142 break;
18143
18144 case VAR_DECL:
18145 if (DECL_THREAD_LOCAL_P (loc))
18146 {
18147 rtx rtl;
18148 enum dwarf_location_atom tls_op;
18149 enum dtprel_bool dtprel = dtprel_false;
18150
18151 if (targetm.have_tls)
18152 {
18153 /* If this is not defined, we have no way to emit the
18154 data. */
18155 if (!targetm.asm_out.output_dwarf_dtprel)
18156 return 0;
18157
18158 /* The way DW_OP_GNU_push_tls_address is specified, we
18159 can only look up addresses of objects in the current
18160 module. We used DW_OP_addr as first op, but that's
18161 wrong, because DW_OP_addr is relocated by the debug
18162 info consumer, while DW_OP_GNU_push_tls_address
18163 operand shouldn't be. */
18164 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18165 return 0;
18166 dtprel = dtprel_true;
18167 /* We check for DWARF 5 here because gdb did not implement
18168 DW_OP_form_tls_address until after 7.12. */
18169 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18170 : DW_OP_GNU_push_tls_address);
18171 }
18172 else
18173 {
18174 if (!targetm.emutls.debug_form_tls_address
18175 || !(dwarf_version >= 3 || !dwarf_strict))
18176 return 0;
18177 /* We stuffed the control variable into the DECL_VALUE_EXPR
18178 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18179 no longer appear in gimple code. We used the control
18180 variable specifically so that we could pick it up here. */
18181 loc = DECL_VALUE_EXPR (loc);
18182 tls_op = DW_OP_form_tls_address;
18183 }
18184
18185 rtl = rtl_for_decl_location (loc);
18186 if (rtl == NULL_RTX)
18187 return 0;
18188
18189 if (!MEM_P (rtl))
18190 return 0;
18191 rtl = XEXP (rtl, 0);
18192 if (! CONSTANT_P (rtl))
18193 return 0;
18194
18195 ret = new_addr_loc_descr (rtl, dtprel);
18196 ret1 = new_loc_descr (tls_op, 0, 0);
18197 add_loc_descr (&ret, ret1);
18198
18199 have_address = 1;
18200 break;
18201 }
18202 /* FALLTHRU */
18203
18204 case PARM_DECL:
18205 if (context != NULL && context->dpi != NULL
18206 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18207 {
18208 /* We are generating code for a DWARF procedure and we want to access
18209 one of its arguments: find the appropriate argument offset and let
18210 the resolve_args_picking pass compute the offset that complies
18211 with the stack frame size. */
18212 unsigned i = 0;
18213 tree cursor;
18214
18215 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18216 cursor != NULL_TREE && cursor != loc;
18217 cursor = TREE_CHAIN (cursor), ++i)
18218 ;
18219 /* If we are translating a DWARF procedure, all referenced parameters
18220 must belong to the current function. */
18221 gcc_assert (cursor != NULL_TREE);
18222
18223 ret = new_loc_descr (DW_OP_pick, i, 0);
18224 ret->frame_offset_rel = 1;
18225 break;
18226 }
18227 /* FALLTHRU */
18228
18229 case RESULT_DECL:
18230 if (DECL_HAS_VALUE_EXPR_P (loc))
18231 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18232 want_address, context);
18233 /* FALLTHRU */
18234
18235 case FUNCTION_DECL:
18236 {
18237 rtx rtl;
18238 var_loc_list *loc_list = lookup_decl_loc (loc);
18239
18240 if (loc_list && loc_list->first)
18241 {
18242 list_ret = dw_loc_list (loc_list, loc, want_address);
18243 have_address = want_address != 0;
18244 break;
18245 }
18246 rtl = rtl_for_decl_location (loc);
18247 if (rtl == NULL_RTX)
18248 {
18249 if (TREE_CODE (loc) != FUNCTION_DECL
18250 && early_dwarf
18251 && current_function_decl
18252 && want_address != 1
18253 && ! DECL_IGNORED_P (loc)
18254 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18255 || POINTER_TYPE_P (TREE_TYPE (loc)))
18256 && DECL_CONTEXT (loc) == current_function_decl
18257 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18258 <= DWARF2_ADDR_SIZE))
18259 {
18260 dw_die_ref ref = lookup_decl_die (loc);
18261 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18262 if (ref)
18263 {
18264 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18265 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18266 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18267 }
18268 else
18269 {
18270 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18271 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18272 }
18273 break;
18274 }
18275 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18276 return 0;
18277 }
18278 else if (CONST_INT_P (rtl))
18279 {
18280 HOST_WIDE_INT val = INTVAL (rtl);
18281 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18282 val &= GET_MODE_MASK (DECL_MODE (loc));
18283 ret = int_loc_descriptor (val);
18284 }
18285 else if (GET_CODE (rtl) == CONST_STRING)
18286 {
18287 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18288 return 0;
18289 }
18290 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18291 ret = new_addr_loc_descr (rtl, dtprel_false);
18292 else
18293 {
18294 machine_mode mode, mem_mode;
18295
18296 /* Certain constructs can only be represented at top-level. */
18297 if (want_address == 2)
18298 {
18299 ret = loc_descriptor (rtl, VOIDmode,
18300 VAR_INIT_STATUS_INITIALIZED);
18301 have_address = 1;
18302 }
18303 else
18304 {
18305 mode = GET_MODE (rtl);
18306 mem_mode = VOIDmode;
18307 if (MEM_P (rtl))
18308 {
18309 mem_mode = mode;
18310 mode = get_address_mode (rtl);
18311 rtl = XEXP (rtl, 0);
18312 have_address = 1;
18313 }
18314 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18315 VAR_INIT_STATUS_INITIALIZED);
18316 }
18317 if (!ret)
18318 expansion_failed (loc, rtl,
18319 "failed to produce loc descriptor for rtl");
18320 }
18321 }
18322 break;
18323
18324 case MEM_REF:
18325 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18326 {
18327 have_address = 1;
18328 goto do_plus;
18329 }
18330 /* Fallthru. */
18331 case INDIRECT_REF:
18332 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18333 have_address = 1;
18334 break;
18335
18336 case TARGET_MEM_REF:
18337 case SSA_NAME:
18338 case DEBUG_EXPR_DECL:
18339 return NULL;
18340
18341 case COMPOUND_EXPR:
18342 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18343 context);
18344
18345 CASE_CONVERT:
18346 case VIEW_CONVERT_EXPR:
18347 case SAVE_EXPR:
18348 case MODIFY_EXPR:
18349 case NON_LVALUE_EXPR:
18350 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18351 context);
18352
18353 case COMPONENT_REF:
18354 case BIT_FIELD_REF:
18355 case ARRAY_REF:
18356 case ARRAY_RANGE_REF:
18357 case REALPART_EXPR:
18358 case IMAGPART_EXPR:
18359 {
18360 tree obj, offset;
18361 poly_int64 bitsize, bitpos, bytepos;
18362 machine_mode mode;
18363 int unsignedp, reversep, volatilep = 0;
18364
18365 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18366 &unsignedp, &reversep, &volatilep);
18367
18368 gcc_assert (obj != loc);
18369
18370 list_ret = loc_list_from_tree_1 (obj,
18371 want_address == 2
18372 && known_eq (bitpos, 0)
18373 && !offset ? 2 : 1,
18374 context);
18375 /* TODO: We can extract the value of a small expression via shifting even
18376 for nonzero bitpos. */
18377 if (list_ret == 0)
18378 return 0;
18379 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18380 || !multiple_p (bitsize, BITS_PER_UNIT))
18381 {
18382 expansion_failed (loc, NULL_RTX,
18383 "bitfield access");
18384 return 0;
18385 }
18386
18387 if (offset != NULL_TREE)
18388 {
18389 /* Variable offset. */
18390 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18391 if (list_ret1 == 0)
18392 return 0;
18393 add_loc_list (&list_ret, list_ret1);
18394 if (!list_ret)
18395 return 0;
18396 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18397 }
18398
18399 HOST_WIDE_INT value;
18400 if (bytepos.is_constant (&value) && value > 0)
18401 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18402 value, 0));
18403 else if (maybe_ne (bytepos, 0))
18404 loc_list_plus_const (list_ret, bytepos);
18405
18406 have_address = 1;
18407 break;
18408 }
18409
18410 case INTEGER_CST:
18411 if ((want_address || !tree_fits_shwi_p (loc))
18412 && (ret = cst_pool_loc_descr (loc)))
18413 have_address = 1;
18414 else if (want_address == 2
18415 && tree_fits_shwi_p (loc)
18416 && (ret = address_of_int_loc_descriptor
18417 (int_size_in_bytes (TREE_TYPE (loc)),
18418 tree_to_shwi (loc))))
18419 have_address = 1;
18420 else if (tree_fits_shwi_p (loc))
18421 ret = int_loc_descriptor (tree_to_shwi (loc));
18422 else if (tree_fits_uhwi_p (loc))
18423 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18424 else
18425 {
18426 expansion_failed (loc, NULL_RTX,
18427 "Integer operand is not host integer");
18428 return 0;
18429 }
18430 break;
18431
18432 case CONSTRUCTOR:
18433 case REAL_CST:
18434 case STRING_CST:
18435 case COMPLEX_CST:
18436 if ((ret = cst_pool_loc_descr (loc)))
18437 have_address = 1;
18438 else if (TREE_CODE (loc) == CONSTRUCTOR)
18439 {
18440 tree type = TREE_TYPE (loc);
18441 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18442 unsigned HOST_WIDE_INT offset = 0;
18443 unsigned HOST_WIDE_INT cnt;
18444 constructor_elt *ce;
18445
18446 if (TREE_CODE (type) == RECORD_TYPE)
18447 {
18448 /* This is very limited, but it's enough to output
18449 pointers to member functions, as long as the
18450 referenced function is defined in the current
18451 translation unit. */
18452 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18453 {
18454 tree val = ce->value;
18455
18456 tree field = ce->index;
18457
18458 if (val)
18459 STRIP_NOPS (val);
18460
18461 if (!field || DECL_BIT_FIELD (field))
18462 {
18463 expansion_failed (loc, NULL_RTX,
18464 "bitfield in record type constructor");
18465 size = offset = (unsigned HOST_WIDE_INT)-1;
18466 ret = NULL;
18467 break;
18468 }
18469
18470 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18471 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18472 gcc_assert (pos + fieldsize <= size);
18473 if (pos < offset)
18474 {
18475 expansion_failed (loc, NULL_RTX,
18476 "out-of-order fields in record constructor");
18477 size = offset = (unsigned HOST_WIDE_INT)-1;
18478 ret = NULL;
18479 break;
18480 }
18481 if (pos > offset)
18482 {
18483 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18484 add_loc_descr (&ret, ret1);
18485 offset = pos;
18486 }
18487 if (val && fieldsize != 0)
18488 {
18489 ret1 = loc_descriptor_from_tree (val, want_address, context);
18490 if (!ret1)
18491 {
18492 expansion_failed (loc, NULL_RTX,
18493 "unsupported expression in field");
18494 size = offset = (unsigned HOST_WIDE_INT)-1;
18495 ret = NULL;
18496 break;
18497 }
18498 add_loc_descr (&ret, ret1);
18499 }
18500 if (fieldsize)
18501 {
18502 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18503 add_loc_descr (&ret, ret1);
18504 offset = pos + fieldsize;
18505 }
18506 }
18507
18508 if (offset != size)
18509 {
18510 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18511 add_loc_descr (&ret, ret1);
18512 offset = size;
18513 }
18514
18515 have_address = !!want_address;
18516 }
18517 else
18518 expansion_failed (loc, NULL_RTX,
18519 "constructor of non-record type");
18520 }
18521 else
18522 /* We can construct small constants here using int_loc_descriptor. */
18523 expansion_failed (loc, NULL_RTX,
18524 "constructor or constant not in constant pool");
18525 break;
18526
18527 case TRUTH_AND_EXPR:
18528 case TRUTH_ANDIF_EXPR:
18529 case BIT_AND_EXPR:
18530 op = DW_OP_and;
18531 goto do_binop;
18532
18533 case TRUTH_XOR_EXPR:
18534 case BIT_XOR_EXPR:
18535 op = DW_OP_xor;
18536 goto do_binop;
18537
18538 case TRUTH_OR_EXPR:
18539 case TRUTH_ORIF_EXPR:
18540 case BIT_IOR_EXPR:
18541 op = DW_OP_or;
18542 goto do_binop;
18543
18544 case FLOOR_DIV_EXPR:
18545 case CEIL_DIV_EXPR:
18546 case ROUND_DIV_EXPR:
18547 case TRUNC_DIV_EXPR:
18548 case EXACT_DIV_EXPR:
18549 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18550 return 0;
18551 op = DW_OP_div;
18552 goto do_binop;
18553
18554 case MINUS_EXPR:
18555 op = DW_OP_minus;
18556 goto do_binop;
18557
18558 case FLOOR_MOD_EXPR:
18559 case CEIL_MOD_EXPR:
18560 case ROUND_MOD_EXPR:
18561 case TRUNC_MOD_EXPR:
18562 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18563 {
18564 op = DW_OP_mod;
18565 goto do_binop;
18566 }
18567 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18568 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18569 if (list_ret == 0 || list_ret1 == 0)
18570 return 0;
18571
18572 add_loc_list (&list_ret, list_ret1);
18573 if (list_ret == 0)
18574 return 0;
18575 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18576 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18577 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18578 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18579 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
18580 break;
18581
18582 case MULT_EXPR:
18583 op = DW_OP_mul;
18584 goto do_binop;
18585
18586 case LSHIFT_EXPR:
18587 op = DW_OP_shl;
18588 goto do_binop;
18589
18590 case RSHIFT_EXPR:
18591 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18592 goto do_binop;
18593
18594 case POINTER_PLUS_EXPR:
18595 case PLUS_EXPR:
18596 do_plus:
18597 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18598 {
18599 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18600 smarter to encode their opposite. The DW_OP_plus_uconst operation
18601 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18602 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18603 bytes, Y being the size of the operation that pushes the opposite
18604 of the addend. So let's choose the smallest representation. */
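/* Concrete example (assuming DWARF2_ADDR_SIZE == 8): an addend of -8 is
   seen as 0xfffffffffffffff8, whose ULEB128 encoding takes 10 bytes, so
   DW_OP_plus_uconst would cost 11 bytes, while pushing the opposite with
   DW_OP_lit8 followed by DW_OP_minus costs only 2 bytes.  */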
18605 const tree tree_addend = TREE_OPERAND (loc, 1);
18606 offset_int wi_addend;
18607 HOST_WIDE_INT shwi_addend;
18608 dw_loc_descr_ref loc_naddend;
18609
18610 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18611 if (list_ret == 0)
18612 return 0;
18613
18614 /* Try to get the literal to push. It is the opposite of the addend,
18615 so as we rely on wrapping during DWARF evaluation, first decode
18616 the literal as a "DWARF-sized" signed number. */
18617 wi_addend = wi::to_offset (tree_addend);
18618 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18619 shwi_addend = wi_addend.to_shwi ();
18620 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18621 ? int_loc_descriptor (-shwi_addend)
18622 : NULL;
18623
18624 if (loc_naddend != NULL
18625 && ((unsigned) size_of_uleb128 (shwi_addend)
18626 > size_of_loc_descr (loc_naddend)))
18627 {
18628 add_loc_descr_to_each (list_ret, loc_naddend);
18629 add_loc_descr_to_each (list_ret,
18630 new_loc_descr (DW_OP_minus, 0, 0));
18631 }
18632 else
18633 {
18634 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18635 {
18636 loc_naddend = loc_cur;
18637 loc_cur = loc_cur->dw_loc_next;
18638 ggc_free (loc_naddend);
18639 }
18640 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18641 }
18642 break;
18643 }
18644
18645 op = DW_OP_plus;
18646 goto do_binop;
18647
18648 case LE_EXPR:
18649 op = DW_OP_le;
18650 goto do_comp_binop;
18651
18652 case GE_EXPR:
18653 op = DW_OP_ge;
18654 goto do_comp_binop;
18655
18656 case LT_EXPR:
18657 op = DW_OP_lt;
18658 goto do_comp_binop;
18659
18660 case GT_EXPR:
18661 op = DW_OP_gt;
18662 goto do_comp_binop;
18663
18664 do_comp_binop:
18665 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18666 {
18667 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18668 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18669 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18670 TREE_CODE (loc));
18671 break;
18672 }
18673 else
18674 goto do_binop;
18675
18676 case EQ_EXPR:
18677 op = DW_OP_eq;
18678 goto do_binop;
18679
18680 case NE_EXPR:
18681 op = DW_OP_ne;
18682 goto do_binop;
18683
18684 do_binop:
18685 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18686 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18687 if (list_ret == 0 || list_ret1 == 0)
18688 return 0;
18689
18690 add_loc_list (&list_ret, list_ret1);
18691 if (list_ret == 0)
18692 return 0;
18693 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18694 break;
18695
18696 case TRUTH_NOT_EXPR:
18697 case BIT_NOT_EXPR:
18698 op = DW_OP_not;
18699 goto do_unop;
18700
18701 case ABS_EXPR:
18702 op = DW_OP_abs;
18703 goto do_unop;
18704
18705 case NEGATE_EXPR:
18706 op = DW_OP_neg;
18707 goto do_unop;
18708
18709 do_unop:
18710 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18711 if (list_ret == 0)
18712 return 0;
18713
18714 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18715 break;
18716
18717 case MIN_EXPR:
18718 case MAX_EXPR:
18719 {
18720 const enum tree_code code =
18721 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18722
18723 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18724 build2 (code, integer_type_node,
18725 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18726 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18727 }
18728
18729 /* fall through */
18730
18731 case COND_EXPR:
18732 {
18733 dw_loc_descr_ref lhs
18734 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18735 dw_loc_list_ref rhs
18736 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18737 dw_loc_descr_ref bra_node, jump_node, tmp;
18738
18739 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18740 if (list_ret == 0 || lhs == 0 || rhs == 0)
18741 return 0;
18742
18743 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18744 add_loc_descr_to_each (list_ret, bra_node);
18745
18746 add_loc_list (&list_ret, rhs);
18747 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18748 add_loc_descr_to_each (list_ret, jump_node);
18749
18750 add_loc_descr_to_each (list_ret, lhs);
18751 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18752 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18753
18754 /* ??? Need a node to point the skip at. Use a nop. */
18755 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18756 add_loc_descr_to_each (list_ret, tmp);
18757 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18758 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
18759 }
18760 break;
18761
18762 case FIX_TRUNC_EXPR:
18763 return 0;
18764
18765 default:
18766 /* Leave front-end specific codes as simply unknown. This comes
18767 up, for instance, with the C STMT_EXPR. */
18768 if ((unsigned int) TREE_CODE (loc)
18769 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18770 {
18771 expansion_failed (loc, NULL_RTX,
18772 "language specific tree node");
18773 return 0;
18774 }
18775
18776 /* Otherwise this is a generic code; we should just list all of
18777 these explicitly. We forgot one. */
18778 if (flag_checking)
18779 gcc_unreachable ();
18780
18781 /* In a release build, we want to degrade gracefully: better to
18782 generate incomplete debugging information than to crash. */
18783 return NULL;
18784 }
18785
18786 if (!ret && !list_ret)
18787 return 0;
18788
18789 if (want_address == 2 && !have_address
18790 && (dwarf_version >= 4 || !dwarf_strict))
18791 {
18792 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18793 {
18794 expansion_failed (loc, NULL_RTX,
18795 "DWARF address size mismatch");
18796 return 0;
18797 }
18798 if (ret)
18799 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18800 else
18801 add_loc_descr_to_each (list_ret,
18802 new_loc_descr (DW_OP_stack_value, 0, 0));
18803 have_address = 1;
18804 }
18805 /* Show if we can't fill the request for an address. */
18806 if (want_address && !have_address)
18807 {
18808 expansion_failed (loc, NULL_RTX,
18809 "Want address and only have value");
18810 return 0;
18811 }
18812
18813 gcc_assert (!ret || !list_ret);
18814
18815 /* If we've got an address and don't want one, dereference. */
18816 if (!want_address && have_address)
18817 {
18818 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18819
18820 if (size > DWARF2_ADDR_SIZE || size == -1)
18821 {
18822 expansion_failed (loc, NULL_RTX,
18823 "DWARF address size mismatch");
18824 return 0;
18825 }
18826 else if (size == DWARF2_ADDR_SIZE)
18827 op = DW_OP_deref;
18828 else
18829 op = DW_OP_deref_size;
18830
18831 if (ret)
18832 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18833 else
18834 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18835 }
18836 if (ret)
18837 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
18838
18839 return list_ret;
18840 }
18841
18842 /* Likewise, but strip useless DW_OP_nop operations in the resulting
18843 expressions. */
18844
18845 static dw_loc_list_ref
18846 loc_list_from_tree (tree loc, int want_address,
18847 struct loc_descr_context *context)
18848 {
18849 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
18850
18851 for (dw_loc_list_ref loc_cur = result;
18852 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
18853 loc_descr_without_nops (loc_cur->expr);
18854 return result;
18855 }
18856
18857 /* Same as above but return only a single location expression. */
18858 static dw_loc_descr_ref
18859 loc_descriptor_from_tree (tree loc, int want_address,
18860 struct loc_descr_context *context)
18861 {
18862 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
18863 if (!ret)
18864 return NULL;
18865 if (ret->dw_loc_next)
18866 {
18867 expansion_failed (loc, NULL_RTX,
18868 "Location list where only loc descriptor needed");
18869 return NULL;
18870 }
18871 return ret->expr;
18872 }
18873
18874 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
18875 pointer to the declared type for the relevant field variable, or return
18876 `integer_type_node' if the given node turns out to be an
18877 ERROR_MARK node. */
18878
18879 static inline tree
18880 field_type (const_tree decl)
18881 {
18882 tree type;
18883
18884 if (TREE_CODE (decl) == ERROR_MARK)
18885 return integer_type_node;
18886
18887 type = DECL_BIT_FIELD_TYPE (decl);
18888 if (type == NULL_TREE)
18889 type = TREE_TYPE (decl);
18890
18891 return type;
18892 }
18893
18894 /* Given a pointer to a tree node, return the alignment in bits for
18895 it, or else return BITS_PER_WORD if the node actually turns out to
18896 be an ERROR_MARK node. */
18897
18898 static inline unsigned
18899 simple_type_align_in_bits (const_tree type)
18900 {
18901 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
18902 }
18903
18904 static inline unsigned
18905 simple_decl_align_in_bits (const_tree decl)
18906 {
18907 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
18908 }
18909
18910 /* Return the result of rounding T up to ALIGN. */
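/* For instance, rounding T == 37 up to ALIGN == 8 computes
   ((37 + 8 - 1) / 8) * 8 == 40 (illustrative values only).  */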
18911
18912 static inline offset_int
18913 round_up_to_align (const offset_int &t, unsigned int align)
18914 {
18915 return wi::udiv_trunc (t + align - 1, align) * align;
18916 }
18917
18918 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
18919 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
18920 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
18921 if we fail to return the size in one of these two forms. */
18922
18923 static dw_loc_descr_ref
18924 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
18925 {
18926 tree tree_size;
18927 struct loc_descr_context ctx;
18928
18929 /* Return a constant integer in preference, if possible. */
18930 *cst_size = int_size_in_bytes (type);
18931 if (*cst_size != -1)
18932 return NULL;
18933
18934 ctx.context_type = const_cast<tree> (type);
18935 ctx.base_decl = NULL_TREE;
18936 ctx.dpi = NULL;
18937 ctx.placeholder_arg = false;
18938 ctx.placeholder_seen = false;
18939
18940 type = TYPE_MAIN_VARIANT (type);
18941 tree_size = TYPE_SIZE_UNIT (type);
18942 return ((tree_size != NULL_TREE)
18943 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
18944 : NULL);
18945 }
18946
18947 /* Helper structure for RECORD_TYPE processing. */
18948 struct vlr_context
18949 {
18950 /* Root RECORD_TYPE. It is needed to generate data member location
18951 descriptions in variable-length records (VLR), but also to cope with
18952 variants, which are composed of nested structures multiplexed with
18953 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
18954 function processing a FIELD_DECL, it is required to be non null. */
18955 tree struct_type;
18956 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
18957 QUAL_UNION_TYPE), this holds an expression that computes the offset for
18958 this variant part as part of the root record (in storage units). For
18959 regular records, it must be NULL_TREE. */
18960 tree variant_part_offset;
18961 };
18962
18963 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
18964 addressed byte of the "containing object" for the given FIELD_DECL. If
18965 possible, return a native constant through CST_OFFSET (in which case NULL is
18966 returned); otherwise return a DWARF expression that computes the offset.
18967
18968 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
18969 that offset is, either because the argument turns out to be a pointer to an
18970 ERROR_MARK node, or because the offset expression is too complex for us.
18971
18972 CTX is required: see the comment for VLR_CONTEXT. */
18973
18974 static dw_loc_descr_ref
18975 field_byte_offset (const_tree decl, struct vlr_context *ctx,
18976 HOST_WIDE_INT *cst_offset)
18977 {
18978 tree tree_result;
18979 dw_loc_list_ref loc_result;
18980
18981 *cst_offset = 0;
18982
18983 if (TREE_CODE (decl) == ERROR_MARK)
18984 return NULL;
18985 else
18986 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
18987
18988 /* We cannot handle variable bit offsets at the moment, so abort if it's the
18989 case. */
18990 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
18991 return NULL;
18992
18993 /* We used to handle only constant offsets in all cases. Now, we handle
18994 dynamic byte offsets properly only when PCC bitfield type doesn't
18995 matter. */
18996 if (PCC_BITFIELD_TYPE_MATTERS
18997 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
18998 {
18999 offset_int object_offset_in_bits;
19000 offset_int object_offset_in_bytes;
19001 offset_int bitpos_int;
19002 tree type;
19003 tree field_size_tree;
19004 offset_int deepest_bitpos;
19005 offset_int field_size_in_bits;
19006 unsigned int type_align_in_bits;
19007 unsigned int decl_align_in_bits;
19008 offset_int type_size_in_bits;
19009
19010 bitpos_int = wi::to_offset (bit_position (decl));
19011 type = field_type (decl);
19012 type_size_in_bits = offset_int_type_size_in_bits (type);
19013 type_align_in_bits = simple_type_align_in_bits (type);
19014
19015 field_size_tree = DECL_SIZE (decl);
19016
19017 /* The size could be unspecified if there was an error, or for
19018 a flexible array member. */
19019 if (!field_size_tree)
19020 field_size_tree = bitsize_zero_node;
19021
19022 /* If the size of the field is not constant, use the type size. */
19023 if (TREE_CODE (field_size_tree) == INTEGER_CST)
19024 field_size_in_bits = wi::to_offset (field_size_tree);
19025 else
19026 field_size_in_bits = type_size_in_bits;
19027
19028 decl_align_in_bits = simple_decl_align_in_bits (decl);
19029
19030 /* The GCC front-end doesn't make any attempt to keep track of the
19031 starting bit offset (relative to the start of the containing
19032 structure type) of the hypothetical "containing object" for a
19033 bit-field. Thus, when computing the byte offset value for the
19034 start of the "containing object" of a bit-field, we must deduce
19035 this information on our own. This can be rather tricky to do in
19036 some cases. For example, handling the following structure type
19037 definition when compiling for an i386/i486 target (which only
19038 aligns long long's to 32-bit boundaries) can be very tricky:
19039
19040 struct S { int field1; long long field2:31; };
19041
19042 Fortunately, there is a simple rule-of-thumb which can be used
19043 in such cases. When compiling for an i386/i486, GCC will
19044 allocate 8 bytes for the structure shown above. It decides to
19045 do this based upon one simple rule for bit-field allocation.
19046 GCC allocates each "containing object" for each bit-field at
19047 the first (i.e. lowest addressed) legitimate alignment boundary
19048 (based upon the required minimum alignment for the declared
19049 type of the field) which it can possibly use, subject to the
19050 condition that there is still enough available space remaining
19051 in the containing object (when allocated at the selected point)
19052 to fully accommodate all of the bits of the bit-field itself.
19053
19054 This simple rule makes it obvious why GCC allocates 8 bytes for
19055 each object of the structure type shown above. When looking
19056 for a place to allocate the "containing object" for `field2',
19057 the compiler simply tries to allocate a 64-bit "containing
19058 object" at each successive 32-bit boundary (starting at zero)
19059 until it finds a place to allocate that 64-bit field such that
19060 at least 31 contiguous (and previously unallocated) bits remain
19061 within that selected 64 bit field. (As it turns out, for the
19062 example above, the compiler finds it is OK to allocate the
19063 "containing object" 64-bit field at bit-offset zero within the
19064 structure type.)
19065
19066 Here we attempt to work backwards from the limited set of facts
19067 we're given, and we try to deduce from those facts, where GCC
19068 must have believed that the containing object started (within
19069 the structure type). The value we deduce is then used (by the
19070 callers of this routine) to generate DW_AT_location and
19071 DW_AT_bit_offset attributes for fields (both bit-fields and, in
19072 the case of DW_AT_location, regular fields as well). */
19073
19074 /* Figure out the bit-distance from the start of the structure to
19075 the "deepest" bit of the bit-field. */
19076 deepest_bitpos = bitpos_int + field_size_in_bits;
19077
19078 /* This is the tricky part. Use some fancy footwork to deduce
19079 where the lowest addressed bit of the containing object must
19080 be. */
19081 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19082
19083 /* Round up to type_align by default. This works best for
19084 bitfields. */
19085 object_offset_in_bits
19086 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
19087
19088 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
19089 {
19090 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19091
19092 /* Round up to decl_align instead. */
19093 object_offset_in_bits
19094 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
19095 }
19096
19097 object_offset_in_bytes
19098 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
19099 if (ctx->variant_part_offset == NULL_TREE)
19100 {
19101 *cst_offset = object_offset_in_bytes.to_shwi ();
19102 return NULL;
19103 }
19104 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
19105 }
19106 else
19107 tree_result = byte_position (decl);
19108
19109 if (ctx->variant_part_offset != NULL_TREE)
19110 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19111 ctx->variant_part_offset, tree_result);
19112
19113 /* If the byte offset is a constant, it's simpler to handle a native
19114 constant rather than a DWARF expression. */
19115 if (TREE_CODE (tree_result) == INTEGER_CST)
19116 {
19117 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19118 return NULL;
19119 }
19120 struct loc_descr_context loc_ctx = {
19121 ctx->struct_type, /* context_type */
19122 NULL_TREE, /* base_decl */
19123 NULL, /* dpi */
19124 false, /* placeholder_arg */
19125 false /* placeholder_seen */
19126 };
19127 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19128
19129 /* We want a DWARF expression: abort if we only have a location list with
19130 multiple elements. */
19131 if (!loc_result || !single_element_loc_list_p (loc_result))
19132 return NULL;
19133 else
19134 return loc_result->expr;
19135 }
19136 \f
19137 /* The following routines define various Dwarf attributes and any data
19138 associated with them. */
19139
19140 /* Add a location description attribute value to a DIE.
19141
19142 This emits location attributes suitable for whole variables and
19143 whole parameters. Note that the location attributes for struct fields are
19144 generated by the routine `data_member_location_attribute' below. */
19145
19146 static inline void
19147 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19148 dw_loc_list_ref descr)
19149 {
19150 bool check_no_locviews = true;
19151 if (descr == 0)
19152 return;
19153 if (single_element_loc_list_p (descr))
19154 add_AT_loc (die, attr_kind, descr->expr);
19155 else
19156 {
19157 add_AT_loc_list (die, attr_kind, descr);
19158 gcc_assert (descr->ll_symbol);
19159 if (attr_kind == DW_AT_location && descr->vl_symbol
19160 && dwarf2out_locviews_in_attribute ())
19161 {
19162 add_AT_view_list (die, DW_AT_GNU_locviews);
19163 check_no_locviews = false;
19164 }
19165 }
19166
19167 if (check_no_locviews)
19168 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19169 }
19170
19171 /* Add DW_AT_accessibility attribute to DIE if needed. */
19172
19173 static void
19174 add_accessibility_attribute (dw_die_ref die, tree decl)
19175 {
19176 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19177 children, otherwise the default is DW_ACCESS_public. In DWARF2
19178 the default has always been DW_ACCESS_public. */
19179 if (TREE_PROTECTED (decl))
19180 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19181 else if (TREE_PRIVATE (decl))
19182 {
19183 if (dwarf_version == 2
19184 || die->die_parent == NULL
19185 || die->die_parent->die_tag != DW_TAG_class_type)
19186 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19187 }
19188 else if (dwarf_version > 2
19189 && die->die_parent
19190 && die->die_parent->die_tag == DW_TAG_class_type)
19191 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19192 }
19193
19194 /* Attach the specialized form of location attribute used for data members of
19195 struct and union types. In the special case of a FIELD_DECL node which
19196 represents a bit-field, the "offset" part of this special location
19197 descriptor must indicate the distance in bytes from the lowest-addressed
19198 byte of the containing struct or union type to the lowest-addressed byte of
19199 the "containing object" for the bit-field. (See the `field_byte_offset'
19200 function above).
19201
19202 For any given bit-field, the "containing object" is a hypothetical object
19203 (of some integral or enum type) within which the given bit-field lives. The
19204 type of this hypothetical "containing object" is always the same as the
19205 declared type of the individual bit-field itself (for GCC anyway... the
19206 DWARF spec doesn't actually mandate this). Note that it is the size (in
19207 bytes) of the hypothetical "containing object" which will be given in the
19208 DW_AT_byte_size attribute for this bit-field. (See the
19209 `byte_size_attribute' function below.) It is also used when calculating the
19210 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
19211 function below.)
19212
19213 CTX is required: see the comment for VLR_CONTEXT. */
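/* Purely for illustration (exact sizes depend on the target ABI): given

     struct S { char c; int bf : 3; };

   the hypothetical "containing object" for BF has type int, so DW_AT_byte_size
   for BF is sizeof (int), and the byte offset described here points at that
   int-sized object rather than at the three bits themselves.  */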
19214
19215 static void
19216 add_data_member_location_attribute (dw_die_ref die,
19217 tree decl,
19218 struct vlr_context *ctx)
19219 {
19220 HOST_WIDE_INT offset;
19221 dw_loc_descr_ref loc_descr = 0;
19222
19223 if (TREE_CODE (decl) == TREE_BINFO)
19224 {
19225 /* We're working on the TAG_inheritance for a base class. */
19226 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19227 {
19228 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19229 aren't at a fixed offset from all (sub)objects of the same
19230 type. We need to extract the appropriate offset from our
19231 vtable. The following dwarf expression means
19232
19233 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19234
19235 This is specific to the V3 ABI, of course. */
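  /* Illustration only: the expression built below is roughly

       DW_OP_dup; DW_OP_deref; DW_OP_constu <-offset>;
       DW_OP_minus; DW_OP_deref; DW_OP_plus

     where <-offset> is the negated BINFO_VPTR_FIELD value;
     int_loc_descriptor may pick a shorter literal form such as
     DW_OP_lit<n> for small values.  */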
19236
19237 dw_loc_descr_ref tmp;
19238
19239 /* Make a copy of the object address. */
19240 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19241 add_loc_descr (&loc_descr, tmp);
19242
19243 /* Extract the vtable address. */
19244 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19245 add_loc_descr (&loc_descr, tmp);
19246
19247 /* Calculate the address of the offset. */
19248 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19249 gcc_assert (offset < 0);
19250
19251 tmp = int_loc_descriptor (-offset);
19252 add_loc_descr (&loc_descr, tmp);
19253 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19254 add_loc_descr (&loc_descr, tmp);
19255
19256 /* Extract the offset. */
19257 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19258 add_loc_descr (&loc_descr, tmp);
19259
19260 /* Add it to the object address. */
19261 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19262 add_loc_descr (&loc_descr, tmp);
19263 }
19264 else
19265 offset = tree_to_shwi (BINFO_OFFSET (decl));
19266 }
19267 else
19268 {
19269 loc_descr = field_byte_offset (decl, ctx, &offset);
19270
19271 /* If loc_descr is available then we know the field offset is dynamic.
19272 However, GDB does not handle dynamic field offsets very well at the
19273 moment. */
19274 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19275 {
19276 loc_descr = NULL;
19277 offset = 0;
19278 }
19279
19280 /* Data member location evaluation starts with the base address on the
19281 stack. Compute the field offset and add it to this base address. */
19282 else if (loc_descr != NULL)
19283 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19284 }
19285
19286 if (! loc_descr)
19287 {
19288 /* While DW_AT_data_bit_offset was already added in DWARF4, GDB, for
19289 example, only added support for it in November 2016. For DWARF5
19290 we need newer debug info consumers anyway. We might change this
19291 to dwarf_version >= 4 once most consumers have caught up. */
19292 if (dwarf_version >= 5
19293 && TREE_CODE (decl) == FIELD_DECL
19294 && DECL_BIT_FIELD_TYPE (decl))
19295 {
19296 tree off = bit_position (decl);
19297 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19298 {
19299 remove_AT (die, DW_AT_byte_size);
19300 remove_AT (die, DW_AT_bit_offset);
19301 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19302 return;
19303 }
19304 }
19305 if (dwarf_version > 2)
19306 {
19307 /* Don't need to output a location expression, just the constant. */
19308 if (offset < 0)
19309 add_AT_int (die, DW_AT_data_member_location, offset);
19310 else
19311 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19312 return;
19313 }
19314 else
19315 {
19316 enum dwarf_location_atom op;
19317
19318 /* The DWARF2 standard says that we should assume that the structure
19319 address is already on the stack, so we can specify a structure
19320 field address by using DW_OP_plus_uconst. */
19321 op = DW_OP_plus_uconst;
19322 loc_descr = new_loc_descr (op, offset, 0);
19323 }
19324 }
19325
19326 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19327 }
19328
19329 /* Writes integer values to dw_vec_const array. */
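/* For example, insert_int (0x1234, 2, dest) stores dest[0] = 0x34 and
   dest[1] = 0x12: bytes are always written least-significant first,
   independently of host byte order; word ordering for wider values is
   handled by callers such as insert_wide_int below.  */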
19330
19331 static void
19332 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19333 {
19334 while (size != 0)
19335 {
19336 *dest++ = val & 0xff;
19337 val >>= 8;
19338 --size;
19339 }
19340 }
19341
19342 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19343
19344 static HOST_WIDE_INT
19345 extract_int (const unsigned char *src, unsigned int size)
19346 {
19347 HOST_WIDE_INT val = 0;
19348
19349 src += size;
19350 while (size != 0)
19351 {
19352 val <<= 8;
19353 val |= *--src & 0xff;
19354 --size;
19355 }
19356 return val;
19357 }
19358
19359 /* Writes wide_int values to dw_vec_const array. */
19360
19361 static void
19362 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19363 {
19364 int i;
19365
19366 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
19367 {
19368 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19369 return;
19370 }
19371
19372 /* We'd have to extend this code to support odd sizes. */
19373 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19374
19375 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19376
19377 if (WORDS_BIG_ENDIAN)
19378 for (i = n - 1; i >= 0; i--)
19379 {
19380 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19381 dest += sizeof (HOST_WIDE_INT);
19382 }
19383 else
19384 for (i = 0; i < n; i++)
19385 {
19386 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19387 dest += sizeof (HOST_WIDE_INT);
19388 }
19389 }
19390
19391 /* Writes floating point values to dw_vec_const array. */
19392
19393 static void
19394 insert_float (const_rtx rtl, unsigned char *array)
19395 {
19396 long val[4];
19397 int i;
19398 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19399
19400 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19401
19402 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19403 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19404 {
19405 insert_int (val[i], 4, array);
19406 array += 4;
19407 }
19408 }
19409
19410 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19411 does not have a "location" either in memory or in a register. These
19412 things can arise in GNU C when a constant is passed as an actual parameter
19413 to an inlined function. They can also arise in C++ where declared
19414 constants do not necessarily get memory "homes". */
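/* For example, if "inline int f (int x) { ... }" is inlined for the call
   f (5) and X ends up with no location, X's DIE can still carry
   DW_AT_const_value 5 through the CONST_INT case below.  */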
19415
19416 static bool
19417 add_const_value_attribute (dw_die_ref die, rtx rtl)
19418 {
19419 switch (GET_CODE (rtl))
19420 {
19421 case CONST_INT:
19422 {
19423 HOST_WIDE_INT val = INTVAL (rtl);
19424
19425 if (val < 0)
19426 add_AT_int (die, DW_AT_const_value, val);
19427 else
19428 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19429 }
19430 return true;
19431
19432 case CONST_WIDE_INT:
19433 {
19434 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19435 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19436 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19437 wide_int w = wi::zext (w1, prec);
19438 add_AT_wide (die, DW_AT_const_value, w);
19439 }
19440 return true;
19441
19442 case CONST_DOUBLE:
19443 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19444 floating-point constant. A CONST_DOUBLE is used whenever the
19445 constant requires more than one word in order to be adequately
19446 represented. */
19447 if (TARGET_SUPPORTS_WIDE_INT == 0
19448 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19449 add_AT_double (die, DW_AT_const_value,
19450 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19451 else
19452 {
19453 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19454 unsigned int length = GET_MODE_SIZE (mode);
19455 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19456
19457 insert_float (rtl, array);
19458 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19459 }
19460 return true;
19461
19462 case CONST_VECTOR:
19463 {
19464 unsigned int length;
19465 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19466 return false;
19467
19468 machine_mode mode = GET_MODE (rtl);
19469 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19470 unsigned char *array
19471 = ggc_vec_alloc<unsigned char> (length * elt_size);
19472 unsigned int i;
19473 unsigned char *p;
19474 machine_mode imode = GET_MODE_INNER (mode);
19475
19476 switch (GET_MODE_CLASS (mode))
19477 {
19478 case MODE_VECTOR_INT:
19479 for (i = 0, p = array; i < length; i++, p += elt_size)
19480 {
19481 rtx elt = CONST_VECTOR_ELT (rtl, i);
19482 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19483 }
19484 break;
19485
19486 case MODE_VECTOR_FLOAT:
19487 for (i = 0, p = array; i < length; i++, p += elt_size)
19488 {
19489 rtx elt = CONST_VECTOR_ELT (rtl, i);
19490 insert_float (elt, p);
19491 }
19492 break;
19493
19494 default:
19495 gcc_unreachable ();
19496 }
19497
19498 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19499 }
19500 return true;
19501
19502 case CONST_STRING:
19503 if (dwarf_version >= 4 || !dwarf_strict)
19504 {
19505 dw_loc_descr_ref loc_result;
19506 resolve_one_addr (&rtl);
19507 rtl_addr:
19508 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19509 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19510 add_AT_loc (die, DW_AT_location, loc_result);
19511 vec_safe_push (used_rtx_array, rtl);
19512 return true;
19513 }
19514 return false;
19515
19516 case CONST:
19517 if (CONSTANT_P (XEXP (rtl, 0)))
19518 return add_const_value_attribute (die, XEXP (rtl, 0));
19519 /* FALLTHROUGH */
19520 case SYMBOL_REF:
19521 if (!const_ok_for_output (rtl))
19522 return false;
19523 /* FALLTHROUGH */
19524 case LABEL_REF:
19525 if (dwarf_version >= 4 || !dwarf_strict)
19526 goto rtl_addr;
19527 return false;
19528
19529 case PLUS:
19530 /* In cases where an inlined instance of an inline function is passed
19531 the address of an `auto' variable (which is local to the caller) we
19532 can get a situation where the DECL_RTL of the artificial local
19533 variable (for the inlining) which acts as a stand-in for the
19534 corresponding formal parameter (of the inline function) will look
19535 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19536 exactly a compile-time constant expression, but it isn't the address
19537 of the (artificial) local variable either. Rather, it represents the
19538 *value* which the artificial local variable always has during its
19539 lifetime. We currently have no way to represent such quasi-constant
19540 values in Dwarf, so for now we just punt and generate nothing. */
19541 return false;
19542
19543 case HIGH:
19544 case CONST_FIXED:
19545 return false;
19546
19547 case MEM:
19548 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19549 && MEM_READONLY_P (rtl)
19550 && GET_MODE (rtl) == BLKmode)
19551 {
19552 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19553 return true;
19554 }
19555 return false;
19556
19557 default:
19558 /* No other kinds of rtx should be possible here. */
19559 gcc_unreachable ();
19560 }
19561 return false;
19562 }
19563
19564 /* Determine whether the evaluation of EXPR references any variables
19565 or functions which aren't otherwise used (and therefore may not be
19566 output). */
19567 static tree
19568 reference_to_unused (tree * tp, int * walk_subtrees,
19569 void * data ATTRIBUTE_UNUSED)
19570 {
19571 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19572 *walk_subtrees = 0;
19573
19574 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19575 && ! TREE_ASM_WRITTEN (*tp))
19576 return *tp;
19577 /* ??? The C++ FE emits debug information for using decls, so
19578 putting gcc_unreachable here falls over. See PR31899. For now
19579 be conservative. */
19580 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
19581 return *tp;
19582 else if (VAR_P (*tp))
19583 {
19584 varpool_node *node = varpool_node::get (*tp);
19585 if (!node || !node->definition)
19586 return *tp;
19587 }
19588 else if (TREE_CODE (*tp) == FUNCTION_DECL
19589 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19590 {
19591 /* The call graph machinery must have finished analyzing,
19592 optimizing and gimplifying the CU by now.
19593 So if *TP has no call graph node associated
19594 to it, it means *TP will not be emitted. */
19595 if (!cgraph_node::get (*tp))
19596 return *tp;
19597 }
19598 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19599 return *tp;
19600
19601 return NULL_TREE;
19602 }
19603
19604 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19605 for use in a later add_const_value_attribute call. */
19606
19607 static rtx
19608 rtl_for_decl_init (tree init, tree type)
19609 {
19610 rtx rtl = NULL_RTX;
19611
19612 STRIP_NOPS (init);
19613
19614 /* If a variable is initialized with a string constant without embedded
19615 zeros, build CONST_STRING. */
19616 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19617 {
19618 tree enttype = TREE_TYPE (type);
19619 tree domain = TYPE_DOMAIN (type);
19620 scalar_int_mode mode;
19621
19622 if (is_int_mode (TYPE_MODE (enttype), &mode)
19623 && GET_MODE_SIZE (mode) == 1
19624 && domain
19625 && TYPE_MAX_VALUE (domain)
19626 && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
19627 && integer_zerop (TYPE_MIN_VALUE (domain))
19628 && compare_tree_int (TYPE_MAX_VALUE (domain),
19629 TREE_STRING_LENGTH (init) - 1) == 0
19630 && ((size_t) TREE_STRING_LENGTH (init)
19631 == strlen (TREE_STRING_POINTER (init)) + 1))
19632 {
19633 rtl = gen_rtx_CONST_STRING (VOIDmode,
19634 ggc_strdup (TREE_STRING_POINTER (init)));
19635 rtl = gen_rtx_MEM (BLKmode, rtl);
19636 MEM_READONLY_P (rtl) = 1;
19637 }
19638 }
19639 /* Other aggregates, and complex values, could be represented using
19640 CONCAT: FIXME! */
19641 else if (AGGREGATE_TYPE_P (type)
19642 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19643 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19644 || TREE_CODE (type) == COMPLEX_TYPE)
19645 ;
19646 /* Vectors only work if their mode is supported by the target.
19647 FIXME: generic vectors ought to work too. */
19648 else if (TREE_CODE (type) == VECTOR_TYPE
19649 && !VECTOR_MODE_P (TYPE_MODE (type)))
19650 ;
19651 /* If the initializer is something that we know will expand into an
19652 immediate RTL constant, expand it now. We must be careful not to
19653 reference variables which won't be output. */
19654 else if (initializer_constant_valid_p (init, type)
19655 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19656 {
19657 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19658 possible. */
19659 if (TREE_CODE (type) == VECTOR_TYPE)
19660 switch (TREE_CODE (init))
19661 {
19662 case VECTOR_CST:
19663 break;
19664 case CONSTRUCTOR:
19665 if (TREE_CONSTANT (init))
19666 {
19667 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19668 bool constant_p = true;
19669 tree value;
19670 unsigned HOST_WIDE_INT ix;
19671
19672 /* Even when ctor is constant, it might contain non-*_CST
19673 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19674 belong into VECTOR_CST nodes. */
19675 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19676 if (!CONSTANT_CLASS_P (value))
19677 {
19678 constant_p = false;
19679 break;
19680 }
19681
19682 if (constant_p)
19683 {
19684 init = build_vector_from_ctor (type, elts);
19685 break;
19686 }
19687 }
19688 /* FALLTHRU */
19689
19690 default:
19691 return NULL;
19692 }
19693
19694 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19695
19696 /* If expand_expr returns a MEM, it wasn't immediate. */
19697 gcc_assert (!rtl || !MEM_P (rtl));
19698 }
19699
19700 return rtl;
19701 }
19702
19703 /* Generate RTL for the variable DECL to represent its location. */
19704
19705 static rtx
19706 rtl_for_decl_location (tree decl)
19707 {
19708 rtx rtl;
19709
19710 /* Here we have to decide where we are going to say the parameter "lives"
19711 (as far as the debugger is concerned). We only have a couple of
19712 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19713
19714 DECL_RTL normally indicates where the parameter lives during most of the
19715 activation of the function. If optimization is enabled however, this
19716 could be either NULL or else a pseudo-reg. Both of those cases indicate
19717 that the parameter doesn't really live anywhere (as far as the code
19718 generation parts of GCC are concerned) during most of the function's
19719 activation. That will happen (for example) if the parameter is never
19720 referenced within the function.
19721
19722 We could just generate a location descriptor here for all non-NULL
19723 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19724 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19725 where DECL_RTL is NULL or is a pseudo-reg.
19726
19727 Note however that we can only get away with using DECL_INCOMING_RTL as
19728 a backup substitute for DECL_RTL in certain limited cases. In cases
19729 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19730 we can be sure that the parameter was passed using the same type as it is
19731 declared to have within the function, and that its DECL_INCOMING_RTL
19732 points us to a place where a value of that type is passed.
19733
19734 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19735 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19736 because in these cases DECL_INCOMING_RTL points us to a value of some
19737 type which is *different* from the type of the parameter itself. Thus,
19738 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19739 such cases, the debugger would end up (for example) trying to fetch a
19740 `float' from a place which actually contains the first part of a
19741 `double'. That would lead to really incorrect and confusing
19742 output at debug-time.
19743
19744 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19745 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19746 are a couple of exceptions however. On little-endian machines we can
19747 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19748 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19749 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19750 when (on a little-endian machine) a non-prototyped function has a
19751 parameter declared to be of type `short' or `char'. In such cases,
19752 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19753 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19754 passed `int' value. If the debugger then uses that address to fetch
19755 a `short' or a `char' (on a little-endian machine) the result will be
19756 the correct data, so we allow for such exceptional cases below.
19757
19758 Note that our goal here is to describe the place where the given formal
19759 parameter lives during most of the function's activation (i.e. between the
19760 end of the prologue and the start of the epilogue). We'll do that as best
19761 as we can. Note however that if the given formal parameter is modified
19762 sometime during the execution of the function, then a stack backtrace (at
19763 debug-time) will show the function as having been called with the *new*
19764 value rather than the value which was originally passed in. This happens
19765 rarely enough that it is not a major problem, but it *is* a problem, and
19766 I'd like to fix it.
19767
19768 A future version of dwarf2out.c may generate two additional attributes for
19769 any given DW_TAG_formal_parameter DIE which will describe the "passed
19770 type" and the "passed location" for the given formal parameter in addition
19771 to the attributes we now generate to indicate the "declared type" and the
19772 "active location" for each parameter. This additional set of attributes
19773 could be used by debuggers for stack backtraces. Separately, note that
19774 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19775 This happens (for example) for inlined-instances of inline function formal
19776 parameters which are never referenced. This really shouldn't be
19777 happening. All PARM_DECL nodes should get valid non-NULL
19778 DECL_INCOMING_RTL values. FIXME. */
19779
19780 /* Use DECL_RTL as the "location" unless we find something better. */
19781 rtl = DECL_RTL_IF_SET (decl);
19782
19783 /* When generating abstract instances, ignore everything except
19784 constants, symbols living in memory, and symbols living in
19785 fixed registers. */
19786 if (! reload_completed)
19787 {
19788 if (rtl
19789 && (CONSTANT_P (rtl)
19790 || (MEM_P (rtl)
19791 && CONSTANT_P (XEXP (rtl, 0)))
19792 || (REG_P (rtl)
19793 && VAR_P (decl)
19794 && TREE_STATIC (decl))))
19795 {
19796 rtl = targetm.delegitimize_address (rtl);
19797 return rtl;
19798 }
19799 rtl = NULL_RTX;
19800 }
19801 else if (TREE_CODE (decl) == PARM_DECL)
19802 {
19803 if (rtl == NULL_RTX
19804 || is_pseudo_reg (rtl)
19805 || (MEM_P (rtl)
19806 && is_pseudo_reg (XEXP (rtl, 0))
19807 && DECL_INCOMING_RTL (decl)
19808 && MEM_P (DECL_INCOMING_RTL (decl))
19809 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19810 {
19811 tree declared_type = TREE_TYPE (decl);
19812 tree passed_type = DECL_ARG_TYPE (decl);
19813 machine_mode dmode = TYPE_MODE (declared_type);
19814 machine_mode pmode = TYPE_MODE (passed_type);
19815
19816 /* This decl represents a formal parameter which was optimized out.
19817 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19818 all cases where (rtl == NULL_RTX) just below. */
19819 if (dmode == pmode)
19820 rtl = DECL_INCOMING_RTL (decl);
19821 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19822 && SCALAR_INT_MODE_P (dmode)
19823 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
19824 && DECL_INCOMING_RTL (decl))
19825 {
19826 rtx inc = DECL_INCOMING_RTL (decl);
19827 if (REG_P (inc))
19828 rtl = inc;
19829 else if (MEM_P (inc))
19830 {
19831 if (BYTES_BIG_ENDIAN)
19832 rtl = adjust_address_nv (inc, dmode,
19833 GET_MODE_SIZE (pmode)
19834 - GET_MODE_SIZE (dmode));
19835 else
19836 rtl = inc;
19837 }
19838 }
19839 }
19840
19841 /* If the parm was passed in registers, but lives on the stack, then
19842 make a big endian correction if the mode of the type of the
19843 parameter is not the same as the mode of the rtl. */
19844 /* ??? This is the same series of checks that are made in dbxout.c before
19845 we reach the big endian correction code there. It isn't clear if all
19846 of these checks are necessary here, but keeping them all is the safe
19847 thing to do. */
19848 else if (MEM_P (rtl)
19849 && XEXP (rtl, 0) != const0_rtx
19850 && ! CONSTANT_P (XEXP (rtl, 0))
19851 /* Not passed in memory. */
19852 && !MEM_P (DECL_INCOMING_RTL (decl))
19853 /* Not passed by invisible reference. */
19854 && (!REG_P (XEXP (rtl, 0))
19855 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
19856 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
19857 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
19858 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
19859 #endif
19860 )
19861 /* Big endian correction check. */
19862 && BYTES_BIG_ENDIAN
19863 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
19864 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
19865 UNITS_PER_WORD))
19866 {
19867 machine_mode addr_mode = get_address_mode (rtl);
19868 poly_int64 offset = (UNITS_PER_WORD
19869 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
19870
19871 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19872 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19873 }
19874 }
19875 else if (VAR_P (decl)
19876 && rtl
19877 && MEM_P (rtl)
19878 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
19879 {
19880 machine_mode addr_mode = get_address_mode (rtl);
19881 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
19882 GET_MODE (rtl));
19883
19884 /* If a variable is declared "register" yet is smaller than
19885 a register, then if we store the variable to memory, it
19886 looks like we're storing a register-sized value, when in
19887 fact we are not. We need to adjust the offset of the
19888 storage location to reflect the actual value's bytes,
19889 else gdb will not be able to display it. */
19890 if (maybe_ne (offset, 0))
19891 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19892 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19893 }
19894
19895 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
19896 and will have been substituted directly into all expressions that use it.
19897 C does not have such a concept, but C++ and other languages do. */
19898 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
19899 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
19900
19901 if (rtl)
19902 rtl = targetm.delegitimize_address (rtl);
19903
19904 /* If we don't look past the constant pool, we risk emitting a
19905 reference to a constant pool entry that isn't referenced from
19906 code, and thus is not emitted. */
19907 if (rtl)
19908 rtl = avoid_constant_pool_reference (rtl);
19909
19910 /* Try harder to get a rtl. If this symbol ends up not being emitted
19911 in the current CU, resolve_addr will remove the expression referencing
19912 it. */
19913 if (rtl == NULL_RTX
19914 && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
19915 && VAR_P (decl)
19916 && !DECL_EXTERNAL (decl)
19917 && TREE_STATIC (decl)
19918 && DECL_NAME (decl)
19919 && !DECL_HARD_REGISTER (decl)
19920 && DECL_MODE (decl) != VOIDmode)
19921 {
19922 rtl = make_decl_rtl_for_debug (decl);
19923 if (!MEM_P (rtl)
19924 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
19925 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
19926 rtl = NULL_RTX;
19927 }
19928
19929 return rtl;
19930 }
19931
19932 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
19933 returned. If so, the decl for the COMMON block is returned, and the
19934 value is the offset into the common block for the symbol. */
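/* For example, given "COMMON /blk/ x, y", the Fortran FE gives Y a
   DECL_VALUE_EXPR of the form blk.y; this function then returns the decl
   for BLK and sets *VALUE to Y's byte offset within the block.  */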
19935
19936 static tree
19937 fortran_common (tree decl, HOST_WIDE_INT *value)
19938 {
19939 tree val_expr, cvar;
19940 machine_mode mode;
19941 poly_int64 bitsize, bitpos;
19942 tree offset;
19943 HOST_WIDE_INT cbitpos;
19944 int unsignedp, reversep, volatilep = 0;
19945
19946 /* If the decl isn't a VAR_DECL, or if it isn't static, or if it
19947 does not have a value expression (which gives the offset into the
19948 common area), or if the current translation unit isn't Fortran, then
19949 it isn't common and shouldn't be handled as such. */
19950 if (!VAR_P (decl)
19951 || !TREE_STATIC (decl)
19952 || !DECL_HAS_VALUE_EXPR_P (decl)
19953 || !is_fortran ())
19954 return NULL_TREE;
19955
19956 val_expr = DECL_VALUE_EXPR (decl);
19957 if (TREE_CODE (val_expr) != COMPONENT_REF)
19958 return NULL_TREE;
19959
19960 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
19961 &unsignedp, &reversep, &volatilep);
19962
19963 if (cvar == NULL_TREE
19964 || !VAR_P (cvar)
19965 || DECL_ARTIFICIAL (cvar)
19966 || !TREE_PUBLIC (cvar)
19967 /* We don't expect to have to cope with variable offsets,
19968 since at present all static data must have a constant size. */
19969 || !bitpos.is_constant (&cbitpos))
19970 return NULL_TREE;
19971
19972 *value = 0;
19973 if (offset != NULL)
19974 {
19975 if (!tree_fits_shwi_p (offset))
19976 return NULL_TREE;
19977 *value = tree_to_shwi (offset);
19978 }
19979 if (cbitpos != 0)
19980 *value += cbitpos / BITS_PER_UNIT;
19981
19982 return cvar;
19983 }
19984
19985 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
19986 data attribute for a variable or a parameter. We generate the
19987 DW_AT_const_value attribute only in those cases where the given variable
19988 or parameter does not have a true "location" either in memory or in a
19989 register. This can happen (for example) when a constant is passed as an
19990 actual argument in a call to an inline function. (It's possible that
19991 these things can crop up in other ways also.) Note that one type of
19992 constant value which can be passed into an inlined function is a constant
19993 pointer. This can happen for example if an actual argument in an inlined
19994 function call evaluates to a compile-time constant address.
19995
19996 CACHE_P is true if it is worth caching the location list for DECL,
19997 so that future calls can reuse it rather than regenerate it from scratch.
19998 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
19999 since we will need to refer to them each time the function is inlined. */
20000
20001 static bool
20002 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
20003 {
20004 rtx rtl;
20005 dw_loc_list_ref list;
20006 var_loc_list *loc_list;
20007 cached_dw_loc_list *cache;
20008
20009 if (early_dwarf)
20010 return false;
20011
20012 if (TREE_CODE (decl) == ERROR_MARK)
20013 return false;
20014
20015 if (get_AT (die, DW_AT_location)
20016 || get_AT (die, DW_AT_const_value))
20017 return true;
20018
20019 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
20020 || TREE_CODE (decl) == RESULT_DECL);
20021
20022 /* Try to get some constant RTL for this decl, and use that as the value of
20023 the location. */
20024
20025 rtl = rtl_for_decl_location (decl);
20026 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20027 && add_const_value_attribute (die, rtl))
20028 return true;
20029
20030 /* See if we have a single-element location list that is equivalent to
20031 a constant value. In that case it is better to use add_const_value_attribute
20032 rather than expanding the constant value equivalent. */
20033 loc_list = lookup_decl_loc (decl);
20034 if (loc_list
20035 && loc_list->first
20036 && loc_list->first->next == NULL
20037 && NOTE_P (loc_list->first->loc)
20038 && NOTE_VAR_LOCATION (loc_list->first->loc)
20039 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
20040 {
20041 struct var_loc_node *node;
20042
20043 node = loc_list->first;
20044 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
20045 if (GET_CODE (rtl) == EXPR_LIST)
20046 rtl = XEXP (rtl, 0);
20047 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20048 && add_const_value_attribute (die, rtl))
20049 return true;
20050 }
20051 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
20052 list several times. See if we've already cached the contents. */
20053 list = NULL;
20054 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
20055 cache_p = false;
20056 if (cache_p)
20057 {
20058 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
20059 if (cache)
20060 list = cache->loc_list;
20061 }
20062 if (list == NULL)
20063 {
20064 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
20065 NULL);
20066 /* It is usually worth caching this result if the decl is from
20067 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
20068 if (cache_p && list && list->dw_loc_next)
20069 {
20070 cached_dw_loc_list **slot
20071 = cached_dw_loc_list_table->find_slot_with_hash (decl,
20072 DECL_UID (decl),
20073 INSERT);
20074 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
20075 cache->decl_id = DECL_UID (decl);
20076 cache->loc_list = list;
20077 *slot = cache;
20078 }
20079 }
20080 if (list)
20081 {
20082 add_AT_location_description (die, DW_AT_location, list);
20083 return true;
20084 }
20085 /* None of that worked, so it must not really have a location;
20086 try adding a constant value attribute from the DECL_INITIAL. */
20087 return tree_add_const_value_attribute_for_decl (die, decl);
20088 }
20089
20090 /* Helper function for tree_add_const_value_attribute. Natively encode
20091 initializer INIT into an array. Return true if successful. */
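/* A small illustration (assuming 8-bit bytes): for

     char s[4] = "ab";

   the STRING_CST case below copies 'a', 'b', '\0' and zero-fills the rest,
   so ARRAY ends up holding { 'a', 'b', 0, 0 }.  */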
20092
20093 static bool
20094 native_encode_initializer (tree init, unsigned char *array, int size)
20095 {
20096 tree type;
20097
20098 if (init == NULL_TREE)
20099 return false;
20100
20101 STRIP_NOPS (init);
20102 switch (TREE_CODE (init))
20103 {
20104 case STRING_CST:
20105 type = TREE_TYPE (init);
20106 if (TREE_CODE (type) == ARRAY_TYPE)
20107 {
20108 tree enttype = TREE_TYPE (type);
20109 scalar_int_mode mode;
20110
20111 if (!is_int_mode (TYPE_MODE (enttype), &mode)
20112 || GET_MODE_SIZE (mode) != 1)
20113 return false;
20114 if (int_size_in_bytes (type) != size)
20115 return false;
20116 if (size > TREE_STRING_LENGTH (init))
20117 {
20118 memcpy (array, TREE_STRING_POINTER (init),
20119 TREE_STRING_LENGTH (init));
20120 memset (array + TREE_STRING_LENGTH (init),
20121 '\0', size - TREE_STRING_LENGTH (init));
20122 }
20123 else
20124 memcpy (array, TREE_STRING_POINTER (init), size);
20125 return true;
20126 }
20127 return false;
20128 case CONSTRUCTOR:
20129 type = TREE_TYPE (init);
20130 if (int_size_in_bytes (type) != size)
20131 return false;
20132 if (TREE_CODE (type) == ARRAY_TYPE)
20133 {
20134 HOST_WIDE_INT min_index;
20135 unsigned HOST_WIDE_INT cnt;
20136 int curpos = 0, fieldsize;
20137 constructor_elt *ce;
20138
20139 if (TYPE_DOMAIN (type) == NULL_TREE
20140 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
20141 return false;
20142
20143 fieldsize = int_size_in_bytes (TREE_TYPE (type));
20144 if (fieldsize <= 0)
20145 return false;
20146
20147 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
20148 memset (array, '\0', size);
20149 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20150 {
20151 tree val = ce->value;
20152 tree index = ce->index;
20153 int pos = curpos;
20154 if (index && TREE_CODE (index) == RANGE_EXPR)
20155 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
20156 * fieldsize;
20157 else if (index)
20158 pos = (tree_to_shwi (index) - min_index) * fieldsize;
20159
20160 if (val)
20161 {
20162 STRIP_NOPS (val);
20163 if (!native_encode_initializer (val, array + pos, fieldsize))
20164 return false;
20165 }
20166 curpos = pos + fieldsize;
20167 if (index && TREE_CODE (index) == RANGE_EXPR)
20168 {
20169 int count = tree_to_shwi (TREE_OPERAND (index, 1))
20170 - tree_to_shwi (TREE_OPERAND (index, 0));
20171 while (count-- > 0)
20172 {
20173 if (val)
20174 memcpy (array + curpos, array + pos, fieldsize);
20175 curpos += fieldsize;
20176 }
20177 }
20178 gcc_assert (curpos <= size);
20179 }
20180 return true;
20181 }
20182 else if (TREE_CODE (type) == RECORD_TYPE
20183 || TREE_CODE (type) == UNION_TYPE)
20184 {
20185 tree field = NULL_TREE;
20186 unsigned HOST_WIDE_INT cnt;
20187 constructor_elt *ce;
20188
20189 if (int_size_in_bytes (type) != size)
20190 return false;
20191
20192 if (TREE_CODE (type) == RECORD_TYPE)
20193 field = TYPE_FIELDS (type);
20194
20195 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20196 {
20197 tree val = ce->value;
20198 int pos, fieldsize;
20199
20200 if (ce->index != 0)
20201 field = ce->index;
20202
20203 if (val)
20204 STRIP_NOPS (val);
20205
20206 if (field == NULL_TREE || DECL_BIT_FIELD (field))
20207 return false;
20208
20209 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
20210 && TYPE_DOMAIN (TREE_TYPE (field))
20211 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
20212 return false;
20213 else if (DECL_SIZE_UNIT (field) == NULL_TREE
20214 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
20215 return false;
20216 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
20217 pos = int_byte_position (field);
20218 gcc_assert (pos + fieldsize <= size);
20219 if (val && fieldsize != 0
20220 && !native_encode_initializer (val, array + pos, fieldsize))
20221 return false;
20222 }
20223 return true;
20224 }
20225 return false;
20226 case VIEW_CONVERT_EXPR:
20227 case NON_LVALUE_EXPR:
20228 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
20229 default:
20230 return native_encode_expr (init, array, size) == size;
20231 }
20232 }
20233
20234 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20235 attribute is the const value T. */
20236
20237 static bool
20238 tree_add_const_value_attribute (dw_die_ref die, tree t)
20239 {
20240 tree init;
20241 tree type = TREE_TYPE (t);
20242 rtx rtl;
20243
20244 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20245 return false;
20246
20247 init = t;
20248 gcc_assert (!DECL_P (init));
20249
20250 if (TREE_CODE (init) == INTEGER_CST)
20251 {
20252 if (tree_fits_uhwi_p (init))
20253 {
20254 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20255 return true;
20256 }
20257 if (tree_fits_shwi_p (init))
20258 {
20259 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20260 return true;
20261 }
20262 }
20263 if (! early_dwarf)
20264 {
20265 rtl = rtl_for_decl_init (init, type);
20266 if (rtl)
20267 return add_const_value_attribute (die, rtl);
20268 }
20269 /* If the host and target are sane, try harder. */
20270 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20271 && initializer_constant_valid_p (init, type))
20272 {
20273 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20274 if (size > 0 && (int) size == size)
20275 {
20276 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20277
20278 if (native_encode_initializer (init, array, size))
20279 {
20280 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20281 return true;
20282 }
20283 ggc_free (array);
20284 }
20285 }
20286 return false;
20287 }
20288
20289 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20290 attribute is the const value of T, where T is an integral constant
20291 variable with static storage duration
20292 (so it can't be a PARM_DECL or a RESULT_DECL). */
20293
20294 static bool
20295 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20296 {
20297
20298 if (!decl
20299 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20300 || (VAR_P (decl) && !TREE_STATIC (decl)))
20301 return false;
20302
20303 if (TREE_READONLY (decl)
20304 && ! TREE_THIS_VOLATILE (decl)
20305 && DECL_INITIAL (decl))
20306 /* OK */;
20307 else
20308 return false;
20309
20310 /* Don't add DW_AT_const_value if abstract origin already has one. */
20311 if (get_AT (var_die, DW_AT_const_value))
20312 return false;
20313
20314 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20315 }
20316
20317 /* Convert the CFI instructions for the current function into a
20318 location list. This is used for DW_AT_frame_base when we are targeting
20319 a dwarf2 consumer that does not support the dwarf3
20320 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20321 expressions. */
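/* Illustration only: for a function whose CFA is SP+8 during the prologue
   and FP+16 afterwards, the resulting list is conceptually

     [begin, L1)  DW_OP_breg<SP> 8+OFFSET
     [L1, end)    DW_OP_breg<FP> 16+OFFSET

   where L1 is the label recorded for the first CFA-advancing CFI.  */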
20322
20323 static dw_loc_list_ref
20324 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20325 {
20326 int ix;
20327 dw_fde_ref fde;
20328 dw_loc_list_ref list, *list_tail;
20329 dw_cfi_ref cfi;
20330 dw_cfa_location last_cfa, next_cfa;
20331 const char *start_label, *last_label, *section;
20332 dw_cfa_location remember;
20333
20334 fde = cfun->fde;
20335 gcc_assert (fde != NULL);
20336
20337 section = secname_for_decl (current_function_decl);
20338 list_tail = &list;
20339 list = NULL;
20340
20341 memset (&next_cfa, 0, sizeof (next_cfa));
20342 next_cfa.reg = INVALID_REGNUM;
20343 remember = next_cfa;
20344
20345 start_label = fde->dw_fde_begin;
20346
20347 /* ??? Bald assumption that the CIE opcode list does not contain
20348 advance opcodes. */
20349 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20350 lookup_cfa_1 (cfi, &next_cfa, &remember);
20351
20352 last_cfa = next_cfa;
20353 last_label = start_label;
20354
20355 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20356 {
20357 /* If the first partition contained no CFI adjustments, the
20358 CIE opcodes apply to the whole first partition. */
20359 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20360 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20361 list_tail = &(*list_tail)->dw_loc_next;
20362 start_label = last_label = fde->dw_fde_second_begin;
20363 }
20364
20365 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20366 {
20367 switch (cfi->dw_cfi_opc)
20368 {
20369 case DW_CFA_set_loc:
20370 case DW_CFA_advance_loc1:
20371 case DW_CFA_advance_loc2:
20372 case DW_CFA_advance_loc4:
20373 if (!cfa_equal_p (&last_cfa, &next_cfa))
20374 {
20375 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20376 start_label, 0, last_label, 0, section);
20377
20378 list_tail = &(*list_tail)->dw_loc_next;
20379 last_cfa = next_cfa;
20380 start_label = last_label;
20381 }
20382 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20383 break;
20384
20385 case DW_CFA_advance_loc:
20386 /* The encoding is complex enough that we should never emit this. */
20387 gcc_unreachable ();
20388
20389 default:
20390 lookup_cfa_1 (cfi, &next_cfa, &remember);
20391 break;
20392 }
20393 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20394 {
20395 if (!cfa_equal_p (&last_cfa, &next_cfa))
20396 {
20397 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20398 start_label, 0, last_label, 0, section);
20399
20400 list_tail = &(*list_tail)->dw_loc_next;
20401 last_cfa = next_cfa;
20402 start_label = last_label;
20403 }
20404 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20405 start_label, 0, fde->dw_fde_end, 0, section);
20406 list_tail = &(*list_tail)->dw_loc_next;
20407 start_label = last_label = fde->dw_fde_second_begin;
20408 }
20409 }
20410
20411 if (!cfa_equal_p (&last_cfa, &next_cfa))
20412 {
20413 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20414 start_label, 0, last_label, 0, section);
20415 list_tail = &(*list_tail)->dw_loc_next;
20416 start_label = last_label;
20417 }
20418
20419 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20420 start_label, 0,
20421 fde->dw_fde_second_begin
20422 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20423 section);
20424
20425 maybe_gen_llsym (list);
20426
20427 return list;
20428 }
20429
20430 /* Compute a displacement from the "steady-state frame pointer" to the
20431 frame base (often the same as the CFA), and store it in
20432 frame_pointer_fb_offset. OFFSET is added to the displacement
20433 before the latter is negated. */
20434
20435 static void
20436 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20437 {
20438 rtx reg, elim;
20439
20440 #ifdef FRAME_POINTER_CFA_OFFSET
20441 reg = frame_pointer_rtx;
20442 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20443 #else
20444 reg = arg_pointer_rtx;
20445 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20446 #endif
20447
20448 elim = (ira_use_lra_p
20449 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20450 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20451 elim = strip_offset_and_add (elim, &offset);
20452
20453 frame_pointer_fb_offset = -offset;
20454
20455 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20456 in which to eliminate. This is because its stack pointer isn't
20457 directly accessible as a register within the ISA. To work around
20458 this, assume that while we cannot provide a proper value for
20459 frame_pointer_fb_offset, we won't need one either. We can use
20460 hard frame pointer in debug info even if frame pointer isn't used
20461 since hard frame pointer in debug info is encoded with DW_OP_fbreg
20462 which uses the DW_AT_frame_base attribute, not hard frame pointer
20463 directly. */
20464 frame_pointer_fb_offset_valid
20465 = (elim == hard_frame_pointer_rtx || elim == stack_pointer_rtx);
20466 }
20467
20468 /* Generate a DW_AT_name attribute given some string value to be included as
20469 the value of the attribute. */
20470
20471 static void
20472 add_name_attribute (dw_die_ref die, const char *name_string)
20473 {
20474 if (name_string != NULL && *name_string != 0)
20475 {
20476 if (demangle_name_func)
20477 name_string = (*demangle_name_func) (name_string);
20478
20479 add_AT_string (die, DW_AT_name, name_string);
20480 }
20481 }
20482
20483 /* Generate a DW_AT_description attribute given some string value to be included
20484 as the value of the attribute. */
20485
20486 static void
20487 add_desc_attribute (dw_die_ref die, const char *name_string)
20488 {
20489 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20490 return;
20491
20492 if (name_string == NULL || *name_string == 0)
20493 return;
20494
20495 if (demangle_name_func)
20496 name_string = (*demangle_name_func) (name_string);
20497
20498 add_AT_string (die, DW_AT_description, name_string);
20499 }
20500
20501 /* Generate a DW_AT_description attribute given some decl to be included
20502 as the value of the attribute. */
20503
20504 static void
20505 add_desc_attribute (dw_die_ref die, tree decl)
20506 {
20507 tree decl_name;
20508
20509 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20510 return;
20511
20512 if (decl == NULL_TREE || !DECL_P (decl))
20513 return;
20514 decl_name = DECL_NAME (decl);
20515
20516 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
20517 {
20518 const char *name = dwarf2_name (decl, 0);
20519 add_desc_attribute (die, name ? name : IDENTIFIER_POINTER (decl_name));
20520 }
20521 else
20522 {
20523 char *desc = print_generic_expr_to_str (decl);
20524 add_desc_attribute (die, desc);
20525 free (desc);
20526 }
20527 }
20528
20529 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
20530 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20531 of TYPE accordingly.
20532
20533 ??? This is a temporary measure until after we're able to generate
20534 regular DWARF for the complex Ada type system. */
20535
20536 static void
20537 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20538 dw_die_ref context_die)
20539 {
20540 tree dtype;
20541 dw_die_ref dtype_die;
20542
20543 if (!lang_hooks.types.descriptive_type)
20544 return;
20545
20546 dtype = lang_hooks.types.descriptive_type (type);
20547 if (!dtype)
20548 return;
20549
20550 dtype_die = lookup_type_die (dtype);
20551 if (!dtype_die)
20552 {
20553 gen_type_die (dtype, context_die);
20554 dtype_die = lookup_type_die (dtype);
20555 gcc_assert (dtype_die);
20556 }
20557
20558 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20559 }
20560
20561 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
20562
20563 static const char *
20564 comp_dir_string (void)
20565 {
20566 const char *wd;
20567 char *wd1;
20568 static const char *cached_wd = NULL;
20569
20570 if (cached_wd != NULL)
20571 return cached_wd;
20572
20573 wd = get_src_pwd ();
20574 if (wd == NULL)
20575 return NULL;
20576
20577 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20578 {
20579 int wdlen;
20580
20581 wdlen = strlen (wd);
20582 wd1 = ggc_vec_alloc<char> (wdlen + 2);
20583 strcpy (wd1, wd);
20584 wd1 [wdlen] = DIR_SEPARATOR;
20585 wd1 [wdlen + 1] = 0;
20586 wd = wd1;
20587 }
20588
20589 cached_wd = remap_debug_filename (wd);
20590 return cached_wd;
20591 }
20592
20593 /* Generate a DW_AT_comp_dir attribute for DIE. */
20594
20595 static void
20596 add_comp_dir_attribute (dw_die_ref die)
20597 {
20598 const char * wd = comp_dir_string ();
20599 if (wd != NULL)
20600 add_AT_string (die, DW_AT_comp_dir, wd);
20601 }
20602
20603 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20604 pointer computation, ...), output a representation for that value according
20605 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20606 loc_list_from_tree for the meaning of CONTEXT. */
20607
20608 static void
20609 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20610 int forms, struct loc_descr_context *context)
20611 {
20612 dw_die_ref context_die, decl_die = NULL;
20613 dw_loc_list_ref list;
20614 bool strip_conversions = true;
20615 bool placeholder_seen = false;
20616
20617 while (strip_conversions)
20618 switch (TREE_CODE (value))
20619 {
20620 case ERROR_MARK:
20621 case SAVE_EXPR:
20622 return;
20623
20624 CASE_CONVERT:
20625 case VIEW_CONVERT_EXPR:
20626 value = TREE_OPERAND (value, 0);
20627 break;
20628
20629 default:
20630 strip_conversions = false;
20631 break;
20632 }
20633
20634 /* If possible and permitted, output the attribute as a constant. */
20635 if ((forms & dw_scalar_form_constant) != 0
20636 && TREE_CODE (value) == INTEGER_CST)
20637 {
20638 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20639
20640 /* If HOST_WIDE_INT is big enough then represent the bound as
20641 a constant value. We need to choose a form based on
20642 whether the type is signed or unsigned. We cannot just
20643 call add_AT_unsigned if the value itself is positive
20644 (add_AT_unsigned might add the unsigned value encoded as
20645 DW_FORM_data[1248]). Some DWARF consumers will look up the
20646 bounds type and then sign extend any unsigned values found
20647 for signed types. This is needed only for
20648 DW_AT_{lower,upper}_bound, since for most other attributes,
20649 consumers will treat DW_FORM_data[1248] as unsigned values,
20650 regardless of the underlying type. */
20651 if (prec <= HOST_BITS_PER_WIDE_INT
20652 || tree_fits_uhwi_p (value))
20653 {
20654 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20655 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20656 else
20657 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20658 }
20659 else
20660 /* Otherwise represent the bound as an unsigned value with
20661 the precision of its type. The precision and signedness
20662 of the type will be necessary to re-interpret it
20663 unambiguously. */
20664 add_AT_wide (die, attr, wi::to_wide (value));
20665 return;
20666 }
20667
20668 /* Otherwise, if it's possible and permitted too, output a reference to
20669 another DIE. */
20670 if ((forms & dw_scalar_form_reference) != 0)
20671 {
20672 tree decl = NULL_TREE;
20673
20674 /* Some type attributes reference an outer type. For instance, the upper
20675 bound of an array may reference an embedding record (this happens in
20676 Ada). */
20677 if (TREE_CODE (value) == COMPONENT_REF
20678 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20679 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20680 decl = TREE_OPERAND (value, 1);
20681
20682 else if (VAR_P (value)
20683 || TREE_CODE (value) == PARM_DECL
20684 || TREE_CODE (value) == RESULT_DECL)
20685 decl = value;
20686
20687 if (decl != NULL_TREE)
20688 {
20689 decl_die = lookup_decl_die (decl);
20690
20691 /* ??? Can this happen, or should the variable have been bound
20692 first? Probably it can, since I imagine that we try to create
20693 the types of parameters in the order in which they exist in
20694 the list, and won't have created a forward reference to a
20695 later parameter. */
20696 if (decl_die != NULL)
20697 {
20698 if (get_AT (decl_die, DW_AT_location)
20699 || get_AT (decl_die, DW_AT_const_value))
20700 {
20701 add_AT_die_ref (die, attr, decl_die);
20702 return;
20703 }
20704 }
20705 }
20706 }
20707
20708 /* Last chance: try to create a stack operation procedure to evaluate the
20709 value. Do nothing if even that is not possible or permitted. */
20710 if ((forms & dw_scalar_form_exprloc) == 0)
20711 return;
20712
20713 list = loc_list_from_tree (value, 2, context);
20714 if (context && context->placeholder_arg)
20715 {
20716 placeholder_seen = context->placeholder_seen;
20717 context->placeholder_seen = false;
20718 }
20719 if (list == NULL || single_element_loc_list_p (list))
20720 {
20721 /* If this attribute is not a reference nor constant, it is
20722 a DWARF expression rather than location description. For that
20723 a DWARF expression rather than a location description. For that
20724 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20725 if (list2 && single_element_loc_list_p (list2))
20726 {
20727 if (placeholder_seen)
20728 {
20729 struct dwarf_procedure_info dpi;
20730 dpi.fndecl = NULL_TREE;
20731 dpi.args_count = 1;
20732 if (!resolve_args_picking (list2->expr, 1, &dpi))
20733 return;
20734 }
20735 add_AT_loc (die, attr, list2->expr);
20736 return;
20737 }
20738 }
20739
20740 /* If that failed to give a single element location list, fall back to
20741 outputting this as a reference... still if permitted. */
20742 if (list == NULL
20743 || (forms & dw_scalar_form_reference) == 0
20744 || placeholder_seen)
20745 return;
20746
20747 if (!decl_die)
20748 {
20749 if (current_function_decl == 0)
20750 context_die = comp_unit_die ();
20751 else
20752 context_die = lookup_decl_die (current_function_decl);
20753
20754 decl_die = new_die (DW_TAG_variable, context_die, value);
20755 add_AT_flag (decl_die, DW_AT_artificial, 1);
20756 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20757 context_die);
20758 }
20759
20760 add_AT_location_description (decl_die, DW_AT_location, list);
20761 add_AT_die_ref (die, attr, decl_die);
20762 }
20763
20764 /* Return the default for DW_AT_lower_bound, or -1 if there is not any
20765 default. */
20766
20767 static int
20768 lower_bound_default (void)
20769 {
20770 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20771 {
20772 case DW_LANG_C:
20773 case DW_LANG_C89:
20774 case DW_LANG_C99:
20775 case DW_LANG_C11:
20776 case DW_LANG_C_plus_plus:
20777 case DW_LANG_C_plus_plus_11:
20778 case DW_LANG_C_plus_plus_14:
20779 case DW_LANG_ObjC:
20780 case DW_LANG_ObjC_plus_plus:
20781 return 0;
20782 case DW_LANG_Fortran77:
20783 case DW_LANG_Fortran90:
20784 case DW_LANG_Fortran95:
20785 case DW_LANG_Fortran03:
20786 case DW_LANG_Fortran08:
20787 return 1;
20788 case DW_LANG_UPC:
20789 case DW_LANG_D:
20790 case DW_LANG_Python:
20791 return dwarf_version >= 4 ? 0 : -1;
20792 case DW_LANG_Ada95:
20793 case DW_LANG_Ada83:
20794 case DW_LANG_Cobol74:
20795 case DW_LANG_Cobol85:
20796 case DW_LANG_Modula2:
20797 case DW_LANG_PLI:
20798 return dwarf_version >= 4 ? 1 : -1;
20799 default:
20800 return -1;
20801 }
20802 }
20803
20804 /* Given a tree node describing an array bound (either lower or upper) output
20805 a representation for that bound. */
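/* For instance, in Fortran (default lower bound 1, see lower_bound_default
   above) a dimension declared as (1:10) only needs DW_AT_upper_bound = 10;
   the INTEGER_CST case below omits the redundant DW_AT_lower_bound.  */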
20806
20807 static void
20808 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20809 tree bound, struct loc_descr_context *context)
20810 {
20811 int dflt;
20812
20813 while (1)
20814 switch (TREE_CODE (bound))
20815 {
20816 /* Strip all conversions. */
20817 CASE_CONVERT:
20818 case VIEW_CONVERT_EXPR:
20819 bound = TREE_OPERAND (bound, 0);
20820 break;
20821
20822 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
20823 are even omitted when they are the default. */
20824 case INTEGER_CST:
20825 /* If the value for this bound is the default one, we can even omit the
20826 attribute. */
20827 if (bound_attr == DW_AT_lower_bound
20828 && tree_fits_shwi_p (bound)
20829 && (dflt = lower_bound_default ()) != -1
20830 && tree_to_shwi (bound) == dflt)
20831 return;
20832
20833 /* FALLTHRU */
20834
20835 default:
20836 /* Because of the complex interactions that can arise with other GNAT
20837 encodings, GDB isn't ready yet to handle a proper DWARF description
20838 for self-referential subrange bounds: let GNAT encodings do the
20839 magic in such a case. */
20840 if (is_ada ()
20841 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20842 && contains_placeholder_p (bound))
20843 return;
20844
20845 add_scalar_info (subrange_die, bound_attr, bound,
20846 dw_scalar_form_constant
20847 | dw_scalar_form_exprloc
20848 | dw_scalar_form_reference,
20849 context);
20850 return;
20851 }
20852 }
20853
20854 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20855 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
20856 Note that the block of subscript information for an array type also
20857 includes information about the element type of the given array type.
20858
20859 This function reuses previously set type and bound information if
20860 available. */
20861
20862 static void
20863 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20864 {
20865 unsigned dimension_number;
20866 tree lower, upper;
20867 dw_die_ref child = type_die->die_child;
20868
20869 for (dimension_number = 0;
20870 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
20871 type = TREE_TYPE (type), dimension_number++)
20872 {
20873 tree domain = TYPE_DOMAIN (type);
20874
20875 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
20876 break;
20877
20878 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
20879 and (in GNU C only) variable bounds. Handle all three forms
20880 here. */
20881
20882 /* Find and reuse a previously generated DW_TAG_subrange_type if
20883 available.
20884
20885 For multi-dimensional arrays, as we iterate through the
20886 various dimensions in the enclosing for loop above, we also
20887 iterate through the DIE children and pick up each
20888 DW_TAG_subrange_type previously generated (if available).
20889 Each child DW_TAG_subrange_type DIE describes the range of
20890 the current dimension. At this point we should have as many
20891 DW_TAG_subrange_type's as we have dimensions in the
20892 array. */
20893 dw_die_ref subrange_die = NULL;
20894 if (child)
20895 while (1)
20896 {
20897 child = child->die_sib;
20898 if (child->die_tag == DW_TAG_subrange_type)
20899 subrange_die = child;
20900 if (child == type_die->die_child)
20901 {
20902 /* If we wrapped around, stop looking next time. */
20903 child = NULL;
20904 break;
20905 }
20906 if (child->die_tag == DW_TAG_subrange_type)
20907 break;
20908 }
20909 if (!subrange_die)
20910 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
20911
20912 if (domain)
20913 {
20914 /* We have an array type with specified bounds. */
20915 lower = TYPE_MIN_VALUE (domain);
20916 upper = TYPE_MAX_VALUE (domain);
20917
20918 /* Define the index type. */
20919 if (TREE_TYPE (domain)
20920 && !get_AT (subrange_die, DW_AT_type))
20921 {
20922 /* ??? This is probably an Ada unnamed subrange type. Ignore the
20923 TREE_TYPE field. We can't emit debug info for this
20924 because it is an unnamed integral type. */
20925 if (TREE_CODE (domain) == INTEGER_TYPE
20926 && TYPE_NAME (domain) == NULL_TREE
20927 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
20928 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
20929 ;
20930 else
20931 add_type_attribute (subrange_die, TREE_TYPE (domain),
20932 TYPE_UNQUALIFIED, false, type_die);
20933 }
20934
20935 /* ??? If upper is NULL, the array has unspecified length,
20936 but it does have a lower bound. This happens with Fortran
20937 dimension arr(N:*).
20938 Since the debugger is definitely going to need to know N
20939 to produce useful results, go ahead and output the lower
20940 bound solo, and hope the debugger can cope. */
20941
20942 if (!get_AT (subrange_die, DW_AT_lower_bound))
20943 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
20944 if (!get_AT (subrange_die, DW_AT_upper_bound)
20945 && !get_AT (subrange_die, DW_AT_count))
20946 {
20947 if (upper)
20948 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
20949 else if ((is_c () || is_cxx ()) && COMPLETE_TYPE_P (type))
20950 /* Zero-length array. */
20951 add_bound_info (subrange_die, DW_AT_count,
20952 build_int_cst (TREE_TYPE (lower), 0), NULL);
20953 }
20954 }
20955
20956 /* Otherwise we have an array type with an unspecified length. The
20957 DWARF-2 spec does not say how to handle this; let's just leave out the
20958 bounds. */
20959 }
20960 }
20961
20962 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
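/* For instance, for a bit-field member declared as `int f : 3' the
   FIELD_DECL case below yields DW_AT_byte_size 4 (assuming a 32-bit
   int), i.e. the size of the declared type rather than of the bits
   actually occupied.  */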
20963
20964 static void
20965 add_byte_size_attribute (dw_die_ref die, tree tree_node)
20966 {
20967 dw_die_ref decl_die;
20968 HOST_WIDE_INT size;
20969 dw_loc_descr_ref size_expr = NULL;
20970
20971 switch (TREE_CODE (tree_node))
20972 {
20973 case ERROR_MARK:
20974 size = 0;
20975 break;
20976 case ENUMERAL_TYPE:
20977 case RECORD_TYPE:
20978 case UNION_TYPE:
20979 case QUAL_UNION_TYPE:
20980 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
20981 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
20982 {
20983 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
20984 return;
20985 }
20986 size_expr = type_byte_size (tree_node, &size);
20987 break;
20988 case FIELD_DECL:
20989 /* For a data member of a struct or union, the DW_AT_byte_size is
20990 generally given as the number of bytes normally allocated for an
20991 object of the *declared* type of the member itself. This is true
20992 even for bit-fields. */
20993 size = int_size_in_bytes (field_type (tree_node));
20994 break;
20995 default:
20996 gcc_unreachable ();
20997 }
20998
20999 /* Support for dynamically-sized objects was introduced by DWARFv3.
21000 At the moment, GDB does not handle variable byte sizes very well,
21001 though. */
21002 if ((dwarf_version >= 3 || !dwarf_strict)
21003 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
21004 && size_expr != NULL)
21005 add_AT_loc (die, DW_AT_byte_size, size_expr);
21006
21007 /* Note that `size' might be -1 when we get to this point. If it is, that
21008 indicates that the byte size of the entity in question is variable and
21009 that we could not generate a DWARF expression that computes it. */
21010 if (size >= 0)
21011 add_AT_unsigned (die, DW_AT_byte_size, size);
21012 }
21013
21014 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
21015 alignment. */
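/* Only user-specified alignment is reported: for example, a type
   declared with __attribute__((aligned(16))) gets DW_AT_alignment 16
   (in bytes), while the default ABI alignment produces no attribute
   at all.  */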
21016
21017 static void
21018 add_alignment_attribute (dw_die_ref die, tree tree_node)
21019 {
21020 if (dwarf_version < 5 && dwarf_strict)
21021 return;
21022
21023 unsigned align;
21024
21025 if (DECL_P (tree_node))
21026 {
21027 if (!DECL_USER_ALIGN (tree_node))
21028 return;
21029
21030 align = DECL_ALIGN_UNIT (tree_node);
21031 }
21032 else if (TYPE_P (tree_node))
21033 {
21034 if (!TYPE_USER_ALIGN (tree_node))
21035 return;
21036
21037 align = TYPE_ALIGN_UNIT (tree_node);
21038 }
21039 else
21040 gcc_unreachable ();
21041
21042 add_AT_unsigned (die, DW_AT_alignment, align);
21043 }
21044
21045 /* For a FIELD_DECL node which represents a bit-field, output an attribute
21046 which specifies the distance in bits from the highest order bit of the
21047 "containing object" for the bit-field to the highest order bit of the
21048 bit-field itself.
21049
21050 For any given bit-field, the "containing object" is a hypothetical object
21051 (of some integral or enum type) within which the given bit-field lives. The
21052 type of this hypothetical "containing object" is always the same as the
21053 declared type of the individual bit-field itself. The determination of the
21054 exact location of the "containing object" for a bit-field is rather
21055 complicated. It's handled by the `field_byte_offset' function (above).
21056
21057 CTX is required: see the comment for VLR_CONTEXT.
21058
21059 Note that it is the size (in bytes) of the hypothetical "containing object"
21060 which will be given in the DW_AT_byte_size attribute for this bit-field.
21061 (See `add_byte_size_attribute' above). */
21062
21063 static inline void
21064 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
21065 {
21066 HOST_WIDE_INT object_offset_in_bytes;
21067 tree original_type = DECL_BIT_FIELD_TYPE (decl);
21068 HOST_WIDE_INT bitpos_int;
21069 HOST_WIDE_INT highest_order_object_bit_offset;
21070 HOST_WIDE_INT highest_order_field_bit_offset;
21071 HOST_WIDE_INT bit_offset;
21072
21073 field_byte_offset (decl, ctx, &object_offset_in_bytes);
21074
21075 /* Must be a field and a bit field. */
21076 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
21077
21078 /* We can't yet handle bit-fields whose offsets are variable, so if we
21079 encounter such things, just return without generating any attribute
21080 whatsoever. Likewise for variable or too large size. */
21081 if (! tree_fits_shwi_p (bit_position (decl))
21082 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
21083 return;
21084
21085 bitpos_int = int_bit_position (decl);
21086
21087 /* Note that the bit offset is always the distance (in bits) from the
21088 highest-order bit of the "containing object" to the highest-order bit of
21089 the bit-field itself. Since the "high-order end" of any object or field
21090 is different on big-endian and little-endian machines, the computation
21091 below must take account of these differences. */
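/* A worked example, assuming a little-endian target with 32-bit int:
   for `struct { int f : 3; }' the containing int starts at byte 0 and
   the field occupies its three low-order bits, so after the
   little-endian adjustment the object and field offsets below become
   32 and 3, and DW_AT_bit_offset ends up as 32 - 3 = 29.  */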
21092 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
21093 highest_order_field_bit_offset = bitpos_int;
21094
21095 if (! BYTES_BIG_ENDIAN)
21096 {
21097 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
21098 highest_order_object_bit_offset +=
21099 simple_type_size_in_bits (original_type);
21100 }
21101
21102 bit_offset
21103 = (! BYTES_BIG_ENDIAN
21104 ? highest_order_object_bit_offset - highest_order_field_bit_offset
21105 : highest_order_field_bit_offset - highest_order_object_bit_offset);
21106
21107 if (bit_offset < 0)
21108 add_AT_int (die, DW_AT_bit_offset, bit_offset);
21109 else
21110 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
21111 }
21112
21113 /* For a FIELD_DECL node which represents a bit field, output an attribute
21114 which specifies the length in bits of the given field. */
21115
21116 static inline void
21117 add_bit_size_attribute (dw_die_ref die, tree decl)
21118 {
21119 /* Must be a field and a bit field. */
21120 gcc_assert (TREE_CODE (decl) == FIELD_DECL
21121 && DECL_BIT_FIELD_TYPE (decl));
21122
21123 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
21124 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
21125 }
21126
21127 /* If the compiled language is ANSI C, add a 'prototyped' attribute if
21128 argument types are given for the function's parameters. */
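/* In C, for instance, `int f (void)' is prototyped and gets
   DW_AT_prototyped, whereas the old-style declaration `int f ()' is
   not and gets no such attribute.  */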
21129
21130 static inline void
21131 add_prototyped_attribute (dw_die_ref die, tree func_type)
21132 {
21133 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
21134 {
21135 case DW_LANG_C:
21136 case DW_LANG_C89:
21137 case DW_LANG_C99:
21138 case DW_LANG_C11:
21139 case DW_LANG_ObjC:
21140 if (prototype_p (func_type))
21141 add_AT_flag (die, DW_AT_prototyped, 1);
21142 break;
21143 default:
21144 break;
21145 }
21146 }
21147
21148 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
21149 by looking in the type declaration, the object declaration equate table or
21150 the block mapping. */
21151
21152 static inline void
21153 add_abstract_origin_attribute (dw_die_ref die, tree origin)
21154 {
21155 dw_die_ref origin_die = NULL;
21156
21157 /* For late LTO debug output we want to refer directly to the abstract
21158 DIE in the early debug rather than to the possibly existing concrete
21159 instance and avoid creating that just for this purpose. */
21160 sym_off_pair *desc;
21161 if (in_lto_p
21162 && external_die_map
21163 && (desc = external_die_map->get (origin)))
21164 {
21165 add_AT_external_die_ref (die, DW_AT_abstract_origin,
21166 desc->sym, desc->off);
21167 return;
21168 }
21169
21170 if (DECL_P (origin))
21171 origin_die = lookup_decl_die (origin);
21172 else if (TYPE_P (origin))
21173 origin_die = lookup_type_die (origin);
21174 else if (TREE_CODE (origin) == BLOCK)
21175 origin_die = lookup_block_die (origin);
21176
21177 /* XXX: Functions that are never lowered don't always have correct block
21178 trees (in the case of Java they simply have no block tree; some other
21179 languages produce incorrect ones). For these functions, there is nothing we can really do to
21180 output correct debug info for inlined functions in all cases. Rather
21181 than die, we'll just produce deficient debug info now, in that we will
21182 have variables without a proper abstract origin. In the future, when all
21183 functions are lowered, we should re-add a gcc_assert (origin_die)
21184 here. */
21185
21186 if (origin_die)
21187 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21188 }
21189
21190 /* We do not currently support the pure_virtual attribute. */
21191
21192 static inline void
21193 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21194 {
21195 if (DECL_VINDEX (func_decl))
21196 {
21197 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21198
21199 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21200 add_AT_loc (die, DW_AT_vtable_elem_location,
21201 new_loc_descr (DW_OP_constu,
21202 tree_to_shwi (DECL_VINDEX (func_decl)),
21203 0));
21204
21205 /* GNU extension: Record what type this method came from originally. */
21206 if (debug_info_level > DINFO_LEVEL_TERSE
21207 && DECL_CONTEXT (func_decl))
21208 add_AT_die_ref (die, DW_AT_containing_type,
21209 lookup_type_die (DECL_CONTEXT (func_decl)));
21210 }
21211 }
21212 \f
21213 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21214 given decl. This used to be a vendor extension until after DWARF 4
21215 standardized it. */
21216
21217 static void
21218 add_linkage_attr (dw_die_ref die, tree decl)
21219 {
21220 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21221
21222 /* Mimic what assemble_name_raw does with a leading '*'. */
21223 if (name[0] == '*')
21224 name = &name[1];
21225
21226 if (dwarf_version >= 4)
21227 add_AT_string (die, DW_AT_linkage_name, name);
21228 else
21229 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21230 }
21231
21232 /* Add source coordinate attributes for the given decl. */
21233
21234 static void
21235 add_src_coords_attributes (dw_die_ref die, tree decl)
21236 {
21237 expanded_location s;
21238
21239 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21240 return;
21241 s = expand_location (DECL_SOURCE_LOCATION (decl));
21242 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21243 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21244 if (debug_column_info && s.column)
21245 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21246 }
21247
21248 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21249
21250 static void
21251 add_linkage_name_raw (dw_die_ref die, tree decl)
21252 {
21253 /* Defer until we have an assembler name set. */
21254 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21255 {
21256 limbo_die_node *asm_name;
21257
21258 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21259 asm_name->die = die;
21260 asm_name->created_for = decl;
21261 asm_name->next = deferred_asm_name;
21262 deferred_asm_name = asm_name;
21263 }
21264 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21265 add_linkage_attr (die, decl);
21266 }
21267
21268 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21269
21270 static void
21271 add_linkage_name (dw_die_ref die, tree decl)
21272 {
21273 if (debug_info_level > DINFO_LEVEL_NONE
21274 && VAR_OR_FUNCTION_DECL_P (decl)
21275 && TREE_PUBLIC (decl)
21276 && !(VAR_P (decl) && DECL_REGISTER (decl))
21277 && die->die_tag != DW_TAG_member)
21278 add_linkage_name_raw (die, decl);
21279 }
21280
21281 /* Add a DW_AT_name attribute and source coordinate attribute for the
21282 given decl, but only if it actually has a name. */
21283
21284 static void
21285 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21286 bool no_linkage_name)
21287 {
21288 tree decl_name;
21289
21290 decl_name = DECL_NAME (decl);
21291 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21292 {
21293 const char *name = dwarf2_name (decl, 0);
21294 if (name)
21295 add_name_attribute (die, name);
21296 else
21297 add_desc_attribute (die, decl);
21298
21299 if (! DECL_ARTIFICIAL (decl))
21300 add_src_coords_attributes (die, decl);
21301
21302 if (!no_linkage_name)
21303 add_linkage_name (die, decl);
21304 }
21305 else
21306 add_desc_attribute (die, decl);
21307
21308 #ifdef VMS_DEBUGGING_INFO
21309 /* Get the function's name, as described by its RTL. This may be different
21310 from the DECL_NAME name used in the source file. */
21311 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21312 {
21313 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21314 XEXP (DECL_RTL (decl), 0), false);
21315 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21316 }
21317 #endif /* VMS_DEBUGGING_INFO */
21318 }
21319
21320 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21321
21322 static void
21323 add_discr_value (dw_die_ref die, dw_discr_value *value)
21324 {
21325 dw_attr_node attr;
21326
21327 attr.dw_attr = DW_AT_discr_value;
21328 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21329 attr.dw_attr_val.val_entry = NULL;
21330 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21331 if (value->pos)
21332 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21333 else
21334 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21335 add_dwarf_attr (die, &attr);
21336 }
21337
21338 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21339
21340 static void
21341 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21342 {
21343 dw_attr_node attr;
21344
21345 attr.dw_attr = DW_AT_discr_list;
21346 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21347 attr.dw_attr_val.val_entry = NULL;
21348 attr.dw_attr_val.v.val_discr_list = discr_list;
21349 add_dwarf_attr (die, &attr);
21350 }
21351
21352 static inline dw_discr_list_ref
21353 AT_discr_list (dw_attr_node *attr)
21354 {
21355 return attr->dw_attr_val.v.val_discr_list;
21356 }
21357
21358 #ifdef VMS_DEBUGGING_INFO
21359 /* Output the debug main pointer DIE for VMS. */
21360
21361 void
21362 dwarf2out_vms_debug_main_pointer (void)
21363 {
21364 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21365 dw_die_ref die;
21366
21367 /* Allocate the VMS debug main subprogram die. */
21368 die = new_die_raw (DW_TAG_subprogram);
21369 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21370 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21371 current_function_funcdef_no);
21372 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21373
21374 /* Make it the first child of comp_unit_die (). */
21375 die->die_parent = comp_unit_die ();
21376 if (comp_unit_die ()->die_child)
21377 {
21378 die->die_sib = comp_unit_die ()->die_child->die_sib;
21379 comp_unit_die ()->die_child->die_sib = die;
21380 }
21381 else
21382 {
21383 die->die_sib = die;
21384 comp_unit_die ()->die_child = die;
21385 }
21386 }
21387 #endif /* VMS_DEBUGGING_INFO */
21388
21389 /* walk_tree helper function for uses_local_type, below. */
21390
21391 static tree
21392 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21393 {
21394 if (!TYPE_P (*tp))
21395 *walk_subtrees = 0;
21396 else
21397 {
21398 tree name = TYPE_NAME (*tp);
21399 if (name && DECL_P (name) && decl_function_context (name))
21400 return *tp;
21401 }
21402 return NULL_TREE;
21403 }
21404
21405 /* If TYPE involves a function-local type (including a local typedef to a
21406 non-local type), returns that type; otherwise returns NULL_TREE. */
21407
21408 static tree
21409 uses_local_type (tree type)
21410 {
21411 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21412 return used;
21413 }
21414
21415 /* Return the DIE for the scope that immediately contains this type.
21416 Non-named types that do not involve a function-local type get global
21417 scope. Named types nested in namespaces or other types get their
21418 containing scope. All other types (i.e. function-local named types) get
21419 the current active scope. */
21420
21421 static dw_die_ref
21422 scope_die_for (tree t, dw_die_ref context_die)
21423 {
21424 dw_die_ref scope_die = NULL;
21425 tree containing_scope;
21426
21427 /* Non-types always go in the current scope. */
21428 gcc_assert (TYPE_P (t));
21429
21430 /* Use the scope of the typedef, rather than the scope of the type
21431 it refers to. */
21432 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21433 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21434 else
21435 containing_scope = TYPE_CONTEXT (t);
21436
21437 /* Use the containing namespace if there is one. */
21438 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21439 {
21440 if (context_die == lookup_decl_die (containing_scope))
21441 /* OK */;
21442 else if (debug_info_level > DINFO_LEVEL_TERSE)
21443 context_die = get_context_die (containing_scope);
21444 else
21445 containing_scope = NULL_TREE;
21446 }
21447
21448 /* Ignore function type "scopes" from the C frontend. They mean that
21449 a tagged type is local to a parmlist of a function declarator, but
21450 that isn't useful to DWARF. */
21451 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21452 containing_scope = NULL_TREE;
21453
21454 if (SCOPE_FILE_SCOPE_P (containing_scope))
21455 {
21456 /* If T uses a local type keep it local as well, to avoid references
21457 to function-local DIEs from outside the function. */
21458 if (current_function_decl && uses_local_type (t))
21459 scope_die = context_die;
21460 else
21461 scope_die = comp_unit_die ();
21462 }
21463 else if (TYPE_P (containing_scope))
21464 {
21465 /* For types, we can just look up the appropriate DIE. */
21466 if (debug_info_level > DINFO_LEVEL_TERSE)
21467 scope_die = get_context_die (containing_scope);
21468 else
21469 {
21470 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21471 if (scope_die == NULL)
21472 scope_die = comp_unit_die ();
21473 }
21474 }
21475 else
21476 scope_die = context_die;
21477
21478 return scope_die;
21479 }
21480
21481 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21482
21483 static inline int
21484 local_scope_p (dw_die_ref context_die)
21485 {
21486 for (; context_die; context_die = context_die->die_parent)
21487 if (context_die->die_tag == DW_TAG_inlined_subroutine
21488 || context_die->die_tag == DW_TAG_subprogram)
21489 return 1;
21490
21491 return 0;
21492 }
21493
21494 /* Returns nonzero if CONTEXT_DIE is a class. */
21495
21496 static inline int
21497 class_scope_p (dw_die_ref context_die)
21498 {
21499 return (context_die
21500 && (context_die->die_tag == DW_TAG_structure_type
21501 || context_die->die_tag == DW_TAG_class_type
21502 || context_die->die_tag == DW_TAG_interface_type
21503 || context_die->die_tag == DW_TAG_union_type));
21504 }
21505
21506 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21507 whether or not to treat a DIE in this context as a declaration. */
21508
21509 static inline int
21510 class_or_namespace_scope_p (dw_die_ref context_die)
21511 {
21512 return (class_scope_p (context_die)
21513 || (context_die && context_die->die_tag == DW_TAG_namespace));
21514 }
21515
21516 /* Many forms of DIEs require a "type description" attribute. This
21517 routine locates the proper "type descriptor" die for the type given
21518 by 'type' plus any additional qualifiers given by 'cv_quals', and
21519 adds a DW_AT_type attribute below the given die. */
21520
21521 static void
21522 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21523 bool reverse, dw_die_ref context_die)
21524 {
21525 enum tree_code code = TREE_CODE (type);
21526 dw_die_ref type_die = NULL;
21527
21528 /* ??? If this type is an unnamed subrange type of an integral, floating-point
21529 or fixed-point type, use the inner type. This is because we have no
21530 support for unnamed types in base_type_die. This can happen if this is
21531 an Ada subrange type. The correct solution is to emit a subrange type DIE. */
21532 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21533 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21534 type = TREE_TYPE (type), code = TREE_CODE (type);
21535
21536 if (code == ERROR_MARK
21537 /* Handle a special case. For functions whose return type is void, we
21538 generate *no* type attribute. (Note that no object may have type
21539 `void', so this only applies to function return types). */
21540 || code == VOID_TYPE)
21541 return;
21542
21543 type_die = modified_type_die (type,
21544 cv_quals | TYPE_QUALS (type),
21545 reverse,
21546 context_die);
21547
21548 if (type_die != NULL)
21549 add_AT_die_ref (object_die, DW_AT_type, type_die);
21550 }
21551
21552 /* Given an object die, add the calling convention attribute for the
21553 function call type. */
21554 static void
21555 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21556 {
21557 enum dwarf_calling_convention value = DW_CC_normal;
21558
21559 value = ((enum dwarf_calling_convention)
21560 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21561
21562 if (is_fortran ()
21563 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21564 {
21565 /* DWARF 2 doesn't provide a way to identify a program's source-level
21566 entry point. DW_AT_calling_convention attributes are only meant
21567 to describe functions' calling conventions. However, lacking a
21568 better way to signal the Fortran main program, we used this for
21569 a long time, following existing custom. Now, DWARF 4 has
21570 DW_AT_main_subprogram, which we add below, but some tools still
21571 rely on the old way, which we thus keep. */
21572 value = DW_CC_program;
21573
21574 if (dwarf_version >= 4 || !dwarf_strict)
21575 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21576 }
21577
21578 /* Only add the attribute if the backend requests it, and
21579 the value is not DW_CC_normal. */
21580 if (value && (value != DW_CC_normal))
21581 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21582 }
21583
21584 /* Given a tree pointer to a struct, class, union, or enum type node, return
21585 a pointer to the (string) tag name for the given type, or zero if the type
21586 was declared without a tag. */
21587
21588 static const char *
21589 type_tag (const_tree type)
21590 {
21591 const char *name = 0;
21592
21593 if (TYPE_NAME (type) != 0)
21594 {
21595 tree t = 0;
21596
21597 /* Find the IDENTIFIER_NODE for the type name. */
21598 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21599 && !TYPE_NAMELESS (type))
21600 t = TYPE_NAME (type);
21601
21602 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21603 a TYPE_DECL node, regardless of whether or not a `typedef' was
21604 involved. */
21605 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21606 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21607 {
21608 /* We want to be extra verbose. Don't call dwarf_name if
21609 DECL_NAME isn't set. The default hook for decl_printable_name
21610 doesn't like that, and in this context it's correct to return
21611 0, instead of "<anonymous>" or the like. */
21612 if (DECL_NAME (TYPE_NAME (type))
21613 && !DECL_NAMELESS (TYPE_NAME (type)))
21614 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21615 }
21616
21617 /* Now get the name as a string, or invent one. */
21618 if (!name && t != 0)
21619 name = IDENTIFIER_POINTER (t);
21620 }
21621
21622 return (name == 0 || *name == '\0') ? 0 : name;
21623 }
21624
21625 /* Return the type associated with a data member, making a special check
21626 for bit-field types. */
21627
21628 static inline tree
21629 member_declared_type (const_tree member)
21630 {
21631 return (DECL_BIT_FIELD_TYPE (member)
21632 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21633 }
21634
21635 /* Get the decl's label, as described by its RTL. This may be different
21636 from the DECL_NAME name used in the source file. */
21637
21638 #if 0
21639 static const char *
21640 decl_start_label (tree decl)
21641 {
21642 rtx x;
21643 const char *fnname;
21644
21645 x = DECL_RTL (decl);
21646 gcc_assert (MEM_P (x));
21647
21648 x = XEXP (x, 0);
21649 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21650
21651 fnname = XSTR (x, 0);
21652 return fnname;
21653 }
21654 #endif
21655 \f
21656 /* For variable-length arrays whose DIE was generated previously but may
21657 be incomplete due to missing subscript info, fill in the subscript
21658 info. Return TRUE if this is one of those cases. */
21659 static bool
21660 fill_variable_array_bounds (tree type)
21661 {
21662 if (TREE_ASM_WRITTEN (type)
21663 && TREE_CODE (type) == ARRAY_TYPE
21664 && variably_modified_type_p (type, NULL))
21665 {
21666 dw_die_ref array_die = lookup_type_die (type);
21667 if (!array_die)
21668 return false;
21669 add_subscript_info (array_die, type, !is_ada ());
21670 return true;
21671 }
21672 return false;
21673 }
21674
21675 /* These routines generate the internal representation of the DIE's for
21676 the compilation unit. Debugging information is collected by walking
21677 the declaration trees passed in from dwarf2out_decl(). */
21678
21679 static void
21680 gen_array_type_die (tree type, dw_die_ref context_die)
21681 {
21682 dw_die_ref array_die;
21683
21684 /* GNU compilers represent multidimensional array types as sequences of one
21685 dimensional array types whose element types are themselves array types.
21686 We sometimes squish that down to a single array_type DIE with multiple
21687 subscripts in the Dwarf debugging info. The draft Dwarf specification
21688 says that we are allowed to do this kind of compression in C, because
21689 there is no difference between an array of arrays and a multidimensional
21690 array. We don't do this for Ada to remain as close as possible to the
21691 actual representation, which is especially important given the language's
21692 flexibility with respect to arrays of variable size. */
21693
21694 bool collapse_nested_arrays = !is_ada ();
21695
21696 if (fill_variable_array_bounds (type))
21697 return;
21698
21699 dw_die_ref scope_die = scope_die_for (type, context_die);
21700 tree element_type;
21701
21702 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21703 DW_TAG_string_type doesn't have DW_AT_type attribute). */
21704 if (TYPE_STRING_FLAG (type)
21705 && TREE_CODE (type) == ARRAY_TYPE
21706 && is_fortran ()
21707 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21708 {
21709 HOST_WIDE_INT size;
21710
21711 array_die = new_die (DW_TAG_string_type, scope_die, type);
21712 add_name_attribute (array_die, type_tag (type));
21713 equate_type_number_to_die (type, array_die);
21714 size = int_size_in_bytes (type);
21715 if (size >= 0)
21716 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21717 /* ??? We can't annotate types late, but for LTO we may not
21718 generate a location early either (gfortran.dg/save_6.f90). */
21719 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21720 && TYPE_DOMAIN (type) != NULL_TREE
21721 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21722 {
21723 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21724 tree rszdecl = szdecl;
21725
21726 size = int_size_in_bytes (TREE_TYPE (szdecl));
21727 if (!DECL_P (szdecl))
21728 {
21729 if (TREE_CODE (szdecl) == INDIRECT_REF
21730 && DECL_P (TREE_OPERAND (szdecl, 0)))
21731 {
21732 rszdecl = TREE_OPERAND (szdecl, 0);
21733 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21734 != DWARF2_ADDR_SIZE)
21735 size = 0;
21736 }
21737 else
21738 size = 0;
21739 }
21740 if (size > 0)
21741 {
21742 dw_loc_list_ref loc
21743 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21744 NULL);
21745 if (loc)
21746 {
21747 add_AT_location_description (array_die, DW_AT_string_length,
21748 loc);
21749 if (size != DWARF2_ADDR_SIZE)
21750 add_AT_unsigned (array_die, dwarf_version >= 5
21751 ? DW_AT_string_length_byte_size
21752 : DW_AT_byte_size, size);
21753 }
21754 }
21755 }
21756 return;
21757 }
21758
21759 array_die = new_die (DW_TAG_array_type, scope_die, type);
21760 add_name_attribute (array_die, type_tag (type));
21761 equate_type_number_to_die (type, array_die);
21762
21763 if (TREE_CODE (type) == VECTOR_TYPE)
21764 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21765
21766 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21767 if (is_fortran ()
21768 && TREE_CODE (type) == ARRAY_TYPE
21769 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21770 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21771 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21772
21773 #if 0
21774 /* We default the array ordering. Debuggers will probably do the right
21775 things even if DW_AT_ordering is not present. It's not even an issue
21776 until we start to get into multidimensional arrays anyway. If a debugger
21777 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21778 then we'll have to put the DW_AT_ordering attribute back in. (But if
21779 and when we find out that we need to put these in, we will only do so
21780 for multidimensional arrays.) */
21781 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21782 #endif
21783
21784 if (TREE_CODE (type) == VECTOR_TYPE)
21785 {
21786 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21787 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21788 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21789 add_bound_info (subrange_die, DW_AT_upper_bound,
21790 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21791 }
21792 else
21793 add_subscript_info (array_die, type, collapse_nested_arrays);
21794
21795 /* Add representation of the type of the elements of this array type and
21796 emit the corresponding DIE if we haven't done it already. */
21797 element_type = TREE_TYPE (type);
21798 if (collapse_nested_arrays)
21799 while (TREE_CODE (element_type) == ARRAY_TYPE)
21800 {
21801 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21802 break;
21803 element_type = TREE_TYPE (element_type);
21804 }
21805
21806 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21807 TREE_CODE (type) == ARRAY_TYPE
21808 && TYPE_REVERSE_STORAGE_ORDER (type),
21809 context_die);
21810
21811 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21812 if (TYPE_ARTIFICIAL (type))
21813 add_AT_flag (array_die, DW_AT_artificial, 1);
21814
21815 if (get_AT (array_die, DW_AT_name))
21816 add_pubtype (type, array_die);
21817
21818 add_alignment_attribute (array_die, type);
21819 }
21820
21821 /* This routine generates a DIE for an array with a hidden descriptor;
21822 the details are filled into *info by a langhook. */
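/* Fortran allocatable and pointer arrays are the typical case here: the
   language hook fills *info with expressions that fetch the bounds and
   the data pointer from the run-time descriptor, which become
   DW_AT_data_location, DW_AT_allocated/DW_AT_associated and the
   per-dimension bound attributes emitted below.  */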
21823
21824 static void
21825 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21826 dw_die_ref context_die)
21827 {
21828 const dw_die_ref scope_die = scope_die_for (type, context_die);
21829 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21830 struct loc_descr_context context = { type, info->base_decl, NULL,
21831 false, false };
21832 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21833 int dim;
21834
21835 add_name_attribute (array_die, type_tag (type));
21836 equate_type_number_to_die (type, array_die);
21837
21838 if (info->ndimensions > 1)
21839 switch (info->ordering)
21840 {
21841 case array_descr_ordering_row_major:
21842 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21843 break;
21844 case array_descr_ordering_column_major:
21845 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21846 break;
21847 default:
21848 break;
21849 }
21850
21851 if (dwarf_version >= 3 || !dwarf_strict)
21852 {
21853 if (info->data_location)
21854 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
21855 dw_scalar_form_exprloc, &context);
21856 if (info->associated)
21857 add_scalar_info (array_die, DW_AT_associated, info->associated,
21858 dw_scalar_form_constant
21859 | dw_scalar_form_exprloc
21860 | dw_scalar_form_reference, &context);
21861 if (info->allocated)
21862 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
21863 dw_scalar_form_constant
21864 | dw_scalar_form_exprloc
21865 | dw_scalar_form_reference, &context);
21866 if (info->stride)
21867 {
21868 const enum dwarf_attribute attr
21869 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
21870 const int forms
21871 = (info->stride_in_bits)
21872 ? dw_scalar_form_constant
21873 : (dw_scalar_form_constant
21874 | dw_scalar_form_exprloc
21875 | dw_scalar_form_reference);
21876
21877 add_scalar_info (array_die, attr, info->stride, forms, &context);
21878 }
21879 }
21880 if (dwarf_version >= 5)
21881 {
21882 if (info->rank)
21883 {
21884 add_scalar_info (array_die, DW_AT_rank, info->rank,
21885 dw_scalar_form_constant
21886 | dw_scalar_form_exprloc, &context);
21887 subrange_tag = DW_TAG_generic_subrange;
21888 context.placeholder_arg = true;
21889 }
21890 }
21891
21892 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21893
21894 for (dim = 0; dim < info->ndimensions; dim++)
21895 {
21896 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
21897
21898 if (info->dimen[dim].bounds_type)
21899 add_type_attribute (subrange_die,
21900 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
21901 false, context_die);
21902 if (info->dimen[dim].lower_bound)
21903 add_bound_info (subrange_die, DW_AT_lower_bound,
21904 info->dimen[dim].lower_bound, &context);
21905 if (info->dimen[dim].upper_bound)
21906 add_bound_info (subrange_die, DW_AT_upper_bound,
21907 info->dimen[dim].upper_bound, &context);
21908 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
21909 add_scalar_info (subrange_die, DW_AT_byte_stride,
21910 info->dimen[dim].stride,
21911 dw_scalar_form_constant
21912 | dw_scalar_form_exprloc
21913 | dw_scalar_form_reference,
21914 &context);
21915 }
21916
21917 gen_type_die (info->element_type, context_die);
21918 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
21919 TREE_CODE (type) == ARRAY_TYPE
21920 && TYPE_REVERSE_STORAGE_ORDER (type),
21921 context_die);
21922
21923 if (get_AT (array_die, DW_AT_name))
21924 add_pubtype (type, array_die);
21925
21926 add_alignment_attribute (array_die, type);
21927 }
21928
21929 #if 0
21930 static void
21931 gen_entry_point_die (tree decl, dw_die_ref context_die)
21932 {
21933 tree origin = decl_ultimate_origin (decl);
21934 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
21935
21936 if (origin != NULL)
21937 add_abstract_origin_attribute (decl_die, origin);
21938 else
21939 {
21940 add_name_and_src_coords_attributes (decl_die, decl);
21941 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
21942 TYPE_UNQUALIFIED, false, context_die);
21943 }
21944
21945 if (DECL_ABSTRACT_P (decl))
21946 equate_decl_number_to_die (decl, decl_die);
21947 else
21948 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
21949 }
21950 #endif
21951
21952 /* Walk through the list of incomplete types again, trying once more to
21953 emit full debugging info for them. */
21954
21955 static void
21956 retry_incomplete_types (void)
21957 {
21958 set_early_dwarf s;
21959 int i;
21960
21961 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
21962 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
21963 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
21964 vec_safe_truncate (incomplete_types, 0);
21965 }
21966
21967 /* Determine what tag to use for a record type. */
21968
21969 static enum dwarf_tag
21970 record_type_tag (tree type)
21971 {
21972 if (! lang_hooks.types.classify_record)
21973 return DW_TAG_structure_type;
21974
21975 switch (lang_hooks.types.classify_record (type))
21976 {
21977 case RECORD_IS_STRUCT:
21978 return DW_TAG_structure_type;
21979
21980 case RECORD_IS_CLASS:
21981 return DW_TAG_class_type;
21982
21983 case RECORD_IS_INTERFACE:
21984 if (dwarf_version >= 3 || !dwarf_strict)
21985 return DW_TAG_interface_type;
21986 return DW_TAG_structure_type;
21987
21988 default:
21989 gcc_unreachable ();
21990 }
21991 }
21992
21993 /* Generate a DIE to represent an enumeration type. Note that these DIEs
21994 include all of the information about the enumeration values also. Each
21995 enumerated type name/value is listed as a child of the enumerated type
21996 DIE. */
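/* For example, `enum e { A = 1, B = 2 };' produces one
   DW_TAG_enumeration_type DIE with two DW_TAG_enumerator children
   whose DW_AT_const_value attributes are 1 and 2.  */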
21997
21998 static dw_die_ref
21999 gen_enumeration_type_die (tree type, dw_die_ref context_die)
22000 {
22001 dw_die_ref type_die = lookup_type_die (type);
22002 dw_die_ref orig_type_die = type_die;
22003
22004 if (type_die == NULL)
22005 {
22006 type_die = new_die (DW_TAG_enumeration_type,
22007 scope_die_for (type, context_die), type);
22008 equate_type_number_to_die (type, type_die);
22009 add_name_attribute (type_die, type_tag (type));
22010 if ((dwarf_version >= 4 || !dwarf_strict)
22011 && ENUM_IS_SCOPED (type))
22012 add_AT_flag (type_die, DW_AT_enum_class, 1);
22013 if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
22014 add_AT_flag (type_die, DW_AT_declaration, 1);
22015 if (!dwarf_strict)
22016 add_AT_unsigned (type_die, DW_AT_encoding,
22017 TYPE_UNSIGNED (type)
22018 ? DW_ATE_unsigned
22019 : DW_ATE_signed);
22020 }
22021 else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
22022 return type_die;
22023 else
22024 remove_AT (type_die, DW_AT_declaration);
22025
22026 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
22027 given enum type is incomplete, do not generate the DW_AT_byte_size
22028 attribute or the DW_AT_element_list attribute. */
22029 if (TYPE_SIZE (type))
22030 {
22031 tree link;
22032
22033 if (!ENUM_IS_OPAQUE (type))
22034 TREE_ASM_WRITTEN (type) = 1;
22035 if (!orig_type_die || !get_AT (type_die, DW_AT_byte_size))
22036 add_byte_size_attribute (type_die, type);
22037 if (!orig_type_die || !get_AT (type_die, DW_AT_alignment))
22038 add_alignment_attribute (type_die, type);
22039 if ((dwarf_version >= 3 || !dwarf_strict)
22040 && (!orig_type_die || !get_AT (type_die, DW_AT_type)))
22041 {
22042 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
22043 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
22044 context_die);
22045 }
22046 if (TYPE_STUB_DECL (type) != NULL_TREE)
22047 {
22048 if (!orig_type_die || !get_AT (type_die, DW_AT_decl_file))
22049 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
22050 if (!orig_type_die || !get_AT (type_die, DW_AT_accessibility))
22051 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
22052 }
22053
22054 /* If the first reference to this type was as the return type of an
22055 inline function, then it may not have a parent. Fix this now. */
22056 if (type_die->die_parent == NULL)
22057 add_child_die (scope_die_for (type, context_die), type_die);
22058
22059 for (link = TYPE_VALUES (type);
22060 link != NULL; link = TREE_CHAIN (link))
22061 {
22062 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
22063 tree value = TREE_VALUE (link);
22064
22065 gcc_assert (!ENUM_IS_OPAQUE (type));
22066 add_name_attribute (enum_die,
22067 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
22068
22069 if (TREE_CODE (value) == CONST_DECL)
22070 value = DECL_INITIAL (value);
22071
22072 if (simple_type_size_in_bits (TREE_TYPE (value))
22073 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
22074 {
22075 /* For constant forms created by add_AT_unsigned, DWARF
22076 consumers (GDB, elfutils, etc.) always zero extend
22077 the value. Only when the actual value is negative
22078 do we need to use add_AT_int to generate a constant
22079 form that can represent negative values. */
22080 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
22081 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
22082 add_AT_unsigned (enum_die, DW_AT_const_value,
22083 (unsigned HOST_WIDE_INT) val);
22084 else
22085 add_AT_int (enum_die, DW_AT_const_value, val);
22086 }
22087 else
22088 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
22089 that here. TODO: This should be re-worked to use correct
22090 signed/unsigned double tags for all cases. */
22091 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
22092 }
22093
22094 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22095 if (TYPE_ARTIFICIAL (type)
22096 && (!orig_type_die || !get_AT (type_die, DW_AT_artificial)))
22097 add_AT_flag (type_die, DW_AT_artificial, 1);
22098 }
22099 else
22100 add_AT_flag (type_die, DW_AT_declaration, 1);
22101
22102 add_pubtype (type, type_die);
22103
22104 return type_die;
22105 }
22106
22107 /* Generate a DIE to represent either a real live formal parameter decl or to
22108 represent just the type of some formal parameter position in some function
22109 type.
22110
22111 Note that this routine is a bit unusual because its argument may be a
22112 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
22113 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
22114 node. If it's the former then this function is being called to output a
22115 DIE to represent a formal parameter object (or some inlining thereof). If
22116 it's the latter, then this function is only being called to output a
22117 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
22118 argument type of some subprogram type.
22119 If EMIT_NAME_P is true, name and source coordinate attributes
22120 are emitted. */
22121
22122 static dw_die_ref
22123 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
22124 dw_die_ref context_die)
22125 {
22126 tree node_or_origin = node ? node : origin;
22127 tree ultimate_origin;
22128 dw_die_ref parm_die = NULL;
22129
22130 if (DECL_P (node_or_origin))
22131 {
22132 parm_die = lookup_decl_die (node);
22133
22134 /* If the contexts differ, we may not be talking about the same
22135 thing.
22136 ??? When in LTO the DIE parent is the "abstract" copy and the
22137 context_die is the specification "copy". But this whole block
22138 should eventually no longer be needed. */
22139 if (parm_die && parm_die->die_parent != context_die && !in_lto_p)
22140 {
22141 if (!DECL_ABSTRACT_P (node))
22142 {
22143 /* This can happen when creating an inlined instance, in
22144 which case we need to create a new DIE that will get
22145 annotated with DW_AT_abstract_origin. */
22146 parm_die = NULL;
22147 }
22148 else
22149 gcc_unreachable ();
22150 }
22151
22152 if (parm_die && parm_die->die_parent == NULL)
22153 {
22154 /* Check that parm_die already has the right attributes that
22155 we would have added below. If any attributes are
22156 missing, fall through to add them. */
22157 if (! DECL_ABSTRACT_P (node_or_origin)
22158 && !get_AT (parm_die, DW_AT_location)
22159 && !get_AT (parm_die, DW_AT_const_value))
22160 /* We are missing location info, and are about to add it. */
22161 ;
22162 else
22163 {
22164 add_child_die (context_die, parm_die);
22165 return parm_die;
22166 }
22167 }
22168 }
22169
22170 /* If we have a previously generated DIE, use it, unless this is a
22171 concrete instance (origin != NULL), in which case we need a new
22172 DIE with a corresponding DW_AT_abstract_origin. */
22173 bool reusing_die;
22174 if (parm_die && origin == NULL)
22175 reusing_die = true;
22176 else
22177 {
22178 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22179 reusing_die = false;
22180 }
22181
22182 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22183 {
22184 case tcc_declaration:
22185 ultimate_origin = decl_ultimate_origin (node_or_origin);
22186 if (node || ultimate_origin)
22187 origin = ultimate_origin;
22188
22189 if (reusing_die)
22190 goto add_location;
22191
22192 if (origin != NULL)
22193 add_abstract_origin_attribute (parm_die, origin);
22194 else if (emit_name_p)
22195 add_name_and_src_coords_attributes (parm_die, node);
22196 if (origin == NULL
22197 || (! DECL_ABSTRACT_P (node_or_origin)
22198 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22199 decl_function_context
22200 (node_or_origin))))
22201 {
22202 tree type = TREE_TYPE (node_or_origin);
22203 if (decl_by_reference_p (node_or_origin))
22204 add_type_attribute (parm_die, TREE_TYPE (type),
22205 TYPE_UNQUALIFIED,
22206 false, context_die);
22207 else
22208 add_type_attribute (parm_die, type,
22209 decl_quals (node_or_origin),
22210 false, context_die);
22211 }
22212 if (origin == NULL && DECL_ARTIFICIAL (node))
22213 add_AT_flag (parm_die, DW_AT_artificial, 1);
22214 add_location:
22215 if (node && node != origin)
22216 equate_decl_number_to_die (node, parm_die);
22217 if (! DECL_ABSTRACT_P (node_or_origin))
22218 add_location_or_const_value_attribute (parm_die, node_or_origin,
22219 node == NULL);
22220
22221 break;
22222
22223 case tcc_type:
22224 /* We were called with some kind of a ..._TYPE node. */
22225 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22226 context_die);
22227 break;
22228
22229 default:
22230 gcc_unreachable ();
22231 }
22232
22233 return parm_die;
22234 }
22235
22236 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22237 children DW_TAG_formal_parameter DIEs representing the arguments of the
22238 parameter pack.
22239
22240 PARM_PACK must be a function parameter pack.
22241 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22242 must point to the subsequent arguments of the function PACK_ARG belongs to.
22243 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22244 If NEXT_ARG is non NULL, *NEXT_ARG is set to the function argument
22245 following the last one for which a DIE was generated. */
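/* As an illustration, for a C++ instantiation of
   `template<typename... T> void f (T... args)' the pack `args' is
   represented by one DW_TAG_GNU_formal_parameter_pack DIE whose
   children are the DW_TAG_formal_parameter DIEs of the expanded
   arguments.  */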
22246
22247 static dw_die_ref
22248 gen_formal_parameter_pack_die (tree parm_pack,
22249 tree pack_arg,
22250 dw_die_ref subr_die,
22251 tree *next_arg)
22252 {
22253 tree arg;
22254 dw_die_ref parm_pack_die;
22255
22256 gcc_assert (parm_pack
22257 && lang_hooks.function_parameter_pack_p (parm_pack)
22258 && subr_die);
22259
22260 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22261 add_src_coords_attributes (parm_pack_die, parm_pack);
22262
22263 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22264 {
22265 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22266 parm_pack))
22267 break;
22268 gen_formal_parameter_die (arg, NULL,
22269 false /* Don't emit name attribute. */,
22270 parm_pack_die);
22271 }
22272 if (next_arg)
22273 *next_arg = arg;
22274 return parm_pack_die;
22275 }
22276
22277 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22278 at the end of an (ANSI prototyped) formal parameters list. */
22279
22280 static void
22281 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22282 {
22283 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22284 }
22285
22286 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22287 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22288 parameters as specified in some function type specification (except for
22289 those which appear as part of a function *definition*). */
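/* For a function type such as `int (*) (int, char)' this yields two
   nameless DW_TAG_formal_parameter children; for a varargs type like
   `int (*) (int, ...)' a trailing DW_TAG_unspecified_parameters DIE is
   added as well.  */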
22290
22291 static void
22292 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22293 {
22294 tree link;
22295 tree formal_type = NULL;
22296 tree first_parm_type;
22297 tree arg;
22298
22299 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22300 {
22301 arg = DECL_ARGUMENTS (function_or_method_type);
22302 function_or_method_type = TREE_TYPE (function_or_method_type);
22303 }
22304 else
22305 arg = NULL_TREE;
22306
22307 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22308
22309 /* Make our first pass over the list of formal parameter types and output a
22310 DW_TAG_formal_parameter DIE for each one. */
22311 for (link = first_parm_type; link; )
22312 {
22313 dw_die_ref parm_die;
22314
22315 formal_type = TREE_VALUE (link);
22316 if (formal_type == void_type_node)
22317 break;
22318
22319 /* Output a (nameless) DIE to represent the formal parameter itself. */
22320 parm_die = gen_formal_parameter_die (formal_type, NULL,
22321 true /* Emit name attribute. */,
22322 context_die);
22323 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22324 && link == first_parm_type)
22325 {
22326 add_AT_flag (parm_die, DW_AT_artificial, 1);
22327 if (dwarf_version >= 3 || !dwarf_strict)
22328 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22329 }
22330 else if (arg && DECL_ARTIFICIAL (arg))
22331 add_AT_flag (parm_die, DW_AT_artificial, 1);
22332
22333 link = TREE_CHAIN (link);
22334 if (arg)
22335 arg = DECL_CHAIN (arg);
22336 }
22337
22338 /* If this function type has an ellipsis, add a
22339 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22340 if (formal_type != void_type_node)
22341 gen_unspecified_parameters_die (function_or_method_type, context_die);
22342
22343 /* Make our second (and final) pass over the list of formal parameter types
22344 and output DIEs to represent those types (as necessary). */
22345 for (link = TYPE_ARG_TYPES (function_or_method_type);
22346 link && TREE_VALUE (link);
22347 link = TREE_CHAIN (link))
22348 gen_type_die (TREE_VALUE (link), context_die);
22349 }
22350
22351 /* We want to generate the DIE for TYPE so that we can generate the
22352 die for MEMBER, which has been defined; we will need to refer back
22353 to the member declaration nested within TYPE. If we're trying to
22354 generate minimal debug info for TYPE, processing TYPE won't do the
22355 trick; we need to attach the member declaration by hand. */
22356
22357 static void
22358 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22359 {
22360 gen_type_die (type, context_die);
22361
22362 /* If we're trying to avoid duplicate debug info, we may not have
22363 emitted the member decl for this function. Emit it now. */
22364 if (TYPE_STUB_DECL (type)
22365 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22366 && ! lookup_decl_die (member))
22367 {
22368 dw_die_ref type_die;
22369 gcc_assert (!decl_ultimate_origin (member));
22370
22371 type_die = lookup_type_die_strip_naming_typedef (type);
22372 if (TREE_CODE (member) == FUNCTION_DECL)
22373 gen_subprogram_die (member, type_die);
22374 else if (TREE_CODE (member) == FIELD_DECL)
22375 {
22376 /* Ignore the nameless fields that are used to skip bits but handle
22377 C++ anonymous unions and structs. */
22378 if (DECL_NAME (member) != NULL_TREE
22379 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22380 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22381 {
22382 struct vlr_context vlr_ctx = {
22383 DECL_CONTEXT (member), /* struct_type */
22384 NULL_TREE /* variant_part_offset */
22385 };
22386 gen_type_die (member_declared_type (member), type_die);
22387 gen_field_die (member, &vlr_ctx, type_die);
22388 }
22389 }
22390 else
22391 gen_variable_die (member, NULL_TREE, type_die);
22392 }
22393 }
22394 \f
22395 /* Forward declare these functions, because they are mutually recursive
22396 with their set_block_* pairing functions. */
22397 static void set_decl_origin_self (tree);
22398
22399 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22400 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22401 that it points to the node itself, thus indicating that the node is its
22402 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22403 the given node is NULL, recursively descend the decl/block tree which
22404 it is the root of, and for each other ..._DECL or BLOCK node contained
22405 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22406 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22407 values to point to themselves. */
22408
22409 static void
22410 set_block_origin_self (tree stmt)
22411 {
22412 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22413 {
22414 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22415
22416 {
22417 tree local_decl;
22418
22419 for (local_decl = BLOCK_VARS (stmt);
22420 local_decl != NULL_TREE;
22421 local_decl = DECL_CHAIN (local_decl))
22422 /* Do not recurse on nested functions since the inlining status
22423 of parent and child can be different as per the DWARF spec. */
22424 if (TREE_CODE (local_decl) != FUNCTION_DECL
22425 && !DECL_EXTERNAL (local_decl))
22426 set_decl_origin_self (local_decl);
22427 }
22428
22429 {
22430 tree subblock;
22431
22432 for (subblock = BLOCK_SUBBLOCKS (stmt);
22433 subblock != NULL_TREE;
22434 subblock = BLOCK_CHAIN (subblock))
22435 set_block_origin_self (subblock); /* Recurse. */
22436 }
22437 }
22438 }
22439
22440 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22441 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22442 node so that it points to the node itself, thus indicating that the
22443 node represents its own (abstract) origin. Additionally, if the
22444 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22445 the decl/block tree of which the given node is the root, and for
22446 each other ..._DECL or BLOCK node contained therein whose
22447 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22448 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22449 point to themselves. */
22450
22451 static void
22452 set_decl_origin_self (tree decl)
22453 {
22454 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22455 {
22456 DECL_ABSTRACT_ORIGIN (decl) = decl;
22457 if (TREE_CODE (decl) == FUNCTION_DECL)
22458 {
22459 tree arg;
22460
22461 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22462 DECL_ABSTRACT_ORIGIN (arg) = arg;
22463 if (DECL_INITIAL (decl) != NULL_TREE
22464 && DECL_INITIAL (decl) != error_mark_node)
22465 set_block_origin_self (DECL_INITIAL (decl));
22466 }
22467 }
22468 }
22469 \f
22470 /* Mark the early DIE for DECL as the abstract instance. */
22471
22472 static void
22473 dwarf2out_abstract_function (tree decl)
22474 {
22475 dw_die_ref old_die;
22476
22477 /* Make sure we have the actual abstract inline, not a clone. */
22478 decl = DECL_ORIGIN (decl);
22479
22480 if (DECL_IGNORED_P (decl))
22481 return;
22482
22483 /* In LTO we're all set. We already created abstract instances
22484 early and we want to avoid creating a concrete instance of that
22485 if we don't output it. */
22486 if (in_lto_p)
22487 return;
22488
22489 old_die = lookup_decl_die (decl);
22490 gcc_assert (old_die != NULL);
22491 if (get_AT (old_die, DW_AT_inline))
22492 /* We've already generated the abstract instance. */
22493 return;
22494
22495 /* Go ahead and put DW_AT_inline on the DIE. */
22496 if (DECL_DECLARED_INLINE_P (decl))
22497 {
22498 if (cgraph_function_possibly_inlined_p (decl))
22499 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22500 else
22501 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22502 }
22503 else
22504 {
22505 if (cgraph_function_possibly_inlined_p (decl))
22506 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22507 else
22508 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22509 }
22510
22511 if (DECL_DECLARED_INLINE_P (decl)
22512 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22513 add_AT_flag (old_die, DW_AT_artificial, 1);
22514
22515 set_decl_origin_self (decl);
22516 }
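/* As an illustrative sketch of what dwarf2out_abstract_function above
   arranges (readelf-style; the exact form depends on the target and
   DWARF version), for "inline int foo (int x) { ... }" that the
   inliner did inline somewhere, the abstract instance ends up looking
   roughly like:

       DW_TAG_subprogram
         DW_AT_name   : foo
         DW_AT_inline : DW_INL_declared_inlined
         DW_TAG_formal_parameter
           DW_AT_name : x

   Concrete and inlined instances then refer back to this DIE through
   DW_AT_abstract_origin rather than repeating its attributes.  */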
22517
22518 /* Helper function of premark_used_types() which gets called through
22519 htab_traverse.
22520
22521 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22522 marked as unused by prune_unused_types. */
22523
22524 bool
22525 premark_used_types_helper (tree const &type, void *)
22526 {
22527 dw_die_ref die;
22528
22529 die = lookup_type_die (type);
22530 if (die != NULL)
22531 die->die_perennial_p = 1;
22532 return true;
22533 }
22534
22535 /* Helper function of premark_types_used_by_global_vars which gets called
22536 through htab_traverse.
22537
22538 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22539 marked as unused by prune_unused_types. The DIE of the type is marked
22540 only if the global variable using the type will actually be emitted. */
22541
22542 int
22543 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22544 void *)
22545 {
22546 struct types_used_by_vars_entry *entry;
22547 dw_die_ref die;
22548
22549 entry = (struct types_used_by_vars_entry *) *slot;
22550 gcc_assert (entry->type != NULL
22551 && entry->var_decl != NULL);
22552 die = lookup_type_die (entry->type);
22553 if (die)
22554 {
22555 /* Ask cgraph if the global variable really is to be emitted.
22556 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22557 varpool_node *node = varpool_node::get (entry->var_decl);
22558 if (node && node->definition)
22559 {
22560 die->die_perennial_p = 1;
22561 /* Keep the parent DIEs as well. */
22562 while ((die = die->die_parent) && die->die_perennial_p == 0)
22563 die->die_perennial_p = 1;
22564 }
22565 }
22566 return 1;
22567 }
22568
22569 /* Mark all members of used_types_hash as perennial. */
22570
22571 static void
22572 premark_used_types (struct function *fun)
22573 {
22574 if (fun && fun->used_types_hash)
22575 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22576 }
22577
22578 /* Mark all members of types_used_by_vars_entry as perennial. */
22579
22580 static void
22581 premark_types_used_by_global_vars (void)
22582 {
22583 if (types_used_by_vars_hash)
22584 types_used_by_vars_hash
22585 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22586 }
22587
22588 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22589 for CA_LOC call arg loc node. */
22590
22591 static dw_die_ref
22592 gen_call_site_die (tree decl, dw_die_ref subr_die,
22593 struct call_arg_loc_node *ca_loc)
22594 {
22595 dw_die_ref stmt_die = NULL, die;
22596 tree block = ca_loc->block;
22597
22598 while (block
22599 && block != DECL_INITIAL (decl)
22600 && TREE_CODE (block) == BLOCK)
22601 {
22602 stmt_die = lookup_block_die (block);
22603 if (stmt_die)
22604 break;
22605 block = BLOCK_SUPERCONTEXT (block);
22606 }
22607 if (stmt_die == NULL)
22608 stmt_die = subr_die;
22609 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22610 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22611 if (ca_loc->tail_call_p)
22612 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22613 if (ca_loc->symbol_ref)
22614 {
22615 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22616 if (tdie)
22617 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22618 else
22619 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22620 false);
22621 }
22622 return die;
22623 }
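/* A sketch of the DIE gen_call_site_die produces for a direct call
   (DWARF 5 names; with older versions the GNU equivalents such as
   DW_TAG_GNU_call_site are used instead):

       DW_TAG_call_site
         DW_AT_call_return_pc : <label placed after the call insn>
         DW_AT_call_origin    : <reference to the callee's DIE, if known>
         DW_AT_call_tail_call : 1   (only for tail calls)

   The DW_TAG_call_site_parameter children carrying DW_AT_call_value
   are added by the caller, gen_subprogram_die.  */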
22624
22625 /* Generate a DIE to represent a declared function (either file-scope or
22626 block-local). */
22627
22628 static void
22629 gen_subprogram_die (tree decl, dw_die_ref context_die)
22630 {
22631 tree origin = decl_ultimate_origin (decl);
22632 dw_die_ref subr_die;
22633 dw_die_ref old_die = lookup_decl_die (decl);
22634
22635 /* This function gets called multiple times for different stages of
22636 the debug process. For example, for func() in this code:
22637
22638 namespace S
22639 {
22640 void func() { ... }
22641 }
22642
22643 ...we get called 4 times. Twice in early debug and twice in
22644 late debug:
22645
22646 Early debug
22647 -----------
22648
22649 1. Once while generating func() within the namespace. This is
22650 the declaration. The declaration bit below is set, as the
22651 context is the namespace.
22652
22653 A new DIE will be generated with DW_AT_declaration set.
22654
22655 2. Once for func() itself. This is the specification. The
22656 declaration bit below is clear as the context is the CU.
22657
22658 We will use the cached DIE from (1) to create a new DIE with
22659 DW_AT_specification pointing to the declaration in (1).
22660
22661 Late debug via rest_of_handle_final()
22662 -------------------------------------
22663
22664 3. Once generating func() within the namespace. This is also the
22665 declaration, as in (1), but this time we will early exit below
22666 as we have a cached DIE and a declaration needs no additional
22667 annotations (no locations), as the source declaration line
22668 info is enough.
22669
22670 4. Once for func() itself. As in (2), this is the specification,
22671 but this time we will re-use the cached DIE, and just annotate
22672 it with the location information that should now be available.
22673
22674 For something without namespaces, but with abstract instances, we
22675 are also called multiple times:
22676
22677 class Base
22678 {
22679 public:
22680 Base (); // constructor declaration (1)
22681 };
22682
22683 Base::Base () { } // constructor specification (2)
22684
22685 Early debug
22686 -----------
22687
22688 1. Once for the Base() constructor by virtue of it being a
22689 member of the Base class. This is done via
22690 rest_of_type_compilation.
22691
22692 This is a declaration, so a new DIE will be created with
22693 DW_AT_declaration.
22694
22695 2. Once for the Base() constructor definition, but this time
22696 while generating the abstract instance of the base
22697 constructor (__base_ctor) which is being generated via early
22698 debug of reachable functions.
22699
22700 Even though we have a cached version of the declaration (1),
22701 we will create a DW_AT_specification of the declaration DIE
22702 in (1).
22703
22704 3. Once for the __base_ctor itself, but this time, we generate
22705 a DW_AT_abstract_origin version of the DW_AT_specification in
22706 (2).
22707
22708 Late debug via rest_of_handle_final
22709 -----------------------------------
22710
22711 4. One final time for the __base_ctor (which will have a cached
22712 DIE with DW_AT_abstract_origin created in (3)). This time,
22713 we will just annotate the location information now
22714 available.
22715 */
22716 int declaration = (current_function_decl != decl
22717 || class_or_namespace_scope_p (context_die));
22718
22719 /* A declaration that has been previously dumped needs no
22720 additional information. */
22721 if (old_die && declaration)
22722 return;
22723
22724 /* Now that the C++ front end lazily declares artificial member fns, we
22725 might need to retrofit the declaration into its class. */
22726 if (!declaration && !origin && !old_die
22727 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22728 && !class_or_namespace_scope_p (context_die)
22729 && debug_info_level > DINFO_LEVEL_TERSE)
22730 old_die = force_decl_die (decl);
22731
22732 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22733 if (origin != NULL)
22734 {
22735 gcc_assert (!declaration || local_scope_p (context_die));
22736
22737 /* Fixup die_parent for the abstract instance of a nested
22738 inline function. */
22739 if (old_die && old_die->die_parent == NULL)
22740 add_child_die (context_die, old_die);
22741
22742 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22743 {
22744 /* If we have a DW_AT_abstract_origin we have a working
22745 cached version. */
22746 subr_die = old_die;
22747 }
22748 else
22749 {
22750 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22751 add_abstract_origin_attribute (subr_die, origin);
22752 /* This is where the actual code for a cloned function is.
22753 Let's emit the linkage name attribute for it. This helps
22754 debuggers to, e.g., set breakpoints into
22755 constructors/destructors when the user asks "break
22756 K::K". */
22757 add_linkage_name (subr_die, decl);
22758 }
22759 }
22760 /* A cached copy, possibly from early dwarf generation. Reuse as
22761 much as possible. */
22762 else if (old_die)
22763 {
22764 if (!get_AT_flag (old_die, DW_AT_declaration)
22765 /* We can have a normal definition following an inline one in the
22766 case of redefinition of GNU C extern inlines.
22767 It seems reasonable to use AT_specification in this case. */
22768 && !get_AT (old_die, DW_AT_inline))
22769 {
22770 /* Detect and ignore this case, where we are trying to output
22771 something we have already output. */
22772 if (get_AT (old_die, DW_AT_low_pc)
22773 || get_AT (old_die, DW_AT_ranges))
22774 return;
22775
22776 /* If we have no location information, this must be a
22777 partially generated DIE from early dwarf generation.
22778 Fall through and generate it. */
22779 }
22780
22781 /* If the definition comes from the same place as the declaration,
22782 maybe use the old DIE. We always want the DIE for this function
22783 that has the *_pc attributes to be under comp_unit_die so the
22784 debugger can find it. We also need to do this for abstract
22785 instances of inlines, since the spec requires the out-of-line copy
22786 to have the same parent. For local class methods, this doesn't
22787 apply; we just use the old DIE. */
22788 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22789 struct dwarf_file_data * file_index = lookup_filename (s.file);
22790 if (((is_unit_die (old_die->die_parent)
22791 /* This condition fixes the inconsistency/ICE with the
22792 following Fortran test (or some derivative thereof) while
22793 building libgfortran:
22794
22795 module some_m
22796 contains
22797 logical function funky (FLAG)
22798 funky = .true.
22799 end function
22800 end module
22801 */
22802 || (old_die->die_parent
22803 && old_die->die_parent->die_tag == DW_TAG_module)
22804 || local_scope_p (old_die->die_parent)
22805 || context_die == NULL)
22806 && (DECL_ARTIFICIAL (decl)
22807 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22808 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22809 == (unsigned) s.line)
22810 && (!debug_column_info
22811 || s.column == 0
22812 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22813 == (unsigned) s.column)))))
22814 /* With LTO if there's an abstract instance for
22815 the old DIE, this is a concrete instance and
22816 thus re-use the DIE. */
22817 || get_AT (old_die, DW_AT_abstract_origin))
22818 {
22819 subr_die = old_die;
22820
22821 /* Clear out the declaration attribute, but leave the
22822 parameters so they can be augmented with location
22823 information later. Unless this was a declaration, in
22824 which case, wipe out the nameless parameters and recreate
22825 them further down. */
22826 if (remove_AT (subr_die, DW_AT_declaration))
22827 {
22828
22829 remove_AT (subr_die, DW_AT_object_pointer);
22830 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22831 }
22832 }
22833 /* Make a specification pointing to the previously built
22834 declaration. */
22835 else
22836 {
22837 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22838 add_AT_specification (subr_die, old_die);
22839 add_pubname (decl, subr_die);
22840 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22841 add_AT_file (subr_die, DW_AT_decl_file, file_index);
22842 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22843 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
22844 if (debug_column_info
22845 && s.column
22846 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22847 != (unsigned) s.column))
22848 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
22849
22850 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
22851 emit the real type on the definition die. */
22852 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
22853 {
22854 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
22855 if (die == auto_die || die == decltype_auto_die)
22856 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22857 TYPE_UNQUALIFIED, false, context_die);
22858 }
22859
22860 /* When we process the method declaration, we haven't seen
22861 the out-of-class defaulted definition yet, so we have to
22862 recheck now. */
22863 if ((dwarf_version >= 5 || ! dwarf_strict)
22864 && !get_AT (subr_die, DW_AT_defaulted))
22865 {
22866 int defaulted
22867 = lang_hooks.decls.decl_dwarf_attribute (decl,
22868 DW_AT_defaulted);
22869 if (defaulted != -1)
22870 {
22871 /* Other values must have been handled before. */
22872 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
22873 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22874 }
22875 }
22876 }
22877 }
22878 /* Create a fresh DIE for anything else. */
22879 else
22880 {
22881 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22882
22883 if (TREE_PUBLIC (decl))
22884 add_AT_flag (subr_die, DW_AT_external, 1);
22885
22886 add_name_and_src_coords_attributes (subr_die, decl);
22887 add_pubname (decl, subr_die);
22888 if (debug_info_level > DINFO_LEVEL_TERSE)
22889 {
22890 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
22891 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22892 TYPE_UNQUALIFIED, false, context_die);
22893 }
22894
22895 add_pure_or_virtual_attribute (subr_die, decl);
22896 if (DECL_ARTIFICIAL (decl))
22897 add_AT_flag (subr_die, DW_AT_artificial, 1);
22898
22899 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
22900 add_AT_flag (subr_die, DW_AT_noreturn, 1);
22901
22902 add_alignment_attribute (subr_die, decl);
22903
22904 add_accessibility_attribute (subr_die, decl);
22905 }
22906
22907 /* Unless we have an existing non-declaration DIE, equate the new
22908 DIE. */
22909 if (!old_die || is_declaration_die (old_die))
22910 equate_decl_number_to_die (decl, subr_die);
22911
22912 if (declaration)
22913 {
22914 if (!old_die || !get_AT (old_die, DW_AT_inline))
22915 {
22916 add_AT_flag (subr_die, DW_AT_declaration, 1);
22917
22918 /* If this is an explicit function declaration then generate
22919 a DW_AT_explicit attribute. */
22920 if ((dwarf_version >= 3 || !dwarf_strict)
22921 && lang_hooks.decls.decl_dwarf_attribute (decl,
22922 DW_AT_explicit) == 1)
22923 add_AT_flag (subr_die, DW_AT_explicit, 1);
22924
22925 /* If this is a C++11 deleted special function member then generate
22926 a DW_AT_deleted attribute. */
22927 if ((dwarf_version >= 5 || !dwarf_strict)
22928 && lang_hooks.decls.decl_dwarf_attribute (decl,
22929 DW_AT_deleted) == 1)
22930 add_AT_flag (subr_die, DW_AT_deleted, 1);
22931
22932 /* If this is a C++11 defaulted special function member then
22933 generate a DW_AT_defaulted attribute. */
22934 if (dwarf_version >= 5 || !dwarf_strict)
22935 {
22936 int defaulted
22937 = lang_hooks.decls.decl_dwarf_attribute (decl,
22938 DW_AT_defaulted);
22939 if (defaulted != -1)
22940 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22941 }
22942
22943 /* If this is a C++11 non-static member function with & ref-qualifier
22944 then generate a DW_AT_reference attribute. */
22945 if ((dwarf_version >= 5 || !dwarf_strict)
22946 && lang_hooks.decls.decl_dwarf_attribute (decl,
22947 DW_AT_reference) == 1)
22948 add_AT_flag (subr_die, DW_AT_reference, 1);
22949
22950 /* If this is a C++11 non-static member function with &&
22951 ref-qualifier then generate a DW_AT_reference attribute. */
22952 if ((dwarf_version >= 5 || !dwarf_strict)
22953 && lang_hooks.decls.decl_dwarf_attribute (decl,
22954 DW_AT_rvalue_reference)
22955 == 1)
22956 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
22957 }
22958 }
22959 /* For non DECL_EXTERNALs, if range information is available, fill
22960 the DIE with it. */
22961 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
22962 {
22963 HOST_WIDE_INT cfa_fb_offset;
22964
22965 struct function *fun = DECL_STRUCT_FUNCTION (decl);
22966
22967 if (!crtl->has_bb_partition)
22968 {
22969 dw_fde_ref fde = fun->fde;
22970 if (fde->dw_fde_begin)
22971 {
22972 /* We have already generated the labels. */
22973 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22974 fde->dw_fde_end, false);
22975 }
22976 else
22977 {
22978 /* Create start/end labels and add the range. */
22979 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
22980 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
22981 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
22982 current_function_funcdef_no);
22983 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
22984 current_function_funcdef_no);
22985 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
22986 false);
22987 }
22988
22989 #if VMS_DEBUGGING_INFO
22990 /* HP OpenVMS Industry Standard 64: DWARF Extensions
22991 Section 2.3 Prologue and Epilogue Attributes:
22992 When a breakpoint is set on entry to a function, it is generally
22993 desirable for execution to be suspended, not on the very first
22994 instruction of the function, but rather at a point after the
22995 function's frame has been set up, after any language defined local
22996 declaration processing has been completed, and before execution of
22997 the first statement of the function begins. Debuggers generally
22998 cannot properly determine where this point is. Similarly for a
22999 breakpoint set on exit from a function. The prologue and epilogue
23000 attributes allow a compiler to communicate the location(s) to use. */
23001
23002 {
23003 if (fde->dw_fde_vms_end_prologue)
23004 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
23005 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
23006
23007 if (fde->dw_fde_vms_begin_epilogue)
23008 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
23009 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
23010 }
23011 #endif
23012
23013 }
23014 else
23015 {
23016 /* Generate pubnames entries for the split function code ranges. */
23017 dw_fde_ref fde = fun->fde;
23018
23019 if (fde->dw_fde_second_begin)
23020 {
23021 if (dwarf_version >= 3 || !dwarf_strict)
23022 {
23023 /* We should use ranges for non-contiguous code section
23024 addresses. Use the actual code range for the initial
23025 section, since the HOT/COLD labels might precede an
23026 alignment offset. */
23027 bool range_list_added = false;
23028 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
23029 fde->dw_fde_end, &range_list_added,
23030 false);
23031 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
23032 fde->dw_fde_second_end,
23033 &range_list_added, false);
23034 if (range_list_added)
23035 add_ranges (NULL);
23036 }
23037 else
23038 {
23039 /* There is no real support in DWARF 2 for this, so we make
23040 a workaround. First, emit the pub name for the segment
23041 containing the function label. Then make and emit a
23042 simplified subprogram DIE for the second segment with the
23043 name prefixed by __second_sect_of_. We use the same
23044 linkage name for the second DIE so that gdb will find both
23045 sections when given "b foo". */
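/* Sketch of the resulting DIEs for a function foo split across two
   sections (names illustrative):

       DW_TAG_subprogram
         DW_AT_name           : foo
         DW_AT_low_pc/high_pc : <primary section range>
       DW_TAG_subprogram
         DW_AT_name           : __second_sect_of_foo
         DW_AT_linkage_name   : <same linkage name as foo>
         DW_AT_low_pc/high_pc : <secondary section range>  */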
23046 const char *name = NULL;
23047 tree decl_name = DECL_NAME (decl);
23048 dw_die_ref seg_die;
23049
23050 /* Do the 'primary' section. */
23051 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23052 fde->dw_fde_end, false);
23053
23054 /* Build a minimal DIE for the secondary section. */
23055 seg_die = new_die (DW_TAG_subprogram,
23056 subr_die->die_parent, decl);
23057
23058 if (TREE_PUBLIC (decl))
23059 add_AT_flag (seg_die, DW_AT_external, 1);
23060
23061 if (decl_name != NULL
23062 && IDENTIFIER_POINTER (decl_name) != NULL)
23063 {
23064 name = dwarf2_name (decl, 1);
23065 if (! DECL_ARTIFICIAL (decl))
23066 add_src_coords_attributes (seg_die, decl);
23067
23068 add_linkage_name (seg_die, decl);
23069 }
23070 gcc_assert (name != NULL);
23071 add_pure_or_virtual_attribute (seg_die, decl);
23072 if (DECL_ARTIFICIAL (decl))
23073 add_AT_flag (seg_die, DW_AT_artificial, 1);
23074
23075 name = concat ("__second_sect_of_", name, NULL);
23076 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
23077 fde->dw_fde_second_end, false);
23078 add_name_attribute (seg_die, name);
23079 if (want_pubnames ())
23080 add_pubname_string (name, seg_die);
23081 }
23082 }
23083 else
23084 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
23085 false);
23086 }
23087
23088 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
23089
23090 /* We define the "frame base" as the function's CFA. This is more
23091 convenient for several reasons: (1) It's stable across the prologue
23092 and epilogue, which makes it better than just a frame pointer,
23093 (2) With dwarf3, there exists a one-byte encoding that allows us
23094 to reference the .debug_frame data by proxy, but failing that,
23095 (3) We can at least reuse the code inspection and interpretation
23096 code that determines the CFA position at various points in the
23097 function. */
23098 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
23099 {
23100 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
23101 add_AT_loc (subr_die, DW_AT_frame_base, op);
23102 }
23103 else
23104 {
23105 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
23106 if (list->dw_loc_next)
23107 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
23108 else
23109 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
23110 }
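/* For instance (a sketch; exact offsets are target dependent), with
   DWARF 3+ and DWARF-based unwind info the frame base degenerates to
   the single one-byte expression

       DW_AT_frame_base : DW_OP_call_frame_cfa

   and a local variable's location can then be expressed as, say,
   DW_OP_fbreg -24, i.e. an offset from the CFA.  On the fallback path
   we instead emit the location list built by
   convert_cfa_to_fb_loc_list above.  */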
23111
23112 /* Compute a displacement from the "steady-state frame pointer" to
23113 the CFA. The former is what all stack slots and argument slots
23114 will reference in the rtl; the latter is what we've told the
23115 debugger about. We'll need to adjust all frame_base references
23116 by this displacement. */
23117 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
23118
23119 if (fun->static_chain_decl)
23120 {
23121 /* DWARF requires here a location expression that computes the
23122 address of the enclosing subprogram's frame base. The machinery
23123 in tree-nested.c is supposed to store this specific address in the
23124 last field of the FRAME record. */
23125 const tree frame_type
23126 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
23127 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
23128
23129 tree fb_expr
23130 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
23131 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
23132 fb_expr, fb_decl, NULL_TREE);
23133
23134 add_AT_location_description (subr_die, DW_AT_static_link,
23135 loc_list_from_tree (fb_expr, 0, NULL));
23136 }
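/* Illustrative sketch (the field names are made up): for

       void outer (void)
       {
         int x;
         void inner (void) { x++; }
         ...
       }

   tree-nested.c builds something along the lines of

       struct FRAME.outer { int x; void *frame_base; };

   and the DW_AT_static_link expression emitted above loads that
   trailing frame_base field through the static chain pointer, giving
   the debugger the frame of "outer" while it is stopped in "inner".  */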
23137
23138 resolve_variable_values ();
23139 }
23140
23141 /* Generate child DIEs for template parameters. */
23142 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
23143 gen_generic_params_dies (decl);
23144
23145 /* Now output descriptions of the arguments for this function. This gets
23146 (unnecessarily?) complex because the DECL_ARGUMENTS list
23147 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
23148 `...' at the end of the formal parameter list. In order to find out if
23149 there was a trailing ellipsis or not, we must instead look at the type
23150 associated with the FUNCTION_DECL. This will be a node of type
23151 FUNCTION_TYPE. If the chain of type nodes hanging off of this
23152 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
23153 an ellipsis at the end. */
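/* For example (sketch): for "int f (int a, ...)" the argument type
   chain of the FUNCTION_TYPE does not end in void_type_node, so besides
   the DW_TAG_formal_parameter for "a" the subprogram DIE gets a
   DW_TAG_unspecified_parameters child; for "int g (int a)" the chain
   ends in void_type_node and no such child is emitted.  */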
23154
23155 /* In the case where we are describing a mere function declaration, all we
23156 need to do here (and all we *can* do here) is to describe the *types* of
23157 its formal parameters. */
23158 if (debug_info_level <= DINFO_LEVEL_TERSE)
23159 ;
23160 else if (declaration)
23161 gen_formal_types_die (decl, subr_die);
23162 else
23163 {
23164 /* Generate DIEs to represent all known formal parameters. */
23165 tree parm = DECL_ARGUMENTS (decl);
23166 tree generic_decl = early_dwarf
23167 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23168 tree generic_decl_parm = generic_decl
23169 ? DECL_ARGUMENTS (generic_decl)
23170 : NULL;
23171
23172 /* Now we want to walk the list of parameters of the function and
23173 emit their relevant DIEs.
23174
23175 We consider the case of DECL being an instance of a generic function
23176 as well as it being a normal function.
23177
23178 If DECL is an instance of a generic function we walk the
23179 parameters of the generic function declaration _and_ the parameters of
23180 DECL itself. This is useful because we want to emit specific DIEs for
23181 function parameter packs and those are declared as part of the
23182 generic function declaration. In that particular case,
23183 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23184 That DIE has children DIEs representing the set of arguments
23185 of the pack. Note that the set of pack arguments can be empty.
23186 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
23187 child DIEs.
23188
23189 Otherwise, we just consider the parameters of DECL. */
23190 while (generic_decl_parm || parm)
23191 {
23192 if (generic_decl_parm
23193 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23194 gen_formal_parameter_pack_die (generic_decl_parm,
23195 parm, subr_die,
23196 &parm);
23197 else if (parm)
23198 {
23199 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23200
23201 if (early_dwarf
23202 && parm == DECL_ARGUMENTS (decl)
23203 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23204 && parm_die
23205 && (dwarf_version >= 3 || !dwarf_strict))
23206 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23207
23208 parm = DECL_CHAIN (parm);
23209 }
23210 else if (parm)
23211 parm = DECL_CHAIN (parm);
23212
23213 if (generic_decl_parm)
23214 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23215 }
23216
23217 /* Decide whether we need an unspecified_parameters DIE at the end.
23218 There are 2 more cases to do this for: 1) the ANSI ... declaration -
23219 this is detectable when the end of the arg list is not a
23220 void_type_node 2) an unprototyped function declaration (not a
23221 definition). This just means that we have no info about the
23222 parameters at all. */
23223 if (early_dwarf)
23224 {
23225 if (prototype_p (TREE_TYPE (decl)))
23226 {
23227 /* This is the prototyped case, check for a trailing ellipsis. */
23228 if (stdarg_p (TREE_TYPE (decl)))
23229 gen_unspecified_parameters_die (decl, subr_die);
23230 }
23231 else if (DECL_INITIAL (decl) == NULL_TREE)
23232 gen_unspecified_parameters_die (decl, subr_die);
23233 }
23234 }
23235
23236 if (subr_die != old_die)
23237 /* Add the calling convention attribute if requested. */
23238 add_calling_convention_attribute (subr_die, decl);
23239
23240 /* Output Dwarf info for all of the stuff within the body of the function
23241 (if it has one - it may be just a declaration).
23242
23243 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23244 a function. This BLOCK actually represents the outermost binding contour
23245 for the function, i.e. the contour in which the function's formal
23246 parameters and labels get declared. Curiously, it appears that the front
23247 end doesn't actually put the PARM_DECL nodes for the current function onto
23248 the BLOCK_VARS list for this outer scope; instead they are strung off the
23249 DECL_ARGUMENTS list for the function.
23250
23251 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
23252 the LABEL_DECL nodes for the function however, and we output DWARF info
23253 for those in decls_for_scope. Just within the `outer_scope' there will be
23254 a BLOCK node representing the function's outermost pair of curly braces,
23255 and any blocks used for the base and member initializers of a C++
23256 constructor function. */
23257 tree outer_scope = DECL_INITIAL (decl);
23258 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23259 {
23260 int call_site_note_count = 0;
23261 int tail_call_site_note_count = 0;
23262
23263 /* Emit a DW_TAG_variable DIE for a named return value. */
23264 if (DECL_NAME (DECL_RESULT (decl)))
23265 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23266
23267 /* The first time through decls_for_scope we will generate the
23268 DIEs for the locals. The second time, we fill in the
23269 location info. */
23270 decls_for_scope (outer_scope, subr_die);
23271
23272 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23273 {
23274 struct call_arg_loc_node *ca_loc;
23275 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23276 {
23277 dw_die_ref die = NULL;
23278 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23279 rtx arg, next_arg;
23280 tree arg_decl = NULL_TREE;
23281
23282 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23283 ? XEXP (ca_loc->call_arg_loc_note, 0)
23284 : NULL_RTX);
23285 arg; arg = next_arg)
23286 {
23287 dw_loc_descr_ref reg, val;
23288 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23289 dw_die_ref cdie, tdie = NULL;
23290
23291 next_arg = XEXP (arg, 1);
23292 if (REG_P (XEXP (XEXP (arg, 0), 0))
23293 && next_arg
23294 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23295 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23296 && REGNO (XEXP (XEXP (arg, 0), 0))
23297 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23298 next_arg = XEXP (next_arg, 1);
23299 if (mode == VOIDmode)
23300 {
23301 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23302 if (mode == VOIDmode)
23303 mode = GET_MODE (XEXP (arg, 0));
23304 }
23305 if (mode == VOIDmode || mode == BLKmode)
23306 continue;
23307 /* Get dynamic information about call target only if we
23308 have no static information: we cannot generate both
23309 DW_AT_call_origin and DW_AT_call_target
23310 attributes. */
23311 if (ca_loc->symbol_ref == NULL_RTX)
23312 {
23313 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23314 {
23315 tloc = XEXP (XEXP (arg, 0), 1);
23316 continue;
23317 }
23318 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23319 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23320 {
23321 tlocc = XEXP (XEXP (arg, 0), 1);
23322 continue;
23323 }
23324 }
23325 reg = NULL;
23326 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23327 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23328 VAR_INIT_STATUS_INITIALIZED);
23329 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23330 {
23331 rtx mem = XEXP (XEXP (arg, 0), 0);
23332 reg = mem_loc_descriptor (XEXP (mem, 0),
23333 get_address_mode (mem),
23334 GET_MODE (mem),
23335 VAR_INIT_STATUS_INITIALIZED);
23336 }
23337 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23338 == DEBUG_PARAMETER_REF)
23339 {
23340 tree tdecl
23341 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23342 tdie = lookup_decl_die (tdecl);
23343 if (tdie == NULL)
23344 continue;
23345 arg_decl = tdecl;
23346 }
23347 else
23348 continue;
23349 if (reg == NULL
23350 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23351 != DEBUG_PARAMETER_REF)
23352 continue;
23353 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23354 VOIDmode,
23355 VAR_INIT_STATUS_INITIALIZED);
23356 if (val == NULL)
23357 continue;
23358 if (die == NULL)
23359 die = gen_call_site_die (decl, subr_die, ca_loc);
23360 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23361 NULL_TREE);
23362 add_desc_attribute (cdie, arg_decl);
23363 if (reg != NULL)
23364 add_AT_loc (cdie, DW_AT_location, reg);
23365 else if (tdie != NULL)
23366 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23367 tdie);
23368 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23369 if (next_arg != XEXP (arg, 1))
23370 {
23371 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23372 if (mode == VOIDmode)
23373 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23374 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23375 0), 1),
23376 mode, VOIDmode,
23377 VAR_INIT_STATUS_INITIALIZED);
23378 if (val != NULL)
23379 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23380 val);
23381 }
23382 }
23383 if (die == NULL
23384 && (ca_loc->symbol_ref || tloc))
23385 die = gen_call_site_die (decl, subr_die, ca_loc);
23386 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23387 {
23388 dw_loc_descr_ref tval = NULL;
23389
23390 if (tloc != NULL_RTX)
23391 tval = mem_loc_descriptor (tloc,
23392 GET_MODE (tloc) == VOIDmode
23393 ? Pmode : GET_MODE (tloc),
23394 VOIDmode,
23395 VAR_INIT_STATUS_INITIALIZED);
23396 if (tval)
23397 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23398 else if (tlocc != NULL_RTX)
23399 {
23400 tval = mem_loc_descriptor (tlocc,
23401 GET_MODE (tlocc) == VOIDmode
23402 ? Pmode : GET_MODE (tlocc),
23403 VOIDmode,
23404 VAR_INIT_STATUS_INITIALIZED);
23405 if (tval)
23406 add_AT_loc (die,
23407 dwarf_AT (DW_AT_call_target_clobbered),
23408 tval);
23409 }
23410 }
23411 if (die != NULL)
23412 {
23413 call_site_note_count++;
23414 if (ca_loc->tail_call_p)
23415 tail_call_site_note_count++;
23416 }
23417 }
23418 }
23419 call_arg_locations = NULL;
23420 call_arg_loc_last = NULL;
23421 if (tail_call_site_count >= 0
23422 && tail_call_site_count == tail_call_site_note_count
23423 && (!dwarf_strict || dwarf_version >= 5))
23424 {
23425 if (call_site_count >= 0
23426 && call_site_count == call_site_note_count)
23427 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23428 else
23429 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23430 }
23431 call_site_count = -1;
23432 tail_call_site_count = -1;
23433 }
23434
23435 /* Mark used types after we have created DIEs for the function's scopes. */
23436 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23437 }
23438
23439 /* Returns a hash value for X (which really is a die_struct). */
23440
23441 hashval_t
23442 block_die_hasher::hash (die_struct *d)
23443 {
23444 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23445 }
23446
23447 /* Return nonzero if decl_id and die_parent of die_struct X is the same
23448 as decl_id and die_parent of die_struct Y. */
23449
23450 bool
23451 block_die_hasher::equal (die_struct *x, die_struct *y)
23452 {
23453 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23454 }
23455
23456 /* Hold information about markers for inlined entry points. */
23457 struct GTY ((for_user)) inline_entry_data
23458 {
23459 /* The block that's the inlined_function_outer_scope for an inlined
23460 function. */
23461 tree block;
23462
23463 /* The label at the inlined entry point. */
23464 const char *label_pfx;
23465 unsigned int label_num;
23466
23467 /* The view number to be used as the inlined entry point. */
23468 var_loc_view view;
23469 };
23470
23471 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23472 {
23473 typedef tree compare_type;
23474 static inline hashval_t hash (const inline_entry_data *);
23475 static inline bool equal (const inline_entry_data *, const_tree);
23476 };
23477
23478 /* Hash table routines for inline_entry_data. */
23479
23480 inline hashval_t
23481 inline_entry_data_hasher::hash (const inline_entry_data *data)
23482 {
23483 return htab_hash_pointer (data->block);
23484 }
23485
23486 inline bool
23487 inline_entry_data_hasher::equal (const inline_entry_data *data,
23488 const_tree block)
23489 {
23490 return data->block == block;
23491 }
23492
23493 /* Inlined entry points pending DIE creation in this compilation unit. */
23494
23495 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
23496
23497
23498 /* Return TRUE if DECL, which may have been previously generated as
23499 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23500 true if decl (or its origin) is either an extern declaration or a
23501 class/namespace scoped declaration.
23502
23503 The declare_in_namespace support causes us to get two DIEs for one
23504 variable, both of which are declarations. We want to avoid
23505 considering one to be a specification, so we must test for
23506 DECLARATION and DW_AT_declaration. */
23507 static inline bool
23508 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23509 {
23510 return (old_die && TREE_STATIC (decl) && !declaration
23511 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23512 }
23513
23514 /* Return true if DECL is a local static. */
23515
23516 static inline bool
23517 local_function_static (tree decl)
23518 {
23519 gcc_assert (VAR_P (decl));
23520 return TREE_STATIC (decl)
23521 && DECL_CONTEXT (decl)
23522 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23523 }
23524
23525 /* Generate a DIE to represent a declared data object.
23526 Either DECL or ORIGIN must be non-null. */
23527
23528 static void
23529 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23530 {
23531 HOST_WIDE_INT off = 0;
23532 tree com_decl;
23533 tree decl_or_origin = decl ? decl : origin;
23534 tree ultimate_origin;
23535 dw_die_ref var_die;
23536 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23537 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23538 || class_or_namespace_scope_p (context_die));
23539 bool specialization_p = false;
23540 bool no_linkage_name = false;
23541
23542 /* While C++ inline static data members have definitions inside of the
23543 class, force the first DIE to be a declaration, then let gen_member_die
23544 reparent it to the class context and call gen_variable_die again
23545 to create the outside of the class DIE for the definition. */
23546 if (!declaration
23547 && old_die == NULL
23548 && decl
23549 && DECL_CONTEXT (decl)
23550 && TYPE_P (DECL_CONTEXT (decl))
23551 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23552 {
23553 declaration = true;
23554 if (dwarf_version < 5)
23555 no_linkage_name = true;
23556 }
23557
23558 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23559 if (decl || ultimate_origin)
23560 origin = ultimate_origin;
23561 com_decl = fortran_common (decl_or_origin, &off);
23562
23563 /* A symbol in a common block gets emitted as a child of the common block,
23564 in the form of a data member. */
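/* A sketch of the shape this produces for Fortran code like

       COMMON /blk/ a, b

   (attribute details elided):

       DW_TAG_common_block
         DW_AT_name     : blk
         DW_AT_location : <address of the common block>
         DW_TAG_variable
           DW_AT_name     : a
           DW_AT_location : <block address>
         DW_TAG_variable
           DW_AT_name     : b
           DW_AT_location : <block address plus the offset of b>  */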
23565 if (com_decl)
23566 {
23567 dw_die_ref com_die;
23568 dw_loc_list_ref loc = NULL;
23569 die_node com_die_arg;
23570
23571 var_die = lookup_decl_die (decl_or_origin);
23572 if (var_die)
23573 {
23574 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23575 {
23576 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23577 if (loc)
23578 {
23579 if (off)
23580 {
23581 /* Optimize the common case. */
23582 if (single_element_loc_list_p (loc)
23583 && loc->expr->dw_loc_opc == DW_OP_addr
23584 && loc->expr->dw_loc_next == NULL
23585 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23586 == SYMBOL_REF)
23587 {
23588 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23589 loc->expr->dw_loc_oprnd1.v.val_addr
23590 = plus_constant (GET_MODE (x), x , off);
23591 }
23592 else
23593 loc_list_plus_const (loc, off);
23594 }
23595 add_AT_location_description (var_die, DW_AT_location, loc);
23596 remove_AT (var_die, DW_AT_declaration);
23597 }
23598 }
23599 return;
23600 }
23601
23602 if (common_block_die_table == NULL)
23603 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23604
23605 com_die_arg.decl_id = DECL_UID (com_decl);
23606 com_die_arg.die_parent = context_die;
23607 com_die = common_block_die_table->find (&com_die_arg);
23608 if (! early_dwarf)
23609 loc = loc_list_from_tree (com_decl, 2, NULL);
23610 if (com_die == NULL)
23611 {
23612 const char *cnam
23613 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23614 die_node **slot;
23615
23616 com_die = new_die (DW_TAG_common_block, context_die, decl);
23617 add_name_and_src_coords_attributes (com_die, com_decl);
23618 if (loc)
23619 {
23620 add_AT_location_description (com_die, DW_AT_location, loc);
23621 /* Avoid sharing the same loc descriptor between
23622 DW_TAG_common_block and DW_TAG_variable. */
23623 loc = loc_list_from_tree (com_decl, 2, NULL);
23624 }
23625 else if (DECL_EXTERNAL (decl_or_origin))
23626 add_AT_flag (com_die, DW_AT_declaration, 1);
23627 if (want_pubnames ())
23628 add_pubname_string (cnam, com_die); /* ??? needed? */
23629 com_die->decl_id = DECL_UID (com_decl);
23630 slot = common_block_die_table->find_slot (com_die, INSERT);
23631 *slot = com_die;
23632 }
23633 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23634 {
23635 add_AT_location_description (com_die, DW_AT_location, loc);
23636 loc = loc_list_from_tree (com_decl, 2, NULL);
23637 remove_AT (com_die, DW_AT_declaration);
23638 }
23639 var_die = new_die (DW_TAG_variable, com_die, decl);
23640 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23641 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23642 decl_quals (decl_or_origin), false,
23643 context_die);
23644 add_alignment_attribute (var_die, decl);
23645 add_AT_flag (var_die, DW_AT_external, 1);
23646 if (loc)
23647 {
23648 if (off)
23649 {
23650 /* Optimize the common case. */
23651 if (single_element_loc_list_p (loc)
23652 && loc->expr->dw_loc_opc == DW_OP_addr
23653 && loc->expr->dw_loc_next == NULL
23654 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23655 {
23656 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23657 loc->expr->dw_loc_oprnd1.v.val_addr
23658 = plus_constant (GET_MODE (x), x, off);
23659 }
23660 else
23661 loc_list_plus_const (loc, off);
23662 }
23663 add_AT_location_description (var_die, DW_AT_location, loc);
23664 }
23665 else if (DECL_EXTERNAL (decl_or_origin))
23666 add_AT_flag (var_die, DW_AT_declaration, 1);
23667 if (decl)
23668 equate_decl_number_to_die (decl, var_die);
23669 return;
23670 }
23671
23672 if (old_die)
23673 {
23674 if (declaration)
23675 {
23676 /* A declaration that has been previously dumped needs no
23677 further annotations, since it doesn't need location info on
23678 the second pass. */
23679 return;
23680 }
23681 else if (decl_will_get_specification_p (old_die, decl, declaration)
23682 && !get_AT (old_die, DW_AT_specification))
23683 {
23684 /* Fall-thru so we can make a new variable die along with a
23685 DW_AT_specification. */
23686 }
23687 else if (origin && old_die->die_parent != context_die)
23688 {
23689 /* If we will be creating an inlined instance, we need a
23690 new DIE that will get annotated with
23691 DW_AT_abstract_origin. */
23692 gcc_assert (!DECL_ABSTRACT_P (decl));
23693 }
23694 else
23695 {
23696 /* If a DIE was dumped early, it still needs location info.
23697 Skip to where we fill the location bits. */
23698 var_die = old_die;
23699
23700 /* ??? In LTRANS we cannot annotate early created variably
23701 modified type DIEs without copying them and adjusting all
23702 references to them, so we dump them again. Also add a
23703 reference to them, but beware of a -g0 compile and -g link,
23704 in which case the reference will already be present. */
23705 tree type = TREE_TYPE (decl_or_origin);
23706 if (in_lto_p
23707 && ! get_AT (var_die, DW_AT_type)
23708 && variably_modified_type_p
23709 (type, decl_function_context (decl_or_origin)))
23710 {
23711 if (decl_by_reference_p (decl_or_origin))
23712 add_type_attribute (var_die, TREE_TYPE (type),
23713 TYPE_UNQUALIFIED, false, context_die);
23714 else
23715 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23716 false, context_die);
23717 }
23718
23719 goto gen_variable_die_location;
23720 }
23721 }
23722
23723 /* For static data members, the declaration in the class is supposed
23724 to have DW_TAG_member tag in DWARF{3,4} and we emit it for compatibility
23725 also in DWARF2; the specification should still be DW_TAG_variable
23726 referencing the DW_TAG_member DIE. */
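/* Illustrative sketch for "struct S { static int x; }; int S::x;"
   with DWARF < 5: the in-class declaration becomes

       DW_TAG_member
         DW_AT_name        : x
         DW_AT_declaration : 1

   and the out-of-class definition becomes

       DW_TAG_variable
         DW_AT_specification : <reference to the DW_TAG_member above>
         DW_AT_location      : <address of S::x>

   With DWARF 5 the in-class declaration itself uses DW_TAG_variable.  */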
23727 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23728 var_die = new_die (DW_TAG_member, context_die, decl);
23729 else
23730 var_die = new_die (DW_TAG_variable, context_die, decl);
23731
23732 if (origin != NULL)
23733 add_abstract_origin_attribute (var_die, origin);
23734
23735 /* Loop unrolling can create multiple blocks that refer to the same
23736 static variable, so we must test for the DW_AT_declaration flag.
23737
23738 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23739 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23740 sharing them.
23741
23742 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23743 else if (decl_will_get_specification_p (old_die, decl, declaration))
23744 {
23745 /* This is a definition of a C++ class level static. */
23746 add_AT_specification (var_die, old_die);
23747 specialization_p = true;
23748 if (DECL_NAME (decl))
23749 {
23750 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23751 struct dwarf_file_data * file_index = lookup_filename (s.file);
23752
23753 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23754 add_AT_file (var_die, DW_AT_decl_file, file_index);
23755
23756 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23757 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23758
23759 if (debug_column_info
23760 && s.column
23761 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23762 != (unsigned) s.column))
23763 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23764
23765 if (old_die->die_tag == DW_TAG_member)
23766 add_linkage_name (var_die, decl);
23767 }
23768 }
23769 else
23770 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23771
23772 if ((origin == NULL && !specialization_p)
23773 || (origin != NULL
23774 && !DECL_ABSTRACT_P (decl_or_origin)
23775 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23776 decl_function_context
23777 (decl_or_origin))))
23778 {
23779 tree type = TREE_TYPE (decl_or_origin);
23780
23781 if (decl_by_reference_p (decl_or_origin))
23782 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23783 context_die);
23784 else
23785 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23786 context_die);
23787 }
23788
23789 if (origin == NULL && !specialization_p)
23790 {
23791 if (TREE_PUBLIC (decl))
23792 add_AT_flag (var_die, DW_AT_external, 1);
23793
23794 if (DECL_ARTIFICIAL (decl))
23795 add_AT_flag (var_die, DW_AT_artificial, 1);
23796
23797 add_alignment_attribute (var_die, decl);
23798
23799 add_accessibility_attribute (var_die, decl);
23800 }
23801
23802 if (declaration)
23803 add_AT_flag (var_die, DW_AT_declaration, 1);
23804
23805 if (decl && (DECL_ABSTRACT_P (decl)
23806 || !old_die || is_declaration_die (old_die)))
23807 equate_decl_number_to_die (decl, var_die);
23808
23809 gen_variable_die_location:
23810 if (! declaration
23811 && (! DECL_ABSTRACT_P (decl_or_origin)
23812 /* Local static vars are shared between all clones/inlines,
23813 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23814 already set. */
23815 || (VAR_P (decl_or_origin)
23816 && TREE_STATIC (decl_or_origin)
23817 && DECL_RTL_SET_P (decl_or_origin))))
23818 {
23819 if (early_dwarf)
23820 add_pubname (decl_or_origin, var_die);
23821 else
23822 add_location_or_const_value_attribute (var_die, decl_or_origin,
23823 decl == NULL);
23824 }
23825 else
23826 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23827
23828 if ((dwarf_version >= 4 || !dwarf_strict)
23829 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23830 DW_AT_const_expr) == 1
23831 && !get_AT (var_die, DW_AT_const_expr)
23832 && !specialization_p)
23833 add_AT_flag (var_die, DW_AT_const_expr, 1);
23834
23835 if (!dwarf_strict)
23836 {
23837 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23838 DW_AT_inline);
23839 if (inl != -1
23840 && !get_AT (var_die, DW_AT_inline)
23841 && !specialization_p)
23842 add_AT_unsigned (var_die, DW_AT_inline, inl);
23843 }
23844 }
23845
23846 /* Generate a DIE to represent a named constant. */
23847
23848 static void
23849 gen_const_die (tree decl, dw_die_ref context_die)
23850 {
23851 dw_die_ref const_die;
23852 tree type = TREE_TYPE (decl);
23853
23854 const_die = lookup_decl_die (decl);
23855 if (const_die)
23856 return;
23857
23858 const_die = new_die (DW_TAG_constant, context_die, decl);
23859 equate_decl_number_to_die (decl, const_die);
23860 add_name_and_src_coords_attributes (const_die, decl);
23861 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
23862 if (TREE_PUBLIC (decl))
23863 add_AT_flag (const_die, DW_AT_external, 1);
23864 if (DECL_ARTIFICIAL (decl))
23865 add_AT_flag (const_die, DW_AT_artificial, 1);
23866 tree_add_const_value_attribute_for_decl (const_die, decl);
23867 }
23868
23869 /* Generate a DIE to represent a label identifier. */
23870
23871 static void
23872 gen_label_die (tree decl, dw_die_ref context_die)
23873 {
23874 tree origin = decl_ultimate_origin (decl);
23875 dw_die_ref lbl_die = lookup_decl_die (decl);
23876 rtx insn;
23877 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23878
23879 if (!lbl_die)
23880 {
23881 lbl_die = new_die (DW_TAG_label, context_die, decl);
23882 equate_decl_number_to_die (decl, lbl_die);
23883
23884 if (origin != NULL)
23885 add_abstract_origin_attribute (lbl_die, origin);
23886 else
23887 add_name_and_src_coords_attributes (lbl_die, decl);
23888 }
23889
23890 if (DECL_ABSTRACT_P (decl))
23891 equate_decl_number_to_die (decl, lbl_die);
23892 else if (! early_dwarf)
23893 {
23894 insn = DECL_RTL_IF_SET (decl);
23895
23896 /* Deleted labels are programmer-specified labels which have been
23897 eliminated because of various optimizations. We still emit them
23898 here so that it is possible to put breakpoints on them. */
23899 if (insn
23900 && (LABEL_P (insn)
23901 || ((NOTE_P (insn)
23902 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
23903 {
23904 /* When optimization is enabled (via -O) some parts of the compiler
23905 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
23906 represent source-level labels which were explicitly declared by
23907 the user. This really shouldn't be happening though, so catch
23908 it if it ever does happen. */
23909 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
23910
23911 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
23912 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23913 }
23914 else if (insn
23915 && NOTE_P (insn)
23916 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
23917 && CODE_LABEL_NUMBER (insn) != -1)
23918 {
23919 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
23920 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23921 }
23922 }
23923 }
23924
23925 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
23926 attributes to the DIE for a block STMT, to describe where the inlined
23927 function was called from. This is similar to add_src_coords_attributes. */
23928
23929 static inline void
23930 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
23931 {
23932 /* We can end up with BUILTINS_LOCATION here. */
23933 if (RESERVED_LOCATION_P (BLOCK_SOURCE_LOCATION (stmt)))
23934 return;
23935
23936 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
23937
23938 if (dwarf_version >= 3 || !dwarf_strict)
23939 {
23940 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
23941 add_AT_unsigned (die, DW_AT_call_line, s.line);
23942 if (debug_column_info && s.column)
23943 add_AT_unsigned (die, DW_AT_call_column, s.column);
23944 }
23945 }
23946
23947
23948 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
23949 Add low_pc and high_pc attributes to the DIE for a block STMT. */
23950
23951 static inline void
23952 add_high_low_attributes (tree stmt, dw_die_ref die)
23953 {
23954 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23955
23956 if (inline_entry_data **iedp
23957 = !inline_entry_data_table ? NULL
23958 : inline_entry_data_table->find_slot_with_hash (stmt,
23959 htab_hash_pointer (stmt),
23960 NO_INSERT))
23961 {
23962 inline_entry_data *ied = *iedp;
23963 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
23964 gcc_assert (debug_inline_points);
23965 gcc_assert (inlined_function_outer_scope_p (stmt));
23966
23967 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
23968 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23969
23970 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
23971 && !dwarf_strict)
23972 {
23973 if (!output_asm_line_debug_info ())
23974 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
23975 else
23976 {
23977 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
23978 /* FIXME: this will resolve to a small number. Could we
23979 possibly emit smaller data? Ideally we'd emit a
23980 uleb128, but that would make the size of DIEs
23981 impossible for the compiler to compute, since it's
23982 the assembler that computes the value of the view
23983 label in this case. Ideally, we'd have a single form
23984 encompassing both the address and the view, and
23985 indirecting them through a table might make things
23986 easier, but even that would be more wasteful,
23987 space-wise, than what we have now. */
23988 add_AT_symview (die, DW_AT_GNU_entry_view, label);
23989 }
23990 }
23991
23992 inline_entry_data_table->clear_slot (iedp);
23993 }
23994
23995 if (BLOCK_FRAGMENT_CHAIN (stmt)
23996 && (dwarf_version >= 3 || !dwarf_strict))
23997 {
23998 tree chain, superblock = NULL_TREE;
23999 dw_die_ref pdie;
24000 dw_attr_node *attr = NULL;
24001
24002 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
24003 {
24004 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24005 BLOCK_NUMBER (stmt));
24006 add_AT_lbl_id (die, DW_AT_entry_pc, label);
24007 }
24008
24009 /* Optimize duplicate .debug_ranges lists or even tails of
24010 lists. If this BLOCK has the same ranges as its supercontext,
24011 look up the DW_AT_ranges attribute in the supercontext (and
24012 recursively so), verify that the ranges_table contains the
24013 right values and use it instead of adding a new .debug_ranges entry. */
24014 for (chain = stmt, pdie = die;
24015 BLOCK_SAME_RANGE (chain);
24016 chain = BLOCK_SUPERCONTEXT (chain))
24017 {
24018 dw_attr_node *new_attr;
24019
24020 pdie = pdie->die_parent;
24021 if (pdie == NULL)
24022 break;
24023 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
24024 break;
24025 new_attr = get_AT (pdie, DW_AT_ranges);
24026 if (new_attr == NULL
24027 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
24028 break;
24029 attr = new_attr;
24030 superblock = BLOCK_SUPERCONTEXT (chain);
24031 }
24032 if (attr != NULL
24033 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
24034 == (int)BLOCK_NUMBER (superblock))
24035 && BLOCK_FRAGMENT_CHAIN (superblock))
24036 {
24037 unsigned long off = attr->dw_attr_val.v.val_offset;
24038 unsigned long supercnt = 0, thiscnt = 0;
24039 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
24040 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24041 {
24042 ++supercnt;
24043 gcc_checking_assert ((*ranges_table)[off + supercnt].num
24044 == (int)BLOCK_NUMBER (chain));
24045 }
24046 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
24047 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
24048 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24049 ++thiscnt;
24050 gcc_assert (supercnt >= thiscnt);
24051 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
24052 false);
24053 note_rnglist_head (off + supercnt - thiscnt);
24054 return;
24055 }
24056
24057 unsigned int offset = add_ranges (stmt, true);
24058 add_AT_range_list (die, DW_AT_ranges, offset, false);
24059 note_rnglist_head (offset);
24060
24061 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
24062 chain = BLOCK_FRAGMENT_CHAIN (stmt);
24063 do
24064 {
24065 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
24066 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
24067 chain = BLOCK_FRAGMENT_CHAIN (chain);
24068 }
24069 while (chain);
24070 add_ranges (NULL);
24071 }
24072 else
24073 {
24074 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
24075 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24076 BLOCK_NUMBER (stmt));
24077 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
24078 BLOCK_NUMBER (stmt));
24079 add_AT_low_high_pc (die, label, label_high, false);
24080 }
24081 }
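/* A minimal standalone sketch (not GCC internals) of the offset arithmetic
   used above when a block shares the tail of its supercontext's
   .debug_ranges list: OFF, SUPERCNT and THISCNT mirror the local variables
   in the code above, everything else is illustrative.  */
#include <assert.h>

static unsigned long
shared_ranges_offset (unsigned long off, unsigned long supercnt,
                      unsigned long thiscnt)
{
  /* The block must cover a suffix of its supercontext's fragments.  */
  assert (thiscnt <= supercnt);
  return off + supercnt - thiscnt;
}

/* E.g. if the supercontext's list starts at offset 10 and covers 5
   fragments while this block covers the last 3 of them, this block's
   DW_AT_ranges list starts at shared_ranges_offset (10, 5, 3) == 12.  */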
24082
24083 /* Generate a DIE for a lexical block. */
24084
24085 static void
24086 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
24087 {
24088 dw_die_ref old_die = lookup_block_die (stmt);
24089 dw_die_ref stmt_die = NULL;
24090 if (!old_die)
24091 {
24092 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24093 equate_block_to_die (stmt, stmt_die);
24094 }
24095
24096 if (BLOCK_ABSTRACT_ORIGIN (stmt))
24097 {
24098 /* If this is an inlined or concrete instance, create a new lexical
24099 block DIE for anything below to attach DW_AT_abstract_origin to. */
24100 if (old_die)
24101 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24102
24103 tree origin = block_ultimate_origin (stmt);
24104 if (origin != NULL_TREE && (origin != stmt || old_die))
24105 add_abstract_origin_attribute (stmt_die, origin);
24106
24107 old_die = NULL;
24108 }
24109
24110 if (old_die)
24111 stmt_die = old_die;
24112
24113 /* A non-abstract block whose blocks have already been reordered
24114 should have the instruction range for this block. If so, set the
24115 high/low attributes. */
24116 if (!early_dwarf && TREE_ASM_WRITTEN (stmt))
24117 {
24118 gcc_assert (stmt_die);
24119 add_high_low_attributes (stmt, stmt_die);
24120 }
24121
24122 decls_for_scope (stmt, stmt_die);
24123 }
24124
24125 /* Generate a DIE for an inlined subprogram. */
24126
24127 static void
24128 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
24129 {
24130 tree decl = block_ultimate_origin (stmt);
24131
24132 /* Make sure any inlined functions are known to be inlineable. */
24133 gcc_checking_assert (DECL_ABSTRACT_P (decl)
24134 || cgraph_function_possibly_inlined_p (decl));
24135
24136 dw_die_ref subr_die = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
24137
24138 if (call_arg_locations || debug_inline_points)
24139 equate_block_to_die (stmt, subr_die);
24140 add_abstract_origin_attribute (subr_die, decl);
24141 if (TREE_ASM_WRITTEN (stmt))
24142 add_high_low_attributes (stmt, subr_die);
24143 add_call_src_coords_attributes (stmt, subr_die);
24144
24145 /* The inliner creates an extra BLOCK for the parameter setup;
24146 we want to merge that with the actual outermost BLOCK of the
24147 inlined function to avoid duplicate locals in consumers.
24148 Do that by doing the recursion to subblocks on the single subblock
24149 of STMT. */
24150 bool unwrap_one = false;
24151 if (BLOCK_SUBBLOCKS (stmt) && !BLOCK_CHAIN (BLOCK_SUBBLOCKS (stmt)))
24152 {
24153 tree origin = block_ultimate_origin (BLOCK_SUBBLOCKS (stmt));
24154 if (origin
24155 && TREE_CODE (origin) == BLOCK
24156 && BLOCK_SUPERCONTEXT (origin) == decl)
24157 unwrap_one = true;
24158 }
24159 decls_for_scope (stmt, subr_die, !unwrap_one);
24160 if (unwrap_one)
24161 decls_for_scope (BLOCK_SUBBLOCKS (stmt), subr_die);
24162 }
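/* Illustrative example of the unwrapping above (block labels invented):
   an inlined call typically produces

     BLOCK A   <- extra block created by the inliner for parameter setup
       BLOCK B <- copy of the inlined function's outermost scope

   and with UNWRAP_ONE set, the locals of both A and B end up as direct
   children of the single DW_TAG_inlined_subroutine DIE rather than being
   split over two nested scopes.  */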
24163
24164 /* Generate a DIE for a field in a record, or structure. CTX is required: see
24165 the comment for VLR_CONTEXT. */
24166
24167 static void
24168 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24169 {
24170 dw_die_ref decl_die;
24171
24172 if (TREE_TYPE (decl) == error_mark_node)
24173 return;
24174
24175 decl_die = new_die (DW_TAG_member, context_die, decl);
24176 add_name_and_src_coords_attributes (decl_die, decl);
24177 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24178 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24179 context_die);
24180
24181 if (DECL_BIT_FIELD_TYPE (decl))
24182 {
24183 add_byte_size_attribute (decl_die, decl);
24184 add_bit_size_attribute (decl_die, decl);
24185 add_bit_offset_attribute (decl_die, decl, ctx);
24186 }
24187
24188 add_alignment_attribute (decl_die, decl);
24189
24190 /* If we have a variant part offset, we are processing a field inside a
24191 variant part (represented in trees as a QUAL_UNION_TYPE); the variants
24192 themselves go through gen_variant_part instead of this function. */
24193 gcc_assert (ctx->variant_part_offset == NULL_TREE
24194 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
24195 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24196 add_data_member_location_attribute (decl_die, decl, ctx);
24197
24198 if (DECL_ARTIFICIAL (decl))
24199 add_AT_flag (decl_die, DW_AT_artificial, 1);
24200
24201 add_accessibility_attribute (decl_die, decl);
24202
24203 /* Equate decl number to die, so that we can look up this decl later on. */
24204 equate_decl_number_to_die (decl, decl_die);
24205 }
24206
24207 /* Generate a DIE for a pointer to a member type. TYPE can be an
24208 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24209 pointer to member function. */
24210
24211 static void
24212 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24213 {
24214 if (lookup_type_die (type))
24215 return;
24216
24217 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24218 scope_die_for (type, context_die), type);
24219
24220 equate_type_number_to_die (type, ptr_die);
24221 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24222 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24223 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24224 context_die);
24225 add_alignment_attribute (ptr_die, type);
24226
24227 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24228 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24229 {
24230 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24231 add_AT_loc (ptr_die, DW_AT_use_location, op);
24232 }
24233 }
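/* What the single DW_OP_plus emitted above for DW_AT_use_location means:
   GCC represents a pointer to data member essentially as a byte offset
   into the object, and the member's address is the object's address plus
   that offset.  A hypothetical plain-C illustration (the struct and the
   helper name are made up for this sketch):  */
#include <stddef.h>

struct example_s { int a; int b; };

/* Apply a data-member "pointer" (an offset) to an object base address,
   the way a debugger evaluates base DW_OP_plus offset.  */
static int *
apply_ptr_to_member (struct example_s *obj, size_t member_offset)
{
  return (int *) ((char *) obj + member_offset);
}

/* e.g. apply_ptr_to_member (&x, offsetof (struct example_s, b)) == &x.b.  */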
24234
24235 static char *producer_string;
24236
24237 /* Return a heap allocated producer string including command line options
24238 if -grecord-gcc-switches. */
24239
24240 static char *
24241 gen_producer_string (void)
24242 {
24243 size_t j;
24244 auto_vec<const char *> switches;
24245 const char *language_string = lang_hooks.name;
24246 char *producer, *tail;
24247 const char *p;
24248 size_t len = dwarf_record_gcc_switches ? 0 : 3;
24249 size_t plen = strlen (language_string) + 1 + strlen (version_string);
24250
24251 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
24252 switch (save_decoded_options[j].opt_index)
24253 {
24254 case OPT_o:
24255 case OPT_d:
24256 case OPT_dumpbase:
24257 case OPT_dumpdir:
24258 case OPT_auxbase:
24259 case OPT_auxbase_strip:
24260 case OPT_quiet:
24261 case OPT_version:
24262 case OPT_v:
24263 case OPT_w:
24264 case OPT_L:
24265 case OPT_D:
24266 case OPT_I:
24267 case OPT_U:
24268 case OPT_SPECIAL_unknown:
24269 case OPT_SPECIAL_ignore:
24270 case OPT_SPECIAL_deprecated:
24271 case OPT_SPECIAL_program_name:
24272 case OPT_SPECIAL_input_file:
24273 case OPT_grecord_gcc_switches:
24274 case OPT__output_pch_:
24275 case OPT_fdiagnostics_show_location_:
24276 case OPT_fdiagnostics_show_option:
24277 case OPT_fdiagnostics_show_caret:
24278 case OPT_fdiagnostics_show_labels:
24279 case OPT_fdiagnostics_show_line_numbers:
24280 case OPT_fdiagnostics_color_:
24281 case OPT_fdiagnostics_format_:
24282 case OPT_fverbose_asm:
24283 case OPT____:
24284 case OPT__sysroot_:
24285 case OPT_nostdinc:
24286 case OPT_nostdinc__:
24287 case OPT_fpreprocessed:
24288 case OPT_fltrans_output_list_:
24289 case OPT_fresolution_:
24290 case OPT_fdebug_prefix_map_:
24291 case OPT_fmacro_prefix_map_:
24292 case OPT_ffile_prefix_map_:
24293 case OPT_fcompare_debug:
24294 case OPT_fchecking:
24295 case OPT_fchecking_:
24296 /* Ignore these. */
24297 continue;
24298 default:
24299 if (cl_options[save_decoded_options[j].opt_index].flags
24300 & CL_NO_DWARF_RECORD)
24301 continue;
24302 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
24303 == '-');
24304 switch (save_decoded_options[j].canonical_option[0][1])
24305 {
24306 case 'M':
24307 case 'i':
24308 case 'W':
24309 continue;
24310 case 'f':
24311 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
24312 "dump", 4) == 0)
24313 continue;
24314 break;
24315 default:
24316 break;
24317 }
24318 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
24319 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
24320 break;
24321 }
24322
24323 producer = XNEWVEC (char, plen + 1 + len + 1);
24324 tail = producer;
24325 sprintf (tail, "%s %s", language_string, version_string);
24326 tail += plen;
24327
24328 FOR_EACH_VEC_ELT (switches, j, p)
24329 {
24330 len = strlen (p);
24331 *tail = ' ';
24332 memcpy (tail + 1, p, len);
24333 tail += len + 1;
24334 }
24335
24336 *tail = '\0';
24337 return producer;
24338 }
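/* A rough standalone sketch of the string assembled above; the language,
   version and switch values are invented for the example rather than taken
   from GCC's option tables, and buffer handling is simplified.  */
#include <stdio.h>
#include <string.h>

static void
example_producer (char *buf, size_t bufsize)
{
  const char *lang = "GNU C17";              /* lang_hooks.name (example) */
  const char *version = "9.0.1";             /* version_string (example) */
  const char *switches[] = { "-O2", "-g" };  /* recorded switches (example) */

  snprintf (buf, bufsize, "%s %s", lang, version);
  for (size_t i = 0; i < sizeof (switches) / sizeof (*switches); i++)
    {
      strncat (buf, " ", bufsize - strlen (buf) - 1);
      strncat (buf, switches[i], bufsize - strlen (buf) - 1);
    }
}

/* This yields e.g. "GNU C17 9.0.1 -O2 -g", which is the shape of the
   DW_AT_producer string attached to the compilation unit DIE below.  */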
24339
24340 /* Given a C and/or C++ language/version string, return the "highest".
24341 C++ is assumed to be "higher" than C in this case. Used for merging
24342 LTO translation unit languages. */
24343 static const char *
24344 highest_c_language (const char *lang1, const char *lang2)
24345 {
24346 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24347 return "GNU C++17";
24348 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24349 return "GNU C++14";
24350 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24351 return "GNU C++11";
24352 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24353 return "GNU C++98";
24354
24355 if (strcmp ("GNU C2X", lang1) == 0 || strcmp ("GNU C2X", lang2) == 0)
24356 return "GNU C2X";
24357 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24358 return "GNU C17";
24359 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24360 return "GNU C11";
24361 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24362 return "GNU C99";
24363 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24364 return "GNU C89";
24365
24366 gcc_unreachable ();
24367 }
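/* For instance, merging a "GNU C11" unit with a "GNU C++14" unit yields
   "GNU C++14", and merging "GNU C89" with "GNU C99" yields "GNU C99";
   if neither input is one of the strings listed above, the
   gcc_unreachable triggers.  */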
24368
24369
24370 /* Generate the DIE for the compilation unit. */
24371
24372 static dw_die_ref
24373 gen_compile_unit_die (const char *filename)
24374 {
24375 dw_die_ref die;
24376 const char *language_string = lang_hooks.name;
24377 int language;
24378
24379 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24380
24381 if (filename)
24382 {
24383 add_name_attribute (die, filename);
24384 /* Don't add cwd for <built-in>. */
24385 if (filename[0] != '<')
24386 add_comp_dir_attribute (die);
24387 }
24388
24389 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24390
24391 /* If our producer is LTO try to figure out a common language to use
24392 from the global list of translation units. */
24393 if (strcmp (language_string, "GNU GIMPLE") == 0)
24394 {
24395 unsigned i;
24396 tree t;
24397 const char *common_lang = NULL;
24398
24399 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24400 {
24401 if (!TRANSLATION_UNIT_LANGUAGE (t))
24402 continue;
24403 if (!common_lang)
24404 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24405 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24406 ;
24407 else if (strncmp (common_lang, "GNU C", 5) == 0
24408 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24409 /* Mixing C and C++ is ok, use C++ in that case. */
24410 common_lang = highest_c_language (common_lang,
24411 TRANSLATION_UNIT_LANGUAGE (t));
24412 else
24413 {
24414 /* Fall back to C. */
24415 common_lang = NULL;
24416 break;
24417 }
24418 }
24419
24420 if (common_lang)
24421 language_string = common_lang;
24422 }
24423
24424 language = DW_LANG_C;
24425 if (strncmp (language_string, "GNU C", 5) == 0
24426 && ISDIGIT (language_string[5]))
24427 {
24428 language = DW_LANG_C89;
24429 if (dwarf_version >= 3 || !dwarf_strict)
24430 {
24431 if (strcmp (language_string, "GNU C89") != 0)
24432 language = DW_LANG_C99;
24433
24434 if (dwarf_version >= 5 /* || !dwarf_strict */)
24435 if (strcmp (language_string, "GNU C11") == 0
24436 || strcmp (language_string, "GNU C17") == 0
24437 || strcmp (language_string, "GNU C2X") == 0)
24438 language = DW_LANG_C11;
24439 }
24440 }
24441 else if (strncmp (language_string, "GNU C++", 7) == 0)
24442 {
24443 language = DW_LANG_C_plus_plus;
24444 if (dwarf_version >= 5 /* || !dwarf_strict */)
24445 {
24446 if (strcmp (language_string, "GNU C++11") == 0)
24447 language = DW_LANG_C_plus_plus_11;
24448 else if (strcmp (language_string, "GNU C++14") == 0)
24449 language = DW_LANG_C_plus_plus_14;
24450 else if (strcmp (language_string, "GNU C++17") == 0)
24451 /* For now. */
24452 language = DW_LANG_C_plus_plus_14;
24453 }
24454 }
24455 else if (strcmp (language_string, "GNU F77") == 0)
24456 language = DW_LANG_Fortran77;
24457 else if (dwarf_version >= 3 || !dwarf_strict)
24458 {
24459 if (strcmp (language_string, "GNU Ada") == 0)
24460 language = DW_LANG_Ada95;
24461 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24462 {
24463 language = DW_LANG_Fortran95;
24464 if (dwarf_version >= 5 /* || !dwarf_strict */)
24465 {
24466 if (strcmp (language_string, "GNU Fortran2003") == 0)
24467 language = DW_LANG_Fortran03;
24468 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24469 language = DW_LANG_Fortran08;
24470 }
24471 }
24472 else if (strcmp (language_string, "GNU Objective-C") == 0)
24473 language = DW_LANG_ObjC;
24474 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24475 language = DW_LANG_ObjC_plus_plus;
24476 else if (strcmp (language_string, "GNU D") == 0)
24477 language = DW_LANG_D;
24478 else if (dwarf_version >= 5 || !dwarf_strict)
24479 {
24480 if (strcmp (language_string, "GNU Go") == 0)
24481 language = DW_LANG_Go;
24482 }
24483 }
24484 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24485 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24486 language = DW_LANG_Fortran90;
24487 /* Likewise for Ada. */
24488 else if (strcmp (language_string, "GNU Ada") == 0)
24489 language = DW_LANG_Ada83;
24490
24491 add_AT_unsigned (die, DW_AT_language, language);
24492
24493 switch (language)
24494 {
24495 case DW_LANG_Fortran77:
24496 case DW_LANG_Fortran90:
24497 case DW_LANG_Fortran95:
24498 case DW_LANG_Fortran03:
24499 case DW_LANG_Fortran08:
24500 /* Fortran has case insensitive identifiers and the front-end
24501 lowercases everything. */
24502 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24503 break;
24504 default:
24505 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24506 break;
24507 }
24508 return die;
24509 }
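/* Illustrative outcomes of the mapping above (assuming the default
   non-strict DWARF setting): "GNU C++14" becomes DW_LANG_C_plus_plus_14
   with -gdwarf-5 but plain DW_LANG_C_plus_plus for DWARF 4, and
   "GNU C17" becomes DW_LANG_C11 with -gdwarf-5 but DW_LANG_C99 for
   DWARF 3 or 4.  */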
24510
24511 /* Generate the DIE for a base class. */
24512
24513 static void
24514 gen_inheritance_die (tree binfo, tree access, tree type,
24515 dw_die_ref context_die)
24516 {
24517 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24518 struct vlr_context ctx = { type, NULL };
24519
24520 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24521 context_die);
24522 add_data_member_location_attribute (die, binfo, &ctx);
24523
24524 if (BINFO_VIRTUAL_P (binfo))
24525 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24526
24527 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24528 children, otherwise the default is DW_ACCESS_public. In DWARF2
24529 the default has always been DW_ACCESS_private. */
24530 if (access == access_public_node)
24531 {
24532 if (dwarf_version == 2
24533 || context_die->die_tag == DW_TAG_class_type)
24534 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24535 }
24536 else if (access == access_protected_node)
24537 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24538 else if (dwarf_version > 2
24539 && context_die->die_tag != DW_TAG_class_type)
24540 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24541 }
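/* Concretely (illustrative, assuming DWARF 3 or later): for
   "struct D : B { }" the public base needs no DW_AT_accessibility, since
   public is already the default outside DW_TAG_class_type children; for
   "class D : public B { }" an explicit DW_ACCESS_public is emitted; and
   "struct D : private B { }" gets an explicit DW_ACCESS_private.  */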
24542
24543 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24544 structure. */
24545
24546 static bool
24547 is_variant_part (tree decl)
24548 {
24549 return (TREE_CODE (decl) == FIELD_DECL
24550 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24551 }
24552
24553 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24554 return the FIELD_DECL. Return NULL_TREE otherwise. */
24555
24556 static tree
24557 analyze_discr_in_predicate (tree operand, tree struct_type)
24558 {
24559 while (CONVERT_EXPR_P (operand))
24560 operand = TREE_OPERAND (operand, 0);
24561
24562 /* Match field access to members of struct_type only. */
24563 if (TREE_CODE (operand) == COMPONENT_REF
24564 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24565 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24566 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24567 return TREE_OPERAND (operand, 1);
24568 else
24569 return NULL_TREE;
24570 }
24571
24572 /* Check that SRC is a constant integer that can be represented as a native
24573 integer constant (either signed or unsigned). If so, store it into DEST and
24574 return true. Return false otherwise. */
24575
24576 static bool
24577 get_discr_value (tree src, dw_discr_value *dest)
24578 {
24579 tree discr_type = TREE_TYPE (src);
24580
24581 if (lang_hooks.types.get_debug_type)
24582 {
24583 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24584 if (debug_type != NULL)
24585 discr_type = debug_type;
24586 }
24587
24588 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24589 return false;
24590
24591 /* Signedness can vary between the original type and the debug type. This
24592 can happen for character types in Ada for instance: the character type
24593 used for code generation can be signed, to be compatible with the C one,
24594 but from a debugger point of view, it must be unsigned. */
24595 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24596 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24597
24598 if (is_orig_unsigned != is_debug_unsigned)
24599 src = fold_convert (discr_type, src);
24600
24601 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24602 return false;
24603
24604 dest->pos = is_debug_unsigned;
24605 if (is_debug_unsigned)
24606 dest->v.uval = tree_to_uhwi (src);
24607 else
24608 dest->v.sval = tree_to_shwi (src);
24609
24610 return true;
24611 }
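/* A standalone sketch of the signedness adjustment above: when the type
   used for code generation is signed but the debug-facing type is
   unsigned (the Ada character case mentioned above), the value has to be
   re-read with the debug type's signedness.  Purely illustrative, using
   fixed-width integers instead of trees:  */
#include <stdint.h>

static uint64_t
discr_value_for_debugger (int8_t codegen_value)
{
  /* Convert through the unsigned debug type, as the fold_convert above
     does for trees; e.g. a codegen value of -1 is presented as 255.  */
  return (uint8_t) codegen_value;
}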
24612
24613 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24614 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24615 store NULL_TREE in DISCR_DECL. Otherwise:
24616
24617 - store the discriminant field in STRUCT_TYPE that controls the variant
24618 part to *DISCR_DECL
24619
24620 - put in *DISCR_LISTS_P an array where for each variant, the item
24621 represents the corresponding matching list of discriminant values.
24622
24623 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24624 the above array.
24625
24626 Note that when the array is allocated (i.e. when the analysis is
24627 successful), it is up to the caller to free the array. */
24628
24629 static void
24630 analyze_variants_discr (tree variant_part_decl,
24631 tree struct_type,
24632 tree *discr_decl,
24633 dw_discr_list_ref **discr_lists_p,
24634 unsigned *discr_lists_length)
24635 {
24636 tree variant_part_type = TREE_TYPE (variant_part_decl);
24637 tree variant;
24638 dw_discr_list_ref *discr_lists;
24639 unsigned i;
24640
24641 /* Compute how many variants there are in this variant part. */
24642 *discr_lists_length = 0;
24643 for (variant = TYPE_FIELDS (variant_part_type);
24644 variant != NULL_TREE;
24645 variant = DECL_CHAIN (variant))
24646 ++*discr_lists_length;
24647
24648 *discr_decl = NULL_TREE;
24649 *discr_lists_p
24650 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24651 sizeof (**discr_lists_p));
24652 discr_lists = *discr_lists_p;
24653
24654 /* And then analyze all variants to extract discriminant information for all
24655 of them. This analysis is conservative: as soon as we detect something we
24656 do not support, abort everything and pretend we found nothing. */
24657 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24658 variant != NULL_TREE;
24659 variant = DECL_CHAIN (variant), ++i)
24660 {
24661 tree match_expr = DECL_QUALIFIER (variant);
24662
24663 /* Now, try to analyze the predicate and deduce a discriminant for
24664 it. */
24665 if (match_expr == boolean_true_node)
24666 /* Typically happens for the default variant: it matches all cases that
24667 previous variants rejected. Don't output any matching value for
24668 this one. */
24669 continue;
24670
24671 /* The following loop tries to iterate over each discriminant
24672 possibility: single values or ranges. */
24673 while (match_expr != NULL_TREE)
24674 {
24675 tree next_round_match_expr;
24676 tree candidate_discr = NULL_TREE;
24677 dw_discr_list_ref new_node = NULL;
24678
24679 /* Possibilities are matched one after the other by nested
24680 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24681 continue with the rest at next iteration. */
24682 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24683 {
24684 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24685 match_expr = TREE_OPERAND (match_expr, 1);
24686 }
24687 else
24688 next_round_match_expr = NULL_TREE;
24689
24690 if (match_expr == boolean_false_node)
24691 /* This sub-expression matches nothing: just wait for the next
24692 one. */
24693 ;
24694
24695 else if (TREE_CODE (match_expr) == EQ_EXPR)
24696 {
24697 /* We are matching: <discr_field> == <integer_cst>
24698 This sub-expression matches a single value. */
24699 tree integer_cst = TREE_OPERAND (match_expr, 1);
24700
24701 candidate_discr
24702 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24703 struct_type);
24704
24705 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24706 if (!get_discr_value (integer_cst,
24707 &new_node->dw_discr_lower_bound))
24708 goto abort;
24709 new_node->dw_discr_range = false;
24710 }
24711
24712 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24713 {
24714 /* We are matching:
24715 <discr_field> > <integer_cst>
24716 && <discr_field> < <integer_cst>.
24717 This sub-expression matches the range of values between the
24718 two matched integer constants. Note that comparisons can be
24719 inclusive or exclusive. */
24720 tree candidate_discr_1, candidate_discr_2;
24721 tree lower_cst, upper_cst;
24722 bool lower_cst_included, upper_cst_included;
24723 tree lower_op = TREE_OPERAND (match_expr, 0);
24724 tree upper_op = TREE_OPERAND (match_expr, 1);
24725
24726 /* When the comparison is exclusive, the integer constant is not
24727 the discriminant range bound we are looking for: we will have
24728 to increment or decrement it. */
24729 if (TREE_CODE (lower_op) == GE_EXPR)
24730 lower_cst_included = true;
24731 else if (TREE_CODE (lower_op) == GT_EXPR)
24732 lower_cst_included = false;
24733 else
24734 goto abort;
24735
24736 if (TREE_CODE (upper_op) == LE_EXPR)
24737 upper_cst_included = true;
24738 else if (TREE_CODE (upper_op) == LT_EXPR)
24739 upper_cst_included = false;
24740 else
24741 goto abort;
24742
24743 /* Extract the discriminant from the first operand and check it
24744 is consistent with the same analysis in the second
24745 operand. */
24746 candidate_discr_1
24747 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24748 struct_type);
24749 candidate_discr_2
24750 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24751 struct_type);
24752 if (candidate_discr_1 == candidate_discr_2)
24753 candidate_discr = candidate_discr_1;
24754 else
24755 goto abort;
24756
24757 /* Extract bounds from both. */
24758 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24759 lower_cst = TREE_OPERAND (lower_op, 1);
24760 upper_cst = TREE_OPERAND (upper_op, 1);
24761
24762 if (!lower_cst_included)
24763 lower_cst
24764 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24765 build_int_cst (TREE_TYPE (lower_cst), 1));
24766 if (!upper_cst_included)
24767 upper_cst
24768 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24769 build_int_cst (TREE_TYPE (upper_cst), 1));
24770
24771 if (!get_discr_value (lower_cst,
24772 &new_node->dw_discr_lower_bound)
24773 || !get_discr_value (upper_cst,
24774 &new_node->dw_discr_upper_bound))
24775 goto abort;
24776
24777 new_node->dw_discr_range = true;
24778 }
24779
24780 else if ((candidate_discr
24781 = analyze_discr_in_predicate (match_expr, struct_type))
24782 && TREE_TYPE (candidate_discr) == boolean_type_node)
24783 {
24784 /* We are matching: <discr_field> for a boolean discriminant.
24785 This sub-expression matches boolean_true_node. */
24786 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24787 if (!get_discr_value (boolean_true_node,
24788 &new_node->dw_discr_lower_bound))
24789 goto abort;
24790 new_node->dw_discr_range = false;
24791 }
24792
24793 else
24794 /* Unsupported sub-expression: we cannot determine the set of
24795 matching discriminant values. Abort everything. */
24796 goto abort;
24797
24798 /* If the discriminant info is not consistent with what we saw so
24799 far, consider the analysis failed and abort everything. */
24800 if (candidate_discr == NULL_TREE
24801 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24802 goto abort;
24803 else
24804 *discr_decl = candidate_discr;
24805
24806 if (new_node != NULL)
24807 {
24808 new_node->dw_discr_next = discr_lists[i];
24809 discr_lists[i] = new_node;
24810 }
24811 match_expr = next_round_match_expr;
24812 }
24813 }
24814
24815 /* If we reach this point, we could match everything we were interested
24816 in. */
24817 return;
24818
24819 abort:
24820 /* Clean all data structure and return no result. */
24821 free (*discr_lists_p);
24822 *discr_lists_p = NULL;
24823 *discr_decl = NULL_TREE;
24824 }
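/* Illustrative example of what the walk above decodes: a variant whose
   DECL_QUALIFIER corresponds, in source terms, to

     d == 1 || (d >= 3 && d <= 5)

   yields the discriminant field D plus a two-node list for that variant:
   the single value 1 and the inclusive range 3..5.  A qualifier of
   boolean_true_node (the default variant) contributes no list at all.  */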
24825
24826 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24827 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24828 under CONTEXT_DIE.
24829
24830 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24831 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
24832 this type, which are record types, represent the available variants and each
24833 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24834 values are inferred from these attributes.
24835
24836 In trees, the offsets for the fields inside these sub-records are relative
24837 to the variant part itself, whereas the corresponding DIEs should have
24838 offset attributes that are relative to the embedding record base address.
24839 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
24840 must be an expression that computes the offset of the variant part to
24841 describe in DWARF. */
24842
24843 static void
24844 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
24845 dw_die_ref context_die)
24846 {
24847 const tree variant_part_type = TREE_TYPE (variant_part_decl);
24848 tree variant_part_offset = vlr_ctx->variant_part_offset;
24849 struct loc_descr_context ctx = {
24850 vlr_ctx->struct_type, /* context_type */
24851 NULL_TREE, /* base_decl */
24852 NULL, /* dpi */
24853 false, /* placeholder_arg */
24854 false /* placeholder_seen */
24855 };
24856
24857 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
24858 NULL_TREE if there is no such field. */
24859 tree discr_decl = NULL_TREE;
24860 dw_discr_list_ref *discr_lists;
24861 unsigned discr_lists_length = 0;
24862 unsigned i;
24863
24864 dw_die_ref dwarf_proc_die = NULL;
24865 dw_die_ref variant_part_die
24866 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
24867
24868 equate_decl_number_to_die (variant_part_decl, variant_part_die);
24869
24870 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
24871 &discr_decl, &discr_lists, &discr_lists_length);
24872
24873 if (discr_decl != NULL_TREE)
24874 {
24875 dw_die_ref discr_die = lookup_decl_die (discr_decl);
24876
24877 if (discr_die)
24878 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
24879 else
24880 /* We have no DIE for the discriminant, so just discard all
24881 discriminant information in the output. */
24882 discr_decl = NULL_TREE;
24883 }
24884
24885 /* If the offset for this variant part is more complex than a constant,
24886 create a DWARF procedure for it so that we will not have to generate DWARF
24887 expressions for it for each member. */
24888 if (TREE_CODE (variant_part_offset) != INTEGER_CST
24889 && (dwarf_version >= 3 || !dwarf_strict))
24890 {
24891 const tree dwarf_proc_fndecl
24892 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
24893 build_function_type (TREE_TYPE (variant_part_offset),
24894 NULL_TREE));
24895 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
24896 const dw_loc_descr_ref dwarf_proc_body
24897 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
24898
24899 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
24900 dwarf_proc_fndecl, context_die);
24901 if (dwarf_proc_die != NULL)
24902 variant_part_offset = dwarf_proc_call;
24903 }
24904
24905 /* Output DIEs for all variants. */
24906 i = 0;
24907 for (tree variant = TYPE_FIELDS (variant_part_type);
24908 variant != NULL_TREE;
24909 variant = DECL_CHAIN (variant), ++i)
24910 {
24911 tree variant_type = TREE_TYPE (variant);
24912 dw_die_ref variant_die;
24913
24914 /* All variants (i.e. members of a variant part) are supposed to be
24915 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
24916 under these records. */
24917 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
24918
24919 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
24920 equate_decl_number_to_die (variant, variant_die);
24921
24922 /* Output discriminant values this variant matches, if any. */
24923 if (discr_decl == NULL || discr_lists[i] == NULL)
24924 /* If we have no discriminant information at all, or no matching
24925 values for this variant, this is probably the default variant: as
24926 the standard says, don't output any discriminant value/list attribute. */
24927 ;
24928 else if (discr_lists[i]->dw_discr_next == NULL
24929 && !discr_lists[i]->dw_discr_range)
24930 /* If there is only one accepted value, don't bother outputting a
24931 list. */
24932 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
24933 else
24934 add_discr_list (variant_die, discr_lists[i]);
24935
24936 for (tree member = TYPE_FIELDS (variant_type);
24937 member != NULL_TREE;
24938 member = DECL_CHAIN (member))
24939 {
24940 struct vlr_context vlr_sub_ctx = {
24941 vlr_ctx->struct_type, /* struct_type */
24942 NULL /* variant_part_offset */
24943 };
24944 if (is_variant_part (member))
24945 {
24946 /* All offsets for fields inside variant parts are relative to
24947 the top-level embedding RECORD_TYPE's base address. On the
24948 other hand, offsets in GCC's types are relative to the
24949 nested-most variant part. So we have to sum offsets each time
24950 we recurse. */
24951
24952 vlr_sub_ctx.variant_part_offset
24953 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
24954 variant_part_offset, byte_position (member));
24955 gen_variant_part (member, &vlr_sub_ctx, variant_die);
24956 }
24957 else
24958 {
24959 vlr_sub_ctx.variant_part_offset = variant_part_offset;
24960 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
24961 }
24962 }
24963 }
24964
24965 free (discr_lists);
24966 }
24967
24968 /* Generate a DIE for a class member. */
24969
24970 static void
24971 gen_member_die (tree type, dw_die_ref context_die)
24972 {
24973 tree member;
24974 tree binfo = TYPE_BINFO (type);
24975
24976 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
24977
24978 /* If this is not an incomplete type, output descriptions of each of its
24979 members. Note that as we output the DIEs necessary to represent the
24980 members of this record or union type, we will also be trying to output
24981 DIEs to represent the *types* of those members. However the `type'
24982 function (above) will specifically avoid generating type DIEs for member
24983 types *within* the list of member DIEs for this (containing) type except
24984 for those types (of members) which are explicitly marked as also being
24985 members of this (containing) type themselves. The g++ front-end can
24986 force any given type to be treated as a member of some other (containing)
24987 type by setting the TYPE_CONTEXT of the given (member) type to point to
24988 the TREE node representing the appropriate (containing) type. */
24989
24990 /* First output info about the base classes. */
24991 if (binfo)
24992 {
24993 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
24994 int i;
24995 tree base;
24996
24997 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
24998 gen_inheritance_die (base,
24999 (accesses ? (*accesses)[i] : access_public_node),
25000 type,
25001 context_die);
25002 }
25003
25004 /* Now output info about the data members and type members. */
25005 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
25006 {
25007 struct vlr_context vlr_ctx = { type, NULL_TREE };
25008 bool static_inline_p
25009 = (TREE_STATIC (member)
25010 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
25011 != -1));
25012
25013 /* Ignore clones. */
25014 if (DECL_ABSTRACT_ORIGIN (member))
25015 continue;
25016
25017 /* If we thought we were generating minimal debug info for TYPE
25018 and then changed our minds, some of the member declarations
25019 may have already been defined. Don't define them again, but
25020 do put them in the right order. */
25021
25022 if (dw_die_ref child = lookup_decl_die (member))
25023 {
25024 /* Handle inline static data members, which only have in-class
25025 declarations. */
25026 dw_die_ref ref = NULL;
25027 if (child->die_tag == DW_TAG_variable
25028 && child->die_parent == comp_unit_die ())
25029 {
25030 ref = get_AT_ref (child, DW_AT_specification);
25031 /* For C++17 inline static data members followed by redundant
25032 out of class redeclaration, we might get here with
25033 child being the DIE created for the out of class
25034 redeclaration and with its DW_AT_specification being
25035 the DIE created for in-class definition. We want to
25036 reparent the latter, and don't want to create another
25037 DIE with DW_AT_specification in that case, because
25038 we already have one. */
25039 if (ref
25040 && static_inline_p
25041 && ref->die_tag == DW_TAG_variable
25042 && ref->die_parent == comp_unit_die ()
25043 && get_AT (ref, DW_AT_specification) == NULL)
25044 {
25045 child = ref;
25046 ref = NULL;
25047 static_inline_p = false;
25048 }
25049 }
25050
25051 if (child->die_tag == DW_TAG_variable
25052 && child->die_parent == comp_unit_die ()
25053 && ref == NULL)
25054 {
25055 reparent_child (child, context_die);
25056 if (dwarf_version < 5)
25057 child->die_tag = DW_TAG_member;
25058 }
25059 else
25060 splice_child_die (context_die, child);
25061 }
25062
25063 /* Do not generate standard DWARF for variant parts if we are generating
25064 the corresponding GNAT encodings: DIEs generated for both would
25065 conflict in our mappings. */
25066 else if (is_variant_part (member)
25067 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
25068 {
25069 vlr_ctx.variant_part_offset = byte_position (member);
25070 gen_variant_part (member, &vlr_ctx, context_die);
25071 }
25072 else
25073 {
25074 vlr_ctx.variant_part_offset = NULL_TREE;
25075 gen_decl_die (member, NULL, &vlr_ctx, context_die);
25076 }
25077
25078 /* For C++ inline static data members emit immediately a DW_TAG_variable
25079 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
25080 DW_AT_specification. */
25081 if (static_inline_p)
25082 {
25083 int old_extern = DECL_EXTERNAL (member);
25084 DECL_EXTERNAL (member) = 0;
25085 gen_decl_die (member, NULL, NULL, comp_unit_die ());
25086 DECL_EXTERNAL (member) = old_extern;
25087 }
25088 }
25089 }
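/* Rough C++17 example for the static_inline_p handling above:

     struct S { static inline int x = 1; };

   The early DIE for the in-class declaration of S::x is a DW_TAG_variable
   parented at the CU; the code above reparents it under S's DIE (retagging
   it DW_TAG_member for DWARF < 5) and then emits a second DW_TAG_variable
   at the CU level that refers back to it through DW_AT_specification.  */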
25090
25091 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
25092 is set, we pretend that the type was never defined, so we only get the
25093 member DIEs needed by later specification DIEs. */
25094
25095 static void
25096 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
25097 enum debug_info_usage usage)
25098 {
25099 if (TREE_ASM_WRITTEN (type))
25100 {
25101 /* Fill in the bound of variable-length fields in late dwarf if
25102 still incomplete. */
25103 if (!early_dwarf && variably_modified_type_p (type, NULL))
25104 for (tree member = TYPE_FIELDS (type);
25105 member;
25106 member = DECL_CHAIN (member))
25107 fill_variable_array_bounds (TREE_TYPE (member));
25108 return;
25109 }
25110
25111 dw_die_ref type_die = lookup_type_die (type);
25112 dw_die_ref scope_die = 0;
25113 int nested = 0;
25114 int complete = (TYPE_SIZE (type)
25115 && (! TYPE_STUB_DECL (type)
25116 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
25117 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
25118 complete = complete && should_emit_struct_debug (type, usage);
25119
25120 if (type_die && ! complete)
25121 return;
25122
25123 if (TYPE_CONTEXT (type) != NULL_TREE
25124 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25125 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
25126 nested = 1;
25127
25128 scope_die = scope_die_for (type, context_die);
25129
25130 /* Generate child DIEs for template parameters. */
25131 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
25132 schedule_generic_params_dies_gen (type);
25133
25134 if (! type_die || (nested && is_cu_die (scope_die)))
25135 /* First occurrence of type or toplevel definition of nested class. */
25136 {
25137 dw_die_ref old_die = type_die;
25138
25139 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
25140 ? record_type_tag (type) : DW_TAG_union_type,
25141 scope_die, type);
25142 equate_type_number_to_die (type, type_die);
25143 if (old_die)
25144 add_AT_specification (type_die, old_die);
25145 else
25146 add_name_attribute (type_die, type_tag (type));
25147 }
25148 else
25149 remove_AT (type_die, DW_AT_declaration);
25150
25151 /* If this type has been completed, then give it a byte_size attribute and
25152 then give a list of members. */
25153 if (complete && !ns_decl)
25154 {
25155 /* Prevent infinite recursion in cases where the type of some member of
25156 this type is expressed in terms of this type itself. */
25157 TREE_ASM_WRITTEN (type) = 1;
25158 add_byte_size_attribute (type_die, type);
25159 add_alignment_attribute (type_die, type);
25160 if (TYPE_STUB_DECL (type) != NULL_TREE)
25161 {
25162 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
25163 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
25164 }
25165
25166 /* If the first reference to this type was as the return type of an
25167 inline function, then it may not have a parent. Fix this now. */
25168 if (type_die->die_parent == NULL)
25169 add_child_die (scope_die, type_die);
25170
25171 gen_member_die (type, type_die);
25172
25173 add_gnat_descriptive_type_attribute (type_die, type, context_die);
25174 if (TYPE_ARTIFICIAL (type))
25175 add_AT_flag (type_die, DW_AT_artificial, 1);
25176
25177 /* GNU extension: Record what type our vtable lives in. */
25178 if (TYPE_VFIELD (type))
25179 {
25180 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25181
25182 gen_type_die (vtype, context_die);
25183 add_AT_die_ref (type_die, DW_AT_containing_type,
25184 lookup_type_die (vtype));
25185 }
25186 }
25187 else
25188 {
25189 add_AT_flag (type_die, DW_AT_declaration, 1);
25190
25191 /* We don't need to do this for function-local types. */
25192 if (TYPE_STUB_DECL (type)
25193 && ! decl_function_context (TYPE_STUB_DECL (type)))
25194 vec_safe_push (incomplete_types, type);
25195 }
25196
25197 if (get_AT (type_die, DW_AT_name))
25198 add_pubtype (type, type_die);
25199 }
25200
25201 /* Generate a DIE for a subroutine _type_. */
25202
25203 static void
25204 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25205 {
25206 tree return_type = TREE_TYPE (type);
25207 dw_die_ref subr_die
25208 = new_die (DW_TAG_subroutine_type,
25209 scope_die_for (type, context_die), type);
25210
25211 equate_type_number_to_die (type, subr_die);
25212 add_prototyped_attribute (subr_die, type);
25213 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25214 context_die);
25215 add_alignment_attribute (subr_die, type);
25216 gen_formal_types_die (type, subr_die);
25217
25218 if (get_AT (subr_die, DW_AT_name))
25219 add_pubtype (type, subr_die);
25220 if ((dwarf_version >= 5 || !dwarf_strict)
25221 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25222 add_AT_flag (subr_die, DW_AT_reference, 1);
25223 if ((dwarf_version >= 5 || !dwarf_strict)
25224 && lang_hooks.types.type_dwarf_attribute (type,
25225 DW_AT_rvalue_reference) != -1)
25226 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25227 }
25228
25229 /* Generate a DIE for a type definition. */
25230
25231 static void
25232 gen_typedef_die (tree decl, dw_die_ref context_die)
25233 {
25234 dw_die_ref type_die;
25235 tree type;
25236
25237 if (TREE_ASM_WRITTEN (decl))
25238 {
25239 if (DECL_ORIGINAL_TYPE (decl))
25240 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25241 return;
25242 }
25243
25244 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25245 checks in process_scope_var and modified_type_die), this should be called
25246 only for original types. */
25247 gcc_assert (decl_ultimate_origin (decl) == NULL
25248 || decl_ultimate_origin (decl) == decl);
25249
25250 TREE_ASM_WRITTEN (decl) = 1;
25251 type_die = new_die (DW_TAG_typedef, context_die, decl);
25252
25253 add_name_and_src_coords_attributes (type_die, decl);
25254 if (DECL_ORIGINAL_TYPE (decl))
25255 {
25256 type = DECL_ORIGINAL_TYPE (decl);
25257 if (type == error_mark_node)
25258 return;
25259
25260 gcc_assert (type != TREE_TYPE (decl));
25261 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25262 }
25263 else
25264 {
25265 type = TREE_TYPE (decl);
25266 if (type == error_mark_node)
25267 return;
25268
25269 if (is_naming_typedef_decl (TYPE_NAME (type)))
25270 {
25271 /* Here, we are in the case of decl being a typedef naming
25272 an anonymous type, e.g:
25273 typedef struct {...} foo;
25274 In that case TREE_TYPE (decl) is not a typedef variant
25275 type and TYPE_NAME of the anonymous type is set to the
25276 TYPE_DECL of the typedef. This construct is emitted by
25277 the C++ FE.
25278
25279 TYPE is the anonymous struct named by the typedef
25280 DECL. As we need the DW_AT_type attribute of the
25281 DW_TAG_typedef to point to the DIE of TYPE, let's
25282 generate that DIE right away. add_type_attribute
25283 called below will then pick (via lookup_type_die) that
25284 anonymous struct DIE. */
25285 if (!TREE_ASM_WRITTEN (type))
25286 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25287
25288 /* This is a GNU Extension. We are adding a
25289 DW_AT_linkage_name attribute to the DIE of the
25290 anonymous struct TYPE. The value of that attribute
25291 is the name of the typedef decl naming the anonymous
25292 struct. This greatly eases the work of consumers of
25293 this debug info. */
25294 add_linkage_name_raw (lookup_type_die (type), decl);
25295 }
25296 }
25297
25298 add_type_attribute (type_die, type, decl_quals (decl), false,
25299 context_die);
25300
25301 if (is_naming_typedef_decl (decl))
25302 /* We want that all subsequent calls to lookup_type_die with
25303 TYPE in argument yield the DW_TAG_typedef we have just
25304 created. */
25305 equate_type_number_to_die (type, type_die);
25306
25307 add_alignment_attribute (type_die, TREE_TYPE (decl));
25308
25309 add_accessibility_attribute (type_die, decl);
25310
25311 if (DECL_ABSTRACT_P (decl))
25312 equate_decl_number_to_die (decl, type_die);
25313
25314 if (get_AT (type_die, DW_AT_name))
25315 add_pubtype (decl, type_die);
25316 }
25317
25318 /* Generate a DIE for a struct, class, enum or union type. */
25319
25320 static void
25321 gen_tagged_type_die (tree type,
25322 dw_die_ref context_die,
25323 enum debug_info_usage usage)
25324 {
25325 if (type == NULL_TREE
25326 || !is_tagged_type (type))
25327 return;
25328
25329 if (TREE_ASM_WRITTEN (type))
25330 ;
25331 /* If this is a nested type whose containing class hasn't been written
25332 out yet, writing it out will cover this one, too. This does not apply
25333 to instantiations of member class templates; they need to be added to
25334 the containing class as they are generated. FIXME: This hurts the
25335 idea of combining type decls from multiple TUs, since we can't predict
25336 what set of template instantiations we'll get. */
25337 else if (TYPE_CONTEXT (type)
25338 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25339 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25340 {
25341 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25342
25343 if (TREE_ASM_WRITTEN (type))
25344 return;
25345
25346 /* If that failed, attach ourselves to the stub. */
25347 context_die = lookup_type_die (TYPE_CONTEXT (type));
25348 }
25349 else if (TYPE_CONTEXT (type) != NULL_TREE
25350 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25351 {
25352 /* If this type is local to a function that hasn't been written
25353 out yet, use a NULL context for now; it will be fixed up in
25354 decls_for_scope. */
25355 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25356 /* A declaration DIE doesn't count; nested types need to go in the
25357 specification. */
25358 if (context_die && is_declaration_die (context_die))
25359 context_die = NULL;
25360 }
25361 else
25362 context_die = declare_in_namespace (type, context_die);
25363
25364 if (TREE_CODE (type) == ENUMERAL_TYPE)
25365 {
25366 /* This might have been written out by the call to
25367 declare_in_namespace. */
25368 if (!TREE_ASM_WRITTEN (type))
25369 gen_enumeration_type_die (type, context_die);
25370 }
25371 else
25372 gen_struct_or_union_type_die (type, context_die, usage);
25373
25374 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25375 it up if it is ever completed. gen_*_type_die will set it for us
25376 when appropriate. */
25377 }
25378
25379 /* Generate a type description DIE. */
25380
25381 static void
25382 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25383 enum debug_info_usage usage)
25384 {
25385 struct array_descr_info info;
25386
25387 if (type == NULL_TREE || type == error_mark_node)
25388 return;
25389
25390 if (flag_checking && type)
25391 verify_type (type);
25392
25393 if (TYPE_NAME (type) != NULL_TREE
25394 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25395 && is_redundant_typedef (TYPE_NAME (type))
25396 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25397 /* The DECL of this type is a typedef we don't want to emit debug
25398 info for but we want debug info for its underlying typedef.
25399 This can happen, e.g., for the injected-class-name of a C++
25400 type. */
25401 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25402
25403 /* If TYPE is a typedef type variant, let's generate debug info
25404 for the parent typedef which TYPE is a type of. */
25405 if (typedef_variant_p (type))
25406 {
25407 if (TREE_ASM_WRITTEN (type))
25408 return;
25409
25410 tree name = TYPE_NAME (type);
25411 tree origin = decl_ultimate_origin (name);
25412 if (origin != NULL && origin != name)
25413 {
25414 gen_decl_die (origin, NULL, NULL, context_die);
25415 return;
25416 }
25417
25418 /* Prevent broken recursion; we can't hand off to the same type. */
25419 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25420
25421 /* Give typedefs the right scope. */
25422 context_die = scope_die_for (type, context_die);
25423
25424 TREE_ASM_WRITTEN (type) = 1;
25425
25426 gen_decl_die (name, NULL, NULL, context_die);
25427 return;
25428 }
25429
25430 /* If type is an anonymous tagged type named by a typedef, let's
25431 generate debug info for the typedef. */
25432 if (is_naming_typedef_decl (TYPE_NAME (type)))
25433 {
25434 /* Give typedefs the right scope. */
25435 context_die = scope_die_for (type, context_die);
25436
25437 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25438 return;
25439 }
25440
25441 if (lang_hooks.types.get_debug_type)
25442 {
25443 tree debug_type = lang_hooks.types.get_debug_type (type);
25444
25445 if (debug_type != NULL_TREE && debug_type != type)
25446 {
25447 gen_type_die_with_usage (debug_type, context_die, usage);
25448 return;
25449 }
25450 }
25451
25452 /* We are going to output a DIE to represent the unqualified version
25453 of this type (i.e. without any const or volatile qualifiers) so
25454 get the main variant (i.e. the unqualified version) of this type
25455 now. (Vectors and arrays are special because the debugging info is in the
25456 cloned type itself. Similarly function/method types can contain extra
25457 ref-qualification). */
25458 if (TREE_CODE (type) == FUNCTION_TYPE
25459 || TREE_CODE (type) == METHOD_TYPE)
25460 {
25461 /* For function/method types, can't use type_main_variant here,
25462 because that can have different ref-qualifiers for C++,
25463 but try to canonicalize. */
25464 tree main = TYPE_MAIN_VARIANT (type);
25465 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25466 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25467 && check_base_type (t, main)
25468 && check_lang_type (t, type))
25469 {
25470 type = t;
25471 break;
25472 }
25473 }
25474 else if (TREE_CODE (type) != VECTOR_TYPE
25475 && TREE_CODE (type) != ARRAY_TYPE)
25476 type = type_main_variant (type);
25477
25478 /* If this is an array type with hidden descriptor, handle it first. */
25479 if (!TREE_ASM_WRITTEN (type)
25480 && lang_hooks.types.get_array_descr_info)
25481 {
25482 memset (&info, 0, sizeof (info));
25483 if (lang_hooks.types.get_array_descr_info (type, &info))
25484 {
25485 /* Fortran sometimes emits array types with no dimension. */
25486 gcc_assert (info.ndimensions >= 0
25487 && (info.ndimensions
25488 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25489 gen_descr_array_type_die (type, &info, context_die);
25490 TREE_ASM_WRITTEN (type) = 1;
25491 return;
25492 }
25493 }
25494
25495 if (TREE_ASM_WRITTEN (type))
25496 {
25497 /* Variable-length types may be incomplete even if
25498 TREE_ASM_WRITTEN. For such types, fall through to
25499 gen_array_type_die() and possibly fill in
25500 DW_AT_{upper,lower}_bound attributes. */
25501 if ((TREE_CODE (type) != ARRAY_TYPE
25502 && TREE_CODE (type) != RECORD_TYPE
25503 && TREE_CODE (type) != UNION_TYPE
25504 && TREE_CODE (type) != QUAL_UNION_TYPE)
25505 || !variably_modified_type_p (type, NULL))
25506 return;
25507 }
25508
25509 switch (TREE_CODE (type))
25510 {
25511 case ERROR_MARK:
25512 break;
25513
25514 case POINTER_TYPE:
25515 case REFERENCE_TYPE:
25516 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25517 ensures that the gen_type_die recursion will terminate even if the
25518 type is recursive. Recursive types are possible in Ada. */
25519 /* ??? We could perhaps do this for all types before the switch
25520 statement. */
25521 TREE_ASM_WRITTEN (type) = 1;
25522
25523 /* For these types, all that is required is that we output a DIE (or a
25524 set of DIEs) to represent the "basis" type. */
25525 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25526 DINFO_USAGE_IND_USE);
25527 break;
25528
25529 case OFFSET_TYPE:
25530 /* This code is used for C++ pointer-to-data-member types.
25531 Output a description of the relevant class type. */
25532 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25533 DINFO_USAGE_IND_USE);
25534
25535 /* Output a description of the type of the object pointed to. */
25536 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25537 DINFO_USAGE_IND_USE);
25538
25539 /* Now output a DIE to represent this pointer-to-data-member type
25540 itself. */
25541 gen_ptr_to_mbr_type_die (type, context_die);
25542 break;
25543
25544 case FUNCTION_TYPE:
25545 /* Force out return type (in case it wasn't forced out already). */
25546 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25547 DINFO_USAGE_DIR_USE);
25548 gen_subroutine_type_die (type, context_die);
25549 break;
25550
25551 case METHOD_TYPE:
25552 /* Force out return type (in case it wasn't forced out already). */
25553 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25554 DINFO_USAGE_DIR_USE);
25555 gen_subroutine_type_die (type, context_die);
25556 break;
25557
25558 case ARRAY_TYPE:
25559 case VECTOR_TYPE:
25560 gen_array_type_die (type, context_die);
25561 break;
25562
25563 case ENUMERAL_TYPE:
25564 case RECORD_TYPE:
25565 case UNION_TYPE:
25566 case QUAL_UNION_TYPE:
25567 gen_tagged_type_die (type, context_die, usage);
25568 return;
25569
25570 case VOID_TYPE:
25571 case INTEGER_TYPE:
25572 case REAL_TYPE:
25573 case FIXED_POINT_TYPE:
25574 case COMPLEX_TYPE:
25575 case BOOLEAN_TYPE:
25576 /* No DIEs needed for fundamental types. */
25577 break;
25578
25579 case NULLPTR_TYPE:
25580 case LANG_TYPE:
25581 /* Just use DW_TAG_unspecified_type. */
25582 {
25583 dw_die_ref type_die = lookup_type_die (type);
25584 if (type_die == NULL)
25585 {
25586 tree name = TYPE_IDENTIFIER (type);
25587 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25588 type);
25589 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25590 equate_type_number_to_die (type, type_die);
25591 }
25592 }
25593 break;
25594
25595 default:
25596 if (is_cxx_auto (type))
25597 {
25598 tree name = TYPE_IDENTIFIER (type);
25599 dw_die_ref *die = (name == get_identifier ("auto")
25600 ? &auto_die : &decltype_auto_die);
25601 if (!*die)
25602 {
25603 *die = new_die (DW_TAG_unspecified_type,
25604 comp_unit_die (), NULL_TREE);
25605 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25606 }
25607 equate_type_number_to_die (type, *die);
25608 break;
25609 }
25610 gcc_unreachable ();
25611 }
25612
25613 TREE_ASM_WRITTEN (type) = 1;
25614 }
25615
25616 static void
25617 gen_type_die (tree type, dw_die_ref context_die)
25618 {
25619 if (type != error_mark_node)
25620 {
25621 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25622 if (flag_checking)
25623 {
25624 dw_die_ref die = lookup_type_die (type);
25625 if (die)
25626 check_die (die);
25627 }
25628 }
25629 }
25630
25631 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25632 things which are local to the given block. */
25633
25634 static void
25635 gen_block_die (tree stmt, dw_die_ref context_die)
25636 {
25637 int must_output_die = 0;
25638 bool inlined_func;
25639
25640 /* Ignore blocks that are NULL. */
25641 if (stmt == NULL_TREE)
25642 return;
25643
25644 inlined_func = inlined_function_outer_scope_p (stmt);
25645
25646 /* If the block is one fragment of a non-contiguous block, do not
25647 process the variables, since they will have been done by the
25648 origin block. Do process subblocks. */
25649 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25650 {
25651 tree sub;
25652
25653 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25654 gen_block_die (sub, context_die);
25655
25656 return;
25657 }
25658
25659 /* Determine if we need to output any Dwarf DIEs at all to represent this
25660 block. */
25661 if (inlined_func)
25662 /* The outer scopes for inlinings *must* always be represented. We
25663 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25664 must_output_die = 1;
25665 else if (lookup_block_die (stmt))
25666 /* If we already have a DIE then it was filled early. Meanwhile
25667 we might have pruned all BLOCK_VARS as optimized out but we
25668 still want to generate high/low PC attributes so output it. */
25669 must_output_die = 1;
25670 else if (TREE_USED (stmt)
25671 || TREE_ASM_WRITTEN (stmt))
25672 {
25673 /* Determine if this block directly contains any "significant"
25674 local declarations which we will need to output DIEs for. */
25675 if (debug_info_level > DINFO_LEVEL_TERSE)
25676 {
25677 /* We are not in terse mode so any local declaration that
25678 is not ignored for debug purposes counts as being a
25679 "significant" one. */
25680 if (BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25681 must_output_die = 1;
25682 else
25683 for (tree var = BLOCK_VARS (stmt); var; var = DECL_CHAIN (var))
25684 if (!DECL_IGNORED_P (var))
25685 {
25686 must_output_die = 1;
25687 break;
25688 }
25689 }
25690 else if (!dwarf2out_ignore_block (stmt))
25691 must_output_die = 1;
25692 }
25693
25694 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25695 DIE for any block which contains no significant local declarations at
25696 all. Rather, in such cases we just call `decls_for_scope' so that any
25697 needed Dwarf info for any sub-blocks will get properly generated. Note
25698 that in terse mode, our definition of what constitutes a "significant"
25699 local declaration gets restricted to include only inlined function
25700 instances and local (nested) function definitions. */
25701 if (must_output_die)
25702 {
25703 if (inlined_func)
25704 gen_inlined_subroutine_die (stmt, context_die);
25705 else
25706 gen_lexical_block_die (stmt, context_die);
25707 }
25708 else
25709 decls_for_scope (stmt, context_die);
25710 }
25711
25712 /* Process variable DECL (or variable with origin ORIGIN) within
25713 block STMT and add it to CONTEXT_DIE. */
25714 static void
25715 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25716 {
25717 dw_die_ref die;
25718 tree decl_or_origin = decl ? decl : origin;
25719
25720 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25721 die = lookup_decl_die (decl_or_origin);
25722 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25723 {
25724 if (TYPE_DECL_IS_STUB (decl_or_origin))
25725 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25726 else
25727 die = lookup_decl_die (decl_or_origin);
25728 /* Avoid re-creating the DIE late if it was optimized as unused early. */
25729 if (! die && ! early_dwarf)
25730 return;
25731 }
25732 else
25733 die = NULL;
25734
25735 /* Avoid creating DIEs for local typedefs and concrete static variables that
25736 will only be pruned later. */
25737 if ((origin || decl_ultimate_origin (decl))
25738 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25739 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25740 {
25741 origin = decl_ultimate_origin (decl_or_origin);
25742 if (decl && VAR_P (decl) && die != NULL)
25743 {
25744 die = lookup_decl_die (origin);
25745 if (die != NULL)
25746 equate_decl_number_to_die (decl, die);
25747 }
25748 return;
25749 }
25750
25751 if (die != NULL && die->die_parent == NULL)
25752 add_child_die (context_die, die);
25753 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25754 {
25755 if (early_dwarf)
25756 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25757 stmt, context_die);
25758 }
25759 else
25760 {
25761 if (decl && DECL_P (decl))
25762 {
25763 die = lookup_decl_die (decl);
25764
25765 /* Early created DIEs do not have a parent as the decls refer
25766 to the function as DECL_CONTEXT rather than the BLOCK. */
25767 if (die && die->die_parent == NULL)
25768 {
25769 gcc_assert (in_lto_p);
25770 add_child_die (context_die, die);
25771 }
25772 }
25773
25774 gen_decl_die (decl, origin, NULL, context_die);
25775 }
25776 }
25777
25778 /* Generate all of the decls declared within a given scope and (recursively)
25779 all of its sub-blocks. */
25780
25781 static void
25782 decls_for_scope (tree stmt, dw_die_ref context_die, bool recurse)
25783 {
25784 tree decl;
25785 unsigned int i;
25786 tree subblocks;
25787
25788 /* Ignore NULL blocks. */
25789 if (stmt == NULL_TREE)
25790 return;
25791
25792 /* Output the DIEs to represent all of the data objects and typedefs
25793 declared directly within this block but not within any nested
25794 sub-blocks. Also, nested function and tag DIEs have been
25795 generated with a parent of NULL; fix that up now. We don't
25796 have to do this if we're at -g1. */
25797 if (debug_info_level > DINFO_LEVEL_TERSE)
25798 {
25799 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25800 process_scope_var (stmt, decl, NULL_TREE, context_die);
25801 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
25802 origin - avoid doing this twice as we have no good way to see
25803 if we've done it once already. */
25804 if (! early_dwarf)
25805 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25806 {
25807 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25808 if (decl == current_function_decl)
25809 	      /* Ignore declarations of the current function: although they
25810 		 are declarations, gen_subprogram_die would treat them as
25811 		 definitions again, because they are equal to
25812 		 current_function_decl, and endlessly recurse. */;
25813 else if (TREE_CODE (decl) == FUNCTION_DECL)
25814 process_scope_var (stmt, decl, NULL_TREE, context_die);
25815 else
25816 process_scope_var (stmt, NULL_TREE, decl, context_die);
25817 }
25818 }
25819
25820 /* Even if we're at -g1, we need to process the subblocks in order to get
25821 inlined call information. */
25822
25823 /* Output the DIEs to represent all sub-blocks (and the items declared
25824 therein) of this block. */
25825 if (recurse)
25826 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25827 subblocks != NULL;
25828 subblocks = BLOCK_CHAIN (subblocks))
25829 gen_block_die (subblocks, context_die);
25830 }
25831
25832 /* Is this a typedef we can avoid emitting? */
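/* As an illustrative sketch (not part of the original sources): the
   DECL_ARTIFICIAL test below is meant to match the artificial member
   typedef the C++ FE injects for the class name itself, e.g. for

     struct S { int i; };

   the FE adds an artificial TYPE_DECL `S' inside S which carries no
   information beyond the DIE for `S' itself.  */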
25833
25834 static bool
25835 is_redundant_typedef (const_tree decl)
25836 {
25837 if (TYPE_DECL_IS_STUB (decl))
25838 return true;
25839
25840 if (DECL_ARTIFICIAL (decl)
25841 && DECL_CONTEXT (decl)
25842 && is_tagged_type (DECL_CONTEXT (decl))
25843 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
25844 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
25845 /* Also ignore the artificial member typedef for the class name. */
25846 return true;
25847
25848 return false;
25849 }
25850
25851 /* Return TRUE if DECL is a typedef that names a type for linkage
25852    purposes.  This kind of typedef is produced by the C++ FE for
25853    constructs like:
25854
25855 typedef struct {...} foo;
25856
25857 In that case, there is no typedef variant type produced for foo.
25858 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
25859 struct type. */
25860
25861 static bool
25862 is_naming_typedef_decl (const_tree decl)
25863 {
25864 if (decl == NULL_TREE
25865 || TREE_CODE (decl) != TYPE_DECL
25866 || DECL_NAMELESS (decl)
25867 || !is_tagged_type (TREE_TYPE (decl))
25868 || DECL_IS_BUILTIN (decl)
25869 || is_redundant_typedef (decl)
25870 /* It looks like Ada produces TYPE_DECLs that are very similar
25871 to C++ naming typedefs but that have different
25872 	 semantics.  Let's be specific to C++ for now.  */
25873 || !is_cxx (decl))
25874 return FALSE;
25875
25876 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
25877 && TYPE_NAME (TREE_TYPE (decl)) == decl
25878 && (TYPE_STUB_DECL (TREE_TYPE (decl))
25879 != TYPE_NAME (TREE_TYPE (decl))));
25880 }
25881
25882 /* Looks up the DIE for a context. */
25883
25884 static inline dw_die_ref
25885 lookup_context_die (tree context)
25886 {
25887 if (context)
25888 {
25889 /* Find die that represents this context. */
25890 if (TYPE_P (context))
25891 {
25892 context = TYPE_MAIN_VARIANT (context);
25893 dw_die_ref ctx = lookup_type_die (context);
25894 if (!ctx)
25895 return NULL;
25896 return strip_naming_typedef (context, ctx);
25897 }
25898 else
25899 return lookup_decl_die (context);
25900 }
25901 return comp_unit_die ();
25902 }
25903
25904 /* Returns the DIE for a context. */
25905
25906 static inline dw_die_ref
25907 get_context_die (tree context)
25908 {
25909 if (context)
25910 {
25911 /* Find die that represents this context. */
25912 if (TYPE_P (context))
25913 {
25914 context = TYPE_MAIN_VARIANT (context);
25915 return strip_naming_typedef (context, force_type_die (context));
25916 }
25917 else
25918 return force_decl_die (context);
25919 }
25920 return comp_unit_die ();
25921 }
25922
25923 /* Returns the DIE for decl. A DIE will always be returned. */
25924
25925 static dw_die_ref
25926 force_decl_die (tree decl)
25927 {
25928 dw_die_ref decl_die;
25929 unsigned saved_external_flag;
25930 tree save_fn = NULL_TREE;
25931 decl_die = lookup_decl_die (decl);
25932 if (!decl_die)
25933 {
25934 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
25935
25936 decl_die = lookup_decl_die (decl);
25937 if (decl_die)
25938 return decl_die;
25939
25940 switch (TREE_CODE (decl))
25941 {
25942 case FUNCTION_DECL:
25943 /* Clear current_function_decl, so that gen_subprogram_die thinks
25944 that this is a declaration. At this point, we just want to force
25945 declaration die. */
25946 save_fn = current_function_decl;
25947 current_function_decl = NULL_TREE;
25948 gen_subprogram_die (decl, context_die);
25949 current_function_decl = save_fn;
25950 break;
25951
25952 case VAR_DECL:
25953 /* Set external flag to force declaration die. Restore it after
25954 gen_decl_die() call. */
25955 saved_external_flag = DECL_EXTERNAL (decl);
25956 DECL_EXTERNAL (decl) = 1;
25957 gen_decl_die (decl, NULL, NULL, context_die);
25958 DECL_EXTERNAL (decl) = saved_external_flag;
25959 break;
25960
25961 case NAMESPACE_DECL:
25962 if (dwarf_version >= 3 || !dwarf_strict)
25963 dwarf2out_decl (decl);
25964 else
25965 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
25966 decl_die = comp_unit_die ();
25967 break;
25968
25969 case TRANSLATION_UNIT_DECL:
25970 decl_die = comp_unit_die ();
25971 break;
25972
25973 default:
25974 gcc_unreachable ();
25975 }
25976
25977 /* We should be able to find the DIE now. */
25978 if (!decl_die)
25979 decl_die = lookup_decl_die (decl);
25980 gcc_assert (decl_die);
25981 }
25982
25983 return decl_die;
25984 }
25985
25986 /* Returns the DIE for TYPE, that must not be a base type. A DIE is
25987 always returned. */
25988
25989 static dw_die_ref
25990 force_type_die (tree type)
25991 {
25992 dw_die_ref type_die;
25993
25994 type_die = lookup_type_die (type);
25995 if (!type_die)
25996 {
25997 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
25998
25999 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
26000 false, context_die);
26001 gcc_assert (type_die);
26002 }
26003 return type_die;
26004 }
26005
26006 /* Force out any required namespaces to be able to output DECL,
26007 and return the new context_die for it, if it's changed. */
26008
26009 static dw_die_ref
26010 setup_namespace_context (tree thing, dw_die_ref context_die)
26011 {
26012 tree context = (DECL_P (thing)
26013 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
26014 if (context && TREE_CODE (context) == NAMESPACE_DECL)
26015 /* Force out the namespace. */
26016 context_die = force_decl_die (context);
26017
26018 return context_die;
26019 }
26020
26021 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
26022 type) within its namespace, if appropriate.
26023
26024 For compatibility with older debuggers, namespace DIEs only contain
26025 declarations; all definitions are emitted at CU scope, with
26026 DW_AT_specification pointing to the declaration (like with class
26027 members). */
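/* Roughly, the resulting DIE layout looks like this (an illustrative
   sketch only):

     DW_TAG_namespace "N"
       DW_TAG_variable "i"   DW_AT_declaration
     DW_TAG_variable         DW_AT_specification -> N::i, DW_AT_location ...  */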
26028
26029 static dw_die_ref
26030 declare_in_namespace (tree thing, dw_die_ref context_die)
26031 {
26032 dw_die_ref ns_context;
26033
26034 if (debug_info_level <= DINFO_LEVEL_TERSE)
26035 return context_die;
26036
26037 /* External declarations in the local scope only need to be emitted
26038 once, not once in the namespace and once in the scope.
26039
26040 This avoids declaring the `extern' below in the
26041 namespace DIE as well as in the innermost scope:
26042
26043 namespace S
26044 {
26045 int i=5;
26046 int foo()
26047 {
26048 int i=8;
26049 extern int i;
26050 return i;
26051 }
26052 }
26053 */
26054 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
26055 return context_die;
26056
26057 /* If this decl is from an inlined function, then don't try to emit it in its
26058 namespace, as we will get confused. It would have already been emitted
26059      when the abstract instance of the inline function was emitted anyway.  */
26060 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
26061 return context_die;
26062
26063 ns_context = setup_namespace_context (thing, context_die);
26064
26065 if (ns_context != context_die)
26066 {
26067 if (is_fortran () || is_dlang ())
26068 return ns_context;
26069 if (DECL_P (thing))
26070 gen_decl_die (thing, NULL, NULL, ns_context);
26071 else
26072 gen_type_die (thing, ns_context);
26073 }
26074 return context_die;
26075 }
26076
26077 /* Generate a DIE for a namespace or namespace alias. */
26078
26079 static void
26080 gen_namespace_die (tree decl, dw_die_ref context_die)
26081 {
26082 dw_die_ref namespace_die;
26083
26084 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
26085 they are an alias of. */
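  /* For instance (an illustrative sketch): given `namespace M = N;' the
     NAMESPACE_DECL for M has DECL_ABSTRACT_ORIGIN pointing at the
     NAMESPACE_DECL for N, and we emit a DW_TAG_imported_declaration
     named "M" whose DW_AT_import refers to the DIE for N.  */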
26086 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
26087 {
26088 /* Output a real namespace or module. */
26089 context_die = setup_namespace_context (decl, comp_unit_die ());
26090 namespace_die = new_die (is_fortran () || is_dlang ()
26091 ? DW_TAG_module : DW_TAG_namespace,
26092 context_die, decl);
26093       /* For Fortran modules defined in a different CU, don't add src coords.  */
26094 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
26095 {
26096 const char *name = dwarf2_name (decl, 0);
26097 if (name)
26098 add_name_attribute (namespace_die, name);
26099 }
26100 else
26101 add_name_and_src_coords_attributes (namespace_die, decl);
26102 if (DECL_EXTERNAL (decl))
26103 add_AT_flag (namespace_die, DW_AT_declaration, 1);
26104 equate_decl_number_to_die (decl, namespace_die);
26105 }
26106 else
26107 {
26108 /* Output a namespace alias. */
26109
26110 /* Force out the namespace we are an alias of, if necessary. */
26111 dw_die_ref origin_die
26112 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
26113
26114 if (DECL_FILE_SCOPE_P (decl)
26115 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
26116 context_die = setup_namespace_context (decl, comp_unit_die ());
26117 /* Now create the namespace alias DIE. */
26118 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
26119 add_name_and_src_coords_attributes (namespace_die, decl);
26120 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
26121 equate_decl_number_to_die (decl, namespace_die);
26122 }
26123 if ((dwarf_version >= 5 || !dwarf_strict)
26124 && lang_hooks.decls.decl_dwarf_attribute (decl,
26125 DW_AT_export_symbols) == 1)
26126 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
26127
26128 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
26129 if (want_pubnames ())
26130 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
26131 }
26132
26133 /* Generate Dwarf debug information for a decl described by DECL.
26134    The return value is currently only meaningful for PARM_DECLs;
26135    for all other decls it returns NULL.
26136
26137 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
26138 It can be NULL otherwise. */
26139
26140 static dw_die_ref
26141 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
26142 dw_die_ref context_die)
26143 {
26144 tree decl_or_origin = decl ? decl : origin;
26145 tree class_origin = NULL, ultimate_origin;
26146
26147 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
26148 return NULL;
26149
26150 switch (TREE_CODE (decl_or_origin))
26151 {
26152 case ERROR_MARK:
26153 break;
26154
26155 case CONST_DECL:
26156 if (!is_fortran () && !is_ada () && !is_dlang ())
26157 {
26158 /* The individual enumerators of an enum type get output when we output
26159 the Dwarf representation of the relevant enum type itself. */
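	  /* E.g. (illustrative): for `enum color { RED, GREEN };' RED and
	     GREEN become DW_TAG_enumerator children of the
	     DW_TAG_enumeration_type DIE, so no separate DIE is needed for
	     each CONST_DECL here.  */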
26160 break;
26161 }
26162
26163 /* Emit its type. */
26164 gen_type_die (TREE_TYPE (decl), context_die);
26165
26166 /* And its containing namespace. */
26167 context_die = declare_in_namespace (decl, context_die);
26168
26169 gen_const_die (decl, context_die);
26170 break;
26171
26172 case FUNCTION_DECL:
26173 #if 0
26174 /* FIXME */
26175 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26176 on local redeclarations of global functions. That seems broken. */
26177 if (current_function_decl != decl)
26178 /* This is only a declaration. */;
26179 #endif
26180
26181 /* We should have abstract copies already and should not generate
26182 stray type DIEs in late LTO dumping. */
26183 if (! early_dwarf)
26184 ;
26185
26186 /* If we're emitting a clone, emit info for the abstract instance. */
26187 else if (origin || DECL_ORIGIN (decl) != decl)
26188 dwarf2out_abstract_function (origin
26189 ? DECL_ORIGIN (origin)
26190 : DECL_ABSTRACT_ORIGIN (decl));
26191
26192 /* If we're emitting a possibly inlined function emit it as
26193 abstract instance. */
26194 else if (cgraph_function_possibly_inlined_p (decl)
26195 && ! DECL_ABSTRACT_P (decl)
26196 && ! class_or_namespace_scope_p (context_die)
26197 /* dwarf2out_abstract_function won't emit a die if this is just
26198 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26199 that case, because that works only if we have a die. */
26200 && DECL_INITIAL (decl) != NULL_TREE)
26201 dwarf2out_abstract_function (decl);
26202
26203 /* Otherwise we're emitting the primary DIE for this decl. */
26204 else if (debug_info_level > DINFO_LEVEL_TERSE)
26205 {
26206 /* Before we describe the FUNCTION_DECL itself, make sure that we
26207 have its containing type. */
26208 if (!origin)
26209 origin = decl_class_context (decl);
26210 if (origin != NULL_TREE)
26211 gen_type_die (origin, context_die);
26212
26213 /* And its return type. */
26214 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26215
26216 /* And its virtual context. */
26217 if (DECL_VINDEX (decl) != NULL_TREE)
26218 gen_type_die (DECL_CONTEXT (decl), context_die);
26219
26220 /* Make sure we have a member DIE for decl. */
26221 if (origin != NULL_TREE)
26222 gen_type_die_for_member (origin, decl, context_die);
26223
26224 /* And its containing namespace. */
26225 context_die = declare_in_namespace (decl, context_die);
26226 }
26227
26228 /* Now output a DIE to represent the function itself. */
26229 if (decl)
26230 gen_subprogram_die (decl, context_die);
26231 break;
26232
26233 case TYPE_DECL:
26234 /* If we are in terse mode, don't generate any DIEs to represent any
26235 actual typedefs. */
26236 if (debug_info_level <= DINFO_LEVEL_TERSE)
26237 break;
26238
26239 /* In the special case of a TYPE_DECL node representing the declaration
26240 of some type tag, if the given TYPE_DECL is marked as having been
26241 instantiated from some other (original) TYPE_DECL node (e.g. one which
26242 was generated within the original definition of an inline function) we
26243 used to generate a special (abbreviated) DW_TAG_structure_type,
26244 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26245 should be actually referencing those DIEs, as variable DIEs with that
26246 type would be emitted already in the abstract origin, so it was always
26247 removed during unused type prunning. Don't add anything in this
26248 case. */
26249 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26250 break;
26251
26252 if (is_redundant_typedef (decl))
26253 gen_type_die (TREE_TYPE (decl), context_die);
26254 else
26255 /* Output a DIE to represent the typedef itself. */
26256 gen_typedef_die (decl, context_die);
26257 break;
26258
26259 case LABEL_DECL:
26260 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26261 gen_label_die (decl, context_die);
26262 break;
26263
26264 case VAR_DECL:
26265 case RESULT_DECL:
26266 /* If we are in terse mode, don't generate any DIEs to represent any
26267 variable declarations or definitions. */
26268 if (debug_info_level <= DINFO_LEVEL_TERSE)
26269 break;
26270
26271 /* Avoid generating stray type DIEs during late dwarf dumping.
26272 All types have been dumped early. */
26273 if (early_dwarf
26274 /* ??? But in LTRANS we cannot annotate early created variably
26275 modified type DIEs without copying them and adjusting all
26276 references to them. Dump them again as happens for inlining
26277 which copies both the decl and the types. */
26278 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26279 in VLA bound information for example. */
26280 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26281 current_function_decl)))
26282 {
26283 /* Output any DIEs that are needed to specify the type of this data
26284 object. */
26285 if (decl_by_reference_p (decl_or_origin))
26286 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26287 else
26288 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26289 }
26290
26291 if (early_dwarf)
26292 {
26293 /* And its containing type. */
26294 class_origin = decl_class_context (decl_or_origin);
26295 if (class_origin != NULL_TREE)
26296 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26297
26298 /* And its containing namespace. */
26299 context_die = declare_in_namespace (decl_or_origin, context_die);
26300 }
26301
26302 /* Now output the DIE to represent the data object itself. This gets
26303 complicated because of the possibility that the VAR_DECL really
26304 represents an inlined instance of a formal parameter for an inline
26305 function. */
26306 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26307 if (ultimate_origin != NULL_TREE
26308 && TREE_CODE (ultimate_origin) == PARM_DECL)
26309 gen_formal_parameter_die (decl, origin,
26310 true /* Emit name attribute. */,
26311 context_die);
26312 else
26313 gen_variable_die (decl, origin, context_die);
26314 break;
26315
26316 case FIELD_DECL:
26317 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26318 /* Ignore the nameless fields that are used to skip bits but handle C++
26319 anonymous unions and structs. */
26320 if (DECL_NAME (decl) != NULL_TREE
26321 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26322 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26323 {
26324 gen_type_die (member_declared_type (decl), context_die);
26325 gen_field_die (decl, ctx, context_die);
26326 }
26327 break;
26328
26329 case PARM_DECL:
26330 /* Avoid generating stray type DIEs during late dwarf dumping.
26331 All types have been dumped early. */
26332 if (early_dwarf
26333 /* ??? But in LTRANS we cannot annotate early created variably
26334 modified type DIEs without copying them and adjusting all
26335 references to them. Dump them again as happens for inlining
26336 which copies both the decl and the types. */
26337 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26338 in VLA bound information for example. */
26339 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26340 current_function_decl)))
26341 {
26342 if (DECL_BY_REFERENCE (decl_or_origin))
26343 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26344 else
26345 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26346 }
26347 return gen_formal_parameter_die (decl, origin,
26348 true /* Emit name attribute. */,
26349 context_die);
26350
26351 case NAMESPACE_DECL:
26352 if (dwarf_version >= 3 || !dwarf_strict)
26353 gen_namespace_die (decl, context_die);
26354 break;
26355
26356 case IMPORTED_DECL:
26357 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26358 DECL_CONTEXT (decl), context_die);
26359 break;
26360
26361 case NAMELIST_DECL:
26362 gen_namelist_decl (DECL_NAME (decl), context_die,
26363 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26364 break;
26365
26366 default:
26367 /* Probably some frontend-internal decl. Assume we don't care. */
26368 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
26369 break;
26370 }
26371
26372 return NULL;
26373 }
26374 \f
26375 /* Output initial debug information for global DECL. Called at the
26376 end of the parsing process.
26377
26378 This is the initial debug generation process. As such, the DIEs
26379 generated may be incomplete. A later debug generation pass
26380 (dwarf2out_late_global_decl) will augment the information generated
26381 in this pass (e.g., with complete location info). */
26382
26383 static void
26384 dwarf2out_early_global_decl (tree decl)
26385 {
26386 set_early_dwarf s;
26387
26388 /* gen_decl_die() will set DECL_ABSTRACT because
26389      cgraph_function_possibly_inlined_p() returns true.  This in
26390      turn will cause DW_AT_inline attributes to be set.
26391
26392 This happens because at early dwarf generation, there is no
26393 cgraph information, causing cgraph_function_possibly_inlined_p()
26394 to return true. Trick cgraph_function_possibly_inlined_p()
26395 while we generate dwarf early. */
26396 bool save = symtab->global_info_ready;
26397 symtab->global_info_ready = true;
26398
26399 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26400 other DECLs and they can point to template types or other things
26401 that dwarf2out can't handle when done via dwarf2out_decl. */
26402 if (TREE_CODE (decl) != TYPE_DECL
26403 && TREE_CODE (decl) != PARM_DECL)
26404 {
26405 if (TREE_CODE (decl) == FUNCTION_DECL)
26406 {
26407 tree save_fndecl = current_function_decl;
26408
26409 /* For nested functions, make sure we have DIEs for the parents first
26410 so that all nested DIEs are generated at the proper scope in the
26411 first shot. */
26412 tree context = decl_function_context (decl);
26413 if (context != NULL)
26414 {
26415 dw_die_ref context_die = lookup_decl_die (context);
26416 current_function_decl = context;
26417
26418 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26419 enough so that it lands in its own context. This avoids type
26420 pruning issues later on. */
26421 if (context_die == NULL || is_declaration_die (context_die))
26422 dwarf2out_early_global_decl (context);
26423 }
26424
26425 /* Emit an abstract origin of a function first. This happens
26426 with C++ constructor clones for example and makes
26427 dwarf2out_abstract_function happy which requires the early
26428 DIE of the abstract instance to be present. */
26429 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26430 dw_die_ref origin_die;
26431 if (origin != NULL
26432 /* Do not emit the DIE multiple times but make sure to
26433 process it fully here in case we just saw a declaration. */
26434 && ((origin_die = lookup_decl_die (origin)) == NULL
26435 || is_declaration_die (origin_die)))
26436 {
26437 current_function_decl = origin;
26438 dwarf2out_decl (origin);
26439 }
26440
26441 /* Emit the DIE for decl but avoid doing that multiple times. */
26442 dw_die_ref old_die;
26443 if ((old_die = lookup_decl_die (decl)) == NULL
26444 || is_declaration_die (old_die))
26445 {
26446 current_function_decl = decl;
26447 dwarf2out_decl (decl);
26448 }
26449
26450 current_function_decl = save_fndecl;
26451 }
26452 else
26453 dwarf2out_decl (decl);
26454 }
26455 symtab->global_info_ready = save;
26456 }
26457
26458 /* Return whether EXPR is an expression with the following pattern:
26459 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
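/* Such a pattern typically comes from a DECL_VALUE_EXPR that maps a
   variable to a fixed address, e.g. (an illustrative sketch, not from
   the sources) `*(volatile int *) 0x40021000', which is represented as
   INDIRECT_REF (NOP_EXPR (INTEGER_CST)).  */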
26460
26461 static bool
26462 is_trivial_indirect_ref (tree expr)
26463 {
26464 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26465 return false;
26466
26467 tree nop = TREE_OPERAND (expr, 0);
26468 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26469 return false;
26470
26471 tree int_cst = TREE_OPERAND (nop, 0);
26472 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26473 }
26474
26475 /* Output debug information for global decl DECL. Called from
26476 toplev.c after compilation proper has finished. */
26477
26478 static void
26479 dwarf2out_late_global_decl (tree decl)
26480 {
26481 /* Fill-in any location information we were unable to determine
26482 on the first pass. */
26483 if (VAR_P (decl))
26484 {
26485 dw_die_ref die = lookup_decl_die (decl);
26486
26487 /* We may have to generate early debug late for LTO in case debug
26488 was not enabled at compile-time or the target doesn't support
26489 the LTO early debug scheme. */
26490 if (! die && in_lto_p)
26491 {
26492 dwarf2out_decl (decl);
26493 die = lookup_decl_die (decl);
26494 }
26495
26496 if (die)
26497 {
26498 /* We get called via the symtab code invoking late_global_decl
26499 for symbols that are optimized out.
26500
26501 Do not add locations for those, except if they have a
26502 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26503 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26504 INDIRECT_REF expression, as this could generate relocations to
26505 text symbols in LTO object files, which is invalid. */
26506 varpool_node *node = varpool_node::get (decl);
26507 if ((! node || ! node->definition)
26508 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26509 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26510 tree_add_const_value_attribute_for_decl (die, decl);
26511 else
26512 add_location_or_const_value_attribute (die, decl, false);
26513 }
26514 }
26515 }
26516
26517 /* Output debug information for type decl DECL. Called from toplev.c
26518 and from language front ends (to record built-in types). */
26519 static void
26520 dwarf2out_type_decl (tree decl, int local)
26521 {
26522 if (!local)
26523 {
26524 set_early_dwarf s;
26525 dwarf2out_decl (decl);
26526 }
26527 }
26528
26529 /* Output debug information for imported module or decl DECL.
26530    NAME is the non-NULL name in the lexical block if the decl has been renamed.
26531    LEXICAL_BLOCK is the lexical block (whose TREE_CODE is BLOCK)
26532 that DECL belongs to.
26533 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
26534 static void
26535 dwarf2out_imported_module_or_decl_1 (tree decl,
26536 tree name,
26537 tree lexical_block,
26538 dw_die_ref lexical_block_die)
26539 {
26540 expanded_location xloc;
26541 dw_die_ref imported_die = NULL;
26542 dw_die_ref at_import_die;
26543
26544 if (TREE_CODE (decl) == IMPORTED_DECL)
26545 {
26546 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26547 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26548 gcc_assert (decl);
26549 }
26550 else
26551 xloc = expand_location (input_location);
26552
26553 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
26554 {
26555 at_import_die = force_type_die (TREE_TYPE (decl));
26556 /* For namespace N { typedef void T; } using N::T; base_type_die
26557 	 returns NULL, but DW_TAG_imported_declaration requires
26558 	 the DW_AT_import attribute.  Force creation of DW_TAG_typedef.  */
26559 if (!at_import_die)
26560 {
26561 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26562 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26563 at_import_die = lookup_type_die (TREE_TYPE (decl));
26564 gcc_assert (at_import_die);
26565 }
26566 }
26567 else
26568 {
26569 at_import_die = lookup_decl_die (decl);
26570 if (!at_import_die)
26571 {
26572 /* If we're trying to avoid duplicate debug info, we may not have
26573 emitted the member decl for this field. Emit it now. */
26574 if (TREE_CODE (decl) == FIELD_DECL)
26575 {
26576 tree type = DECL_CONTEXT (decl);
26577
26578 if (TYPE_CONTEXT (type)
26579 && TYPE_P (TYPE_CONTEXT (type))
26580 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26581 DINFO_USAGE_DIR_USE))
26582 return;
26583 gen_type_die_for_member (type, decl,
26584 get_context_die (TYPE_CONTEXT (type)));
26585 }
26586 if (TREE_CODE (decl) == NAMELIST_DECL)
26587 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26588 get_context_die (DECL_CONTEXT (decl)),
26589 NULL_TREE);
26590 else
26591 at_import_die = force_decl_die (decl);
26592 }
26593 }
26594
26595 if (TREE_CODE (decl) == NAMESPACE_DECL)
26596 {
26597 if (dwarf_version >= 3 || !dwarf_strict)
26598 imported_die = new_die (DW_TAG_imported_module,
26599 lexical_block_die,
26600 lexical_block);
26601 else
26602 return;
26603 }
26604 else
26605 imported_die = new_die (DW_TAG_imported_declaration,
26606 lexical_block_die,
26607 lexical_block);
26608
26609 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26610 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26611 if (debug_column_info && xloc.column)
26612 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26613 if (name)
26614 add_AT_string (imported_die, DW_AT_name,
26615 IDENTIFIER_POINTER (name));
26616 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26617 }
26618
26619 /* Output debug information for imported module or decl DECL.
26620    NAME is the non-NULL name in context if the decl has been renamed.
26621 CHILD is true if decl is one of the renamed decls as part of
26622 importing whole module.
26623 IMPLICIT is set if this hook is called for an implicit import
26624 such as inline namespace. */
26625
26626 static void
26627 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26628 bool child, bool implicit)
26629 {
26630 /* dw_die_ref at_import_die; */
26631 dw_die_ref scope_die;
26632
26633 if (debug_info_level <= DINFO_LEVEL_TERSE)
26634 return;
26635
26636 gcc_assert (decl);
26637
26638   /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
26639      should be enough.  For DWARF4 and older, even if we emit
26640      DW_AT_export_symbols as an extension, add the implicit
26641      DW_TAG_imported_module anyway for consumers unaware of DW_AT_export_symbols.  */
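  /* An implicit import typically comes from an inline namespace, e.g.
     (illustrative only):

       namespace lib { inline namespace v1 { int x; } }

     where the members of v1 are directly visible in lib.  */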
26642 if (implicit
26643 && dwarf_version >= 5
26644 && lang_hooks.decls.decl_dwarf_attribute (decl,
26645 DW_AT_export_symbols) == 1)
26646 return;
26647
26648 set_early_dwarf s;
26649
26650   /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need
26651      two DIEs: the DIE for the referenced decl and the scope DIE.  First, get
26652      the DIE for the decl itself.  */
26653
26654   /* Get the scope DIE for the decl context.  Use comp_unit_die for a global
26655      module or decl.  If no DIE is found for a non-global, force a new one.  */
26656 if (context
26657 && TYPE_P (context)
26658 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26659 return;
26660
26661 scope_die = get_context_die (context);
26662
26663 if (child)
26664 {
26665 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26666 	 there is nothing we can do here.  */
26667 if (dwarf_version < 3 && dwarf_strict)
26668 return;
26669
26670 gcc_assert (scope_die->die_child);
26671 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26672 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26673 scope_die = scope_die->die_child;
26674 }
26675
26676 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
26677 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26678 }
26679
26680 /* Output debug information for namelists. */
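/* For reference, a Fortran sketch (illustrative, not from the sources):

     INTEGER :: a, b
     NAMELIST /nml/ a, b

   yields a DW_TAG_namelist DIE named "nml" whose DW_TAG_namelist_item
   children refer to the DIEs for `a' and `b'.  */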
26681
26682 static dw_die_ref
26683 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26684 {
26685 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26686 tree value;
26687 unsigned i;
26688
26689 if (debug_info_level <= DINFO_LEVEL_TERSE)
26690 return NULL;
26691
26692 gcc_assert (scope_die != NULL);
26693 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26694 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26695
26696 /* If there are no item_decls, we have a nondefining namelist, e.g.
26697 with USE association; hence, set DW_AT_declaration. */
26698 if (item_decls == NULL_TREE)
26699 {
26700 add_AT_flag (nml_die, DW_AT_declaration, 1);
26701 return nml_die;
26702 }
26703
26704 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26705 {
26706 nml_item_ref_die = lookup_decl_die (value);
26707 if (!nml_item_ref_die)
26708 nml_item_ref_die = force_decl_die (value);
26709
26710 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26711 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26712 }
26713 return nml_die;
26714 }
26715
26716
26717 /* Write the debugging output for DECL.  */
26718
26719 static void
26720 dwarf2out_decl (tree decl)
26721 {
26722 dw_die_ref context_die = comp_unit_die ();
26723
26724 switch (TREE_CODE (decl))
26725 {
26726 case ERROR_MARK:
26727 return;
26728
26729 case FUNCTION_DECL:
26730 /* If we're a nested function, initially use a parent of NULL; if we're
26731 a plain function, this will be fixed up in decls_for_scope. If
26732 we're a method, it will be ignored, since we already have a DIE.
26733 Avoid doing this late though since clones of class methods may
26734 otherwise end up in limbo and create type DIEs late. */
26735 if (early_dwarf
26736 && decl_function_context (decl)
26737 /* But if we're in terse mode, we don't care about scope. */
26738 && debug_info_level > DINFO_LEVEL_TERSE)
26739 context_die = NULL;
26740 break;
26741
26742 case VAR_DECL:
26743       /* For local statics, look up the proper context DIE.  */
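      /* E.g. (illustrative): for `void f (void) { static int n; }' the DIE
	 for `n' should be a child of the DW_TAG_subprogram DIE for `f', not
	 of the compilation unit DIE.  */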
26744 if (local_function_static (decl))
26745 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26746
26747 /* If we are in terse mode, don't generate any DIEs to represent any
26748 variable declarations or definitions. */
26749 if (debug_info_level <= DINFO_LEVEL_TERSE)
26750 return;
26751 break;
26752
26753 case CONST_DECL:
26754 if (debug_info_level <= DINFO_LEVEL_TERSE)
26755 return;
26756 if (!is_fortran () && !is_ada () && !is_dlang ())
26757 return;
26758 if (TREE_STATIC (decl) && decl_function_context (decl))
26759 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26760 break;
26761
26762 case NAMESPACE_DECL:
26763 case IMPORTED_DECL:
26764 if (debug_info_level <= DINFO_LEVEL_TERSE)
26765 return;
26766 if (lookup_decl_die (decl) != NULL)
26767 return;
26768 break;
26769
26770 case TYPE_DECL:
26771 /* Don't emit stubs for types unless they are needed by other DIEs. */
26772 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26773 return;
26774
26775 /* Don't bother trying to generate any DIEs to represent any of the
26776 normal built-in types for the language we are compiling. */
26777 if (DECL_IS_BUILTIN (decl))
26778 return;
26779
26780 /* If we are in terse mode, don't generate any DIEs for types. */
26781 if (debug_info_level <= DINFO_LEVEL_TERSE)
26782 return;
26783
26784 /* If we're a function-scope tag, initially use a parent of NULL;
26785 this will be fixed up in decls_for_scope. */
26786 if (decl_function_context (decl))
26787 context_die = NULL;
26788
26789 break;
26790
26791 case NAMELIST_DECL:
26792 break;
26793
26794 default:
26795 return;
26796 }
26797
26798 gen_decl_die (decl, NULL, NULL, context_die);
26799
26800 if (flag_checking)
26801 {
26802 dw_die_ref die = lookup_decl_die (decl);
26803 if (die)
26804 check_die (die);
26805 }
26806 }
26807
26808 /* Write the debugging output for DECL. */
26809
26810 static void
26811 dwarf2out_function_decl (tree decl)
26812 {
26813 dwarf2out_decl (decl);
26814 call_arg_locations = NULL;
26815 call_arg_loc_last = NULL;
26816 call_site_count = -1;
26817 tail_call_site_count = -1;
26818 decl_loc_table->empty ();
26819 cached_dw_loc_list_table->empty ();
26820 }
26821
26822 /* Output a marker (i.e. a label) for the beginning of the generated code for
26823 a lexical block. */
26824
26825 static void
26826 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26827 unsigned int blocknum)
26828 {
26829 switch_to_section (current_function_section ());
26830 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
26831 }
26832
26833 /* Output a marker (i.e. a label) for the end of the generated code for a
26834 lexical block. */
26835
26836 static void
26837 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
26838 {
26839 switch_to_section (current_function_section ());
26840 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
26841 }
26842
26843 /* Returns nonzero if it is appropriate not to emit any debugging
26844 information for BLOCK, because it doesn't contain any instructions.
26845
26846 Don't allow this for blocks with nested functions or local classes
26847 as we would end up with orphans, and in the presence of scheduling
26848 we may end up calling them anyway. */
26849
26850 static bool
26851 dwarf2out_ignore_block (const_tree block)
26852 {
26853 tree decl;
26854 unsigned int i;
26855
26856 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
26857 if (TREE_CODE (decl) == FUNCTION_DECL
26858 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26859 return 0;
26860 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
26861 {
26862 decl = BLOCK_NONLOCALIZED_VAR (block, i);
26863 if (TREE_CODE (decl) == FUNCTION_DECL
26864 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26865 return 0;
26866 }
26867
26868 return 1;
26869 }
26870
26871 /* Hash table routines for file_hash. */
26872
26873 bool
26874 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
26875 {
26876 return filename_cmp (p1->filename, p2) == 0;
26877 }
26878
26879 hashval_t
26880 dwarf_file_hasher::hash (dwarf_file_data *p)
26881 {
26882 return htab_hash_string (p->filename);
26883 }
26884
26885 /* Lookup FILE_NAME (in the list of filenames that we know about here in
26886 dwarf2out.c) and return its "index". The index of each (known) filename is
26887 just a unique number which is associated with only that one filename. We
26888 need such numbers for the sake of generating labels (in the .debug_sfnames
26889    section) and references to those file numbers (in the .debug_srcinfo
26890 and .debug_macinfo sections). If the filename given as an argument is not
26891 found in our current list, add it to the list and assign it the next
26892 available unique index number. */
26893
26894 static struct dwarf_file_data *
26895 lookup_filename (const char *file_name)
26896 {
26897 struct dwarf_file_data * created;
26898
26899 if (!file_name)
26900 return NULL;
26901
26902 dwarf_file_data **slot
26903 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
26904 INSERT);
26905 if (*slot)
26906 return *slot;
26907
26908 created = ggc_alloc<dwarf_file_data> ();
26909 created->filename = file_name;
26910 created->emitted_number = 0;
26911 *slot = created;
26912 return created;
26913 }
26914
26915 /* If the assembler will construct the file table, then translate the compiler
26916 internal file table number into the assembler file table number, and emit
26917 a .file directive if we haven't already emitted one yet. The file table
26918 numbers are different because we prune debug info for unused variables and
26919 types, which may include filenames. */
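/* The emitted directive looks roughly like (an illustrative sketch):

     .file 2 "src/foo.c"

   where 2 is the assigned emitted_number and the path may have been
   remapped by remap_debug_filename.  */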
26920
26921 static int
26922 maybe_emit_file (struct dwarf_file_data * fd)
26923 {
26924 if (! fd->emitted_number)
26925 {
26926 if (last_emitted_file)
26927 fd->emitted_number = last_emitted_file->emitted_number + 1;
26928 else
26929 fd->emitted_number = 1;
26930 last_emitted_file = fd;
26931
26932 if (output_asm_line_debug_info ())
26933 {
26934 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
26935 output_quoted_string (asm_out_file,
26936 remap_debug_filename (fd->filename));
26937 fputc ('\n', asm_out_file);
26938 }
26939 }
26940
26941 return fd->emitted_number;
26942 }
26943
26944 /* Schedule generation of a DW_AT_const_value attribute to DIE.
26945 That generation should happen after function debug info has been
26946 generated. The value of the attribute is the constant value of ARG. */
26947
26948 static void
26949 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
26950 {
26951 die_arg_entry entry;
26952
26953 if (!die || !arg)
26954 return;
26955
26956 gcc_assert (early_dwarf);
26957
26958 if (!tmpl_value_parm_die_table)
26959 vec_alloc (tmpl_value_parm_die_table, 32);
26960
26961 entry.die = die;
26962 entry.arg = arg;
26963 vec_safe_push (tmpl_value_parm_die_table, entry);
26964 }
26965
26966 /* Return TRUE if T is an instance of generic type, FALSE
26967 otherwise. */
26968
26969 static bool
26970 generic_type_p (tree t)
26971 {
26972 if (t == NULL_TREE || !TYPE_P (t))
26973 return false;
26974 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
26975 }
26976
26977 /* Schedule the generation of the generic parameter dies for the
26978 instance of generic type T. The proper generation itself is later
26979 done by gen_scheduled_generic_parms_dies. */
26980
26981 static void
26982 schedule_generic_params_dies_gen (tree t)
26983 {
26984 if (!generic_type_p (t))
26985 return;
26986
26987 gcc_assert (early_dwarf);
26988
26989 if (!generic_type_instances)
26990 vec_alloc (generic_type_instances, 256);
26991
26992 vec_safe_push (generic_type_instances, t);
26993 }
26994
26995 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
26996 by append_entry_to_tmpl_value_parm_die_table. This function must
26997 be called after function DIEs have been generated. */
26998
26999 static void
27000 gen_remaining_tmpl_value_param_die_attribute (void)
27001 {
27002 if (tmpl_value_parm_die_table)
27003 {
27004 unsigned i, j;
27005 die_arg_entry *e;
27006
27007 /* We do this in two phases - first get the cases we can
27008 handle during early-finish, preserving those we cannot
27009 (containing symbolic constants where we don't yet know
27010 whether we are going to output the referenced symbols).
27011 For those we try again at late-finish. */
27012 j = 0;
27013 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
27014 {
27015 if (!e->die->removed
27016 && !tree_add_const_value_attribute (e->die, e->arg))
27017 {
27018 dw_loc_descr_ref loc = NULL;
27019 if (! early_dwarf
27020 && (dwarf_version >= 5 || !dwarf_strict))
27021 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
27022 if (loc)
27023 add_AT_loc (e->die, DW_AT_location, loc);
27024 else
27025 (*tmpl_value_parm_die_table)[j++] = *e;
27026 }
27027 }
27028 tmpl_value_parm_die_table->truncate (j);
27029 }
27030 }
27031
27032 /* Generate generic parameters DIEs for instances of generic types
27033 that have been previously scheduled by
27034 schedule_generic_params_dies_gen. This function must be called
27035 after all the types of the CU have been laid out. */
27036
27037 static void
27038 gen_scheduled_generic_parms_dies (void)
27039 {
27040 unsigned i;
27041 tree t;
27042
27043 if (!generic_type_instances)
27044 return;
27045
27046 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
27047 if (COMPLETE_TYPE_P (t))
27048 gen_generic_params_dies (t);
27049
27050 generic_type_instances = NULL;
27051 }
27052
27053
27054 /* Replace DW_AT_name for the decl with name. */
27055
27056 static void
27057 dwarf2out_set_name (tree decl, tree name)
27058 {
27059 dw_die_ref die;
27060 dw_attr_node *attr;
27061 const char *dname;
27062
27063 die = TYPE_SYMTAB_DIE (decl);
27064 if (!die)
27065 return;
27066
27067 dname = dwarf2_name (name, 0);
27068 if (!dname)
27069 return;
27070
27071 attr = get_AT (die, DW_AT_name);
27072 if (attr)
27073 {
27074 struct indirect_string_node *node;
27075
27076 node = find_AT_string (dname);
27077       /* Replace the string.  */
27078 attr->dw_attr_val.v.val_str = node;
27079 }
27080
27081 else
27082 add_name_attribute (die, dname);
27083 }
27084
27085 /* True if before or during processing of the first function being emitted. */
27086 static bool in_first_function_p = true;
27087 /* True if loc_note during dwarf2out_var_location call might still be
27088 before first real instruction at address equal to .Ltext0. */
27089 static bool maybe_at_text_label_p = true;
27090 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
27091 static unsigned int first_loclabel_num_not_at_text_label;
27092
27093 /* Look ahead for a real insn, or for a begin stmt marker. */
27094
27095 static rtx_insn *
27096 dwarf2out_next_real_insn (rtx_insn *loc_note)
27097 {
27098 rtx_insn *next_real = NEXT_INSN (loc_note);
27099
27100 while (next_real)
27101 if (INSN_P (next_real))
27102 break;
27103 else
27104 next_real = NEXT_INSN (next_real);
27105
27106 return next_real;
27107 }
27108
27109 /* Called by the final INSN scan whenever we see a var location. We
27110 use it to drop labels in the right places, and throw the location in
27111 our lookup table. */
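/* In the assembly output this shows up as internal labels such as
   `.LVL3:' (an illustrative spelling; the exact form is target
   dependent), which later anchor the address ranges used in the
   variables' location lists.  */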
27112
27113 static void
27114 dwarf2out_var_location (rtx_insn *loc_note)
27115 {
27116 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
27117 struct var_loc_node *newloc;
27118 rtx_insn *next_real, *next_note;
27119 rtx_insn *call_insn = NULL;
27120 static const char *last_label;
27121 static const char *last_postcall_label;
27122 static bool last_in_cold_section_p;
27123 static rtx_insn *expected_next_loc_note;
27124 tree decl;
27125 bool var_loc_p;
27126 var_loc_view view = 0;
27127
27128 if (!NOTE_P (loc_note))
27129 {
27130 if (CALL_P (loc_note))
27131 {
27132 maybe_reset_location_view (loc_note, cur_line_info_table);
27133 call_site_count++;
27134 if (SIBLING_CALL_P (loc_note))
27135 tail_call_site_count++;
27136 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
27137 {
27138 call_insn = loc_note;
27139 loc_note = NULL;
27140 var_loc_p = false;
27141
27142 next_real = dwarf2out_next_real_insn (call_insn);
27143 next_note = NULL;
27144 cached_next_real_insn = NULL;
27145 goto create_label;
27146 }
27147 if (optimize == 0 && !flag_var_tracking)
27148 {
27149 /* When the var-tracking pass is not running, there is no note
27150 for indirect calls whose target is compile-time known. In this
27151 case, process such calls specifically so that we generate call
27152 sites for them anyway. */
27153 rtx x = PATTERN (loc_note);
27154 if (GET_CODE (x) == PARALLEL)
27155 x = XVECEXP (x, 0, 0);
27156 if (GET_CODE (x) == SET)
27157 x = SET_SRC (x);
27158 if (GET_CODE (x) == CALL)
27159 x = XEXP (x, 0);
27160 if (!MEM_P (x)
27161 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27162 || !SYMBOL_REF_DECL (XEXP (x, 0))
27163 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27164 != FUNCTION_DECL))
27165 {
27166 call_insn = loc_note;
27167 loc_note = NULL;
27168 var_loc_p = false;
27169
27170 next_real = dwarf2out_next_real_insn (call_insn);
27171 next_note = NULL;
27172 cached_next_real_insn = NULL;
27173 goto create_label;
27174 }
27175 }
27176 }
27177 else if (!debug_variable_location_views)
27178 gcc_unreachable ();
27179 else
27180 maybe_reset_location_view (loc_note, cur_line_info_table);
27181
27182 return;
27183 }
27184
27185 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27186 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27187 return;
27188
27189 /* Optimize processing a large consecutive sequence of location
27190 notes so we don't spend too much time in next_real_insn. If the
27191 next insn is another location note, remember the next_real_insn
27192 calculation for next time. */
27193 next_real = cached_next_real_insn;
27194 if (next_real)
27195 {
27196 if (expected_next_loc_note != loc_note)
27197 next_real = NULL;
27198 }
27199
27200 next_note = NEXT_INSN (loc_note);
27201 if (! next_note
27202 || next_note->deleted ()
27203 || ! NOTE_P (next_note)
27204 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
27205 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
27206 && NOTE_KIND (next_note) != NOTE_INSN_INLINE_ENTRY))
27207 next_note = NULL;
27208
27209 if (! next_real)
27210 next_real = dwarf2out_next_real_insn (loc_note);
27211
27212 if (next_note)
27213 {
27214 expected_next_loc_note = next_note;
27215 cached_next_real_insn = next_real;
27216 }
27217 else
27218 cached_next_real_insn = NULL;
27219
27220 /* If there are no instructions which would be affected by this note,
27221 don't do anything. */
27222 if (var_loc_p
27223 && next_real == NULL_RTX
27224 && !NOTE_DURING_CALL_P (loc_note))
27225 return;
27226
27227 create_label:
27228
27229 if (next_real == NULL_RTX)
27230 next_real = get_last_insn ();
27231
27232   /* If there were any real insns between the note we processed last time
27233 and this note (or if it is the first note), clear
27234 last_{,postcall_}label so that they are not reused this time. */
27235 if (last_var_location_insn == NULL_RTX
27236 || last_var_location_insn != next_real
27237 || last_in_cold_section_p != in_cold_section_p)
27238 {
27239 last_label = NULL;
27240 last_postcall_label = NULL;
27241 }
27242
27243 if (var_loc_p)
27244 {
27245 const char *label
27246 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27247 view = cur_line_info_table->view;
27248 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27249 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27250 if (newloc == NULL)
27251 return;
27252 }
27253 else
27254 {
27255 decl = NULL_TREE;
27256 newloc = NULL;
27257 }
27258
27259   /* If there were no real insns between the note we processed last time
27260 and this note, use the label we emitted last time. Otherwise
27261 create a new label and emit it. */
27262 if (last_label == NULL)
27263 {
27264 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27265 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27266 loclabel_num++;
27267 last_label = ggc_strdup (loclabel);
27268 /* See if loclabel might be equal to .Ltext0. If yes,
27269 bump first_loclabel_num_not_at_text_label. */
27270 if (!have_multiple_function_sections
27271 && in_first_function_p
27272 && maybe_at_text_label_p)
27273 {
27274 static rtx_insn *last_start;
27275 rtx_insn *insn;
27276 for (insn = loc_note; insn; insn = previous_insn (insn))
27277 if (insn == last_start)
27278 break;
27279 else if (!NONDEBUG_INSN_P (insn))
27280 continue;
27281 else
27282 {
27283 rtx body = PATTERN (insn);
27284 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27285 continue;
27286 /* Inline asm could occupy zero bytes. */
27287 else if (GET_CODE (body) == ASM_INPUT
27288 || asm_noperands (body) >= 0)
27289 continue;
27290 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27291 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27292 continue;
27293 #endif
27294 else
27295 {
27296 /* Assume insn has non-zero length. */
27297 maybe_at_text_label_p = false;
27298 break;
27299 }
27300 }
27301 if (maybe_at_text_label_p)
27302 {
27303 last_start = loc_note;
27304 first_loclabel_num_not_at_text_label = loclabel_num;
27305 }
27306 }
27307 }
27308
27309 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27310 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27311
27312 if (!var_loc_p)
27313 {
27314 struct call_arg_loc_node *ca_loc
27315 = ggc_cleared_alloc<call_arg_loc_node> ();
27316 rtx_insn *prev = call_insn;
27317
27318 ca_loc->call_arg_loc_note
27319 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27320 ca_loc->next = NULL;
27321 ca_loc->label = last_label;
27322 gcc_assert (prev
27323 && (CALL_P (prev)
27324 || (NONJUMP_INSN_P (prev)
27325 && GET_CODE (PATTERN (prev)) == SEQUENCE
27326 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27327 if (!CALL_P (prev))
27328 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27329 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27330
27331 /* Look for a SYMBOL_REF in the "prev" instruction. */
27332 rtx x = get_call_rtx_from (PATTERN (prev));
27333 if (x)
27334 {
27335 /* Try to get the call symbol, if any. */
27336 if (MEM_P (XEXP (x, 0)))
27337 x = XEXP (x, 0);
27338 /* First, look for a memory access to a symbol_ref. */
27339 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27340 && SYMBOL_REF_DECL (XEXP (x, 0))
27341 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27342 ca_loc->symbol_ref = XEXP (x, 0);
27343 /* Otherwise, look at a compile-time known user-level function
27344 declaration. */
27345 else if (MEM_P (x)
27346 && MEM_EXPR (x)
27347 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27348 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27349 }
27350
27351 ca_loc->block = insn_scope (prev);
27352 if (call_arg_locations)
27353 call_arg_loc_last->next = ca_loc;
27354 else
27355 call_arg_locations = ca_loc;
27356 call_arg_loc_last = ca_loc;
27357 }
27358 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27359 {
27360 newloc->label = last_label;
27361 newloc->view = view;
27362 }
27363 else
27364 {
27365 if (!last_postcall_label)
27366 {
27367 sprintf (loclabel, "%s-1", last_label);
27368 last_postcall_label = ggc_strdup (loclabel);
27369 }
27370 newloc->label = last_postcall_label;
27371 /* ??? This view is at last_label, not last_label-1, but we
27372 could only assume view at last_label-1 is zero if we could
27373 assume calls always have length greater than one. This is
27374 probably true in general, though there might be a rare
27375 exception to this rule, e.g. if a call insn is optimized out
27376 by target magic. Then, even the -1 in the label will be
27377 wrong, which might invalidate the range. Anyway, using view,
27378 though technically possibly incorrect, will work as far as
27379 ranges go: since L-1 is in the middle of the call insn,
27380 (L-1).0 and (L-1).V shouldn't make any difference, and having
27381 the loclist entry refer to the .loc entry might be useful, so
27382 leave it like this. */
27383 newloc->view = view;
27384 }
27385
27386 if (var_loc_p && flag_debug_asm)
27387 {
27388 const char *name, *sep, *patstr;
27389 if (decl && DECL_NAME (decl))
27390 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27391 else
27392 name = "";
27393 if (NOTE_VAR_LOCATION_LOC (loc_note))
27394 {
27395 sep = " => ";
27396 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27397 }
27398 else
27399 {
27400 sep = " ";
27401 patstr = "RESET";
27402 }
27403 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27404 name, sep, patstr);
27405 }
27406
27407 last_var_location_insn = next_real;
27408 last_in_cold_section_p = in_cold_section_p;
27409 }
27410
27411 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27412 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27413 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27414 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27415 BLOCK_FRAGMENT_ORIGIN links. */
27416 static bool
27417 block_within_block_p (tree block, tree outer, bool bothways)
27418 {
27419 if (block == outer)
27420 return true;
27421
27422 /* Quickly check that OUTER is up BLOCK's supercontext chain. */
27423 for (tree context = BLOCK_SUPERCONTEXT (block);
27424 context != outer;
27425 context = BLOCK_SUPERCONTEXT (context))
27426 if (!context || TREE_CODE (context) != BLOCK)
27427 return false;
27428
27429 if (!bothways)
27430 return true;
27431
27432 /* Now check that each block is actually referenced by its
27433 parent. */
27434 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27435 context = BLOCK_SUPERCONTEXT (context))
27436 {
27437 if (BLOCK_FRAGMENT_ORIGIN (context))
27438 {
27439 gcc_assert (!BLOCK_SUBBLOCKS (context));
27440 context = BLOCK_FRAGMENT_ORIGIN (context);
27441 }
27442 for (tree sub = BLOCK_SUBBLOCKS (context);
27443 sub != block;
27444 sub = BLOCK_CHAIN (sub))
27445 if (!sub)
27446 return false;
27447 if (context == outer)
27448 return true;
27449 else
27450 block = context;
27451 }
27452 }
27453
27454 /* Called during final while assembling the marker of the entry point
27455 for an inlined function. */
27456
27457 static void
27458 dwarf2out_inline_entry (tree block)
27459 {
27460 gcc_assert (debug_inline_points);
27461
27462 /* If we can't represent it, don't bother. */
27463 if (!(dwarf_version >= 3 || !dwarf_strict))
27464 return;
27465
27466 gcc_assert (DECL_P (block_ultimate_origin (block)));
27467
27468 /* Sanity check the block tree. This would catch a case in which
27469 BLOCK got removed from the tree reachable from the outermost
27470 lexical block, but got retained in markers. It would still link
27471 back to its parents, but some ancestor would be missing a link
27472 down the path to the sub BLOCK. If the block got removed, its
27473 BLOCK_NUMBER will not be a usable value. */
27474 if (flag_checking)
27475 gcc_assert (block_within_block_p (block,
27476 DECL_INITIAL (current_function_decl),
27477 true));
27478
27479 gcc_assert (inlined_function_outer_scope_p (block));
27480 gcc_assert (!lookup_block_die (block));
27481
27482 if (BLOCK_FRAGMENT_ORIGIN (block))
27483 block = BLOCK_FRAGMENT_ORIGIN (block);
27484 /* Can the entry point ever not be at the beginning of an
27485 unfragmented lexical block? */
27486 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27487 || (cur_line_info_table
27488 && !ZERO_VIEW_P (cur_line_info_table->view))))
27489 return;
27490
27491 if (!inline_entry_data_table)
27492 inline_entry_data_table
27493 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27494
27495
27496 inline_entry_data **iedp
27497 = inline_entry_data_table->find_slot_with_hash (block,
27498 htab_hash_pointer (block),
27499 INSERT);
27500 if (*iedp)
27501 /* ??? Ideally, we'd record all entry points for the same inlined
27502 function (some may have been duplicated by e.g. unrolling), but
27503 we have no way to represent that ATM. */
27504 return;
27505
27506 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27507 ied->block = block;
27508 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27509 ied->label_num = BLOCK_NUMBER (block);
27510 if (cur_line_info_table)
27511 ied->view = cur_line_info_table->view;
27512
27513 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27514
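/* Illustrative note (an assumption, not part of the original source): the
   label emitted here is what later lets the DW_TAG_inlined_subroutine DIE
   for BLOCK record its entry point (and entry view, when location views
   are enabled). On a typical ELF target it would look something like
   ".LBI4", assuming BLOCK_INLINE_ENTRY_LABEL expands to "LBI". */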
27515 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_INLINE_ENTRY_LABEL,
27516 BLOCK_NUMBER (block));
27517 ASM_OUTPUT_LABEL (asm_out_file, label);
27518 }
27519
27520 /* Called from finalize_size_functions for size functions so that their body
27521 can be encoded in the debug info to describe the layout of variable-length
27522 structures. */
27523
27524 static void
27525 dwarf2out_size_function (tree decl)
27526 {
27527 function_to_dwarf_procedure (decl);
27528 }
27529
27530 /* Note in one location list that the text section has changed. */
27531
27532 int
27533 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27534 {
27535 var_loc_list *list = *slot;
27536 if (list->first)
27537 list->last_before_switch
27538 = list->last->next ? list->last->next : list->last;
27539 return 1;
27540 }
27541
27542 /* Note in all location lists that the text section has changed. */
27543
27544 static void
27545 var_location_switch_text_section (void)
27546 {
27547 if (decl_loc_table == NULL)
27548 return;
27549
27550 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27551 }
27552
27553 /* Create a new line number table. */
27554
27555 static dw_line_info_table *
27556 new_line_info_table (void)
27557 {
27558 dw_line_info_table *table;
27559
27560 table = ggc_cleared_alloc<dw_line_info_table> ();
27561 table->file_num = 1;
27562 table->line_num = 1;
27563 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
27564 FORCE_RESET_NEXT_VIEW (table->view);
27565 table->symviews_since_reset = 0;
27566
27567 return table;
27568 }
27569
27570 /* Look up the "current" table into which we emit line info for SEC, so
27571 that we don't have to repeat the lookup for every source line. */
27572
27573 static void
27574 set_cur_line_info_table (section *sec)
27575 {
27576 dw_line_info_table *table;
27577
27578 if (sec == text_section)
27579 table = text_section_line_info;
27580 else if (sec == cold_text_section)
27581 {
27582 table = cold_text_section_line_info;
27583 if (!table)
27584 {
27585 cold_text_section_line_info = table = new_line_info_table ();
27586 table->end_label = cold_end_label;
27587 }
27588 }
27589 else
27590 {
27591 const char *end_label;
27592
27593 if (crtl->has_bb_partition)
27594 {
27595 if (in_cold_section_p)
27596 end_label = crtl->subsections.cold_section_end_label;
27597 else
27598 end_label = crtl->subsections.hot_section_end_label;
27599 }
27600 else
27601 {
27602 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27603 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27604 current_function_funcdef_no);
27605 end_label = ggc_strdup (label);
27606 }
27607
27608 table = new_line_info_table ();
27609 table->end_label = end_label;
27610
27611 vec_safe_push (separate_line_info, table);
27612 }
27613
27614 if (output_asm_line_debug_info ())
27615 table->is_stmt = (cur_line_info_table
27616 ? cur_line_info_table->is_stmt
27617 : DWARF_LINE_DEFAULT_IS_STMT_START);
27618 cur_line_info_table = table;
27619 }
27620
27621
27622 /* We need to reset the locations at the beginning of each
27623 function. We can't do this in the end_function hook, because the
27624 declarations that use the locations won't have been output when
27625 that hook is called. Also compute have_multiple_function_sections here. */
27626
27627 static void
27628 dwarf2out_begin_function (tree fun)
27629 {
27630 section *sec = function_section (fun);
27631
27632 if (sec != text_section)
27633 have_multiple_function_sections = true;
27634
27635 if (crtl->has_bb_partition && !cold_text_section)
27636 {
27637 gcc_assert (current_function_decl == fun);
27638 cold_text_section = unlikely_text_section ();
27639 switch_to_section (cold_text_section);
27640 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27641 switch_to_section (sec);
27642 }
27643
27644 dwarf2out_note_section_used ();
27645 call_site_count = 0;
27646 tail_call_site_count = 0;
27647
27648 set_cur_line_info_table (sec);
27649 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27650 }
27651
27652 /* Helper function of dwarf2out_end_function, called only after emitting
27653 the very first function into assembly. Check if some .debug_loc range
27654 might end with a .LVL* label that could be equal to .Ltext0.
27655 In that case we must force using absolute addresses in .debug_loc ranges,
27656 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27657 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27658 list terminator.
27659 Set have_multiple_function_sections to true in that case and
27660 terminate htab traversal. */
27661
27662 int
27663 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27664 {
27665 var_loc_list *entry = *slot;
27666 struct var_loc_node *node;
27667
27668 node = entry->first;
27669 if (node && node->next && node->next->label)
27670 {
27671 unsigned int i;
27672 const char *label = node->next->label;
27673 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27674
27675 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27676 {
27677 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27678 if (strcmp (label, loclabel) == 0)
27679 {
27680 have_multiple_function_sections = true;
27681 return 0;
27682 }
27683 }
27684 }
27685 return 1;
27686 }
27687
27688 /* Hook called after emitting a function into assembly.
27689 This does something only for the very first function emitted. */
27690
27691 static void
27692 dwarf2out_end_function (unsigned int)
27693 {
27694 if (in_first_function_p
27695 && !have_multiple_function_sections
27696 && first_loclabel_num_not_at_text_label
27697 && decl_loc_table)
27698 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27699 in_first_function_p = false;
27700 maybe_at_text_label_p = false;
27701 }
27702
27703 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27704 front-ends register a translation unit even before dwarf2out_init is
27705 called. */
27706 static tree main_translation_unit = NULL_TREE;
27707
27708 /* Hook called by front-ends after they built their main translation unit.
27709 Associate comp_unit_die to UNIT. */
27710
27711 static void
27712 dwarf2out_register_main_translation_unit (tree unit)
27713 {
27714 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27715 && main_translation_unit == NULL_TREE);
27716 main_translation_unit = unit;
27717 /* If dwarf2out_init has not been called yet, it will perform the association
27718 itself looking at main_translation_unit. */
27719 if (decl_die_table != NULL)
27720 equate_decl_number_to_die (unit, comp_unit_die ());
27721 }
27722
27723 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27724
27725 static void
27726 push_dw_line_info_entry (dw_line_info_table *table,
27727 enum dw_line_info_opcode opcode, unsigned int val)
27728 {
27729 dw_line_info_entry e;
27730 e.opcode = opcode;
27731 e.val = val;
27732 vec_safe_push (table->entries, e);
27733 }
27734
27735 /* Output a label to mark the beginning of a source code line entry
27736 and record information relating to this source line, in
27737 'line_info_table' for later output of the .debug_line section. */
27738 /* ??? The discriminator parameter ought to be unsigned. */
27739
27740 static void
27741 dwarf2out_source_line (unsigned int line, unsigned int column,
27742 const char *filename,
27743 int discriminator, bool is_stmt)
27744 {
27745 unsigned int file_num;
27746 dw_line_info_table *table;
27747 static var_loc_view lvugid;
27748
27749 if (debug_info_level < DINFO_LEVEL_TERSE)
27750 return;
27751
27752 table = cur_line_info_table;
27753
27754 if (line == 0)
27755 {
27756 if (debug_variable_location_views
27757 && output_asm_line_debug_info ()
27758 && table && !RESETTING_VIEW_P (table->view))
27759 {
27760 /* If we're using the assembler to compute view numbers, we
27761 can't issue a .loc directive for line zero, so we can't
27762 get a view number at this point. We might attempt to
27763 compute it from the previous view, or equate it to a
27764 subsequent view (though it might not be there!), but
27765 since we're omitting the line number entry, we might as
27766 well omit the view number as well. That means pretending
27767 it's a view number zero, which might very well turn out
27768 to be correct. ??? Extend the assembler so that the
27769 compiler could emit e.g. ".locview .LVU#", to output a
27770 view without changing line number information. We'd then
27771 have to count it in symviews_since_reset; when it's omitted,
27772 it doesn't count. */
27773 if (!zero_view_p)
27774 zero_view_p = BITMAP_GGC_ALLOC ();
27775 bitmap_set_bit (zero_view_p, table->view);
27776 if (flag_debug_asm)
27777 {
27778 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27779 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27780 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27781 ASM_COMMENT_START);
27782 assemble_name (asm_out_file, label);
27783 putc ('\n', asm_out_file);
27784 }
27785 table->view = ++lvugid;
27786 }
27787 return;
27788 }
27789
27790 /* The discriminator column was added in DWARF 4. Simplify the code below
27791 by zeroing it out here if we're not supposed to output it. */
27792 if (dwarf_version < 4 && dwarf_strict)
27793 discriminator = 0;
27794
27795 if (!debug_column_info)
27796 column = 0;
27797
27798 file_num = maybe_emit_file (lookup_filename (filename));
27799
27800 /* ??? TODO: Elide duplicate line number entries. Traditionally,
27801 the debugger has used the second (possibly duplicate) line number
27802 at the beginning of the function to mark the end of the prologue.
27803 We could eliminate any other duplicates within the function. For
27804 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
27805 that second line number entry. */
27806 /* Recall that this end-of-prologue indication is *not* the same thing
27807 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
27808 to which the hook corresponds, follows the last insn that was
27809 emitted by gen_prologue. What we need is to precede the first insn
27810 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
27811 insn that corresponds to something the user wrote. These may be
27812 very different locations once scheduling is enabled. */
27813
27814 if (0 && file_num == table->file_num
27815 && line == table->line_num
27816 && column == table->column_num
27817 && discriminator == table->discrim_num
27818 && is_stmt == table->is_stmt)
27819 return;
27820
27821 switch_to_section (current_function_section ());
27822
27823 /* If requested, emit something human-readable. */
27824 if (flag_debug_asm)
27825 {
27826 if (debug_column_info)
27827 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
27828 filename, line, column);
27829 else
27830 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
27831 filename, line);
27832 }
27833
27834 if (output_asm_line_debug_info ())
27835 {
27836 /* Emit the .loc directive understood by GNU as. */
27837 /* "\t.loc %u %u 0 is_stmt %u discriminator %u",
27838 file_num, line, is_stmt, discriminator */
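/* Purely illustrative (hypothetical values, not emitted verbatim here):
   the directive assembled below might read
   .loc 1 42 7 is_stmt 1 discriminator 2
   optionally followed by " view .LVU5" when location views are in use,
   as handled further down. */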
27839 fputs ("\t.loc ", asm_out_file);
27840 fprint_ul (asm_out_file, file_num);
27841 putc (' ', asm_out_file);
27842 fprint_ul (asm_out_file, line);
27843 putc (' ', asm_out_file);
27844 fprint_ul (asm_out_file, column);
27845
27846 if (is_stmt != table->is_stmt)
27847 {
27848 #if HAVE_GAS_LOC_STMT
27849 fputs (" is_stmt ", asm_out_file);
27850 putc (is_stmt ? '1' : '0', asm_out_file);
27851 #endif
27852 }
27853 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
27854 {
27855 gcc_assert (discriminator > 0);
27856 fputs (" discriminator ", asm_out_file);
27857 fprint_ul (asm_out_file, (unsigned long) discriminator);
27858 }
27859 if (debug_variable_location_views)
27860 {
27861 if (!RESETTING_VIEW_P (table->view))
27862 {
27863 table->symviews_since_reset++;
27864 if (table->symviews_since_reset > symview_upper_bound)
27865 symview_upper_bound = table->symviews_since_reset;
27866 /* When we're using the assembler to compute view
27867 numbers, we output symbolic labels after "view" in
27868 .loc directives, and the assembler will set them for
27869 us, so that we can refer to the view numbers in
27870 location lists. The only exceptions are when we know
27871 a view will be zero: "-0" is a forced reset, used
27872 e.g. in the beginning of functions, whereas "0" tells
27873 the assembler to check that there was a PC change
27874 since the previous view, in a way that implicitly
27875 resets the next view. */
27876 fputs (" view ", asm_out_file);
27877 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27878 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27879 assemble_name (asm_out_file, label);
27880 table->view = ++lvugid;
27881 }
27882 else
27883 {
27884 table->symviews_since_reset = 0;
27885 if (FORCE_RESETTING_VIEW_P (table->view))
27886 fputs (" view -0", asm_out_file);
27887 else
27888 fputs (" view 0", asm_out_file);
27889 /* Mark the present view as a zero view. Earlier debug
27890 binds may have already added its id to loclists to be
27891 emitted later, so we can't reuse the id for something
27892 else. However, it's good to know whether a view is
27893 known to be zero, because then we may be able to
27894 optimize out locviews that are all zeros, so take
27895 note of it in zero_view_p. */
27896 if (!zero_view_p)
27897 zero_view_p = BITMAP_GGC_ALLOC ();
27898 bitmap_set_bit (zero_view_p, lvugid);
27899 table->view = ++lvugid;
27900 }
27901 }
27902 putc ('\n', asm_out_file);
27903 }
27904 else
27905 {
27906 unsigned int label_num = ++line_info_label_num;
27907
27908 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
27909
27910 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
27911 push_dw_line_info_entry (table, LI_adv_address, label_num);
27912 else
27913 push_dw_line_info_entry (table, LI_set_address, label_num);
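/* Roughly (editorial note): LI_adv_address advances the address to the new
   label while keeping the view numbering going, whereas LI_set_address
   establishes a fresh address, which is why it is used when the view is
   being reset (see the RESETTING_VIEW_P test above). */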
27914 if (debug_variable_location_views)
27915 {
27916 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
27917 if (resetting)
27918 table->view = 0;
27919
27920 if (flag_debug_asm)
27921 fprintf (asm_out_file, "\t%s view %s%d\n",
27922 ASM_COMMENT_START,
27923 resetting ? "-" : "",
27924 table->view);
27925
27926 table->view++;
27927 }
27928 if (file_num != table->file_num)
27929 push_dw_line_info_entry (table, LI_set_file, file_num);
27930 if (discriminator != table->discrim_num)
27931 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
27932 if (is_stmt != table->is_stmt)
27933 push_dw_line_info_entry (table, LI_negate_stmt, 0);
27934 push_dw_line_info_entry (table, LI_set_line, line);
27935 if (debug_column_info)
27936 push_dw_line_info_entry (table, LI_set_column, column);
27937 }
27938
27939 table->file_num = file_num;
27940 table->line_num = line;
27941 table->column_num = column;
27942 table->discrim_num = discriminator;
27943 table->is_stmt = is_stmt;
27944 table->in_use = true;
27945 }
27946
27947 /* Record the beginning of a new source file. */
27948
27949 static void
27950 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
27951 {
27952 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27953 {
27954 macinfo_entry e;
27955 e.code = DW_MACINFO_start_file;
27956 e.lineno = lineno;
27957 e.info = ggc_strdup (filename);
27958 vec_safe_push (macinfo_table, e);
27959 }
27960 }
27961
27962 /* Record the end of a source file. */
27963
27964 static void
27965 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
27966 {
27967 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27968 {
27969 macinfo_entry e;
27970 e.code = DW_MACINFO_end_file;
27971 e.lineno = lineno;
27972 e.info = NULL;
27973 vec_safe_push (macinfo_table, e);
27974 }
27975 }
27976
27977 /* Called from debug_define in toplev.c. The `buffer' parameter contains
27978 the tail part of the directive line, i.e. the part which is past the
27979 initial whitespace, #, whitespace, directive-name, whitespace part. */
27980
27981 static void
27982 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
27983 const char *buffer ATTRIBUTE_UNUSED)
27984 {
27985 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27986 {
27987 macinfo_entry e;
27988 /* Insert a dummy first entry to be able to optimize the whole
27989 predefined macro block using DW_MACRO_import. */
27990 if (macinfo_table->is_empty () && lineno <= 1)
27991 {
27992 e.code = 0;
27993 e.lineno = 0;
27994 e.info = NULL;
27995 vec_safe_push (macinfo_table, e);
27996 }
27997 e.code = DW_MACINFO_define;
27998 e.lineno = lineno;
27999 e.info = ggc_strdup (buffer);
28000 vec_safe_push (macinfo_table, e);
28001 }
28002 }
28003
28004 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
28005 the tail part of the directive line, i.e. the part which is past the
28006 initial whitespace, #, whitespace, directive-name, whitespace part. */
28007
28008 static void
28009 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
28010 const char *buffer ATTRIBUTE_UNUSED)
28011 {
28012 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28013 {
28014 macinfo_entry e;
28015 /* Insert a dummy first entry to be able to optimize the whole
28016 predefined macro block using DW_MACRO_import. */
28017 if (macinfo_table->is_empty () && lineno <= 1)
28018 {
28019 e.code = 0;
28020 e.lineno = 0;
28021 e.info = NULL;
28022 vec_safe_push (macinfo_table, e);
28023 }
28024 e.code = DW_MACINFO_undef;
28025 e.lineno = lineno;
28026 e.info = ggc_strdup (buffer);
28027 vec_safe_push (macinfo_table, e);
28028 }
28029 }
28030
28031 /* Helpers to manipulate the hash table of macinfo entries. */
28032
28033 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
28034 {
28035 static inline hashval_t hash (const macinfo_entry *);
28036 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
28037 };
28038
28039 inline hashval_t
28040 macinfo_entry_hasher::hash (const macinfo_entry *entry)
28041 {
28042 return htab_hash_string (entry->info);
28043 }
28044
28045 inline bool
28046 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
28047 const macinfo_entry *entry2)
28048 {
28049 return !strcmp (entry1->info, entry2->info);
28050 }
28051
28052 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
28053
28054 /* Output a single .debug_macinfo entry. */
28055
28056 static void
28057 output_macinfo_op (macinfo_entry *ref)
28058 {
28059 int file_num;
28060 size_t len;
28061 struct indirect_string_node *node;
28062 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28063 struct dwarf_file_data *fd;
28064
28065 switch (ref->code)
28066 {
28067 case DW_MACINFO_start_file:
28068 fd = lookup_filename (ref->info);
28069 file_num = maybe_emit_file (fd);
28070 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
28071 dw2_asm_output_data_uleb128 (ref->lineno,
28072 "Included from line number %lu",
28073 (unsigned long) ref->lineno);
28074 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
28075 break;
28076 case DW_MACINFO_end_file:
28077 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
28078 break;
28079 case DW_MACINFO_define:
28080 case DW_MACINFO_undef:
28081 len = strlen (ref->info) + 1;
28082 if (!dwarf_strict
28083 && len > DWARF_OFFSET_SIZE
28084 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28085 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28086 {
28087 ref->code = ref->code == DW_MACINFO_define
28088 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
28089 output_macinfo_op (ref);
28090 return;
28091 }
28092 dw2_asm_output_data (1, ref->code,
28093 ref->code == DW_MACINFO_define
28094 ? "Define macro" : "Undefine macro");
28095 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28096 (unsigned long) ref->lineno);
28097 dw2_asm_output_nstring (ref->info, -1, "The macro");
28098 break;
28099 case DW_MACRO_define_strp:
28100 case DW_MACRO_undef_strp:
28101 /* NB: dwarf2out_finish performs:
28102 1. save_macinfo_strings
28103 2. hash table traverse of index_string
28104 3. output_macinfo -> output_macinfo_op
28105 4. output_indirect_strings
28106 -> hash table traverse of output_index_string
28107
28108 When output_macinfo_op is called, all index strings have already been
28109 added to the hash table by save_macinfo_strings, so we must not pass
28110 INSERT to find_slot_with_hash: even when no insertion is needed, INSERT
28111 may expand the hash table and change the traversal order between
28112 index_string and output_index_string. */
28113 node = find_AT_string (ref->info, NO_INSERT);
28114 gcc_assert (node
28115 && (node->form == DW_FORM_strp
28116 || node->form == dwarf_FORM (DW_FORM_strx)));
28117 dw2_asm_output_data (1, ref->code,
28118 ref->code == DW_MACRO_define_strp
28119 ? "Define macro strp"
28120 : "Undefine macro strp");
28121 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28122 (unsigned long) ref->lineno);
28123 if (node->form == DW_FORM_strp)
28124 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
28125 debug_str_section, "The macro: \"%s\"",
28126 ref->info);
28127 else
28128 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
28129 ref->info);
28130 break;
28131 case DW_MACRO_import:
28132 dw2_asm_output_data (1, ref->code, "Import");
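/* Editorial note: for DW_MACRO_import entries created by
   optimize_macinfo_range, the lineno field holds the index of the
   transparent include rather than a source line; adding
   macinfo_label_base keeps the label numbers unique across the early
   and late debug passes. */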
28133 ASM_GENERATE_INTERNAL_LABEL (label,
28134 DEBUG_MACRO_SECTION_LABEL,
28135 ref->lineno + macinfo_label_base);
28136 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
28137 break;
28138 default:
28139 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
28140 ASM_COMMENT_START, (unsigned long) ref->code);
28141 break;
28142 }
28143 }
28144
28145 /* Attempt to make a sequence of define/undef macinfo ops shareable with
28146 other compilation units' .debug_macinfo sections. IDX is the index of
28147 the first define/undef op; return the number of ops that should be
28148 emitted in a comdat .debug_macinfo section and emit
28149 a DW_MACRO_import entry referencing it.
28150 If the define/undef entry should be emitted normally, return 0. */
28151
28152 static unsigned
28153 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
28154 macinfo_hash_type **macinfo_htab)
28155 {
28156 macinfo_entry *first, *second, *cur, *inc;
28157 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
28158 unsigned char checksum[16];
28159 struct md5_ctx ctx;
28160 char *grp_name, *tail;
28161 const char *base;
28162 unsigned int i, count, encoded_filename_len, linebuf_len;
28163 macinfo_entry **slot;
28164
28165 first = &(*macinfo_table)[idx];
28166 second = &(*macinfo_table)[idx + 1];
28167
28168 /* Optimize only if there are at least two consecutive define/undef ops,
28169 and either all of them are before first DW_MACINFO_start_file
28170 with lineno {0,1} (i.e. predefined macro block), or all of them are
28171 in some included header file. */
28172 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
28173 return 0;
28174 if (vec_safe_is_empty (files))
28175 {
28176 if (first->lineno > 1 || second->lineno > 1)
28177 return 0;
28178 }
28179 else if (first->lineno == 0)
28180 return 0;
28181
28182 /* Find the last define/undef entry that can be grouped together
28183 with first and at the same time compute md5 checksum of their
28184 codes, linenumbers and strings. */
28185 md5_init_ctx (&ctx);
28186 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28187 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28188 break;
28189 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28190 break;
28191 else
28192 {
28193 unsigned char code = cur->code;
28194 md5_process_bytes (&code, 1, &ctx);
28195 checksum_uleb128 (cur->lineno, &ctx);
28196 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28197 }
28198 md5_finish_ctx (&ctx, checksum);
28199 count = i - idx;
28200
28201 /* From the containing include filename (if any) pick up just
28202 usable characters from its basename. */
28203 if (vec_safe_is_empty (files))
28204 base = "";
28205 else
28206 base = lbasename (files->last ().info);
28207 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28208 if (ISIDNUM (base[i]) || base[i] == '.')
28209 encoded_filename_len++;
28210 /* Count the '.' appended at the end. */
28211 if (encoded_filename_len)
28212 encoded_filename_len++;
28213
28214 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28215 linebuf_len = strlen (linebuf);
28216
28217 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
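/* Purely as an example (made-up values): a block of defines from line 1
   of an included stdio.h, with 4-byte DWARF offsets, could end up named
   "wm4.stdio.h.1.0123456789abcdef0123456789abcdef". */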
28218 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28219 + 16 * 2 + 1);
28220 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
28221 tail = grp_name + 4;
28222 if (encoded_filename_len)
28223 {
28224 for (i = 0; base[i]; i++)
28225 if (ISIDNUM (base[i]) || base[i] == '.')
28226 *tail++ = base[i];
28227 *tail++ = '.';
28228 }
28229 memcpy (tail, linebuf, linebuf_len);
28230 tail += linebuf_len;
28231 *tail++ = '.';
28232 for (i = 0; i < 16; i++)
28233 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
28234
28235 /* Construct a macinfo_entry for DW_MACRO_import
28236 in the empty vector entry before the first define/undef. */
28237 inc = &(*macinfo_table)[idx - 1];
28238 inc->code = DW_MACRO_import;
28239 inc->lineno = 0;
28240 inc->info = ggc_strdup (grp_name);
28241 if (!*macinfo_htab)
28242 *macinfo_htab = new macinfo_hash_type (10);
28243 /* Avoid emitting duplicates. */
28244 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28245 if (*slot != NULL)
28246 {
28247 inc->code = 0;
28248 inc->info = NULL;
28249 /* If such an entry has been used before, just emit
28250 a DW_MACRO_import op. */
28251 inc = *slot;
28252 output_macinfo_op (inc);
28253 /* And clear all macinfo_entry in the range to avoid emitting them
28254 in the second pass. */
28255 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28256 {
28257 cur->code = 0;
28258 cur->info = NULL;
28259 }
28260 }
28261 else
28262 {
28263 *slot = inc;
28264 inc->lineno = (*macinfo_htab)->elements ();
28265 output_macinfo_op (inc);
28266 }
28267 return count;
28268 }
28269
28270 /* Save any strings needed by the macinfo table in the debug str
28271 table. All strings must be collected into the table by the time
28272 index_string is called. */
28273
28274 static void
28275 save_macinfo_strings (void)
28276 {
28277 unsigned len;
28278 unsigned i;
28279 macinfo_entry *ref;
28280
28281 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28282 {
28283 switch (ref->code)
28284 {
28285 /* Match the logic in output_macinfo_op to decide on
28286 indirect strings. */
28287 case DW_MACINFO_define:
28288 case DW_MACINFO_undef:
28289 len = strlen (ref->info) + 1;
28290 if (!dwarf_strict
28291 && len > DWARF_OFFSET_SIZE
28292 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28293 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28294 set_indirect_string (find_AT_string (ref->info));
28295 break;
28296 case DW_MACINFO_start_file:
28297 /* -gsplit-dwarf -g3 will also output filename as indirect
28298 string. */
28299 if (!dwarf_split_debug_info)
28300 break;
28301 /* Fall through. */
28302 case DW_MACRO_define_strp:
28303 case DW_MACRO_undef_strp:
28304 set_indirect_string (find_AT_string (ref->info));
28305 break;
28306 default:
28307 break;
28308 }
28309 }
28310 }
28311
28312 /* Output macinfo section(s). */
28313
28314 static void
28315 output_macinfo (const char *debug_line_label, bool early_lto_debug)
28316 {
28317 unsigned i;
28318 unsigned long length = vec_safe_length (macinfo_table);
28319 macinfo_entry *ref;
28320 vec<macinfo_entry, va_gc> *files = NULL;
28321 macinfo_hash_type *macinfo_htab = NULL;
28322 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28323
28324 if (! length)
28325 return;
28326
28327 /* output_macinfo* uses these interchangeably. */
28328 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28329 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28330 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28331 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28332
28333 /* AIX Assembler inserts the length, so adjust the reference to match the
28334 offset expected by debuggers. */
28335 strcpy (dl_section_ref, debug_line_label);
28336 if (XCOFF_DEBUGGING_INFO)
28337 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28338
28339 /* For .debug_macro emit the section header. */
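/* In the flags byte of this header, bit 0 signals the 64-bit offset size
   and bit 1 signals that an offset into .debug_line follows, hence the
   values 3 and 2 emitted below. */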
28340 if (!dwarf_strict || dwarf_version >= 5)
28341 {
28342 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28343 "DWARF macro version number");
28344 if (DWARF_OFFSET_SIZE == 8)
28345 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28346 else
28347 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
28348 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
28349 debug_line_section, NULL);
28350 }
28351
28352 /* The first loop emits the primary .debug_macinfo section; after each
28353 op is emitted, its macinfo_entry is cleared.
28354 If a longer range of define/undef ops can be optimized using
28355 DW_MACRO_import, the DW_MACRO_import op is emitted and kept in
28356 the vector before the first define/undef in the range, while the
28357 range of define/undef ops itself is not emitted here but kept for the second loop below. */
28358 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28359 {
28360 switch (ref->code)
28361 {
28362 case DW_MACINFO_start_file:
28363 vec_safe_push (files, *ref);
28364 break;
28365 case DW_MACINFO_end_file:
28366 if (!vec_safe_is_empty (files))
28367 files->pop ();
28368 break;
28369 case DW_MACINFO_define:
28370 case DW_MACINFO_undef:
28371 if ((!dwarf_strict || dwarf_version >= 5)
28372 && HAVE_COMDAT_GROUP
28373 && vec_safe_length (files) != 1
28374 && i > 0
28375 && i + 1 < length
28376 && (*macinfo_table)[i - 1].code == 0)
28377 {
28378 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28379 if (count)
28380 {
28381 i += count - 1;
28382 continue;
28383 }
28384 }
28385 break;
28386 case 0:
28387 /* A dummy entry may be inserted at the beginning to be able
28388 to optimize the whole block of predefined macros. */
28389 if (i == 0)
28390 continue;
28391 default:
28392 break;
28393 }
28394 output_macinfo_op (ref);
28395 ref->info = NULL;
28396 ref->code = 0;
28397 }
28398
28399 if (!macinfo_htab)
28400 return;
28401
28402 /* Save the number of transparent includes so we can adjust the
28403 label number for the fat LTO object DWARF. */
28404 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28405
28406 delete macinfo_htab;
28407 macinfo_htab = NULL;
28408
28409 /* If any DW_MACRO_import were used, on those DW_MACRO_import entries
28410 terminate the current chain and switch to a new comdat .debug_macinfo
28411 section and emit the define/undef entries within it. */
28412 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28413 switch (ref->code)
28414 {
28415 case 0:
28416 continue;
28417 case DW_MACRO_import:
28418 {
28419 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28420 tree comdat_key = get_identifier (ref->info);
28421 /* Terminate the previous .debug_macinfo section. */
28422 dw2_asm_output_data (1, 0, "End compilation unit");
28423 targetm.asm_out.named_section (debug_macinfo_section_name,
28424 SECTION_DEBUG
28425 | SECTION_LINKONCE
28426 | (early_lto_debug
28427 ? SECTION_EXCLUDE : 0),
28428 comdat_key);
28429 ASM_GENERATE_INTERNAL_LABEL (label,
28430 DEBUG_MACRO_SECTION_LABEL,
28431 ref->lineno + macinfo_label_base);
28432 ASM_OUTPUT_LABEL (asm_out_file, label);
28433 ref->code = 0;
28434 ref->info = NULL;
28435 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28436 "DWARF macro version number");
28437 if (DWARF_OFFSET_SIZE == 8)
28438 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28439 else
28440 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28441 }
28442 break;
28443 case DW_MACINFO_define:
28444 case DW_MACINFO_undef:
28445 output_macinfo_op (ref);
28446 ref->code = 0;
28447 ref->info = NULL;
28448 break;
28449 default:
28450 gcc_unreachable ();
28451 }
28452
28453 macinfo_label_base += macinfo_label_base_adj;
28454 }
28455
28456 /* Initialize the various sections and labels for dwarf output, either for
28457 early LTO debug info or for normal late output, per EARLY_LTO_DEBUG.
28458 Returns the generation (zero-based count of times the function was called). */
28459
28460 static unsigned
28461 init_sections_and_labels (bool early_lto_debug)
28462 {
28463 /* As we may get called multiple times, keep a generation count for
28464 the labels. */
28465 static unsigned generation = 0;
28466
28467 if (early_lto_debug)
28468 {
28469 if (!dwarf_split_debug_info)
28470 {
28471 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28472 SECTION_DEBUG | SECTION_EXCLUDE,
28473 NULL);
28474 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28475 SECTION_DEBUG | SECTION_EXCLUDE,
28476 NULL);
28477 debug_macinfo_section_name
28478 = ((dwarf_strict && dwarf_version < 5)
28479 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28480 debug_macinfo_section = get_section (debug_macinfo_section_name,
28481 SECTION_DEBUG
28482 | SECTION_EXCLUDE, NULL);
28483 }
28484 else
28485 {
28486 /* ??? Which of the following do we need early? */
28487 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28488 SECTION_DEBUG | SECTION_EXCLUDE,
28489 NULL);
28490 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28491 SECTION_DEBUG | SECTION_EXCLUDE,
28492 NULL);
28493 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28494 SECTION_DEBUG
28495 | SECTION_EXCLUDE, NULL);
28496 debug_skeleton_abbrev_section
28497 = get_section (DEBUG_LTO_ABBREV_SECTION,
28498 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28499 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28500 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28501 generation);
28502
28503 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28504 stay in the main .o, but the skeleton_line goes into the split
28505 off dwo. */
28506 debug_skeleton_line_section
28507 = get_section (DEBUG_LTO_LINE_SECTION,
28508 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28509 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28510 DEBUG_SKELETON_LINE_SECTION_LABEL,
28511 generation);
28512 debug_str_offsets_section
28513 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28514 SECTION_DEBUG | SECTION_EXCLUDE,
28515 NULL);
28516 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28517 DEBUG_SKELETON_INFO_SECTION_LABEL,
28518 generation);
28519 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28520 DEBUG_STR_DWO_SECTION_FLAGS,
28521 NULL);
28522 debug_macinfo_section_name
28523 = ((dwarf_strict && dwarf_version < 5)
28524 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28525 debug_macinfo_section = get_section (debug_macinfo_section_name,
28526 SECTION_DEBUG | SECTION_EXCLUDE,
28527 NULL);
28528 }
28529 /* For macro info and the file table we have to refer to a
28530 debug_line section. */
28531 debug_line_section = get_section (DEBUG_LTO_LINE_SECTION,
28532 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28533 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28534 DEBUG_LINE_SECTION_LABEL, generation);
28535
28536 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28537 DEBUG_STR_SECTION_FLAGS
28538 | SECTION_EXCLUDE, NULL);
28539 if (!dwarf_split_debug_info)
28540 debug_line_str_section
28541 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28542 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28543 }
28544 else
28545 {
28546 if (!dwarf_split_debug_info)
28547 {
28548 debug_info_section = get_section (DEBUG_INFO_SECTION,
28549 SECTION_DEBUG, NULL);
28550 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28551 SECTION_DEBUG, NULL);
28552 debug_loc_section = get_section (dwarf_version >= 5
28553 ? DEBUG_LOCLISTS_SECTION
28554 : DEBUG_LOC_SECTION,
28555 SECTION_DEBUG, NULL);
28556 debug_macinfo_section_name
28557 = ((dwarf_strict && dwarf_version < 5)
28558 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28559 debug_macinfo_section = get_section (debug_macinfo_section_name,
28560 SECTION_DEBUG, NULL);
28561 }
28562 else
28563 {
28564 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28565 SECTION_DEBUG | SECTION_EXCLUDE,
28566 NULL);
28567 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28568 SECTION_DEBUG | SECTION_EXCLUDE,
28569 NULL);
28570 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28571 SECTION_DEBUG, NULL);
28572 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28573 SECTION_DEBUG, NULL);
28574 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28575 SECTION_DEBUG, NULL);
28576 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28577 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28578 generation);
28579
28580 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28581 stay in the main .o, but the skeleton_line goes into the
28582 split off dwo. */
28583 debug_skeleton_line_section
28584 = get_section (DEBUG_DWO_LINE_SECTION,
28585 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28586 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28587 DEBUG_SKELETON_LINE_SECTION_LABEL,
28588 generation);
28589 debug_str_offsets_section
28590 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28591 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28592 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28593 DEBUG_SKELETON_INFO_SECTION_LABEL,
28594 generation);
28595 debug_loc_section = get_section (dwarf_version >= 5
28596 ? DEBUG_DWO_LOCLISTS_SECTION
28597 : DEBUG_DWO_LOC_SECTION,
28598 SECTION_DEBUG | SECTION_EXCLUDE,
28599 NULL);
28600 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28601 DEBUG_STR_DWO_SECTION_FLAGS,
28602 NULL);
28603 debug_macinfo_section_name
28604 = ((dwarf_strict && dwarf_version < 5)
28605 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28606 debug_macinfo_section = get_section (debug_macinfo_section_name,
28607 SECTION_DEBUG | SECTION_EXCLUDE,
28608 NULL);
28609 }
28610 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28611 SECTION_DEBUG, NULL);
28612 debug_line_section = get_section (DEBUG_LINE_SECTION,
28613 SECTION_DEBUG, NULL);
28614 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28615 SECTION_DEBUG, NULL);
28616 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28617 SECTION_DEBUG, NULL);
28618 debug_str_section = get_section (DEBUG_STR_SECTION,
28619 DEBUG_STR_SECTION_FLAGS, NULL);
28620 if (!dwarf_split_debug_info && !output_asm_line_debug_info ())
28621 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28622 DEBUG_STR_SECTION_FLAGS, NULL);
28623
28624 debug_ranges_section = get_section (dwarf_version >= 5
28625 ? DEBUG_RNGLISTS_SECTION
28626 : DEBUG_RANGES_SECTION,
28627 SECTION_DEBUG, NULL);
28628 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28629 SECTION_DEBUG, NULL);
28630 }
28631
28632 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28633 DEBUG_ABBREV_SECTION_LABEL, generation);
28634 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28635 DEBUG_INFO_SECTION_LABEL, generation);
28636 info_section_emitted = false;
28637 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28638 DEBUG_LINE_SECTION_LABEL, generation);
28639 /* There are up to 4 unique ranges labels per generation.
28640 See also output_rnglists. */
28641 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28642 DEBUG_RANGES_SECTION_LABEL, generation * 4);
28643 if (dwarf_version >= 5 && dwarf_split_debug_info)
28644 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28645 DEBUG_RANGES_SECTION_LABEL,
28646 1 + generation * 4);
28647 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28648 DEBUG_ADDR_SECTION_LABEL, generation);
28649 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28650 (dwarf_strict && dwarf_version < 5)
28651 ? DEBUG_MACINFO_SECTION_LABEL
28652 : DEBUG_MACRO_SECTION_LABEL, generation);
28653 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28654 generation);
28655
28656 ++generation;
28657 return generation - 1;
28658 }
28659
28660 /* Set up for Dwarf output at the start of compilation. */
28661
28662 static void
28663 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28664 {
28665 /* Allocate the file_table. */
28666 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28667
28668 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28669 /* Allocate the decl_die_table. */
28670 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28671
28672 /* Allocate the decl_loc_table. */
28673 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28674
28675 /* Allocate the cached_dw_loc_list_table. */
28676 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28677
28678 /* Allocate the initial hunk of the abbrev_die_table. */
28679 vec_alloc (abbrev_die_table, 256);
28680 /* Zero-th entry is allocated, but unused. */
28681 abbrev_die_table->quick_push (NULL);
28682
28683 /* Allocate the dwarf_proc_stack_usage_map. */
28684 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28685
28686 /* Allocate the pubtypes and pubnames vectors. */
28687 vec_alloc (pubname_table, 32);
28688 vec_alloc (pubtype_table, 32);
28689
28690 vec_alloc (incomplete_types, 64);
28691
28692 vec_alloc (used_rtx_array, 32);
28693
28694 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28695 vec_alloc (macinfo_table, 64);
28696 #endif
28697
28698 /* If front-ends already registered a main translation unit but we were not
28699 ready to perform the association, do this now. */
28700 if (main_translation_unit != NULL_TREE)
28701 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28702 }
28703
28704 /* Called before compile () starts outputting functions, variables
28705 and toplevel asms into assembly. */
28706
28707 static void
28708 dwarf2out_assembly_start (void)
28709 {
28710 if (text_section_line_info)
28711 return;
28712
28713 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28714 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28715 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28716 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28717 COLD_TEXT_SECTION_LABEL, 0);
28718 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28719
28720 switch_to_section (text_section);
28721 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28722 #endif
28723
28724 /* Make sure the line number table for .text always exists. */
28725 text_section_line_info = new_line_info_table ();
28726 text_section_line_info->end_label = text_end_label;
28727
28728 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28729 cur_line_info_table = text_section_line_info;
28730 #endif
28731
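/* If the CFI is emitted through assembler directives and no .eh_frame is
   needed, tell the assembler to direct the unwind information into
   .debug_frame instead. */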
28732 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28733 && dwarf2out_do_cfi_asm ()
28734 && !dwarf2out_do_eh_frame ())
28735 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
28736 }
28737
28738 /* A helper function for dwarf2out_finish called through
28739 htab_traverse. Assign a string its index. All strings must be
28740 collected into the table by the time index_string is called,
28741 because the indexing code relies on htab_traverse to traverse nodes
28742 in the same order for each run. */
28743
28744 int
28745 index_string (indirect_string_node **h, unsigned int *index)
28746 {
28747 indirect_string_node *node = *h;
28748
28749 find_string_form (node);
28750 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28751 {
28752 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28753 node->index = *index;
28754 *index += 1;
28755 }
28756 return 1;
28757 }
28758
28759 /* A helper function for output_indirect_strings called through
28760 htab_traverse. Output the offset to a string and update the
28761 current offset. */
28762
28763 int
28764 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28765 {
28766 indirect_string_node *node = *h;
28767
28768 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28769 {
28770 /* Assert that this node has been assigned an index. */
28771 gcc_assert (node->index != NO_INDEX_ASSIGNED
28772 && node->index != NOT_INDEXED);
28773 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
28774 "indexed string 0x%x: %s", node->index, node->str);
28775 *offset += strlen (node->str) + 1;
28776 }
28777 return 1;
28778 }
28779
28780 /* A helper function for dwarf2out_finish called through
28781 htab_traverse. Output the indexed string. */
28782
28783 int
28784 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28785 {
28786 struct indirect_string_node *node = *h;
28787
28788 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28789 {
28790 /* Assert that the strings are output in the same order as their
28791 indexes were assigned. */
28792 gcc_assert (*cur_idx == node->index);
28793 assemble_string (node->str, strlen (node->str) + 1);
28794 *cur_idx += 1;
28795 }
28796 return 1;
28797 }
28798
28799 /* A helper function for output_indirect_strings. Counts the number
28800 of indexed string offsets. Must match the logic of the functions
28801 output_index_string[_offsets] above. */
28802 int
28803 count_index_strings (indirect_string_node **h, unsigned int *last_idx)
28804 {
28805 struct indirect_string_node *node = *h;
28806
28807 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28808 *last_idx += 1;
28809 return 1;
28810 }
28811
28812 /* A helper function for dwarf2out_finish called through
28813 htab_traverse. Emit one queued .debug_str string. */
28814
28815 int
28816 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
28817 {
28818 struct indirect_string_node *node = *h;
28819
28820 node->form = find_string_form (node);
28821 if (node->form == form && node->refcount > 0)
28822 {
28823 ASM_OUTPUT_LABEL (asm_out_file, node->label);
28824 assemble_string (node->str, strlen (node->str) + 1);
28825 }
28826
28827 return 1;
28828 }
28829
28830 /* Output the indexed string table. */
28831
28832 static void
28833 output_indirect_strings (void)
28834 {
28835 switch_to_section (debug_str_section);
28836 if (!dwarf_split_debug_info)
28837 debug_str_hash->traverse<enum dwarf_form,
28838 output_indirect_string> (DW_FORM_strp);
28839 else
28840 {
28841 unsigned int offset = 0;
28842 unsigned int cur_idx = 0;
28843
28844 if (skeleton_debug_str_hash)
28845 skeleton_debug_str_hash->traverse<enum dwarf_form,
28846 output_indirect_string> (DW_FORM_strp);
28847
28848 switch_to_section (debug_str_offsets_section);
28849 /* For DWARF5 the .debug_str_offsets[.dwo] section needs a unit
28850 header. Note that we don't need to generate a label to the
28851 actual index table following the header here, because this is
28852 for the split dwarf case only. In a .dwo file there is only
28853 one string offsets table (and one debug info section). But
28854 if we were to start using string offset tables for the main (or
28855 skeleton) unit, then we would have to add a DW_AT_str_offsets_base
28856 pointing to the actual index after the header. Split dwarf
28857 units will never have a string offsets base attribute. When
28858 a split unit is moved into a .dwp file the string offsets can
28859 be found through the .debug_cu_index section table. */
28860 if (dwarf_version >= 5)
28861 {
28862 unsigned int last_idx = 0;
28863 unsigned long str_offsets_length;
28864
28865 debug_str_hash->traverse_noresize
28866 <unsigned int *, count_index_strings> (&last_idx);
28867 str_offsets_length = last_idx * DWARF_OFFSET_SIZE + 4;
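/* The extra 4 bytes cover the 2-byte version and 2-byte padding fields
   emitted just below; the initial length field itself is not counted in
   the unit length. */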
28868 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
28869 dw2_asm_output_data (4, 0xffffffff,
28870 "Escape value for 64-bit DWARF extension");
28871 dw2_asm_output_data (DWARF_OFFSET_SIZE, str_offsets_length,
28872 "Length of string offsets unit");
28873 dw2_asm_output_data (2, 5, "DWARF string offsets version");
28874 dw2_asm_output_data (2, 0, "Header zero padding");
28875 }
28876 debug_str_hash->traverse_noresize
28877 <unsigned int *, output_index_string_offset> (&offset);
28878 switch_to_section (debug_str_dwo_section);
28879 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
28880 (&cur_idx);
28881 }
28882 }
28883
28884 /* Callback for htab_traverse to assign an index to an entry in the
28885 table, and to write that entry to the .debug_addr section. */
28886
28887 int
28888 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
28889 {
28890 addr_table_entry *entry = *slot;
28891
28892 if (entry->refcount == 0)
28893 {
28894 gcc_assert (entry->index == NO_INDEX_ASSIGNED
28895 || entry->index == NOT_INDEXED);
28896 return 1;
28897 }
28898
28899 gcc_assert (entry->index == *cur_index);
28900 (*cur_index)++;
28901
28902 switch (entry->kind)
28903 {
28904 case ate_kind_rtx:
28905 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
28906 "0x%x", entry->index);
28907 break;
28908 case ate_kind_rtx_dtprel:
28909 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
28910 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
28911 DWARF2_ADDR_SIZE,
28912 entry->addr.rtl);
28913 fputc ('\n', asm_out_file);
28914 break;
28915 case ate_kind_label:
28916 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
28917 "0x%x", entry->index);
28918 break;
28919 default:
28920 gcc_unreachable ();
28921 }
28922 return 1;
28923 }
28924
28925 /* A helper function for dwarf2out_finish. Counts the number
28926 of indexed addresses. Must match the logic of the function
28927 output_addr_table_entry above. */
28928 int
28929 count_index_addrs (addr_table_entry **slot, unsigned int *last_idx)
28930 {
28931 addr_table_entry *entry = *slot;
28932
28933 if (entry->refcount > 0)
28934 *last_idx += 1;
28935 return 1;
28936 }
28937
28938 /* Produce the .debug_addr section. */
28939
28940 static void
28941 output_addr_table (void)
28942 {
28943 unsigned int index = 0;
28944 if (addr_index_table == NULL || addr_index_table->size () == 0)
28945 return;
28946
28947 switch_to_section (debug_addr_section);
28948 addr_index_table
28949 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
28950 }
28951
28952 #if ENABLE_ASSERT_CHECKING
28953 /* Verify that all marks are clear. */
28954
28955 static void
28956 verify_marks_clear (dw_die_ref die)
28957 {
28958 dw_die_ref c;
28959
28960 gcc_assert (! die->die_mark);
28961 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
28962 }
28963 #endif /* ENABLE_ASSERT_CHECKING */
28964
28965 /* Clear the marks for a die and its children.
28966 Don't complain if the mark isn't already set. */
28967
28968 static void
28969 prune_unmark_dies (dw_die_ref die)
28970 {
28971 dw_die_ref c;
28972
28973 if (die->die_mark)
28974 die->die_mark = 0;
28975 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
28976 }
28977
28978 /* Given LOC that is referenced by a DIE we're marking as used, find all
28979 referenced DWARF procedures it references and mark them as used. */
28980
28981 static void
28982 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
28983 {
28984 for (; loc != NULL; loc = loc->dw_loc_next)
28985 switch (loc->dw_loc_opc)
28986 {
28987 case DW_OP_implicit_pointer:
28988 case DW_OP_convert:
28989 case DW_OP_reinterpret:
28990 case DW_OP_GNU_implicit_pointer:
28991 case DW_OP_GNU_convert:
28992 case DW_OP_GNU_reinterpret:
28993 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
28994 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28995 break;
28996 case DW_OP_GNU_variable_value:
28997 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
28998 {
28999 dw_die_ref ref
29000 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29001 if (ref == NULL)
29002 break;
29003 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29004 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29005 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29006 }
29007 /* FALLTHRU */
29008 case DW_OP_call2:
29009 case DW_OP_call4:
29010 case DW_OP_call_ref:
29011 case DW_OP_const_type:
29012 case DW_OP_GNU_const_type:
29013 case DW_OP_GNU_parameter_ref:
29014 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
29015 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
29016 break;
29017 case DW_OP_regval_type:
29018 case DW_OP_deref_type:
29019 case DW_OP_GNU_regval_type:
29020 case DW_OP_GNU_deref_type:
29021 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
29022 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
29023 break;
29024 case DW_OP_entry_value:
29025 case DW_OP_GNU_entry_value:
29026 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
29027 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
29028 break;
29029 default:
29030 break;
29031 }
29032 }
29033
29034 /* Given DIE that we're marking as used, find any other dies
29035 it references as attributes and mark them as used. */
29036
29037 static void
29038 prune_unused_types_walk_attribs (dw_die_ref die)
29039 {
29040 dw_attr_node *a;
29041 unsigned ix;
29042
29043 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29044 {
29045 switch (AT_class (a))
29046 {
29047 /* Make sure DWARF procedures referenced by location descriptions will
29048 get emitted. */
29049 case dw_val_class_loc:
29050 prune_unused_types_walk_loc_descr (AT_loc (a));
29051 break;
29052 case dw_val_class_loc_list:
29053 for (dw_loc_list_ref list = AT_loc_list (a);
29054 list != NULL;
29055 list = list->dw_loc_next)
29056 prune_unused_types_walk_loc_descr (list->expr);
29057 break;
29058
29059 case dw_val_class_view_list:
29060 /* This points to a loc_list in another attribute, so it's
29061 already covered. */
29062 break;
29063
29064 case dw_val_class_die_ref:
29065 /* A reference to another DIE.
29066 Make sure that it will get emitted.
29067 If it was broken out into a comdat group, don't follow it. */
29068 if (! AT_ref (a)->comdat_type_p
29069 || a->dw_attr == DW_AT_specification)
29070 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
29071 break;
29072
29073 case dw_val_class_str:
29074 /* Set the string's refcount to 0 so that prune_unused_types_mark
29075 accounts properly for it. */
29076 a->dw_attr_val.v.val_str->refcount = 0;
29077 break;
29078
29079 default:
29080 break;
29081 }
29082 }
29083 }
29084
29085 /* Mark DIE's children DIEs that describe its generic parameters and arguments. */
29086
29087 static void
29088 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
29089 {
29090 dw_die_ref c;
29091
29092 if (die == NULL || die->die_child == NULL)
29093 return;
29094 c = die->die_child;
29095 do
29096 {
29097 if (is_template_parameter (c))
29098 prune_unused_types_mark (c, 1);
29099 c = c->die_sib;
29100 } while (c && c != die->die_child);
29101 }
29102
29103 /* Mark DIE as being used. If DOKIDS is true, then walk down
29104 to DIE's children. */
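/* As a rough summary of the marking scheme used below (derived from the
   code, so treat it as an informal sketch rather than normative
   documentation): die_mark == 0 means the DIE has not been visited yet,
   die_mark == 1 means the DIE itself is marked as used, and die_mark == 2
   means the DIE is marked and its children have been walked as well.  */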
29105
29106 static void
29107 prune_unused_types_mark (dw_die_ref die, int dokids)
29108 {
29109 dw_die_ref c;
29110
29111 if (die->die_mark == 0)
29112 {
29113 /* We haven't done this node yet. Mark it as used. */
29114 die->die_mark = 1;
29115 /* If this is the DIE of a generic type instantiation,
29116 mark the children DIEs that describe its generic parms and
29117 args. */
29118 prune_unused_types_mark_generic_parms_dies (die);
29119
29120 /* We also have to mark its parents as used.
29121 (But we don't want to mark our parent's kids due to this,
29122 unless it is a class.) */
29123 if (die->die_parent)
29124 prune_unused_types_mark (die->die_parent,
29125 class_scope_p (die->die_parent));
29126
29127 /* Mark any referenced nodes. */
29128 prune_unused_types_walk_attribs (die);
29129
29130 /* If this node is a specification,
29131 also mark the definition, if it exists. */
29132 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
29133 prune_unused_types_mark (die->die_definition, 1);
29134 }
29135
29136 if (dokids && die->die_mark != 2)
29137 {
29138 /* We need to walk the children, but haven't done so yet.
29139 Remember that we've walked the kids. */
29140 die->die_mark = 2;
29141
29142 /* If this is an array type, we need to make sure our
29143 kids get marked, even if they're types. If we're
29144 breaking out types into comdat sections, do this
29145 for all type definitions. */
29146 if (die->die_tag == DW_TAG_array_type
29147 || (use_debug_types
29148 && is_type_die (die) && ! is_declaration_die (die)))
29149 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
29150 else
29151 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29152 }
29153 }
29154
29155 /* For local classes, check whether any static member functions were
29156 emitted and, if so, mark them. */
29157
29158 static void
29159 prune_unused_types_walk_local_classes (dw_die_ref die)
29160 {
29161 dw_die_ref c;
29162
29163 if (die->die_mark == 2)
29164 return;
29165
29166 switch (die->die_tag)
29167 {
29168 case DW_TAG_structure_type:
29169 case DW_TAG_union_type:
29170 case DW_TAG_class_type:
29171 case DW_TAG_interface_type:
29172 break;
29173
29174 case DW_TAG_subprogram:
29175 if (!get_AT_flag (die, DW_AT_declaration)
29176 || die->die_definition != NULL)
29177 prune_unused_types_mark (die, 1);
29178 return;
29179
29180 default:
29181 return;
29182 }
29183
29184 /* Mark children. */
29185 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
29186 }
29187
29188 /* Walk the tree DIE and mark types that we actually use. */
29189
29190 static void
29191 prune_unused_types_walk (dw_die_ref die)
29192 {
29193 dw_die_ref c;
29194
29195 /* Don't do anything if this node is already marked and
29196 children have been marked as well. */
29197 if (die->die_mark == 2)
29198 return;
29199
29200 switch (die->die_tag)
29201 {
29202 case DW_TAG_structure_type:
29203 case DW_TAG_union_type:
29204 case DW_TAG_class_type:
29205 case DW_TAG_interface_type:
29206 if (die->die_perennial_p)
29207 break;
29208
29209 for (c = die->die_parent; c; c = c->die_parent)
29210 if (c->die_tag == DW_TAG_subprogram)
29211 break;
29212
29213 /* Finding used static member functions inside of classes
29214 is needed just for local classes, because for other classes
29215 static member function DIEs with DW_AT_specification
29216 are emitted outside of the DW_TAG_*_type. If we ever change
29217 it, we'd need to call this even for non-local classes. */
29218 if (c)
29219 prune_unused_types_walk_local_classes (die);
29220
29221 /* It's a type node --- don't mark it. */
29222 return;
29223
29224 case DW_TAG_const_type:
29225 case DW_TAG_packed_type:
29226 case DW_TAG_pointer_type:
29227 case DW_TAG_reference_type:
29228 case DW_TAG_rvalue_reference_type:
29229 case DW_TAG_volatile_type:
29230 case DW_TAG_typedef:
29231 case DW_TAG_array_type:
29232 case DW_TAG_friend:
29233 case DW_TAG_enumeration_type:
29234 case DW_TAG_subroutine_type:
29235 case DW_TAG_string_type:
29236 case DW_TAG_set_type:
29237 case DW_TAG_subrange_type:
29238 case DW_TAG_ptr_to_member_type:
29239 case DW_TAG_file_type:
29240 /* Type nodes are useful only when other DIEs reference them --- don't
29241 mark them. */
29242 /* FALLTHROUGH */
29243
29244 case DW_TAG_dwarf_procedure:
29245 /* Likewise for DWARF procedures. */
29246
29247 if (die->die_perennial_p)
29248 break;
29249
29250 return;
29251
29252 default:
29253 /* Mark everything else. */
29254 break;
29255 }
29256
29257 if (die->die_mark == 0)
29258 {
29259 die->die_mark = 1;
29260
29261 /* Now, mark any dies referenced from here. */
29262 prune_unused_types_walk_attribs (die);
29263 }
29264
29265 die->die_mark = 2;
29266
29267 /* Mark children. */
29268 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29269 }
29270
29271 /* Increment the string counts on strings referred to from DIE's
29272 attributes. */
29273
29274 static void
29275 prune_unused_types_update_strings (dw_die_ref die)
29276 {
29277 dw_attr_node *a;
29278 unsigned ix;
29279
29280 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29281 if (AT_class (a) == dw_val_class_str)
29282 {
29283 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29284 s->refcount++;
29285 /* Avoid unnecessarily putting strings that are used less than
29286 twice in the hash table. */
29287 if (s->refcount
29288 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
29289 {
29290 indirect_string_node **slot
29291 = debug_str_hash->find_slot_with_hash (s->str,
29292 htab_hash_string (s->str),
29293 INSERT);
29294 gcc_assert (*slot == NULL);
29295 *slot = s;
29296 }
29297 }
29298 }
29299
29300 /* Mark DIE and its children as removed. */
29301
29302 static void
29303 mark_removed (dw_die_ref die)
29304 {
29305 dw_die_ref c;
29306 die->removed = true;
29307 FOR_EACH_CHILD (die, c, mark_removed (c));
29308 }
29309
29310 /* Remove from the tree DIE any dies that aren't marked. */
29311
29312 static void
29313 prune_unused_types_prune (dw_die_ref die)
29314 {
29315 dw_die_ref c;
29316
29317 gcc_assert (die->die_mark);
29318 prune_unused_types_update_strings (die);
29319
29320 if (! die->die_child)
29321 return;
29322
29323 c = die->die_child;
29324 do {
29325 dw_die_ref prev = c, next;
29326 for (c = c->die_sib; ! c->die_mark; c = next)
29327 if (c == die->die_child)
29328 {
29329 /* No marked children between 'prev' and the end of the list. */
29330 if (prev == c)
29331 /* No marked children at all. */
29332 die->die_child = NULL;
29333 else
29334 {
29335 prev->die_sib = c->die_sib;
29336 die->die_child = prev;
29337 }
29338 c->die_sib = NULL;
29339 mark_removed (c);
29340 return;
29341 }
29342 else
29343 {
29344 next = c->die_sib;
29345 c->die_sib = NULL;
29346 mark_removed (c);
29347 }
29348
29349 if (c != prev->die_sib)
29350 prev->die_sib = c;
29351 prune_unused_types_prune (c);
29352 } while (c != die->die_child);
29353 }
29354
29355 /* Remove dies representing declarations that we never use. */
29356
29357 static void
29358 prune_unused_types (void)
29359 {
29360 unsigned int i;
29361 limbo_die_node *node;
29362 comdat_type_node *ctnode;
29363 pubname_entry *pub;
29364 dw_die_ref base_type;
29365
29366 #if ENABLE_ASSERT_CHECKING
29367 /* All the marks should already be clear. */
29368 verify_marks_clear (comp_unit_die ());
29369 for (node = limbo_die_list; node; node = node->next)
29370 verify_marks_clear (node->die);
29371 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29372 verify_marks_clear (ctnode->root_die);
29373 #endif /* ENABLE_ASSERT_CHECKING */
29374
29375 /* Mark types that are used in global variables. */
29376 premark_types_used_by_global_vars ();
29377
29378 /* Set the mark on nodes that are actually used. */
29379 prune_unused_types_walk (comp_unit_die ());
29380 for (node = limbo_die_list; node; node = node->next)
29381 prune_unused_types_walk (node->die);
29382 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29383 {
29384 prune_unused_types_walk (ctnode->root_die);
29385 prune_unused_types_mark (ctnode->type_die, 1);
29386 }
29387
29388 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29389 are unusual in that they are pubnames that are the children of pubtypes.
29390 They should only be marked via their parent DW_TAG_enumeration_type die,
29391 not as roots in themselves. */
29392 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29393 if (pub->die->die_tag != DW_TAG_enumerator)
29394 prune_unused_types_mark (pub->die, 1);
29395 for (i = 0; base_types.iterate (i, &base_type); i++)
29396 prune_unused_types_mark (base_type, 1);
29397
29398 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
29399 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
29400 callees). */
29401 cgraph_node *cnode;
29402 FOR_EACH_FUNCTION (cnode)
29403 if (cnode->referred_to_p (false))
29404 {
29405 dw_die_ref die = lookup_decl_die (cnode->decl);
29406 if (die == NULL || die->die_mark)
29407 continue;
29408 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29409 if (e->caller != cnode
29410 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
29411 {
29412 prune_unused_types_mark (die, 1);
29413 break;
29414 }
29415 }
29416
29417 if (debug_str_hash)
29418 debug_str_hash->empty ();
29419 if (skeleton_debug_str_hash)
29420 skeleton_debug_str_hash->empty ();
29421 prune_unused_types_prune (comp_unit_die ());
29422 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29423 {
29424 node = *pnode;
29425 if (!node->die->die_mark)
29426 *pnode = node->next;
29427 else
29428 {
29429 prune_unused_types_prune (node->die);
29430 pnode = &node->next;
29431 }
29432 }
29433 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29434 prune_unused_types_prune (ctnode->root_die);
29435
29436 /* Leave the marks clear. */
29437 prune_unmark_dies (comp_unit_die ());
29438 for (node = limbo_die_list; node; node = node->next)
29439 prune_unmark_dies (node->die);
29440 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29441 prune_unmark_dies (ctnode->root_die);
29442 }
29443
29444 /* Helpers to manipulate hash table of comdat type units. */
29445
29446 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29447 {
29448 static inline hashval_t hash (const comdat_type_node *);
29449 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29450 };
29451
29452 inline hashval_t
29453 comdat_type_hasher::hash (const comdat_type_node *type_node)
29454 {
29455 hashval_t h;
29456 memcpy (&h, type_node->signature, sizeof (h));
29457 return h;
29458 }
29459
29460 inline bool
29461 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
29462 const comdat_type_node *type_node_2)
29463 {
29464 return (! memcmp (type_node_1->signature, type_node_2->signature,
29465 DWARF_TYPE_SIGNATURE_SIZE));
29466 }
29467
29468 /* Move a DW_AT_{,MIPS_}linkage_name attribute that was just added to DIE
29469 to the location where it would have been added had we known its
29470 DECL_ASSEMBLER_NAME when we added the other attributes. This should
29471 improve the compactness of the debug info by removing otherwise-equivalent
29472 abbrevs, and hide any differences caused by deferring the computation
29473 of the assembler name, triggered by e.g. PCH. */
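/* For instance (a hypothetical attribute layout, not taken from real
   output): if the DIE's attributes are
     DW_AT_name, DW_AT_decl_file, DW_AT_decl_line, DW_AT_type, DW_AT_external
   and DW_AT_linkage_name has just been pushed at the end, it is re-inserted
   right after DW_AT_decl_line, giving
     DW_AT_name, DW_AT_decl_file, DW_AT_decl_line, DW_AT_linkage_name,
     DW_AT_type, DW_AT_external
   i.e. the position it would have had if the assembler name had been known
   up front.  */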
29474
29475 static inline void
29476 move_linkage_attr (dw_die_ref die)
29477 {
29478 unsigned ix = vec_safe_length (die->die_attr);
29479 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29480
29481 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29482 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29483
29484 while (--ix > 0)
29485 {
29486 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29487
29488 if (prev->dw_attr == DW_AT_decl_line
29489 || prev->dw_attr == DW_AT_decl_column
29490 || prev->dw_attr == DW_AT_name)
29491 break;
29492 }
29493
29494 if (ix != vec_safe_length (die->die_attr) - 1)
29495 {
29496 die->die_attr->pop ();
29497 die->die_attr->quick_insert (ix, linkage);
29498 }
29499 }
29500
29501 /* Helper function for resolve_addr: mark DW_TAG_base_type nodes
29502 referenced from typed stack ops and count how often they are used. */
29503
29504 static void
29505 mark_base_types (dw_loc_descr_ref loc)
29506 {
29507 dw_die_ref base_type = NULL;
29508
29509 for (; loc; loc = loc->dw_loc_next)
29510 {
29511 switch (loc->dw_loc_opc)
29512 {
29513 case DW_OP_regval_type:
29514 case DW_OP_deref_type:
29515 case DW_OP_GNU_regval_type:
29516 case DW_OP_GNU_deref_type:
29517 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29518 break;
29519 case DW_OP_convert:
29520 case DW_OP_reinterpret:
29521 case DW_OP_GNU_convert:
29522 case DW_OP_GNU_reinterpret:
29523 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29524 continue;
29525 /* FALLTHRU */
29526 case DW_OP_const_type:
29527 case DW_OP_GNU_const_type:
29528 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29529 break;
29530 case DW_OP_entry_value:
29531 case DW_OP_GNU_entry_value:
29532 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29533 continue;
29534 default:
29535 continue;
29536 }
29537 gcc_assert (base_type->die_parent == comp_unit_die ());
29538 if (base_type->die_mark)
29539 base_type->die_mark++;
29540 else
29541 {
29542 base_types.safe_push (base_type);
29543 base_type->die_mark = 1;
29544 }
29545 }
29546 }
29547
29548 /* Comparison function for sorting marked base types. */
29549
29550 static int
29551 base_type_cmp (const void *x, const void *y)
29552 {
29553 dw_die_ref dx = *(const dw_die_ref *) x;
29554 dw_die_ref dy = *(const dw_die_ref *) y;
29555 unsigned int byte_size1, byte_size2;
29556 unsigned int encoding1, encoding2;
29557 unsigned int align1, align2;
29558 if (dx->die_mark > dy->die_mark)
29559 return -1;
29560 if (dx->die_mark < dy->die_mark)
29561 return 1;
29562 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29563 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29564 if (byte_size1 < byte_size2)
29565 return 1;
29566 if (byte_size1 > byte_size2)
29567 return -1;
29568 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29569 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29570 if (encoding1 < encoding2)
29571 return 1;
29572 if (encoding1 > encoding2)
29573 return -1;
29574 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29575 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29576 if (align1 < align2)
29577 return 1;
29578 if (align1 > align2)
29579 return -1;
29580 return 0;
29581 }
29582
29583 /* Move base types marked by mark_base_types as early as possible
29584 in the CU, sorted by decreasing usage count both to make the
29585 uleb128 references as small as possible and to make sure they
29586 will have die_offset already computed by calc_die_sizes when
29587 the sizes of typed stack loc ops are computed. */
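/* As an illustration (hypothetical usage counts, not from real output): if
   the DW_TAG_base_type DIE for "int" was referenced by 12 typed stack ops
   and the one for "char" by 3, "int" is placed before "char" among the first
   children of the CU, so the more frequent uleb128 DIE references stay as
   short as possible.  */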
29588
29589 static void
29590 move_marked_base_types (void)
29591 {
29592 unsigned int i;
29593 dw_die_ref base_type, die, c;
29594
29595 if (base_types.is_empty ())
29596 return;
29597
29598 /* Sort by decreasing usage count, they will be added again in that
29599 order later on. */
29600 base_types.qsort (base_type_cmp);
29601 die = comp_unit_die ();
29602 c = die->die_child;
29603 do
29604 {
29605 dw_die_ref prev = c;
29606 c = c->die_sib;
29607 while (c->die_mark)
29608 {
29609 remove_child_with_prev (c, prev);
29610 /* As base types got marked, there must be at least
29611 one node other than DW_TAG_base_type. */
29612 gcc_assert (die->die_child != NULL);
29613 c = prev->die_sib;
29614 }
29615 }
29616 while (c != die->die_child);
29617 gcc_assert (die->die_child);
29618 c = die->die_child;
29619 for (i = 0; base_types.iterate (i, &base_type); i++)
29620 {
29621 base_type->die_mark = 0;
29622 base_type->die_sib = c->die_sib;
29623 c->die_sib = base_type;
29624 c = base_type;
29625 }
29626 }
29627
29628 /* Helper function for resolve_addr: attempt to resolve
29629 one CONST_STRING and return true if successful. Similarly verify that
29630 SYMBOL_REFs refer to variables emitted in the current CU. */
29631
29632 static bool
29633 resolve_one_addr (rtx *addr)
29634 {
29635 rtx rtl = *addr;
29636
29637 if (GET_CODE (rtl) == CONST_STRING)
29638 {
29639 size_t len = strlen (XSTR (rtl, 0)) + 1;
29640 tree t = build_string (len, XSTR (rtl, 0));
29641 tree tlen = size_int (len - 1);
29642 TREE_TYPE (t)
29643 = build_array_type (char_type_node, build_index_type (tlen));
29644 rtl = lookup_constant_def (t);
29645 if (!rtl || !MEM_P (rtl))
29646 return false;
29647 rtl = XEXP (rtl, 0);
29648 if (GET_CODE (rtl) == SYMBOL_REF
29649 && SYMBOL_REF_DECL (rtl)
29650 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29651 return false;
29652 vec_safe_push (used_rtx_array, rtl);
29653 *addr = rtl;
29654 return true;
29655 }
29656
29657 if (GET_CODE (rtl) == SYMBOL_REF
29658 && SYMBOL_REF_DECL (rtl))
29659 {
29660 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29661 {
29662 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29663 return false;
29664 }
29665 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29666 return false;
29667 }
29668
29669 if (GET_CODE (rtl) == CONST)
29670 {
29671 subrtx_ptr_iterator::array_type array;
29672 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29673 if (!resolve_one_addr (*iter))
29674 return false;
29675 }
29676
29677 return true;
29678 }
29679
29680 /* For a STRING_CST, return the SYMBOL_REF of its constant pool entry,
29681 if possible, and create a DW_TAG_dwarf_procedure DIE that can be referenced
29682 from DW_OP_implicit_pointer if the string hasn't been seen yet. */
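/* A rough sketch of the resulting DIE (hypothetical, for a STRING_CST "abc"
   whose TREE_STRING_LENGTH is 4, including the terminating NUL):
     DW_TAG_dwarf_procedure
       DW_AT_location: DW_OP_implicit_value 4 [ 'a' 'b' 'c' '\0' ]
   so a later DW_OP_implicit_pointer can refer to the string contents even
   when the literal itself never gets a resolvable address.  */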
29683
29684 static rtx
29685 string_cst_pool_decl (tree t)
29686 {
29687 rtx rtl = output_constant_def (t, 1);
29688 unsigned char *array;
29689 dw_loc_descr_ref l;
29690 tree decl;
29691 size_t len;
29692 dw_die_ref ref;
29693
29694 if (!rtl || !MEM_P (rtl))
29695 return NULL_RTX;
29696 rtl = XEXP (rtl, 0);
29697 if (GET_CODE (rtl) != SYMBOL_REF
29698 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29699 return NULL_RTX;
29700
29701 decl = SYMBOL_REF_DECL (rtl);
29702 if (!lookup_decl_die (decl))
29703 {
29704 len = TREE_STRING_LENGTH (t);
29705 vec_safe_push (used_rtx_array, rtl);
29706 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29707 array = ggc_vec_alloc<unsigned char> (len);
29708 memcpy (array, TREE_STRING_POINTER (t), len);
29709 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29710 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29711 l->dw_loc_oprnd2.v.val_vec.length = len;
29712 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29713 l->dw_loc_oprnd2.v.val_vec.array = array;
29714 add_AT_loc (ref, DW_AT_location, l);
29715 equate_decl_number_to_die (decl, ref);
29716 }
29717 return rtl;
29718 }
29719
29720 /* Helper function of resolve_addr_in_expr. LOC is
29721 a DW_OP_addr followed by DW_OP_stack_value, either at the start
29722 of the exprloc or after DW_OP_{,bit_}piece, whose val_addr can't be
29723 resolved. Replace it (both the DW_OP_addr and the DW_OP_stack_value)
29724 with DW_OP_implicit_pointer if possible and return true;
29725 if unsuccessful, return false. */
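/* For example (hypothetical symbol names): an unresolvable
     DW_OP_addr <v + 8>  DW_OP_stack_value
   is rewritten, when the DIE for "v" carries DW_AT_location or
   DW_AT_const_value, into
     DW_OP_implicit_pointer <DIE of v> 8
   so the consumer can still present the pointed-to value.  */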
29726
29727 static bool
29728 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29729 {
29730 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29731 HOST_WIDE_INT offset = 0;
29732 dw_die_ref ref = NULL;
29733 tree decl;
29734
29735 if (GET_CODE (rtl) == CONST
29736 && GET_CODE (XEXP (rtl, 0)) == PLUS
29737 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29738 {
29739 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29740 rtl = XEXP (XEXP (rtl, 0), 0);
29741 }
29742 if (GET_CODE (rtl) == CONST_STRING)
29743 {
29744 size_t len = strlen (XSTR (rtl, 0)) + 1;
29745 tree t = build_string (len, XSTR (rtl, 0));
29746 tree tlen = size_int (len - 1);
29747
29748 TREE_TYPE (t)
29749 = build_array_type (char_type_node, build_index_type (tlen));
29750 rtl = string_cst_pool_decl (t);
29751 if (!rtl)
29752 return false;
29753 }
29754 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
29755 {
29756 decl = SYMBOL_REF_DECL (rtl);
29757 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
29758 {
29759 ref = lookup_decl_die (decl);
29760 if (ref && (get_AT (ref, DW_AT_location)
29761 || get_AT (ref, DW_AT_const_value)))
29762 {
29763 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
29764 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29765 loc->dw_loc_oprnd1.val_entry = NULL;
29766 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29767 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29768 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29769 loc->dw_loc_oprnd2.v.val_int = offset;
29770 return true;
29771 }
29772 }
29773 }
29774 return false;
29775 }
29776
29777 /* Helper function for resolve_addr: handle one location
29778 expression. Return false if at least one CONST_STRING or SYMBOL_REF in
29779 the location list couldn't be resolved. */
29780
29781 static bool
29782 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
29783 {
29784 dw_loc_descr_ref keep = NULL;
29785 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
29786 switch (loc->dw_loc_opc)
29787 {
29788 case DW_OP_addr:
29789 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29790 {
29791 if ((prev == NULL
29792 || prev->dw_loc_opc == DW_OP_piece
29793 || prev->dw_loc_opc == DW_OP_bit_piece)
29794 && loc->dw_loc_next
29795 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
29796 && (!dwarf_strict || dwarf_version >= 5)
29797 && optimize_one_addr_into_implicit_ptr (loc))
29798 break;
29799 return false;
29800 }
29801 break;
29802 case DW_OP_GNU_addr_index:
29803 case DW_OP_addrx:
29804 case DW_OP_GNU_const_index:
29805 case DW_OP_constx:
29806 if ((loc->dw_loc_opc == DW_OP_GNU_addr_index
29807 || loc->dw_loc_opc == DW_OP_addrx)
29808 || ((loc->dw_loc_opc == DW_OP_GNU_const_index
29809 || loc->dw_loc_opc == DW_OP_constx)
29810 && loc->dtprel))
29811 {
29812 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
29813 if (!resolve_one_addr (&rtl))
29814 return false;
29815 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
29816 loc->dw_loc_oprnd1.val_entry
29817 = add_addr_table_entry (rtl, ate_kind_rtx);
29818 }
29819 break;
29820 case DW_OP_const4u:
29821 case DW_OP_const8u:
29822 if (loc->dtprel
29823 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29824 return false;
29825 break;
29826 case DW_OP_plus_uconst:
29827 if (size_of_loc_descr (loc)
29828 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
29829 + 1
29830 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
29831 {
29832 dw_loc_descr_ref repl
29833 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
29834 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
29835 add_loc_descr (&repl, loc->dw_loc_next);
29836 *loc = *repl;
29837 }
29838 break;
29839 case DW_OP_implicit_value:
29840 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
29841 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
29842 return false;
29843 break;
29844 case DW_OP_implicit_pointer:
29845 case DW_OP_GNU_implicit_pointer:
29846 case DW_OP_GNU_parameter_ref:
29847 case DW_OP_GNU_variable_value:
29848 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29849 {
29850 dw_die_ref ref
29851 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29852 if (ref == NULL)
29853 return false;
29854 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29855 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29856 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29857 }
29858 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
29859 {
29860 if (prev == NULL
29861 && loc->dw_loc_next == NULL
29862 && AT_class (a) == dw_val_class_loc)
29863 switch (a->dw_attr)
29864 {
29865 /* The following attributes allow both exprloc and reference,
29866 so if the whole expression is DW_OP_GNU_variable_value
29867 alone we can transform it into a reference. */
29868 case DW_AT_byte_size:
29869 case DW_AT_bit_size:
29870 case DW_AT_lower_bound:
29871 case DW_AT_upper_bound:
29872 case DW_AT_bit_stride:
29873 case DW_AT_count:
29874 case DW_AT_allocated:
29875 case DW_AT_associated:
29876 case DW_AT_byte_stride:
29877 a->dw_attr_val.val_class = dw_val_class_die_ref;
29878 a->dw_attr_val.val_entry = NULL;
29879 a->dw_attr_val.v.val_die_ref.die
29880 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29881 a->dw_attr_val.v.val_die_ref.external = 0;
29882 return true;
29883 default:
29884 break;
29885 }
29886 if (dwarf_strict)
29887 return false;
29888 }
29889 break;
29890 case DW_OP_const_type:
29891 case DW_OP_regval_type:
29892 case DW_OP_deref_type:
29893 case DW_OP_convert:
29894 case DW_OP_reinterpret:
29895 case DW_OP_GNU_const_type:
29896 case DW_OP_GNU_regval_type:
29897 case DW_OP_GNU_deref_type:
29898 case DW_OP_GNU_convert:
29899 case DW_OP_GNU_reinterpret:
29900 while (loc->dw_loc_next
29901 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
29902 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
29903 {
29904 dw_die_ref base1, base2;
29905 unsigned enc1, enc2, size1, size2;
29906 if (loc->dw_loc_opc == DW_OP_regval_type
29907 || loc->dw_loc_opc == DW_OP_deref_type
29908 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29909 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29910 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
29911 else if (loc->dw_loc_oprnd1.val_class
29912 == dw_val_class_unsigned_const)
29913 break;
29914 else
29915 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29916 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
29917 == dw_val_class_unsigned_const)
29918 break;
29919 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
29920 gcc_assert (base1->die_tag == DW_TAG_base_type
29921 && base2->die_tag == DW_TAG_base_type);
29922 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
29923 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
29924 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
29925 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
29926 if (size1 == size2
29927 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
29928 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
29929 && loc != keep)
29930 || enc1 == enc2))
29931 {
29932 /* Optimize away next DW_OP_convert after
29933 adjusting LOC's base type die reference. */
29934 if (loc->dw_loc_opc == DW_OP_regval_type
29935 || loc->dw_loc_opc == DW_OP_deref_type
29936 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29937 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29938 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
29939 else
29940 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
29941 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29942 continue;
29943 }
29944 /* Don't change integer DW_OP_convert after e.g. floating
29945 point typed stack entry. */
29946 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
29947 keep = loc->dw_loc_next;
29948 break;
29949 }
29950 break;
29951 default:
29952 break;
29953 }
29954 return true;
29955 }
29956
29957 /* Helper function of resolve_addr. DIE had a DW_AT_location consisting
29958 of a lone DW_OP_addr whose operand referred to DECL, and that
29959 DW_OP_addr couldn't be resolved. resolve_addr has already
29960 removed the DW_AT_location attribute. This function attempts to
29961 add a new DW_AT_location attribute with DW_OP_implicit_pointer,
29962 or a DW_AT_const_value attribute, if possible. */
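/* A sketch (hypothetical declarations): for a static pointer
     static char buf[16];
     static char *p = buf + 4;
   whose own location was optimized away, the DIE for "p" can get
     DW_AT_location: DW_OP_implicit_pointer <DIE of buf> 4
   provided the DIE for "buf" has DW_AT_location or DW_AT_const_value.  */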
29963
29964 static void
29965 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
29966 {
29967 if (!VAR_P (decl)
29968 || lookup_decl_die (decl) != die
29969 || DECL_EXTERNAL (decl)
29970 || !TREE_STATIC (decl)
29971 || DECL_INITIAL (decl) == NULL_TREE
29972 || DECL_P (DECL_INITIAL (decl))
29973 || get_AT (die, DW_AT_const_value))
29974 return;
29975
29976 tree init = DECL_INITIAL (decl);
29977 HOST_WIDE_INT offset = 0;
29978 /* For variables that have been optimized away and thus
29979 don't have a memory location, see if we can emit
29980 DW_AT_const_value instead. */
29981 if (tree_add_const_value_attribute (die, init))
29982 return;
29983 if (dwarf_strict && dwarf_version < 5)
29984 return;
29985 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
29986 and ADDR_EXPR refers to a decl that has DW_AT_location or
29987 DW_AT_const_value (but isn't addressable, otherwise
29988 resolving the original DW_OP_addr wouldn't fail), see if
29989 we can add DW_OP_implicit_pointer. */
29990 STRIP_NOPS (init);
29991 if (TREE_CODE (init) == POINTER_PLUS_EXPR
29992 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
29993 {
29994 offset = tree_to_shwi (TREE_OPERAND (init, 1));
29995 init = TREE_OPERAND (init, 0);
29996 STRIP_NOPS (init);
29997 }
29998 if (TREE_CODE (init) != ADDR_EXPR)
29999 return;
30000 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
30001 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
30002 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
30003 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
30004 && TREE_OPERAND (init, 0) != decl))
30005 {
30006 dw_die_ref ref;
30007 dw_loc_descr_ref l;
30008
30009 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
30010 {
30011 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
30012 if (!rtl)
30013 return;
30014 decl = SYMBOL_REF_DECL (rtl);
30015 }
30016 else
30017 decl = TREE_OPERAND (init, 0);
30018 ref = lookup_decl_die (decl);
30019 if (ref == NULL
30020 || (!get_AT (ref, DW_AT_location)
30021 && !get_AT (ref, DW_AT_const_value)))
30022 return;
30023 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
30024 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30025 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
30026 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30027 add_AT_loc (die, DW_AT_location, l);
30028 }
30029 }
30030
30031 /* Return NULL if L is a valid DWARF expression; otherwise return the
30032 first op that is not a valid DWARF expression op. */
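/* For instance, for "DW_OP_fbreg -16, DW_OP_stack_value" this returns the
   DW_OP_stack_value op, while for "DW_OP_fbreg -16, DW_OP_deref" it returns
   NULL, since DW_OP_deref is an ordinary DWARF expression op.  (Illustrative
   only.)  */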
30033
30034 static dw_loc_descr_ref
30035 non_dwarf_expression (dw_loc_descr_ref l)
30036 {
30037 while (l)
30038 {
30039 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30040 return l;
30041 switch (l->dw_loc_opc)
30042 {
30043 case DW_OP_regx:
30044 case DW_OP_implicit_value:
30045 case DW_OP_stack_value:
30046 case DW_OP_implicit_pointer:
30047 case DW_OP_GNU_implicit_pointer:
30048 case DW_OP_GNU_parameter_ref:
30049 case DW_OP_piece:
30050 case DW_OP_bit_piece:
30051 return l;
30052 default:
30053 break;
30054 }
30055 l = l->dw_loc_next;
30056 }
30057 return NULL;
30058 }
30059
30060 /* Return an adjusted copy of EXPR:
30061 If it is an empty DWARF expression, return it.
30062 If it is a valid non-empty DWARF expression,
30063 return a copy of EXPR with DW_OP_deref appended to it.
30064 If it is a DWARF expression followed by DW_OP_reg{N,x}, return a
30065 copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
30066 If it is a DWARF expression followed by DW_OP_stack_value, return a
30067 copy of the DWARF expression without anything appended.
30068 Otherwise, return NULL. */
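/* Illustrative transformations (a sketch, not from real output):
     DW_OP_fbreg -16                      =>  DW_OP_fbreg -16, DW_OP_deref
     DW_OP_reg3                           =>  DW_OP_breg3 0
     DW_OP_fbreg -16, DW_OP_stack_value   =>  DW_OP_fbreg -16
     DW_OP_fbreg -16, DW_OP_piece 4       =>  NULL (not handled here)  */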
30069
30070 static dw_loc_descr_ref
30071 copy_deref_exprloc (dw_loc_descr_ref expr)
30072 {
30073 dw_loc_descr_ref tail = NULL;
30074
30075 if (expr == NULL)
30076 return NULL;
30077
30078 dw_loc_descr_ref l = non_dwarf_expression (expr);
30079 if (l && l->dw_loc_next)
30080 return NULL;
30081
30082 if (l)
30083 {
30084 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30085 tail = new_loc_descr ((enum dwarf_location_atom)
30086 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
30087 0, 0);
30088 else
30089 switch (l->dw_loc_opc)
30090 {
30091 case DW_OP_regx:
30092 tail = new_loc_descr (DW_OP_bregx,
30093 l->dw_loc_oprnd1.v.val_unsigned, 0);
30094 break;
30095 case DW_OP_stack_value:
30096 break;
30097 default:
30098 return NULL;
30099 }
30100 }
30101 else
30102 tail = new_loc_descr (DW_OP_deref, 0, 0);
30103
30104 dw_loc_descr_ref ret = NULL, *p = &ret;
30105 while (expr != l)
30106 {
30107 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
30108 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
30109 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
30110 p = &(*p)->dw_loc_next;
30111 expr = expr->dw_loc_next;
30112 }
30113 *p = tail;
30114 return ret;
30115 }
30116
30117 /* For DW_AT_string_length attribute with DW_OP_GNU_variable_value
30118 reference to a variable or argument, adjust it if needed and return:
30119 -1 if the DW_AT_string_length attribute and, if present, the
30120 DW_AT_{string_length_,}byte_size attribute should be removed,
30121 0 if the attribute should be kept, perhaps with minor modifications (no need to rescan),
30122 1 if the attribute has been successfully adjusted. */
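/* For example (hypothetical Fortran-style length variable "len"):
     DW_AT_string_length: DW_OP_GNU_variable_value <len>
   typically becomes
     DW_AT_string_length: DW_OP_call4 <DIE of len>  DW_OP_deref
   while the DW_OP_GNU_variable_value DW_OP_stack_value form becomes a plain
   DW_OP_call4 <DIE of len>, or, for DWARF 5, a direct DIE reference.  */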
30123
30124 static int
30125 optimize_string_length (dw_attr_node *a)
30126 {
30127 dw_loc_descr_ref l = AT_loc (a), lv;
30128 dw_die_ref die;
30129 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30130 {
30131 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
30132 die = lookup_decl_die (decl);
30133 if (die)
30134 {
30135 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30136 l->dw_loc_oprnd1.v.val_die_ref.die = die;
30137 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30138 }
30139 else
30140 return -1;
30141 }
30142 else
30143 die = l->dw_loc_oprnd1.v.val_die_ref.die;
30144
30145 /* DWARF5 allows reference class, so we can then reference the DIE.
30146 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
30147 if (l->dw_loc_next != NULL && dwarf_version >= 5)
30148 {
30149 a->dw_attr_val.val_class = dw_val_class_die_ref;
30150 a->dw_attr_val.val_entry = NULL;
30151 a->dw_attr_val.v.val_die_ref.die = die;
30152 a->dw_attr_val.v.val_die_ref.external = 0;
30153 return 0;
30154 }
30155
30156 dw_attr_node *av = get_AT (die, DW_AT_location);
30157 dw_loc_list_ref d;
30158 bool non_dwarf_expr = false;
30159
30160 if (av == NULL)
30161 return dwarf_strict ? -1 : 0;
30162 switch (AT_class (av))
30163 {
30164 case dw_val_class_loc_list:
30165 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30166 if (d->expr && non_dwarf_expression (d->expr))
30167 non_dwarf_expr = true;
30168 break;
30169 case dw_val_class_view_list:
30170 gcc_unreachable ();
30171 case dw_val_class_loc:
30172 lv = AT_loc (av);
30173 if (lv == NULL)
30174 return dwarf_strict ? -1 : 0;
30175 if (non_dwarf_expression (lv))
30176 non_dwarf_expr = true;
30177 break;
30178 default:
30179 return dwarf_strict ? -1 : 0;
30180 }
30181
30182 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
30183 into DW_OP_call4 or DW_OP_GNU_variable_value into
30184 DW_OP_call4 DW_OP_deref, do so. */
30185 if (!non_dwarf_expr
30186 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
30187 {
30188 l->dw_loc_opc = DW_OP_call4;
30189 if (l->dw_loc_next)
30190 l->dw_loc_next = NULL;
30191 else
30192 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
30193 return 0;
30194 }
30195
30196 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
30197 copy over the DW_AT_location attribute from die to a. */
30198 if (l->dw_loc_next != NULL)
30199 {
30200 a->dw_attr_val = av->dw_attr_val;
30201 return 1;
30202 }
30203
30204 dw_loc_list_ref list, *p;
30205 switch (AT_class (av))
30206 {
30207 case dw_val_class_loc_list:
30208 p = &list;
30209 list = NULL;
30210 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30211 {
30212 lv = copy_deref_exprloc (d->expr);
30213 if (lv)
30214 {
30215 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
30216 p = &(*p)->dw_loc_next;
30217 }
30218 else if (!dwarf_strict && d->expr)
30219 return 0;
30220 }
30221 if (list == NULL)
30222 return dwarf_strict ? -1 : 0;
30223 a->dw_attr_val.val_class = dw_val_class_loc_list;
30224 gen_llsym (list);
30225 *AT_loc_list_ptr (a) = list;
30226 return 1;
30227 case dw_val_class_loc:
30228 lv = copy_deref_exprloc (AT_loc (av));
30229 if (lv == NULL)
30230 return dwarf_strict ? -1 : 0;
30231 a->dw_attr_val.v.val_loc = lv;
30232 return 1;
30233 default:
30234 gcc_unreachable ();
30235 }
30236 }
30237
30238 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30239 an address in the .rodata section if the string literal is emitted there,
30240 or else remove the containing location list, or replace DW_AT_const_value
30241 with DW_AT_location and an empty location expression, if it isn't found
30242 in .rodata. Similarly for SYMBOL_REFs, keep only those that refer
30243 to something that has been emitted in the current CU. */
30244
30245 static void
30246 resolve_addr (dw_die_ref die)
30247 {
30248 dw_die_ref c;
30249 dw_attr_node *a;
30250 dw_loc_list_ref *curr, *start, loc;
30251 unsigned ix;
30252 bool remove_AT_byte_size = false;
30253
30254 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30255 switch (AT_class (a))
30256 {
30257 case dw_val_class_loc_list:
30258 start = curr = AT_loc_list_ptr (a);
30259 loc = *curr;
30260 gcc_assert (loc);
30261 /* The same list can be referenced more than once. See if we have
30262 already recorded the result from a previous pass. */
30263 if (loc->replaced)
30264 *curr = loc->dw_loc_next;
30265 else if (!loc->resolved_addr)
30266 {
30267 /* As things stand, we do not expect or allow one die to
30268 reference a suffix of another die's location list chain.
30269 References must be identical or completely separate.
30270 There is therefore no need to cache the result of this
30271 pass on any list other than the first; doing so
30272 would lead to unnecessary writes. */
30273 while (*curr)
30274 {
30275 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30276 if (!resolve_addr_in_expr (a, (*curr)->expr))
30277 {
30278 dw_loc_list_ref next = (*curr)->dw_loc_next;
30279 dw_loc_descr_ref l = (*curr)->expr;
30280
30281 if (next && (*curr)->ll_symbol)
30282 {
30283 gcc_assert (!next->ll_symbol);
30284 next->ll_symbol = (*curr)->ll_symbol;
30285 next->vl_symbol = (*curr)->vl_symbol;
30286 }
30287 if (dwarf_split_debug_info)
30288 remove_loc_list_addr_table_entries (l);
30289 *curr = next;
30290 }
30291 else
30292 {
30293 mark_base_types ((*curr)->expr);
30294 curr = &(*curr)->dw_loc_next;
30295 }
30296 }
30297 if (loc == *start)
30298 loc->resolved_addr = 1;
30299 else
30300 {
30301 loc->replaced = 1;
30302 loc->dw_loc_next = *start;
30303 }
30304 }
30305 if (!*start)
30306 {
30307 remove_AT (die, a->dw_attr);
30308 ix--;
30309 }
30310 break;
30311 case dw_val_class_view_list:
30312 {
30313 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30314 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30315 dw_val_node *llnode
30316 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30317 /* If we no longer have a loclist, or it no longer needs
30318 views, drop this attribute. */
30319 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30320 {
30321 remove_AT (die, a->dw_attr);
30322 ix--;
30323 }
30324 break;
30325 }
30326 case dw_val_class_loc:
30327 {
30328 dw_loc_descr_ref l = AT_loc (a);
30329 /* DW_OP_GNU_variable_value DW_OP_stack_value or
30330 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30331 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
30332 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
30333 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30334 with DW_FORM_ref referencing the same DIE as
30335 DW_OP_GNU_variable_value used to reference. */
30336 if (a->dw_attr == DW_AT_string_length
30337 && l
30338 && l->dw_loc_opc == DW_OP_GNU_variable_value
30339 && (l->dw_loc_next == NULL
30340 || (l->dw_loc_next->dw_loc_next == NULL
30341 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30342 {
30343 switch (optimize_string_length (a))
30344 {
30345 case -1:
30346 remove_AT (die, a->dw_attr);
30347 ix--;
30348 /* If we drop DW_AT_string_length, we need to drop also
30349 DW_AT_{string_length_,}byte_size. */
30350 remove_AT_byte_size = true;
30351 continue;
30352 default:
30353 break;
30354 case 1:
30355 /* Even if we keep the optimized DW_AT_string_length,
30356 it might have changed AT_class, so process it again. */
30357 ix--;
30358 continue;
30359 }
30360 }
30361 /* For -gdwarf-2 don't attempt to optimize
30362 DW_AT_data_member_location containing
30363 DW_OP_plus_uconst - older consumers might
30364 rely on it being that op instead of a more complex,
30365 but shorter, location description. */
30366 if ((dwarf_version > 2
30367 || a->dw_attr != DW_AT_data_member_location
30368 || l == NULL
30369 || l->dw_loc_opc != DW_OP_plus_uconst
30370 || l->dw_loc_next != NULL)
30371 && !resolve_addr_in_expr (a, l))
30372 {
30373 if (dwarf_split_debug_info)
30374 remove_loc_list_addr_table_entries (l);
30375 if (l != NULL
30376 && l->dw_loc_next == NULL
30377 && l->dw_loc_opc == DW_OP_addr
30378 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30379 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30380 && a->dw_attr == DW_AT_location)
30381 {
30382 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30383 remove_AT (die, a->dw_attr);
30384 ix--;
30385 optimize_location_into_implicit_ptr (die, decl);
30386 break;
30387 }
30388 if (a->dw_attr == DW_AT_string_length)
30389 /* If we drop DW_AT_string_length, we need to drop also
30390 DW_AT_{string_length_,}byte_size. */
30391 remove_AT_byte_size = true;
30392 remove_AT (die, a->dw_attr);
30393 ix--;
30394 }
30395 else
30396 mark_base_types (l);
30397 }
30398 break;
30399 case dw_val_class_addr:
30400 if (a->dw_attr == DW_AT_const_value
30401 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30402 {
30403 if (AT_index (a) != NOT_INDEXED)
30404 remove_addr_table_entry (a->dw_attr_val.val_entry);
30405 remove_AT (die, a->dw_attr);
30406 ix--;
30407 }
30408 if ((die->die_tag == DW_TAG_call_site
30409 && a->dw_attr == DW_AT_call_origin)
30410 || (die->die_tag == DW_TAG_GNU_call_site
30411 && a->dw_attr == DW_AT_abstract_origin))
30412 {
30413 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30414 dw_die_ref tdie = lookup_decl_die (tdecl);
30415 dw_die_ref cdie;
30416 if (tdie == NULL
30417 && DECL_EXTERNAL (tdecl)
30418 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30419 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30420 {
30421 dw_die_ref pdie = cdie;
30422 /* Make sure we don't add these DIEs into type units.
30423 We could emit skeleton DIEs for context (namespaces,
30424 outer structs/classes) and a skeleton DIE for the
30425 innermost context with DW_AT_signature pointing to the
30426 type unit. See PR78835. */
30427 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30428 pdie = pdie->die_parent;
30429 if (pdie == NULL)
30430 {
30431 /* Creating a full DIE for tdecl is overly expensive and
30432 at this point even wrong when in the LTO phase, as it can
30433 end up generating new type DIEs we didn't output,
30434 and optimize_external_refs would then crash. */
30435 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30436 add_AT_flag (tdie, DW_AT_external, 1);
30437 add_AT_flag (tdie, DW_AT_declaration, 1);
30438 add_linkage_attr (tdie, tdecl);
30439 add_name_and_src_coords_attributes (tdie, tdecl, true);
30440 equate_decl_number_to_die (tdecl, tdie);
30441 }
30442 }
30443 if (tdie)
30444 {
30445 a->dw_attr_val.val_class = dw_val_class_die_ref;
30446 a->dw_attr_val.v.val_die_ref.die = tdie;
30447 a->dw_attr_val.v.val_die_ref.external = 0;
30448 }
30449 else
30450 {
30451 if (AT_index (a) != NOT_INDEXED)
30452 remove_addr_table_entry (a->dw_attr_val.val_entry);
30453 remove_AT (die, a->dw_attr);
30454 ix--;
30455 }
30456 }
30457 break;
30458 default:
30459 break;
30460 }
30461
30462 if (remove_AT_byte_size)
30463 remove_AT (die, dwarf_version >= 5
30464 ? DW_AT_string_length_byte_size
30465 : DW_AT_byte_size);
30466
30467 FOR_EACH_CHILD (die, c, resolve_addr (c));
30468 }
30469 \f
30470 /* Helper routines for optimize_location_lists.
30471 This pass tries to share identical location lists in the .debug_loc
30472 section. */
30473
30474 /* Iteratively hash operands of LOC opcode into HSTATE. */
30475
30476 static void
30477 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30478 {
30479 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30480 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30481
30482 switch (loc->dw_loc_opc)
30483 {
30484 case DW_OP_const4u:
30485 case DW_OP_const8u:
30486 if (loc->dtprel)
30487 goto hash_addr;
30488 /* FALLTHRU */
30489 case DW_OP_const1u:
30490 case DW_OP_const1s:
30491 case DW_OP_const2u:
30492 case DW_OP_const2s:
30493 case DW_OP_const4s:
30494 case DW_OP_const8s:
30495 case DW_OP_constu:
30496 case DW_OP_consts:
30497 case DW_OP_pick:
30498 case DW_OP_plus_uconst:
30499 case DW_OP_breg0:
30500 case DW_OP_breg1:
30501 case DW_OP_breg2:
30502 case DW_OP_breg3:
30503 case DW_OP_breg4:
30504 case DW_OP_breg5:
30505 case DW_OP_breg6:
30506 case DW_OP_breg7:
30507 case DW_OP_breg8:
30508 case DW_OP_breg9:
30509 case DW_OP_breg10:
30510 case DW_OP_breg11:
30511 case DW_OP_breg12:
30512 case DW_OP_breg13:
30513 case DW_OP_breg14:
30514 case DW_OP_breg15:
30515 case DW_OP_breg16:
30516 case DW_OP_breg17:
30517 case DW_OP_breg18:
30518 case DW_OP_breg19:
30519 case DW_OP_breg20:
30520 case DW_OP_breg21:
30521 case DW_OP_breg22:
30522 case DW_OP_breg23:
30523 case DW_OP_breg24:
30524 case DW_OP_breg25:
30525 case DW_OP_breg26:
30526 case DW_OP_breg27:
30527 case DW_OP_breg28:
30528 case DW_OP_breg29:
30529 case DW_OP_breg30:
30530 case DW_OP_breg31:
30531 case DW_OP_regx:
30532 case DW_OP_fbreg:
30533 case DW_OP_piece:
30534 case DW_OP_deref_size:
30535 case DW_OP_xderef_size:
30536 hstate.add_object (val1->v.val_int);
30537 break;
30538 case DW_OP_skip:
30539 case DW_OP_bra:
30540 {
30541 int offset;
30542
30543 gcc_assert (val1->val_class == dw_val_class_loc);
30544 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30545 hstate.add_object (offset);
30546 }
30547 break;
30548 case DW_OP_implicit_value:
30549 hstate.add_object (val1->v.val_unsigned);
30550 switch (val2->val_class)
30551 {
30552 case dw_val_class_const:
30553 hstate.add_object (val2->v.val_int);
30554 break;
30555 case dw_val_class_vec:
30556 {
30557 unsigned int elt_size = val2->v.val_vec.elt_size;
30558 unsigned int len = val2->v.val_vec.length;
30559
30560 hstate.add_int (elt_size);
30561 hstate.add_int (len);
30562 hstate.add (val2->v.val_vec.array, len * elt_size);
30563 }
30564 break;
30565 case dw_val_class_const_double:
30566 hstate.add_object (val2->v.val_double.low);
30567 hstate.add_object (val2->v.val_double.high);
30568 break;
30569 case dw_val_class_wide_int:
30570 hstate.add (val2->v.val_wide->get_val (),
30571 get_full_len (*val2->v.val_wide)
30572 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30573 break;
30574 case dw_val_class_addr:
30575 inchash::add_rtx (val2->v.val_addr, hstate);
30576 break;
30577 default:
30578 gcc_unreachable ();
30579 }
30580 break;
30581 case DW_OP_bregx:
30582 case DW_OP_bit_piece:
30583 hstate.add_object (val1->v.val_int);
30584 hstate.add_object (val2->v.val_int);
30585 break;
30586 case DW_OP_addr:
30587 hash_addr:
30588 if (loc->dtprel)
30589 {
30590 unsigned char dtprel = 0xd1;
30591 hstate.add_object (dtprel);
30592 }
30593 inchash::add_rtx (val1->v.val_addr, hstate);
30594 break;
30595 case DW_OP_GNU_addr_index:
30596 case DW_OP_addrx:
30597 case DW_OP_GNU_const_index:
30598 case DW_OP_constx:
30599 {
30600 if (loc->dtprel)
30601 {
30602 unsigned char dtprel = 0xd1;
30603 hstate.add_object (dtprel);
30604 }
30605 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30606 }
30607 break;
30608 case DW_OP_implicit_pointer:
30609 case DW_OP_GNU_implicit_pointer:
30610 hstate.add_int (val2->v.val_int);
30611 break;
30612 case DW_OP_entry_value:
30613 case DW_OP_GNU_entry_value:
30614 hstate.add_object (val1->v.val_loc);
30615 break;
30616 case DW_OP_regval_type:
30617 case DW_OP_deref_type:
30618 case DW_OP_GNU_regval_type:
30619 case DW_OP_GNU_deref_type:
30620 {
30621 unsigned int byte_size
30622 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30623 unsigned int encoding
30624 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30625 hstate.add_object (val1->v.val_int);
30626 hstate.add_object (byte_size);
30627 hstate.add_object (encoding);
30628 }
30629 break;
30630 case DW_OP_convert:
30631 case DW_OP_reinterpret:
30632 case DW_OP_GNU_convert:
30633 case DW_OP_GNU_reinterpret:
30634 if (val1->val_class == dw_val_class_unsigned_const)
30635 {
30636 hstate.add_object (val1->v.val_unsigned);
30637 break;
30638 }
30639 /* FALLTHRU */
30640 case DW_OP_const_type:
30641 case DW_OP_GNU_const_type:
30642 {
30643 unsigned int byte_size
30644 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30645 unsigned int encoding
30646 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30647 hstate.add_object (byte_size);
30648 hstate.add_object (encoding);
30649 if (loc->dw_loc_opc != DW_OP_const_type
30650 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30651 break;
30652 hstate.add_object (val2->val_class);
30653 switch (val2->val_class)
30654 {
30655 case dw_val_class_const:
30656 hstate.add_object (val2->v.val_int);
30657 break;
30658 case dw_val_class_vec:
30659 {
30660 unsigned int elt_size = val2->v.val_vec.elt_size;
30661 unsigned int len = val2->v.val_vec.length;
30662
30663 hstate.add_object (elt_size);
30664 hstate.add_object (len);
30665 hstate.add (val2->v.val_vec.array, len * elt_size);
30666 }
30667 break;
30668 case dw_val_class_const_double:
30669 hstate.add_object (val2->v.val_double.low);
30670 hstate.add_object (val2->v.val_double.high);
30671 break;
30672 case dw_val_class_wide_int:
30673 hstate.add (val2->v.val_wide->get_val (),
30674 get_full_len (*val2->v.val_wide)
30675 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30676 break;
30677 default:
30678 gcc_unreachable ();
30679 }
30680 }
30681 break;
30682
30683 default:
30684 /* Other codes have no operands. */
30685 break;
30686 }
30687 }
30688
30689 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30690
30691 static inline void
30692 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30693 {
30694 dw_loc_descr_ref l;
30695 bool sizes_computed = false;
30696 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
30697 size_of_locs (loc);
30698
30699 for (l = loc; l != NULL; l = l->dw_loc_next)
30700 {
30701 enum dwarf_location_atom opc = l->dw_loc_opc;
30702 hstate.add_object (opc);
30703 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30704 {
30705 size_of_locs (loc);
30706 sizes_computed = true;
30707 }
30708 hash_loc_operands (l, hstate);
30709 }
30710 }
30711
30712 /* Compute hash of the whole location list LIST_HEAD. */
30713
30714 static inline void
30715 hash_loc_list (dw_loc_list_ref list_head)
30716 {
30717 dw_loc_list_ref curr = list_head;
30718 inchash::hash hstate;
30719
30720 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30721 {
30722 hstate.add (curr->begin, strlen (curr->begin) + 1);
30723 hstate.add (curr->end, strlen (curr->end) + 1);
30724 hstate.add_object (curr->vbegin);
30725 hstate.add_object (curr->vend);
30726 if (curr->section)
30727 hstate.add (curr->section, strlen (curr->section) + 1);
30728 hash_locs (curr->expr, hstate);
30729 }
30730 list_head->hash = hstate.end ();
30731 }
30732
30733 /* Return true if X and Y opcodes have the same operands. */
30734
30735 static inline bool
30736 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30737 {
30738 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30739 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30740 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30741 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30742
30743 switch (x->dw_loc_opc)
30744 {
30745 case DW_OP_const4u:
30746 case DW_OP_const8u:
30747 if (x->dtprel)
30748 goto hash_addr;
30749 /* FALLTHRU */
30750 case DW_OP_const1u:
30751 case DW_OP_const1s:
30752 case DW_OP_const2u:
30753 case DW_OP_const2s:
30754 case DW_OP_const4s:
30755 case DW_OP_const8s:
30756 case DW_OP_constu:
30757 case DW_OP_consts:
30758 case DW_OP_pick:
30759 case DW_OP_plus_uconst:
30760 case DW_OP_breg0:
30761 case DW_OP_breg1:
30762 case DW_OP_breg2:
30763 case DW_OP_breg3:
30764 case DW_OP_breg4:
30765 case DW_OP_breg5:
30766 case DW_OP_breg6:
30767 case DW_OP_breg7:
30768 case DW_OP_breg8:
30769 case DW_OP_breg9:
30770 case DW_OP_breg10:
30771 case DW_OP_breg11:
30772 case DW_OP_breg12:
30773 case DW_OP_breg13:
30774 case DW_OP_breg14:
30775 case DW_OP_breg15:
30776 case DW_OP_breg16:
30777 case DW_OP_breg17:
30778 case DW_OP_breg18:
30779 case DW_OP_breg19:
30780 case DW_OP_breg20:
30781 case DW_OP_breg21:
30782 case DW_OP_breg22:
30783 case DW_OP_breg23:
30784 case DW_OP_breg24:
30785 case DW_OP_breg25:
30786 case DW_OP_breg26:
30787 case DW_OP_breg27:
30788 case DW_OP_breg28:
30789 case DW_OP_breg29:
30790 case DW_OP_breg30:
30791 case DW_OP_breg31:
30792 case DW_OP_regx:
30793 case DW_OP_fbreg:
30794 case DW_OP_piece:
30795 case DW_OP_deref_size:
30796 case DW_OP_xderef_size:
30797 return valx1->v.val_int == valy1->v.val_int;
30798 case DW_OP_skip:
30799 case DW_OP_bra:
30800 /* If splitting debug info, the use of DW_OP_GNU_addr_index
30801 can cause irrelevant differences in dw_loc_addr. */
30802 gcc_assert (valx1->val_class == dw_val_class_loc
30803 && valy1->val_class == dw_val_class_loc
30804 && (dwarf_split_debug_info
30805 || x->dw_loc_addr == y->dw_loc_addr));
30806 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
30807 case DW_OP_implicit_value:
30808 if (valx1->v.val_unsigned != valy1->v.val_unsigned
30809 || valx2->val_class != valy2->val_class)
30810 return false;
30811 switch (valx2->val_class)
30812 {
30813 case dw_val_class_const:
30814 return valx2->v.val_int == valy2->v.val_int;
30815 case dw_val_class_vec:
30816 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30817 && valx2->v.val_vec.length == valy2->v.val_vec.length
30818 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30819 valx2->v.val_vec.elt_size
30820 * valx2->v.val_vec.length) == 0;
30821 case dw_val_class_const_double:
30822 return valx2->v.val_double.low == valy2->v.val_double.low
30823 && valx2->v.val_double.high == valy2->v.val_double.high;
30824 case dw_val_class_wide_int:
30825 return *valx2->v.val_wide == *valy2->v.val_wide;
30826 case dw_val_class_addr:
30827 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
30828 default:
30829 gcc_unreachable ();
30830 }
30831 case DW_OP_bregx:
30832 case DW_OP_bit_piece:
30833 return valx1->v.val_int == valy1->v.val_int
30834 && valx2->v.val_int == valy2->v.val_int;
30835 case DW_OP_addr:
30836 hash_addr:
30837 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
30838 case DW_OP_GNU_addr_index:
30839 case DW_OP_addrx:
30840 case DW_OP_GNU_const_index:
30841 case DW_OP_constx:
30842 {
30843 rtx ax1 = valx1->val_entry->addr.rtl;
30844 rtx ay1 = valy1->val_entry->addr.rtl;
30845 return rtx_equal_p (ax1, ay1);
30846 }
30847 case DW_OP_implicit_pointer:
30848 case DW_OP_GNU_implicit_pointer:
30849 return valx1->val_class == dw_val_class_die_ref
30850 && valx1->val_class == valy1->val_class
30851 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
30852 && valx2->v.val_int == valy2->v.val_int;
30853 case DW_OP_entry_value:
30854 case DW_OP_GNU_entry_value:
30855 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
30856 case DW_OP_const_type:
30857 case DW_OP_GNU_const_type:
30858 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
30859 || valx2->val_class != valy2->val_class)
30860 return false;
30861 switch (valx2->val_class)
30862 {
30863 case dw_val_class_const:
30864 return valx2->v.val_int == valy2->v.val_int;
30865 case dw_val_class_vec:
30866 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30867 && valx2->v.val_vec.length == valy2->v.val_vec.length
30868 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30869 valx2->v.val_vec.elt_size
30870 * valx2->v.val_vec.length) == 0;
30871 case dw_val_class_const_double:
30872 return valx2->v.val_double.low == valy2->v.val_double.low
30873 && valx2->v.val_double.high == valy2->v.val_double.high;
30874 case dw_val_class_wide_int:
30875 return *valx2->v.val_wide == *valy2->v.val_wide;
30876 default:
30877 gcc_unreachable ();
30878 }
30879 case DW_OP_regval_type:
30880 case DW_OP_deref_type:
30881 case DW_OP_GNU_regval_type:
30882 case DW_OP_GNU_deref_type:
30883 return valx1->v.val_int == valy1->v.val_int
30884 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
30885 case DW_OP_convert:
30886 case DW_OP_reinterpret:
30887 case DW_OP_GNU_convert:
30888 case DW_OP_GNU_reinterpret:
30889 if (valx1->val_class != valy1->val_class)
30890 return false;
30891 if (valx1->val_class == dw_val_class_unsigned_const)
30892 return valx1->v.val_unsigned == valy1->v.val_unsigned;
30893 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30894 case DW_OP_GNU_parameter_ref:
30895 return valx1->val_class == dw_val_class_die_ref
30896 && valx1->val_class == valy1->val_class
30897 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30898 default:
30899 /* Other codes have no operands. */
30900 return true;
30901 }
30902 }
30903
30904 /* Return true if DWARF location expressions X and Y are the same. */
30905
30906 static inline bool
30907 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
30908 {
30909 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
30910 if (x->dw_loc_opc != y->dw_loc_opc
30911 || x->dtprel != y->dtprel
30912 || !compare_loc_operands (x, y))
30913 break;
30914 return x == NULL && y == NULL;
30915 }
30916
30917 /* Hashtable helpers. */
30918
30919 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
30920 {
30921 static inline hashval_t hash (const dw_loc_list_struct *);
30922 static inline bool equal (const dw_loc_list_struct *,
30923 const dw_loc_list_struct *);
30924 };
30925
30926 /* Return precomputed hash of location list X. */
30927
30928 inline hashval_t
30929 loc_list_hasher::hash (const dw_loc_list_struct *x)
30930 {
30931 return x->hash;
30932 }
30933
30934 /* Return true if location lists A and B are the same. */
30935
30936 inline bool
30937 loc_list_hasher::equal (const dw_loc_list_struct *a,
30938 const dw_loc_list_struct *b)
30939 {
30940 if (a == b)
30941 return 1;
30942 if (a->hash != b->hash)
30943 return 0;
30944 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
30945 if (strcmp (a->begin, b->begin) != 0
30946 || strcmp (a->end, b->end) != 0
30947 || (a->section == NULL) != (b->section == NULL)
30948 || (a->section && strcmp (a->section, b->section) != 0)
30949 || a->vbegin != b->vbegin || a->vend != b->vend
30950 || !compare_locs (a->expr, b->expr))
30951 break;
30952 return a == NULL && b == NULL;
30953 }
30954
30955 typedef hash_table<loc_list_hasher> loc_list_hash_type;
30956
30957
30958 /* Recursively optimize location lists referenced from DIE
30959 children and share them whenever possible. */
30960
30961 static void
30962 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
30963 {
30964 dw_die_ref c;
30965 dw_attr_node *a;
30966 unsigned ix;
30967 dw_loc_list_struct **slot;
30968 bool drop_locviews = false;
30969 bool has_locviews = false;
30970
30971 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30972 if (AT_class (a) == dw_val_class_loc_list)
30973 {
30974 dw_loc_list_ref list = AT_loc_list (a);
30975 /* TODO: perform some optimizations here, before hashing
30976 it and storing it into the hash table. */
30977 hash_loc_list (list);
30978 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
30979 if (*slot == NULL)
30980 {
30981 *slot = list;
30982 if (loc_list_has_views (list))
30983 gcc_assert (list->vl_symbol);
30984 else if (list->vl_symbol)
30985 {
30986 drop_locviews = true;
30987 list->vl_symbol = NULL;
30988 }
30989 }
30990 else
30991 {
30992 if (list->vl_symbol && !(*slot)->vl_symbol)
30993 drop_locviews = true;
30994 a->dw_attr_val.v.val_loc_list = *slot;
30995 }
30996 }
30997 else if (AT_class (a) == dw_val_class_view_list)
30998 {
30999 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
31000 has_locviews = true;
31001 }
31002
31003
31004 if (drop_locviews && has_locviews)
31005 remove_AT (die, DW_AT_GNU_locviews);
31006
31007 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
31008 }
31009
31010
31011 /* Recursively assign each location list a unique index into the debug_addr
31012 section. */
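/* Concretely, the begin label of each list entry that will actually be
   output gets its own address table entry, so that with -gsplit-dwarf the
   entry can refer to its start address by index into .debug_addr instead
   of by a relocation against the label.  */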
31013
31014 static void
31015 index_location_lists (dw_die_ref die)
31016 {
31017 dw_die_ref c;
31018 dw_attr_node *a;
31019 unsigned ix;
31020
31021 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31022 if (AT_class (a) == dw_val_class_loc_list)
31023 {
31024 dw_loc_list_ref list = AT_loc_list (a);
31025 dw_loc_list_ref curr;
31026 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
31027 {
31028 /* Don't index an entry that has already been indexed
31029 or won't be output. Make sure skip_loc_list_entry doesn't
31030 call size_of_locs, because that might cause a circular dependency:
31031 index_location_lists requires address table indexes to be
31032 computed, but adding new indexes through add_addr_table_entry
31033 and address table index computation require no new additions
31034 to the hash table. In the rare case of a DWARF[234] location
31035 expression of 64KB or more, we'll just waste an unused address
31036 table entry for it. */
31037 if (curr->begin_entry != NULL
31038 || skip_loc_list_entry (curr))
31039 continue;
31040
31041 curr->begin_entry
31042 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
31043 }
31044 }
31045
31046 FOR_EACH_CHILD (die, c, index_location_lists (c));
31047 }
31048
31049 /* Optimize location lists referenced from DIE
31050 children and share them whenever possible. */
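/* For example, if several variables ended up with byte-for-byte identical
   location lists, after this pass their DW_AT_location attributes all point
   at one shared list, so its entries only need to be emitted once.  */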
31051
31052 static void
31053 optimize_location_lists (dw_die_ref die)
31054 {
31055 loc_list_hash_type htab (500);
31056 optimize_location_lists_1 (die, &htab);
31057 }
31058 \f
31059 /* Traverse the limbo die list, and add parent/child links. The only
31060 dies without parents that should be here are concrete instances of
31061 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
31062 For concrete instances, we can get the parent die from the abstract
31063 instance. */
31064
31065 static void
31066 flush_limbo_die_list (void)
31067 {
31068 limbo_die_node *node;
31069
31070 /* get_context_die calls force_decl_die, which can put new DIEs on the
31071 limbo list in LTO mode when nested functions are put in a different
31072 partition than that of their parent function. */
31073 while ((node = limbo_die_list))
31074 {
31075 dw_die_ref die = node->die;
31076 limbo_die_list = node->next;
31077
31078 if (die->die_parent == NULL)
31079 {
31080 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
31081
31082 if (origin && origin->die_parent)
31083 add_child_die (origin->die_parent, die);
31084 else if (is_cu_die (die))
31085 ;
31086 else if (seen_error ())
31087 /* It's OK to be confused by errors in the input. */
31088 add_child_die (comp_unit_die (), die);
31089 else
31090 {
31091 /* In certain situations, the lexical block containing a
31092 nested function can be optimized away, which results
31093 in the nested function die being orphaned. Likewise
31094 with the return type of that nested function. Force
31095 this to be a child of the containing function.
31096
31097 It may happen that even the containing function got fully
31098 inlined and optimized out. In that case we are lost and
31099 assign the empty child. This should not be a big issue, as
31100 the function is likely unreachable too. */
31101 gcc_assert (node->created_for);
31102
31103 if (DECL_P (node->created_for))
31104 origin = get_context_die (DECL_CONTEXT (node->created_for));
31105 else if (TYPE_P (node->created_for))
31106 origin = scope_die_for (node->created_for, comp_unit_die ());
31107 else
31108 origin = comp_unit_die ();
31109
31110 add_child_die (origin, die);
31111 }
31112 }
31113 }
31114 }
31115
31116 /* Reset DIEs so we can output them again. */
31117
31118 static void
31119 reset_dies (dw_die_ref die)
31120 {
31121 dw_die_ref c;
31122
31123 /* Remove stuff we re-generate. */
31124 die->die_mark = 0;
31125 die->die_offset = 0;
31126 die->die_abbrev = 0;
31127 remove_AT (die, DW_AT_sibling);
31128
31129 FOR_EACH_CHILD (die, c, reset_dies (c));
31130 }
31131
31132 /* Output stuff that dwarf requires at the end of every file,
31133 and generate the DWARF-2 debugging info. */
31134
31135 static void
31136 dwarf2out_finish (const char *filename)
31137 {
31138 comdat_type_node *ctnode;
31139 dw_die_ref main_comp_unit_die;
31140 unsigned char checksum[16];
31141 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31142
31143 /* Flush out any latecomers to the limbo party. */
31144 flush_limbo_die_list ();
31145
31146 if (inline_entry_data_table)
31147 gcc_assert (inline_entry_data_table->elements () == 0);
31148
31149 if (flag_checking)
31150 {
31151 verify_die (comp_unit_die ());
31152 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31153 verify_die (node->die);
31154 }
31155
31156 /* We shouldn't have any symbols with delayed asm names for
31157 DIEs generated after early finish. */
31158 gcc_assert (deferred_asm_name == NULL);
31159
31160 gen_remaining_tmpl_value_param_die_attribute ();
31161
31162 if (flag_generate_lto || flag_generate_offload)
31163 {
31164 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
31165
31166 /* Prune stuff so that dwarf2out_finish runs successfully
31167 for the fat part of the object. */
31168 reset_dies (comp_unit_die ());
31169 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31170 reset_dies (node->die);
31171
31172 hash_table<comdat_type_hasher> comdat_type_table (100);
31173 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31174 {
31175 comdat_type_node **slot
31176 = comdat_type_table.find_slot (ctnode, INSERT);
31177
31178 /* Don't reset types twice. */
31179 if (*slot != HTAB_EMPTY_ENTRY)
31180 continue;
31181
31182 /* Remove the pointer to the line table. */
31183 remove_AT (ctnode->root_die, DW_AT_stmt_list);
31184
31185 if (debug_info_level >= DINFO_LEVEL_TERSE)
31186 reset_dies (ctnode->root_die);
31187
31188 *slot = ctnode;
31189 }
31190
31191 /* Reset die CU symbol so we don't output it twice. */
31192 comp_unit_die ()->die_id.die_symbol = NULL;
31193
31194 /* Remove DW_AT_macro and DW_AT_stmt_list from the early output. */
31195 remove_AT (comp_unit_die (), DW_AT_stmt_list);
31196 if (have_macinfo)
31197 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
31198
31199 /* Remove indirect string decisions. */
31200 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
31201 if (debug_line_str_hash)
31202 {
31203 debug_line_str_hash->traverse<void *, reset_indirect_string> (NULL);
31204 debug_line_str_hash = NULL;
31205 }
31206 }
31207
31208 #if ENABLE_ASSERT_CHECKING
31209 {
31210 dw_die_ref die = comp_unit_die (), c;
31211 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
31212 }
31213 #endif
31214 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31215 resolve_addr (ctnode->root_die);
31216 resolve_addr (comp_unit_die ());
31217 move_marked_base_types ();
31218
31219 if (dump_file)
31220 {
31221 fprintf (dump_file, "DWARF for %s\n", filename);
31222 print_die (comp_unit_die (), dump_file);
31223 }
31224
31225 /* Initialize sections and labels used for actual assembler output. */
31226 unsigned generation = init_sections_and_labels (false);
31227
31228 /* Traverse the DIE's and add sibling attributes to those DIE's that
31229 have children. */
31230 add_sibling_attributes (comp_unit_die ());
31231 limbo_die_node *node;
31232 for (node = cu_die_list; node; node = node->next)
31233 add_sibling_attributes (node->die);
31234 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31235 add_sibling_attributes (ctnode->root_die);
31236
31237 /* When splitting DWARF info, we put some attributes in the
31238 skeleton compile_unit DIE that remains in the .o, while
31239 most attributes go in the DWO compile_unit_die. */
31240 if (dwarf_split_debug_info)
31241 {
31242 limbo_die_node *cu;
31243 main_comp_unit_die = gen_compile_unit_die (NULL);
31244 if (dwarf_version >= 5)
31245 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
31246 cu = limbo_die_list;
31247 gcc_assert (cu->die == main_comp_unit_die);
31248 limbo_die_list = limbo_die_list->next;
31249 cu->next = cu_die_list;
31250 cu_die_list = cu;
31251 }
31252 else
31253 main_comp_unit_die = comp_unit_die ();
31254
31255 /* Output a terminator label for the .text section. */
31256 switch_to_section (text_section);
31257 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
31258 if (cold_text_section)
31259 {
31260 switch_to_section (cold_text_section);
31261 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
31262 }
31263
31264 /* We can only use the low/high_pc attributes if all of the code was
31265 in .text. */
31266 if (!have_multiple_function_sections
31267 || (dwarf_version < 3 && dwarf_strict))
31268 {
31269 /* Don't add if the CU has no associated code. */
31270 if (text_section_used)
31271 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31272 text_end_label, true);
31273 }
31274 else
31275 {
31276 unsigned fde_idx;
31277 dw_fde_ref fde;
31278 bool range_list_added = false;
31279
31280 if (text_section_used)
31281 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31282 text_end_label, &range_list_added, true);
31283 if (cold_text_section_used)
31284 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31285 cold_end_label, &range_list_added, true);
31286
31287 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31288 {
31289 if (DECL_IGNORED_P (fde->decl))
31290 continue;
31291 if (!fde->in_std_section)
31292 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31293 fde->dw_fde_end, &range_list_added,
31294 true);
31295 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31296 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31297 fde->dw_fde_second_end, &range_list_added,
31298 true);
31299 }
31300
31301 if (range_list_added)
31302 {
31303 /* We need to give .debug_loc and .debug_ranges an appropriate
31304 "base address". Use zero so that these addresses become
31305 absolute. Historically, we've emitted the unexpected
31306 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31307 Emit both to give time for other tools to adapt. */
31308 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31309 if (! dwarf_strict && dwarf_version < 4)
31310 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31311
31312 add_ranges (NULL);
31313 }
31314 }
31315
31316 /* AIX Assembler inserts the length, so adjust the reference to match the
31317 offset expected by debuggers. */
31318 strcpy (dl_section_ref, debug_line_section_label);
31319 if (XCOFF_DEBUGGING_INFO)
31320 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31321
31322 if (debug_info_level >= DINFO_LEVEL_TERSE)
31323 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31324 dl_section_ref);
31325
31326 if (have_macinfo)
31327 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31328 macinfo_section_label);
31329
31330 if (dwarf_split_debug_info)
31331 {
31332 if (have_location_lists)
31333 {
31334 /* Since we generate the loclists in the split DWARF .dwo
31335 file itself, we don't need to generate a loclists_base
31336 attribute for the split compile unit DIE. That attribute
31337 (and using relocatable sec_offset FORMs) isn't allowed
31338 for a split compile unit. Only if the .debug_loclists
31339 section was in the main file, would we need to generate a
31340 loclists_base attribute here (for the full or skeleton
31341 unit DIE). */
31342
31343 /* optimize_location_lists calculates the size of the lists,
31344 so index them first, and assign indices to the entries.
31345 Although optimize_location_lists will remove entries from
31346 the table, it only does so for duplicates, and therefore
31347 only reduces ref_counts to 1. */
31348 index_location_lists (comp_unit_die ());
31349 }
31350
31351 if (addr_index_table != NULL)
31352 {
31353 unsigned int index = 0;
31354 addr_index_table
31355 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31356 (&index);
31357 }
31358 }
31359
31360 loc_list_idx = 0;
31361 if (have_location_lists)
31362 {
31363 optimize_location_lists (comp_unit_die ());
31364 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31365 if (dwarf_version >= 5 && dwarf_split_debug_info)
31366 assign_location_list_indexes (comp_unit_die ());
31367 }
31368
31369 save_macinfo_strings ();
31370
31371 if (dwarf_split_debug_info)
31372 {
31373 unsigned int index = 0;
31374
31375 /* Add attributes common to skeleton compile_units and
31376 type_units. Because these attributes include strings, it
31377 must be done before freezing the string table. Top-level
31378 skeleton die attrs are added when the skeleton type unit is
31379 created, so ensure it is created by this point. */
31380 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31381 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31382 }
31383
31384 /* Output all of the compilation units. We put the main one last so that
31385 the offsets are available to output_pubnames. */
31386 for (node = cu_die_list; node; node = node->next)
31387 output_comp_unit (node->die, 0, NULL);
31388
31389 hash_table<comdat_type_hasher> comdat_type_table (100);
31390 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31391 {
31392 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31393
31394 /* Don't output duplicate types. */
31395 if (*slot != HTAB_EMPTY_ENTRY)
31396 continue;
31397
31398 /* Add a pointer to the line table for the main compilation unit
31399 so that the debugger can make sense of DW_AT_decl_file
31400 attributes. */
31401 if (debug_info_level >= DINFO_LEVEL_TERSE)
31402 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31403 (!dwarf_split_debug_info
31404 ? dl_section_ref
31405 : debug_skeleton_line_section_label));
31406
31407 output_comdat_type_unit (ctnode);
31408 *slot = ctnode;
31409 }
31410
31411 if (dwarf_split_debug_info)
31412 {
31413 int mark;
31414 struct md5_ctx ctx;
31415
31416 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31417 index_rnglists ();
31418
31419 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31420 md5_init_ctx (&ctx);
31421 mark = 0;
31422 die_checksum (comp_unit_die (), &ctx, &mark);
31423 unmark_all_dies (comp_unit_die ());
31424 md5_finish_ctx (&ctx, checksum);
31425
31426 if (dwarf_version < 5)
31427 {
31428 /* Use the first 8 bytes of the checksum as the dwo_id,
31429 and add it to both comp-unit DIEs. */
31430 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31431 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31432 }
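      /* For DWARF 5 the dwo_id is not an attribute; the checksum computed
         above is instead passed down to output_comp_unit and
         output_skeleton_debug_sections, which emit it as part of the
         DW_UT_skeleton / DW_UT_split_compile unit headers.  */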
31433
31434 /* Add the base offset of the ranges table to the skeleton
31435 comp-unit DIE. */
31436 if (!vec_safe_is_empty (ranges_table))
31437 {
31438 if (dwarf_version >= 5)
31439 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31440 ranges_base_label);
31441 else
31442 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31443 ranges_section_label);
31444 }
31445
31446 switch_to_section (debug_addr_section);
31447 /* GNU DebugFission (https://gcc.gnu.org/wiki/DebugFission), which GCC
31448 uses to implement -gsplit-dwarf as a GNU extension before DWARF5,
31449 didn't have a header for .debug_addr units.
31450 DWARF5 specifies a small header when address tables are used. */
31451 if (dwarf_version >= 5)
31452 {
31453 unsigned int last_idx = 0;
31454 unsigned long addrs_length;
31455
31456 addr_index_table->traverse_noresize
31457 <unsigned int *, count_index_addrs> (&last_idx);
31458 addrs_length = last_idx * DWARF2_ADDR_SIZE + 4;
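          /* The extra 4 bytes cover the 2-byte version, the 1-byte address
             size and the 1-byte segment selector size emitted below: the
             DWARF5 unit length excludes the initial length field itself but
             includes the rest of the header and the address entries.  */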
31459
31460 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31461 dw2_asm_output_data (4, 0xffffffff,
31462 "Escape value for 64-bit DWARF extension");
31463 dw2_asm_output_data (DWARF_OFFSET_SIZE, addrs_length,
31464 "Length of Address Unit");
31465 dw2_asm_output_data (2, 5, "DWARF addr version");
31466 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
31467 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
31468 }
31469 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
31470 output_addr_table ();
31471 }
31472
31473 /* Output the main compilation unit if non-empty or if .debug_macinfo
31474 or .debug_macro will be emitted. */
31475 output_comp_unit (comp_unit_die (), have_macinfo,
31476 dwarf_split_debug_info ? checksum : NULL);
31477
31478 if (dwarf_split_debug_info && info_section_emitted)
31479 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31480
31481 /* Output the abbreviation table. */
31482 if (vec_safe_length (abbrev_die_table) != 1)
31483 {
31484 switch_to_section (debug_abbrev_section);
31485 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31486 output_abbrev_section ();
31487 }
31488
31489 /* Output location list section if necessary. */
31490 if (have_location_lists)
31491 {
31492 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31493 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31494 /* Output the location lists info. */
31495 switch_to_section (debug_loc_section);
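      /* For DWARF 5 this section is really .debug_loclists and starts with
         a small header: the initial length (preceded by the 0xffffffff
         escape for 64-bit DWARF), a 2-byte version, the address size, the
         segment selector size, and an offset entry count that is only
         nonzero for -gsplit-dwarf, where an offset table follows.  */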
31496 if (dwarf_version >= 5)
31497 {
31498 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 2);
31499 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 3);
31500 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31501 dw2_asm_output_data (4, 0xffffffff,
31502 "Initial length escape value indicating "
31503 "64-bit DWARF extension");
31504 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
31505 "Length of Location Lists");
31506 ASM_OUTPUT_LABEL (asm_out_file, l1);
31507 output_dwarf_version ();
31508 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31509 dw2_asm_output_data (1, 0, "Segment Size");
31510 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31511 "Offset Entry Count");
31512 }
31513 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
31514 if (dwarf_version >= 5 && dwarf_split_debug_info)
31515 {
31516 unsigned int save_loc_list_idx = loc_list_idx;
31517 loc_list_idx = 0;
31518 output_loclists_offsets (comp_unit_die ());
31519 gcc_assert (save_loc_list_idx == loc_list_idx);
31520 }
31521 output_location_lists (comp_unit_die ());
31522 if (dwarf_version >= 5)
31523 ASM_OUTPUT_LABEL (asm_out_file, l2);
31524 }
31525
31526 output_pubtables ();
31527
31528 /* Output the address range information if a CU (.debug_info section)
31529 was emitted. We output an empty table even if we had no functions
31530 to put in it. This is because the consumer has no way to tell the
31531 difference between an empty table that we omitted and failure to
31532 generate a table that would have contained data. */
31533 if (info_section_emitted)
31534 {
31535 switch_to_section (debug_aranges_section);
31536 output_aranges ();
31537 }
31538
31539 /* Output ranges section if necessary. */
31540 if (!vec_safe_is_empty (ranges_table))
31541 {
31542 if (dwarf_version >= 5)
31543 output_rnglists (generation);
31544 else
31545 output_ranges ();
31546 }
31547
31548 /* Have to end the macro section. */
31549 if (have_macinfo)
31550 {
31551 switch_to_section (debug_macinfo_section);
31552 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31553 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31554 : debug_skeleton_line_section_label, false);
31555 dw2_asm_output_data (1, 0, "End compilation unit");
31556 }
31557
31558 /* Output the source line correspondence table. We must do this
31559 even if there is no line information. Otherwise, on an empty
31560 translation unit, we will generate a present, but empty,
31561 .debug_info section. IRIX 6.5 `nm' will then complain when
31562 examining the file. This is done late so that any filenames
31563 used by the debug_info section are marked as 'used'. */
31564 switch_to_section (debug_line_section);
31565 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31566 if (! output_asm_line_debug_info ())
31567 output_line_info (false);
31568
31569 if (dwarf_split_debug_info && info_section_emitted)
31570 {
31571 switch_to_section (debug_skeleton_line_section);
31572 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31573 output_line_info (true);
31574 }
31575
31576 /* If we emitted any indirect strings, output the string table too. */
31577 if (debug_str_hash || skeleton_debug_str_hash)
31578 output_indirect_strings ();
31579 if (debug_line_str_hash)
31580 {
31581 switch_to_section (debug_line_str_section);
31582 const enum dwarf_form form = DW_FORM_line_strp;
31583 debug_line_str_hash->traverse<enum dwarf_form,
31584 output_indirect_string> (form);
31585 }
31586
31587 /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */
31588 symview_upper_bound = 0;
31589 if (zero_view_p)
31590 bitmap_clear (zero_view_p);
31591 }
31592
31593 /* Returns a hash value for X (which really is a variable_value_struct). */
31594
31595 inline hashval_t
31596 variable_value_hasher::hash (variable_value_struct *x)
31597 {
31598 return (hashval_t) x->decl_id;
31599 }
31600
31601 /* Return nonzero if decl_id of variable_value_struct X is the same as
31602 UID of decl Y. */
31603
31604 inline bool
31605 variable_value_hasher::equal (variable_value_struct *x, tree y)
31606 {
31607 return x->decl_id == DECL_UID (y);
31608 }
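/* The variable_value_hash table maps a function's DECL_UID to the DIEs whose
   location expressions still contain DW_OP_GNU_variable_value referring to
   that function's variables; note_variable_value fills it during early
   finish and resolve_variable_values processes the entry for
   current_function_decl later on.  */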
31609
31610 /* Helper function for resolve_variable_value, handle
31611 DW_OP_GNU_variable_value in one location expression.
31612 Return true if exprloc has been changed into loclist. */
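/* E.g. if the referenced decl already has a DIE, the operand is simply
   rewritten into a DIE reference; if the decl's location instead expands to
   a multi-entry location list, the whole attribute is converted from
   exprloc to loclist form, where the attribute allows that class.  */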
31613
31614 static bool
31615 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31616 {
31617 dw_loc_descr_ref next;
31618 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31619 {
31620 next = loc->dw_loc_next;
31621 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31622 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31623 continue;
31624
31625 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31626 if (DECL_CONTEXT (decl) != current_function_decl)
31627 continue;
31628
31629 dw_die_ref ref = lookup_decl_die (decl);
31630 if (ref)
31631 {
31632 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31633 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31634 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31635 continue;
31636 }
31637 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31638 if (l == NULL)
31639 continue;
31640 if (l->dw_loc_next)
31641 {
31642 if (AT_class (a) != dw_val_class_loc)
31643 continue;
31644 switch (a->dw_attr)
31645 {
31646 /* The following attributes allow both exprloc and loclist
31647 classes, so we can change them into a loclist. */
31648 case DW_AT_location:
31649 case DW_AT_string_length:
31650 case DW_AT_return_addr:
31651 case DW_AT_data_member_location:
31652 case DW_AT_frame_base:
31653 case DW_AT_segment:
31654 case DW_AT_static_link:
31655 case DW_AT_use_location:
31656 case DW_AT_vtable_elem_location:
31657 if (prev)
31658 {
31659 prev->dw_loc_next = NULL;
31660 prepend_loc_descr_to_each (l, AT_loc (a));
31661 }
31662 if (next)
31663 add_loc_descr_to_each (l, next);
31664 a->dw_attr_val.val_class = dw_val_class_loc_list;
31665 a->dw_attr_val.val_entry = NULL;
31666 a->dw_attr_val.v.val_loc_list = l;
31667 have_location_lists = true;
31668 return true;
31669 /* The following attributes allow both exprloc and reference
31670 classes, so if the whole expression is a single
31671 DW_OP_GNU_variable_value we could transform it into a reference. */
31672 case DW_AT_byte_size:
31673 case DW_AT_bit_size:
31674 case DW_AT_lower_bound:
31675 case DW_AT_upper_bound:
31676 case DW_AT_bit_stride:
31677 case DW_AT_count:
31678 case DW_AT_allocated:
31679 case DW_AT_associated:
31680 case DW_AT_byte_stride:
31681 if (prev == NULL && next == NULL)
31682 break;
31683 /* FALLTHRU */
31684 default:
31685 if (dwarf_strict)
31686 continue;
31687 break;
31688 }
31689 /* Create DW_TAG_variable that we can refer to. */
31690 gen_decl_die (decl, NULL_TREE, NULL,
31691 lookup_decl_die (current_function_decl));
31692 ref = lookup_decl_die (decl);
31693 if (ref)
31694 {
31695 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31696 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31697 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31698 }
31699 continue;
31700 }
31701 if (prev)
31702 {
31703 prev->dw_loc_next = l->expr;
31704 add_loc_descr (&prev->dw_loc_next, next);
31705 free_loc_descr (loc, NULL);
31706 next = prev->dw_loc_next;
31707 }
31708 else
31709 {
31710 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31711 add_loc_descr (&loc, next);
31712 next = loc;
31713 }
31714 loc = prev;
31715 }
31716 return false;
31717 }
31718
31719 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31720
31721 static void
31722 resolve_variable_value (dw_die_ref die)
31723 {
31724 dw_attr_node *a;
31725 dw_loc_list_ref loc;
31726 unsigned ix;
31727
31728 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31729 switch (AT_class (a))
31730 {
31731 case dw_val_class_loc:
31732 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
31733 break;
31734 /* FALLTHRU */
31735 case dw_val_class_loc_list:
31736 loc = AT_loc_list (a);
31737 gcc_assert (loc);
31738 for (; loc; loc = loc->dw_loc_next)
31739 resolve_variable_value_in_expr (a, loc->expr);
31740 break;
31741 default:
31742 break;
31743 }
31744 }
31745
31746 /* Attempt to optimize DW_OP_GNU_variable_value referring to
31747 temporaries in the current function. */
31748
31749 static void
31750 resolve_variable_values (void)
31751 {
31752 if (!variable_value_hash || !current_function_decl)
31753 return;
31754
31755 struct variable_value_struct *node
31756 = variable_value_hash->find_with_hash (current_function_decl,
31757 DECL_UID (current_function_decl));
31758
31759 if (node == NULL)
31760 return;
31761
31762 unsigned int i;
31763 dw_die_ref die;
31764 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
31765 resolve_variable_value (die);
31766 }
31767
31768 /* Helper function for note_variable_value, handle one location
31769 expression. */
31770
31771 static void
31772 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
31773 {
31774 for (; loc; loc = loc->dw_loc_next)
31775 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
31776 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
31777 {
31778 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31779 dw_die_ref ref = lookup_decl_die (decl);
31780 if (! ref && (flag_generate_lto || flag_generate_offload))
31781 {
31782 /* ??? This is somewhat of a hack because we do not create DIEs
31783 for variables not in BLOCK trees early, but when generating
31784 early LTO output we need the dw_val_class_decl_ref to be
31785 fully resolved. For fat LTO objects we'd also like to
31786 undo this after LTO dwarf output. */
31787 gcc_assert (DECL_CONTEXT (decl));
31788 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
31789 gcc_assert (ctx != NULL);
31790 gen_decl_die (decl, NULL_TREE, NULL, ctx);
31791 ref = lookup_decl_die (decl);
31792 gcc_assert (ref != NULL);
31793 }
31794 if (ref)
31795 {
31796 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31797 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31798 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31799 continue;
31800 }
31801 if (VAR_P (decl)
31802 && DECL_CONTEXT (decl)
31803 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
31804 && lookup_decl_die (DECL_CONTEXT (decl)))
31805 {
31806 if (!variable_value_hash)
31807 variable_value_hash
31808 = hash_table<variable_value_hasher>::create_ggc (10);
31809
31810 tree fndecl = DECL_CONTEXT (decl);
31811 struct variable_value_struct *node;
31812 struct variable_value_struct **slot
31813 = variable_value_hash->find_slot_with_hash (fndecl,
31814 DECL_UID (fndecl),
31815 INSERT);
31816 if (*slot == NULL)
31817 {
31818 node = ggc_cleared_alloc<variable_value_struct> ();
31819 node->decl_id = DECL_UID (fndecl);
31820 *slot = node;
31821 }
31822 else
31823 node = *slot;
31824
31825 vec_safe_push (node->dies, die);
31826 }
31827 }
31828 }
31829
31830 /* Walk the tree DIE and note DIEs with DW_OP_GNU_variable_value still
31831 with dw_val_class_decl_ref operand. */
31832
31833 static void
31834 note_variable_value (dw_die_ref die)
31835 {
31836 dw_die_ref c;
31837 dw_attr_node *a;
31838 dw_loc_list_ref loc;
31839 unsigned ix;
31840
31841 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31842 switch (AT_class (a))
31843 {
31844 case dw_val_class_loc_list:
31845 loc = AT_loc_list (a);
31846 gcc_assert (loc);
31847 if (!loc->noted_variable_value)
31848 {
31849 loc->noted_variable_value = 1;
31850 for (; loc; loc = loc->dw_loc_next)
31851 note_variable_value_in_expr (die, loc->expr);
31852 }
31853 break;
31854 case dw_val_class_loc:
31855 note_variable_value_in_expr (die, AT_loc (a));
31856 break;
31857 default:
31858 break;
31859 }
31860
31861 /* Mark children. */
31862 FOR_EACH_CHILD (die, c, note_variable_value (c));
31863 }
31864
31865 /* Perform any cleanups needed after the early debug generation pass
31866 has run. */
31867
31868 static void
31869 dwarf2out_early_finish (const char *filename)
31870 {
31871 set_early_dwarf s;
31872 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31873
31874 /* PCH might result in the DW_AT_producer string being restored from the
31875 header compilation, so always fill it with an empty string initially
31876 and overwrite it only here. */
31877 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
31878 producer_string = gen_producer_string ();
31879 producer->dw_attr_val.v.val_str->refcount--;
31880 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
31881
31882 /* Add the name for the main input file now. We delayed this from
31883 dwarf2out_init to avoid complications with PCH. */
31884 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
31885 add_comp_dir_attribute (comp_unit_die ());
31886
31887 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
31888 DW_AT_comp_dir into .debug_line_str section. */
31889 if (!output_asm_line_debug_info ()
31890 && dwarf_version >= 5
31891 && DWARF5_USE_DEBUG_LINE_STR)
31892 {
31893 for (int i = 0; i < 2; i++)
31894 {
31895 dw_attr_node *a = get_AT (comp_unit_die (),
31896 i ? DW_AT_comp_dir : DW_AT_name);
31897 if (a == NULL
31898 || AT_class (a) != dw_val_class_str
31899 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
31900 continue;
31901
31902 if (! debug_line_str_hash)
31903 debug_line_str_hash
31904 = hash_table<indirect_string_hasher>::create_ggc (10);
31905
31906 struct indirect_string_node *node
31907 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
31908 set_indirect_string (node);
31909 node->form = DW_FORM_line_strp;
31910 a->dw_attr_val.v.val_str->refcount--;
31911 a->dw_attr_val.v.val_str = node;
31912 }
31913 }
31914
31915 /* With LTO, early dwarf was really finished at compile time, so make
31916 sure to adjust the phase after annotating the LTRANS CU DIE. */
31917 if (in_lto_p)
31918 {
31919 /* Force DW_TAG_imported_unit to be created now, otherwise
31920 we might end up without it, or with it ordered after the
31921 DW_TAG_inlined_subroutine DIEs that reference DIEs from it. */
31922 if (! flag_wpa && flag_incremental_link != INCREMENTAL_LINK_LTO)
31923 {
31924 unsigned i;
31925 tree tu;
31926 if (external_die_map)
31927 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, tu)
31928 if (sym_off_pair *desc = external_die_map->get (tu))
31929 {
31930 dw_die_ref import = new_die (DW_TAG_imported_unit,
31931 comp_unit_die (), NULL_TREE);
31932 add_AT_external_die_ref (import, DW_AT_import,
31933 desc->sym, desc->off);
31934 }
31935 }
31936
31937 early_dwarf_finished = true;
31938 if (dump_file)
31939 {
31940 fprintf (dump_file, "LTO EARLY DWARF for %s\n", filename);
31941 print_die (comp_unit_die (), dump_file);
31942 }
31943 return;
31944 }
31945
31946 /* Walk through the list of incomplete types again, trying once more to
31947 emit full debugging info for them. */
31948 retry_incomplete_types ();
31949
31950 /* The point here is to flush out the limbo list so that it is empty
31951 and we don't need to stream it for LTO. */
31952 flush_limbo_die_list ();
31953
31954 gen_scheduled_generic_parms_dies ();
31955 gen_remaining_tmpl_value_param_die_attribute ();
31956
31957 /* Add DW_AT_linkage_name for all deferred DIEs. */
31958 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
31959 {
31960 tree decl = node->created_for;
31961 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
31962 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
31963 ended up in deferred_asm_name before we knew it was
31964 constant and never written to disk. */
31965 && DECL_ASSEMBLER_NAME (decl))
31966 {
31967 add_linkage_attr (node->die, decl);
31968 move_linkage_attr (node->die);
31969 }
31970 }
31971 deferred_asm_name = NULL;
31972
31973 if (flag_eliminate_unused_debug_types)
31974 prune_unused_types ();
31975
31976 /* Generate separate COMDAT sections for type DIEs. */
31977 if (use_debug_types)
31978 {
31979 break_out_comdat_types (comp_unit_die ());
31980
31981 /* Each new type_unit DIE was added to the limbo die list when created.
31982 Since these have all been added to comdat_type_list, clear the
31983 limbo die list. */
31984 limbo_die_list = NULL;
31985
31986 /* For each new comdat type unit, copy declarations for incomplete
31987 types to make the new unit self-contained (i.e., no direct
31988 references to the main compile unit). */
31989 for (comdat_type_node *ctnode = comdat_type_list;
31990 ctnode != NULL; ctnode = ctnode->next)
31991 copy_decls_for_unworthy_types (ctnode->root_die);
31992 copy_decls_for_unworthy_types (comp_unit_die ());
31993
31994 /* In the process of copying declarations from one unit to another,
31995 we may have left some declarations behind that are no longer
31996 referenced. Prune them. */
31997 prune_unused_types ();
31998 }
31999
32000 /* Traverse the DIE's and note DIEs with DW_OP_GNU_variable_value still
32001 with dw_val_class_decl_ref operand. */
32002 note_variable_value (comp_unit_die ());
32003 for (limbo_die_node *node = cu_die_list; node; node = node->next)
32004 note_variable_value (node->die);
32005 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
32006 ctnode = ctnode->next)
32007 note_variable_value (ctnode->root_die);
32008 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32009 note_variable_value (node->die);
32010
32011 /* The AT_pubnames attribute needs to go in all skeleton dies, including
32012 both the main_cu and all skeleton TUs. Making this call unconditional
32013 would end up either adding a second copy of the AT_pubnames attribute, or
32014 requiring a special case in add_top_level_skeleton_die_attrs. */
32015 if (!dwarf_split_debug_info)
32016 add_AT_pubnames (comp_unit_die ());
32017
32018 /* The early debug phase is now finished. */
32019 early_dwarf_finished = true;
32020 if (dump_file)
32021 {
32022 fprintf (dump_file, "EARLY DWARF for %s\n", filename);
32023 print_die (comp_unit_die (), dump_file);
32024 }
32025
32026 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
32027 if ((!flag_generate_lto && !flag_generate_offload)
32028 /* FIXME: Disable debug info generation for (PE-)COFF targets since the
32029 copy_lto_debug_sections operation of the simple object support in
32030 libiberty is not implemented for them yet. */
32031 || TARGET_PECOFF || TARGET_COFF)
32032 return;
32033
32034 /* Now that we are going to output for LTO, initialize sections and labels
32035 to the LTO variants. We don't need a random-seed postfix like other
32036 LTO sections have, as linking the LTO debug sections into one in a partial
32037 link is fine. */
32038 init_sections_and_labels (true);
32039
32040 /* The output below is modeled after dwarf2out_finish with all
32041 location related output removed and some LTO specific changes.
32042 Some refactoring might make both smaller and easier to match up. */
32043
32044 /* Traverse the DIE's and add sibling attributes to those DIE's
32045 that have children. */
32046 add_sibling_attributes (comp_unit_die ());
32047 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32048 add_sibling_attributes (node->die);
32049 for (comdat_type_node *ctnode = comdat_type_list;
32050 ctnode != NULL; ctnode = ctnode->next)
32051 add_sibling_attributes (ctnode->root_die);
32052
32053 /* AIX Assembler inserts the length, so adjust the reference to match the
32054 offset expected by debuggers. */
32055 strcpy (dl_section_ref, debug_line_section_label);
32056 if (XCOFF_DEBUGGING_INFO)
32057 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
32058
32059 if (debug_info_level >= DINFO_LEVEL_TERSE)
32060 add_AT_lineptr (comp_unit_die (), DW_AT_stmt_list, dl_section_ref);
32061
32062 if (have_macinfo)
32063 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
32064 macinfo_section_label);
32065
32066 save_macinfo_strings ();
32067
32068 if (dwarf_split_debug_info)
32069 {
32070 unsigned int index = 0;
32071 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
32072 }
32073
32074 /* Output all of the compilation units. We put the main one last so that
32075 the offsets are available to output_pubnames. */
32076 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32077 output_comp_unit (node->die, 0, NULL);
32078
32079 hash_table<comdat_type_hasher> comdat_type_table (100);
32080 for (comdat_type_node *ctnode = comdat_type_list;
32081 ctnode != NULL; ctnode = ctnode->next)
32082 {
32083 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
32084
32085 /* Don't output duplicate types. */
32086 if (*slot != HTAB_EMPTY_ENTRY)
32087 continue;
32088
32089 /* Add a pointer to the line table for the main compilation unit
32090 so that the debugger can make sense of DW_AT_decl_file
32091 attributes. */
32092 if (debug_info_level >= DINFO_LEVEL_TERSE)
32093 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
32094 (!dwarf_split_debug_info
32095 ? debug_line_section_label
32096 : debug_skeleton_line_section_label));
32097
32098 output_comdat_type_unit (ctnode);
32099 *slot = ctnode;
32100 }
32101
32102 /* Stick a unique symbol to the main debuginfo section. */
32103 compute_comp_unit_symbol (comp_unit_die ());
32104
32105 /* Output the main compilation unit. We always need it if only for
32106 the CU symbol. */
32107 output_comp_unit (comp_unit_die (), true, NULL);
32108
32109 /* Output the abbreviation table. */
32110 if (vec_safe_length (abbrev_die_table) != 1)
32111 {
32112 switch_to_section (debug_abbrev_section);
32113 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
32114 output_abbrev_section ();
32115 }
32116
32117 /* Have to end the macro section. */
32118 if (have_macinfo)
32119 {
32120 /* We have to save macinfo state if we need to output it again
32121 for the FAT part of the object. */
32122 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
32123 if (flag_fat_lto_objects)
32124 macinfo_table = macinfo_table->copy ();
32125
32126 switch_to_section (debug_macinfo_section);
32127 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
32128 output_macinfo (debug_line_section_label, true);
32129 dw2_asm_output_data (1, 0, "End compilation unit");
32130
32131 if (flag_fat_lto_objects)
32132 {
32133 vec_free (macinfo_table);
32134 macinfo_table = saved_macinfo_table;
32135 }
32136 }
32137
32138 /* Emit a skeleton debug_line section. */
32139 switch_to_section (debug_line_section);
32140 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
32141 output_line_info (true);
32142
32143 /* If we emitted any indirect strings, output the string table too. */
32144 if (debug_str_hash || skeleton_debug_str_hash)
32145 output_indirect_strings ();
32146 if (debug_line_str_hash)
32147 {
32148 switch_to_section (debug_line_str_section);
32149 const enum dwarf_form form = DW_FORM_line_strp;
32150 debug_line_str_hash->traverse<enum dwarf_form,
32151 output_indirect_string> (form);
32152 }
32153
32154 /* Switch back to the text section. */
32155 switch_to_section (text_section);
32156 }
32157
32158 /* Reset all state within dwarf2out.c so that we can rerun the compiler
32159 within the same process. For use by toplev::finalize. */
32160
32161 void
32162 dwarf2out_c_finalize (void)
32163 {
32164 last_var_location_insn = NULL;
32165 cached_next_real_insn = NULL;
32166 used_rtx_array = NULL;
32167 incomplete_types = NULL;
32168 debug_info_section = NULL;
32169 debug_skeleton_info_section = NULL;
32170 debug_abbrev_section = NULL;
32171 debug_skeleton_abbrev_section = NULL;
32172 debug_aranges_section = NULL;
32173 debug_addr_section = NULL;
32174 debug_macinfo_section = NULL;
32175 debug_line_section = NULL;
32176 debug_skeleton_line_section = NULL;
32177 debug_loc_section = NULL;
32178 debug_pubnames_section = NULL;
32179 debug_pubtypes_section = NULL;
32180 debug_str_section = NULL;
32181 debug_line_str_section = NULL;
32182 debug_str_dwo_section = NULL;
32183 debug_str_offsets_section = NULL;
32184 debug_ranges_section = NULL;
32185 debug_frame_section = NULL;
32186 fde_vec = NULL;
32187 debug_str_hash = NULL;
32188 debug_line_str_hash = NULL;
32189 skeleton_debug_str_hash = NULL;
32190 dw2_string_counter = 0;
32191 have_multiple_function_sections = false;
32192 text_section_used = false;
32193 cold_text_section_used = false;
32194 cold_text_section = NULL;
32195 current_unit_personality = NULL;
32196
32197 early_dwarf = false;
32198 early_dwarf_finished = false;
32199
32200 next_die_offset = 0;
32201 single_comp_unit_die = NULL;
32202 comdat_type_list = NULL;
32203 limbo_die_list = NULL;
32204 file_table = NULL;
32205 decl_die_table = NULL;
32206 common_block_die_table = NULL;
32207 decl_loc_table = NULL;
32208 call_arg_locations = NULL;
32209 call_arg_loc_last = NULL;
32210 call_site_count = -1;
32211 tail_call_site_count = -1;
32212 cached_dw_loc_list_table = NULL;
32213 abbrev_die_table = NULL;
32214 delete dwarf_proc_stack_usage_map;
32215 dwarf_proc_stack_usage_map = NULL;
32216 line_info_label_num = 0;
32217 cur_line_info_table = NULL;
32218 text_section_line_info = NULL;
32219 cold_text_section_line_info = NULL;
32220 separate_line_info = NULL;
32221 info_section_emitted = false;
32222 pubname_table = NULL;
32223 pubtype_table = NULL;
32224 macinfo_table = NULL;
32225 ranges_table = NULL;
32226 ranges_by_label = NULL;
32227 rnglist_idx = 0;
32228 have_location_lists = false;
32229 loclabel_num = 0;
32230 poc_label_num = 0;
32231 last_emitted_file = NULL;
32232 label_num = 0;
32233 tmpl_value_parm_die_table = NULL;
32234 generic_type_instances = NULL;
32235 frame_pointer_fb_offset = 0;
32236 frame_pointer_fb_offset_valid = false;
32237 base_types.release ();
32238 XDELETEVEC (producer_string);
32239 producer_string = NULL;
32240 }
32241
32242 #include "gt-dwarf2out.h"