1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2018 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47 information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
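/* For illustration (target dependent): on a target whose call instruction
   pushes the return address, the CFA seen at the first instruction of a
   callee is SP + the size of a pointer; the CIE's initial CFI ops
   establish that rule, and each FDE's ops then update the CFA
   register/offset as the prologue adjusts SP or sets up a frame
   pointer.  */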
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105 static bool is_redundant_typedef (const_tree);
106
107 #ifndef XCOFF_DEBUGGING_INFO
108 #define XCOFF_DEBUGGING_INFO 0
109 #endif
110
111 #ifndef HAVE_XCOFF_DWARF_EXTRAS
112 #define HAVE_XCOFF_DWARF_EXTRAS 0
113 #endif
114
115 #ifdef VMS_DEBUGGING_INFO
116 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
117
118 /* Define this macro to be a nonzero value if the directory specifications
119 which are output in the debug info should end with a separator. */
120 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
121 /* Define this macro to evaluate to a nonzero value if GCC should refrain
122 from generating indirect strings in DWARF2 debug information, for instance
123 if your target is stuck with an old version of GDB that is unable to
124 process them properly or uses VMS Debug. */
125 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
126 #else
127 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
128 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
129 #endif
130
131 /* ??? Poison these here until it can be done generically. They've been
132 totally replaced in this file; make sure it stays that way. */
133 #undef DWARF2_UNWIND_INFO
134 #undef DWARF2_FRAME_INFO
135 #if (GCC_VERSION >= 3000)
136 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
137 #endif
138
139 /* The size of the target's pointer type. */
140 #ifndef PTR_SIZE
141 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
142 #endif
143
144 /* Array of RTXes referenced by the debugging information, which therefore
145 must be kept around forever. */
146 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
147
148 /* A pointer to the base of a list of incomplete types which might be
149 completed at some later time. incomplete_types needs to be a
150 vec<tree, va_gc> * because we want to tell the garbage collector
151 about it. */
152 static GTY(()) vec<tree, va_gc> *incomplete_types;
153
154 /* Pointers to various DWARF2 sections. */
155 static GTY(()) section *debug_info_section;
156 static GTY(()) section *debug_skeleton_info_section;
157 static GTY(()) section *debug_abbrev_section;
158 static GTY(()) section *debug_skeleton_abbrev_section;
159 static GTY(()) section *debug_aranges_section;
160 static GTY(()) section *debug_addr_section;
161 static GTY(()) section *debug_macinfo_section;
162 static const char *debug_macinfo_section_name;
163 static unsigned macinfo_label_base = 1;
164 static GTY(()) section *debug_line_section;
165 static GTY(()) section *debug_skeleton_line_section;
166 static GTY(()) section *debug_loc_section;
167 static GTY(()) section *debug_pubnames_section;
168 static GTY(()) section *debug_pubtypes_section;
169 static GTY(()) section *debug_str_section;
170 static GTY(()) section *debug_line_str_section;
171 static GTY(()) section *debug_str_dwo_section;
172 static GTY(()) section *debug_str_offsets_section;
173 static GTY(()) section *debug_ranges_section;
174 static GTY(()) section *debug_frame_section;
175
176 /* Maximum size (in bytes) of an artificially generated label. */
177 #define MAX_ARTIFICIAL_LABEL_BYTES 40
178
179 /* According to the (draft) DWARF 3 specification, the initial length
180 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
181 bytes are 0xffffffff, followed by the length stored in the next 8
182 bytes.
183
184 However, the SGI/MIPS ABI uses an initial length which is equal to
185 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
186
187 #ifndef DWARF_INITIAL_LENGTH_SIZE
188 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
189 #endif
190
191 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
192 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
193 #endif
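/* A minimal, illustrative sketch (kept out of the build) of how the
   initial length described above is emitted: for 64-bit DWARF the
   0xffffffff escape precedes the real length.  It simply mirrors the
   dw2_asm_output_* calls used later in this file; the label parameters
   are hypothetical.  */
#if 0
static void
example_output_initial_length (const char *end_label, const char *begin_label)
{
  if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
    dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
                         " indicating 64-bit DWARF extension");
  dw2_asm_output_delta (DWARF_OFFSET_SIZE, end_label, begin_label, "Length");
}
#endif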
194
195 /* Round SIZE up to the nearest BOUNDARY. */
196 #define DWARF_ROUND(SIZE,BOUNDARY) \
197 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
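/* Worked examples (illustrative only):
     DWARF_ROUND (0, 4)  == 0
     DWARF_ROUND (5, 4)  == 8
     DWARF_ROUND (12, 8) == 16  */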
198
199 /* CIE identifier. */
200 #if HOST_BITS_PER_WIDE_INT >= 64
201 #define DWARF_CIE_ID \
202 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
203 #else
204 #define DWARF_CIE_ID DW_CIE_ID
205 #endif
206
207
208 /* A vector for a table that contains frame description
209 information for each routine. */
210 #define NOT_INDEXED (-1U)
211 #define NO_INDEX_ASSIGNED (-2U)
212
213 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
214
215 struct GTY((for_user)) indirect_string_node {
216 const char *str;
217 unsigned int refcount;
218 enum dwarf_form form;
219 char *label;
220 unsigned int index;
221 };
222
223 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
224 {
225 typedef const char *compare_type;
226
227 static hashval_t hash (indirect_string_node *);
228 static bool equal (indirect_string_node *, const char *);
229 };
230
231 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
232
233 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
234
235 /* With split_debug_info, both the comp_dir and dwo_name go in the
236 main object file, rather than the dwo, similar to the force_direct
237 parameter elsewhere but with additional complications:
238
239 1) The string is needed in both the main object file and the dwo.
240 That is, the comp_dir and dwo_name will appear in both places.
241
242 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
243 DW_FORM_line_strp or DW_FORM_strx/GNU_str_index.
244
245 3) GCC chooses the form to use late, depending on the size and
246 reference count.
247
248 Rather than forcing all the debug string handling functions and
249 callers to deal with these complications, simply use a separate,
250 special-cased string table for any attribute that should go in the
251 main object file. This limits the complexity to just the places
252 that need it. */
253
254 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
255
256 static GTY(()) int dw2_string_counter;
257
258 /* True if the compilation unit places functions in more than one section. */
259 static GTY(()) bool have_multiple_function_sections = false;
260
261 /* Whether the default text and cold text sections have been used at all. */
262 static GTY(()) bool text_section_used = false;
263 static GTY(()) bool cold_text_section_used = false;
264
265 /* The default cold text section. */
266 static GTY(()) section *cold_text_section;
267
268 /* The DIE for C++14 'auto' in a function return type. */
269 static GTY(()) dw_die_ref auto_die;
270
271 /* The DIE for C++14 'decltype(auto)' in a function return type. */
272 static GTY(()) dw_die_ref decltype_auto_die;
273
274 /* Forward declarations for functions defined in this file. */
275
276 static void output_call_frame_info (int);
277 static void dwarf2out_note_section_used (void);
278
279 /* Personality decl of current unit. Used only when assembler does not support
280 personality CFI. */
281 static GTY(()) rtx current_unit_personality;
282
283 /* Whether an eh_frame section is required. */
284 static GTY(()) bool do_eh_frame = false;
285
286 /* .debug_rnglists next index. */
287 static unsigned int rnglist_idx;
288
289 /* Data and reference forms for relocatable data. */
290 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
291 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
292
293 #ifndef DEBUG_FRAME_SECTION
294 #define DEBUG_FRAME_SECTION ".debug_frame"
295 #endif
296
297 #ifndef FUNC_BEGIN_LABEL
298 #define FUNC_BEGIN_LABEL "LFB"
299 #endif
300
301 #ifndef FUNC_SECOND_SECT_LABEL
302 #define FUNC_SECOND_SECT_LABEL "LFSB"
303 #endif
304
305 #ifndef FUNC_END_LABEL
306 #define FUNC_END_LABEL "LFE"
307 #endif
308
309 #ifndef PROLOGUE_END_LABEL
310 #define PROLOGUE_END_LABEL "LPE"
311 #endif
312
313 #ifndef EPILOGUE_BEGIN_LABEL
314 #define EPILOGUE_BEGIN_LABEL "LEB"
315 #endif
316
317 #ifndef FRAME_BEGIN_LABEL
318 #define FRAME_BEGIN_LABEL "Lframe"
319 #endif
320 #define CIE_AFTER_SIZE_LABEL "LSCIE"
321 #define CIE_END_LABEL "LECIE"
322 #define FDE_LABEL "LSFDE"
323 #define FDE_AFTER_SIZE_LABEL "LASFDE"
324 #define FDE_END_LABEL "LEFDE"
325 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
326 #define LINE_NUMBER_END_LABEL "LELT"
327 #define LN_PROLOG_AS_LABEL "LASLTP"
328 #define LN_PROLOG_END_LABEL "LELTP"
329 #define DIE_LABEL_PREFIX "DW"
330 \f
331 /* Match the base name of a file to the base name of a compilation unit. */
332
333 static int
334 matches_main_base (const char *path)
335 {
336 /* Cache the last query. */
337 static const char *last_path = NULL;
338 static int last_match = 0;
339 if (path != last_path)
340 {
341 const char *base;
342 int length = base_of_path (path, &base);
343 last_path = path;
344 last_match = (length == main_input_baselength
345 && memcmp (base, main_input_basename, length) == 0);
346 }
347 return last_match;
348 }
349
350 #ifdef DEBUG_DEBUG_STRUCT
351
352 static int
353 dump_struct_debug (tree type, enum debug_info_usage usage,
354 enum debug_struct_file criterion, int generic,
355 int matches, int result)
356 {
357 /* Find the type name. */
358 tree type_decl = TYPE_STUB_DECL (type);
359 tree t = type_decl;
360 const char *name = 0;
361 if (TREE_CODE (t) == TYPE_DECL)
362 t = DECL_NAME (t);
363 if (t)
364 name = IDENTIFIER_POINTER (t);
365
366 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
367 criterion,
368 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
369 matches ? "bas" : "hdr",
370 generic ? "gen" : "ord",
371 usage == DINFO_USAGE_DFN ? ";" :
372 usage == DINFO_USAGE_DIR_USE ? "." : "*",
373 result,
374 (void*) type_decl, name);
375 return result;
376 }
377 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
378 dump_struct_debug (type, usage, criterion, generic, matches, result)
379
380 #else
381
382 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
383 (result)
384
385 #endif
386
387 /* Get the number of HOST_WIDE_INTs needed to represent the precision
388 of the number. Some constants have a large uniform precision, so
389 we get the precision needed for the actual value of the number. */
390
391 static unsigned int
392 get_full_len (const wide_int &op)
393 {
394 int prec = wi::min_precision (op, UNSIGNED);
395 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
396 / HOST_BITS_PER_WIDE_INT);
397 }
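/* For example (illustrative): with HOST_BITS_PER_WIDE_INT == 64, a value
   whose minimum unsigned precision is 1..64 bits needs one HOST_WIDE_INT
   and one of 65..128 bits needs two, i.e. ceil (prec / 64).  */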
398
399 static bool
400 should_emit_struct_debug (tree type, enum debug_info_usage usage)
401 {
402 enum debug_struct_file criterion;
403 tree type_decl;
404 bool generic = lang_hooks.types.generic_p (type);
405
406 if (generic)
407 criterion = debug_struct_generic[usage];
408 else
409 criterion = debug_struct_ordinary[usage];
410
411 if (criterion == DINFO_STRUCT_FILE_NONE)
412 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
413 if (criterion == DINFO_STRUCT_FILE_ANY)
414 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
415
416 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
417
418 if (type_decl != NULL)
419 {
420 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
421 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
422
423 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
424 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
425 }
426
427 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
428 }
429 \f
430 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
431 switch to the data section instead, and write out a synthetic start label
432 for collect2 the first time around. */
433
434 static void
435 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
436 {
437 if (eh_frame_section == 0)
438 {
439 int flags;
440
441 if (EH_TABLES_CAN_BE_READ_ONLY)
442 {
443 int fde_encoding;
444 int per_encoding;
445 int lsda_encoding;
446
447 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
448 /*global=*/0);
449 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
450 /*global=*/1);
451 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
452 /*global=*/0);
453 flags = ((! flag_pic
454 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
455 && (fde_encoding & 0x70) != DW_EH_PE_aligned
456 && (per_encoding & 0x70) != DW_EH_PE_absptr
457 && (per_encoding & 0x70) != DW_EH_PE_aligned
458 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
459 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
460 ? 0 : SECTION_WRITE);
461 }
462 else
463 flags = SECTION_WRITE;
464
465 #ifdef EH_FRAME_SECTION_NAME
466 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
467 #else
468 eh_frame_section = ((flags == SECTION_WRITE)
469 ? data_section : readonly_data_section);
470 #endif /* EH_FRAME_SECTION_NAME */
471 }
472
473 switch_to_section (eh_frame_section);
474
475 #ifdef EH_FRAME_THROUGH_COLLECT2
476 /* We have no special eh_frame section. Emit special labels to guide
477 collect2. */
478 if (!back)
479 {
480 tree label = get_file_function_name ("F");
481 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
482 targetm.asm_out.globalize_label (asm_out_file,
483 IDENTIFIER_POINTER (label));
484 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
485 }
486 #endif
487 }
488
489 /* Switch [BACK] to the eh or debug frame table section, depending on
490 FOR_EH. */
491
492 static void
493 switch_to_frame_table_section (int for_eh, bool back)
494 {
495 if (for_eh)
496 switch_to_eh_frame_section (back);
497 else
498 {
499 if (!debug_frame_section)
500 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
501 SECTION_DEBUG, NULL);
502 switch_to_section (debug_frame_section);
503 }
504 }
505
506 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
507
508 enum dw_cfi_oprnd_type
509 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
510 {
511 switch (cfi)
512 {
513 case DW_CFA_nop:
514 case DW_CFA_GNU_window_save:
515 case DW_CFA_remember_state:
516 case DW_CFA_restore_state:
517 return dw_cfi_oprnd_unused;
518
519 case DW_CFA_set_loc:
520 case DW_CFA_advance_loc1:
521 case DW_CFA_advance_loc2:
522 case DW_CFA_advance_loc4:
523 case DW_CFA_MIPS_advance_loc8:
524 return dw_cfi_oprnd_addr;
525
526 case DW_CFA_offset:
527 case DW_CFA_offset_extended:
528 case DW_CFA_def_cfa:
529 case DW_CFA_offset_extended_sf:
530 case DW_CFA_def_cfa_sf:
531 case DW_CFA_restore:
532 case DW_CFA_restore_extended:
533 case DW_CFA_undefined:
534 case DW_CFA_same_value:
535 case DW_CFA_def_cfa_register:
536 case DW_CFA_register:
537 case DW_CFA_expression:
538 case DW_CFA_val_expression:
539 return dw_cfi_oprnd_reg_num;
540
541 case DW_CFA_def_cfa_offset:
542 case DW_CFA_GNU_args_size:
543 case DW_CFA_def_cfa_offset_sf:
544 return dw_cfi_oprnd_offset;
545
546 case DW_CFA_def_cfa_expression:
547 return dw_cfi_oprnd_loc;
548
549 default:
550 gcc_unreachable ();
551 }
552 }
553
554 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
555
556 enum dw_cfi_oprnd_type
557 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
558 {
559 switch (cfi)
560 {
561 case DW_CFA_def_cfa:
562 case DW_CFA_def_cfa_sf:
563 case DW_CFA_offset:
564 case DW_CFA_offset_extended_sf:
565 case DW_CFA_offset_extended:
566 return dw_cfi_oprnd_offset;
567
568 case DW_CFA_register:
569 return dw_cfi_oprnd_reg_num;
570
571 case DW_CFA_expression:
572 case DW_CFA_val_expression:
573 return dw_cfi_oprnd_loc;
574
575 case DW_CFA_def_cfa_expression:
576 return dw_cfi_oprnd_cfa_loc;
577
578 default:
579 return dw_cfi_oprnd_unused;
580 }
581 }
582
583 /* Output one FDE. */
584
585 static void
586 output_fde (dw_fde_ref fde, bool for_eh, bool second,
587 char *section_start_label, int fde_encoding, char *augmentation,
588 bool any_lsda_needed, int lsda_encoding)
589 {
590 const char *begin, *end;
591 static unsigned int j;
592 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
593
594 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
595 /* empty */ 0);
596 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
597 for_eh + j);
598 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
599 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
600 if (!XCOFF_DEBUGGING_INFO || for_eh)
601 {
602 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
603 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
604 " indicating 64-bit DWARF extension");
605 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
606 "FDE Length");
607 }
608 ASM_OUTPUT_LABEL (asm_out_file, l1);
609
610 if (for_eh)
611 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
612 else
613 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
614 debug_frame_section, "FDE CIE offset");
615
616 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
617 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
618
619 if (for_eh)
620 {
621 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
622 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
623 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
624 "FDE initial location");
625 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
626 end, begin, "FDE address range");
627 }
628 else
629 {
630 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
631 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
632 }
633
634 if (augmentation[0])
635 {
636 if (any_lsda_needed)
637 {
638 int size = size_of_encoded_value (lsda_encoding);
639
640 if (lsda_encoding == DW_EH_PE_aligned)
641 {
642 int offset = ( 4 /* Length */
643 + 4 /* CIE offset */
644 + 2 * size_of_encoded_value (fde_encoding)
645 + 1 /* Augmentation size */ );
646 int pad = -offset & (PTR_SIZE - 1);
647
648 size += pad;
649 gcc_assert (size_of_uleb128 (size) == 1);
650 }
651
652 dw2_asm_output_data_uleb128 (size, "Augmentation size");
653
654 if (fde->uses_eh_lsda)
655 {
656 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
657 fde->funcdef_number);
658 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
659 gen_rtx_SYMBOL_REF (Pmode, l1),
660 false,
661 "Language Specific Data Area");
662 }
663 else
664 {
665 if (lsda_encoding == DW_EH_PE_aligned)
666 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
667 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
668 "Language Specific Data Area (none)");
669 }
670 }
671 else
672 dw2_asm_output_data_uleb128 (0, "Augmentation size");
673 }
674
675 /* Loop through the Call Frame Instructions associated with this FDE. */
676 fde->dw_fde_current_label = begin;
677 {
678 size_t from, until, i;
679
680 from = 0;
681 until = vec_safe_length (fde->dw_fde_cfi);
682
683 if (fde->dw_fde_second_begin == NULL)
684 ;
685 else if (!second)
686 until = fde->dw_fde_switch_cfi_index;
687 else
688 from = fde->dw_fde_switch_cfi_index;
689
690 for (i = from; i < until; i++)
691 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
692 }
693
694 /* If we are to emit a ref/link from function bodies to their frame tables,
695 do it now. This is typically performed to make sure that tables
696 associated with functions are dragged with them and not discarded in
697 garbage collecting links. We need to do this on a per function basis to
698 cope with -ffunction-sections. */
699
700 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
701 /* Switch to the function section, emit the ref to the tables, and
702 switch *back* into the table section. */
703 switch_to_section (function_section (fde->decl));
704 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
705 switch_to_frame_table_section (for_eh, true);
706 #endif
707
708 /* Pad the FDE out to an address sized boundary. */
709 ASM_OUTPUT_ALIGN (asm_out_file,
710 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
711 ASM_OUTPUT_LABEL (asm_out_file, l2);
712
713 j += 2;
714 }
715
716 /* Return true if frame description entry FDE is needed for EH. */
717
718 static bool
719 fde_needed_for_eh_p (dw_fde_ref fde)
720 {
721 if (flag_asynchronous_unwind_tables)
722 return true;
723
724 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
725 return true;
726
727 if (fde->uses_eh_lsda)
728 return true;
729
730 /* If exceptions are enabled, we have collected nothrow info. */
731 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
732 return false;
733
734 return true;
735 }
736
737 /* Output the call frame information used to record how the frame
738 pointer is calculated and where the saved registers are
739 located. */
740
741 static void
742 output_call_frame_info (int for_eh)
743 {
744 unsigned int i;
745 dw_fde_ref fde;
746 dw_cfi_ref cfi;
747 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
748 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
749 bool any_lsda_needed = false;
750 char augmentation[6];
751 int augmentation_size;
752 int fde_encoding = DW_EH_PE_absptr;
753 int per_encoding = DW_EH_PE_absptr;
754 int lsda_encoding = DW_EH_PE_absptr;
755 int return_reg;
756 rtx personality = NULL;
757 int dw_cie_version;
758
759 /* Don't emit a CIE if there won't be any FDEs. */
760 if (!fde_vec)
761 return;
762
763 /* Nothing to do if the assembler's doing it all. */
764 if (dwarf2out_do_cfi_asm ())
765 return;
766
767 /* If we don't have any functions we'll want to unwind out of, don't emit
768 any EH unwind information. If we make FDEs linkonce, we may have to
769 emit an empty label for an FDE that wouldn't otherwise be emitted. We
770 want to avoid having an FDE kept around when the function it refers to
771 is discarded. Example where this matters: a primary function template
772 in C++ requires EH information, an explicit specialization doesn't. */
773 if (for_eh)
774 {
775 bool any_eh_needed = false;
776
777 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
778 {
779 if (fde->uses_eh_lsda)
780 any_eh_needed = any_lsda_needed = true;
781 else if (fde_needed_for_eh_p (fde))
782 any_eh_needed = true;
783 else if (TARGET_USES_WEAK_UNWIND_INFO)
784 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
785 }
786
787 if (!any_eh_needed)
788 return;
789 }
790
791 /* We're going to be generating comments, so turn on app. */
792 if (flag_debug_asm)
793 app_enable ();
794
795 /* Switch to the proper frame section, first time. */
796 switch_to_frame_table_section (for_eh, false);
797
798 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
799 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
800
801 /* Output the CIE. */
802 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
803 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
804 if (!XCOFF_DEBUGGING_INFO || for_eh)
805 {
806 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
807 dw2_asm_output_data (4, 0xffffffff,
808 "Initial length escape value indicating 64-bit DWARF extension");
809 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
810 "Length of Common Information Entry");
811 }
812 ASM_OUTPUT_LABEL (asm_out_file, l1);
813
814 /* Now that the CIE pointer is PC-relative for EH,
815 use 0 to identify the CIE. */
816 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
817 (for_eh ? 0 : DWARF_CIE_ID),
818 "CIE Identifier Tag");
819
820 /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to
821 use CIE version 1, unless that would produce incorrect results
822 due to overflowing the return register column. */
823 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
824 dw_cie_version = 1;
825 if (return_reg >= 256 || dwarf_version > 2)
826 dw_cie_version = 3;
827 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
828
829 augmentation[0] = 0;
830 augmentation_size = 0;
831
832 personality = current_unit_personality;
833 if (for_eh)
834 {
835 char *p;
836
837 /* Augmentation:
838 z Indicates that a uleb128 is present to size the
839 augmentation section.
840 L Indicates the encoding (and thus presence) of
841 an LSDA pointer in the FDE augmentation.
842 R Indicates a non-default pointer encoding for
843 FDE code pointers.
844 P Indicates the presence of an encoding + language
845 personality routine in the CIE augmentation. */
846
847 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
848 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
849 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
850
851 p = augmentation + 1;
852 if (personality)
853 {
854 *p++ = 'P';
855 augmentation_size += 1 + size_of_encoded_value (per_encoding);
856 assemble_external_libcall (personality);
857 }
858 if (any_lsda_needed)
859 {
860 *p++ = 'L';
861 augmentation_size += 1;
862 }
863 if (fde_encoding != DW_EH_PE_absptr)
864 {
865 *p++ = 'R';
866 augmentation_size += 1;
867 }
868 if (p > augmentation + 1)
869 {
870 augmentation[0] = 'z';
871 *p = '\0';
872 }
873
874 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
875 if (personality && per_encoding == DW_EH_PE_aligned)
876 {
877 int offset = ( 4 /* Length */
878 + 4 /* CIE Id */
879 + 1 /* CIE version */
880 + strlen (augmentation) + 1 /* Augmentation */
881 + size_of_uleb128 (1) /* Code alignment */
882 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
883 + 1 /* RA column */
884 + 1 /* Augmentation size */
885 + 1 /* Personality encoding */ );
886 int pad = -offset & (PTR_SIZE - 1);
887
888 augmentation_size += pad;
889
890 /* Augmentations should be small, so there's scarce need to
891 iterate for a solution. Die if we exceed one uleb128 byte. */
892 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
893 }
894 }
895
896 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
897 if (dw_cie_version >= 4)
898 {
899 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
900 dw2_asm_output_data (1, 0, "CIE Segment Size");
901 }
902 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
903 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
904 "CIE Data Alignment Factor");
905
906 if (dw_cie_version == 1)
907 dw2_asm_output_data (1, return_reg, "CIE RA Column");
908 else
909 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
910
911 if (augmentation[0])
912 {
913 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
914 if (personality)
915 {
916 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
917 eh_data_format_name (per_encoding));
918 dw2_asm_output_encoded_addr_rtx (per_encoding,
919 personality,
920 true, NULL);
921 }
922
923 if (any_lsda_needed)
924 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
925 eh_data_format_name (lsda_encoding));
926
927 if (fde_encoding != DW_EH_PE_absptr)
928 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
929 eh_data_format_name (fde_encoding));
930 }
931
932 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
933 output_cfi (cfi, NULL, for_eh);
934
935 /* Pad the CIE out to an address sized boundary. */
936 ASM_OUTPUT_ALIGN (asm_out_file,
937 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
938 ASM_OUTPUT_LABEL (asm_out_file, l2);
939
940 /* Loop through all of the FDE's. */
941 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
942 {
943 unsigned int k;
944
945 /* Don't emit EH unwind info for leaf functions that don't need it. */
946 if (for_eh && !fde_needed_for_eh_p (fde))
947 continue;
948
949 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
950 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
951 augmentation, any_lsda_needed, lsda_encoding);
952 }
953
954 if (for_eh && targetm.terminate_dw2_eh_frame_info)
955 dw2_asm_output_data (4, 0, "End of Table");
956
957 /* Turn off app to make assembly quicker. */
958 if (flag_debug_asm)
959 app_disable ();
960 }
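/* For reference (illustrative; the exact values are target and option
   dependent): for the EH case the function above emits a CIE shaped like

     length
     CIE id              0
     version             1 or 3
     augmentation        "", "zR", "zPLR", ... depending on personality/LSDA
     code alignment      uleb128 1
     data alignment      sleb128 DWARF_CIE_DATA_ALIGNMENT
     return address col  DWARF_FRAME_RETURN_COLUMN (mapped for EH)
     augmentation data   personality / LSDA / FDE encodings, as selected
     initial CFI ops     from cie_cfi_vec
     padding             to an address-sized boundary

   followed by one FDE per function (see output_fde above).  */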
961
962 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
963
964 static void
965 dwarf2out_do_cfi_startproc (bool second)
966 {
967 int enc;
968 rtx ref;
969
970 fprintf (asm_out_file, "\t.cfi_startproc\n");
971
972 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
973 eh unwinders. */
974 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
975 return;
976
977 rtx personality = get_personality_function (current_function_decl);
978
979 if (personality)
980 {
981 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
982 ref = personality;
983
984 /* ??? The GAS support isn't entirely consistent. We have to
985 handle indirect support ourselves, but PC-relative is done
986 in the assembler. Further, the assembler can't handle any
987 of the weirder relocation types. */
988 if (enc & DW_EH_PE_indirect)
989 ref = dw2_force_const_mem (ref, true);
990
991 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
992 output_addr_const (asm_out_file, ref);
993 fputc ('\n', asm_out_file);
994 }
995
996 if (crtl->uses_eh_lsda)
997 {
998 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
999
1000 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
1001 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
1002 current_function_funcdef_no);
1003 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
1004 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1005
1006 if (enc & DW_EH_PE_indirect)
1007 ref = dw2_force_const_mem (ref, true);
1008
1009 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1010 output_addr_const (asm_out_file, ref);
1011 fputc ('\n', asm_out_file);
1012 }
1013 }
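/* Illustrative only (the encodings and symbol names are target
   dependent): for a function with both a personality routine and an
   LSDA, the directives emitted above typically look like

     .cfi_startproc
     .cfi_personality 0x9b,DW.ref.__gxx_personality_v0
     .cfi_lsda 0x1b,.LLSDA1234

   where 0x9b and 0x1b are the DW_EH_PE_* encodings chosen by
   ASM_PREFERRED_EH_DATA_FORMAT and .LLSDA1234 stands for the label built
   from the function's funcdef number.  */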
1014
1015 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1016 this allocation may be done before pass_final. */
1017
1018 dw_fde_ref
1019 dwarf2out_alloc_current_fde (void)
1020 {
1021 dw_fde_ref fde;
1022
1023 fde = ggc_cleared_alloc<dw_fde_node> ();
1024 fde->decl = current_function_decl;
1025 fde->funcdef_number = current_function_funcdef_no;
1026 fde->fde_index = vec_safe_length (fde_vec);
1027 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1028 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1029 fde->nothrow = crtl->nothrow;
1030 fde->drap_reg = INVALID_REGNUM;
1031 fde->vdrap_reg = INVALID_REGNUM;
1032
1033 /* Record the FDE associated with this function. */
1034 cfun->fde = fde;
1035 vec_safe_push (fde_vec, fde);
1036
1037 return fde;
1038 }
1039
1040 /* Output a marker (i.e. a label) for the beginning of a function, before
1041 the prologue. */
1042
1043 void
1044 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1045 unsigned int column ATTRIBUTE_UNUSED,
1046 const char *file ATTRIBUTE_UNUSED)
1047 {
1048 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1049 char * dup_label;
1050 dw_fde_ref fde;
1051 section *fnsec;
1052 bool do_frame;
1053
1054 current_function_func_begin_label = NULL;
1055
1056 do_frame = dwarf2out_do_frame ();
1057
1058 /* ??? current_function_func_begin_label is also used by except.c for
1059 call-site information. We must emit this label if it might be used. */
1060 if (!do_frame
1061 && (!flag_exceptions
1062 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1063 return;
1064
1065 fnsec = function_section (current_function_decl);
1066 switch_to_section (fnsec);
1067 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1068 current_function_funcdef_no);
1069 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1070 current_function_funcdef_no);
1071 dup_label = xstrdup (label);
1072 current_function_func_begin_label = dup_label;
1073
1074 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1075 if (!do_frame)
1076 return;
1077
1078 /* Unlike the debug version, the EH version of frame unwind info is a per-
1079 function setting so we need to record whether we need it for the unit. */
1080 do_eh_frame |= dwarf2out_do_eh_frame ();
1081
1082 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1083 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1084 would include pass_dwarf2_frame. If we've not created the FDE yet,
1085 do so now. */
1086 fde = cfun->fde;
1087 if (fde == NULL)
1088 fde = dwarf2out_alloc_current_fde ();
1089
1090 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1091 fde->dw_fde_begin = dup_label;
1092 fde->dw_fde_current_label = dup_label;
1093 fde->in_std_section = (fnsec == text_section
1094 || (cold_text_section && fnsec == cold_text_section));
1095
1096 /* We only want to output line number information for the genuine dwarf2
1097 prologue case, not the eh frame case. */
1098 #ifdef DWARF2_DEBUGGING_INFO
1099 if (file)
1100 dwarf2out_source_line (line, column, file, 0, true);
1101 #endif
1102
1103 if (dwarf2out_do_cfi_asm ())
1104 dwarf2out_do_cfi_startproc (false);
1105 else
1106 {
1107 rtx personality = get_personality_function (current_function_decl);
1108 if (!current_unit_personality)
1109 current_unit_personality = personality;
1110
1111 /* We cannot keep a current personality per function as without CFI
1112 asm, at the point where we emit the CFI data, there is no current
1113 function anymore. */
1114 if (personality && current_unit_personality != personality)
1115 sorry ("multiple EH personalities are supported only with assemblers "
1116 "supporting .cfi_personality directive");
1117 }
1118 }
1119
1120 /* Output a marker (i.e. a label) for the end of the generated code
1121 for a function prologue. This gets called *after* the prologue code has
1122 been generated. */
1123
1124 void
1125 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1126 const char *file ATTRIBUTE_UNUSED)
1127 {
1128 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1129
1130 /* Output a label to mark the end of the prologue code generated for
1131 this function. */
1132 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1133 current_function_funcdef_no);
1134 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1135 current_function_funcdef_no);
1136 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1137 }
1138
1139 /* Output a marker (i.e. a label) for the beginning of the generated code
1140 for a function epilogue. This gets called *before* the epilogue code has
1141 been generated. */
1142
1143 void
1144 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1145 const char *file ATTRIBUTE_UNUSED)
1146 {
1147 dw_fde_ref fde = cfun->fde;
1148 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1149
1150 if (fde->dw_fde_vms_begin_epilogue)
1151 return;
1152
1153 /* Output a label to mark the start of the epilogue code generated for
1154 this function. */
1155 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1156 current_function_funcdef_no);
1157 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1158 current_function_funcdef_no);
1159 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1160 }
1161
1162 /* Output a marker (i.e. a label) for the absolute end of the generated code
1163 for a function definition. This gets called *after* the epilogue code has
1164 been generated. */
1165
1166 void
1167 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1168 const char *file ATTRIBUTE_UNUSED)
1169 {
1170 dw_fde_ref fde;
1171 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1172
1173 last_var_location_insn = NULL;
1174 cached_next_real_insn = NULL;
1175
1176 if (dwarf2out_do_cfi_asm ())
1177 fprintf (asm_out_file, "\t.cfi_endproc\n");
1178
1179 /* Output a label to mark the endpoint of the code generated for this
1180 function. */
1181 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1182 current_function_funcdef_no);
1183 ASM_OUTPUT_LABEL (asm_out_file, label);
1184 fde = cfun->fde;
1185 gcc_assert (fde != NULL);
1186 if (fde->dw_fde_second_begin == NULL)
1187 fde->dw_fde_end = xstrdup (label);
1188 }
1189
1190 void
1191 dwarf2out_frame_finish (void)
1192 {
1193 /* Output call frame information. */
1194 if (targetm.debug_unwind_info () == UI_DWARF2)
1195 output_call_frame_info (0);
1196
1197 /* Output another copy for the unwinder. */
1198 if (do_eh_frame)
1199 output_call_frame_info (1);
1200 }
1201
1202 /* Note that the current function section is being used for code. */
1203
1204 static void
1205 dwarf2out_note_section_used (void)
1206 {
1207 section *sec = current_function_section ();
1208 if (sec == text_section)
1209 text_section_used = true;
1210 else if (sec == cold_text_section)
1211 cold_text_section_used = true;
1212 }
1213
1214 static void var_location_switch_text_section (void);
1215 static void set_cur_line_info_table (section *);
1216
1217 void
1218 dwarf2out_switch_text_section (void)
1219 {
1220 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1221 section *sect;
1222 dw_fde_ref fde = cfun->fde;
1223
1224 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1225
1226 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_SECOND_SECT_LABEL,
1227 current_function_funcdef_no);
1228
1229 fde->dw_fde_second_begin = ggc_strdup (label);
1230 if (!in_cold_section_p)
1231 {
1232 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1233 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1234 }
1235 else
1236 {
1237 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1238 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1239 }
1240 have_multiple_function_sections = true;
1241
1242 /* There is no need to mark used sections when not debugging. */
1243 if (cold_text_section != NULL)
1244 dwarf2out_note_section_used ();
1245
1246 if (dwarf2out_do_cfi_asm ())
1247 fprintf (asm_out_file, "\t.cfi_endproc\n");
1248
1249 /* Now do the real section switch. */
1250 sect = current_function_section ();
1251 switch_to_section (sect);
1252
1253 fde->second_in_std_section
1254 = (sect == text_section
1255 || (cold_text_section && sect == cold_text_section));
1256
1257 if (dwarf2out_do_cfi_asm ())
1258 dwarf2out_do_cfi_startproc (true);
1259
1260 var_location_switch_text_section ();
1261
1262 if (cold_text_section != NULL)
1263 set_cur_line_info_table (sect);
1264 }
1265 \f
1266 /* And now, the subset of the debugging information support code necessary
1267 for emitting location expressions. */
1268
1269 /* Data about a single source file. */
1270 struct GTY((for_user)) dwarf_file_data {
1271 const char * filename;
1272 int emitted_number;
1273 };
1274
1275 /* Describe an entry into the .debug_addr section. */
1276
1277 enum ate_kind {
1278 ate_kind_rtx,
1279 ate_kind_rtx_dtprel,
1280 ate_kind_label
1281 };
1282
1283 struct GTY((for_user)) addr_table_entry {
1284 enum ate_kind kind;
1285 unsigned int refcount;
1286 unsigned int index;
1287 union addr_table_entry_struct_union
1288 {
1289 rtx GTY ((tag ("0"))) rtl;
1290 char * GTY ((tag ("1"))) label;
1291 }
1292 GTY ((desc ("%1.kind"))) addr;
1293 };
1294
1295 typedef unsigned int var_loc_view;
1296
1297 /* Location lists are ranges + location descriptions for that range,
1298 so you can track variables that are in different places over
1299 their entire life. */
1300 typedef struct GTY(()) dw_loc_list_struct {
1301 dw_loc_list_ref dw_loc_next;
1302 const char *begin; /* Label and addr_entry for start of range */
1303 addr_table_entry *begin_entry;
1304 const char *end; /* Label for end of range */
1305 char *ll_symbol; /* Label for beginning of location list.
1306 Only on head of list. */
1307 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1308 const char *section; /* Section this loclist is relative to */
1309 dw_loc_descr_ref expr;
1310 var_loc_view vbegin, vend;
1311 hashval_t hash;
1312 /* True if all addresses in this and subsequent lists are known to be
1313 resolved. */
1314 bool resolved_addr;
1315 /* True if this list has been replaced by dw_loc_next. */
1316 bool replaced;
1317 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1318 section. */
1319 unsigned char emitted : 1;
1320 /* True if hash field is index rather than hash value. */
1321 unsigned char num_assigned : 1;
1322 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1323 unsigned char offset_emitted : 1;
1324 /* True if note_variable_value_in_expr has been called on it. */
1325 unsigned char noted_variable_value : 1;
1326 /* True if the range should be emitted even if begin and end
1327 are the same. */
1328 bool force;
1329 } dw_loc_list_node;
1330
1331 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1332 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1333
1334 /* Convert a DWARF stack opcode into its string name. */
1335
1336 static const char *
1337 dwarf_stack_op_name (unsigned int op)
1338 {
1339 const char *name = get_DW_OP_name (op);
1340
1341 if (name != NULL)
1342 return name;
1343
1344 return "OP_<unknown>";
1345 }
1346
1347 /* Return TRUE iff we're to output location view lists as a separate
1348 attribute next to the location lists, as an extension compatible
1349 with DWARF 2 and above. */
1350
1351 static inline bool
1352 dwarf2out_locviews_in_attribute ()
1353 {
1354 return debug_variable_location_views == 1;
1355 }
1356
1357 /* Return TRUE iff we're to output location view lists as part of the
1358 location lists, as proposed for standardization after DWARF 5. */
1359
1360 static inline bool
1361 dwarf2out_locviews_in_loclist ()
1362 {
1363 #ifndef DW_LLE_view_pair
1364 return false;
1365 #else
1366 return debug_variable_location_views == -1;
1367 #endif
1368 }
1369
1370 /* Return a pointer to a newly allocated location description. Location
1371 descriptions are simple expression terms that can be strung
1372 together to form more complicated location (address) descriptions. */
1373
1374 static inline dw_loc_descr_ref
1375 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1376 unsigned HOST_WIDE_INT oprnd2)
1377 {
1378 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1379
1380 descr->dw_loc_opc = op;
1381 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1382 descr->dw_loc_oprnd1.val_entry = NULL;
1383 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1384 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1385 descr->dw_loc_oprnd2.val_entry = NULL;
1386 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1387
1388 return descr;
1389 }
1390
1391 /* Add a location description term to a location description expression. */
1392
1393 static inline void
1394 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1395 {
1396 dw_loc_descr_ref *d;
1397
1398 /* Find the end of the chain. */
1399 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1400 ;
1401
1402 *d = descr;
1403 }
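/* A minimal sketch (kept out of the build) of how the two helpers above
   combine: each new_loc_descr call makes one expression term and
   add_loc_descr appends it, here "DW_OP_breg6 16; DW_OP_deref", i.e.
   dereference the address 16 bytes past register 6.  The register number
   is illustrative only.  */
#if 0
static dw_loc_descr_ref
example_loc_expr (void)
{
  dw_loc_descr_ref head = new_loc_descr (DW_OP_breg6, 16, 0);
  add_loc_descr (&head, new_loc_descr (DW_OP_deref, 0, 0));
  return head;
}
#endif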
1404
1405 /* Compare two location operands for exact equality. */
1406
1407 static bool
1408 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1409 {
1410 if (a->val_class != b->val_class)
1411 return false;
1412 switch (a->val_class)
1413 {
1414 case dw_val_class_none:
1415 return true;
1416 case dw_val_class_addr:
1417 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1418
1419 case dw_val_class_offset:
1420 case dw_val_class_unsigned_const:
1421 case dw_val_class_const:
1422 case dw_val_class_unsigned_const_implicit:
1423 case dw_val_class_const_implicit:
1424 case dw_val_class_range_list:
1425 /* These are all HOST_WIDE_INT, signed or unsigned. */
1426 return a->v.val_unsigned == b->v.val_unsigned;
1427
1428 case dw_val_class_loc:
1429 return a->v.val_loc == b->v.val_loc;
1430 case dw_val_class_loc_list:
1431 return a->v.val_loc_list == b->v.val_loc_list;
1432 case dw_val_class_view_list:
1433 return a->v.val_view_list == b->v.val_view_list;
1434 case dw_val_class_die_ref:
1435 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1436 case dw_val_class_fde_ref:
1437 return a->v.val_fde_index == b->v.val_fde_index;
1438 case dw_val_class_symview:
1439 return strcmp (a->v.val_symbolic_view, b->v.val_symbolic_view) == 0;
1440 case dw_val_class_lbl_id:
1441 case dw_val_class_lineptr:
1442 case dw_val_class_macptr:
1443 case dw_val_class_loclistsptr:
1444 case dw_val_class_high_pc:
1445 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1446 case dw_val_class_str:
1447 return a->v.val_str == b->v.val_str;
1448 case dw_val_class_flag:
1449 return a->v.val_flag == b->v.val_flag;
1450 case dw_val_class_file:
1451 case dw_val_class_file_implicit:
1452 return a->v.val_file == b->v.val_file;
1453 case dw_val_class_decl_ref:
1454 return a->v.val_decl_ref == b->v.val_decl_ref;
1455
1456 case dw_val_class_const_double:
1457 return (a->v.val_double.high == b->v.val_double.high
1458 && a->v.val_double.low == b->v.val_double.low);
1459
1460 case dw_val_class_wide_int:
1461 return *a->v.val_wide == *b->v.val_wide;
1462
1463 case dw_val_class_vec:
1464 {
1465 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1466 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1467
1468 return (a_len == b_len
1469 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1470 }
1471
1472 case dw_val_class_data8:
1473 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1474
1475 case dw_val_class_vms_delta:
1476 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1477 && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1478
1479 case dw_val_class_discr_value:
1480 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1481 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1482 case dw_val_class_discr_list:
1483 /* It makes no sense comparing two discriminant value lists. */
1484 return false;
1485 }
1486 gcc_unreachable ();
1487 }
1488
1489 /* Compare two location atoms for exact equality. */
1490
1491 static bool
1492 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1493 {
1494 if (a->dw_loc_opc != b->dw_loc_opc)
1495 return false;
1496
1497 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1498 address size, but since we always allocate cleared storage it
1499 should be zero for other types of locations. */
1500 if (a->dtprel != b->dtprel)
1501 return false;
1502
1503 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1504 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1505 }
1506
1507 /* Compare two complete location expressions for exact equality. */
1508
1509 bool
1510 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1511 {
1512 while (1)
1513 {
1514 if (a == b)
1515 return true;
1516 if (a == NULL || b == NULL)
1517 return false;
1518 if (!loc_descr_equal_p_1 (a, b))
1519 return false;
1520
1521 a = a->dw_loc_next;
1522 b = b->dw_loc_next;
1523 }
1524 }
1525
1526
1527 /* Add a constant POLY_OFFSET to a location expression. */
1528
1529 static void
1530 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1531 {
1532 dw_loc_descr_ref loc;
1533 HOST_WIDE_INT *p;
1534
1535 gcc_assert (*list_head != NULL);
1536
1537 if (known_eq (poly_offset, 0))
1538 return;
1539
1540 /* Find the end of the chain. */
1541 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1542 ;
1543
1544 HOST_WIDE_INT offset;
1545 if (!poly_offset.is_constant (&offset))
1546 {
1547 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1548 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1549 return;
1550 }
1551
1552 p = NULL;
1553 if (loc->dw_loc_opc == DW_OP_fbreg
1554 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1555 p = &loc->dw_loc_oprnd1.v.val_int;
1556 else if (loc->dw_loc_opc == DW_OP_bregx)
1557 p = &loc->dw_loc_oprnd2.v.val_int;
1558
1559 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1560 offset. Don't optimize if a signed integer overflow would happen. */
1561 if (p != NULL
1562 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1563 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1564 *p += offset;
1565
1566 else if (offset > 0)
1567 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1568
1569 else
1570 {
1571 loc->dw_loc_next
1572 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1573 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1574 }
1575 }
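/* Behavior sketch (illustrative): given the expression "DW_OP_fbreg -16",
   loc_descr_plus_const (&expr, 8) folds the constant into the trailing
   operation, yielding "DW_OP_fbreg -8".  If the last operation is not one
   of fbreg/breg0..31/bregx, or folding would overflow, it instead appends
   "DW_OP_plus_uconst 8" for a positive offset or a constant followed by
   DW_OP_minus for a negative one.  */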
1576
1577 /* Return a pointer to a newly allocated location description for
1578 REG and OFFSET. */
1579
1580 static inline dw_loc_descr_ref
1581 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1582 {
1583 HOST_WIDE_INT const_offset;
1584 if (offset.is_constant (&const_offset))
1585 {
1586 if (reg <= 31)
1587 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1588 const_offset, 0);
1589 else
1590 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1591 }
1592 else
1593 {
1594 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1595 loc_descr_plus_const (&ret, offset);
1596 return ret;
1597 }
1598 }
1599
1600 /* Add a constant OFFSET to a location list. */
1601
1602 static void
1603 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1604 {
1605 dw_loc_list_ref d;
1606 for (d = list_head; d != NULL; d = d->dw_loc_next)
1607 loc_descr_plus_const (&d->expr, offset);
1608 }
1609
1610 #define DWARF_REF_SIZE \
1611 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1612
1613 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1614 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1615 DW_FORM_data16 with 128 bits. */
1616 #define DWARF_LARGEST_DATA_FORM_BITS \
1617 (dwarf_version >= 5 ? 128 : 64)
1618
1619 /* Utility inline function for construction of ops that were GNU extensions
1620 before DWARF 5. */
1621 static inline enum dwarf_location_atom
1622 dwarf_OP (enum dwarf_location_atom op)
1623 {
1624 switch (op)
1625 {
1626 case DW_OP_implicit_pointer:
1627 if (dwarf_version < 5)
1628 return DW_OP_GNU_implicit_pointer;
1629 break;
1630
1631 case DW_OP_entry_value:
1632 if (dwarf_version < 5)
1633 return DW_OP_GNU_entry_value;
1634 break;
1635
1636 case DW_OP_const_type:
1637 if (dwarf_version < 5)
1638 return DW_OP_GNU_const_type;
1639 break;
1640
1641 case DW_OP_regval_type:
1642 if (dwarf_version < 5)
1643 return DW_OP_GNU_regval_type;
1644 break;
1645
1646 case DW_OP_deref_type:
1647 if (dwarf_version < 5)
1648 return DW_OP_GNU_deref_type;
1649 break;
1650
1651 case DW_OP_convert:
1652 if (dwarf_version < 5)
1653 return DW_OP_GNU_convert;
1654 break;
1655
1656 case DW_OP_reinterpret:
1657 if (dwarf_version < 5)
1658 return DW_OP_GNU_reinterpret;
1659 break;
1660
1661 case DW_OP_addrx:
1662 if (dwarf_version < 5)
1663 return DW_OP_GNU_addr_index;
1664 break;
1665
1666 case DW_OP_constx:
1667 if (dwarf_version < 5)
1668 return DW_OP_GNU_const_index;
1669 break;
1670
1671 default:
1672 break;
1673 }
1674 return op;
1675 }
1676
1677 /* Similarly for attributes. */
1678 static inline enum dwarf_attribute
1679 dwarf_AT (enum dwarf_attribute at)
1680 {
1681 switch (at)
1682 {
1683 case DW_AT_call_return_pc:
1684 if (dwarf_version < 5)
1685 return DW_AT_low_pc;
1686 break;
1687
1688 case DW_AT_call_tail_call:
1689 if (dwarf_version < 5)
1690 return DW_AT_GNU_tail_call;
1691 break;
1692
1693 case DW_AT_call_origin:
1694 if (dwarf_version < 5)
1695 return DW_AT_abstract_origin;
1696 break;
1697
1698 case DW_AT_call_target:
1699 if (dwarf_version < 5)
1700 return DW_AT_GNU_call_site_target;
1701 break;
1702
1703 case DW_AT_call_target_clobbered:
1704 if (dwarf_version < 5)
1705 return DW_AT_GNU_call_site_target_clobbered;
1706 break;
1707
1708 case DW_AT_call_parameter:
1709 if (dwarf_version < 5)
1710 return DW_AT_abstract_origin;
1711 break;
1712
1713 case DW_AT_call_value:
1714 if (dwarf_version < 5)
1715 return DW_AT_GNU_call_site_value;
1716 break;
1717
1718 case DW_AT_call_data_value:
1719 if (dwarf_version < 5)
1720 return DW_AT_GNU_call_site_data_value;
1721 break;
1722
1723 case DW_AT_call_all_calls:
1724 if (dwarf_version < 5)
1725 return DW_AT_GNU_all_call_sites;
1726 break;
1727
1728 case DW_AT_call_all_tail_calls:
1729 if (dwarf_version < 5)
1730 return DW_AT_GNU_all_tail_call_sites;
1731 break;
1732
1733 case DW_AT_dwo_name:
1734 if (dwarf_version < 5)
1735 return DW_AT_GNU_dwo_name;
1736 break;
1737
1738 case DW_AT_addr_base:
1739 if (dwarf_version < 5)
1740 return DW_AT_GNU_addr_base;
1741 break;
1742
1743 default:
1744 break;
1745 }
1746 return at;
1747 }
1748
1749 /* And similarly for tags. */
1750 static inline enum dwarf_tag
1751 dwarf_TAG (enum dwarf_tag tag)
1752 {
1753 switch (tag)
1754 {
1755 case DW_TAG_call_site:
1756 if (dwarf_version < 5)
1757 return DW_TAG_GNU_call_site;
1758 break;
1759
1760 case DW_TAG_call_site_parameter:
1761 if (dwarf_version < 5)
1762 return DW_TAG_GNU_call_site_parameter;
1763 break;
1764
1765 default:
1766 break;
1767 }
1768 return tag;
1769 }
1770
1771 /* And similarly for forms. */
1772 static inline enum dwarf_form
1773 dwarf_FORM (enum dwarf_form form)
1774 {
1775 switch (form)
1776 {
1777 case DW_FORM_addrx:
1778 if (dwarf_version < 5)
1779 return DW_FORM_GNU_addr_index;
1780 break;
1781
1782 case DW_FORM_strx:
1783 if (dwarf_version < 5)
1784 return DW_FORM_GNU_str_index;
1785 break;
1786
1787 default:
1788 break;
1789 }
1790 return form;
1791 }
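/* Illustrative usage (kept out of the build): the four helpers above let
   the rest of this file use the DWARF 5 names unconditionally; for
   -gdwarf-4 and earlier they fall back to the GNU extensions.  */
#if 0
static void
example_dwarf5_name_fallbacks (void)
{
  /* With dwarf_version == 4:  */
  dwarf_OP (DW_OP_entry_value);   /* -> DW_OP_GNU_entry_value      */
  dwarf_AT (DW_AT_call_value);    /* -> DW_AT_GNU_call_site_value  */
  dwarf_TAG (DW_TAG_call_site);   /* -> DW_TAG_GNU_call_site       */
  dwarf_FORM (DW_FORM_strx);      /* -> DW_FORM_GNU_str_index      */
}
#endif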
1792
1793 static unsigned long int get_base_type_offset (dw_die_ref);
1794
1795 /* Return the size of a location descriptor. */
1796
1797 static unsigned long
1798 size_of_loc_descr (dw_loc_descr_ref loc)
1799 {
1800 unsigned long size = 1;
1801
1802 switch (loc->dw_loc_opc)
1803 {
1804 case DW_OP_addr:
1805 size += DWARF2_ADDR_SIZE;
1806 break;
1807 case DW_OP_GNU_addr_index:
1808 case DW_OP_addrx:
1809 case DW_OP_GNU_const_index:
1810 case DW_OP_constx:
1811 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1812 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1813 break;
1814 case DW_OP_const1u:
1815 case DW_OP_const1s:
1816 size += 1;
1817 break;
1818 case DW_OP_const2u:
1819 case DW_OP_const2s:
1820 size += 2;
1821 break;
1822 case DW_OP_const4u:
1823 case DW_OP_const4s:
1824 size += 4;
1825 break;
1826 case DW_OP_const8u:
1827 case DW_OP_const8s:
1828 size += 8;
1829 break;
1830 case DW_OP_constu:
1831 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1832 break;
1833 case DW_OP_consts:
1834 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1835 break;
1836 case DW_OP_pick:
1837 size += 1;
1838 break;
1839 case DW_OP_plus_uconst:
1840 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1841 break;
1842 case DW_OP_skip:
1843 case DW_OP_bra:
1844 size += 2;
1845 break;
1846 case DW_OP_breg0:
1847 case DW_OP_breg1:
1848 case DW_OP_breg2:
1849 case DW_OP_breg3:
1850 case DW_OP_breg4:
1851 case DW_OP_breg5:
1852 case DW_OP_breg6:
1853 case DW_OP_breg7:
1854 case DW_OP_breg8:
1855 case DW_OP_breg9:
1856 case DW_OP_breg10:
1857 case DW_OP_breg11:
1858 case DW_OP_breg12:
1859 case DW_OP_breg13:
1860 case DW_OP_breg14:
1861 case DW_OP_breg15:
1862 case DW_OP_breg16:
1863 case DW_OP_breg17:
1864 case DW_OP_breg18:
1865 case DW_OP_breg19:
1866 case DW_OP_breg20:
1867 case DW_OP_breg21:
1868 case DW_OP_breg22:
1869 case DW_OP_breg23:
1870 case DW_OP_breg24:
1871 case DW_OP_breg25:
1872 case DW_OP_breg26:
1873 case DW_OP_breg27:
1874 case DW_OP_breg28:
1875 case DW_OP_breg29:
1876 case DW_OP_breg30:
1877 case DW_OP_breg31:
1878 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1879 break;
1880 case DW_OP_regx:
1881 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1882 break;
1883 case DW_OP_fbreg:
1884 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1885 break;
1886 case DW_OP_bregx:
1887 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1888 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1889 break;
1890 case DW_OP_piece:
1891 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1892 break;
1893 case DW_OP_bit_piece:
1894 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1895 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1896 break;
1897 case DW_OP_deref_size:
1898 case DW_OP_xderef_size:
1899 size += 1;
1900 break;
1901 case DW_OP_call2:
1902 size += 2;
1903 break;
1904 case DW_OP_call4:
1905 size += 4;
1906 break;
1907 case DW_OP_call_ref:
1908 case DW_OP_GNU_variable_value:
1909 size += DWARF_REF_SIZE;
1910 break;
1911 case DW_OP_implicit_value:
1912 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1913 + loc->dw_loc_oprnd1.v.val_unsigned;
1914 break;
1915 case DW_OP_implicit_pointer:
1916 case DW_OP_GNU_implicit_pointer:
1917 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1918 break;
1919 case DW_OP_entry_value:
1920 case DW_OP_GNU_entry_value:
1921 {
1922 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1923 size += size_of_uleb128 (op_size) + op_size;
1924 break;
1925 }
1926 case DW_OP_const_type:
1927 case DW_OP_GNU_const_type:
1928 {
1929 unsigned long o
1930 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1931 size += size_of_uleb128 (o) + 1;
1932 switch (loc->dw_loc_oprnd2.val_class)
1933 {
1934 case dw_val_class_vec:
1935 size += loc->dw_loc_oprnd2.v.val_vec.length
1936 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1937 break;
1938 case dw_val_class_const:
1939 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1940 break;
1941 case dw_val_class_const_double:
1942 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1943 break;
1944 case dw_val_class_wide_int:
1945 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1946 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1947 break;
1948 default:
1949 gcc_unreachable ();
1950 }
1951 break;
1952 }
1953 case DW_OP_regval_type:
1954 case DW_OP_GNU_regval_type:
1955 {
1956 unsigned long o
1957 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1958 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1959 + size_of_uleb128 (o);
1960 }
1961 break;
1962 case DW_OP_deref_type:
1963 case DW_OP_GNU_deref_type:
1964 {
1965 unsigned long o
1966 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1967 size += 1 + size_of_uleb128 (o);
1968 }
1969 break;
1970 case DW_OP_convert:
1971 case DW_OP_reinterpret:
1972 case DW_OP_GNU_convert:
1973 case DW_OP_GNU_reinterpret:
1974 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1975 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1976 else
1977 {
1978 unsigned long o
1979 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1980 size += size_of_uleb128 (o);
1981 }
1982 break;
1983 case DW_OP_GNU_parameter_ref:
1984 size += 4;
1985 break;
1986 default:
1987 break;
1988 }
1989
1990 return size;
1991 }
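
/* As an illustrative sizing example: a DW_OP_plus_uconst descriptor with
   operand 300 occupies 1 opcode byte plus a 2-byte ULEB128, i.e. 3 bytes,
   while a DW_OP_breg6 descriptor with offset -8 occupies 1 opcode byte
   plus a 1-byte SLEB128, i.e. 2 bytes.  */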
1992
1993 /* Return the size of a series of location descriptors. */
1994
1995 unsigned long
1996 size_of_locs (dw_loc_descr_ref loc)
1997 {
1998 dw_loc_descr_ref l;
1999 unsigned long size;
2000
2001 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
2002 field, to avoid writing to a PCH file. */
2003 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2004 {
2005 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
2006 break;
2007 size += size_of_loc_descr (l);
2008 }
2009 if (! l)
2010 return size;
2011
2012 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2013 {
2014 l->dw_loc_addr = size;
2015 size += size_of_loc_descr (l);
2016 }
2017
2018 return size;
2019 }
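
/* Note that when a DW_OP_skip or DW_OP_bra is present, the second loop
   above records each descriptor's byte offset in dw_loc_addr;
   output_loc_operands later uses those offsets to compute the 2-byte
   branch displacements.  */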
2020
2021 /* Return the size of the value in a DW_AT_discr_value attribute. */
2022
2023 static int
2024 size_of_discr_value (dw_discr_value *discr_value)
2025 {
2026 if (discr_value->pos)
2027 return size_of_uleb128 (discr_value->v.uval);
2028 else
2029 return size_of_sleb128 (discr_value->v.sval);
2030 }
2031
2032 /* Return the size of the value in a DW_AT_discr_list attribute. */
2033
2034 static int
2035 size_of_discr_list (dw_discr_list_ref discr_list)
2036 {
2037 int size = 0;
2038
2039 for (dw_discr_list_ref list = discr_list;
2040 list != NULL;
2041 list = list->dw_discr_next)
2042 {
2043 /* One byte for the discriminant value descriptor, and then one or two
2044 LEB128 numbers, depending on whether it's a single case label or a
2045 range label. */
2046 size += 1;
2047 size += size_of_discr_value (&list->dw_discr_lower_bound);
2048 if (list->dw_discr_range != 0)
2049 size += size_of_discr_value (&list->dw_discr_upper_bound);
2050 }
2051 return size;
2052 }
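
/* For instance, a single case label with value 5 takes 1 + 1 = 2 bytes,
   while a range with bounds 0 and 300 takes 1 + 1 + 2 = 4 bytes, the
   upper bound 300 needing a 2-byte LEB128.  */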
2053
2054 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2055 static void get_ref_die_offset_label (char *, dw_die_ref);
2056 static unsigned long int get_ref_die_offset (dw_die_ref);
2057
2058 /* Output location description stack opcode's operands (if any).
2059 The for_eh_or_skip parameter controls whether register numbers are
2060 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2061 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2062 info). This should be suppressed for the cases that have not been converted
2063 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2064
2065 static void
2066 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2067 {
2068 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2069 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2070
2071 switch (loc->dw_loc_opc)
2072 {
2073 #ifdef DWARF2_DEBUGGING_INFO
2074 case DW_OP_const2u:
2075 case DW_OP_const2s:
2076 dw2_asm_output_data (2, val1->v.val_int, NULL);
2077 break;
2078 case DW_OP_const4u:
2079 if (loc->dtprel)
2080 {
2081 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2082 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2083 val1->v.val_addr);
2084 fputc ('\n', asm_out_file);
2085 break;
2086 }
2087 /* FALLTHRU */
2088 case DW_OP_const4s:
2089 dw2_asm_output_data (4, val1->v.val_int, NULL);
2090 break;
2091 case DW_OP_const8u:
2092 if (loc->dtprel)
2093 {
2094 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2095 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2096 val1->v.val_addr);
2097 fputc ('\n', asm_out_file);
2098 break;
2099 }
2100 /* FALLTHRU */
2101 case DW_OP_const8s:
2102 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2103 dw2_asm_output_data (8, val1->v.val_int, NULL);
2104 break;
2105 case DW_OP_skip:
2106 case DW_OP_bra:
2107 {
2108 int offset;
2109
2110 gcc_assert (val1->val_class == dw_val_class_loc);
2111 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2112
2113 dw2_asm_output_data (2, offset, NULL);
2114 }
2115 break;
2116 case DW_OP_implicit_value:
2117 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2118 switch (val2->val_class)
2119 {
2120 case dw_val_class_const:
2121 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2122 break;
2123 case dw_val_class_vec:
2124 {
2125 unsigned int elt_size = val2->v.val_vec.elt_size;
2126 unsigned int len = val2->v.val_vec.length;
2127 unsigned int i;
2128 unsigned char *p;
2129
2130 if (elt_size > sizeof (HOST_WIDE_INT))
2131 {
2132 elt_size /= 2;
2133 len *= 2;
2134 }
2135 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2136 i < len;
2137 i++, p += elt_size)
2138 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2139 "fp or vector constant word %u", i);
2140 }
2141 break;
2142 case dw_val_class_const_double:
2143 {
2144 unsigned HOST_WIDE_INT first, second;
2145
2146 if (WORDS_BIG_ENDIAN)
2147 {
2148 first = val2->v.val_double.high;
2149 second = val2->v.val_double.low;
2150 }
2151 else
2152 {
2153 first = val2->v.val_double.low;
2154 second = val2->v.val_double.high;
2155 }
2156 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2157 first, NULL);
2158 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2159 second, NULL);
2160 }
2161 break;
2162 case dw_val_class_wide_int:
2163 {
2164 int i;
2165 int len = get_full_len (*val2->v.val_wide);
2166 if (WORDS_BIG_ENDIAN)
2167 for (i = len - 1; i >= 0; --i)
2168 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2169 val2->v.val_wide->elt (i), NULL);
2170 else
2171 for (i = 0; i < len; ++i)
2172 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2173 val2->v.val_wide->elt (i), NULL);
2174 }
2175 break;
2176 case dw_val_class_addr:
2177 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2178 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2179 break;
2180 default:
2181 gcc_unreachable ();
2182 }
2183 break;
2184 #else
2185 case DW_OP_const2u:
2186 case DW_OP_const2s:
2187 case DW_OP_const4u:
2188 case DW_OP_const4s:
2189 case DW_OP_const8u:
2190 case DW_OP_const8s:
2191 case DW_OP_skip:
2192 case DW_OP_bra:
2193 case DW_OP_implicit_value:
2194 /* We currently don't make any attempt to make sure these are
2195 aligned properly like we do for the main unwind info, so
2196 don't support emitting things larger than a byte if we're
2197 only doing unwinding. */
2198 gcc_unreachable ();
2199 #endif
2200 case DW_OP_const1u:
2201 case DW_OP_const1s:
2202 dw2_asm_output_data (1, val1->v.val_int, NULL);
2203 break;
2204 case DW_OP_constu:
2205 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2206 break;
2207 case DW_OP_consts:
2208 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2209 break;
2210 case DW_OP_pick:
2211 dw2_asm_output_data (1, val1->v.val_int, NULL);
2212 break;
2213 case DW_OP_plus_uconst:
2214 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2215 break;
2216 case DW_OP_breg0:
2217 case DW_OP_breg1:
2218 case DW_OP_breg2:
2219 case DW_OP_breg3:
2220 case DW_OP_breg4:
2221 case DW_OP_breg5:
2222 case DW_OP_breg6:
2223 case DW_OP_breg7:
2224 case DW_OP_breg8:
2225 case DW_OP_breg9:
2226 case DW_OP_breg10:
2227 case DW_OP_breg11:
2228 case DW_OP_breg12:
2229 case DW_OP_breg13:
2230 case DW_OP_breg14:
2231 case DW_OP_breg15:
2232 case DW_OP_breg16:
2233 case DW_OP_breg17:
2234 case DW_OP_breg18:
2235 case DW_OP_breg19:
2236 case DW_OP_breg20:
2237 case DW_OP_breg21:
2238 case DW_OP_breg22:
2239 case DW_OP_breg23:
2240 case DW_OP_breg24:
2241 case DW_OP_breg25:
2242 case DW_OP_breg26:
2243 case DW_OP_breg27:
2244 case DW_OP_breg28:
2245 case DW_OP_breg29:
2246 case DW_OP_breg30:
2247 case DW_OP_breg31:
2248 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2249 break;
2250 case DW_OP_regx:
2251 {
2252 unsigned r = val1->v.val_unsigned;
2253 if (for_eh_or_skip >= 0)
2254 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2255 gcc_assert (size_of_uleb128 (r)
2256 == size_of_uleb128 (val1->v.val_unsigned));
2257 dw2_asm_output_data_uleb128 (r, NULL);
2258 }
2259 break;
2260 case DW_OP_fbreg:
2261 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2262 break;
2263 case DW_OP_bregx:
2264 {
2265 unsigned r = val1->v.val_unsigned;
2266 if (for_eh_or_skip >= 0)
2267 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2268 gcc_assert (size_of_uleb128 (r)
2269 == size_of_uleb128 (val1->v.val_unsigned));
2270 dw2_asm_output_data_uleb128 (r, NULL);
2271 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2272 }
2273 break;
2274 case DW_OP_piece:
2275 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2276 break;
2277 case DW_OP_bit_piece:
2278 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2279 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2280 break;
2281 case DW_OP_deref_size:
2282 case DW_OP_xderef_size:
2283 dw2_asm_output_data (1, val1->v.val_int, NULL);
2284 break;
2285
2286 case DW_OP_addr:
2287 if (loc->dtprel)
2288 {
2289 if (targetm.asm_out.output_dwarf_dtprel)
2290 {
2291 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2292 DWARF2_ADDR_SIZE,
2293 val1->v.val_addr);
2294 fputc ('\n', asm_out_file);
2295 }
2296 else
2297 gcc_unreachable ();
2298 }
2299 else
2300 {
2301 #ifdef DWARF2_DEBUGGING_INFO
2302 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2303 #else
2304 gcc_unreachable ();
2305 #endif
2306 }
2307 break;
2308
2309 case DW_OP_GNU_addr_index:
2310 case DW_OP_addrx:
2311 case DW_OP_GNU_const_index:
2312 case DW_OP_constx:
2313 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2314 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2315 "(index into .debug_addr)");
2316 break;
2317
2318 case DW_OP_call2:
2319 case DW_OP_call4:
2320 {
2321 unsigned long die_offset
2322 = get_ref_die_offset (val1->v.val_die_ref.die);
2323 /* Make sure the offset has been computed and that we can encode it as
2324 an operand. */
2325 gcc_assert (die_offset > 0
2326 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2327 ? 0xffff
2328 : 0xffffffff));
2329 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2330 die_offset, NULL);
2331 }
2332 break;
2333
2334 case DW_OP_call_ref:
2335 case DW_OP_GNU_variable_value:
2336 {
2337 char label[MAX_ARTIFICIAL_LABEL_BYTES
2338 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2339 gcc_assert (val1->val_class == dw_val_class_die_ref);
2340 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2341 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2342 }
2343 break;
2344
2345 case DW_OP_implicit_pointer:
2346 case DW_OP_GNU_implicit_pointer:
2347 {
2348 char label[MAX_ARTIFICIAL_LABEL_BYTES
2349 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2350 gcc_assert (val1->val_class == dw_val_class_die_ref);
2351 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2352 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2353 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2354 }
2355 break;
2356
2357 case DW_OP_entry_value:
2358 case DW_OP_GNU_entry_value:
2359 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2360 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2361 break;
2362
2363 case DW_OP_const_type:
2364 case DW_OP_GNU_const_type:
2365 {
2366 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2367 gcc_assert (o);
2368 dw2_asm_output_data_uleb128 (o, NULL);
2369 switch (val2->val_class)
2370 {
2371 case dw_val_class_const:
2372 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2373 dw2_asm_output_data (1, l, NULL);
2374 dw2_asm_output_data (l, val2->v.val_int, NULL);
2375 break;
2376 case dw_val_class_vec:
2377 {
2378 unsigned int elt_size = val2->v.val_vec.elt_size;
2379 unsigned int len = val2->v.val_vec.length;
2380 unsigned int i;
2381 unsigned char *p;
2382
2383 l = len * elt_size;
2384 dw2_asm_output_data (1, l, NULL);
2385 if (elt_size > sizeof (HOST_WIDE_INT))
2386 {
2387 elt_size /= 2;
2388 len *= 2;
2389 }
2390 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2391 i < len;
2392 i++, p += elt_size)
2393 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2394 "fp or vector constant word %u", i);
2395 }
2396 break;
2397 case dw_val_class_const_double:
2398 {
2399 unsigned HOST_WIDE_INT first, second;
2400 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2401
2402 dw2_asm_output_data (1, 2 * l, NULL);
2403 if (WORDS_BIG_ENDIAN)
2404 {
2405 first = val2->v.val_double.high;
2406 second = val2->v.val_double.low;
2407 }
2408 else
2409 {
2410 first = val2->v.val_double.low;
2411 second = val2->v.val_double.high;
2412 }
2413 dw2_asm_output_data (l, first, NULL);
2414 dw2_asm_output_data (l, second, NULL);
2415 }
2416 break;
2417 case dw_val_class_wide_int:
2418 {
2419 int i;
2420 int len = get_full_len (*val2->v.val_wide);
2421 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2422
2423 dw2_asm_output_data (1, len * l, NULL);
2424 if (WORDS_BIG_ENDIAN)
2425 for (i = len - 1; i >= 0; --i)
2426 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2427 else
2428 for (i = 0; i < len; ++i)
2429 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2430 }
2431 break;
2432 default:
2433 gcc_unreachable ();
2434 }
2435 }
2436 break;
2437 case DW_OP_regval_type:
2438 case DW_OP_GNU_regval_type:
2439 {
2440 unsigned r = val1->v.val_unsigned;
2441 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2442 gcc_assert (o);
2443 if (for_eh_or_skip >= 0)
2444 {
2445 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2446 gcc_assert (size_of_uleb128 (r)
2447 == size_of_uleb128 (val1->v.val_unsigned));
2448 }
2449 dw2_asm_output_data_uleb128 (r, NULL);
2450 dw2_asm_output_data_uleb128 (o, NULL);
2451 }
2452 break;
2453 case DW_OP_deref_type:
2454 case DW_OP_GNU_deref_type:
2455 {
2456 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2457 gcc_assert (o);
2458 dw2_asm_output_data (1, val1->v.val_int, NULL);
2459 dw2_asm_output_data_uleb128 (o, NULL);
2460 }
2461 break;
2462 case DW_OP_convert:
2463 case DW_OP_reinterpret:
2464 case DW_OP_GNU_convert:
2465 case DW_OP_GNU_reinterpret:
2466 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2467 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2468 else
2469 {
2470 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2471 gcc_assert (o);
2472 dw2_asm_output_data_uleb128 (o, NULL);
2473 }
2474 break;
2475
2476 case DW_OP_GNU_parameter_ref:
2477 {
2478 unsigned long o;
2479 gcc_assert (val1->val_class == dw_val_class_die_ref);
2480 o = get_ref_die_offset (val1->v.val_die_ref.die);
2481 dw2_asm_output_data (4, o, NULL);
2482 }
2483 break;
2484
2485 default:
2486 /* Other codes have no operands. */
2487 break;
2488 }
2489 }
2490
2491 /* Output a sequence of location operations.
2492 The for_eh_or_skip parameter controls whether register numbers are
2493 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2494 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2495 info). This should be suppressed for the cases that have not been converted
2496 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2497
2498 void
2499 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2500 {
2501 for (; loc != NULL; loc = loc->dw_loc_next)
2502 {
2503 enum dwarf_location_atom opc = loc->dw_loc_opc;
2504 /* Output the opcode. */
2505 if (for_eh_or_skip >= 0
2506 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2507 {
2508 unsigned r = (opc - DW_OP_breg0);
2509 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2510 gcc_assert (r <= 31);
2511 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2512 }
2513 else if (for_eh_or_skip >= 0
2514 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2515 {
2516 unsigned r = (opc - DW_OP_reg0);
2517 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2518 gcc_assert (r <= 31);
2519 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2520 }
2521
2522 dw2_asm_output_data (1, opc,
2523 "%s", dwarf_stack_op_name (opc));
2524
2525 /* Output the operand(s) (if any). */
2526 output_loc_operands (loc, for_eh_or_skip);
2527 }
2528 }
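
/* As an illustration, a lone DW_OP_fbreg descriptor with offset -16 is
   emitted as the opcode byte 0x91 followed by the 1-byte SLEB128 encoding
   of -16, 0x70.  */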
2529
2530 /* Output location description stack opcode's operands (if any).
2531 The output is single bytes on a line, suitable for .cfi_escape. */
2532
2533 static void
2534 output_loc_operands_raw (dw_loc_descr_ref loc)
2535 {
2536 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2537 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2538
2539 switch (loc->dw_loc_opc)
2540 {
2541 case DW_OP_addr:
2542 case DW_OP_GNU_addr_index:
2543 case DW_OP_addrx:
2544 case DW_OP_GNU_const_index:
2545 case DW_OP_constx:
2546 case DW_OP_implicit_value:
2547 /* We cannot output addresses in .cfi_escape, only bytes. */
2548 gcc_unreachable ();
2549
2550 case DW_OP_const1u:
2551 case DW_OP_const1s:
2552 case DW_OP_pick:
2553 case DW_OP_deref_size:
2554 case DW_OP_xderef_size:
2555 fputc (',', asm_out_file);
2556 dw2_asm_output_data_raw (1, val1->v.val_int);
2557 break;
2558
2559 case DW_OP_const2u:
2560 case DW_OP_const2s:
2561 fputc (',', asm_out_file);
2562 dw2_asm_output_data_raw (2, val1->v.val_int);
2563 break;
2564
2565 case DW_OP_const4u:
2566 case DW_OP_const4s:
2567 fputc (',', asm_out_file);
2568 dw2_asm_output_data_raw (4, val1->v.val_int);
2569 break;
2570
2571 case DW_OP_const8u:
2572 case DW_OP_const8s:
2573 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2574 fputc (',', asm_out_file);
2575 dw2_asm_output_data_raw (8, val1->v.val_int);
2576 break;
2577
2578 case DW_OP_skip:
2579 case DW_OP_bra:
2580 {
2581 int offset;
2582
2583 gcc_assert (val1->val_class == dw_val_class_loc);
2584 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2585
2586 fputc (',', asm_out_file);
2587 dw2_asm_output_data_raw (2, offset);
2588 }
2589 break;
2590
2591 case DW_OP_regx:
2592 {
2593 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2594 gcc_assert (size_of_uleb128 (r)
2595 == size_of_uleb128 (val1->v.val_unsigned));
2596 fputc (',', asm_out_file);
2597 dw2_asm_output_data_uleb128_raw (r);
2598 }
2599 break;
2600
2601 case DW_OP_constu:
2602 case DW_OP_plus_uconst:
2603 case DW_OP_piece:
2604 fputc (',', asm_out_file);
2605 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2606 break;
2607
2608 case DW_OP_bit_piece:
2609 fputc (',', asm_out_file);
2610 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2611 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2612 break;
2613
2614 case DW_OP_consts:
2615 case DW_OP_breg0:
2616 case DW_OP_breg1:
2617 case DW_OP_breg2:
2618 case DW_OP_breg3:
2619 case DW_OP_breg4:
2620 case DW_OP_breg5:
2621 case DW_OP_breg6:
2622 case DW_OP_breg7:
2623 case DW_OP_breg8:
2624 case DW_OP_breg9:
2625 case DW_OP_breg10:
2626 case DW_OP_breg11:
2627 case DW_OP_breg12:
2628 case DW_OP_breg13:
2629 case DW_OP_breg14:
2630 case DW_OP_breg15:
2631 case DW_OP_breg16:
2632 case DW_OP_breg17:
2633 case DW_OP_breg18:
2634 case DW_OP_breg19:
2635 case DW_OP_breg20:
2636 case DW_OP_breg21:
2637 case DW_OP_breg22:
2638 case DW_OP_breg23:
2639 case DW_OP_breg24:
2640 case DW_OP_breg25:
2641 case DW_OP_breg26:
2642 case DW_OP_breg27:
2643 case DW_OP_breg28:
2644 case DW_OP_breg29:
2645 case DW_OP_breg30:
2646 case DW_OP_breg31:
2647 case DW_OP_fbreg:
2648 fputc (',', asm_out_file);
2649 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2650 break;
2651
2652 case DW_OP_bregx:
2653 {
2654 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2655 gcc_assert (size_of_uleb128 (r)
2656 == size_of_uleb128 (val1->v.val_unsigned));
2657 fputc (',', asm_out_file);
2658 dw2_asm_output_data_uleb128_raw (r);
2659 fputc (',', asm_out_file);
2660 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2661 }
2662 break;
2663
2664 case DW_OP_implicit_pointer:
2665 case DW_OP_entry_value:
2666 case DW_OP_const_type:
2667 case DW_OP_regval_type:
2668 case DW_OP_deref_type:
2669 case DW_OP_convert:
2670 case DW_OP_reinterpret:
2671 case DW_OP_GNU_implicit_pointer:
2672 case DW_OP_GNU_entry_value:
2673 case DW_OP_GNU_const_type:
2674 case DW_OP_GNU_regval_type:
2675 case DW_OP_GNU_deref_type:
2676 case DW_OP_GNU_convert:
2677 case DW_OP_GNU_reinterpret:
2678 case DW_OP_GNU_parameter_ref:
2679 gcc_unreachable ();
2680 break;
2681
2682 default:
2683 /* Other codes have no operands. */
2684 break;
2685 }
2686 }
2687
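/* Output a sequence of location operations as comma-separated raw bytes,
   suitable for use as the operand of a .cfi_escape directive; e.g. a lone
   DW_OP_plus_uconst with operand 16 comes out roughly as 0x23,0x10.  */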
2688 void
2689 output_loc_sequence_raw (dw_loc_descr_ref loc)
2690 {
2691 while (1)
2692 {
2693 enum dwarf_location_atom opc = loc->dw_loc_opc;
2694 /* Output the opcode. */
2695 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2696 {
2697 unsigned r = (opc - DW_OP_breg0);
2698 r = DWARF2_FRAME_REG_OUT (r, 1);
2699 gcc_assert (r <= 31);
2700 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2701 }
2702 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2703 {
2704 unsigned r = (opc - DW_OP_reg0);
2705 r = DWARF2_FRAME_REG_OUT (r, 1);
2706 gcc_assert (r <= 31);
2707 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2708 }
2709 /* Output the opcode. */
2710 fprintf (asm_out_file, "%#x", opc);
2711 output_loc_operands_raw (loc);
2712
2713 if (!loc->dw_loc_next)
2714 break;
2715 loc = loc->dw_loc_next;
2716
2717 fputc (',', asm_out_file);
2718 }
2719 }
2720
2721 /* This function builds a dwarf location descriptor sequence from a
2722 dw_cfa_location, adding the given OFFSET to the result of the
2723 expression. */
2724
2725 struct dw_loc_descr_node *
2726 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2727 {
2728 struct dw_loc_descr_node *head, *tmp;
2729
2730 offset += cfa->offset;
2731
2732 if (cfa->indirect)
2733 {
2734 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2735 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2736 head->dw_loc_oprnd1.val_entry = NULL;
2737 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2738 add_loc_descr (&head, tmp);
2739 loc_descr_plus_const (&head, offset);
2740 }
2741 else
2742 head = new_reg_loc_descr (cfa->reg, offset);
2743
2744 return head;
2745 }
2746
2747 /* This function builds a dwarf location descriptor sequence for
2748    the address at OFFSET from the CFA when the stack is aligned to
2749    ALIGNMENT bytes.  */
2750
2751 struct dw_loc_descr_node *
2752 build_cfa_aligned_loc (dw_cfa_location *cfa,
2753 poly_int64 offset, HOST_WIDE_INT alignment)
2754 {
2755 struct dw_loc_descr_node *head;
2756 unsigned int dwarf_fp
2757 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2758
2759 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2760 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2761 {
2762 head = new_reg_loc_descr (dwarf_fp, 0);
2763 add_loc_descr (&head, int_loc_descriptor (alignment));
2764 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2765 loc_descr_plus_const (&head, offset);
2766 }
2767 else
2768 head = new_reg_loc_descr (dwarf_fp, offset);
2769 return head;
2770 }
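
/* For instance, when the CFA is FP-based and not indirect, the expression
   built above has roughly the shape

     DW_OP_breg<fp> 0; <constant ALIGNMENT>; DW_OP_and; <add OFFSET>

   i.e. the frame pointer value is AND-ed with ALIGNMENT and then OFFSET
   is added.  */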
2771 \f
2772 /* And now, the support for symbolic debugging information. */
2773
2774 /* .debug_str support. */
2775
2776 static void dwarf2out_init (const char *);
2777 static void dwarf2out_finish (const char *);
2778 static void dwarf2out_early_finish (const char *);
2779 static void dwarf2out_assembly_start (void);
2780 static void dwarf2out_define (unsigned int, const char *);
2781 static void dwarf2out_undef (unsigned int, const char *);
2782 static void dwarf2out_start_source_file (unsigned, const char *);
2783 static void dwarf2out_end_source_file (unsigned);
2784 static void dwarf2out_function_decl (tree);
2785 static void dwarf2out_begin_block (unsigned, unsigned);
2786 static void dwarf2out_end_block (unsigned, unsigned);
2787 static bool dwarf2out_ignore_block (const_tree);
2788 static void dwarf2out_early_global_decl (tree);
2789 static void dwarf2out_late_global_decl (tree);
2790 static void dwarf2out_type_decl (tree, int);
2791 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2792 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2793 dw_die_ref);
2794 static void dwarf2out_abstract_function (tree);
2795 static void dwarf2out_var_location (rtx_insn *);
2796 static void dwarf2out_inline_entry (tree);
2797 static void dwarf2out_size_function (tree);
2798 static void dwarf2out_begin_function (tree);
2799 static void dwarf2out_end_function (unsigned int);
2800 static void dwarf2out_register_main_translation_unit (tree unit);
2801 static void dwarf2out_set_name (tree, tree);
2802 static void dwarf2out_register_external_die (tree decl, const char *sym,
2803 unsigned HOST_WIDE_INT off);
2804 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2805 unsigned HOST_WIDE_INT *off);
2806
2807 /* The debug hooks structure. */
2808
2809 const struct gcc_debug_hooks dwarf2_debug_hooks =
2810 {
2811 dwarf2out_init,
2812 dwarf2out_finish,
2813 dwarf2out_early_finish,
2814 dwarf2out_assembly_start,
2815 dwarf2out_define,
2816 dwarf2out_undef,
2817 dwarf2out_start_source_file,
2818 dwarf2out_end_source_file,
2819 dwarf2out_begin_block,
2820 dwarf2out_end_block,
2821 dwarf2out_ignore_block,
2822 dwarf2out_source_line,
2823 dwarf2out_begin_prologue,
2824 #if VMS_DEBUGGING_INFO
2825 dwarf2out_vms_end_prologue,
2826 dwarf2out_vms_begin_epilogue,
2827 #else
2828 debug_nothing_int_charstar,
2829 debug_nothing_int_charstar,
2830 #endif
2831 dwarf2out_end_epilogue,
2832 dwarf2out_begin_function,
2833 dwarf2out_end_function, /* end_function */
2834 dwarf2out_register_main_translation_unit,
2835 dwarf2out_function_decl, /* function_decl */
2836 dwarf2out_early_global_decl,
2837 dwarf2out_late_global_decl,
2838 dwarf2out_type_decl, /* type_decl */
2839 dwarf2out_imported_module_or_decl,
2840 dwarf2out_die_ref_for_decl,
2841 dwarf2out_register_external_die,
2842 debug_nothing_tree, /* deferred_inline_function */
2843 /* The DWARF 2 backend tries to reduce debugging bloat by not
2844 emitting the abstract description of inline functions until
2845 something tries to reference them. */
2846 dwarf2out_abstract_function, /* outlining_inline_function */
2847 debug_nothing_rtx_code_label, /* label */
2848 debug_nothing_int, /* handle_pch */
2849 dwarf2out_var_location,
2850 dwarf2out_inline_entry, /* inline_entry */
2851 dwarf2out_size_function, /* size_function */
2852 dwarf2out_switch_text_section,
2853 dwarf2out_set_name,
2854 1, /* start_end_main_source_file */
2855 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2856 };
2857
2858 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2859 {
2860 dwarf2out_init,
2861 debug_nothing_charstar,
2862 debug_nothing_charstar,
2863 dwarf2out_assembly_start,
2864 debug_nothing_int_charstar,
2865 debug_nothing_int_charstar,
2866 debug_nothing_int_charstar,
2867 debug_nothing_int,
2868 debug_nothing_int_int, /* begin_block */
2869 debug_nothing_int_int, /* end_block */
2870 debug_true_const_tree, /* ignore_block */
2871 dwarf2out_source_line, /* source_line */
2872 debug_nothing_int_int_charstar, /* begin_prologue */
2873 debug_nothing_int_charstar, /* end_prologue */
2874 debug_nothing_int_charstar, /* begin_epilogue */
2875 debug_nothing_int_charstar, /* end_epilogue */
2876 debug_nothing_tree, /* begin_function */
2877 debug_nothing_int, /* end_function */
2878 debug_nothing_tree, /* register_main_translation_unit */
2879 debug_nothing_tree, /* function_decl */
2880 debug_nothing_tree, /* early_global_decl */
2881 debug_nothing_tree, /* late_global_decl */
2882 debug_nothing_tree_int, /* type_decl */
2883 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2884 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2885 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2886 debug_nothing_tree, /* deferred_inline_function */
2887 debug_nothing_tree, /* outlining_inline_function */
2888 debug_nothing_rtx_code_label, /* label */
2889 debug_nothing_int, /* handle_pch */
2890 debug_nothing_rtx_insn, /* var_location */
2891 debug_nothing_tree, /* inline_entry */
2892 debug_nothing_tree, /* size_function */
2893 debug_nothing_void, /* switch_text_section */
2894 debug_nothing_tree_tree, /* set_name */
2895 0, /* start_end_main_source_file */
2896 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2897 };
2898 \f
2899 /* NOTE: In the comments in this file, many references are made to
2900 "Debugging Information Entries". This term is abbreviated as `DIE'
2901 throughout the remainder of this file. */
2902
2903 /* An internal representation of the DWARF output is built, and then
2904 walked to generate the DWARF debugging info. The walk of the internal
2905 representation is done after the entire program has been compiled.
2906 The types below are used to describe the internal representation. */
2907
2908 /* Whether to put type DIEs into their own section .debug_types instead
2909    of making them part of the .debug_info section.  Only supported for
2910    Dwarf V4 or higher, and only if the user didn't disable them through
2911    -fno-debug-types-section.  It is more efficient to put them in
2912    separate comdat sections since the linker will then be able to
2913    remove duplicates.  But not all tools support .debug_types sections
2914    yet.  For Dwarf V5 or higher .debug_types doesn't exist any more;
2915    type units are emitted as DW_UT_type units in the .debug_info section.  */
2916
2917 #define use_debug_types (dwarf_version >= 4 && flag_debug_types_section)
2918
2919 /* Various DIE's use offsets relative to the beginning of the
2920 .debug_info section to refer to each other. */
2921
2922 typedef long int dw_offset;
2923
2924 struct comdat_type_node;
2925
2926 /* The entries in the line_info table more-or-less mirror the opcodes
2927 that are used in the real dwarf line table. Arrays of these entries
2928 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2929 supported. */
2930
2931 enum dw_line_info_opcode {
2932 /* Emit DW_LNE_set_address; the operand is the label index. */
2933 LI_set_address,
2934
2935 /* Emit a row to the matrix with the given line. This may be done
2936 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2937 special opcodes. */
2938 LI_set_line,
2939
2940 /* Emit a DW_LNS_set_file. */
2941 LI_set_file,
2942
2943 /* Emit a DW_LNS_set_column. */
2944 LI_set_column,
2945
2946 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2947 LI_negate_stmt,
2948
2949 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2950 LI_set_prologue_end,
2951 LI_set_epilogue_begin,
2952
2953 /* Emit a DW_LNE_set_discriminator. */
2954 LI_set_discriminator,
2955
2956 /* Output a Fixed Advance PC; the target PC is the label index; the
2957 base PC is the previous LI_adv_address or LI_set_address entry.
2958 We only use this when emitting debug views without assembler
2959 support, at explicit user request. Ideally, we should only use
2960 it when the offset might be zero but we can't tell: it's the only
2961 way to maybe change the PC without resetting the view number. */
2962 LI_adv_address
2963 };
2964
2965 typedef struct GTY(()) dw_line_info_struct {
2966 enum dw_line_info_opcode opcode;
2967 unsigned int val;
2968 } dw_line_info_entry;
2969
2970
2971 struct GTY(()) dw_line_info_table {
2972 /* The label that marks the end of this section. */
2973 const char *end_label;
2974
2975 /* The values for the last row of the matrix, as collected in the table.
2976 These are used to minimize the changes to the next row. */
2977 unsigned int file_num;
2978 unsigned int line_num;
2979 unsigned int column_num;
2980 int discrim_num;
2981 bool is_stmt;
2982 bool in_use;
2983
2984 /* This denotes the NEXT view number.
2985
2986 If it is 0, it is known that the NEXT view will be the first view
2987 at the given PC.
2988
2989 If it is -1, we're forcing the view number to be reset, e.g. at a
2990 function entry.
2991
2992 The meaning of other nonzero values depends on whether we're
2993 computing views internally or leaving it for the assembler to do
2994 so. If we're emitting them internally, view denotes the view
2995 number since the last known advance of PC. If we're leaving it
2996 for the assembler, it denotes the LVU label number that we're
2997 going to ask the assembler to assign. */
2998 var_loc_view view;
2999
3000 /* This counts the number of symbolic views emitted in this table
3001 since the latest view reset. Its max value, over all tables,
3002 sets symview_upper_bound. */
3003 var_loc_view symviews_since_reset;
3004
3005 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
3006 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
3007 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
3008 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
3009
3010 vec<dw_line_info_entry, va_gc> *entries;
3011 };
3012
3013 /* This is an upper bound for view numbers that the assembler may
3014 assign to symbolic views output in this translation. It is used to
3015 decide how big a field to use to represent view numbers in
3016 symview-classed attributes. */
3017
3018 static var_loc_view symview_upper_bound;
3019
3020 /* If we're keeping track of location views and their reset points, and
3021 INSN is a reset point (i.e., it necessarily advances the PC), mark
3022 the next view in TABLE as reset. */
3023
3024 static void
3025 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
3026 {
3027 if (!debug_internal_reset_location_views)
3028 return;
3029
3030 /* Maybe turn (part of?) this test into a default target hook. */
3031 int reset = 0;
3032
3033 if (targetm.reset_location_view)
3034 reset = targetm.reset_location_view (insn);
3035
3036 if (reset)
3037 ;
3038 else if (JUMP_TABLE_DATA_P (insn))
3039 reset = 1;
3040 else if (GET_CODE (insn) == USE
3041 || GET_CODE (insn) == CLOBBER
3042 || GET_CODE (insn) == ASM_INPUT
3043 || asm_noperands (insn) >= 0)
3044 ;
3045 else if (get_attr_min_length (insn) > 0)
3046 reset = 1;
3047
3048 if (reset > 0 && !RESETTING_VIEW_P (table->view))
3049 RESET_NEXT_VIEW (table->view);
3050 }
3051
3052 /* Each DIE attribute has a field specifying the attribute kind,
3053 a link to the next attribute in the chain, and an attribute value.
3054 Attributes are typically linked below the DIE they modify. */
3055
3056 typedef struct GTY(()) dw_attr_struct {
3057 enum dwarf_attribute dw_attr;
3058 dw_val_node dw_attr_val;
3059 }
3060 dw_attr_node;
3061
3062
3063 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3064 The children of each node form a circular list linked by
3065 die_sib. die_child points to the node *before* the "first" child node. */
3066
3067 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3068 union die_symbol_or_type_node
3069 {
3070 const char * GTY ((tag ("0"))) die_symbol;
3071 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3072 }
3073 GTY ((desc ("%0.comdat_type_p"))) die_id;
3074 vec<dw_attr_node, va_gc> *die_attr;
3075 dw_die_ref die_parent;
3076 dw_die_ref die_child;
3077 dw_die_ref die_sib;
3078 dw_die_ref die_definition; /* ref from a specification to its definition */
3079 dw_offset die_offset;
3080 unsigned long die_abbrev;
3081 int die_mark;
3082 unsigned int decl_id;
3083 enum dwarf_tag die_tag;
3084 /* Die is used and must not be pruned as unused. */
3085 BOOL_BITFIELD die_perennial_p : 1;
3086 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3087 /* For an external ref to die_symbol if die_offset contains an extra
3088 offset to that symbol. */
3089 BOOL_BITFIELD with_offset : 1;
3090   /* Whether this DIE was removed from the DIE tree, for example via
3091      prune_unused_types.  The DIE lookup routines do not consider
3092      such DIEs to be present.  */
3093 BOOL_BITFIELD removed : 1;
3094 /* Lots of spare bits. */
3095 }
3096 die_node;
3097
3098 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3099 static bool early_dwarf;
3100 static bool early_dwarf_finished;
3101 struct set_early_dwarf {
3102 bool saved;
3103 set_early_dwarf () : saved(early_dwarf)
3104 {
3105 gcc_assert (! early_dwarf_finished);
3106 early_dwarf = true;
3107 }
3108 ~set_early_dwarf () { early_dwarf = saved; }
3109 };
3110
3111 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3112 #define FOR_EACH_CHILD(die, c, expr) do { \
3113 c = die->die_child; \
3114 if (c) do { \
3115 c = c->die_sib; \
3116 expr; \
3117 } while (c != die->die_child); \
3118 } while (0)
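
/* Illustrative (hypothetical) use of FOR_EACH_CHILD, counting the children
   of a DIE:

     unsigned count = 0;
     dw_die_ref c;
     FOR_EACH_CHILD (die, c, count++);

   Because the children form a circular list and die_child points at the
   node *before* the first child, the macro visits each child exactly
   once.  */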
3119
3120 /* The pubname structure */
3121
3122 typedef struct GTY(()) pubname_struct {
3123 dw_die_ref die;
3124 const char *name;
3125 }
3126 pubname_entry;
3127
3128
3129 struct GTY(()) dw_ranges {
3130 const char *label;
3131 /* If this is positive, it's a block number, otherwise it's a
3132 bitwise-negated index into dw_ranges_by_label. */
3133 int num;
3134 /* Index for the range list for DW_FORM_rnglistx. */
3135 unsigned int idx : 31;
3136 /* True if this range might be possibly in a different section
3137 from previous entry. */
3138 unsigned int maybe_new_sec : 1;
3139 };
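
/* For example, num == 5 refers to BLOCK number 5, while num == -3 refers
   to entry ~(-3) == 2 in dw_ranges_by_label.  */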
3140
3141 /* A structure to hold a macinfo entry. */
3142
3143 typedef struct GTY(()) macinfo_struct {
3144 unsigned char code;
3145 unsigned HOST_WIDE_INT lineno;
3146 const char *info;
3147 }
3148 macinfo_entry;
3149
3150
3151 struct GTY(()) dw_ranges_by_label {
3152 const char *begin;
3153 const char *end;
3154 };
3155
3156 /* The comdat type node structure. */
3157 struct GTY(()) comdat_type_node
3158 {
3159 dw_die_ref root_die;
3160 dw_die_ref type_die;
3161 dw_die_ref skeleton_die;
3162 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3163 comdat_type_node *next;
3164 };
3165
3166 /* A list of DIEs for which we can't determine ancestry (parent_die
3167 field) just yet. Later in dwarf2out_finish we will fill in the
3168 missing bits. */
3169 typedef struct GTY(()) limbo_die_struct {
3170 dw_die_ref die;
3171 /* The tree for which this DIE was created. We use this to
3172 determine ancestry later. */
3173 tree created_for;
3174 struct limbo_die_struct *next;
3175 }
3176 limbo_die_node;
3177
3178 typedef struct skeleton_chain_struct
3179 {
3180 dw_die_ref old_die;
3181 dw_die_ref new_die;
3182 struct skeleton_chain_struct *parent;
3183 }
3184 skeleton_chain_node;
3185
3186 /* Define a macro which returns nonzero for a TYPE_DECL which was
3187 implicitly generated for a type.
3188
3189 Note that, unlike the C front-end (which generates a NULL named
3190 TYPE_DECL node for each complete tagged type, each array type,
3191 and each function type node created) the C++ front-end generates
3192 a _named_ TYPE_DECL node for each tagged type node created.
3193 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3194 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3195 front-end, but for each type, tagged or not. */
3196
3197 #define TYPE_DECL_IS_STUB(decl) \
3198 (DECL_NAME (decl) == NULL_TREE \
3199 || (DECL_ARTIFICIAL (decl) \
3200 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3201 /* This is necessary for stub decls that \
3202 appear in nested inline functions. */ \
3203 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3204 && (decl_ultimate_origin (decl) \
3205 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3206
3207 /* Information concerning the compilation unit's programming
3208 language, and compiler version. */
3209
3210 /* Fixed size portion of the DWARF compilation unit header. */
3211 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3212 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3213 + (dwarf_version >= 5 ? 4 : 3))
3214
3215 /* Fixed size portion of the DWARF comdat type unit header. */
3216 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3217 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3218 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3219
3220 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3221 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3222 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3223
3224 /* Fixed size portion of public names info. */
3225 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3226
3227 /* Fixed size portion of the address range info. */
3228 #define DWARF_ARANGES_HEADER_SIZE \
3229 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3230 DWARF2_ADDR_SIZE * 2) \
3231 - DWARF_INITIAL_LENGTH_SIZE)
3232
3233 /* Size of padding portion in the address range info. It must be
3234 aligned to twice the pointer size. */
3235 #define DWARF_ARANGES_PAD_SIZE \
3236 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3237 DWARF2_ADDR_SIZE * 2) \
3238 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
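
/* For example, with 32-bit DWARF (4-byte initial length and offsets) and
   8-byte addresses, the 12 header bytes (4 + 4 + 4) round up to the
   16-byte tuple boundary, so DWARF_ARANGES_PAD_SIZE is 4 and
   DWARF_ARANGES_HEADER_SIZE is 16 - 4 = 12.  */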
3239
3240 /* Use assembler line directives if available. */
3241 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3242 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3243 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3244 #else
3245 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3246 #endif
3247 #endif
3248
3249 /* Use assembler views in line directives if available. */
3250 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3251 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3252 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3253 #else
3254 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3255 #endif
3256 #endif
3257
3258 /* Return true if GCC configure detected assembler support for .loc. */
3259
3260 bool
3261 dwarf2out_default_as_loc_support (void)
3262 {
3263 return DWARF2_ASM_LINE_DEBUG_INFO;
3264 #if (GCC_VERSION >= 3000)
3265 # undef DWARF2_ASM_LINE_DEBUG_INFO
3266 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3267 #endif
3268 }
3269
3270 /* Return true if GCC configure detected assembler support for views
3271 in .loc directives. */
3272
3273 bool
3274 dwarf2out_default_as_locview_support (void)
3275 {
3276 return DWARF2_ASM_VIEW_DEBUG_INFO;
3277 #if (GCC_VERSION >= 3000)
3278 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3279 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3280 #endif
3281 }
3282
3283 /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
3284 view computation, and it refers to a view identifier for which we
3285 will not emit a label because it is known to map to a view number
3286 zero. We won't allocate the bitmap if we're not using assembler
3287 support for location views, but we have to make the variable
3288 visible for GGC and for code that will be optimized out for lack of
3289 support but that's still parsed and compiled. We could abstract it
3290 out with macros, but it's not worth it. */
3291 static GTY(()) bitmap zero_view_p;
3292
3293 /* Evaluate to TRUE iff N is known to identify the first location view
3294 at its PC. When not using assembler location view computation,
3295 that must be view number zero. Otherwise, ZERO_VIEW_P is allocated
3296    and view label numbers recorded in it are the ones known to be
3297 zero. */
3298 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3299 || (N) == (var_loc_view)-1 \
3300 || (zero_view_p \
3301 && bitmap_bit_p (zero_view_p, (N))))
3302
3303 /* Return true iff we're to emit .loc directives for the assembler to
3304 generate line number sections.
3305
3306 When we're not emitting views, all we need from the assembler is
3307 support for .loc directives.
3308
3309 If we are emitting views, we can only use the assembler's .loc
3310 support if it also supports views.
3311
3312 When the compiler is emitting the line number programs and
3313 computing view numbers itself, it resets view numbers at known PC
3314 changes and counts from that, and then it emits view numbers as
3315 literal constants in locviewlists. There are cases in which the
3316 compiler is not sure about PC changes, e.g. when extra alignment is
3317 requested for a label. In these cases, the compiler may not reset
3318 the view counter, and the potential PC advance in the line number
3319 program will use an opcode that does not reset the view counter
3320 even if the PC actually changes, so that compiler and debug info
3321 consumer can keep view numbers in sync.
3322
3323 When the compiler defers view computation to the assembler, it
3324 emits symbolic view numbers in locviewlists, with the exception of
3325 views known to be zero (forced resets, or reset after
3326 compiler-visible PC changes): instead of emitting symbols for
3327 these, we emit literal zero and assert the assembler agrees with
3328 the compiler's assessment. We could use symbolic views everywhere,
3329 instead of special-casing zero views, but then we'd be unable to
3330 optimize out locviewlists that contain only zeros. */
3331
3332 static bool
3333 output_asm_line_debug_info (void)
3334 {
3335 return (dwarf2out_as_loc_support
3336 && (dwarf2out_as_locview_support
3337 || !debug_variable_location_views));
3338 }
3339
3340 /* Minimum line offset in a special line info. opcode.
3341 This value was chosen to give a reasonable range of values. */
3342 #define DWARF_LINE_BASE -10
3343
3344 /* First special line opcode - leave room for the standard opcodes. */
3345 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3346
3347 /* Range of line offsets in a special line info. opcode. */
3348 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
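
/* With the standard value DW_LNS_set_isa == 12, DWARF_LINE_OPCODE_BASE is
   13 and DWARF_LINE_RANGE is 242.  A special opcode then encodes a
   (line advance, address advance) pair as

     opcode = (line_delta - DWARF_LINE_BASE)
              + DWARF_LINE_RANGE * addr_delta
              + DWARF_LINE_OPCODE_BASE

   provided the result fits in a single byte.  */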
3349
3350 /* Flag that indicates the initial value of the is_stmt_start flag.
3351 In the present implementation, we do not mark any lines as
3352 the beginning of a source statement, because that information
3353 is not made available by the GCC front-end. */
3354 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3355
3356 /* Maximum number of operations per instruction bundle. */
3357 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3358 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3359 #endif
3360
3361 /* This location is used by calc_die_sizes() to keep track of
3362    the offset of each DIE within the .debug_info section.  */
3363 static unsigned long next_die_offset;
3364
3365 /* Record the root of the DIE's built for the current compilation unit. */
3366 static GTY(()) dw_die_ref single_comp_unit_die;
3367
3368 /* A list of type DIEs that have been separated into comdat sections. */
3369 static GTY(()) comdat_type_node *comdat_type_list;
3370
3371 /* A list of CU DIEs that have been separated. */
3372 static GTY(()) limbo_die_node *cu_die_list;
3373
3374 /* A list of DIEs with a NULL parent waiting to be relocated. */
3375 static GTY(()) limbo_die_node *limbo_die_list;
3376
3377 /* A list of DIEs for which we may have to generate
3378 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3379 static GTY(()) limbo_die_node *deferred_asm_name;
3380
3381 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3382 {
3383 typedef const char *compare_type;
3384
3385 static hashval_t hash (dwarf_file_data *);
3386 static bool equal (dwarf_file_data *, const char *);
3387 };
3388
3389 /* Filenames referenced by this compilation unit. */
3390 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3391
3392 struct decl_die_hasher : ggc_ptr_hash<die_node>
3393 {
3394 typedef tree compare_type;
3395
3396 static hashval_t hash (die_node *);
3397 static bool equal (die_node *, tree);
3398 };
3399 /* A hash table of references to DIE's that describe declarations.
3400 The key is a DECL_UID() which is a unique number identifying each decl. */
3401 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3402
3403 struct GTY ((for_user)) variable_value_struct {
3404 unsigned int decl_id;
3405 vec<dw_die_ref, va_gc> *dies;
3406 };
3407
3408 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3409 {
3410 typedef tree compare_type;
3411
3412 static hashval_t hash (variable_value_struct *);
3413 static bool equal (variable_value_struct *, tree);
3414 };
3415 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3416 dw_val_class_decl_ref class, indexed by FUNCTION_DECLs which is
3417 DECL_CONTEXT of the referenced VAR_DECLs. */
3418 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3419
3420 struct block_die_hasher : ggc_ptr_hash<die_struct>
3421 {
3422 static hashval_t hash (die_struct *);
3423 static bool equal (die_struct *, die_struct *);
3424 };
3425
3426 /* A hash table of references to DIE's that describe COMMON blocks.
3427 The key is DECL_UID() ^ die_parent. */
3428 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3429
3430 typedef struct GTY(()) die_arg_entry_struct {
3431 dw_die_ref die;
3432 tree arg;
3433 } die_arg_entry;
3434
3435
3436 /* Node of the variable location list. */
3437 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3438 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3439 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3440 in mode of the EXPR_LIST node and first EXPR_LIST operand
3441 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3442 location or NULL for padding. For larger bitsizes,
3443 mode is 0 and first operand is a CONCAT with bitsize
3444 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3445 NULL as second operand. */
3446 rtx GTY (()) loc;
3447 const char * GTY (()) label;
3448 struct var_loc_node * GTY (()) next;
3449 var_loc_view view;
3450 };
3451
3452 /* Variable location list. */
3453 struct GTY ((for_user)) var_loc_list_def {
3454 struct var_loc_node * GTY (()) first;
3455
3456   /* Pointer to the last or last-but-one element of the
3457      chained list.  If the list is empty, both first and
3458      last are NULL.  If the list contains just one node,
3459      or the last node is certainly not redundant, it points
3460      to the last node; otherwise it points to the last but one.
3461      Do not mark it for GC because it is marked through the chain.  */
3462 struct var_loc_node * GTY ((skip ("%h"))) last;
3463
3464   /* Pointer to the last element before a section switch;
3465      if NULL, either sections weren't switched or first
3466      is after the section switch.  */
3467 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3468
3469 /* DECL_UID of the variable decl. */
3470 unsigned int decl_id;
3471 };
3472 typedef struct var_loc_list_def var_loc_list;
3473
3474 /* Call argument location list. */
3475 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3476 rtx GTY (()) call_arg_loc_note;
3477 const char * GTY (()) label;
3478 tree GTY (()) block;
3479 bool tail_call_p;
3480 rtx GTY (()) symbol_ref;
3481 struct call_arg_loc_node * GTY (()) next;
3482 };
3483
3484
3485 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3486 {
3487 typedef const_tree compare_type;
3488
3489 static hashval_t hash (var_loc_list *);
3490 static bool equal (var_loc_list *, const_tree);
3491 };
3492
3493 /* Table of decl location linked lists. */
3494 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3495
3496 /* Head and tail of call_arg_loc chain. */
3497 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3498 static struct call_arg_loc_node *call_arg_loc_last;
3499
3500 /* Number of call sites in the current function. */
3501 static int call_site_count = -1;
3502 /* Number of tail call sites in the current function. */
3503 static int tail_call_site_count = -1;
3504
3505 /* A cached location list. */
3506 struct GTY ((for_user)) cached_dw_loc_list_def {
3507 /* The DECL_UID of the decl that this entry describes. */
3508 unsigned int decl_id;
3509
3510 /* The cached location list. */
3511 dw_loc_list_ref loc_list;
3512 };
3513 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3514
3515 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3516 {
3517
3518 typedef const_tree compare_type;
3519
3520 static hashval_t hash (cached_dw_loc_list *);
3521 static bool equal (cached_dw_loc_list *, const_tree);
3522 };
3523
3524 /* Table of cached location lists. */
3525 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3526
3527 /* A vector of references to DIE's that are uniquely identified by their tag,
3528 presence/absence of children DIE's, and list of attribute/value pairs. */
3529 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3530
3531 /* A hash map to remember the stack usage for DWARF procedures. The value
3532 stored is the stack size difference between before the DWARF procedure
3533 invokation and after it returned. In other words, for a DWARF procedure
3534 that consumes N stack slots and that pushes M ones, this stores M - N. */
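/* For example (illustrative only): a DWARF procedure that pops its two
   operands and pushes a single result would be recorded here as
   1 - 2 == -1, while one that pushes a result without consuming any
   stack slots would be recorded as 1.  */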
3535 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
3536
3537 /* A global counter for generating labels for line number data. */
3538 static unsigned int line_info_label_num;
3539
3540 /* The current table to which we should emit line number information
3541 for the current function. This will be set up at the beginning of
3542 assembly for the function. */
3543 static GTY(()) dw_line_info_table *cur_line_info_table;
3544
3545 /* The two default tables of line number info. */
3546 static GTY(()) dw_line_info_table *text_section_line_info;
3547 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3548
3549 /* The set of all non-default tables of line number info. */
3550 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3551
3552 /* A flag to tell the pubnames/pubtypes export code whether there is an
3553 info section to refer to. */
3554 static bool info_section_emitted;
3555
3556 /* A pointer to the base of a table that contains a list of publicly
3557 accessible names. */
3558 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3559
3560 /* A pointer to the base of a table that contains a list of publicly
3561 accessible types. */
3562 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3563
3564 /* A pointer to the base of a table that contains a list of macro
3565 defines/undefines (and file start/end markers). */
3566 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3567
3568 /* True if the .debug_macinfo or .debug_macro section is going to be
3569 emitted. */
3570 #define have_macinfo \
3571 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3572 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3573 && !macinfo_table->is_empty ())
3574
3575 /* Vector of dies for which we should generate .debug_ranges info. */
3576 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3577
3578 /* Vector of pairs of labels referenced in ranges_table. */
3579 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3580
3581 /* Whether we have location lists that need outputting. */
3582 static GTY(()) bool have_location_lists;
3583
3584 /* Unique label counter. */
3585 static GTY(()) unsigned int loclabel_num;
3586
3587 /* Unique label counter for point-of-call tables. */
3588 static GTY(()) unsigned int poc_label_num;
3589
3590 /* The last file entry emitted by maybe_emit_file(). */
3591 static GTY(()) struct dwarf_file_data * last_emitted_file;
3592
3593 /* Number of internal labels generated by gen_internal_sym(). */
3594 static GTY(()) int label_num;
3595
3596 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3597
3598 /* Instances of generic types for which we need to generate debug
3599 info that describes their generic parameters and arguments. That
3600 generation needs to happen once all types are properly laid out, so
3601 we do it at the end of compilation. */
3602 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3603
3604 /* Offset from the "steady-state frame pointer" to the frame base,
3605 within the current function. */
3606 static poly_int64 frame_pointer_fb_offset;
3607 static bool frame_pointer_fb_offset_valid;
3608
3609 static vec<dw_die_ref> base_types;
3610
3611 /* Flags to represent a set of attribute classes for attributes that represent
3612 a scalar value (bounds, pointers, ...). */
3613 enum dw_scalar_form
3614 {
3615 dw_scalar_form_constant = 0x01,
3616 dw_scalar_form_exprloc = 0x02,
3617 dw_scalar_form_reference = 0x04
3618 };
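
/* These are bit flags, so callers combine them to state which
   representations are acceptable for a given attribute; for instance
   (illustrative only) a bounds helper might pass
   dw_scalar_form_constant | dw_scalar_form_exprloc | dw_scalar_form_reference.  */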
3619
3620 /* Forward declarations for functions defined in this file. */
3621
3622 static int is_pseudo_reg (const_rtx);
3623 static tree type_main_variant (tree);
3624 static int is_tagged_type (const_tree);
3625 static const char *dwarf_tag_name (unsigned);
3626 static const char *dwarf_attr_name (unsigned);
3627 static const char *dwarf_form_name (unsigned);
3628 static tree decl_ultimate_origin (const_tree);
3629 static tree decl_class_context (tree);
3630 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3631 static inline enum dw_val_class AT_class (dw_attr_node *);
3632 static inline unsigned int AT_index (dw_attr_node *);
3633 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3634 static inline unsigned AT_flag (dw_attr_node *);
3635 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3636 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3637 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3638 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3639 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3640 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3641 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3642 unsigned int, unsigned char *);
3643 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3644 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3645 static inline const char *AT_string (dw_attr_node *);
3646 static enum dwarf_form AT_string_form (dw_attr_node *);
3647 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3648 static void add_AT_specification (dw_die_ref, dw_die_ref);
3649 static inline dw_die_ref AT_ref (dw_attr_node *);
3650 static inline int AT_ref_external (dw_attr_node *);
3651 static inline void set_AT_ref_external (dw_attr_node *, int);
3652 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3653 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3654 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3655 dw_loc_list_ref);
3656 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3657 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3659 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3660 static void remove_addr_table_entry (addr_table_entry *);
3661 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3662 static inline rtx AT_addr (dw_attr_node *);
3663 static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *);
3664 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3665 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3666 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3667 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3668 unsigned long, bool);
3669 static inline const char *AT_lbl (dw_attr_node *);
3670 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3671 static const char *get_AT_low_pc (dw_die_ref);
3672 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3673 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3674 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3675 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3676 static bool is_c (void);
3677 static bool is_cxx (void);
3678 static bool is_cxx (const_tree);
3679 static bool is_fortran (void);
3680 static bool is_ada (void);
3681 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3682 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3683 static void add_child_die (dw_die_ref, dw_die_ref);
3684 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3685 static dw_die_ref lookup_type_die (tree);
3686 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3687 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3688 static void equate_type_number_to_die (tree, dw_die_ref);
3689 static dw_die_ref lookup_decl_die (tree);
3690 static var_loc_list *lookup_decl_loc (const_tree);
3691 static void equate_decl_number_to_die (tree, dw_die_ref);
3692 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3693 static void print_spaces (FILE *);
3694 static void print_die (dw_die_ref, FILE *);
3695 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3696 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3697 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3698 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3699 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3700 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3701 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3702 struct md5_ctx *, int *);
3703 struct checksum_attributes;
3704 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3705 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3706 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3707 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3708 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3709 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3710 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3711 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3712 static int is_type_die (dw_die_ref);
3713 static inline bool is_template_instantiation (dw_die_ref);
3714 static int is_declaration_die (dw_die_ref);
3715 static int should_move_die_to_comdat (dw_die_ref);
3716 static dw_die_ref clone_as_declaration (dw_die_ref);
3717 static dw_die_ref clone_die (dw_die_ref);
3718 static dw_die_ref clone_tree (dw_die_ref);
3719 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3720 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3721 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3722 static dw_die_ref generate_skeleton (dw_die_ref);
3723 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3724 dw_die_ref,
3725 dw_die_ref);
3726 static void break_out_comdat_types (dw_die_ref);
3727 static void copy_decls_for_unworthy_types (dw_die_ref);
3728
3729 static void add_sibling_attributes (dw_die_ref);
3730 static void output_location_lists (dw_die_ref);
3731 static int constant_size (unsigned HOST_WIDE_INT);
3732 static unsigned long size_of_die (dw_die_ref);
3733 static void calc_die_sizes (dw_die_ref);
3734 static void calc_base_type_die_sizes (void);
3735 static void mark_dies (dw_die_ref);
3736 static void unmark_dies (dw_die_ref);
3737 static void unmark_all_dies (dw_die_ref);
3738 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3739 static unsigned long size_of_aranges (void);
3740 static enum dwarf_form value_format (dw_attr_node *);
3741 static void output_value_format (dw_attr_node *);
3742 static void output_abbrev_section (void);
3743 static void output_die_abbrevs (unsigned long, dw_die_ref);
3744 static void output_die (dw_die_ref);
3745 static void output_compilation_unit_header (enum dwarf_unit_type);
3746 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3747 static void output_comdat_type_unit (comdat_type_node *);
3748 static const char *dwarf2_name (tree, int);
3749 static void add_pubname (tree, dw_die_ref);
3750 static void add_enumerator_pubname (const char *, dw_die_ref);
3751 static void add_pubname_string (const char *, dw_die_ref);
3752 static void add_pubtype (tree, dw_die_ref);
3753 static void output_pubnames (vec<pubname_entry, va_gc> *);
3754 static void output_aranges (void);
3755 static unsigned int add_ranges (const_tree, bool = false);
3756 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3757 bool *, bool);
3758 static void output_ranges (void);
3759 static dw_line_info_table *new_line_info_table (void);
3760 static void output_line_info (bool);
3761 static void output_file_names (void);
3762 static dw_die_ref base_type_die (tree, bool);
3763 static int is_base_type (tree);
3764 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3765 static int decl_quals (const_tree);
3766 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3767 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3768 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3769 static unsigned int dbx_reg_number (const_rtx);
3770 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3771 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3772 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3773 enum var_init_status);
3774 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3775 enum var_init_status);
3776 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3777 enum var_init_status);
3778 static int is_based_loc (const_rtx);
3779 static bool resolve_one_addr (rtx *);
3780 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3781 enum var_init_status);
3782 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3783 enum var_init_status);
3784 struct loc_descr_context;
3785 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3786 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3787 static dw_loc_list_ref loc_list_from_tree (tree, int,
3788 struct loc_descr_context *);
3789 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3790 struct loc_descr_context *);
3791 static tree field_type (const_tree);
3792 static unsigned int simple_type_align_in_bits (const_tree);
3793 static unsigned int simple_decl_align_in_bits (const_tree);
3794 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3795 struct vlr_context;
3796 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3797 HOST_WIDE_INT *);
3798 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3799 dw_loc_list_ref);
3800 static void add_data_member_location_attribute (dw_die_ref, tree,
3801 struct vlr_context *);
3802 static bool add_const_value_attribute (dw_die_ref, rtx);
3803 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3804 static void insert_wide_int (const wide_int &, unsigned char *, int);
3805 static void insert_float (const_rtx, unsigned char *);
3806 static rtx rtl_for_decl_location (tree);
3807 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3808 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3809 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3810 static void add_name_attribute (dw_die_ref, const char *);
3811 static void add_desc_attribute (dw_die_ref, tree);
3812 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3813 static void add_comp_dir_attribute (dw_die_ref);
3814 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3815 struct loc_descr_context *);
3816 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3817 struct loc_descr_context *);
3818 static void add_subscript_info (dw_die_ref, tree, bool);
3819 static void add_byte_size_attribute (dw_die_ref, tree);
3820 static void add_alignment_attribute (dw_die_ref, tree);
3821 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3822 struct vlr_context *);
3823 static void add_bit_size_attribute (dw_die_ref, tree);
3824 static void add_prototyped_attribute (dw_die_ref, tree);
3825 static dw_die_ref add_abstract_origin_attribute (dw_die_ref, tree);
3826 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3827 static void add_src_coords_attributes (dw_die_ref, tree);
3828 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3829 static void add_discr_value (dw_die_ref, dw_discr_value *);
3830 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3831 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3832 static dw_die_ref scope_die_for (tree, dw_die_ref);
3833 static inline int local_scope_p (dw_die_ref);
3834 static inline int class_scope_p (dw_die_ref);
3835 static inline int class_or_namespace_scope_p (dw_die_ref);
3836 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3837 static void add_calling_convention_attribute (dw_die_ref, tree);
3838 static const char *type_tag (const_tree);
3839 static tree member_declared_type (const_tree);
3840 #if 0
3841 static const char *decl_start_label (tree);
3842 #endif
3843 static void gen_array_type_die (tree, dw_die_ref);
3844 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3845 #if 0
3846 static void gen_entry_point_die (tree, dw_die_ref);
3847 #endif
3848 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3849 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3850 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3851 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3852 static void gen_formal_types_die (tree, dw_die_ref);
3853 static void gen_subprogram_die (tree, dw_die_ref);
3854 static void gen_variable_die (tree, tree, dw_die_ref);
3855 static void gen_const_die (tree, dw_die_ref);
3856 static void gen_label_die (tree, dw_die_ref);
3857 static void gen_lexical_block_die (tree, dw_die_ref);
3858 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3859 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3860 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3861 static dw_die_ref gen_compile_unit_die (const char *);
3862 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3863 static void gen_member_die (tree, dw_die_ref);
3864 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3865 enum debug_info_usage);
3866 static void gen_subroutine_type_die (tree, dw_die_ref);
3867 static void gen_typedef_die (tree, dw_die_ref);
3868 static void gen_type_die (tree, dw_die_ref);
3869 static void gen_block_die (tree, dw_die_ref);
3870 static void decls_for_scope (tree, dw_die_ref);
3871 static bool is_naming_typedef_decl (const_tree);
3872 static inline dw_die_ref get_context_die (tree);
3873 static void gen_namespace_die (tree, dw_die_ref);
3874 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3875 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3876 static dw_die_ref force_decl_die (tree);
3877 static dw_die_ref force_type_die (tree);
3878 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3879 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3880 static struct dwarf_file_data * lookup_filename (const char *);
3881 static void retry_incomplete_types (void);
3882 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3883 static void gen_generic_params_dies (tree);
3884 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3885 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3886 static void splice_child_die (dw_die_ref, dw_die_ref);
3887 static int file_info_cmp (const void *, const void *);
3888 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3889 const char *, var_loc_view, const char *);
3890 static void output_loc_list (dw_loc_list_ref);
3891 static char *gen_internal_sym (const char *);
3892 static bool want_pubnames (void);
3893
3894 static void prune_unmark_dies (dw_die_ref);
3895 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3896 static void prune_unused_types_mark (dw_die_ref, int);
3897 static void prune_unused_types_walk (dw_die_ref);
3898 static void prune_unused_types_walk_attribs (dw_die_ref);
3899 static void prune_unused_types_prune (dw_die_ref);
3900 static void prune_unused_types (void);
3901 static int maybe_emit_file (struct dwarf_file_data *fd);
3902 static inline const char *AT_vms_delta1 (dw_attr_node *);
3903 static inline const char *AT_vms_delta2 (dw_attr_node *);
3904 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3905 static void gen_remaining_tmpl_value_param_die_attribute (void);
3906 static bool generic_type_p (tree);
3907 static void schedule_generic_params_dies_gen (tree t);
3908 static void gen_scheduled_generic_parms_dies (void);
3909 static void resolve_variable_values (void);
3910
3911 static const char *comp_dir_string (void);
3912
3913 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3914
3915 /* Enum for tracking thread-local variables whose address is really an offset
3916 relative to the TLS pointer, which will need link-time relocation, but will
3917 not need relocation by the DWARF consumer. */
3918
3919 enum dtprel_bool
3920 {
3921 dtprel_false = 0,
3922 dtprel_true = 1
3923 };
3924
3925 /* Return the operator to use for an address of a variable. For dtprel_true, we
3926 use DW_OP_const*. For regular variables, which need both link-time
3927 relocation and consumer-level relocation (e.g., to account for shared objects
3928 loaded at a random address), we use DW_OP_addr*. */
3929
3930 static inline enum dwarf_location_atom
3931 dw_addr_op (enum dtprel_bool dtprel)
3932 {
3933 if (dtprel == dtprel_true)
3934 return (dwarf_split_debug_info ? dwarf_OP (DW_OP_constx)
3935 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3936 else
3937 return dwarf_split_debug_info ? dwarf_OP (DW_OP_addrx) : DW_OP_addr;
3938 }
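
/* For example (illustrative only): on a target with 8-byte addresses, a
   TLS offset (dtprel_true) is emitted as DW_OP_const8u, or as DW_OP_constx
   (resp. its GNU extension counterpart before DWARF 5) under -gsplit-dwarf;
   a regular variable address uses DW_OP_addr, or DW_OP_addrx under
   -gsplit-dwarf.  */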
3939
3940 /* Return a pointer to a newly allocated address location description. If
3941 dwarf_split_debug_info is true, then record the address with the appropriate
3942 relocation. */
3943 static inline dw_loc_descr_ref
3944 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3945 {
3946 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3947
3948 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3949 ref->dw_loc_oprnd1.v.val_addr = addr;
3950 ref->dtprel = dtprel;
3951 if (dwarf_split_debug_info)
3952 ref->dw_loc_oprnd1.val_entry
3953 = add_addr_table_entry (addr,
3954 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3955 else
3956 ref->dw_loc_oprnd1.val_entry = NULL;
3957
3958 return ref;
3959 }
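
/* A minimal usage sketch (illustrative only, not called from anywhere):
   build a location description for a variable whose DECL_RTL is a
   (mem (symbol_ref ...)); the address operand of the MEM is what gets
   wrapped in the DW_OP_addr/DW_OP_addrx operation.  */
#if 0
static dw_loc_descr_ref
example_symbol_loc_descr (tree decl)
{
  rtx rtl = DECL_RTL (decl);
  gcc_assert (MEM_P (rtl));
  return new_addr_loc_descr (XEXP (rtl, 0), dtprel_false);
}
#endif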
3960
3961 /* Section names used to hold DWARF debugging information. */
3962
3963 #ifndef DEBUG_INFO_SECTION
3964 #define DEBUG_INFO_SECTION ".debug_info"
3965 #endif
3966 #ifndef DEBUG_DWO_INFO_SECTION
3967 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3968 #endif
3969 #ifndef DEBUG_LTO_INFO_SECTION
3970 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3971 #endif
3972 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3973 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3974 #endif
3975 #ifndef DEBUG_ABBREV_SECTION
3976 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3977 #endif
3978 #ifndef DEBUG_LTO_ABBREV_SECTION
3979 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3980 #endif
3981 #ifndef DEBUG_DWO_ABBREV_SECTION
3982 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3983 #endif
3984 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3985 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3986 #endif
3987 #ifndef DEBUG_ARANGES_SECTION
3988 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3989 #endif
3990 #ifndef DEBUG_ADDR_SECTION
3991 #define DEBUG_ADDR_SECTION ".debug_addr"
3992 #endif
3993 #ifndef DEBUG_MACINFO_SECTION
3994 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
3995 #endif
3996 #ifndef DEBUG_LTO_MACINFO_SECTION
3997 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
3998 #endif
3999 #ifndef DEBUG_DWO_MACINFO_SECTION
4000 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
4001 #endif
4002 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
4003 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
4004 #endif
4005 #ifndef DEBUG_MACRO_SECTION
4006 #define DEBUG_MACRO_SECTION ".debug_macro"
4007 #endif
4008 #ifndef DEBUG_LTO_MACRO_SECTION
4009 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
4010 #endif
4011 #ifndef DEBUG_DWO_MACRO_SECTION
4012 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
4013 #endif
4014 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
4015 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
4016 #endif
4017 #ifndef DEBUG_LINE_SECTION
4018 #define DEBUG_LINE_SECTION ".debug_line"
4019 #endif
4020 #ifndef DEBUG_LTO_LINE_SECTION
4021 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
4022 #endif
4023 #ifndef DEBUG_DWO_LINE_SECTION
4024 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
4025 #endif
4026 #ifndef DEBUG_LTO_DWO_LINE_SECTION
4027 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
4028 #endif
4029 #ifndef DEBUG_LOC_SECTION
4030 #define DEBUG_LOC_SECTION ".debug_loc"
4031 #endif
4032 #ifndef DEBUG_DWO_LOC_SECTION
4033 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
4034 #endif
4035 #ifndef DEBUG_LOCLISTS_SECTION
4036 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
4037 #endif
4038 #ifndef DEBUG_DWO_LOCLISTS_SECTION
4039 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
4040 #endif
4041 #ifndef DEBUG_PUBNAMES_SECTION
4042 #define DEBUG_PUBNAMES_SECTION \
4043 ((debug_generate_pub_sections == 2) \
4044 ? ".debug_gnu_pubnames" : ".debug_pubnames")
4045 #endif
4046 #ifndef DEBUG_PUBTYPES_SECTION
4047 #define DEBUG_PUBTYPES_SECTION \
4048 ((debug_generate_pub_sections == 2) \
4049 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4050 #endif
4051 #ifndef DEBUG_STR_OFFSETS_SECTION
4052 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4053 #endif
4054 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4055 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4056 #endif
4057 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4058 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4059 #endif
4060 #ifndef DEBUG_STR_SECTION
4061 #define DEBUG_STR_SECTION ".debug_str"
4062 #endif
4063 #ifndef DEBUG_LTO_STR_SECTION
4064 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4065 #endif
4066 #ifndef DEBUG_STR_DWO_SECTION
4067 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4068 #endif
4069 #ifndef DEBUG_LTO_STR_DWO_SECTION
4070 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4071 #endif
4072 #ifndef DEBUG_RANGES_SECTION
4073 #define DEBUG_RANGES_SECTION ".debug_ranges"
4074 #endif
4075 #ifndef DEBUG_RNGLISTS_SECTION
4076 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4077 #endif
4078 #ifndef DEBUG_LINE_STR_SECTION
4079 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4080 #endif
4081 #ifndef DEBUG_LTO_LINE_STR_SECTION
4082 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4083 #endif
4084
4085 /* Standard ELF section names for compiled code and data. */
4086 #ifndef TEXT_SECTION_NAME
4087 #define TEXT_SECTION_NAME ".text"
4088 #endif
4089
4090 /* Section flags for .debug_str section. */
4091 #define DEBUG_STR_SECTION_FLAGS \
4092 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4093 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4094 : SECTION_DEBUG)
4095
4096 /* Section flags for .debug_str.dwo section. */
4097 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4098
4099 /* Attribute used to refer to the macro section. */
4100 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4101 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
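
/* That is: DWARF 5 and later use the standard DW_AT_macros; for earlier
   versions, -gstrict-dwarf selects DW_AT_macro_info, and otherwise the
   GNU extension DW_AT_GNU_macros is used.  */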
4102
4103 /* Labels we insert at the beginning of sections so that we can reference
4104 them instead of the section names themselves. */
4105
4106 #ifndef TEXT_SECTION_LABEL
4107 #define TEXT_SECTION_LABEL "Ltext"
4108 #endif
4109 #ifndef COLD_TEXT_SECTION_LABEL
4110 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4111 #endif
4112 #ifndef DEBUG_LINE_SECTION_LABEL
4113 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4114 #endif
4115 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4116 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4117 #endif
4118 #ifndef DEBUG_INFO_SECTION_LABEL
4119 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4120 #endif
4121 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4122 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4123 #endif
4124 #ifndef DEBUG_ABBREV_SECTION_LABEL
4125 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4126 #endif
4127 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4128 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4129 #endif
4130 #ifndef DEBUG_ADDR_SECTION_LABEL
4131 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4132 #endif
4133 #ifndef DEBUG_LOC_SECTION_LABEL
4134 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4135 #endif
4136 #ifndef DEBUG_RANGES_SECTION_LABEL
4137 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4138 #endif
4139 #ifndef DEBUG_MACINFO_SECTION_LABEL
4140 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4141 #endif
4142 #ifndef DEBUG_MACRO_SECTION_LABEL
4143 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4144 #endif
4145 #define SKELETON_COMP_DIE_ABBREV 1
4146 #define SKELETON_TYPE_DIE_ABBREV 2
4147
4148 /* Definitions of defaults for formats and names of various special
4149 (artificial) labels which may be generated within this file (when the -g
4150 option is used and DWARF2_DEBUGGING_INFO is in effect).
4151 If necessary, these may be overridden from within the tm.h file, but
4152 typically, overriding these defaults is unnecessary. */
4153
4154 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4155 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4156 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4157 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4158 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4159 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4160 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4161 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4162 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4163 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4164 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4165 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4166 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4167 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4168 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4169
4170 #ifndef TEXT_END_LABEL
4171 #define TEXT_END_LABEL "Letext"
4172 #endif
4173 #ifndef COLD_END_LABEL
4174 #define COLD_END_LABEL "Letext_cold"
4175 #endif
4176 #ifndef BLOCK_BEGIN_LABEL
4177 #define BLOCK_BEGIN_LABEL "LBB"
4178 #endif
4179 #ifndef BLOCK_INLINE_ENTRY_LABEL
4180 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4181 #endif
4182 #ifndef BLOCK_END_LABEL
4183 #define BLOCK_END_LABEL "LBE"
4184 #endif
4185 #ifndef LINE_CODE_LABEL
4186 #define LINE_CODE_LABEL "LM"
4187 #endif
4188
4189 \f
4190 /* Return the root of the DIEs built for the current compilation unit. */
4191 static dw_die_ref
4192 comp_unit_die (void)
4193 {
4194 if (!single_comp_unit_die)
4195 single_comp_unit_die = gen_compile_unit_die (NULL);
4196 return single_comp_unit_die;
4197 }
4198
4199 /* We allow a language front-end to designate a function that is to be
4200 called to "demangle" any name before it is put into a DIE. */
4201
4202 static const char *(*demangle_name_func) (const char *);
4203
4204 void
4205 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4206 {
4207 demangle_name_func = func;
4208 }
4209
4210 /* Test if rtl node points to a pseudo register. */
4211
4212 static inline int
4213 is_pseudo_reg (const_rtx rtl)
4214 {
4215 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4216 || (GET_CODE (rtl) == SUBREG
4217 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4218 }
4219
4220 /* Return a reference to a type, with its const and volatile qualifiers
4221 removed. */
4222
4223 static inline tree
4224 type_main_variant (tree type)
4225 {
4226 type = TYPE_MAIN_VARIANT (type);
4227
4228 /* ??? There really should be only one main variant among any group of
4229 variants of a given type (and all of the MAIN_VARIANT values for all
4230 members of the group should point to that one type) but sometimes the C
4231 front-end messes this up for array types, so we work around that bug
4232 here. */
4233 if (TREE_CODE (type) == ARRAY_TYPE)
4234 while (type != TYPE_MAIN_VARIANT (type))
4235 type = TYPE_MAIN_VARIANT (type);
4236
4237 return type;
4238 }
4239
4240 /* Return nonzero if the given type node represents a tagged type. */
4241
4242 static inline int
4243 is_tagged_type (const_tree type)
4244 {
4245 enum tree_code code = TREE_CODE (type);
4246
4247 return (code == RECORD_TYPE || code == UNION_TYPE
4248 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4249 }
4250
4251 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4252
4253 static void
4254 get_ref_die_offset_label (char *label, dw_die_ref ref)
4255 {
4256 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4257 }
4258
4259 /* Return die_offset of a DIE reference to a base type. */
4260
4261 static unsigned long int
4262 get_base_type_offset (dw_die_ref ref)
4263 {
4264 if (ref->die_offset)
4265 return ref->die_offset;
4266 if (comp_unit_die ()->die_abbrev)
4267 {
4268 calc_base_type_die_sizes ();
4269 gcc_assert (ref->die_offset);
4270 }
4271 return ref->die_offset;
4272 }
4273
4274 /* Return die_offset of a DIE reference other than base type. */
4275
4276 static unsigned long int
4277 get_ref_die_offset (dw_die_ref ref)
4278 {
4279 gcc_assert (ref->die_offset);
4280 return ref->die_offset;
4281 }
4282
4283 /* Convert a DIE tag into its string name. */
4284
4285 static const char *
4286 dwarf_tag_name (unsigned int tag)
4287 {
4288 const char *name = get_DW_TAG_name (tag);
4289
4290 if (name != NULL)
4291 return name;
4292
4293 return "DW_TAG_<unknown>";
4294 }
4295
4296 /* Convert a DWARF attribute code into its string name. */
4297
4298 static const char *
4299 dwarf_attr_name (unsigned int attr)
4300 {
4301 const char *name;
4302
4303 switch (attr)
4304 {
4305 #if VMS_DEBUGGING_INFO
4306 case DW_AT_HP_prologue:
4307 return "DW_AT_HP_prologue";
4308 #else
4309 case DW_AT_MIPS_loop_unroll_factor:
4310 return "DW_AT_MIPS_loop_unroll_factor";
4311 #endif
4312
4313 #if VMS_DEBUGGING_INFO
4314 case DW_AT_HP_epilogue:
4315 return "DW_AT_HP_epilogue";
4316 #else
4317 case DW_AT_MIPS_stride:
4318 return "DW_AT_MIPS_stride";
4319 #endif
4320 }
4321
4322 name = get_DW_AT_name (attr);
4323
4324 if (name != NULL)
4325 return name;
4326
4327 return "DW_AT_<unknown>";
4328 }
4329
4330 /* Convert a DWARF value form code into its string name. */
4331
4332 static const char *
4333 dwarf_form_name (unsigned int form)
4334 {
4335 const char *name = get_DW_FORM_name (form);
4336
4337 if (name != NULL)
4338 return name;
4339
4340 return "DW_FORM_<unknown>";
4341 }
4342 \f
4343 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4344 instance of an inlined instance of a decl which is local to an inline
4345 function, so we have to trace all of the way back through the origin chain
4346 to find out what sort of node actually served as the original seed for the
4347 given block. */
4348
4349 static tree
4350 decl_ultimate_origin (const_tree decl)
4351 {
4352 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4353 return NULL_TREE;
4354
4355 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4356 we're trying to output the abstract instance of this function. */
4357 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4358 return NULL_TREE;
4359
4360 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4361 most distant ancestor, this should never happen. */
4362 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4363
4364 return DECL_ABSTRACT_ORIGIN (decl);
4365 }
4366
4367 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4368 of a virtual function may refer to a base class, so we check the 'this'
4369 parameter. */
4370
4371 static tree
4372 decl_class_context (tree decl)
4373 {
4374 tree context = NULL_TREE;
4375
4376 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4377 context = DECL_CONTEXT (decl);
4378 else
4379 context = TYPE_MAIN_VARIANT
4380 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4381
4382 if (context && !TYPE_P (context))
4383 context = NULL_TREE;
4384
4385 return context;
4386 }
4387 \f
4388 /* Add an attribute/value pair to a DIE. */
4389
4390 static inline void
4391 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4392 {
4393 /* Maybe this should be an assert? */
4394 if (die == NULL)
4395 return;
4396
4397 if (flag_checking)
4398 {
4399 /* Check we do not add duplicate attrs. Can't use get_AT here
4400 because that recurses to the specification/abstract origin DIE. */
4401 dw_attr_node *a;
4402 unsigned ix;
4403 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4404 gcc_assert (a->dw_attr != attr->dw_attr);
4405 }
4406
4407 vec_safe_reserve (die->die_attr, 1);
4408 vec_safe_push (die->die_attr, *attr);
4409 }
4410
4411 static inline enum dw_val_class
4412 AT_class (dw_attr_node *a)
4413 {
4414 return a->dw_attr_val.val_class;
4415 }
4416
4417 /* Return the index for any attribute that will be referenced with a
4418 DW_FORM_addrx/GNU_addr_index or DW_FORM_strx/GNU_str_index. String
4419 indices are stored in dw_attr_val.v.val_str for reference counting
4420 pruning. */
4421
4422 static inline unsigned int
4423 AT_index (dw_attr_node *a)
4424 {
4425 if (AT_class (a) == dw_val_class_str)
4426 return a->dw_attr_val.v.val_str->index;
4427 else if (a->dw_attr_val.val_entry != NULL)
4428 return a->dw_attr_val.val_entry->index;
4429 return NOT_INDEXED;
4430 }
4431
4432 /* Add a flag value attribute to a DIE. */
4433
4434 static inline void
4435 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4436 {
4437 dw_attr_node attr;
4438
4439 attr.dw_attr = attr_kind;
4440 attr.dw_attr_val.val_class = dw_val_class_flag;
4441 attr.dw_attr_val.val_entry = NULL;
4442 attr.dw_attr_val.v.val_flag = flag;
4443 add_dwarf_attr (die, &attr);
4444 }
4445
4446 static inline unsigned
4447 AT_flag (dw_attr_node *a)
4448 {
4449 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4450 return a->dw_attr_val.v.val_flag;
4451 }
4452
4453 /* Add a signed integer attribute value to a DIE. */
4454
4455 static inline void
4456 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4457 {
4458 dw_attr_node attr;
4459
4460 attr.dw_attr = attr_kind;
4461 attr.dw_attr_val.val_class = dw_val_class_const;
4462 attr.dw_attr_val.val_entry = NULL;
4463 attr.dw_attr_val.v.val_int = int_val;
4464 add_dwarf_attr (die, &attr);
4465 }
4466
4467 static inline HOST_WIDE_INT
4468 AT_int (dw_attr_node *a)
4469 {
4470 gcc_assert (a && (AT_class (a) == dw_val_class_const
4471 || AT_class (a) == dw_val_class_const_implicit));
4472 return a->dw_attr_val.v.val_int;
4473 }
4474
4475 /* Add an unsigned integer attribute value to a DIE. */
4476
4477 static inline void
4478 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4479 unsigned HOST_WIDE_INT unsigned_val)
4480 {
4481 dw_attr_node attr;
4482
4483 attr.dw_attr = attr_kind;
4484 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4485 attr.dw_attr_val.val_entry = NULL;
4486 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4487 add_dwarf_attr (die, &attr);
4488 }
4489
4490 static inline unsigned HOST_WIDE_INT
4491 AT_unsigned (dw_attr_node *a)
4492 {
4493 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4494 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4495 return a->dw_attr_val.v.val_unsigned;
4496 }
4497
4498 /* Add an unsigned wide integer attribute value to a DIE. */
4499
4500 static inline void
4501 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4502 const wide_int& w)
4503 {
4504 dw_attr_node attr;
4505
4506 attr.dw_attr = attr_kind;
4507 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4508 attr.dw_attr_val.val_entry = NULL;
4509 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4510 *attr.dw_attr_val.v.val_wide = w;
4511 add_dwarf_attr (die, &attr);
4512 }
4513
4514 /* Add an unsigned double integer attribute value to a DIE. */
4515
4516 static inline void
4517 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4518 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4519 {
4520 dw_attr_node attr;
4521
4522 attr.dw_attr = attr_kind;
4523 attr.dw_attr_val.val_class = dw_val_class_const_double;
4524 attr.dw_attr_val.val_entry = NULL;
4525 attr.dw_attr_val.v.val_double.high = high;
4526 attr.dw_attr_val.v.val_double.low = low;
4527 add_dwarf_attr (die, &attr);
4528 }
4529
4530 /* Add a vector attribute value (e.g. a floating-point constant) to a DIE. */
4531
4532 static inline void
4533 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4534 unsigned int length, unsigned int elt_size, unsigned char *array)
4535 {
4536 dw_attr_node attr;
4537
4538 attr.dw_attr = attr_kind;
4539 attr.dw_attr_val.val_class = dw_val_class_vec;
4540 attr.dw_attr_val.val_entry = NULL;
4541 attr.dw_attr_val.v.val_vec.length = length;
4542 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4543 attr.dw_attr_val.v.val_vec.array = array;
4544 add_dwarf_attr (die, &attr);
4545 }
4546
4547 /* Add an 8-byte data attribute value to a DIE. */
4548
4549 static inline void
4550 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4551 unsigned char data8[8])
4552 {
4553 dw_attr_node attr;
4554
4555 attr.dw_attr = attr_kind;
4556 attr.dw_attr_val.val_class = dw_val_class_data8;
4557 attr.dw_attr_val.val_entry = NULL;
4558 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4559 add_dwarf_attr (die, &attr);
4560 }
4561
4562 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4563 dwarf_split_debug_info, address attributes in dies destined for the
4564 final executable have force_direct set to avoid using indexed
4565 references. */
4566
4567 static inline void
4568 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4569 bool force_direct)
4570 {
4571 dw_attr_node attr;
4572 char * lbl_id;
4573
4574 lbl_id = xstrdup (lbl_low);
4575 attr.dw_attr = DW_AT_low_pc;
4576 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4577 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4578 if (dwarf_split_debug_info && !force_direct)
4579 attr.dw_attr_val.val_entry
4580 = add_addr_table_entry (lbl_id, ate_kind_label);
4581 else
4582 attr.dw_attr_val.val_entry = NULL;
4583 add_dwarf_attr (die, &attr);
4584
4585 attr.dw_attr = DW_AT_high_pc;
4586 if (dwarf_version < 4)
4587 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4588 else
4589 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4590 lbl_id = xstrdup (lbl_high);
4591 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4592 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4593 && dwarf_split_debug_info && !force_direct)
4594 attr.dw_attr_val.val_entry
4595 = add_addr_table_entry (lbl_id, ate_kind_label);
4596 else
4597 attr.dw_attr_val.val_entry = NULL;
4598 add_dwarf_attr (die, &attr);
4599 }
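
/* A minimal usage sketch (illustrative only): attach the code range of the
   whole compilation unit using the text section labels declared later in
   this file, with force_direct set so the attributes are not emitted as
   indexed (.debug_addr) references.  */
#if 0
  add_AT_low_high_pc (comp_unit_die (), text_section_label,
		      text_end_label, true);
#endif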
4600
4601 /* Hash and equality functions for debug_str_hash. */
4602
4603 hashval_t
4604 indirect_string_hasher::hash (indirect_string_node *x)
4605 {
4606 return htab_hash_string (x->str);
4607 }
4608
4609 bool
4610 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4611 {
4612 return strcmp (x1->str, x2) == 0;
4613 }
4614
4615 /* Add STR to the given string hash table. */
4616
4617 static struct indirect_string_node *
4618 find_AT_string_in_table (const char *str,
4619 hash_table<indirect_string_hasher> *table)
4620 {
4621 struct indirect_string_node *node;
4622
4623 indirect_string_node **slot
4624 = table->find_slot_with_hash (str, htab_hash_string (str), INSERT);
4625 if (*slot == NULL)
4626 {
4627 node = ggc_cleared_alloc<indirect_string_node> ();
4628 node->str = ggc_strdup (str);
4629 *slot = node;
4630 }
4631 else
4632 node = *slot;
4633
4634 node->refcount++;
4635 return node;
4636 }
4637
4638 /* Add STR to the indirect string hash table. */
4639
4640 static struct indirect_string_node *
4641 find_AT_string (const char *str)
4642 {
4643 if (! debug_str_hash)
4644 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4645
4646 return find_AT_string_in_table (str, debug_str_hash);
4647 }
4648
4649 /* Add a string attribute value to a DIE. */
4650
4651 static inline void
4652 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4653 {
4654 dw_attr_node attr;
4655 struct indirect_string_node *node;
4656
4657 node = find_AT_string (str);
4658
4659 attr.dw_attr = attr_kind;
4660 attr.dw_attr_val.val_class = dw_val_class_str;
4661 attr.dw_attr_val.val_entry = NULL;
4662 attr.dw_attr_val.v.val_str = node;
4663 add_dwarf_attr (die, &attr);
4664 }
4665
4666 static inline const char *
4667 AT_string (dw_attr_node *a)
4668 {
4669 gcc_assert (a && AT_class (a) == dw_val_class_str);
4670 return a->dw_attr_val.v.val_str->str;
4671 }
4672
4673 /* Call this function directly to bypass AT_string_form's logic to put
4674 the string inline in the die. */
4675
4676 static void
4677 set_indirect_string (struct indirect_string_node *node)
4678 {
4679 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4680 /* Already indirect is a no op. */
4681 if (node->form == DW_FORM_strp
4682 || node->form == DW_FORM_line_strp
4683 || node->form == dwarf_FORM (DW_FORM_strx))
4684 {
4685 gcc_assert (node->label);
4686 return;
4687 }
4688 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4689 ++dw2_string_counter;
4690 node->label = xstrdup (label);
4691
4692 if (!dwarf_split_debug_info)
4693 {
4694 node->form = DW_FORM_strp;
4695 node->index = NOT_INDEXED;
4696 }
4697 else
4698 {
4699 node->form = dwarf_FORM (DW_FORM_strx);
4700 node->index = NO_INDEX_ASSIGNED;
4701 }
4702 }
4703
4704 /* A helper function for dwarf2out_finish, called to reset indirect
4705 string decisions done for early LTO dwarf output before fat object
4706 dwarf output. */
4707
4708 int
4709 reset_indirect_string (indirect_string_node **h, void *)
4710 {
4711 struct indirect_string_node *node = *h;
4712 if (node->form == DW_FORM_strp || node->form == dwarf_FORM (DW_FORM_strx))
4713 {
4714 free (node->label);
4715 node->label = NULL;
4716 node->form = (dwarf_form) 0;
4717 node->index = 0;
4718 }
4719 return 1;
4720 }
4721
4722 /* Find out whether a string should be output inline in DIE
4723 or out-of-line in .debug_str section. */
4724
4725 static enum dwarf_form
4726 find_string_form (struct indirect_string_node *node)
4727 {
4728 unsigned int len;
4729
4730 if (node->form)
4731 return node->form;
4732
4733 len = strlen (node->str) + 1;
4734
4735 /* If the string is shorter or equal to the size of the reference, it is
4736 always better to put it inline. */
4737 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4738 return node->form = DW_FORM_string;
4739
4740 /* If we cannot expect the linker to merge strings in .debug_str
4741 section, only put it into .debug_str if it is worth even in this
4742 single module. */
4743 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4744 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4745 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4746 return node->form = DW_FORM_string;
4747
4748 set_indirect_string (node);
4749
4750 return node->form;
4751 }
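
/* A worked example (illustrative only), assuming DWARF_OFFSET_SIZE == 4,
   a target with .debug_str support and no SECTION_MERGE: a string of at
   most 3 characters (4 bytes including the NUL) always stays inline as
   DW_FORM_string; a 9-character (10-byte) string referenced twice goes
   out of line, because (10 - 4) * 2 == 12 > 10; the same string
   referenced only once stays inline, because (10 - 4) * 1 == 6 <= 10.  */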
4752
4753 /* Find out whether the string referenced from the attribute should be
4754 output inline in DIE or out-of-line in .debug_str section. */
4755
4756 static enum dwarf_form
4757 AT_string_form (dw_attr_node *a)
4758 {
4759 gcc_assert (a && AT_class (a) == dw_val_class_str);
4760 return find_string_form (a->dw_attr_val.v.val_str);
4761 }
4762
4763 /* Add a DIE reference attribute value to a DIE. */
4764
4765 static inline void
4766 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4767 {
4768 dw_attr_node attr;
4769 gcc_checking_assert (targ_die != NULL);
4770
4771 /* With LTO we can end up trying to reference something we didn't create
4772 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4773 if (targ_die == NULL)
4774 return;
4775
4776 attr.dw_attr = attr_kind;
4777 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4778 attr.dw_attr_val.val_entry = NULL;
4779 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4780 attr.dw_attr_val.v.val_die_ref.external = 0;
4781 add_dwarf_attr (die, &attr);
4782 }
4783
4784 /* Change DIE reference REF to point to NEW_DIE instead. */
4785
4786 static inline void
4787 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4788 {
4789 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4790 ref->dw_attr_val.v.val_die_ref.die = new_die;
4791 ref->dw_attr_val.v.val_die_ref.external = 0;
4792 }
4793
4794 /* Add an AT_specification attribute to a DIE, and also make the back
4795 pointer from the specification to the definition. */
4796
4797 static inline void
4798 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4799 {
4800 add_AT_die_ref (die, DW_AT_specification, targ_die);
4801 gcc_assert (!targ_die->die_definition);
4802 targ_die->die_definition = die;
4803 }
4804
4805 static inline dw_die_ref
4806 AT_ref (dw_attr_node *a)
4807 {
4808 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4809 return a->dw_attr_val.v.val_die_ref.die;
4810 }
4811
4812 static inline int
4813 AT_ref_external (dw_attr_node *a)
4814 {
4815 if (a && AT_class (a) == dw_val_class_die_ref)
4816 return a->dw_attr_val.v.val_die_ref.external;
4817
4818 return 0;
4819 }
4820
4821 static inline void
4822 set_AT_ref_external (dw_attr_node *a, int i)
4823 {
4824 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4825 a->dw_attr_val.v.val_die_ref.external = i;
4826 }
4827
4828 /* Add a location description attribute value to a DIE. */
4829
4830 static inline void
4831 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4832 {
4833 dw_attr_node attr;
4834
4835 attr.dw_attr = attr_kind;
4836 attr.dw_attr_val.val_class = dw_val_class_loc;
4837 attr.dw_attr_val.val_entry = NULL;
4838 attr.dw_attr_val.v.val_loc = loc;
4839 add_dwarf_attr (die, &attr);
4840 }
4841
4842 static inline dw_loc_descr_ref
4843 AT_loc (dw_attr_node *a)
4844 {
4845 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4846 return a->dw_attr_val.v.val_loc;
4847 }
4848
4849 static inline void
4850 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4851 {
4852 dw_attr_node attr;
4853
4854 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4855 return;
4856
4857 attr.dw_attr = attr_kind;
4858 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4859 attr.dw_attr_val.val_entry = NULL;
4860 attr.dw_attr_val.v.val_loc_list = loc_list;
4861 add_dwarf_attr (die, &attr);
4862 have_location_lists = true;
4863 }
4864
4865 static inline dw_loc_list_ref
4866 AT_loc_list (dw_attr_node *a)
4867 {
4868 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4869 return a->dw_attr_val.v.val_loc_list;
4870 }
4871
4872 /* Add a view list attribute to DIE. It must have a DW_AT_location
4873 attribute, because the view list complements the location list. */
4874
4875 static inline void
4876 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4877 {
4878 dw_attr_node attr;
4879
4880 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4881 return;
4882
4883 attr.dw_attr = attr_kind;
4884 attr.dw_attr_val.val_class = dw_val_class_view_list;
4885 attr.dw_attr_val.val_entry = NULL;
4886 attr.dw_attr_val.v.val_view_list = die;
4887 add_dwarf_attr (die, &attr);
4888 gcc_checking_assert (get_AT (die, DW_AT_location));
4889 gcc_assert (have_location_lists);
4890 }
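
/* Note: callers must add this attribute immediately after the
   DW_AT_location attribute it complements; AT_loc_list_ptr and
   view_list_to_loc_list_val_node below rely on the view list sitting in
   the very next slot of the attribute vector.  */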
4891
4892 /* Return a pointer to the location list referenced by the attribute.
4893 If the named attribute is a view list, look up the corresponding
4894 DW_AT_location attribute and return its location list. */
4895
4896 static inline dw_loc_list_ref *
4897 AT_loc_list_ptr (dw_attr_node *a)
4898 {
4899 gcc_assert (a);
4900 switch (AT_class (a))
4901 {
4902 case dw_val_class_loc_list:
4903 return &a->dw_attr_val.v.val_loc_list;
4904 case dw_val_class_view_list:
4905 {
4906 dw_attr_node *l;
4907 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4908 if (!l)
4909 return NULL;
4910 gcc_checking_assert (l + 1 == a);
4911 return AT_loc_list_ptr (l);
4912 }
4913 default:
4914 gcc_unreachable ();
4915 }
4916 }
4917
4918 /* Return the location attribute value associated with a view list
4919 attribute value. */
4920
4921 static inline dw_val_node *
4922 view_list_to_loc_list_val_node (dw_val_node *val)
4923 {
4924 gcc_assert (val->val_class == dw_val_class_view_list);
4925 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4926 if (!loc)
4927 return NULL;
4928 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4929 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4930 return &loc->dw_attr_val;
4931 }
4932
4933 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4934 {
4935 static hashval_t hash (addr_table_entry *);
4936 static bool equal (addr_table_entry *, addr_table_entry *);
4937 };
4938
4939 /* Table of entries into the .debug_addr section. */
4940
4941 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4942
4943 /* Hash an addr_table_entry. */
4944
4945 hashval_t
4946 addr_hasher::hash (addr_table_entry *a)
4947 {
4948 inchash::hash hstate;
4949 switch (a->kind)
4950 {
4951 case ate_kind_rtx:
4952 hstate.add_int (0);
4953 break;
4954 case ate_kind_rtx_dtprel:
4955 hstate.add_int (1);
4956 break;
4957 case ate_kind_label:
4958 return htab_hash_string (a->addr.label);
4959 default:
4960 gcc_unreachable ();
4961 }
4962 inchash::add_rtx (a->addr.rtl, hstate);
4963 return hstate.end ();
4964 }
4965
4966 /* Determine equality for two addr_table_entries. */
4967
4968 bool
4969 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4970 {
4971 if (a1->kind != a2->kind)
4972 return 0;
4973 switch (a1->kind)
4974 {
4975 case ate_kind_rtx:
4976 case ate_kind_rtx_dtprel:
4977 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4978 case ate_kind_label:
4979 return strcmp (a1->addr.label, a2->addr.label) == 0;
4980 default:
4981 gcc_unreachable ();
4982 }
4983 }
4984
4985 /* Initialize an addr_table_entry. */
4986
4987 void
4988 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
4989 {
4990 e->kind = kind;
4991 switch (kind)
4992 {
4993 case ate_kind_rtx:
4994 case ate_kind_rtx_dtprel:
4995 e->addr.rtl = (rtx) addr;
4996 break;
4997 case ate_kind_label:
4998 e->addr.label = (char *) addr;
4999 break;
5000 }
5001 e->refcount = 0;
5002 e->index = NO_INDEX_ASSIGNED;
5003 }
5004
5005 /* Add an entry for ADDR to the address table. Defer setting an
5006 index until output time. */
5007
5008 static addr_table_entry *
5009 add_addr_table_entry (void *addr, enum ate_kind kind)
5010 {
5011 addr_table_entry *node;
5012 addr_table_entry finder;
5013
5014 gcc_assert (dwarf_split_debug_info);
5015 if (! addr_index_table)
5016 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
5017 init_addr_table_entry (&finder, kind, addr);
5018 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
5019
5020 if (*slot == HTAB_EMPTY_ENTRY)
5021 {
5022 node = ggc_cleared_alloc<addr_table_entry> ();
5023 init_addr_table_entry (node, kind, addr);
5024 *slot = node;
5025 }
5026 else
5027 node = *slot;
5028
5029 node->refcount++;
5030 return node;
5031 }
5032
5033 /* Remove an entry from the addr table by decrementing its refcount.
5034 Strictly, decrementing the refcount would be enough, but the
5035 assertion that the entry is actually in the table has found
5036 bugs. */
5037
5038 static void
5039 remove_addr_table_entry (addr_table_entry *entry)
5040 {
5041 gcc_assert (dwarf_split_debug_info && addr_index_table);
5042 /* After an index is assigned, the table is frozen. */
5043 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5044 entry->refcount--;
5045 }
5046
5047 /* Given a location list, remove all addresses it refers to from the
5048 address_table. */
5049
5050 static void
5051 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5052 {
5053 for (; descr; descr = descr->dw_loc_next)
5054 if (descr->dw_loc_oprnd1.val_entry != NULL)
5055 {
5056 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5057 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5058 }
5059 }
5060
5061 /* A helper function for dwarf2out_finish called through
5062 htab_traverse. Assign an addr_table_entry its index. All entries
5063 must be collected into the table when this function is called,
5064 because the indexing code relies on htab_traverse to traverse nodes
5065 in the same order for each run. */
5066
5067 int
5068 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5069 {
5070 addr_table_entry *node = *h;
5071
5072 /* Don't index unreferenced nodes. */
5073 if (node->refcount == 0)
5074 return 1;
5075
5076 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5077 node->index = *index;
5078 *index += 1;
5079
5080 return 1;
5081 }
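
/* For illustration (a rough sketch, not the exact code used at output
   time): the indices are handed out in a single pass over the table,
   something like

       unsigned int index = 0;
       addr_index_table->traverse_noresize
         <unsigned int *, index_addr_table_entry> (&index);

   so every referenced entry receives a stable, densely packed index into
   .debug_addr.  */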
5082
5083 /* Add an address constant attribute value to a DIE. When using
5084 dwarf_split_debug_info, address attributes in DIEs destined for the
5085 final executable should be direct references--setting the parameter
5086 force_direct ensures this behavior. */
5087
5088 static inline void
5089 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5090 bool force_direct)
5091 {
5092 dw_attr_node attr;
5093
5094 attr.dw_attr = attr_kind;
5095 attr.dw_attr_val.val_class = dw_val_class_addr;
5096 attr.dw_attr_val.v.val_addr = addr;
5097 if (dwarf_split_debug_info && !force_direct)
5098 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5099 else
5100 attr.dw_attr_val.val_entry = NULL;
5101 add_dwarf_attr (die, &attr);
5102 }
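
/* For example: with -gsplit-dwarf, an address attribute added here without
   force_direct ends up as an index into .debug_addr (DW_FORM_GNU_addr_index,
   or DW_FORM_addrx for DWARF 5), so the .dwo file needs no relocations;
   attributes that must stay in the final executable (e.g. in the skeleton
   unit) pass force_direct and keep a plain, relocatable address.  */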
5103
5104 /* Get the RTX from an address DIE attribute. */
5105
5106 static inline rtx
5107 AT_addr (dw_attr_node *a)
5108 {
5109 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5110 return a->dw_attr_val.v.val_addr;
5111 }
5112
5113 /* Add a file attribute value to a DIE. */
5114
5115 static inline void
5116 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5117 struct dwarf_file_data *fd)
5118 {
5119 dw_attr_node attr;
5120
5121 attr.dw_attr = attr_kind;
5122 attr.dw_attr_val.val_class = dw_val_class_file;
5123 attr.dw_attr_val.val_entry = NULL;
5124 attr.dw_attr_val.v.val_file = fd;
5125 add_dwarf_attr (die, &attr);
5126 }
5127
5128 /* Get the dwarf_file_data from a file DIE attribute. */
5129
5130 static inline struct dwarf_file_data *
5131 AT_file (dw_attr_node *a)
5132 {
5133 gcc_assert (a && (AT_class (a) == dw_val_class_file
5134 || AT_class (a) == dw_val_class_file_implicit));
5135 return a->dw_attr_val.v.val_file;
5136 }
5137
5138 /* Add a symbolic view identifier attribute value to a DIE. */
5139
5140 static inline void
5141 add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
5142 const char *view_label)
5143 {
5144 dw_attr_node attr;
5145
5146 attr.dw_attr = attr_kind;
5147 attr.dw_attr_val.val_class = dw_val_class_symview;
5148 attr.dw_attr_val.val_entry = NULL;
5149 attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
5150 add_dwarf_attr (die, &attr);
5151 }
5152
5153 /* Add a label identifier attribute value to a DIE. */
5154
5155 static inline void
5156 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5157 const char *lbl_id)
5158 {
5159 dw_attr_node attr;
5160
5161 attr.dw_attr = attr_kind;
5162 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5163 attr.dw_attr_val.val_entry = NULL;
5164 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
5165 if (dwarf_split_debug_info)
5166 attr.dw_attr_val.val_entry
5167 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5168 ate_kind_label);
5169 add_dwarf_attr (die, &attr);
5170 }
5171
5172 /* Add a section offset attribute value to a DIE, an offset into the
5173 debug_line section. */
5174
5175 static inline void
5176 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5177 const char *label)
5178 {
5179 dw_attr_node attr;
5180
5181 attr.dw_attr = attr_kind;
5182 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5183 attr.dw_attr_val.val_entry = NULL;
5184 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5185 add_dwarf_attr (die, &attr);
5186 }
5187
5188 /* Add a section offset attribute value to a DIE, an offset into the
5189 debug_macinfo section. */
5190
5191 static inline void
5192 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5193 const char *label)
5194 {
5195 dw_attr_node attr;
5196
5197 attr.dw_attr = attr_kind;
5198 attr.dw_attr_val.val_class = dw_val_class_macptr;
5199 attr.dw_attr_val.val_entry = NULL;
5200 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5201 add_dwarf_attr (die, &attr);
5202 }
5203
5204 /* Add a range_list attribute value to a DIE. When using
5205 dwarf_split_debug_info, address attributes in DIEs destined for the
5206 final executable should be direct references--setting the parameter
5207 force_direct ensures this behavior. */
5208
5209 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5210 #define RELOCATED_OFFSET (NULL)
5211
5212 static void
5213 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5214 long unsigned int offset, bool force_direct)
5215 {
5216 dw_attr_node attr;
5217
5218 attr.dw_attr = attr_kind;
5219 attr.dw_attr_val.val_class = dw_val_class_range_list;
5220 /* For the range_list attribute, use val_entry to store whether the
5221 offset should follow split-debug-info or normal semantics. This
5222 value is read in output_range_list_offset. */
5223 if (dwarf_split_debug_info && !force_direct)
5224 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5225 else
5226 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5227 attr.dw_attr_val.v.val_offset = offset;
5228 add_dwarf_attr (die, &attr);
5229 }
5230
5231 /* Return the start label of a delta attribute. */
5232
5233 static inline const char *
5234 AT_vms_delta1 (dw_attr_node *a)
5235 {
5236 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5237 return a->dw_attr_val.v.val_vms_delta.lbl1;
5238 }
5239
5240 /* Return the end label of a delta attribute. */
5241
5242 static inline const char *
5243 AT_vms_delta2 (dw_attr_node *a)
5244 {
5245 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5246 return a->dw_attr_val.v.val_vms_delta.lbl2;
5247 }
5248
5249 static inline const char *
5250 AT_lbl (dw_attr_node *a)
5251 {
5252 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5253 || AT_class (a) == dw_val_class_lineptr
5254 || AT_class (a) == dw_val_class_macptr
5255 || AT_class (a) == dw_val_class_loclistsptr
5256 || AT_class (a) == dw_val_class_high_pc));
5257 return a->dw_attr_val.v.val_lbl_id;
5258 }
5259
5260 /* Get the attribute of type attr_kind. */
5261
5262 static dw_attr_node *
5263 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5264 {
5265 dw_attr_node *a;
5266 unsigned ix;
5267 dw_die_ref spec = NULL;
5268
5269 if (! die)
5270 return NULL;
5271
5272 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5273 if (a->dw_attr == attr_kind)
5274 return a;
5275 else if (a->dw_attr == DW_AT_specification
5276 || a->dw_attr == DW_AT_abstract_origin)
5277 spec = AT_ref (a);
5278
5279 if (spec)
5280 return get_AT (spec, attr_kind);
5281
5282 return NULL;
5283 }
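
/* For example: the DIE for an out-of-class C++ member function definition
   usually has no DW_AT_name of its own, only a DW_AT_specification pointing
   at the in-class declaration; get_AT (def_die, DW_AT_name) therefore falls
   back to that declaration DIE and returns its name attribute.  */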
5284
5285 /* Returns the parent of the declaration of DIE. */
5286
5287 static dw_die_ref
5288 get_die_parent (dw_die_ref die)
5289 {
5290 dw_die_ref t;
5291
5292 if (!die)
5293 return NULL;
5294
5295 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5296 || (t = get_AT_ref (die, DW_AT_specification)))
5297 die = t;
5298
5299 return die->die_parent;
5300 }
5301
5302 /* Return the "low pc" attribute value, typically associated with a subprogram
5303 DIE. Return null if the "low pc" attribute is either not present, or if it
5304 cannot be represented as an assembler label identifier. */
5305
5306 static inline const char *
5307 get_AT_low_pc (dw_die_ref die)
5308 {
5309 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5310
5311 return a ? AT_lbl (a) : NULL;
5312 }
5313
5314 /* Return the value of the string attribute designated by ATTR_KIND, or
5315 NULL if it is not present. */
5316
5317 static inline const char *
5318 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5319 {
5320 dw_attr_node *a = get_AT (die, attr_kind);
5321
5322 return a ? AT_string (a) : NULL;
5323 }
5324
5325 /* Return the value of the flag attribute designated by ATTR_KIND, or -1
5326 if it is not present. */
5327
5328 static inline int
5329 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5330 {
5331 dw_attr_node *a = get_AT (die, attr_kind);
5332
5333 return a ? AT_flag (a) : 0;
5334 }
5335
5336 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5337 if it is not present. */
5338
5339 static inline unsigned
5340 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5341 {
5342 dw_attr_node *a = get_AT (die, attr_kind);
5343
5344 return a ? AT_unsigned (a) : 0;
5345 }
5346
5347 static inline dw_die_ref
5348 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5349 {
5350 dw_attr_node *a = get_AT (die, attr_kind);
5351
5352 return a ? AT_ref (a) : NULL;
5353 }
5354
5355 static inline struct dwarf_file_data *
5356 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5357 {
5358 dw_attr_node *a = get_AT (die, attr_kind);
5359
5360 return a ? AT_file (a) : NULL;
5361 }
5362
5363 /* Return TRUE if the language is C. */
5364
5365 static inline bool
5366 is_c (void)
5367 {
5368 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5369
5370 return (lang == DW_LANG_C || lang == DW_LANG_C89 || lang == DW_LANG_C99
5371 || lang == DW_LANG_C11 || lang == DW_LANG_ObjC);
5372
5373
5374 }
5375
5376 /* Return TRUE if the language is C++. */
5377
5378 static inline bool
5379 is_cxx (void)
5380 {
5381 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5382
5383 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5384 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5385 }
5386
5387 /* Return TRUE if DECL was created by the C++ frontend. */
5388
5389 static bool
5390 is_cxx (const_tree decl)
5391 {
5392 if (in_lto_p)
5393 {
5394 const_tree context = get_ultimate_context (decl);
5395 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5396 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5397 }
5398 return is_cxx ();
5399 }
5400
5401 /* Return TRUE if the language is Fortran. */
5402
5403 static inline bool
5404 is_fortran (void)
5405 {
5406 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5407
5408 return (lang == DW_LANG_Fortran77
5409 || lang == DW_LANG_Fortran90
5410 || lang == DW_LANG_Fortran95
5411 || lang == DW_LANG_Fortran03
5412 || lang == DW_LANG_Fortran08);
5413 }
5414
5415 static inline bool
5416 is_fortran (const_tree decl)
5417 {
5418 if (in_lto_p)
5419 {
5420 const_tree context = get_ultimate_context (decl);
5421 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5422 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5423 "GNU Fortran", 11) == 0
5424 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5425 "GNU F77") == 0);
5426 }
5427 return is_fortran ();
5428 }
5429
5430 /* Return TRUE if the language is Ada. */
5431
5432 static inline bool
5433 is_ada (void)
5434 {
5435 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5436
5437 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5438 }
5439
5440 /* Remove the specified attribute if present. Return TRUE if removal
5441 was successful. */
5442
5443 static bool
5444 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5445 {
5446 dw_attr_node *a;
5447 unsigned ix;
5448
5449 if (! die)
5450 return false;
5451
5452 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5453 if (a->dw_attr == attr_kind)
5454 {
5455 if (AT_class (a) == dw_val_class_str)
5456 if (a->dw_attr_val.v.val_str->refcount)
5457 a->dw_attr_val.v.val_str->refcount--;
5458
5459 /* vec::ordered_remove should help reduce the number of abbrevs
5460 that are needed. */
5461 die->die_attr->ordered_remove (ix);
5462 return true;
5463 }
5464 return false;
5465 }
5466
5467 /* Remove CHILD from its parent. PREV must have the property that
5468 PREV->DIE_SIB == CHILD. Does not alter CHILD. */
5469
5470 static void
5471 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5472 {
5473 gcc_assert (child->die_parent == prev->die_parent);
5474 gcc_assert (prev->die_sib == child);
5475 if (prev == child)
5476 {
5477 gcc_assert (child->die_parent->die_child == child);
5478 prev = NULL;
5479 }
5480 else
5481 prev->die_sib = child->die_sib;
5482 if (child->die_parent->die_child == child)
5483 child->die_parent->die_child = prev;
5484 child->die_sib = NULL;
5485 }
5486
5487 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5488 PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */
5489
5490 static void
5491 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5492 {
5493 dw_die_ref parent = old_child->die_parent;
5494
5495 gcc_assert (parent == prev->die_parent);
5496 gcc_assert (prev->die_sib == old_child);
5497
5498 new_child->die_parent = parent;
5499 if (prev == old_child)
5500 {
5501 gcc_assert (parent->die_child == old_child);
5502 new_child->die_sib = new_child;
5503 }
5504 else
5505 {
5506 prev->die_sib = new_child;
5507 new_child->die_sib = old_child->die_sib;
5508 }
5509 if (old_child->die_parent->die_child == old_child)
5510 old_child->die_parent->die_child = new_child;
5511 old_child->die_sib = NULL;
5512 }
5513
5514 /* Move all children from OLD_PARENT to NEW_PARENT. */
5515
5516 static void
5517 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5518 {
5519 dw_die_ref c;
5520 new_parent->die_child = old_parent->die_child;
5521 old_parent->die_child = NULL;
5522 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5523 }
5524
5525 /* Remove all child DIEs whose die_tag is TAG.  Do nothing if no child
5526 matches TAG. */
5527
5528 static void
5529 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5530 {
5531 dw_die_ref c;
5532
5533 c = die->die_child;
5534 if (c) do {
5535 dw_die_ref prev = c;
5536 c = c->die_sib;
5537 while (c->die_tag == tag)
5538 {
5539 remove_child_with_prev (c, prev);
5540 c->die_parent = NULL;
5541 /* Might have removed every child. */
5542 if (die->die_child == NULL)
5543 return;
5544 c = prev->die_sib;
5545 }
5546 } while (c != die->die_child);
5547 }
5548
5549 /* Add a CHILD_DIE as the last child of DIE. */
5550
5551 static void
5552 add_child_die (dw_die_ref die, dw_die_ref child_die)
5553 {
5554 /* FIXME this should probably be an assert. */
5555 if (! die || ! child_die)
5556 return;
5557 gcc_assert (die != child_die);
5558
5559 child_die->die_parent = die;
5560 if (die->die_child)
5561 {
5562 child_die->die_sib = die->die_child->die_sib;
5563 die->die_child->die_sib = child_die;
5564 }
5565 else
5566 child_die->die_sib = child_die;
5567 die->die_child = child_die;
5568 }
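
/* For example: the children of a DIE form a circular singly linked list
   through die_sib, and die_child points at the *last* child.  After adding
   A, B, C in that order to an empty parent P:

       P->die_child == C,  C->die_sib == A,  A->die_sib == B,  B->die_sib == C

   which is why FOR_EACH_CHILD begins at die_child->die_sib (the first
   child) and finishes after visiting die_child itself (the last child).  */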
5569
5570 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5571
5572 static void
5573 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5574 dw_die_ref after_die)
5575 {
5576 gcc_assert (die
5577 && child_die
5578 && after_die
5579 && die->die_child
5580 && die != child_die);
5581
5582 child_die->die_parent = die;
5583 child_die->die_sib = after_die->die_sib;
5584 after_die->die_sib = child_die;
5585 if (die->die_child == after_die)
5586 die->die_child = child_die;
5587 }
5588
5589 /* Unassociate CHILD from its parent, and make its parent be
5590 NEW_PARENT. */
5591
5592 static void
5593 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5594 {
5595 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5596 if (p->die_sib == child)
5597 {
5598 remove_child_with_prev (child, p);
5599 break;
5600 }
5601 add_child_die (new_parent, child);
5602 }
5603
5604 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5605 is the specification, to the end of PARENT's list of children.
5606 This is done by removing and re-adding it. */
5607
5608 static void
5609 splice_child_die (dw_die_ref parent, dw_die_ref child)
5610 {
5611 /* We want the declaration DIE from inside the class, not the
5612 specification DIE at toplevel. */
5613 if (child->die_parent != parent)
5614 {
5615 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5616
5617 if (tmp)
5618 child = tmp;
5619 }
5620
5621 gcc_assert (child->die_parent == parent
5622 || (child->die_parent
5623 == get_AT_ref (parent, DW_AT_specification)));
5624
5625 reparent_child (child, parent);
5626 }
5627
5628 /* Create and return a new die with TAG_VALUE as tag. */
5629
5630 static inline dw_die_ref
5631 new_die_raw (enum dwarf_tag tag_value)
5632 {
5633 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5634 die->die_tag = tag_value;
5635 return die;
5636 }
5637
5638 /* Create and return a new die with a parent of PARENT_DIE. If
5639 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5640 associated tree T must be supplied to determine parenthood
5641 later. */
5642
5643 static inline dw_die_ref
5644 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5645 {
5646 dw_die_ref die = new_die_raw (tag_value);
5647
5648 if (parent_die != NULL)
5649 add_child_die (parent_die, die);
5650 else
5651 {
5652 limbo_die_node *limbo_node;
5653
5654 /* No DIEs created after early dwarf should end up in limbo,
5655 because the limbo list should not persist past LTO
5656 streaming. */
5657 if (tag_value != DW_TAG_compile_unit
5658 /* These are allowed because they're generated while
5659 breaking out COMDAT units late. */
5660 && tag_value != DW_TAG_type_unit
5661 && tag_value != DW_TAG_skeleton_unit
5662 && !early_dwarf
5663 /* Allow nested functions to live in limbo because they will
5664 only temporarily live there, as decls_for_scope will fix
5665 them up. */
5666 && (TREE_CODE (t) != FUNCTION_DECL
5667 || !decl_function_context (t))
5668 /* Same as nested functions above but for types. Types that
5669 are local to a function will be fixed in
5670 decls_for_scope. */
5671 && (!RECORD_OR_UNION_TYPE_P (t)
5672 || !TYPE_CONTEXT (t)
5673 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5674 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5675 especially in the ltrans stage, but once we implement LTO
5676 dwarf streaming, we should remove this exception. */
5677 && !in_lto_p)
5678 {
5679 fprintf (stderr, "symbol ended up in limbo too late:");
5680 debug_generic_stmt (t);
5681 gcc_unreachable ();
5682 }
5683
5684 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5685 limbo_node->die = die;
5686 limbo_node->created_for = t;
5687 limbo_node->next = limbo_die_list;
5688 limbo_die_list = limbo_node;
5689 }
5690
5691 return die;
5692 }
5693
5694 /* Return the DIE associated with the given type specifier. */
5695
5696 static inline dw_die_ref
5697 lookup_type_die (tree type)
5698 {
5699 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5700 if (die && die->removed)
5701 {
5702 TYPE_SYMTAB_DIE (type) = NULL;
5703 return NULL;
5704 }
5705 return die;
5706 }
5707
5708 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5709 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5710 anonymous type instead of the one of the naming typedef. */
5711
5712 static inline dw_die_ref
5713 strip_naming_typedef (tree type, dw_die_ref type_die)
5714 {
5715 if (type
5716 && TREE_CODE (type) == RECORD_TYPE
5717 && type_die
5718 && type_die->die_tag == DW_TAG_typedef
5719 && is_naming_typedef_decl (TYPE_NAME (type)))
5720 type_die = get_AT_ref (type_die, DW_AT_type);
5721 return type_die;
5722 }
5723
5724 /* Like lookup_type_die, but if type is an anonymous type named by a
5725 typedef[1], return the DIE of the anonymous type instead of the one of
5726 the naming typedef. This is because in gen_typedef_die, we did
5727 equate the anonymous struct named by the typedef with the DIE of
5728 the naming typedef. So by default, lookup_type_die on an anonymous
5729 struct yields the DIE of the naming typedef.
5730
5731 [1]: Read the comment of is_naming_typedef_decl to learn about what
5732 a naming typedef is. */
5733
5734 static inline dw_die_ref
5735 lookup_type_die_strip_naming_typedef (tree type)
5736 {
5737 dw_die_ref die = lookup_type_die (type);
5738 return strip_naming_typedef (type, die);
5739 }
5740
5741 /* Equate a DIE to a given type specifier. */
5742
5743 static inline void
5744 equate_type_number_to_die (tree type, dw_die_ref type_die)
5745 {
5746 TYPE_SYMTAB_DIE (type) = type_die;
5747 }
5748
5749 /* Returns a hash value for X (which really is a die_struct). */
5750
5751 inline hashval_t
5752 decl_die_hasher::hash (die_node *x)
5753 {
5754 return (hashval_t) x->decl_id;
5755 }
5756
5757 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5758
5759 inline bool
5760 decl_die_hasher::equal (die_node *x, tree y)
5761 {
5762 return (x->decl_id == DECL_UID (y));
5763 }
5764
5765 /* Return the DIE associated with a given declaration. */
5766
5767 static inline dw_die_ref
5768 lookup_decl_die (tree decl)
5769 {
5770 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5771 NO_INSERT);
5772 if (!die)
5773 return NULL;
5774 if ((*die)->removed)
5775 {
5776 decl_die_table->clear_slot (die);
5777 return NULL;
5778 }
5779 return *die;
5780 }
5781
5782
5783 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5784 style reference.  Return true if we found one referring to a DIE for
5785 DECL, otherwise return false. */
5786
5787 static bool
5788 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5789 unsigned HOST_WIDE_INT *off)
5790 {
5791 dw_die_ref die;
5792
5793 if (in_lto_p && !decl_die_table)
5794 return false;
5795
5796 if (TREE_CODE (decl) == BLOCK)
5797 die = BLOCK_DIE (decl);
5798 else
5799 die = lookup_decl_die (decl);
5800 if (!die)
5801 return false;
5802
5803 /* During WPA stage and incremental linking we currently use DIEs
5804 to store the decl <-> label + offset map. That's quite inefficient
5805 but it works for now. */
5806 if (in_lto_p)
5807 {
5808 dw_die_ref ref = get_AT_ref (die, DW_AT_abstract_origin);
5809 if (!ref)
5810 {
5811 gcc_assert (die == comp_unit_die ());
5812 return false;
5813 }
5814 *off = ref->die_offset;
5815 *sym = ref->die_id.die_symbol;
5816 return true;
5817 }
5818
5819 /* Similar to get_ref_die_offset_label, but using the "correct"
5820 label. */
5821 *off = die->die_offset;
5822 while (die->die_parent)
5823 die = die->die_parent;
5824 /* For the containing CU DIE we compute a die_symbol in
5825 compute_comp_unit_symbol. */
5826 gcc_assert (die->die_tag == DW_TAG_compile_unit
5827 && die->die_id.die_symbol != NULL);
5828 *sym = die->die_id.die_symbol;
5829 return true;
5830 }
5831
5832 /* Add a reference of kind ATTR_KIND to a DIE at SYMBOL + OFFSET to DIE. */
5833
5834 static void
5835 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5836 const char *symbol, HOST_WIDE_INT offset)
5837 {
5838 /* Create a fake DIE that contains the reference. Don't use
5839 new_die because we don't want to end up in the limbo list. */
5840 dw_die_ref ref = new_die_raw (die->die_tag);
5841 ref->die_id.die_symbol = IDENTIFIER_POINTER (get_identifier (symbol));
5842 ref->die_offset = offset;
5843 ref->with_offset = 1;
5844 add_AT_die_ref (die, attr_kind, ref);
5845 }
5846
5847 /* Create a DIE for DECL if required and add a reference to a DIE
5848 at SYMBOL + OFFSET which contains attributes dumped early. */
5849
5850 static void
5851 dwarf2out_register_external_die (tree decl, const char *sym,
5852 unsigned HOST_WIDE_INT off)
5853 {
5854 if (debug_info_level == DINFO_LEVEL_NONE)
5855 return;
5856
5857 if ((flag_wpa
5858 || flag_incremental_link == INCREMENTAL_LINK_LTO) && !decl_die_table)
5859 decl_die_table = hash_table<decl_die_hasher>::create_ggc (1000);
5860
5861 dw_die_ref die
5862 = TREE_CODE (decl) == BLOCK ? BLOCK_DIE (decl) : lookup_decl_die (decl);
5863 gcc_assert (!die);
5864
5865 tree ctx;
5866 dw_die_ref parent = NULL;
5867 /* Need to look up a DIE for the decl's context - the containing
5868 function or translation unit. */
5869 if (TREE_CODE (decl) == BLOCK)
5870 {
5871 ctx = BLOCK_SUPERCONTEXT (decl);
5872 /* ??? We do not output DIEs for all scopes thus skip as
5873 many DIEs as needed. */
5874 while (TREE_CODE (ctx) == BLOCK
5875 && !BLOCK_DIE (ctx))
5876 ctx = BLOCK_SUPERCONTEXT (ctx);
5877 }
5878 else
5879 ctx = DECL_CONTEXT (decl);
5880 /* Peel types in the context stack. */
5881 while (ctx && TYPE_P (ctx))
5882 ctx = TYPE_CONTEXT (ctx);
5883 /* Likewise namespaces in case we do not want to emit DIEs for them. */
5884 if (debug_info_level <= DINFO_LEVEL_TERSE)
5885 while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
5886 ctx = DECL_CONTEXT (ctx);
5887 if (ctx)
5888 {
5889 if (TREE_CODE (ctx) == BLOCK)
5890 parent = BLOCK_DIE (ctx);
5891 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5892 /* Keep the 1:1 association during WPA. */
5893 && !flag_wpa
5894 && flag_incremental_link != INCREMENTAL_LINK_LTO)
5895 /* Otherwise all late annotations go to the main CU which
5896 imports the original CUs. */
5897 parent = comp_unit_die ();
5898 else if (TREE_CODE (ctx) == FUNCTION_DECL
5899 && TREE_CODE (decl) != FUNCTION_DECL
5900 && TREE_CODE (decl) != PARM_DECL
5901 && TREE_CODE (decl) != RESULT_DECL
5902 && TREE_CODE (decl) != BLOCK)
5903 /* Leave function local entities parent determination to when
5904 we process scope vars. */
5905 ;
5906 else
5907 parent = lookup_decl_die (ctx);
5908 }
5909 else
5910 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5911 Handle this case gracefully by globalizing stuff. */
5912 parent = comp_unit_die ();
5913 /* Create a DIE "stub". */
5914 switch (TREE_CODE (decl))
5915 {
5916 case TRANSLATION_UNIT_DECL:
5917 if (! flag_wpa && flag_incremental_link != INCREMENTAL_LINK_LTO)
5918 {
5919 die = comp_unit_die ();
5920 dw_die_ref import = new_die (DW_TAG_imported_unit, die, NULL_TREE);
5921 add_AT_external_die_ref (import, DW_AT_import, sym, off);
5922 /* We re-target all CU decls to the LTRANS CU DIE, so no need
5923 to create a DIE for the original CUs. */
5924 return;
5925 }
5926 /* Keep the 1:1 association during WPA. */
5927 die = new_die (DW_TAG_compile_unit, NULL, decl);
5928 break;
5929 case NAMESPACE_DECL:
5930 if (is_fortran (decl))
5931 die = new_die (DW_TAG_module, parent, decl);
5932 else
5933 die = new_die (DW_TAG_namespace, parent, decl);
5934 break;
5935 case FUNCTION_DECL:
5936 die = new_die (DW_TAG_subprogram, parent, decl);
5937 break;
5938 case VAR_DECL:
5939 die = new_die (DW_TAG_variable, parent, decl);
5940 break;
5941 case RESULT_DECL:
5942 die = new_die (DW_TAG_variable, parent, decl);
5943 break;
5944 case PARM_DECL:
5945 die = new_die (DW_TAG_formal_parameter, parent, decl);
5946 break;
5947 case CONST_DECL:
5948 die = new_die (DW_TAG_constant, parent, decl);
5949 break;
5950 case LABEL_DECL:
5951 die = new_die (DW_TAG_label, parent, decl);
5952 break;
5953 case BLOCK:
5954 die = new_die (DW_TAG_lexical_block, parent, decl);
5955 break;
5956 default:
5957 gcc_unreachable ();
5958 }
5959 if (TREE_CODE (decl) == BLOCK)
5960 BLOCK_DIE (decl) = die;
5961 else
5962 equate_decl_number_to_die (decl, die);
5963
5964 add_desc_attribute (die, decl);
5965
5966 /* Add a reference to the DIE providing early debug at $sym + off. */
5967 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
5968 }
5969
5970 /* Returns a hash value for X (which really is a var_loc_list). */
5971
5972 inline hashval_t
5973 decl_loc_hasher::hash (var_loc_list *x)
5974 {
5975 return (hashval_t) x->decl_id;
5976 }
5977
5978 /* Return nonzero if decl_id of var_loc_list X is the same as
5979 UID of decl *Y. */
5980
5981 inline bool
5982 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
5983 {
5984 return (x->decl_id == DECL_UID (y));
5985 }
5986
5987 /* Return the var_loc list associated with a given declaration. */
5988
5989 static inline var_loc_list *
5990 lookup_decl_loc (const_tree decl)
5991 {
5992 if (!decl_loc_table)
5993 return NULL;
5994 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
5995 }
5996
5997 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
5998
5999 inline hashval_t
6000 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
6001 {
6002 return (hashval_t) x->decl_id;
6003 }
6004
6005 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
6006 UID of decl *Y. */
6007
6008 inline bool
6009 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
6010 {
6011 return (x->decl_id == DECL_UID (y));
6012 }
6013
6014 /* Equate a DIE to a particular declaration. */
6015
6016 static void
6017 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6018 {
6019 unsigned int decl_id = DECL_UID (decl);
6020
6021 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6022 decl_die->decl_id = decl_id;
6023 }
6024
6025 /* Return how many bits the PIECE EXPR_LIST covers. */
6026
6027 static HOST_WIDE_INT
6028 decl_piece_bitsize (rtx piece)
6029 {
6030 int ret = (int) GET_MODE (piece);
6031 if (ret)
6032 return ret;
6033 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6034 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6035 return INTVAL (XEXP (XEXP (piece, 0), 0));
6036 }
6037
6038 /* Return pointer to the location of location note in PIECE EXPR_LIST. */
6039
6040 static rtx *
6041 decl_piece_varloc_ptr (rtx piece)
6042 {
6043 if ((int) GET_MODE (piece))
6044 return &XEXP (piece, 0);
6045 else
6046 return &XEXP (XEXP (piece, 0), 1);
6047 }
6048
6049 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6050 NEXT is the chain of following piece nodes. */
6051
6052 static rtx_expr_list *
6053 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6054 {
6055 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6056 return alloc_EXPR_LIST (bitsize, loc_note, next);
6057 else
6058 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6059 GEN_INT (bitsize),
6060 loc_note), next);
6061 }
6062
6063 /* Return rtx that should be stored into loc field for
6064 LOC_NOTE and BITPOS/BITSIZE. */
6065
6066 static rtx
6067 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6068 HOST_WIDE_INT bitsize)
6069 {
6070 if (bitsize != -1)
6071 {
6072 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6073 if (bitpos != 0)
6074 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6075 }
6076 return loc_note;
6077 }
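
/* A small worked example of the encoding above: for a location note
   describing bits [32, 64) of a variable (bitpos == 32, bitsize == 32),
   construct_piece_list builds a two-node EXPR_LIST chain

       [ 32-bit padding piece, loc == NULL_RTX ]
         -> [ 32-bit piece, loc == LOC_NOTE ]

   with each piece's size smuggled into the EXPR_LIST "mode" field; only
   sizes larger than MAX_MACHINE_MODE fall back to the CONCAT form holding
   an explicit CONST_INT.  */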
6078
6079 /* This function either modifies the location piece list *DEST in
6080 place (if SRC and INNER are NULL), or copies the location piece list
6081 *SRC to *DEST while modifying it.  The piece at BITPOS is changed
6082 to contain LOC_NOTE; any pieces overlapping it are removed (or not
6083 copied), and some padding is added around it if needed.
6084 When modifying in place, DEST should point to the EXPR_LIST where
6085 earlier pieces cover PIECE_BITPOS bits; when copying, SRC points
6086 to the start of the whole list and INNER points to the EXPR_LIST
6087 where earlier pieces cover PIECE_BITPOS bits. */
6088
6089 static void
6090 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6091 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6092 HOST_WIDE_INT bitsize, rtx loc_note)
6093 {
6094 HOST_WIDE_INT diff;
6095 bool copy = inner != NULL;
6096
6097 if (copy)
6098 {
6099 /* First copy all nodes preceding the current bitpos. */
6100 while (src != inner)
6101 {
6102 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6103 decl_piece_bitsize (*src), NULL_RTX);
6104 dest = &XEXP (*dest, 1);
6105 src = &XEXP (*src, 1);
6106 }
6107 }
6108 /* Add padding if needed. */
6109 if (bitpos != piece_bitpos)
6110 {
6111 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6112 copy ? NULL_RTX : *dest);
6113 dest = &XEXP (*dest, 1);
6114 }
6115 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6116 {
6117 gcc_assert (!copy);
6118 /* A piece with the correct bitpos and bitsize already exists;
6119 just update its location and return. */
6120 *decl_piece_varloc_ptr (*dest) = loc_note;
6121 return;
6122 }
6123 /* Add the piece that changed. */
6124 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6125 dest = &XEXP (*dest, 1);
6126 /* Skip over pieces that overlap it. */
6127 diff = bitpos - piece_bitpos + bitsize;
6128 if (!copy)
6129 src = dest;
6130 while (diff > 0 && *src)
6131 {
6132 rtx piece = *src;
6133 diff -= decl_piece_bitsize (piece);
6134 if (copy)
6135 src = &XEXP (piece, 1);
6136 else
6137 {
6138 *src = XEXP (piece, 1);
6139 free_EXPR_LIST_node (piece);
6140 }
6141 }
6142 /* Add padding if needed. */
6143 if (diff < 0 && *src)
6144 {
6145 if (!copy)
6146 dest = src;
6147 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6148 dest = &XEXP (*dest, 1);
6149 }
6150 if (!copy)
6151 return;
6152 /* Finally copy all nodes following it. */
6153 while (*src)
6154 {
6155 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6156 decl_piece_bitsize (*src), NULL_RTX);
6157 dest = &XEXP (*dest, 1);
6158 src = &XEXP (*src, 1);
6159 }
6160 }
6161
6162 /* Add a variable location node to the linked list for DECL. */
6163
6164 static struct var_loc_node *
6165 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6166 {
6167 unsigned int decl_id;
6168 var_loc_list *temp;
6169 struct var_loc_node *loc = NULL;
6170 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6171
6172 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6173 {
6174 tree realdecl = DECL_DEBUG_EXPR (decl);
6175 if (handled_component_p (realdecl)
6176 || (TREE_CODE (realdecl) == MEM_REF
6177 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6178 {
6179 bool reverse;
6180 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6181 &bitsize, &reverse);
6182 if (!innerdecl
6183 || !DECL_P (innerdecl)
6184 || DECL_IGNORED_P (innerdecl)
6185 || TREE_STATIC (innerdecl)
6186 || bitsize == 0
6187 || bitpos + bitsize > 256)
6188 return NULL;
6189 decl = innerdecl;
6190 }
6191 }
6192
6193 decl_id = DECL_UID (decl);
6194 var_loc_list **slot
6195 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6196 if (*slot == NULL)
6197 {
6198 temp = ggc_cleared_alloc<var_loc_list> ();
6199 temp->decl_id = decl_id;
6200 *slot = temp;
6201 }
6202 else
6203 temp = *slot;
6204
6205 /* For PARM_DECLs try to keep around the original incoming value,
6206 even if that means we'll emit a zero-range .debug_loc entry. */
6207 if (temp->last
6208 && temp->first == temp->last
6209 && TREE_CODE (decl) == PARM_DECL
6210 && NOTE_P (temp->first->loc)
6211 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6212 && DECL_INCOMING_RTL (decl)
6213 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6214 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6215 == GET_CODE (DECL_INCOMING_RTL (decl))
6216 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6217 && (bitsize != -1
6218 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6219 NOTE_VAR_LOCATION_LOC (loc_note))
6220 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6221 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6222 {
6223 loc = ggc_cleared_alloc<var_loc_node> ();
6224 temp->first->next = loc;
6225 temp->last = loc;
6226 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6227 }
6228 else if (temp->last)
6229 {
6230 struct var_loc_node *last = temp->last, *unused = NULL;
6231 rtx *piece_loc = NULL, last_loc_note;
6232 HOST_WIDE_INT piece_bitpos = 0;
6233 if (last->next)
6234 {
6235 last = last->next;
6236 gcc_assert (last->next == NULL);
6237 }
6238 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6239 {
6240 piece_loc = &last->loc;
6241 do
6242 {
6243 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6244 if (piece_bitpos + cur_bitsize > bitpos)
6245 break;
6246 piece_bitpos += cur_bitsize;
6247 piece_loc = &XEXP (*piece_loc, 1);
6248 }
6249 while (*piece_loc);
6250 }
6251 /* TEMP->LAST here is a pointer to either the last-but-one or the
6252 last element in the chained list; LAST is a pointer to the
6253 last element. */
6254 if (label && strcmp (last->label, label) == 0 && last->view == view)
6255 {
6256 /* For SRA-optimized variables, if there weren't any real
6257 insns since the last note, just modify the last node. */
6258 if (piece_loc != NULL)
6259 {
6260 adjust_piece_list (piece_loc, NULL, NULL,
6261 bitpos, piece_bitpos, bitsize, loc_note);
6262 return NULL;
6263 }
6264 /* If the last note doesn't cover any instructions, remove it. */
6265 if (temp->last != last)
6266 {
6267 temp->last->next = NULL;
6268 unused = last;
6269 last = temp->last;
6270 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6271 }
6272 else
6273 {
6274 gcc_assert (temp->first == temp->last
6275 || (temp->first->next == temp->last
6276 && TREE_CODE (decl) == PARM_DECL));
6277 memset (temp->last, '\0', sizeof (*temp->last));
6278 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6279 return temp->last;
6280 }
6281 }
6282 if (bitsize == -1 && NOTE_P (last->loc))
6283 last_loc_note = last->loc;
6284 else if (piece_loc != NULL
6285 && *piece_loc != NULL_RTX
6286 && piece_bitpos == bitpos
6287 && decl_piece_bitsize (*piece_loc) == bitsize)
6288 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6289 else
6290 last_loc_note = NULL_RTX;
6291 /* If the current location is the same as the end of the list,
6292 and either both or neither of the locations is uninitialized,
6293 we have nothing to do. */
6294 if (last_loc_note == NULL_RTX
6295 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6296 NOTE_VAR_LOCATION_LOC (loc_note)))
6297 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6298 != NOTE_VAR_LOCATION_STATUS (loc_note))
6299 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6300 == VAR_INIT_STATUS_UNINITIALIZED)
6301 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6302 == VAR_INIT_STATUS_UNINITIALIZED))))
6303 {
6304 /* Add LOC to the end of list and update LAST. If the last
6305 element of the list has been removed above, reuse its
6306 memory for the new node, otherwise allocate a new one. */
6307 if (unused)
6308 {
6309 loc = unused;
6310 memset (loc, '\0', sizeof (*loc));
6311 }
6312 else
6313 loc = ggc_cleared_alloc<var_loc_node> ();
6314 if (bitsize == -1 || piece_loc == NULL)
6315 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6316 else
6317 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6318 bitpos, piece_bitpos, bitsize, loc_note);
6319 last->next = loc;
6320 /* Ensure TEMP->LAST will point either to the new last but one
6321 element of the chain, or to the last element in it. */
6322 if (last != temp->last)
6323 temp->last = last;
6324 }
6325 else if (unused)
6326 ggc_free (unused);
6327 }
6328 else
6329 {
6330 loc = ggc_cleared_alloc<var_loc_node> ();
6331 temp->first = loc;
6332 temp->last = loc;
6333 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6334 }
6335 return loc;
6336 }
6337 \f
6338 /* Keep track of the number of spaces used to indent the
6339 output of the debugging routines that print the structure of
6340 the DIE internal representation. */
6341 static int print_indent;
6342
6343 /* Indent the line the number of spaces given by print_indent. */
6344
6345 static inline void
6346 print_spaces (FILE *outfile)
6347 {
6348 fprintf (outfile, "%*s", print_indent, "");
6349 }
6350
6351 /* Print a type signature in hex. */
6352
6353 static inline void
6354 print_signature (FILE *outfile, char *sig)
6355 {
6356 int i;
6357
6358 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6359 fprintf (outfile, "%02x", sig[i] & 0xff);
6360 }
6361
6362 static inline void
6363 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6364 {
6365 if (discr_value->pos)
6366 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6367 else
6368 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6369 }
6370
6371 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6372
6373 /* Print the value associated with the VAL DWARF value node to OUTFILE.  If
6374 RECURSE, output location descriptor operations. */
6375
6376 static void
6377 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6378 {
6379 switch (val->val_class)
6380 {
6381 case dw_val_class_addr:
6382 fprintf (outfile, "address");
6383 break;
6384 case dw_val_class_offset:
6385 fprintf (outfile, "offset");
6386 break;
6387 case dw_val_class_loc:
6388 fprintf (outfile, "location descriptor");
6389 if (val->v.val_loc == NULL)
6390 fprintf (outfile, " -> <null>\n");
6391 else if (recurse)
6392 {
6393 fprintf (outfile, ":\n");
6394 print_indent += 4;
6395 print_loc_descr (val->v.val_loc, outfile);
6396 print_indent -= 4;
6397 }
6398 else
6399 {
6400 if (flag_dump_noaddr || flag_dump_unnumbered)
6401 fprintf (outfile, " #\n");
6402 else
6403 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6404 }
6405 break;
6406 case dw_val_class_loc_list:
6407 fprintf (outfile, "location list -> label:%s",
6408 val->v.val_loc_list->ll_symbol);
6409 break;
6410 case dw_val_class_view_list:
6411 val = view_list_to_loc_list_val_node (val);
6412 fprintf (outfile, "location list with views -> labels:%s and %s",
6413 val->v.val_loc_list->ll_symbol,
6414 val->v.val_loc_list->vl_symbol);
6415 break;
6416 case dw_val_class_range_list:
6417 fprintf (outfile, "range list");
6418 break;
6419 case dw_val_class_const:
6420 case dw_val_class_const_implicit:
6421 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6422 break;
6423 case dw_val_class_unsigned_const:
6424 case dw_val_class_unsigned_const_implicit:
6425 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6426 break;
6427 case dw_val_class_const_double:
6428 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6429 HOST_WIDE_INT_PRINT_UNSIGNED")",
6430 val->v.val_double.high,
6431 val->v.val_double.low);
6432 break;
6433 case dw_val_class_wide_int:
6434 {
6435 int i = val->v.val_wide->get_len ();
6436 fprintf (outfile, "constant (");
6437 gcc_assert (i > 0);
6438 if (val->v.val_wide->elt (i - 1) == 0)
6439 fprintf (outfile, "0x");
6440 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6441 val->v.val_wide->elt (--i));
6442 while (--i >= 0)
6443 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6444 val->v.val_wide->elt (i));
6445 fprintf (outfile, ")");
6446 break;
6447 }
6448 case dw_val_class_vec:
6449 fprintf (outfile, "floating-point or vector constant");
6450 break;
6451 case dw_val_class_flag:
6452 fprintf (outfile, "%u", val->v.val_flag);
6453 break;
6454 case dw_val_class_die_ref:
6455 if (val->v.val_die_ref.die != NULL)
6456 {
6457 dw_die_ref die = val->v.val_die_ref.die;
6458
6459 if (die->comdat_type_p)
6460 {
6461 fprintf (outfile, "die -> signature: ");
6462 print_signature (outfile,
6463 die->die_id.die_type_node->signature);
6464 }
6465 else if (die->die_id.die_symbol)
6466 {
6467 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6468 if (die->with_offset)
6469 fprintf (outfile, " + %ld", die->die_offset);
6470 }
6471 else
6472 fprintf (outfile, "die -> %ld", die->die_offset);
6473 if (flag_dump_noaddr || flag_dump_unnumbered)
6474 fprintf (outfile, " #");
6475 else
6476 fprintf (outfile, " (%p)", (void *) die);
6477 }
6478 else
6479 fprintf (outfile, "die -> <null>");
6480 break;
6481 case dw_val_class_vms_delta:
6482 fprintf (outfile, "delta: @slotcount(%s-%s)",
6483 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6484 break;
6485 case dw_val_class_symview:
6486 fprintf (outfile, "view: %s", val->v.val_symbolic_view);
6487 break;
6488 case dw_val_class_lbl_id:
6489 case dw_val_class_lineptr:
6490 case dw_val_class_macptr:
6491 case dw_val_class_loclistsptr:
6492 case dw_val_class_high_pc:
6493 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6494 break;
6495 case dw_val_class_str:
6496 if (val->v.val_str->str != NULL)
6497 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6498 else
6499 fprintf (outfile, "<null>");
6500 break;
6501 case dw_val_class_file:
6502 case dw_val_class_file_implicit:
6503 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6504 val->v.val_file->emitted_number);
6505 break;
6506 case dw_val_class_data8:
6507 {
6508 int i;
6509
6510 for (i = 0; i < 8; i++)
6511 fprintf (outfile, "%02x", val->v.val_data8[i]);
6512 break;
6513 }
6514 case dw_val_class_discr_value:
6515 print_discr_value (outfile, &val->v.val_discr_value);
6516 break;
6517 case dw_val_class_discr_list:
6518 for (dw_discr_list_ref node = val->v.val_discr_list;
6519 node != NULL;
6520 node = node->dw_discr_next)
6521 {
6522 if (node->dw_discr_range)
6523 {
6524 print_discr_value (outfile, &node->dw_discr_lower_bound);
6525 fprintf (outfile, " .. ");
6526 print_discr_value (outfile, &node->dw_discr_upper_bound);
6527 }
6528 else
6529 print_discr_value (outfile, &node->dw_discr_lower_bound);
6530
6531 if (node->dw_discr_next != NULL)
6532 fprintf (outfile, " | ");
6533 }
6534 default:
6535 break;
6536 }
6537 }
6538
6539 /* Likewise, for a DIE attribute. */
6540
6541 static void
6542 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6543 {
6544 print_dw_val (&a->dw_attr_val, recurse, outfile);
6545 }
6546
6547
6548 /* Print the list of operands in the LOC location description to OUTFILE. This
6549 routine is a debugging aid only. */
6550
6551 static void
6552 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6553 {
6554 dw_loc_descr_ref l = loc;
6555
6556 if (loc == NULL)
6557 {
6558 print_spaces (outfile);
6559 fprintf (outfile, "<null>\n");
6560 return;
6561 }
6562
6563 for (l = loc; l != NULL; l = l->dw_loc_next)
6564 {
6565 print_spaces (outfile);
6566 if (flag_dump_noaddr || flag_dump_unnumbered)
6567 fprintf (outfile, "#");
6568 else
6569 fprintf (outfile, "(%p)", (void *) l);
6570 fprintf (outfile, " %s",
6571 dwarf_stack_op_name (l->dw_loc_opc));
6572 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6573 {
6574 fprintf (outfile, " ");
6575 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6576 }
6577 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6578 {
6579 fprintf (outfile, ", ");
6580 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6581 }
6582 fprintf (outfile, "\n");
6583 }
6584 }
6585
6586 /* Print the information associated with a given DIE, and its children.
6587 This routine is a debugging aid only. */
6588
6589 static void
6590 print_die (dw_die_ref die, FILE *outfile)
6591 {
6592 dw_attr_node *a;
6593 dw_die_ref c;
6594 unsigned ix;
6595
6596 print_spaces (outfile);
6597 fprintf (outfile, "DIE %4ld: %s ",
6598 die->die_offset, dwarf_tag_name (die->die_tag));
6599 if (flag_dump_noaddr || flag_dump_unnumbered)
6600 fprintf (outfile, "#\n");
6601 else
6602 fprintf (outfile, "(%p)\n", (void*) die);
6603 print_spaces (outfile);
6604 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6605 fprintf (outfile, " offset: %ld", die->die_offset);
6606 fprintf (outfile, " mark: %d\n", die->die_mark);
6607
6608 if (die->comdat_type_p)
6609 {
6610 print_spaces (outfile);
6611 fprintf (outfile, " signature: ");
6612 print_signature (outfile, die->die_id.die_type_node->signature);
6613 fprintf (outfile, "\n");
6614 }
6615
6616 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6617 {
6618 print_spaces (outfile);
6619 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6620
6621 print_attribute (a, true, outfile);
6622 fprintf (outfile, "\n");
6623 }
6624
6625 if (die->die_child != NULL)
6626 {
6627 print_indent += 4;
6628 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6629 print_indent -= 4;
6630 }
6631 if (print_indent == 0)
6632 fprintf (outfile, "\n");
6633 }
6634
6635 /* Print the list of operations in the LOC location description. */
6636
6637 DEBUG_FUNCTION void
6638 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6639 {
6640 print_loc_descr (loc, stderr);
6641 }
6642
6643 /* Print the information collected for a given DIE. */
6644
6645 DEBUG_FUNCTION void
6646 debug_dwarf_die (dw_die_ref die)
6647 {
6648 print_die (die, stderr);
6649 }
6650
6651 DEBUG_FUNCTION void
6652 debug (die_struct &ref)
6653 {
6654 print_die (&ref, stderr);
6655 }
6656
6657 DEBUG_FUNCTION void
6658 debug (die_struct *ptr)
6659 {
6660 if (ptr)
6661 debug (*ptr);
6662 else
6663 fprintf (stderr, "<nil>\n");
6664 }
6665
6666
6667 /* Print all DWARF information collected for the compilation unit.
6668 This routine is a debugging aid only. */
6669
6670 DEBUG_FUNCTION void
6671 debug_dwarf (void)
6672 {
6673 print_indent = 0;
6674 print_die (comp_unit_die (), stderr);
6675 }
6676
6677 /* Verify the DIE tree structure. */
6678
6679 DEBUG_FUNCTION void
6680 verify_die (dw_die_ref die)
6681 {
6682 gcc_assert (!die->die_mark);
6683 if (die->die_parent == NULL
6684 && die->die_sib == NULL)
6685 return;
6686 /* Verify the die_sib list is cyclic. */
6687 dw_die_ref x = die;
6688 do
6689 {
6690 x->die_mark = 1;
6691 x = x->die_sib;
6692 }
6693 while (x && !x->die_mark);
6694 gcc_assert (x == die);
6695 x = die;
6696 do
6697 {
6698 /* Verify all dies have the same parent. */
6699 gcc_assert (x->die_parent == die->die_parent);
6700 if (x->die_child)
6701 {
6702 /* Verify the child has the proper parent and recurse. */
6703 gcc_assert (x->die_child->die_parent == x);
6704 verify_die (x->die_child);
6705 }
6706 x->die_mark = 0;
6707 x = x->die_sib;
6708 }
6709 while (x && x->die_mark);
6710 }
6711
6712 /* Sanity checks on DIEs. */
6713
6714 static void
6715 check_die (dw_die_ref die)
6716 {
6717 unsigned ix;
6718 dw_attr_node *a;
6719 bool inline_found = false;
6720 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6721 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6722 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6723 {
6724 switch (a->dw_attr)
6725 {
6726 case DW_AT_inline:
6727 if (a->dw_attr_val.v.val_unsigned)
6728 inline_found = true;
6729 break;
6730 case DW_AT_location:
6731 ++n_location;
6732 break;
6733 case DW_AT_low_pc:
6734 ++n_low_pc;
6735 break;
6736 case DW_AT_high_pc:
6737 ++n_high_pc;
6738 break;
6739 case DW_AT_artificial:
6740 ++n_artificial;
6741 break;
6742 case DW_AT_decl_column:
6743 ++n_decl_column;
6744 break;
6745 case DW_AT_decl_line:
6746 ++n_decl_line;
6747 break;
6748 case DW_AT_decl_file:
6749 ++n_decl_file;
6750 break;
6751 default:
6752 break;
6753 }
6754 }
6755 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6756 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6757 {
6758 fprintf (stderr, "Duplicate attributes in DIE:\n");
6759 debug_dwarf_die (die);
6760 gcc_unreachable ();
6761 }
6762 if (inline_found)
6763 {
6764 /* A debugging information entry that is a member of an abstract
6765 instance tree [that has DW_AT_inline] should not contain any
6766 attributes which describe aspects of the subroutine which vary
6767 between distinct inlined expansions or distinct out-of-line
6768 expansions. */
6769 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6770 gcc_assert (a->dw_attr != DW_AT_low_pc
6771 && a->dw_attr != DW_AT_high_pc
6772 && a->dw_attr != DW_AT_location
6773 && a->dw_attr != DW_AT_frame_base
6774 && a->dw_attr != DW_AT_call_all_calls
6775 && a->dw_attr != DW_AT_GNU_all_call_sites);
6776 }
6777 }
6778 \f
6779 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6780 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6781 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6782
6783 /* Calculate the checksum of a location expression. */
6784
6785 static inline void
6786 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6787 {
6788 int tem;
6789 inchash::hash hstate;
6790 hashval_t hash;
6791
6792 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6793 CHECKSUM (tem);
6794 hash_loc_operands (loc, hstate);
6795 hash = hstate.end();
6796 CHECKSUM (hash);
6797 }
6798
6799 /* Calculate the checksum of an attribute. */
6800
6801 static void
6802 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6803 {
6804 dw_loc_descr_ref loc;
6805 rtx r;
6806
6807 CHECKSUM (at->dw_attr);
6808
6809 /* We don't care that this was compiled with a different compiler
6810 snapshot; if the output is the same, that's what matters. */
6811 if (at->dw_attr == DW_AT_producer)
6812 return;
6813
6814 switch (AT_class (at))
6815 {
6816 case dw_val_class_const:
6817 case dw_val_class_const_implicit:
6818 CHECKSUM (at->dw_attr_val.v.val_int);
6819 break;
6820 case dw_val_class_unsigned_const:
6821 case dw_val_class_unsigned_const_implicit:
6822 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6823 break;
6824 case dw_val_class_const_double:
6825 CHECKSUM (at->dw_attr_val.v.val_double);
6826 break;
6827 case dw_val_class_wide_int:
6828 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6829 get_full_len (*at->dw_attr_val.v.val_wide)
6830 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6831 break;
6832 case dw_val_class_vec:
6833 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6834 (at->dw_attr_val.v.val_vec.length
6835 * at->dw_attr_val.v.val_vec.elt_size));
6836 break;
6837 case dw_val_class_flag:
6838 CHECKSUM (at->dw_attr_val.v.val_flag);
6839 break;
6840 case dw_val_class_str:
6841 CHECKSUM_STRING (AT_string (at));
6842 break;
6843
6844 case dw_val_class_addr:
6845 r = AT_addr (at);
6846 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6847 CHECKSUM_STRING (XSTR (r, 0));
6848 break;
6849
6850 case dw_val_class_offset:
6851 CHECKSUM (at->dw_attr_val.v.val_offset);
6852 break;
6853
6854 case dw_val_class_loc:
6855 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6856 loc_checksum (loc, ctx);
6857 break;
6858
6859 case dw_val_class_die_ref:
6860 die_checksum (AT_ref (at), ctx, mark);
6861 break;
6862
6863 case dw_val_class_fde_ref:
6864 case dw_val_class_vms_delta:
6865 case dw_val_class_symview:
6866 case dw_val_class_lbl_id:
6867 case dw_val_class_lineptr:
6868 case dw_val_class_macptr:
6869 case dw_val_class_loclistsptr:
6870 case dw_val_class_high_pc:
6871 break;
6872
6873 case dw_val_class_file:
6874 case dw_val_class_file_implicit:
6875 CHECKSUM_STRING (AT_file (at)->filename);
6876 break;
6877
6878 case dw_val_class_data8:
6879 CHECKSUM (at->dw_attr_val.v.val_data8);
6880 break;
6881
6882 default:
6883 break;
6884 }
6885 }
6886
6887 /* Calculate the checksum of a DIE. */
6888
6889 static void
6890 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6891 {
6892 dw_die_ref c;
6893 dw_attr_node *a;
6894 unsigned ix;
6895
6896 /* To avoid infinite recursion. */
6897 if (die->die_mark)
6898 {
6899 CHECKSUM (die->die_mark);
6900 return;
6901 }
6902 die->die_mark = ++(*mark);
6903
6904 CHECKSUM (die->die_tag);
6905
6906 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6907 attr_checksum (a, ctx, mark);
6908
6909 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6910 }
6911
6912 #undef CHECKSUM
6913 #undef CHECKSUM_BLOCK
6914 #undef CHECKSUM_STRING
6915
6916 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6917 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6918 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6919 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
6920 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
6921 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
6922 #define CHECKSUM_ATTR(FOO) \
6923 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
6924
6925 /* Calculate the checksum of a number in signed LEB128 format. */
6926
6927 static void
6928 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
6929 {
6930 unsigned char byte;
6931 bool more;
6932
6933 while (1)
6934 {
6935 byte = (value & 0x7f);
6936 value >>= 7;
6937 more = !((value == 0 && (byte & 0x40) == 0)
6938 || (value == -1 && (byte & 0x40) != 0));
6939 if (more)
6940 byte |= 0x80;
6941 CHECKSUM (byte);
6942 if (!more)
6943 break;
6944 }
6945 }
6946
6947 /* Calculate the checksum of a number in unsigned LEB128 format. */
6948
6949 static void
6950 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
6951 {
6952 while (1)
6953 {
6954 unsigned char byte = (value & 0x7f);
6955 value >>= 7;
6956 if (value != 0)
6957 /* More bytes to follow. */
6958 byte |= 0x80;
6959 CHECKSUM (byte);
6960 if (value == 0)
6961 break;
6962 }
6963 }
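
/* A couple of worked examples of what the two routines above feed into the
   checksum: in unsigned LEB128, 129 becomes the bytes 0x81 0x01, while in
   signed LEB128, -2 is the single byte 0x7e and 64 needs two bytes,
   0xc0 0x00, because the sign bit (0x40) of the first byte would otherwise
   make it read back as a negative number.  */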
6964
6965 /* Checksum the context of the DIE. This adds the names of any
6966 surrounding namespaces or structures to the checksum. */
6967
6968 static void
6969 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
6970 {
6971 const char *name;
6972 dw_die_ref spec;
6973 int tag = die->die_tag;
6974
6975 if (tag != DW_TAG_namespace
6976 && tag != DW_TAG_structure_type
6977 && tag != DW_TAG_class_type)
6978 return;
6979
6980 name = get_AT_string (die, DW_AT_name);
6981
6982 spec = get_AT_ref (die, DW_AT_specification);
6983 if (spec != NULL)
6984 die = spec;
6985
6986 if (die->die_parent != NULL)
6987 checksum_die_context (die->die_parent, ctx);
6988
6989 CHECKSUM_ULEB128 ('C');
6990 CHECKSUM_ULEB128 (tag);
6991 if (name != NULL)
6992 CHECKSUM_STRING (name);
6993 }
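
/* For example: for a class C declared inside namespace N, the context
   contribution to the checksum is 'C' DW_TAG_namespace "N" followed by
   'C' DW_TAG_class_type "C", each piece in the LEB128/string encodings
   above, so a type's signature changes whenever its enclosing scopes are
   renamed or restructured.  */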
6994
6995 /* Calculate the checksum of a location expression. */
6996
6997 static inline void
6998 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6999 {
7000 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
7001 were emitted as a DW_FORM_sdata instead of a location expression. */
7002 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
7003 {
7004 CHECKSUM_ULEB128 (DW_FORM_sdata);
7005 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
7006 return;
7007 }
7008
7009 /* Otherwise, just checksum the raw location expression. */
7010 while (loc != NULL)
7011 {
7012 inchash::hash hstate;
7013 hashval_t hash;
7014
7015 CHECKSUM_ULEB128 (loc->dtprel);
7016 CHECKSUM_ULEB128 (loc->dw_loc_opc);
7017 hash_loc_operands (loc, hstate);
7018 hash = hstate.end ();
7019 CHECKSUM (hash);
7020 loc = loc->dw_loc_next;
7021 }
7022 }
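/* Example (editorial sketch): a member whose DW_AT_data_member_location is
   the lone expression "DW_OP_plus_uconst 12" contributes the same bytes as a
   plain constant 12 would, i.e. the special case above reduces to:  */
#if 0
static void
plus_uconst_checksum_example (struct md5_ctx *ctx)
{
  CHECKSUM_ULEB128 (DW_FORM_sdata);
  CHECKSUM_SLEB128 ((HOST_WIDE_INT) 12);
  /* This keeps the type signature stable whether the member offset is
     emitted as a constant or as a one-operator location expression.  */
}
#endif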
7023
7024 /* Calculate the checksum of an attribute. */
7025
7026 static void
7027 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
7028 struct md5_ctx *ctx, int *mark)
7029 {
7030 dw_loc_descr_ref loc;
7031 rtx r;
7032
7033 if (AT_class (at) == dw_val_class_die_ref)
7034 {
7035 dw_die_ref target_die = AT_ref (at);
7036
7037 /* For pointer and reference types, we checksum only the (qualified)
7038 name of the target type (if there is a name). For friend entries,
7039 we checksum only the (qualified) name of the target type or function.
7040 This allows the checksum to remain the same whether the target type
7041 is complete or not. */
7042 if ((at->dw_attr == DW_AT_type
7043 && (tag == DW_TAG_pointer_type
7044 || tag == DW_TAG_reference_type
7045 || tag == DW_TAG_rvalue_reference_type
7046 || tag == DW_TAG_ptr_to_member_type))
7047 || (at->dw_attr == DW_AT_friend
7048 && tag == DW_TAG_friend))
7049 {
7050 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7051
7052 if (name_attr != NULL)
7053 {
7054 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7055
7056 if (decl == NULL)
7057 decl = target_die;
7058 CHECKSUM_ULEB128 ('N');
7059 CHECKSUM_ULEB128 (at->dw_attr);
7060 if (decl->die_parent != NULL)
7061 checksum_die_context (decl->die_parent, ctx);
7062 CHECKSUM_ULEB128 ('E');
7063 CHECKSUM_STRING (AT_string (name_attr));
7064 return;
7065 }
7066 }
7067
7068 /* For all other references to another DIE, we check to see if the
7069 target DIE has already been visited. If it has, we emit a
7070 backward reference; if not, we descend recursively. */
7071 if (target_die->die_mark > 0)
7072 {
7073 CHECKSUM_ULEB128 ('R');
7074 CHECKSUM_ULEB128 (at->dw_attr);
7075 CHECKSUM_ULEB128 (target_die->die_mark);
7076 }
7077 else
7078 {
7079 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7080
7081 if (decl == NULL)
7082 decl = target_die;
7083 target_die->die_mark = ++(*mark);
7084 CHECKSUM_ULEB128 ('T');
7085 CHECKSUM_ULEB128 (at->dw_attr);
7086 if (decl->die_parent != NULL)
7087 checksum_die_context (decl->die_parent, ctx);
7088 die_checksum_ordered (target_die, ctx, mark);
7089 }
7090 return;
7091 }
7092
7093 CHECKSUM_ULEB128 ('A');
7094 CHECKSUM_ULEB128 (at->dw_attr);
7095
7096 switch (AT_class (at))
7097 {
7098 case dw_val_class_const:
7099 case dw_val_class_const_implicit:
7100 CHECKSUM_ULEB128 (DW_FORM_sdata);
7101 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7102 break;
7103
7104 case dw_val_class_unsigned_const:
7105 case dw_val_class_unsigned_const_implicit:
7106 CHECKSUM_ULEB128 (DW_FORM_sdata);
7107 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7108 break;
7109
7110 case dw_val_class_const_double:
7111 CHECKSUM_ULEB128 (DW_FORM_block);
7112 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7113 CHECKSUM (at->dw_attr_val.v.val_double);
7114 break;
7115
7116 case dw_val_class_wide_int:
7117 CHECKSUM_ULEB128 (DW_FORM_block);
7118 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7119 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7120 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7121 get_full_len (*at->dw_attr_val.v.val_wide)
7122 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7123 break;
7124
7125 case dw_val_class_vec:
7126 CHECKSUM_ULEB128 (DW_FORM_block);
7127 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7128 * at->dw_attr_val.v.val_vec.elt_size);
7129 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7130 (at->dw_attr_val.v.val_vec.length
7131 * at->dw_attr_val.v.val_vec.elt_size));
7132 break;
7133
7134 case dw_val_class_flag:
7135 CHECKSUM_ULEB128 (DW_FORM_flag);
7136 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7137 break;
7138
7139 case dw_val_class_str:
7140 CHECKSUM_ULEB128 (DW_FORM_string);
7141 CHECKSUM_STRING (AT_string (at));
7142 break;
7143
7144 case dw_val_class_addr:
7145 r = AT_addr (at);
7146 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7147 CHECKSUM_ULEB128 (DW_FORM_string);
7148 CHECKSUM_STRING (XSTR (r, 0));
7149 break;
7150
7151 case dw_val_class_offset:
7152 CHECKSUM_ULEB128 (DW_FORM_sdata);
7153 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7154 break;
7155
7156 case dw_val_class_loc:
7157 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7158 loc_checksum_ordered (loc, ctx);
7159 break;
7160
7161 case dw_val_class_fde_ref:
7162 case dw_val_class_symview:
7163 case dw_val_class_lbl_id:
7164 case dw_val_class_lineptr:
7165 case dw_val_class_macptr:
7166 case dw_val_class_loclistsptr:
7167 case dw_val_class_high_pc:
7168 break;
7169
7170 case dw_val_class_file:
7171 case dw_val_class_file_implicit:
7172 CHECKSUM_ULEB128 (DW_FORM_string);
7173 CHECKSUM_STRING (AT_file (at)->filename);
7174 break;
7175
7176 case dw_val_class_data8:
7177 CHECKSUM (at->dw_attr_val.v.val_data8);
7178 break;
7179
7180 default:
7181 break;
7182 }
7183 }
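/* Illustrative sketch (editorial addition, hypothetical names): the 'N' path
   above for a DW_AT_type attribute on a DW_TAG_pointer_type whose target is
   a struct named "Foo" inside namespace "N".  Only the qualified name of the
   target is hashed, so the contribution is:  */
#if 0
static void
pointer_target_checksum_example (struct md5_ctx *ctx)
{
  CHECKSUM_ULEB128 ('N');
  CHECKSUM_ULEB128 (DW_AT_type);
  /* checksum_die_context for the target's parent, namespace N.  */
  CHECKSUM_ULEB128 ('C');
  CHECKSUM_ULEB128 (DW_TAG_namespace);
  CHECKSUM_STRING ("N");
  CHECKSUM_ULEB128 ('E');
  CHECKSUM_STRING ("Foo");
}
#endif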
7184
7185 struct checksum_attributes
7186 {
7187 dw_attr_node *at_name;
7188 dw_attr_node *at_type;
7189 dw_attr_node *at_friend;
7190 dw_attr_node *at_accessibility;
7191 dw_attr_node *at_address_class;
7192 dw_attr_node *at_alignment;
7193 dw_attr_node *at_allocated;
7194 dw_attr_node *at_artificial;
7195 dw_attr_node *at_associated;
7196 dw_attr_node *at_binary_scale;
7197 dw_attr_node *at_bit_offset;
7198 dw_attr_node *at_bit_size;
7199 dw_attr_node *at_bit_stride;
7200 dw_attr_node *at_byte_size;
7201 dw_attr_node *at_byte_stride;
7202 dw_attr_node *at_const_value;
7203 dw_attr_node *at_containing_type;
7204 dw_attr_node *at_count;
7205 dw_attr_node *at_data_location;
7206 dw_attr_node *at_data_member_location;
7207 dw_attr_node *at_decimal_scale;
7208 dw_attr_node *at_decimal_sign;
7209 dw_attr_node *at_default_value;
7210 dw_attr_node *at_digit_count;
7211 dw_attr_node *at_discr;
7212 dw_attr_node *at_discr_list;
7213 dw_attr_node *at_discr_value;
7214 dw_attr_node *at_encoding;
7215 dw_attr_node *at_endianity;
7216 dw_attr_node *at_explicit;
7217 dw_attr_node *at_is_optional;
7218 dw_attr_node *at_location;
7219 dw_attr_node *at_lower_bound;
7220 dw_attr_node *at_mutable;
7221 dw_attr_node *at_ordering;
7222 dw_attr_node *at_picture_string;
7223 dw_attr_node *at_prototyped;
7224 dw_attr_node *at_small;
7225 dw_attr_node *at_segment;
7226 dw_attr_node *at_string_length;
7227 dw_attr_node *at_string_length_bit_size;
7228 dw_attr_node *at_string_length_byte_size;
7229 dw_attr_node *at_threads_scaled;
7230 dw_attr_node *at_upper_bound;
7231 dw_attr_node *at_use_location;
7232 dw_attr_node *at_use_UTF8;
7233 dw_attr_node *at_variable_parameter;
7234 dw_attr_node *at_virtuality;
7235 dw_attr_node *at_visibility;
7236 dw_attr_node *at_vtable_elem_location;
7237 };
7238
7239 /* Collect the attributes that we will want to use for the checksum. */
7240
7241 static void
7242 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7243 {
7244 dw_attr_node *a;
7245 unsigned ix;
7246
7247 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7248 {
7249 switch (a->dw_attr)
7250 {
7251 case DW_AT_name:
7252 attrs->at_name = a;
7253 break;
7254 case DW_AT_type:
7255 attrs->at_type = a;
7256 break;
7257 case DW_AT_friend:
7258 attrs->at_friend = a;
7259 break;
7260 case DW_AT_accessibility:
7261 attrs->at_accessibility = a;
7262 break;
7263 case DW_AT_address_class:
7264 attrs->at_address_class = a;
7265 break;
7266 case DW_AT_alignment:
7267 attrs->at_alignment = a;
7268 break;
7269 case DW_AT_allocated:
7270 attrs->at_allocated = a;
7271 break;
7272 case DW_AT_artificial:
7273 attrs->at_artificial = a;
7274 break;
7275 case DW_AT_associated:
7276 attrs->at_associated = a;
7277 break;
7278 case DW_AT_binary_scale:
7279 attrs->at_binary_scale = a;
7280 break;
7281 case DW_AT_bit_offset:
7282 attrs->at_bit_offset = a;
7283 break;
7284 case DW_AT_bit_size:
7285 attrs->at_bit_size = a;
7286 break;
7287 case DW_AT_bit_stride:
7288 attrs->at_bit_stride = a;
7289 break;
7290 case DW_AT_byte_size:
7291 attrs->at_byte_size = a;
7292 break;
7293 case DW_AT_byte_stride:
7294 attrs->at_byte_stride = a;
7295 break;
7296 case DW_AT_const_value:
7297 attrs->at_const_value = a;
7298 break;
7299 case DW_AT_containing_type:
7300 attrs->at_containing_type = a;
7301 break;
7302 case DW_AT_count:
7303 attrs->at_count = a;
7304 break;
7305 case DW_AT_data_location:
7306 attrs->at_data_location = a;
7307 break;
7308 case DW_AT_data_member_location:
7309 attrs->at_data_member_location = a;
7310 break;
7311 case DW_AT_decimal_scale:
7312 attrs->at_decimal_scale = a;
7313 break;
7314 case DW_AT_decimal_sign:
7315 attrs->at_decimal_sign = a;
7316 break;
7317 case DW_AT_default_value:
7318 attrs->at_default_value = a;
7319 break;
7320 case DW_AT_digit_count:
7321 attrs->at_digit_count = a;
7322 break;
7323 case DW_AT_discr:
7324 attrs->at_discr = a;
7325 break;
7326 case DW_AT_discr_list:
7327 attrs->at_discr_list = a;
7328 break;
7329 case DW_AT_discr_value:
7330 attrs->at_discr_value = a;
7331 break;
7332 case DW_AT_encoding:
7333 attrs->at_encoding = a;
7334 break;
7335 case DW_AT_endianity:
7336 attrs->at_endianity = a;
7337 break;
7338 case DW_AT_explicit:
7339 attrs->at_explicit = a;
7340 break;
7341 case DW_AT_is_optional:
7342 attrs->at_is_optional = a;
7343 break;
7344 case DW_AT_location:
7345 attrs->at_location = a;
7346 break;
7347 case DW_AT_lower_bound:
7348 attrs->at_lower_bound = a;
7349 break;
7350 case DW_AT_mutable:
7351 attrs->at_mutable = a;
7352 break;
7353 case DW_AT_ordering:
7354 attrs->at_ordering = a;
7355 break;
7356 case DW_AT_picture_string:
7357 attrs->at_picture_string = a;
7358 break;
7359 case DW_AT_prototyped:
7360 attrs->at_prototyped = a;
7361 break;
7362 case DW_AT_small:
7363 attrs->at_small = a;
7364 break;
7365 case DW_AT_segment:
7366 attrs->at_segment = a;
7367 break;
7368 case DW_AT_string_length:
7369 attrs->at_string_length = a;
7370 break;
7371 case DW_AT_string_length_bit_size:
7372 attrs->at_string_length_bit_size = a;
7373 break;
7374 case DW_AT_string_length_byte_size:
7375 attrs->at_string_length_byte_size = a;
7376 break;
7377 case DW_AT_threads_scaled:
7378 attrs->at_threads_scaled = a;
7379 break;
7380 case DW_AT_upper_bound:
7381 attrs->at_upper_bound = a;
7382 break;
7383 case DW_AT_use_location:
7384 attrs->at_use_location = a;
7385 break;
7386 case DW_AT_use_UTF8:
7387 attrs->at_use_UTF8 = a;
7388 break;
7389 case DW_AT_variable_parameter:
7390 attrs->at_variable_parameter = a;
7391 break;
7392 case DW_AT_virtuality:
7393 attrs->at_virtuality = a;
7394 break;
7395 case DW_AT_visibility:
7396 attrs->at_visibility = a;
7397 break;
7398 case DW_AT_vtable_elem_location:
7399 attrs->at_vtable_elem_location = a;
7400 break;
7401 default:
7402 break;
7403 }
7404 }
7405 }
7406
7407 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7408
7409 static void
7410 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7411 {
7412 dw_die_ref c;
7413 dw_die_ref decl;
7414 struct checksum_attributes attrs;
7415
7416 CHECKSUM_ULEB128 ('D');
7417 CHECKSUM_ULEB128 (die->die_tag);
7418
7419 memset (&attrs, 0, sizeof (attrs));
7420
7421 decl = get_AT_ref (die, DW_AT_specification);
7422 if (decl != NULL)
7423 collect_checksum_attributes (&attrs, decl);
7424 collect_checksum_attributes (&attrs, die);
7425
7426 CHECKSUM_ATTR (attrs.at_name);
7427 CHECKSUM_ATTR (attrs.at_accessibility);
7428 CHECKSUM_ATTR (attrs.at_address_class);
7429 CHECKSUM_ATTR (attrs.at_allocated);
7430 CHECKSUM_ATTR (attrs.at_artificial);
7431 CHECKSUM_ATTR (attrs.at_associated);
7432 CHECKSUM_ATTR (attrs.at_binary_scale);
7433 CHECKSUM_ATTR (attrs.at_bit_offset);
7434 CHECKSUM_ATTR (attrs.at_bit_size);
7435 CHECKSUM_ATTR (attrs.at_bit_stride);
7436 CHECKSUM_ATTR (attrs.at_byte_size);
7437 CHECKSUM_ATTR (attrs.at_byte_stride);
7438 CHECKSUM_ATTR (attrs.at_const_value);
7439 CHECKSUM_ATTR (attrs.at_containing_type);
7440 CHECKSUM_ATTR (attrs.at_count);
7441 CHECKSUM_ATTR (attrs.at_data_location);
7442 CHECKSUM_ATTR (attrs.at_data_member_location);
7443 CHECKSUM_ATTR (attrs.at_decimal_scale);
7444 CHECKSUM_ATTR (attrs.at_decimal_sign);
7445 CHECKSUM_ATTR (attrs.at_default_value);
7446 CHECKSUM_ATTR (attrs.at_digit_count);
7447 CHECKSUM_ATTR (attrs.at_discr);
7448 CHECKSUM_ATTR (attrs.at_discr_list);
7449 CHECKSUM_ATTR (attrs.at_discr_value);
7450 CHECKSUM_ATTR (attrs.at_encoding);
7451 CHECKSUM_ATTR (attrs.at_endianity);
7452 CHECKSUM_ATTR (attrs.at_explicit);
7453 CHECKSUM_ATTR (attrs.at_is_optional);
7454 CHECKSUM_ATTR (attrs.at_location);
7455 CHECKSUM_ATTR (attrs.at_lower_bound);
7456 CHECKSUM_ATTR (attrs.at_mutable);
7457 CHECKSUM_ATTR (attrs.at_ordering);
7458 CHECKSUM_ATTR (attrs.at_picture_string);
7459 CHECKSUM_ATTR (attrs.at_prototyped);
7460 CHECKSUM_ATTR (attrs.at_small);
7461 CHECKSUM_ATTR (attrs.at_segment);
7462 CHECKSUM_ATTR (attrs.at_string_length);
7463 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7464 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7465 CHECKSUM_ATTR (attrs.at_threads_scaled);
7466 CHECKSUM_ATTR (attrs.at_upper_bound);
7467 CHECKSUM_ATTR (attrs.at_use_location);
7468 CHECKSUM_ATTR (attrs.at_use_UTF8);
7469 CHECKSUM_ATTR (attrs.at_variable_parameter);
7470 CHECKSUM_ATTR (attrs.at_virtuality);
7471 CHECKSUM_ATTR (attrs.at_visibility);
7472 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7473 CHECKSUM_ATTR (attrs.at_type);
7474 CHECKSUM_ATTR (attrs.at_friend);
7475 CHECKSUM_ATTR (attrs.at_alignment);
7476
7477 /* Checksum the child DIEs. */
7478 c = die->die_child;
7479 if (c) do {
7480 dw_attr_node *name_attr;
7481
7482 c = c->die_sib;
7483 name_attr = get_AT (c, DW_AT_name);
7484 if (is_template_instantiation (c))
7485 {
7486 /* Ignore instantiations of member type and function templates. */
7487 }
7488 else if (name_attr != NULL
7489 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7490 {
7491 /* Use a shallow checksum for named nested types and member
7492 functions. */
7493 CHECKSUM_ULEB128 ('S');
7494 CHECKSUM_ULEB128 (c->die_tag);
7495 CHECKSUM_STRING (AT_string (name_attr));
7496 }
7497 else
7498 {
7499 /* Use a deep checksum for other children. */
7500 /* Mark this DIE so it gets processed when unmarking. */
7501 if (c->die_mark == 0)
7502 c->die_mark = -1;
7503 die_checksum_ordered (c, ctx, mark);
7504 }
7505 } while (c != die->die_child);
7506
7507 CHECKSUM_ULEB128 (0);
7508 }
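/* Example (editorial sketch): in byte terms, a named member function child
   such as "size" only contributes a shallow record to the checksum above,
   followed eventually by the end-of-children marker:  */
#if 0
static void
shallow_child_checksum_example (struct md5_ctx *ctx)
{
  CHECKSUM_ULEB128 ('S');
  CHECKSUM_ULEB128 (DW_TAG_subprogram);
  CHECKSUM_STRING ("size");
  /* ... other children ...  */
  CHECKSUM_ULEB128 (0);	/* End of children.  */
}
#endif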
7509
7510 /* Add a type name and tag to a hash. */
7511 static void
7512 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7513 {
7514 CHECKSUM_ULEB128 (tag);
7515 CHECKSUM_STRING (name);
7516 }
7517
7518 #undef CHECKSUM
7519 #undef CHECKSUM_STRING
7520 #undef CHECKSUM_ATTR
7521 #undef CHECKSUM_SLEB128
7522 #undef CHECKSUM_ULEB128
7523
7524 /* Generate the type signature for DIE. This is computed by generating an
7525 MD5 checksum over the DIE's tag, its relevant attributes, and its
7526 children. Attributes that are references to other DIEs are processed
7527 by recursion, using the MARK field to prevent infinite recursion.
7528 If the DIE is nested inside a namespace or another type, we also
7529 need to include that context in the signature. The lower 64 bits
7530 of the resulting MD5 checksum comprise the signature. */
7531
7532 static void
7533 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7534 {
7535 int mark;
7536 const char *name;
7537 unsigned char checksum[16];
7538 struct md5_ctx ctx;
7539 dw_die_ref decl;
7540 dw_die_ref parent;
7541
7542 name = get_AT_string (die, DW_AT_name);
7543 decl = get_AT_ref (die, DW_AT_specification);
7544 parent = get_die_parent (die);
7545
7546 /* First, compute a signature for just the type name (and its surrounding
7547 context, if any). This is stored in the type unit DIE for link-time
7548 ODR (one-definition rule) checking. */
7549
7550 if (is_cxx () && name != NULL)
7551 {
7552 md5_init_ctx (&ctx);
7553
7554 /* Checksum the names of surrounding namespaces and structures. */
7555 if (parent != NULL)
7556 checksum_die_context (parent, &ctx);
7557
7558 /* Checksum the current DIE. */
7559 die_odr_checksum (die->die_tag, name, &ctx);
7560 md5_finish_ctx (&ctx, checksum);
7561
7562 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7563 }
7564
7565 /* Next, compute the complete type signature. */
7566
7567 md5_init_ctx (&ctx);
7568 mark = 1;
7569 die->die_mark = mark;
7570
7571 /* Checksum the names of surrounding namespaces and structures. */
7572 if (parent != NULL)
7573 checksum_die_context (parent, &ctx);
7574
7575 /* Checksum the DIE and its children. */
7576 die_checksum_ordered (die, &ctx, &mark);
7577 unmark_all_dies (die);
7578 md5_finish_ctx (&ctx, checksum);
7579
7580 /* Store the signature in the type node and link the type DIE and the
7581 type node together. */
7582 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7583 DWARF_TYPE_SIGNATURE_SIZE);
7584 die->comdat_type_p = true;
7585 die->die_id.die_type_node = type_node;
7586 type_node->type_die = die;
7587
7588 /* If the DIE is a specification, link its declaration to the type node
7589 as well. */
7590 if (decl != NULL)
7591 {
7592 decl->comdat_type_p = true;
7593 decl->die_id.die_type_node = type_node;
7594 }
7595 }
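/* Layout sketch (editorial addition, assuming DWARF_TYPE_SIGNATURE_SIZE is 8):
   the 16-byte MD5 digest computed above is used twice -- bytes [8..15] become
   the DW_AT_GNU_odr_signature data8 value for the name-only hash, and the
   trailing DWARF_TYPE_SIGNATURE_SIZE bytes of the full hash become the comdat
   type signature, per the memcpy above:  */
#if 0
static void
type_signature_layout_example (void)
{
  unsigned char checksum[16];		/* As filled in by md5_finish_ctx.  */
  unsigned char signature[DWARF_TYPE_SIGNATURE_SIZE];

  memset (checksum, 0, sizeof (checksum));
  memcpy (signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
	  DWARF_TYPE_SIGNATURE_SIZE);
}
#endif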
7596
7597 /* Do the location expressions look the same? */
7598 static inline int
7599 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7600 {
7601 return loc1->dw_loc_opc == loc2->dw_loc_opc
7602 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7603 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7604 }
7605
7606 /* Do the values look the same? */
7607 static int
7608 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7609 {
7610 dw_loc_descr_ref loc1, loc2;
7611 rtx r1, r2;
7612
7613 if (v1->val_class != v2->val_class)
7614 return 0;
7615
7616 switch (v1->val_class)
7617 {
7618 case dw_val_class_const:
7619 case dw_val_class_const_implicit:
7620 return v1->v.val_int == v2->v.val_int;
7621 case dw_val_class_unsigned_const:
7622 case dw_val_class_unsigned_const_implicit:
7623 return v1->v.val_unsigned == v2->v.val_unsigned;
7624 case dw_val_class_const_double:
7625 return v1->v.val_double.high == v2->v.val_double.high
7626 && v1->v.val_double.low == v2->v.val_double.low;
7627 case dw_val_class_wide_int:
7628 return *v1->v.val_wide == *v2->v.val_wide;
7629 case dw_val_class_vec:
7630 if (v1->v.val_vec.length != v2->v.val_vec.length
7631 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7632 return 0;
7633 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7634 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7635 return 0;
7636 return 1;
7637 case dw_val_class_flag:
7638 return v1->v.val_flag == v2->v.val_flag;
7639 case dw_val_class_str:
7640 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7641
7642 case dw_val_class_addr:
7643 r1 = v1->v.val_addr;
7644 r2 = v2->v.val_addr;
7645 if (GET_CODE (r1) != GET_CODE (r2))
7646 return 0;
7647 return rtx_equal_p (r1, r2);
7648
7649 case dw_val_class_offset:
7650 return v1->v.val_offset == v2->v.val_offset;
7651
7652 case dw_val_class_loc:
7653 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7654 loc1 && loc2;
7655 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7656 if (!same_loc_p (loc1, loc2, mark))
7657 return 0;
7658 return !loc1 && !loc2;
7659
7660 case dw_val_class_die_ref:
7661 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7662
7663 case dw_val_class_symview:
7664 return strcmp (v1->v.val_symbolic_view, v2->v.val_symbolic_view) == 0;
7665
7666 case dw_val_class_fde_ref:
7667 case dw_val_class_vms_delta:
7668 case dw_val_class_lbl_id:
7669 case dw_val_class_lineptr:
7670 case dw_val_class_macptr:
7671 case dw_val_class_loclistsptr:
7672 case dw_val_class_high_pc:
7673 return 1;
7674
7675 case dw_val_class_file:
7676 case dw_val_class_file_implicit:
7677 return v1->v.val_file == v2->v.val_file;
7678
7679 case dw_val_class_data8:
7680 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7681
7682 default:
7683 return 1;
7684 }
7685 }
7686
7687 /* Do the attributes look the same? */
7688
7689 static int
7690 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7691 {
7692 if (at1->dw_attr != at2->dw_attr)
7693 return 0;
7694
7695 /* We don't care that this was compiled with a different compiler
7696 snapshot; if the output is the same, that's what matters. */
7697 if (at1->dw_attr == DW_AT_producer)
7698 return 1;
7699
7700 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7701 }
7702
7703 /* Do the DIEs look the same? */
7704
7705 static int
7706 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7707 {
7708 dw_die_ref c1, c2;
7709 dw_attr_node *a1;
7710 unsigned ix;
7711
7712 /* To avoid infinite recursion. */
7713 if (die1->die_mark)
7714 return die1->die_mark == die2->die_mark;
7715 die1->die_mark = die2->die_mark = ++(*mark);
7716
7717 if (die1->die_tag != die2->die_tag)
7718 return 0;
7719
7720 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7721 return 0;
7722
7723 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7724 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7725 return 0;
7726
7727 c1 = die1->die_child;
7728 c2 = die2->die_child;
7729 if (! c1)
7730 {
7731 if (c2)
7732 return 0;
7733 }
7734 else
7735 for (;;)
7736 {
7737 if (!same_die_p (c1, c2, mark))
7738 return 0;
7739 c1 = c1->die_sib;
7740 c2 = c2->die_sib;
7741 if (c1 == die1->die_child)
7742 {
7743 if (c2 == die2->die_child)
7744 break;
7745 else
7746 return 0;
7747 }
7748 }
7749
7750 return 1;
7751 }
7752
7753 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7754 children, and set die_symbol. */
7755
7756 static void
7757 compute_comp_unit_symbol (dw_die_ref unit_die)
7758 {
7759 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7760 const char *base = die_name ? lbasename (die_name) : "anonymous";
7761 char *name = XALLOCAVEC (char, strlen (base) + 64);
7762 char *p;
7763 int i, mark;
7764 unsigned char checksum[16];
7765 struct md5_ctx ctx;
7766
7767 /* Compute the checksum of the DIE, then append part of it as hex digits to
7768 the name of the unit. */
7769
7770 md5_init_ctx (&ctx);
7771 mark = 0;
7772 die_checksum (unit_die, &ctx, &mark);
7773 unmark_all_dies (unit_die);
7774 md5_finish_ctx (&ctx, checksum);
7775
7776 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7777 not start with a letter but with anything valid for filenames and
7778 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7779 character is not a letter. */
7780 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7781 clean_symbol_name (name);
7782
7783 p = name + strlen (name);
7784 for (i = 0; i < 4; i++)
7785 {
7786 sprintf (p, "%.2x", checksum[i]);
7787 p += 2;
7788 }
7789
7790 unit_die->die_id.die_symbol = xstrdup (name);
7791 }
7792
7793 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7794
7795 static int
7796 is_type_die (dw_die_ref die)
7797 {
7798 switch (die->die_tag)
7799 {
7800 case DW_TAG_array_type:
7801 case DW_TAG_class_type:
7802 case DW_TAG_interface_type:
7803 case DW_TAG_enumeration_type:
7804 case DW_TAG_pointer_type:
7805 case DW_TAG_reference_type:
7806 case DW_TAG_rvalue_reference_type:
7807 case DW_TAG_string_type:
7808 case DW_TAG_structure_type:
7809 case DW_TAG_subroutine_type:
7810 case DW_TAG_union_type:
7811 case DW_TAG_ptr_to_member_type:
7812 case DW_TAG_set_type:
7813 case DW_TAG_subrange_type:
7814 case DW_TAG_base_type:
7815 case DW_TAG_const_type:
7816 case DW_TAG_file_type:
7817 case DW_TAG_packed_type:
7818 case DW_TAG_volatile_type:
7819 case DW_TAG_typedef:
7820 return 1;
7821 default:
7822 return 0;
7823 }
7824 }
7825
7826 /* Returns true iff C is a compile-unit DIE. */
7827
7828 static inline bool
7829 is_cu_die (dw_die_ref c)
7830 {
7831 return c && (c->die_tag == DW_TAG_compile_unit
7832 || c->die_tag == DW_TAG_skeleton_unit);
7833 }
7834
7835 /* Returns true iff C is a unit DIE of some sort. */
7836
7837 static inline bool
7838 is_unit_die (dw_die_ref c)
7839 {
7840 return c && (c->die_tag == DW_TAG_compile_unit
7841 || c->die_tag == DW_TAG_partial_unit
7842 || c->die_tag == DW_TAG_type_unit
7843 || c->die_tag == DW_TAG_skeleton_unit);
7844 }
7845
7846 /* Returns true iff C is a namespace DIE. */
7847
7848 static inline bool
7849 is_namespace_die (dw_die_ref c)
7850 {
7851 return c && c->die_tag == DW_TAG_namespace;
7852 }
7853
7854 /* Return non-zero if this DIE is a template parameter. */
7855
7856 static inline bool
7857 is_template_parameter (dw_die_ref die)
7858 {
7859 switch (die->die_tag)
7860 {
7861 case DW_TAG_template_type_param:
7862 case DW_TAG_template_value_param:
7863 case DW_TAG_GNU_template_template_param:
7864 case DW_TAG_GNU_template_parameter_pack:
7865 return true;
7866 default:
7867 return false;
7868 }
7869 }
7870
7871 /* Return non-zero if this DIE represents a template instantiation. */
7872
7873 static inline bool
7874 is_template_instantiation (dw_die_ref die)
7875 {
7876 dw_die_ref c;
7877
7878 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7879 return false;
7880 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7881 return false;
7882 }
7883
7884 static char *
7885 gen_internal_sym (const char *prefix)
7886 {
7887 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7888
7889 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7890 return xstrdup (buf);
7891 }
7892
7893 /* Return non-zero if this DIE is a declaration. */
7894
7895 static int
7896 is_declaration_die (dw_die_ref die)
7897 {
7898 dw_attr_node *a;
7899 unsigned ix;
7900
7901 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7902 if (a->dw_attr == DW_AT_declaration)
7903 return 1;
7904
7905 return 0;
7906 }
7907
7908 /* Return non-zero if this DIE is nested inside a subprogram. */
7909
7910 static int
7911 is_nested_in_subprogram (dw_die_ref die)
7912 {
7913 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
7914
7915 if (decl == NULL)
7916 decl = die;
7917 return local_scope_p (decl);
7918 }
7919
7920 /* Return non-zero if this DIE contains a defining declaration of a
7921 subprogram. */
7922
7923 static int
7924 contains_subprogram_definition (dw_die_ref die)
7925 {
7926 dw_die_ref c;
7927
7928 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
7929 return 1;
7930 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
7931 return 0;
7932 }
7933
7934 /* Return non-zero if this is a type DIE that should be moved to a
7935 COMDAT .debug_types section or .debug_info section with DW_UT_*type
7936 unit type. */
7937
7938 static int
7939 should_move_die_to_comdat (dw_die_ref die)
7940 {
7941 switch (die->die_tag)
7942 {
7943 case DW_TAG_class_type:
7944 case DW_TAG_structure_type:
7945 case DW_TAG_enumeration_type:
7946 case DW_TAG_union_type:
7947 /* Don't move declarations, inlined instances, types nested in a
7948 subprogram, or types that contain subprogram definitions. */
7949 if (is_declaration_die (die)
7950 || get_AT (die, DW_AT_abstract_origin)
7951 || is_nested_in_subprogram (die)
7952 || contains_subprogram_definition (die))
7953 return 0;
7954 return 1;
7955 case DW_TAG_array_type:
7956 case DW_TAG_interface_type:
7957 case DW_TAG_pointer_type:
7958 case DW_TAG_reference_type:
7959 case DW_TAG_rvalue_reference_type:
7960 case DW_TAG_string_type:
7961 case DW_TAG_subroutine_type:
7962 case DW_TAG_ptr_to_member_type:
7963 case DW_TAG_set_type:
7964 case DW_TAG_subrange_type:
7965 case DW_TAG_base_type:
7966 case DW_TAG_const_type:
7967 case DW_TAG_file_type:
7968 case DW_TAG_packed_type:
7969 case DW_TAG_volatile_type:
7970 case DW_TAG_typedef:
7971 default:
7972 return 0;
7973 }
7974 }
7975
7976 /* Make a clone of DIE. */
7977
7978 static dw_die_ref
7979 clone_die (dw_die_ref die)
7980 {
7981 dw_die_ref clone = new_die_raw (die->die_tag);
7982 dw_attr_node *a;
7983 unsigned ix;
7984
7985 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7986 add_dwarf_attr (clone, a);
7987
7988 return clone;
7989 }
7990
7991 /* Make a clone of the tree rooted at DIE. */
7992
7993 static dw_die_ref
7994 clone_tree (dw_die_ref die)
7995 {
7996 dw_die_ref c;
7997 dw_die_ref clone = clone_die (die);
7998
7999 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8000
8001 return clone;
8002 }
8003
8004 /* Make a clone of DIE as a declaration. */
8005
8006 static dw_die_ref
8007 clone_as_declaration (dw_die_ref die)
8008 {
8009 dw_die_ref clone;
8010 dw_die_ref decl;
8011 dw_attr_node *a;
8012 unsigned ix;
8013
8014 /* If the DIE is already a declaration, just clone it. */
8015 if (is_declaration_die (die))
8016 return clone_die (die);
8017
8018 /* If the DIE is a specification, just clone its declaration DIE. */
8019 decl = get_AT_ref (die, DW_AT_specification);
8020 if (decl != NULL)
8021 {
8022 clone = clone_die (decl);
8023 if (die->comdat_type_p)
8024 add_AT_die_ref (clone, DW_AT_signature, die);
8025 return clone;
8026 }
8027
8028 clone = new_die_raw (die->die_tag);
8029
8030 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8031 {
8032 /* We don't want to copy over all attributes.
8033 For example we don't want DW_AT_byte_size because otherwise we will no
8034 longer have a declaration and GDB will treat it as a definition. */
8035
8036 switch (a->dw_attr)
8037 {
8038 case DW_AT_abstract_origin:
8039 case DW_AT_artificial:
8040 case DW_AT_containing_type:
8041 case DW_AT_external:
8042 case DW_AT_name:
8043 case DW_AT_type:
8044 case DW_AT_virtuality:
8045 case DW_AT_linkage_name:
8046 case DW_AT_MIPS_linkage_name:
8047 add_dwarf_attr (clone, a);
8048 break;
8049 case DW_AT_byte_size:
8050 case DW_AT_alignment:
8051 default:
8052 break;
8053 }
8054 }
8055
8056 if (die->comdat_type_p)
8057 add_AT_die_ref (clone, DW_AT_signature, die);
8058
8059 add_AT_flag (clone, DW_AT_declaration, 1);
8060 return clone;
8061 }
8062
8063
8064 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8065
8066 struct decl_table_entry
8067 {
8068 dw_die_ref orig;
8069 dw_die_ref copy;
8070 };
8071
8072 /* Helpers to manipulate hash table of copied declarations. */
8073
8074 /* Hashtable helpers. */
8075
8076 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8077 {
8078 typedef die_struct *compare_type;
8079 static inline hashval_t hash (const decl_table_entry *);
8080 static inline bool equal (const decl_table_entry *, const die_struct *);
8081 };
8082
8083 inline hashval_t
8084 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8085 {
8086 return htab_hash_pointer (entry->orig);
8087 }
8088
8089 inline bool
8090 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8091 const die_struct *entry2)
8092 {
8093 return entry1->orig == entry2;
8094 }
8095
8096 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8097
8098 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8099 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8100 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8101 to check if the ancestor has already been copied into UNIT. */
8102
8103 static dw_die_ref
8104 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8105 decl_hash_type *decl_table)
8106 {
8107 dw_die_ref parent = die->die_parent;
8108 dw_die_ref new_parent = unit;
8109 dw_die_ref copy;
8110 decl_table_entry **slot = NULL;
8111 struct decl_table_entry *entry = NULL;
8112
8113 if (decl_table)
8114 {
8115 /* Check if the entry has already been copied to UNIT. */
8116 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8117 INSERT);
8118 if (*slot != HTAB_EMPTY_ENTRY)
8119 {
8120 entry = *slot;
8121 return entry->copy;
8122 }
8123
8124 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8125 entry = XCNEW (struct decl_table_entry);
8126 entry->orig = die;
8127 entry->copy = NULL;
8128 *slot = entry;
8129 }
8130
8131 if (parent != NULL)
8132 {
8133 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8134 if (spec != NULL)
8135 parent = spec;
8136 if (!is_unit_die (parent))
8137 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8138 }
8139
8140 copy = clone_as_declaration (die);
8141 add_child_die (new_parent, copy);
8142
8143 if (decl_table)
8144 {
8145 /* Record the pointer to the copy. */
8146 entry->copy = copy;
8147 }
8148
8149 return copy;
8150 }
8151 /* Copy the declaration context to the new type unit DIE. This includes
8152 any surrounding namespace or type declarations. If the DIE has an
8153 AT_specification attribute, it also includes attributes and children
8154 attached to the specification, and returns a pointer to the original
8155 parent of the declaration DIE. Returns NULL otherwise. */
8156
8157 static dw_die_ref
8158 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8159 {
8160 dw_die_ref decl;
8161 dw_die_ref new_decl;
8162 dw_die_ref orig_parent = NULL;
8163
8164 decl = get_AT_ref (die, DW_AT_specification);
8165 if (decl == NULL)
8166 decl = die;
8167 else
8168 {
8169 unsigned ix;
8170 dw_die_ref c;
8171 dw_attr_node *a;
8172
8173 /* The original DIE will be changed to a declaration, and must
8174 be moved to be a child of the original declaration DIE. */
8175 orig_parent = decl->die_parent;
8176
8177 /* Copy the type node pointer from the new DIE to the original
8178 declaration DIE so we can forward references later. */
8179 decl->comdat_type_p = true;
8180 decl->die_id.die_type_node = die->die_id.die_type_node;
8181
8182 remove_AT (die, DW_AT_specification);
8183
8184 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8185 {
8186 if (a->dw_attr != DW_AT_name
8187 && a->dw_attr != DW_AT_declaration
8188 && a->dw_attr != DW_AT_external)
8189 add_dwarf_attr (die, a);
8190 }
8191
8192 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8193 }
8194
8195 if (decl->die_parent != NULL
8196 && !is_unit_die (decl->die_parent))
8197 {
8198 new_decl = copy_ancestor_tree (unit, decl, NULL);
8199 if (new_decl != NULL)
8200 {
8201 remove_AT (new_decl, DW_AT_signature);
8202 add_AT_specification (die, new_decl);
8203 }
8204 }
8205
8206 return orig_parent;
8207 }
8208
8209 /* Generate the skeleton ancestor tree for the given NODE, then clone
8210 the DIE and add the clone into the tree. */
8211
8212 static void
8213 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8214 {
8215 if (node->new_die != NULL)
8216 return;
8217
8218 node->new_die = clone_as_declaration (node->old_die);
8219
8220 if (node->parent != NULL)
8221 {
8222 generate_skeleton_ancestor_tree (node->parent);
8223 add_child_die (node->parent->new_die, node->new_die);
8224 }
8225 }
8226
8227 /* Generate a skeleton tree of DIEs containing any declarations that are
8228 found in the original tree. We traverse the tree looking for declaration
8229 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8230
8231 static void
8232 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8233 {
8234 skeleton_chain_node node;
8235 dw_die_ref c;
8236 dw_die_ref first;
8237 dw_die_ref prev = NULL;
8238 dw_die_ref next = NULL;
8239
8240 node.parent = parent;
8241
8242 first = c = parent->old_die->die_child;
8243 if (c)
8244 next = c->die_sib;
8245 if (c) do {
8246 if (prev == NULL || prev->die_sib == c)
8247 prev = c;
8248 c = next;
8249 next = (c == first ? NULL : c->die_sib);
8250 node.old_die = c;
8251 node.new_die = NULL;
8252 if (is_declaration_die (c))
8253 {
8254 if (is_template_instantiation (c))
8255 {
8256 /* Instantiated templates do not need to be cloned into the
8257 type unit. Just move the DIE and its children back to
8258 the skeleton tree (in the main CU). */
8259 remove_child_with_prev (c, prev);
8260 add_child_die (parent->new_die, c);
8261 c = prev;
8262 }
8263 else if (c->comdat_type_p)
8264 {
8265 /* This is the skeleton of a type broken out earlier by
8266 break_out_comdat_types. Clone the existing DIE, but keep the children
8267 under the original (which is in the main CU). */
8268 dw_die_ref clone = clone_die (c);
8269
8270 replace_child (c, clone, prev);
8271 generate_skeleton_ancestor_tree (parent);
8272 add_child_die (parent->new_die, c);
8273 c = clone;
8274 continue;
8275 }
8276 else
8277 {
8278 /* Clone the existing DIE, move the original to the skeleton
8279 tree (which is in the main CU), and put the clone, with
8280 all the original's children, where the original came from
8281 (which is about to be moved to the type unit). */
8282 dw_die_ref clone = clone_die (c);
8283 move_all_children (c, clone);
8284
8285 /* If the original has a DW_AT_object_pointer attribute,
8286 it would now point to a child DIE just moved to the
8287 cloned tree, so we need to remove that attribute from
8288 the original. */
8289 remove_AT (c, DW_AT_object_pointer);
8290
8291 replace_child (c, clone, prev);
8292 generate_skeleton_ancestor_tree (parent);
8293 add_child_die (parent->new_die, c);
8294 node.old_die = clone;
8295 node.new_die = c;
8296 c = clone;
8297 }
8298 }
8299 generate_skeleton_bottom_up (&node);
8300 } while (next != NULL);
8301 }
8302
8303 /* Wrapper function for generate_skeleton_bottom_up. */
8304
8305 static dw_die_ref
8306 generate_skeleton (dw_die_ref die)
8307 {
8308 skeleton_chain_node node;
8309
8310 node.old_die = die;
8311 node.new_die = NULL;
8312 node.parent = NULL;
8313
8314 /* If this type definition is nested inside another type,
8315 and is not an instantiation of a template, always leave
8316 at least a declaration in its place. */
8317 if (die->die_parent != NULL
8318 && is_type_die (die->die_parent)
8319 && !is_template_instantiation (die))
8320 node.new_die = clone_as_declaration (die);
8321
8322 generate_skeleton_bottom_up (&node);
8323 return node.new_die;
8324 }
8325
8326 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8327 declaration. The original DIE is moved to a new compile unit so that
8328 existing references to it follow it to the new location. If any of the
8329 original DIE's descendants is a declaration, we need to replace the
8330 original DIE with a skeleton tree and move the declarations back into the
8331 skeleton tree. */
8332
8333 static dw_die_ref
8334 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8335 dw_die_ref prev)
8336 {
8337 dw_die_ref skeleton, orig_parent;
8338
8339 /* Copy the declaration context to the type unit DIE. If the returned
8340 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8341 that DIE. */
8342 orig_parent = copy_declaration_context (unit, child);
8343
8344 skeleton = generate_skeleton (child);
8345 if (skeleton == NULL)
8346 remove_child_with_prev (child, prev);
8347 else
8348 {
8349 skeleton->comdat_type_p = true;
8350 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8351
8352 /* If the original DIE was a specification, we need to put
8353 the skeleton under the parent DIE of the declaration.
8354 This leaves the original declaration in the tree, but
8355 it will be pruned later since there are no longer any
8356 references to it. */
8357 if (orig_parent != NULL)
8358 {
8359 remove_child_with_prev (child, prev);
8360 add_child_die (orig_parent, skeleton);
8361 }
8362 else
8363 replace_child (child, skeleton, prev);
8364 }
8365
8366 return skeleton;
8367 }
8368
8369 static void
8370 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8371 comdat_type_node *type_node,
8372 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8373
8374 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8375 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8376 DWARF procedure references in the DW_AT_location attribute. */
8377
8378 static dw_die_ref
8379 copy_dwarf_procedure (dw_die_ref die,
8380 comdat_type_node *type_node,
8381 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8382 {
8383 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8384
8385 /* DWARF procedures are not supposed to have children... */
8386 gcc_assert (die->die_child == NULL);
8387
8388 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8389 gcc_assert (vec_safe_length (die->die_attr) == 1
8390 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8391
8392 /* Do not copy DWARF procedures more than once. */
8393 bool existed;
8394 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8395 if (existed)
8396 return die_copy;
8397
8398 die_copy = clone_die (die);
8399 add_child_die (type_node->root_die, die_copy);
8400 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8401 return die_copy;
8402 }
8403
8404 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8405 procedures in DIE's attributes. */
8406
8407 static void
8408 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8409 comdat_type_node *type_node,
8410 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8411 {
8412 dw_attr_node *a;
8413 unsigned i;
8414
8415 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8416 {
8417 dw_loc_descr_ref loc;
8418
8419 if (a->dw_attr_val.val_class != dw_val_class_loc)
8420 continue;
8421
8422 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8423 {
8424 switch (loc->dw_loc_opc)
8425 {
8426 case DW_OP_call2:
8427 case DW_OP_call4:
8428 case DW_OP_call_ref:
8429 gcc_assert (loc->dw_loc_oprnd1.val_class
8430 == dw_val_class_die_ref);
8431 loc->dw_loc_oprnd1.v.val_die_ref.die
8432 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8433 type_node,
8434 copied_dwarf_procs);
8435
8436 default:
8437 break;
8438 }
8439 }
8440 }
8441 }
8442
8443 /* Copy DWARF procedures that are referenced by the DIE tree to TREE_NODE and
8444 rewrite references to point to the copies.
8445
8446 References are looked for in DIE's attributes and recursively in all its
8447 children's attributes that are location descriptions. COPIED_DWARF_PROCS is a
8448 mapping from old DWARF procedures to their copies. It is used to avoid
8449 copying the same DWARF procedure twice under TYPE_NODE. */
8450
8451 static void
8452 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8453 comdat_type_node *type_node,
8454 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8455 {
8456 dw_die_ref c;
8457
8458 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8459 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8460 type_node,
8461 copied_dwarf_procs));
8462 }
8463
8464 /* Traverse the DIE and set up additional .debug_types or .debug_info
8465 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8466 section. */
8467
8468 static void
8469 break_out_comdat_types (dw_die_ref die)
8470 {
8471 dw_die_ref c;
8472 dw_die_ref first;
8473 dw_die_ref prev = NULL;
8474 dw_die_ref next = NULL;
8475 dw_die_ref unit = NULL;
8476
8477 first = c = die->die_child;
8478 if (c)
8479 next = c->die_sib;
8480 if (c) do {
8481 if (prev == NULL || prev->die_sib == c)
8482 prev = c;
8483 c = next;
8484 next = (c == first ? NULL : c->die_sib);
8485 if (should_move_die_to_comdat (c))
8486 {
8487 dw_die_ref replacement;
8488 comdat_type_node *type_node;
8489
8490 /* Break out nested types into their own type units. */
8491 break_out_comdat_types (c);
8492
8493 /* Create a new type unit DIE as the root for the new tree, and
8494 add it to the list of comdat types. */
8495 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8496 add_AT_unsigned (unit, DW_AT_language,
8497 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8498 type_node = ggc_cleared_alloc<comdat_type_node> ();
8499 type_node->root_die = unit;
8500 type_node->next = comdat_type_list;
8501 comdat_type_list = type_node;
8502
8503 /* Generate the type signature. */
8504 generate_type_signature (c, type_node);
8505
8506 /* Copy the declaration context, attributes, and children of the
8507 declaration into the new type unit DIE, then remove this DIE
8508 from the main CU (or replace it with a skeleton if necessary). */
8509 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8510 type_node->skeleton_die = replacement;
8511
8512 /* Add the DIE to the new compunit. */
8513 add_child_die (unit, c);
8514
8515 /* Types can reference DWARF procedures for type size or data location
8516 expressions. Calls in DWARF expressions cannot target procedures
8517 that are not in the same section. So we must copy DWARF procedures
8518 along with this type and then rewrite references to them. */
8519 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8520 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8521
8522 if (replacement != NULL)
8523 c = replacement;
8524 }
8525 else if (c->die_tag == DW_TAG_namespace
8526 || c->die_tag == DW_TAG_class_type
8527 || c->die_tag == DW_TAG_structure_type
8528 || c->die_tag == DW_TAG_union_type)
8529 {
8530 /* Look for nested types that can be broken out. */
8531 break_out_comdat_types (c);
8532 }
8533 } while (next != NULL);
8534 }
8535
8536 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8537 Enter all the cloned children into the hash table decl_table. */
8538
8539 static dw_die_ref
8540 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8541 {
8542 dw_die_ref c;
8543 dw_die_ref clone;
8544 struct decl_table_entry *entry;
8545 decl_table_entry **slot;
8546
8547 if (die->die_tag == DW_TAG_subprogram)
8548 clone = clone_as_declaration (die);
8549 else
8550 clone = clone_die (die);
8551
8552 slot = decl_table->find_slot_with_hash (die,
8553 htab_hash_pointer (die), INSERT);
8554
8555 /* Assert that DIE isn't in the hash table yet. If it were already there,
8556 its ancestors would necessarily be there as well, and clone_tree_partial
8557 would not have been called. */
8558 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8559
8560 entry = XCNEW (struct decl_table_entry);
8561 entry->orig = die;
8562 entry->copy = clone;
8563 *slot = entry;
8564
8565 if (die->die_tag != DW_TAG_subprogram)
8566 FOR_EACH_CHILD (die, c,
8567 add_child_die (clone, clone_tree_partial (c, decl_table)));
8568
8569 return clone;
8570 }
8571
8572 /* Walk the DIE and its children, looking for references to incomplete
8573 or trivial types that are unmarked (i.e., that are not in the current
8574 type_unit). */
8575
8576 static void
8577 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8578 {
8579 dw_die_ref c;
8580 dw_attr_node *a;
8581 unsigned ix;
8582
8583 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8584 {
8585 if (AT_class (a) == dw_val_class_die_ref)
8586 {
8587 dw_die_ref targ = AT_ref (a);
8588 decl_table_entry **slot;
8589 struct decl_table_entry *entry;
8590
8591 if (targ->die_mark != 0 || targ->comdat_type_p)
8592 continue;
8593
8594 slot = decl_table->find_slot_with_hash (targ,
8595 htab_hash_pointer (targ),
8596 INSERT);
8597
8598 if (*slot != HTAB_EMPTY_ENTRY)
8599 {
8600 /* TARG has already been copied, so we just need to
8601 modify the reference to point to the copy. */
8602 entry = *slot;
8603 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8604 }
8605 else
8606 {
8607 dw_die_ref parent = unit;
8608 dw_die_ref copy = clone_die (targ);
8609
8610 /* Record in DECL_TABLE that TARG has been copied.
8611 Need to do this now, before the recursive call,
8612 because DECL_TABLE may be expanded and SLOT
8613 would no longer be a valid pointer. */
8614 entry = XCNEW (struct decl_table_entry);
8615 entry->orig = targ;
8616 entry->copy = copy;
8617 *slot = entry;
8618
8619 /* If TARG is not a declaration DIE, we need to copy its
8620 children. */
8621 if (!is_declaration_die (targ))
8622 {
8623 FOR_EACH_CHILD (
8624 targ, c,
8625 add_child_die (copy,
8626 clone_tree_partial (c, decl_table)));
8627 }
8628
8629 /* Make sure the cloned tree is marked as part of the
8630 type unit. */
8631 mark_dies (copy);
8632
8633 /* If TARG has surrounding context, copy its ancestor tree
8634 into the new type unit. */
8635 if (targ->die_parent != NULL
8636 && !is_unit_die (targ->die_parent))
8637 parent = copy_ancestor_tree (unit, targ->die_parent,
8638 decl_table);
8639
8640 add_child_die (parent, copy);
8641 a->dw_attr_val.v.val_die_ref.die = copy;
8642
8643 /* Make sure the newly-copied DIE is walked. If it was
8644 installed in a previously-added context, it won't
8645 get visited otherwise. */
8646 if (parent != unit)
8647 {
8648 /* Find the highest point of the newly-added tree,
8649 mark each node along the way, and walk from there. */
8650 parent->die_mark = 1;
8651 while (parent->die_parent
8652 && parent->die_parent->die_mark == 0)
8653 {
8654 parent = parent->die_parent;
8655 parent->die_mark = 1;
8656 }
8657 copy_decls_walk (unit, parent, decl_table);
8658 }
8659 }
8660 }
8661 }
8662
8663 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8664 }
8665
8666 /* Copy declarations for "unworthy" types into the new comdat section.
8667 Incomplete types, modified types, and certain other types aren't broken
8668 out into comdat sections of their own, so they don't have a signature,
8669 and we need to copy the declaration into the same section so that we
8670 don't have an external reference. */
8671
8672 static void
8673 copy_decls_for_unworthy_types (dw_die_ref unit)
8674 {
8675 mark_dies (unit);
8676 decl_hash_type decl_table (10);
8677 copy_decls_walk (unit, unit, &decl_table);
8678 unmark_dies (unit);
8679 }
8680
8681 /* Traverse the DIE and add a sibling attribute if it may have the
8682 effect of speeding up access to siblings. To save some space,
8683 avoid generating sibling attributes for DIEs without children. */
8684
8685 static void
8686 add_sibling_attributes (dw_die_ref die)
8687 {
8688 dw_die_ref c;
8689
8690 if (! die->die_child)
8691 return;
8692
8693 if (die->die_parent && die != die->die_parent->die_child)
8694 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8695
8696 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8697 }
8698
8699 /* Output all location lists for the DIE and its children. */
8700
8701 static void
8702 output_location_lists (dw_die_ref die)
8703 {
8704 dw_die_ref c;
8705 dw_attr_node *a;
8706 unsigned ix;
8707
8708 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8709 if (AT_class (a) == dw_val_class_loc_list)
8710 output_loc_list (AT_loc_list (a));
8711
8712 FOR_EACH_CHILD (die, c, output_location_lists (c));
8713 }
8714
8715 /* During assign_location_list_indexes and output_loclists_offsets this is
8716 the current index; afterwards it is the number of assigned indexes (i.e. how
8717 large the .debug_loclists* offset table should be). */
8718 static unsigned int loc_list_idx;
8719
8720 /* Output all location list offsets for the DIE and its children. */
8721
8722 static void
8723 output_loclists_offsets (dw_die_ref die)
8724 {
8725 dw_die_ref c;
8726 dw_attr_node *a;
8727 unsigned ix;
8728
8729 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8730 if (AT_class (a) == dw_val_class_loc_list)
8731 {
8732 dw_loc_list_ref l = AT_loc_list (a);
8733 if (l->offset_emitted)
8734 continue;
8735 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8736 loc_section_label, NULL);
8737 gcc_assert (l->hash == loc_list_idx);
8738 loc_list_idx++;
8739 l->offset_emitted = true;
8740 }
8741
8742 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8743 }
8744
8745 /* Recursively set indexes of location lists. */
8746
8747 static void
8748 assign_location_list_indexes (dw_die_ref die)
8749 {
8750 dw_die_ref c;
8751 dw_attr_node *a;
8752 unsigned ix;
8753
8754 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8755 if (AT_class (a) == dw_val_class_loc_list)
8756 {
8757 dw_loc_list_ref list = AT_loc_list (a);
8758 if (!list->num_assigned)
8759 {
8760 list->num_assigned = true;
8761 list->hash = loc_list_idx++;
8762 }
8763 }
8764
8765 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8766 }
8767
8768 /* We want to limit the number of external references, because they are
8769 larger than local references: a relocation takes multiple words, and
8770 even a sig8 reference is always eight bytes, whereas a local reference
8771 can be as small as one byte (though DW_FORM_ref is usually 4 in GCC).
8772 So if we encounter multiple external references to the same type DIE, we
8773 make a local typedef stub for it and redirect all references there.
8774
8775 This is the element of the hash table for keeping track of these
8776 references. */
8777
8778 struct external_ref
8779 {
8780 dw_die_ref type;
8781 dw_die_ref stub;
8782 unsigned n_refs;
8783 };
8784
8785 /* Hashtable helpers. */
8786
8787 struct external_ref_hasher : free_ptr_hash <external_ref>
8788 {
8789 static inline hashval_t hash (const external_ref *);
8790 static inline bool equal (const external_ref *, const external_ref *);
8791 };
8792
8793 inline hashval_t
8794 external_ref_hasher::hash (const external_ref *r)
8795 {
8796 dw_die_ref die = r->type;
8797 hashval_t h = 0;
8798
8799 /* We can't use the address of the DIE for hashing, because
8800 that will make the order of the stub DIEs non-deterministic. */
8801 if (! die->comdat_type_p)
8802 /* We have a symbol; use it to compute a hash. */
8803 h = htab_hash_string (die->die_id.die_symbol);
8804 else
8805 {
8806 /* We have a type signature; use a subset of the bits as the hash.
8807 The 8-byte signature is at least as large as hashval_t. */
8808 comdat_type_node *type_node = die->die_id.die_type_node;
8809 memcpy (&h, type_node->signature, sizeof (h));
8810 }
8811 return h;
8812 }
8813
8814 inline bool
8815 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8816 {
8817 return r1->type == r2->type;
8818 }
8819
8820 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8821
8822 /* Return a pointer to the external_ref for references to DIE. */
8823
8824 static struct external_ref *
8825 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8826 {
8827 struct external_ref ref, *ref_p;
8828 external_ref **slot;
8829
8830 ref.type = die;
8831 slot = map->find_slot (&ref, INSERT);
8832 if (*slot != HTAB_EMPTY_ENTRY)
8833 return *slot;
8834
8835 ref_p = XCNEW (struct external_ref);
8836 ref_p->type = die;
8837 *slot = ref_p;
8838 return ref_p;
8839 }
8840
8841 /* Subroutine of optimize_external_refs, below.
8842
8843 If we see a type skeleton, record it as our stub. If we see external
8844 references, remember how many we've seen. */
8845
8846 static void
8847 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8848 {
8849 dw_die_ref c;
8850 dw_attr_node *a;
8851 unsigned ix;
8852 struct external_ref *ref_p;
8853
8854 if (is_type_die (die)
8855 && (c = get_AT_ref (die, DW_AT_signature)))
8856 {
8857 /* This is a local skeleton; use it for local references. */
8858 ref_p = lookup_external_ref (map, c);
8859 ref_p->stub = die;
8860 }
8861
8862 /* Scan the DIE references, and remember any that refer to DIEs from
8863 other CUs (i.e. those which are not marked). */
8864 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8865 if (AT_class (a) == dw_val_class_die_ref
8866 && (c = AT_ref (a))->die_mark == 0
8867 && is_type_die (c))
8868 {
8869 ref_p = lookup_external_ref (map, c);
8870 ref_p->n_refs++;
8871 }
8872
8873 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8874 }
8875
8876 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8877 points to an external_ref, DATA is the CU we're processing. If we don't
8878 already have a local stub, and we have multiple refs, build a stub. */
8879
8880 int
8881 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8882 {
8883 struct external_ref *ref_p = *slot;
8884
8885 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
8886 {
8887 /* We have multiple references to this type, so build a small stub.
8888 Both of these forms are a bit dodgy from the perspective of the
8889 DWARF standard, since technically they should have names. */
8890 dw_die_ref cu = data;
8891 dw_die_ref type = ref_p->type;
8892 dw_die_ref stub = NULL;
8893
8894 if (type->comdat_type_p)
8895 {
8896 /* If we refer to this type via sig8, use AT_signature. */
8897 stub = new_die (type->die_tag, cu, NULL_TREE);
8898 add_AT_die_ref (stub, DW_AT_signature, type);
8899 }
8900 else
8901 {
8902 /* Otherwise, use a typedef with no name. */
8903 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
8904 add_AT_die_ref (stub, DW_AT_type, type);
8905 }
8906
8907 stub->die_mark++;
8908 ref_p->stub = stub;
8909 }
8910 return 1;
8911 }
8912
8913 /* DIE is a unit; look through all the DIE references to see if there are
8914 any external references to types, and if so, create local stubs for
8915 them which will be applied in build_abbrev_table. This is useful because
8916 references to local DIEs are smaller. */
8917
8918 static external_ref_hash_type *
8919 optimize_external_refs (dw_die_ref die)
8920 {
8921 external_ref_hash_type *map = new external_ref_hash_type (10);
8922 optimize_external_refs_1 (die, map);
8923 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
8924 return map;
8925 }
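
/* For instance, if this CU holds several DW_AT_type references to a
   structure that lives in another comdat type unit, the pass above
   builds one local DIE carrying only a DW_AT_signature for that type,
   and build_abbrev_table then redirects the references to this stub so
   that each of them can use a short intra-CU reference form instead of
   DW_FORM_ref_addr or DW_FORM_ref_sig8.  */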
8926
8927 /* The following 3 variables are temporaries that are computed only during the
8928 build_abbrev_table call and used and released during the following
8929 optimize_abbrev_table call. */
8930
8931 /* First abbrev_id that can be optimized based on usage. */
8932 static unsigned int abbrev_opt_start;
8933
8934 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
8935 abbrev_id smaller than this, because they must already be sized
8936 during build_abbrev_table). */
8937 static unsigned int abbrev_opt_base_type_end;
8938
8939 /* Vector of usage counts during build_abbrev_table. Indexed by
8940 abbrev_id - abbrev_opt_start. */
8941 static vec<unsigned int> abbrev_usage_count;
8942
8943 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
8944 static vec<dw_die_ref> sorted_abbrev_dies;
8945
8946 /* The format of each DIE (and its attribute value pairs) is encoded in an
8947 abbreviation table. This routine builds the abbreviation table and assigns
8948 a unique abbreviation id for each abbreviation entry. The children of each
8949 die are visited recursively. */
8950
8951 static void
8952 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
8953 {
8954 unsigned int abbrev_id = 0;
8955 dw_die_ref c;
8956 dw_attr_node *a;
8957 unsigned ix;
8958 dw_die_ref abbrev;
8959
8960 /* Scan the DIE references, and replace any that refer to
8961 DIEs from other CUs (i.e. those which are not marked) with
8962 the local stubs we built in optimize_external_refs. */
8963 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8964 if (AT_class (a) == dw_val_class_die_ref
8965 && (c = AT_ref (a))->die_mark == 0)
8966 {
8967 struct external_ref *ref_p;
8968 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
8969
8970 ref_p = lookup_external_ref (extern_map, c);
8971 if (ref_p->stub && ref_p->stub != die)
8972 change_AT_die_ref (a, ref_p->stub);
8973 else
8974 /* We aren't changing this reference, so mark it external. */
8975 set_AT_ref_external (a, 1);
8976 }
8977
8978 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
8979 {
8980 dw_attr_node *die_a, *abbrev_a;
8981 unsigned ix;
8982 bool ok = true;
8983
8984 if (abbrev_id == 0)
8985 continue;
8986 if (abbrev->die_tag != die->die_tag)
8987 continue;
8988 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
8989 continue;
8990
8991 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
8992 continue;
8993
8994 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
8995 {
8996 abbrev_a = &(*abbrev->die_attr)[ix];
8997 if ((abbrev_a->dw_attr != die_a->dw_attr)
8998 || (value_format (abbrev_a) != value_format (die_a)))
8999 {
9000 ok = false;
9001 break;
9002 }
9003 }
9004 if (ok)
9005 break;
9006 }
9007
9008 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9009 {
9010 vec_safe_push (abbrev_die_table, die);
9011 if (abbrev_opt_start)
9012 abbrev_usage_count.safe_push (0);
9013 }
9014 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9015 {
9016 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9017 sorted_abbrev_dies.safe_push (die);
9018 }
9019
9020 die->die_abbrev = abbrev_id;
9021 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9022 }
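
/* As a concrete illustration of the sharing above: every DW_TAG_variable
   DIE whose attributes are DW_AT_name (DW_FORM_strp), DW_AT_type
   (DW_FORM_ref4) and DW_AT_location (DW_FORM_exprloc), in that order,
   matches the same entry and reuses the same abbrev code; only a DIE
   with a new tag/attribute/form combination pushes a fresh entry onto
   abbrev_die_table.  */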
9023
9024 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9025 by die_abbrev's usage count, from the most commonly used
9026 abbreviation to the least. */
9027
9028 static int
9029 die_abbrev_cmp (const void *p1, const void *p2)
9030 {
9031 dw_die_ref die1 = *(const dw_die_ref *) p1;
9032 dw_die_ref die2 = *(const dw_die_ref *) p2;
9033
9034 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9035 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9036
9037 if (die1->die_abbrev >= abbrev_opt_base_type_end
9038 && die2->die_abbrev >= abbrev_opt_base_type_end)
9039 {
9040 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9041 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9042 return -1;
9043 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9044 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9045 return 1;
9046 }
9047
9048 /* Stabilize the sort. */
9049 if (die1->die_abbrev < die2->die_abbrev)
9050 return -1;
9051 if (die1->die_abbrev > die2->die_abbrev)
9052 return 1;
9053
9054 return 0;
9055 }
9056
9057 /* Convert dw_val_class_const and dw_val_class_unsigned_const class attributes
9058 of DIEs between sorted_abbrev_dies[first_id] and sorted_abbrev_dies[end - 1]
9059 into dw_val_class_const_implicit or
9060 dw_val_class_unsigned_const_implicit. */
9061
9062 static void
9063 optimize_implicit_const (unsigned int first_id, unsigned int end,
9064 vec<bool> &implicit_consts)
9065 {
9066 /* It never makes sense if there is just one DIE using the abbreviation. */
9067 if (end < first_id + 2)
9068 return;
9069
9070 dw_attr_node *a;
9071 unsigned ix, i;
9072 dw_die_ref die = sorted_abbrev_dies[first_id];
9073 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9074 if (implicit_consts[ix])
9075 {
9076 enum dw_val_class new_class = dw_val_class_none;
9077 switch (AT_class (a))
9078 {
9079 case dw_val_class_unsigned_const:
9080 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9081 continue;
9082
9083 /* The .debug_abbrev section will grow by
9084 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
9085 in all the DIEs using that abbreviation. */
9086 if (constant_size (AT_unsigned (a)) * (end - first_id)
9087 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9088 continue;
9089
9090 new_class = dw_val_class_unsigned_const_implicit;
9091 break;
9092
9093 case dw_val_class_const:
9094 new_class = dw_val_class_const_implicit;
9095 break;
9096
9097 case dw_val_class_file:
9098 new_class = dw_val_class_file_implicit;
9099 break;
9100
9101 default:
9102 continue;
9103 }
9104 for (i = first_id; i < end; i++)
9105 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9106 = new_class;
9107 }
9108 }
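
/* Example of the trade-off above: if ten DIEs share an abbreviation
   whose DW_AT_byte_size is always 4, making the attribute
   DW_FORM_implicit_const stores the value once as a one-byte sleb128 in
   .debug_abbrev and drops the one-byte DW_FORM_data1 payload from each
   of the ten DIEs in .debug_info.  */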
9109
9110 /* Attempt to optimize the abbreviation table, starting at abbreviation
9111 abbrev_opt_start. */
9112
9113 static void
9114 optimize_abbrev_table (void)
9115 {
9116 if (abbrev_opt_start
9117 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9118 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9119 {
9120 auto_vec<bool, 32> implicit_consts;
9121 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9122
9123 unsigned int abbrev_id = abbrev_opt_start - 1;
9124 unsigned int first_id = ~0U;
9125 unsigned int last_abbrev_id = 0;
9126 unsigned int i;
9127 dw_die_ref die;
9128 if (abbrev_opt_base_type_end > abbrev_opt_start)
9129 abbrev_id = abbrev_opt_base_type_end - 1;
9130 /* Reassign abbreviation ids from abbrev_opt_start above, so that
9131 most commonly used abbreviations come first. */
9132 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9133 {
9134 dw_attr_node *a;
9135 unsigned ix;
9136
9137 /* If calc_base_type_die_sizes has been called, the CU and
9138 base types after it can't be optimized, because we've already
9139 calculated their DIE offsets. We've sorted them first. */
9140 if (die->die_abbrev < abbrev_opt_base_type_end)
9141 continue;
9142 if (die->die_abbrev != last_abbrev_id)
9143 {
9144 last_abbrev_id = die->die_abbrev;
9145 if (dwarf_version >= 5 && first_id != ~0U)
9146 optimize_implicit_const (first_id, i, implicit_consts);
9147 abbrev_id++;
9148 (*abbrev_die_table)[abbrev_id] = die;
9149 if (dwarf_version >= 5)
9150 {
9151 first_id = i;
9152 implicit_consts.truncate (0);
9153
9154 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9155 switch (AT_class (a))
9156 {
9157 case dw_val_class_const:
9158 case dw_val_class_unsigned_const:
9159 case dw_val_class_file:
9160 implicit_consts.safe_push (true);
9161 break;
9162 default:
9163 implicit_consts.safe_push (false);
9164 break;
9165 }
9166 }
9167 }
9168 else if (dwarf_version >= 5)
9169 {
9170 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9171 if (!implicit_consts[ix])
9172 continue;
9173 else
9174 {
9175 dw_attr_node *other_a
9176 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9177 if (!dw_val_equal_p (&a->dw_attr_val,
9178 &other_a->dw_attr_val))
9179 implicit_consts[ix] = false;
9180 }
9181 }
9182 die->die_abbrev = abbrev_id;
9183 }
9184 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9185 if (dwarf_version >= 5 && first_id != ~0U)
9186 optimize_implicit_const (first_id, i, implicit_consts);
9187 }
9188
9189 abbrev_opt_start = 0;
9190 abbrev_opt_base_type_end = 0;
9191 abbrev_usage_count.release ();
9192 sorted_abbrev_dies.release ();
9193 }
9194 \f
9195 /* Return the power-of-two number of bytes necessary to represent VALUE. */
9196
9197 static int
9198 constant_size (unsigned HOST_WIDE_INT value)
9199 {
9200 int log;
9201
9202 if (value == 0)
9203 log = 0;
9204 else
9205 log = floor_log2 (value);
9206
9207 log = log / 8;
9208 log = 1 << (floor_log2 (log) + 1);
9209
9210 return log;
9211 }
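
/* For example, constant_size (0) and constant_size (0xff) are both 1,
   constant_size (0x100) is 2, constant_size (0x10000) is 4, and any
   value that needs more than 32 bits yields 8.  */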
9212
9213 /* Return the size of a DIE as it is represented in the
9214 .debug_info section. */
9215
9216 static unsigned long
9217 size_of_die (dw_die_ref die)
9218 {
9219 unsigned long size = 0;
9220 dw_attr_node *a;
9221 unsigned ix;
9222 enum dwarf_form form;
9223
9224 size += size_of_uleb128 (die->die_abbrev);
9225 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9226 {
9227 switch (AT_class (a))
9228 {
9229 case dw_val_class_addr:
9230 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9231 {
9232 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9233 size += size_of_uleb128 (AT_index (a));
9234 }
9235 else
9236 size += DWARF2_ADDR_SIZE;
9237 break;
9238 case dw_val_class_offset:
9239 size += DWARF_OFFSET_SIZE;
9240 break;
9241 case dw_val_class_loc:
9242 {
9243 unsigned long lsize = size_of_locs (AT_loc (a));
9244
9245 /* Block length. */
9246 if (dwarf_version >= 4)
9247 size += size_of_uleb128 (lsize);
9248 else
9249 size += constant_size (lsize);
9250 size += lsize;
9251 }
9252 break;
9253 case dw_val_class_loc_list:
9254 case dw_val_class_view_list:
9255 if (dwarf_split_debug_info && dwarf_version >= 5)
9256 {
9257 gcc_assert (AT_loc_list (a)->num_assigned);
9258 size += size_of_uleb128 (AT_loc_list (a)->hash);
9259 }
9260 else
9261 size += DWARF_OFFSET_SIZE;
9262 break;
9263 case dw_val_class_range_list:
9264 if (value_format (a) == DW_FORM_rnglistx)
9265 {
9266 gcc_assert (rnglist_idx);
9267 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9268 size += size_of_uleb128 (r->idx);
9269 }
9270 else
9271 size += DWARF_OFFSET_SIZE;
9272 break;
9273 case dw_val_class_const:
9274 size += size_of_sleb128 (AT_int (a));
9275 break;
9276 case dw_val_class_unsigned_const:
9277 {
9278 int csize = constant_size (AT_unsigned (a));
9279 if (dwarf_version == 3
9280 && a->dw_attr == DW_AT_data_member_location
9281 && csize >= 4)
9282 size += size_of_uleb128 (AT_unsigned (a));
9283 else
9284 size += csize;
9285 }
9286 break;
9287 case dw_val_class_symview:
9288 if (symview_upper_bound <= 0xff)
9289 size += 1;
9290 else if (symview_upper_bound <= 0xffff)
9291 size += 2;
9292 else if (symview_upper_bound <= 0xffffffff)
9293 size += 4;
9294 else
9295 size += 8;
9296 break;
9297 case dw_val_class_const_implicit:
9298 case dw_val_class_unsigned_const_implicit:
9299 case dw_val_class_file_implicit:
9300 /* These occupy no size in the DIE, just an extra sleb128 in
9301 .debug_abbrev. */
9302 break;
9303 case dw_val_class_const_double:
9304 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9305 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9306 size++; /* block */
9307 break;
9308 case dw_val_class_wide_int:
9309 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9310 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9311 if (get_full_len (*a->dw_attr_val.v.val_wide)
9312 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9313 size++; /* block */
9314 break;
9315 case dw_val_class_vec:
9316 size += constant_size (a->dw_attr_val.v.val_vec.length
9317 * a->dw_attr_val.v.val_vec.elt_size)
9318 + a->dw_attr_val.v.val_vec.length
9319 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9320 break;
9321 case dw_val_class_flag:
9322 if (dwarf_version >= 4)
9323 /* Currently all add_AT_flag calls pass in 1 as last argument,
9324 so DW_FORM_flag_present can be used. If that ever changes,
9325 we'll need to use DW_FORM_flag and have some optimization
9326 in build_abbrev_table that will change those to
9327 DW_FORM_flag_present if it is set to 1 in all DIEs using
9328 the same abbrev entry. */
9329 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9330 else
9331 size += 1;
9332 break;
9333 case dw_val_class_die_ref:
9334 if (AT_ref_external (a))
9335 {
9336 /* When type units are in use, external type references use
9337 DW_FORM_ref_sig8; otherwise we use DW_FORM_ref_addr. In DWARF2,
9338 DW_FORM_ref_addr is sized by target address length, whereas from
9339 DWARF3 on it's always sized as an offset. */
9340 if (use_debug_types)
9341 size += DWARF_TYPE_SIGNATURE_SIZE;
9342 else if (dwarf_version == 2)
9343 size += DWARF2_ADDR_SIZE;
9344 else
9345 size += DWARF_OFFSET_SIZE;
9346 }
9347 else
9348 size += DWARF_OFFSET_SIZE;
9349 break;
9350 case dw_val_class_fde_ref:
9351 size += DWARF_OFFSET_SIZE;
9352 break;
9353 case dw_val_class_lbl_id:
9354 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9355 {
9356 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9357 size += size_of_uleb128 (AT_index (a));
9358 }
9359 else
9360 size += DWARF2_ADDR_SIZE;
9361 break;
9362 case dw_val_class_lineptr:
9363 case dw_val_class_macptr:
9364 case dw_val_class_loclistsptr:
9365 size += DWARF_OFFSET_SIZE;
9366 break;
9367 case dw_val_class_str:
9368 form = AT_string_form (a);
9369 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9370 size += DWARF_OFFSET_SIZE;
9371 else if (form == dwarf_FORM (DW_FORM_strx))
9372 size += size_of_uleb128 (AT_index (a));
9373 else
9374 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9375 break;
9376 case dw_val_class_file:
9377 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9378 break;
9379 case dw_val_class_data8:
9380 size += 8;
9381 break;
9382 case dw_val_class_vms_delta:
9383 size += DWARF_OFFSET_SIZE;
9384 break;
9385 case dw_val_class_high_pc:
9386 size += DWARF2_ADDR_SIZE;
9387 break;
9388 case dw_val_class_discr_value:
9389 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9390 break;
9391 case dw_val_class_discr_list:
9392 {
9393 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9394
9395 /* This is a block, so we have the block length and then its
9396 data. */
9397 size += constant_size (block_size) + block_size;
9398 }
9399 break;
9400 default:
9401 gcc_unreachable ();
9402 }
9403 }
9404
9405 return size;
9406 }
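
/* Note that several cases above emit LEB128 numbers, which hold 7 value
   bits per byte: a uleb128 takes one byte up to 0x7f, two bytes up to
   0x3fff, and so on, which is why small abbrev codes and small indexes
   keep DIEs compact.  */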
9407
9408 /* Size the debugging information associated with a given DIE. Visits the
9409 DIE's children recursively. Updates the global variable next_die_offset
9410 each time through. Uses the current value of next_die_offset to update the
9411 die_offset field in each DIE. */
9412
9413 static void
9414 calc_die_sizes (dw_die_ref die)
9415 {
9416 dw_die_ref c;
9417
9418 gcc_assert (die->die_offset == 0
9419 || (unsigned long int) die->die_offset == next_die_offset);
9420 die->die_offset = next_die_offset;
9421 next_die_offset += size_of_die (die);
9422
9423 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9424
9425 if (die->die_child != NULL)
9426 /* Count the null byte used to terminate sibling lists. */
9427 next_die_offset += 1;
9428 }
9429
9430 /* Size just the base type children at the start of the CU.
9431 This is needed because build_abbrev_table needs to size locs,
9432 and sizing of type-based stack ops needs to know die_offset
9433 values for the base types. */
9434
9435 static void
9436 calc_base_type_die_sizes (void)
9437 {
9438 unsigned long die_offset = (dwarf_split_debug_info
9439 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9440 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9441 unsigned int i;
9442 dw_die_ref base_type;
9443 #if ENABLE_ASSERT_CHECKING
9444 dw_die_ref prev = comp_unit_die ()->die_child;
9445 #endif
9446
9447 die_offset += size_of_die (comp_unit_die ());
9448 for (i = 0; base_types.iterate (i, &base_type); i++)
9449 {
9450 #if ENABLE_ASSERT_CHECKING
9451 gcc_assert (base_type->die_offset == 0
9452 && prev->die_sib == base_type
9453 && base_type->die_child == NULL
9454 && base_type->die_abbrev);
9455 prev = base_type;
9456 #endif
9457 if (abbrev_opt_start
9458 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9459 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9460 base_type->die_offset = die_offset;
9461 die_offset += size_of_die (base_type);
9462 }
9463 }
9464
9465 /* Set the marks for a die and its children. We do this so
9466 that we know whether or not a reference needs to use FORM_ref_addr; only
9467 DIEs in the same CU will be marked. We used to clear out the offset
9468 and use that as the flag, but ran into ordering problems. */
9469
9470 static void
9471 mark_dies (dw_die_ref die)
9472 {
9473 dw_die_ref c;
9474
9475 gcc_assert (!die->die_mark);
9476
9477 die->die_mark = 1;
9478 FOR_EACH_CHILD (die, c, mark_dies (c));
9479 }
9480
9481 /* Clear the marks for a die and its children. */
9482
9483 static void
9484 unmark_dies (dw_die_ref die)
9485 {
9486 dw_die_ref c;
9487
9488 if (! use_debug_types)
9489 gcc_assert (die->die_mark);
9490
9491 die->die_mark = 0;
9492 FOR_EACH_CHILD (die, c, unmark_dies (c));
9493 }
9494
9495 /* Clear the marks for a die, its children and referred dies. */
9496
9497 static void
9498 unmark_all_dies (dw_die_ref die)
9499 {
9500 dw_die_ref c;
9501 dw_attr_node *a;
9502 unsigned ix;
9503
9504 if (!die->die_mark)
9505 return;
9506 die->die_mark = 0;
9507
9508 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9509
9510 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9511 if (AT_class (a) == dw_val_class_die_ref)
9512 unmark_all_dies (AT_ref (a));
9513 }
9514
9515 /* Calculate if the entry should appear in the final output file. It may be
9516 from a pruned type. */
9517
9518 static bool
9519 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9520 {
9521 /* By limiting gnu pubnames to definitions only, gold can generate a
9522 gdb index without entries for declarations, which don't include
9523 enough information to be useful. */
9524 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9525 return false;
9526
9527 if (table == pubname_table)
9528 {
9529 /* Enumerator names are part of the pubname table, but the
9530 parent DW_TAG_enumeration_type die may have been pruned.
9531 Don't output them if that is the case. */
9532 if (p->die->die_tag == DW_TAG_enumerator &&
9533 (p->die->die_parent == NULL
9534 || !p->die->die_parent->die_perennial_p))
9535 return false;
9536
9537 /* Everything else in the pubname table is included. */
9538 return true;
9539 }
9540
9541 /* The pubtypes table shouldn't include types that have been
9542 pruned. */
9543 return (p->die->die_offset != 0
9544 || !flag_eliminate_unused_debug_types);
9545 }
9546
9547 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9548 generated for the compilation unit. */
9549
9550 static unsigned long
9551 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9552 {
9553 unsigned long size;
9554 unsigned i;
9555 pubname_entry *p;
9556 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9557
9558 size = DWARF_PUBNAMES_HEADER_SIZE;
9559 FOR_EACH_VEC_ELT (*names, i, p)
9560 if (include_pubname_in_output (names, p))
9561 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9562
9563 size += DWARF_OFFSET_SIZE;
9564 return size;
9565 }
9566
9567 /* Return the size of the information in the .debug_aranges section. */
9568
9569 static unsigned long
9570 size_of_aranges (void)
9571 {
9572 unsigned long size;
9573
9574 size = DWARF_ARANGES_HEADER_SIZE;
9575
9576 /* Count the address/length pair for this compilation unit. */
9577 if (text_section_used)
9578 size += 2 * DWARF2_ADDR_SIZE;
9579 if (cold_text_section_used)
9580 size += 2 * DWARF2_ADDR_SIZE;
9581 if (have_multiple_function_sections)
9582 {
9583 unsigned fde_idx;
9584 dw_fde_ref fde;
9585
9586 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9587 {
9588 if (DECL_IGNORED_P (fde->decl))
9589 continue;
9590 if (!fde->in_std_section)
9591 size += 2 * DWARF2_ADDR_SIZE;
9592 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9593 size += 2 * DWARF2_ADDR_SIZE;
9594 }
9595 }
9596
9597 /* Count the two zero words used to terminate the address range table. */
9598 size += 2 * DWARF2_ADDR_SIZE;
9599 return size;
9600 }
9601 \f
9602 /* Select the encoding of an attribute value. */
9603
9604 static enum dwarf_form
9605 value_format (dw_attr_node *a)
9606 {
9607 switch (AT_class (a))
9608 {
9609 case dw_val_class_addr:
9610 /* Only a few attributes allow DW_FORM_addr. */
9611 switch (a->dw_attr)
9612 {
9613 case DW_AT_low_pc:
9614 case DW_AT_high_pc:
9615 case DW_AT_entry_pc:
9616 case DW_AT_trampoline:
9617 return (AT_index (a) == NOT_INDEXED
9618 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9619 default:
9620 break;
9621 }
9622 switch (DWARF2_ADDR_SIZE)
9623 {
9624 case 1:
9625 return DW_FORM_data1;
9626 case 2:
9627 return DW_FORM_data2;
9628 case 4:
9629 return DW_FORM_data4;
9630 case 8:
9631 return DW_FORM_data8;
9632 default:
9633 gcc_unreachable ();
9634 }
9635 case dw_val_class_loc_list:
9636 case dw_val_class_view_list:
9637 if (dwarf_split_debug_info
9638 && dwarf_version >= 5
9639 && AT_loc_list (a)->num_assigned)
9640 return DW_FORM_loclistx;
9641 /* FALLTHRU */
9642 case dw_val_class_range_list:
9643 /* For range lists in DWARF 5, use DW_FORM_rnglistx from .debug_info.dwo
9644 but in .debug_info use DW_FORM_sec_offset, which is shorter if we
9645 care about sizes of .debug* sections in shared libraries and
9646 executables and don't take into account relocations that affect just
9647 relocatable objects - for DW_FORM_rnglistx we'd have to emit an offset
9648 table in the .debug_rnglists section. */
9649 if (dwarf_split_debug_info
9650 && dwarf_version >= 5
9651 && AT_class (a) == dw_val_class_range_list
9652 && rnglist_idx
9653 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9654 return DW_FORM_rnglistx;
9655 if (dwarf_version >= 4)
9656 return DW_FORM_sec_offset;
9657 /* FALLTHRU */
9658 case dw_val_class_vms_delta:
9659 case dw_val_class_offset:
9660 switch (DWARF_OFFSET_SIZE)
9661 {
9662 case 4:
9663 return DW_FORM_data4;
9664 case 8:
9665 return DW_FORM_data8;
9666 default:
9667 gcc_unreachable ();
9668 }
9669 case dw_val_class_loc:
9670 if (dwarf_version >= 4)
9671 return DW_FORM_exprloc;
9672 switch (constant_size (size_of_locs (AT_loc (a))))
9673 {
9674 case 1:
9675 return DW_FORM_block1;
9676 case 2:
9677 return DW_FORM_block2;
9678 case 4:
9679 return DW_FORM_block4;
9680 default:
9681 gcc_unreachable ();
9682 }
9683 case dw_val_class_const:
9684 return DW_FORM_sdata;
9685 case dw_val_class_unsigned_const:
9686 switch (constant_size (AT_unsigned (a)))
9687 {
9688 case 1:
9689 return DW_FORM_data1;
9690 case 2:
9691 return DW_FORM_data2;
9692 case 4:
9693 /* In DWARF3 DW_AT_data_member_location with
9694 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9695 constant, so we need to use DW_FORM_udata if we need
9696 a large constant. */
9697 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9698 return DW_FORM_udata;
9699 return DW_FORM_data4;
9700 case 8:
9701 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9702 return DW_FORM_udata;
9703 return DW_FORM_data8;
9704 default:
9705 gcc_unreachable ();
9706 }
9707 case dw_val_class_const_implicit:
9708 case dw_val_class_unsigned_const_implicit:
9709 case dw_val_class_file_implicit:
9710 return DW_FORM_implicit_const;
9711 case dw_val_class_const_double:
9712 switch (HOST_BITS_PER_WIDE_INT)
9713 {
9714 case 8:
9715 return DW_FORM_data2;
9716 case 16:
9717 return DW_FORM_data4;
9718 case 32:
9719 return DW_FORM_data8;
9720 case 64:
9721 if (dwarf_version >= 5)
9722 return DW_FORM_data16;
9723 /* FALLTHRU */
9724 default:
9725 return DW_FORM_block1;
9726 }
9727 case dw_val_class_wide_int:
9728 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9729 {
9730 case 8:
9731 return DW_FORM_data1;
9732 case 16:
9733 return DW_FORM_data2;
9734 case 32:
9735 return DW_FORM_data4;
9736 case 64:
9737 return DW_FORM_data8;
9738 case 128:
9739 if (dwarf_version >= 5)
9740 return DW_FORM_data16;
9741 /* FALLTHRU */
9742 default:
9743 return DW_FORM_block1;
9744 }
9745 case dw_val_class_symview:
9746 /* ??? We might use uleb128, but then we'd have to compute
9747 .debug_info offsets in the assembler. */
9748 if (symview_upper_bound <= 0xff)
9749 return DW_FORM_data1;
9750 else if (symview_upper_bound <= 0xffff)
9751 return DW_FORM_data2;
9752 else if (symview_upper_bound <= 0xffffffff)
9753 return DW_FORM_data4;
9754 else
9755 return DW_FORM_data8;
9756 case dw_val_class_vec:
9757 switch (constant_size (a->dw_attr_val.v.val_vec.length
9758 * a->dw_attr_val.v.val_vec.elt_size))
9759 {
9760 case 1:
9761 return DW_FORM_block1;
9762 case 2:
9763 return DW_FORM_block2;
9764 case 4:
9765 return DW_FORM_block4;
9766 default:
9767 gcc_unreachable ();
9768 }
9769 case dw_val_class_flag:
9770 if (dwarf_version >= 4)
9771 {
9772 /* Currently all add_AT_flag calls pass in 1 as last argument,
9773 so DW_FORM_flag_present can be used. If that ever changes,
9774 we'll need to use DW_FORM_flag and have some optimization
9775 in build_abbrev_table that will change those to
9776 DW_FORM_flag_present if it is set to 1 in all DIEs using
9777 the same abbrev entry. */
9778 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9779 return DW_FORM_flag_present;
9780 }
9781 return DW_FORM_flag;
9782 case dw_val_class_die_ref:
9783 if (AT_ref_external (a))
9784 return use_debug_types ? DW_FORM_ref_sig8 : DW_FORM_ref_addr;
9785 else
9786 return DW_FORM_ref4;
9787 case dw_val_class_fde_ref:
9788 return DW_FORM_data4;
9789 case dw_val_class_lbl_id:
9790 return (AT_index (a) == NOT_INDEXED
9791 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9792 case dw_val_class_lineptr:
9793 case dw_val_class_macptr:
9794 case dw_val_class_loclistsptr:
9795 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data4;
9796 case dw_val_class_str:
9797 return AT_string_form (a);
9798 case dw_val_class_file:
9799 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9800 {
9801 case 1:
9802 return DW_FORM_data1;
9803 case 2:
9804 return DW_FORM_data2;
9805 case 4:
9806 return DW_FORM_data4;
9807 default:
9808 gcc_unreachable ();
9809 }
9810
9811 case dw_val_class_data8:
9812 return DW_FORM_data8;
9813
9814 case dw_val_class_high_pc:
9815 switch (DWARF2_ADDR_SIZE)
9816 {
9817 case 1:
9818 return DW_FORM_data1;
9819 case 2:
9820 return DW_FORM_data2;
9821 case 4:
9822 return DW_FORM_data4;
9823 case 8:
9824 return DW_FORM_data8;
9825 default:
9826 gcc_unreachable ();
9827 }
9828
9829 case dw_val_class_discr_value:
9830 return (a->dw_attr_val.v.val_discr_value.pos
9831 ? DW_FORM_udata
9832 : DW_FORM_sdata);
9833 case dw_val_class_discr_list:
9834 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9835 {
9836 case 1:
9837 return DW_FORM_block1;
9838 case 2:
9839 return DW_FORM_block2;
9840 case 4:
9841 return DW_FORM_block4;
9842 default:
9843 gcc_unreachable ();
9844 }
9845
9846 default:
9847 gcc_unreachable ();
9848 }
9849 }
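
/* For example, a DW_AT_byte_size of 4 is dw_val_class_unsigned_const;
   constant_size (4) is 1, so it gets DW_FORM_data1, while a reference
   to a type DIE within the same CU comes out as DW_FORM_ref4.  */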
9850
9851 /* Output the encoding of an attribute value. */
9852
9853 static void
9854 output_value_format (dw_attr_node *a)
9855 {
9856 enum dwarf_form form = value_format (a);
9857
9858 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9859 }
9860
9861 /* Given a die and id, produce the appropriate abbreviations. */
9862
9863 static void
9864 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9865 {
9866 unsigned ix;
9867 dw_attr_node *a_attr;
9868
9869 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9870 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9871 dwarf_tag_name (abbrev->die_tag));
9872
9873 if (abbrev->die_child != NULL)
9874 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
9875 else
9876 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
9877
9878 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
9879 {
9880 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
9881 dwarf_attr_name (a_attr->dw_attr));
9882 output_value_format (a_attr);
9883 if (value_format (a_attr) == DW_FORM_implicit_const)
9884 {
9885 if (AT_class (a_attr) == dw_val_class_file_implicit)
9886 {
9887 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
9888 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
9889 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
9890 }
9891 else
9892 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
9893 }
9894 }
9895
9896 dw2_asm_output_data (1, 0, NULL);
9897 dw2_asm_output_data (1, 0, NULL);
9898 }
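
/* The emitted sequence for one abbreviation is therefore: a uleb128
   abbrev code, a uleb128 tag (e.g. 0x34 for DW_TAG_variable), one
   DW_children_* byte, a list of (uleb128 attribute, uleb128 form) pairs
   with any DW_FORM_implicit_const values interleaved as sleb128, and a
   terminating 0,0 pair.  */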
9899
9900
9901 /* Output the .debug_abbrev section which defines the DIE abbreviation
9902 table. */
9903
9904 static void
9905 output_abbrev_section (void)
9906 {
9907 unsigned int abbrev_id;
9908 dw_die_ref abbrev;
9909
9910 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9911 if (abbrev_id != 0)
9912 output_die_abbrevs (abbrev_id, abbrev);
9913
9914 /* Terminate the table. */
9915 dw2_asm_output_data (1, 0, NULL);
9916 }
9917
9918 /* Return a new location list, given the begin and end range, and the
9919 expression. */
9920
9921 static inline dw_loc_list_ref
9922 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
9923 const char *end, var_loc_view vend,
9924 const char *section)
9925 {
9926 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
9927
9928 retlist->begin = begin;
9929 retlist->begin_entry = NULL;
9930 retlist->end = end;
9931 retlist->expr = expr;
9932 retlist->section = section;
9933 retlist->vbegin = vbegin;
9934 retlist->vend = vend;
9935
9936 return retlist;
9937 }
9938
9939 /* Return true iff there's any nonzero view number in the loc list.
9940
9941 ??? When views are not enabled, we'll often extend a single range
9942 to the entire function, so that we emit a single location
9943 expression rather than a location list. With views, even with a
9944 single range, we'll output a list if start or end have a nonzero
9945 view. If we change this, we may want to stop splitting a single
9946 range in dw_loc_list just because of a nonzero view, even if it
9947 straddles across hot/cold partitions. */
9948
9949 static bool
9950 loc_list_has_views (dw_loc_list_ref list)
9951 {
9952 if (!debug_variable_location_views)
9953 return false;
9954
9955 for (dw_loc_list_ref loc = list;
9956 loc != NULL; loc = loc->dw_loc_next)
9957 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
9958 return true;
9959
9960 return false;
9961 }
9962
9963 /* Generate a new internal symbol for this location list node, if it
9964 hasn't got one yet. */
9965
9966 static inline void
9967 gen_llsym (dw_loc_list_ref list)
9968 {
9969 gcc_assert (!list->ll_symbol);
9970 list->ll_symbol = gen_internal_sym ("LLST");
9971
9972 if (!loc_list_has_views (list))
9973 return;
9974
9975 if (dwarf2out_locviews_in_attribute ())
9976 {
9977 /* Use the same label_num for the view list. */
9978 label_num--;
9979 list->vl_symbol = gen_internal_sym ("LVUS");
9980 }
9981 else
9982 list->vl_symbol = list->ll_symbol;
9983 }
9984
9985 /* Generate a symbol for the list, but only if we really want to emit
9986 it as a list. */
9987
9988 static inline void
9989 maybe_gen_llsym (dw_loc_list_ref list)
9990 {
9991 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
9992 return;
9993
9994 gen_llsym (list);
9995 }
9996
9997 /* Determine whether or not to skip loc_list entry CURR. If SIZEP is
9998 NULL, don't consider size of the location expression. If we're not
9999 to skip it, and SIZEP is non-null, store the size of CURR->expr's
10000 representation in *SIZEP. */
10001
10002 static bool
10003 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL)
10004 {
10005 /* Don't output an entry that starts and ends at the same address. */
10006 if (strcmp (curr->begin, curr->end) == 0
10007 && curr->vbegin == curr->vend && !curr->force)
10008 return true;
10009
10010 if (!sizep)
10011 return false;
10012
10013 unsigned long size = size_of_locs (curr->expr);
10014
10015 /* If the expression is too large, drop it on the floor. We could
10016 perhaps put it into DW_TAG_dwarf_procedure and refer to that
10017 in the expression, but >= 64KB expressions for a single value
10018 in a single range are unlikely to be very useful. */
10019 if (dwarf_version < 5 && size > 0xffff)
10020 return true;
10021
10022 *sizep = size;
10023
10024 return false;
10025 }
10026
10027 /* Output a view pair loclist entry for CURR, if it requires one. */
10028
10029 static void
10030 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10031 {
10032 if (!dwarf2out_locviews_in_loclist ())
10033 return;
10034
10035 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10036 return;
10037
10038 #ifdef DW_LLE_view_pair
10039 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10040
10041 if (dwarf2out_as_locview_support)
10042 {
10043 if (ZERO_VIEW_P (curr->vbegin))
10044 dw2_asm_output_data_uleb128 (0, "Location view begin");
10045 else
10046 {
10047 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10048 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10049 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10050 }
10051
10052 if (ZERO_VIEW_P (curr->vend))
10053 dw2_asm_output_data_uleb128 (0, "Location view end");
10054 else
10055 {
10056 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10057 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10058 dw2_asm_output_symname_uleb128 (label, "Location view end");
10059 }
10060 }
10061 else
10062 {
10063 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10064 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10065 }
10066 #endif /* DW_LLE_view_pair */
10067
10068 return;
10069 }
10070
10071 /* Output the location list given to us. */
10072
10073 static void
10074 output_loc_list (dw_loc_list_ref list_head)
10075 {
10076 int vcount = 0, lcount = 0;
10077
10078 if (list_head->emitted)
10079 return;
10080 list_head->emitted = true;
10081
10082 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10083 {
10084 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10085
10086 for (dw_loc_list_ref curr = list_head; curr != NULL;
10087 curr = curr->dw_loc_next)
10088 {
10089 unsigned long size;
10090
10091 if (skip_loc_list_entry (curr, &size))
10092 continue;
10093
10094 vcount++;
10095
10096 /* ?? dwarf_split_debug_info? */
10097 if (dwarf2out_as_locview_support)
10098 {
10099 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10100
10101 if (!ZERO_VIEW_P (curr->vbegin))
10102 {
10103 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10104 dw2_asm_output_symname_uleb128 (label,
10105 "View list begin (%s)",
10106 list_head->vl_symbol);
10107 }
10108 else
10109 dw2_asm_output_data_uleb128 (0,
10110 "View list begin (%s)",
10111 list_head->vl_symbol);
10112
10113 if (!ZERO_VIEW_P (curr->vend))
10114 {
10115 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10116 dw2_asm_output_symname_uleb128 (label,
10117 "View list end (%s)",
10118 list_head->vl_symbol);
10119 }
10120 else
10121 dw2_asm_output_data_uleb128 (0,
10122 "View list end (%s)",
10123 list_head->vl_symbol);
10124 }
10125 else
10126 {
10127 dw2_asm_output_data_uleb128 (curr->vbegin,
10128 "View list begin (%s)",
10129 list_head->vl_symbol);
10130 dw2_asm_output_data_uleb128 (curr->vend,
10131 "View list end (%s)",
10132 list_head->vl_symbol);
10133 }
10134 }
10135 }
10136
10137 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10138
10139 const char *last_section = NULL;
10140 const char *base_label = NULL;
10141
10142 /* Walk the location list, and output each range + expression. */
10143 for (dw_loc_list_ref curr = list_head; curr != NULL;
10144 curr = curr->dw_loc_next)
10145 {
10146 unsigned long size;
10147
10148 /* Skip this entry? If we skip it here, we must skip it in the
10149 view list above as well. */
10150 if (skip_loc_list_entry (curr, &size))
10151 continue;
10152
10153 lcount++;
10154
10155 if (dwarf_version >= 5)
10156 {
10157 if (dwarf_split_debug_info)
10158 {
10159 dwarf2out_maybe_output_loclist_view_pair (curr);
10160 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has
10161 uleb128 index into .debug_addr and uleb128 length. */
10162 dw2_asm_output_data (1, DW_LLE_startx_length,
10163 "DW_LLE_startx_length (%s)",
10164 list_head->ll_symbol);
10165 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10166 "Location list range start index "
10167 "(%s)", curr->begin);
10168 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10169 For that case we probably need to emit DW_LLE_startx_endx,
10170 but we'd need 2 .debug_addr entries rather than just one. */
10171 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10172 "Location list length (%s)",
10173 list_head->ll_symbol);
10174 }
10175 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10176 {
10177 dwarf2out_maybe_output_loclist_view_pair (curr);
10178 /* If all code is in .text section, the base address is
10179 already provided by the CU attributes. Use
10180 DW_LLE_offset_pair where both addresses are uleb128 encoded
10181 offsets against that base. */
10182 dw2_asm_output_data (1, DW_LLE_offset_pair,
10183 "DW_LLE_offset_pair (%s)",
10184 list_head->ll_symbol);
10185 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10186 "Location list begin address (%s)",
10187 list_head->ll_symbol);
10188 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10189 "Location list end address (%s)",
10190 list_head->ll_symbol);
10191 }
10192 else if (HAVE_AS_LEB128)
10193 {
10194 /* Otherwise, find out how many consecutive entries could share
10195 the same base entry. If just one, emit DW_LLE_start_length,
10196 otherwise emit DW_LLE_base_address for the base address
10197 followed by a series of DW_LLE_offset_pair. */
10198 if (last_section == NULL || curr->section != last_section)
10199 {
10200 dw_loc_list_ref curr2;
10201 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10202 curr2 = curr2->dw_loc_next)
10203 {
10204 if (strcmp (curr2->begin, curr2->end) == 0
10205 && !curr2->force)
10206 continue;
10207 break;
10208 }
10209 if (curr2 == NULL || curr->section != curr2->section)
10210 last_section = NULL;
10211 else
10212 {
10213 last_section = curr->section;
10214 base_label = curr->begin;
10215 dw2_asm_output_data (1, DW_LLE_base_address,
10216 "DW_LLE_base_address (%s)",
10217 list_head->ll_symbol);
10218 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10219 "Base address (%s)",
10220 list_head->ll_symbol);
10221 }
10222 }
10223 /* Only one entry with the same base address. Use
10224 DW_LLE_start_length with absolute address and uleb128
10225 length. */
10226 if (last_section == NULL)
10227 {
10228 dwarf2out_maybe_output_loclist_view_pair (curr);
10229 dw2_asm_output_data (1, DW_LLE_start_length,
10230 "DW_LLE_start_length (%s)",
10231 list_head->ll_symbol);
10232 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10233 "Location list begin address (%s)",
10234 list_head->ll_symbol);
10235 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10236 "Location list length "
10237 "(%s)", list_head->ll_symbol);
10238 }
10239 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
10240 DW_LLE_base_address. */
10241 else
10242 {
10243 dwarf2out_maybe_output_loclist_view_pair (curr);
10244 dw2_asm_output_data (1, DW_LLE_offset_pair,
10245 "DW_LLE_offset_pair (%s)",
10246 list_head->ll_symbol);
10247 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10248 "Location list begin address "
10249 "(%s)", list_head->ll_symbol);
10250 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10251 "Location list end address "
10252 "(%s)", list_head->ll_symbol);
10253 }
10254 }
10255 /* The assembler does not support the .uleb128 directive. Emit
10256 DW_LLE_start_end with a pair of absolute addresses. */
10257 else
10258 {
10259 dwarf2out_maybe_output_loclist_view_pair (curr);
10260 dw2_asm_output_data (1, DW_LLE_start_end,
10261 "DW_LLE_start_end (%s)",
10262 list_head->ll_symbol);
10263 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10264 "Location list begin address (%s)",
10265 list_head->ll_symbol);
10266 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10267 "Location list end address (%s)",
10268 list_head->ll_symbol);
10269 }
10270 }
10271 else if (dwarf_split_debug_info)
10272 {
10273 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit index into .debug_addr
10274 and 4 byte length. */
10275 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10276 "Location list start/length entry (%s)",
10277 list_head->ll_symbol);
10278 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10279 "Location list range start index (%s)",
10280 curr->begin);
10281 /* The length field is 4 bytes. If we ever need to support
10282 an 8-byte length, we can add a new DW_LLE code or fall back
10283 to DW_LLE_GNU_start_end_entry. */
10284 dw2_asm_output_delta (4, curr->end, curr->begin,
10285 "Location list range length (%s)",
10286 list_head->ll_symbol);
10287 }
10288 else if (!have_multiple_function_sections)
10289 {
10290 /* Pair of relative addresses against start of text section. */
10291 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10292 "Location list begin address (%s)",
10293 list_head->ll_symbol);
10294 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10295 "Location list end address (%s)",
10296 list_head->ll_symbol);
10297 }
10298 else
10299 {
10300 /* Pair of absolute addresses. */
10301 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10302 "Location list begin address (%s)",
10303 list_head->ll_symbol);
10304 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10305 "Location list end address (%s)",
10306 list_head->ll_symbol);
10307 }
10308
10309 /* Output the block length for this list of location operations. */
10310 if (dwarf_version >= 5)
10311 dw2_asm_output_data_uleb128 (size, "Location expression size");
10312 else
10313 {
10314 gcc_assert (size <= 0xffff);
10315 dw2_asm_output_data (2, size, "Location expression size");
10316 }
10317
10318 output_loc_sequence (curr->expr, -1);
10319 }
10320
10321 /* And finally list termination. */
10322 if (dwarf_version >= 5)
10323 dw2_asm_output_data (1, DW_LLE_end_of_list,
10324 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10325 else if (dwarf_split_debug_info)
10326 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10327 "Location list terminator (%s)",
10328 list_head->ll_symbol);
10329 else
10330 {
10331 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10332 "Location list terminator begin (%s)",
10333 list_head->ll_symbol);
10334 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10335 "Location list terminator end (%s)",
10336 list_head->ll_symbol);
10337 }
10338
10339 gcc_assert (!list_head->vl_symbol
10340 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10341 }
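
/* Putting the cases above together for the common configuration of
   -gdwarf-5, a single .text section and an assembler with .uleb128
   support: each surviving range becomes a DW_LLE_offset_pair entry
   (preceded by a DW_LLE_view_pair when views go into the list) with two
   uleb128 offsets from the CU base address, followed by a uleb128
   expression size and the expression bytes, and the whole list is
   closed by a single DW_LLE_end_of_list byte.  */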
10342
10343 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10344 section. Emit a relocated reference if val_entry is NULL, otherwise,
10345 emit an indirect reference. */
10346
10347 static void
10348 output_range_list_offset (dw_attr_node *a)
10349 {
10350 const char *name = dwarf_attr_name (a->dw_attr);
10351
10352 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10353 {
10354 if (dwarf_version >= 5)
10355 {
10356 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10357 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
10358 debug_ranges_section, "%s", name);
10359 }
10360 else
10361 {
10362 char *p = strchr (ranges_section_label, '\0');
10363 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10364 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10365 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
10366 debug_ranges_section, "%s", name);
10367 *p = '\0';
10368 }
10369 }
10370 else if (dwarf_version >= 5)
10371 {
10372 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10373 gcc_assert (rnglist_idx);
10374 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10375 }
10376 else
10377 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10378 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10379 "%s (offset from %s)", name, ranges_section_label);
10380 }
10381
10382 /* Output the offset into the debug_loc section. */
10383
10384 static void
10385 output_loc_list_offset (dw_attr_node *a)
10386 {
10387 char *sym = AT_loc_list (a)->ll_symbol;
10388
10389 gcc_assert (sym);
10390 if (!dwarf_split_debug_info)
10391 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10392 "%s", dwarf_attr_name (a->dw_attr));
10393 else if (dwarf_version >= 5)
10394 {
10395 gcc_assert (AT_loc_list (a)->num_assigned);
10396 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10397 dwarf_attr_name (a->dw_attr),
10398 sym);
10399 }
10400 else
10401 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10402 "%s", dwarf_attr_name (a->dw_attr));
10403 }
10404
10405 /* Output the offset into the debug_loc section. */
10406
10407 static void
10408 output_view_list_offset (dw_attr_node *a)
10409 {
10410 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10411
10412 gcc_assert (sym);
10413 if (dwarf_split_debug_info)
10414 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10415 "%s", dwarf_attr_name (a->dw_attr));
10416 else
10417 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10418 "%s", dwarf_attr_name (a->dw_attr));
10419 }
10420
10421 /* Output an attribute's index or value appropriately. */
10422
10423 static void
10424 output_attr_index_or_value (dw_attr_node *a)
10425 {
10426 const char *name = dwarf_attr_name (a->dw_attr);
10427
10428 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10429 {
10430 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10431 return;
10432 }
10433 switch (AT_class (a))
10434 {
10435 case dw_val_class_addr:
10436 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10437 break;
10438 case dw_val_class_high_pc:
10439 case dw_val_class_lbl_id:
10440 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10441 break;
10442 default:
10443 gcc_unreachable ();
10444 }
10445 }
10446
10447 /* Output a type signature. */
10448
10449 static inline void
10450 output_signature (const char *sig, const char *name)
10451 {
10452 int i;
10453
10454 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10455 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10456 }
10457
10458 /* Output a discriminant value. */
10459
10460 static inline void
10461 output_discr_value (dw_discr_value *discr_value, const char *name)
10462 {
10463 if (discr_value->pos)
10464 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10465 else
10466 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10467 }
10468
10469 /* Output the DIE and its attributes. Called recursively to generate
10470 the definitions of each child DIE. */
10471
10472 static void
10473 output_die (dw_die_ref die)
10474 {
10475 dw_attr_node *a;
10476 dw_die_ref c;
10477 unsigned long size;
10478 unsigned ix;
10479
10480 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10481 (unsigned long)die->die_offset,
10482 dwarf_tag_name (die->die_tag));
10483
10484 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10485 {
10486 const char *name = dwarf_attr_name (a->dw_attr);
10487
10488 switch (AT_class (a))
10489 {
10490 case dw_val_class_addr:
10491 output_attr_index_or_value (a);
10492 break;
10493
10494 case dw_val_class_offset:
10495 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10496 "%s", name);
10497 break;
10498
10499 case dw_val_class_range_list:
10500 output_range_list_offset (a);
10501 break;
10502
10503 case dw_val_class_loc:
10504 size = size_of_locs (AT_loc (a));
10505
10506 /* Output the block length for this list of location operations. */
10507 if (dwarf_version >= 4)
10508 dw2_asm_output_data_uleb128 (size, "%s", name);
10509 else
10510 dw2_asm_output_data (constant_size (size), size, "%s", name);
10511
10512 output_loc_sequence (AT_loc (a), -1);
10513 break;
10514
10515 case dw_val_class_const:
10516 /* ??? It would be slightly more efficient to use a scheme like the one
10517 used for unsigned constants below, but gdb 4.x does not sign
10518 extend. Gdb 5.x does sign extend. */
10519 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10520 break;
10521
10522 case dw_val_class_unsigned_const:
10523 {
10524 int csize = constant_size (AT_unsigned (a));
10525 if (dwarf_version == 3
10526 && a->dw_attr == DW_AT_data_member_location
10527 && csize >= 4)
10528 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10529 else
10530 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10531 }
10532 break;
10533
10534 case dw_val_class_symview:
10535 {
10536 int vsize;
10537 if (symview_upper_bound <= 0xff)
10538 vsize = 1;
10539 else if (symview_upper_bound <= 0xffff)
10540 vsize = 2;
10541 else if (symview_upper_bound <= 0xffffffff)
10542 vsize = 4;
10543 else
10544 vsize = 8;
10545 dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
10546 "%s", name);
10547 }
10548 break;
10549
10550 case dw_val_class_const_implicit:
10551 if (flag_debug_asm)
10552 fprintf (asm_out_file, "\t\t\t%s %s ("
10553 HOST_WIDE_INT_PRINT_DEC ")\n",
10554 ASM_COMMENT_START, name, AT_int (a));
10555 break;
10556
10557 case dw_val_class_unsigned_const_implicit:
10558 if (flag_debug_asm)
10559 fprintf (asm_out_file, "\t\t\t%s %s ("
10560 HOST_WIDE_INT_PRINT_HEX ")\n",
10561 ASM_COMMENT_START, name, AT_unsigned (a));
10562 break;
10563
10564 case dw_val_class_const_double:
10565 {
10566 unsigned HOST_WIDE_INT first, second;
10567
10568 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10569 dw2_asm_output_data (1,
10570 HOST_BITS_PER_DOUBLE_INT
10571 / HOST_BITS_PER_CHAR,
10572 NULL);
10573
10574 if (WORDS_BIG_ENDIAN)
10575 {
10576 first = a->dw_attr_val.v.val_double.high;
10577 second = a->dw_attr_val.v.val_double.low;
10578 }
10579 else
10580 {
10581 first = a->dw_attr_val.v.val_double.low;
10582 second = a->dw_attr_val.v.val_double.high;
10583 }
10584
10585 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10586 first, "%s", name);
10587 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10588 second, NULL);
10589 }
10590 break;
10591
10592 case dw_val_class_wide_int:
10593 {
10594 int i;
10595 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10596 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10597 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10598 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10599 * l, NULL);
10600
10601 if (WORDS_BIG_ENDIAN)
10602 for (i = len - 1; i >= 0; --i)
10603 {
10604 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10605 "%s", name);
10606 name = "";
10607 }
10608 else
10609 for (i = 0; i < len; ++i)
10610 {
10611 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10612 "%s", name);
10613 name = "";
10614 }
10615 }
10616 break;
10617
10618 case dw_val_class_vec:
10619 {
10620 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10621 unsigned int len = a->dw_attr_val.v.val_vec.length;
10622 unsigned int i;
10623 unsigned char *p;
10624
10625 dw2_asm_output_data (constant_size (len * elt_size),
10626 len * elt_size, "%s", name);
10627 if (elt_size > sizeof (HOST_WIDE_INT))
10628 {
10629 elt_size /= 2;
10630 len *= 2;
10631 }
10632 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10633 i < len;
10634 i++, p += elt_size)
10635 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10636 "fp or vector constant word %u", i);
10637 break;
10638 }
10639
10640 case dw_val_class_flag:
10641 if (dwarf_version >= 4)
10642 {
10643 /* Currently all add_AT_flag calls pass in 1 as last argument,
10644 so DW_FORM_flag_present can be used. If that ever changes,
10645 we'll need to use DW_FORM_flag and have some optimization
10646 in build_abbrev_table that will change those to
10647 DW_FORM_flag_present if it is set to 1 in all DIEs using
10648 the same abbrev entry. */
10649 gcc_assert (AT_flag (a) == 1);
10650 if (flag_debug_asm)
10651 fprintf (asm_out_file, "\t\t\t%s %s\n",
10652 ASM_COMMENT_START, name);
10653 break;
10654 }
10655 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10656 break;
10657
10658 case dw_val_class_loc_list:
10659 output_loc_list_offset (a);
10660 break;
10661
10662 case dw_val_class_view_list:
10663 output_view_list_offset (a);
10664 break;
10665
10666 case dw_val_class_die_ref:
10667 if (AT_ref_external (a))
10668 {
10669 if (AT_ref (a)->comdat_type_p)
10670 {
10671 comdat_type_node *type_node
10672 = AT_ref (a)->die_id.die_type_node;
10673
10674 gcc_assert (type_node);
10675 output_signature (type_node->signature, name);
10676 }
10677 else
10678 {
10679 const char *sym = AT_ref (a)->die_id.die_symbol;
10680 int size;
10681
10682 gcc_assert (sym);
10683 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10684 length, whereas in DWARF3 it's always sized as an
10685 offset. */
10686 if (dwarf_version == 2)
10687 size = DWARF2_ADDR_SIZE;
10688 else
10689 size = DWARF_OFFSET_SIZE;
10690 /* ??? We cannot unconditionally output die_offset if
10691 non-zero - others might create references to those
10692 DIEs via symbols.
10693 And we do not clear its DIE offset after outputting it
10694 (and the label refers to the actual DIE, not to the
10695 DWARF CU header, which is what a label + offset reference
10696 would have to be relative to for that to be correct).
10697 ??? This is the reason for the with_offset flag. */
10698 if (AT_ref (a)->with_offset)
10699 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10700 debug_info_section, "%s", name);
10701 else
10702 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10703 name);
10704 }
10705 }
10706 else
10707 {
10708 gcc_assert (AT_ref (a)->die_offset);
10709 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10710 "%s", name);
10711 }
10712 break;
10713
10714 case dw_val_class_fde_ref:
10715 {
10716 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10717
10718 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10719 a->dw_attr_val.v.val_fde_index * 2);
10720 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10721 "%s", name);
10722 }
10723 break;
10724
10725 case dw_val_class_vms_delta:
10726 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10727 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10728 AT_vms_delta2 (a), AT_vms_delta1 (a),
10729 "%s", name);
10730 #else
10731 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10732 AT_vms_delta2 (a), AT_vms_delta1 (a),
10733 "%s", name);
10734 #endif
10735 break;
10736
10737 case dw_val_class_lbl_id:
10738 output_attr_index_or_value (a);
10739 break;
10740
10741 case dw_val_class_lineptr:
10742 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10743 debug_line_section, "%s", name);
10744 break;
10745
10746 case dw_val_class_macptr:
10747 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10748 debug_macinfo_section, "%s", name);
10749 break;
10750
10751 case dw_val_class_loclistsptr:
10752 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10753 debug_loc_section, "%s", name);
10754 break;
10755
10756 case dw_val_class_str:
10757 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10758 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10759 a->dw_attr_val.v.val_str->label,
10760 debug_str_section,
10761 "%s: \"%s\"", name, AT_string (a));
10762 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10763 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10764 a->dw_attr_val.v.val_str->label,
10765 debug_line_str_section,
10766 "%s: \"%s\"", name, AT_string (a));
10767 else if (a->dw_attr_val.v.val_str->form == dwarf_FORM (DW_FORM_strx))
10768 dw2_asm_output_data_uleb128 (AT_index (a),
10769 "%s: \"%s\"", name, AT_string (a));
10770 else
10771 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10772 break;
10773
10774 case dw_val_class_file:
10775 {
10776 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10777
10778 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10779 a->dw_attr_val.v.val_file->filename);
10780 break;
10781 }
10782
10783 case dw_val_class_file_implicit:
10784 if (flag_debug_asm)
10785 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10786 ASM_COMMENT_START, name,
10787 maybe_emit_file (a->dw_attr_val.v.val_file),
10788 a->dw_attr_val.v.val_file->filename);
10789 break;
10790
10791 case dw_val_class_data8:
10792 {
10793 int i;
10794
10795 for (i = 0; i < 8; i++)
10796 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10797 i == 0 ? "%s" : NULL, name);
10798 break;
10799 }
10800
10801 case dw_val_class_high_pc:
10802 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10803 get_AT_low_pc (die), "DW_AT_high_pc");
10804 break;
10805
10806 case dw_val_class_discr_value:
10807 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10808 break;
10809
10810 case dw_val_class_discr_list:
10811 {
10812 dw_discr_list_ref list = AT_discr_list (a);
10813 const int size = size_of_discr_list (list);
10814
10815 /* This is a block, so output its length first. */
10816 dw2_asm_output_data (constant_size (size), size,
10817 "%s: block size", name);
10818
10819 for (; list != NULL; list = list->dw_discr_next)
10820 {
10821 /* One byte for the discriminant value descriptor, and then as
10822 many LEB128 numbers as required. */
10823 if (list->dw_discr_range)
10824 dw2_asm_output_data (1, DW_DSC_range,
10825 "%s: DW_DSC_range", name);
10826 else
10827 dw2_asm_output_data (1, DW_DSC_label,
10828 "%s: DW_DSC_label", name);
10829
10830 output_discr_value (&list->dw_discr_lower_bound, name);
10831 if (list->dw_discr_range)
10832 output_discr_value (&list->dw_discr_upper_bound, name);
10833 }
10834 break;
10835 }
10836
10837 default:
10838 gcc_unreachable ();
10839 }
10840 }
10841
10842 FOR_EACH_CHILD (die, c, output_die (c));
10843
10844 /* Add null byte to terminate sibling list. */
10845 if (die->die_child != NULL)
10846 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10847 (unsigned long) die->die_offset);
10848 }
10849
10850 /* Output the dwarf version number. */
10851
10852 static void
10853 output_dwarf_version ()
10854 {
10855 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
10856 views in loclist. That will change eventually. */
10857 if (dwarf_version == 6)
10858 {
10859 static bool once;
10860 if (!once)
10861 {
10862 warning (0,
10863 "-gdwarf-6 is output as version 5 with incompatibilities");
10864 once = true;
10865 }
10866 dw2_asm_output_data (2, 5, "DWARF version number");
10867 }
10868 else
10869 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10870 }
10871
10872 /* Output the compilation unit that appears at the beginning of the
10873 .debug_info section, and precedes the DIE descriptions. */
10874
10875 static void
10876 output_compilation_unit_header (enum dwarf_unit_type ut)
10877 {
10878 if (!XCOFF_DEBUGGING_INFO)
10879 {
10880 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10881 dw2_asm_output_data (4, 0xffffffff,
10882 "Initial length escape value indicating 64-bit DWARF extension");
10883 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10884 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
10885 "Length of Compilation Unit Info");
10886 }
10887
10888 output_dwarf_version ();
10889 if (dwarf_version >= 5)
10890 {
10891 const char *name;
10892 switch (ut)
10893 {
10894 case DW_UT_compile: name = "DW_UT_compile"; break;
10895 case DW_UT_type: name = "DW_UT_type"; break;
10896 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
10897 case DW_UT_split_type: name = "DW_UT_split_type"; break;
10898 default: gcc_unreachable ();
10899 }
10900 dw2_asm_output_data (1, ut, "%s", name);
10901 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10902 }
10903 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
10904 debug_abbrev_section,
10905 "Offset Into Abbrev. Section");
10906 if (dwarf_version < 5)
10907 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10908 }
10909
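/* An illustrative, out-of-build sketch of the header sizes the function
   above produces; the helper name is hypothetical and is not used
   anywhere in this file.  It assumes 32-bit DWARF, i.e. 4-byte offsets
   and no initial-length escape word.  */
#if 0
static unsigned int
cu_header_size_sketch (int version, unsigned int offset_size)
{
  /* Unit length field plus the 2-byte version number.  */
  unsigned int size = offset_size + 2;
  if (version >= 5)
    /* Unit type (DW_UT_*), address size, then the .debug_abbrev offset.  */
    size += 1 + 1 + offset_size;
  else
    /* The .debug_abbrev offset, then the address size.  */
    size += offset_size + 1;
  return size;	/* e.g. 11 for DWARF 4, 12 for DWARF 5 with 4-byte offsets.  */
}
#endif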
10910 /* Output the compilation unit DIE and its children. */
10911
10912 static void
10913 output_comp_unit (dw_die_ref die, int output_if_empty,
10914 const unsigned char *dwo_id)
10915 {
10916 const char *secname, *oldsym;
10917 char *tmp;
10918
10919 	  /* Unless we are outputting the main CU, we may throw away empty ones.  */
10920 if (!output_if_empty && die->die_child == NULL)
10921 return;
10922
10923 /* Even if there are no children of this DIE, we must output the information
10924 about the compilation unit. Otherwise, on an empty translation unit, we
10925 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
10926 will then complain when examining the file. First mark all the DIEs in
10927 this CU so we know which get local refs. */
10928 mark_dies (die);
10929
10930 external_ref_hash_type *extern_map = optimize_external_refs (die);
10931
10932 /* For now, optimize only the main CU, in order to optimize the rest
10933 we'd need to see all of them earlier. Leave the rest for post-linking
10934 tools like DWZ. */
10935 if (die == comp_unit_die ())
10936 abbrev_opt_start = vec_safe_length (abbrev_die_table);
10937
10938 build_abbrev_table (die, extern_map);
10939
10940 optimize_abbrev_table ();
10941
10942 delete extern_map;
10943
10944 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
10945 next_die_offset = (dwo_id
10946 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
10947 : DWARF_COMPILE_UNIT_HEADER_SIZE);
10948 calc_die_sizes (die);
10949
10950 oldsym = die->die_id.die_symbol;
10951 if (oldsym && die->comdat_type_p)
10952 {
10953 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
10954
10955 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
10956 secname = tmp;
10957 die->die_id.die_symbol = NULL;
10958 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
10959 }
10960 else
10961 {
10962 switch_to_section (debug_info_section);
10963 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
10964 info_section_emitted = true;
10965 }
10966
10967 /* For LTO cross unit DIE refs we want a symbol on the start of the
10968 debuginfo section, not on the CU DIE. */
10969 if ((flag_generate_lto || flag_generate_offload) && oldsym)
10970 {
10971 /* ??? No way to get visibility assembled without a decl. */
10972 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
10973 get_identifier (oldsym), char_type_node);
10974 TREE_PUBLIC (decl) = true;
10975 TREE_STATIC (decl) = true;
10976 DECL_ARTIFICIAL (decl) = true;
10977 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
10978 DECL_VISIBILITY_SPECIFIED (decl) = true;
10979 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
10980 #ifdef ASM_WEAKEN_LABEL
10981 /* We prefer a .weak because that handles duplicates from duplicate
10982 archive members in a graceful way. */
10983 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
10984 #else
10985 targetm.asm_out.globalize_label (asm_out_file, oldsym);
10986 #endif
10987 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
10988 }
10989
10990 /* Output debugging information. */
10991 output_compilation_unit_header (dwo_id
10992 ? DW_UT_split_compile : DW_UT_compile);
10993 if (dwarf_version >= 5)
10994 {
10995 if (dwo_id != NULL)
10996 for (int i = 0; i < 8; i++)
10997 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
10998 }
10999 output_die (die);
11000
11001 /* Leave the marks on the main CU, so we can check them in
11002 output_pubnames. */
11003 if (oldsym)
11004 {
11005 unmark_dies (die);
11006 die->die_id.die_symbol = oldsym;
11007 }
11008 }
11009
11010 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
11011 and .debug_pubtypes. This is configured per-target, but can be
11012 overridden by the -gpubnames or -gno-pubnames options. */
11013
11014 static inline bool
11015 want_pubnames (void)
11016 {
11017 if (debug_info_level <= DINFO_LEVEL_TERSE)
11018 return false;
11019 if (debug_generate_pub_sections != -1)
11020 return debug_generate_pub_sections;
11021 return targetm.want_debug_pub_sections;
11022 }
11023
11024 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
11025
11026 static void
11027 add_AT_pubnames (dw_die_ref die)
11028 {
11029 if (want_pubnames ())
11030 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
11031 }
11032
11033 /* Add a string attribute value to a skeleton DIE. */
11034
11035 static inline void
11036 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
11037 const char *str)
11038 {
11039 dw_attr_node attr;
11040 struct indirect_string_node *node;
11041
11042 if (! skeleton_debug_str_hash)
11043 skeleton_debug_str_hash
11044 = hash_table<indirect_string_hasher>::create_ggc (10);
11045
11046 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11047 find_string_form (node);
11048 if (node->form == dwarf_FORM (DW_FORM_strx))
11049 node->form = DW_FORM_strp;
11050
11051 attr.dw_attr = attr_kind;
11052 attr.dw_attr_val.val_class = dw_val_class_str;
11053 attr.dw_attr_val.val_entry = NULL;
11054 attr.dw_attr_val.v.val_str = node;
11055 add_dwarf_attr (die, &attr);
11056 }
11057
11058 /* Helper function to generate top-level dies for skeleton debug_info and
11059 debug_types. */
11060
11061 static void
11062 add_top_level_skeleton_die_attrs (dw_die_ref die)
11063 {
11064 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11065 const char *comp_dir = comp_dir_string ();
11066
11067 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11068 if (comp_dir != NULL)
11069 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11070 add_AT_pubnames (die);
11071 add_AT_lineptr (die, dwarf_AT (DW_AT_addr_base), debug_addr_section_label);
11072 }
11073
11074 /* Output skeleton debug sections that point to the dwo file. */
11075
11076 static void
11077 output_skeleton_debug_sections (dw_die_ref comp_unit,
11078 const unsigned char *dwo_id)
11079 {
11080 /* These attributes will be found in the full debug_info section. */
11081 remove_AT (comp_unit, DW_AT_producer);
11082 remove_AT (comp_unit, DW_AT_language);
11083
11084 switch_to_section (debug_skeleton_info_section);
11085 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11086
11087 	  /* Produce the skeleton compilation-unit header.  This one differs
11088 	     enough from a normal CU header that it's better not to call
11089 	     output_compilation_unit_header ().  */
11090 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11091 dw2_asm_output_data (4, 0xffffffff,
11092 "Initial length escape value indicating 64-bit "
11093 "DWARF extension");
11094
11095 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11096 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11097 - DWARF_INITIAL_LENGTH_SIZE
11098 + size_of_die (comp_unit),
11099 "Length of Compilation Unit Info");
11100 output_dwarf_version ();
11101 if (dwarf_version >= 5)
11102 {
11103 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11104 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11105 }
11106 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
11107 debug_skeleton_abbrev_section,
11108 "Offset Into Abbrev. Section");
11109 if (dwarf_version < 5)
11110 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11111 else
11112 for (int i = 0; i < 8; i++)
11113 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11114
11115 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11116 output_die (comp_unit);
11117
11118 /* Build the skeleton debug_abbrev section. */
11119 switch_to_section (debug_skeleton_abbrev_section);
11120 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11121
11122 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11123
11124 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11125 }
11126
11127 /* Output a comdat type unit DIE and its children. */
11128
11129 static void
11130 output_comdat_type_unit (comdat_type_node *node)
11131 {
11132 const char *secname;
11133 char *tmp;
11134 int i;
11135 #if defined (OBJECT_FORMAT_ELF)
11136 tree comdat_key;
11137 #endif
11138
11139 /* First mark all the DIEs in this CU so we know which get local refs. */
11140 mark_dies (node->root_die);
11141
11142 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11143
11144 build_abbrev_table (node->root_die, extern_map);
11145
11146 delete extern_map;
11147 extern_map = NULL;
11148
11149 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11150 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11151 calc_die_sizes (node->root_die);
11152
11153 #if defined (OBJECT_FORMAT_ELF)
11154 if (dwarf_version >= 5)
11155 {
11156 if (!dwarf_split_debug_info)
11157 secname = ".debug_info";
11158 else
11159 secname = ".debug_info.dwo";
11160 }
11161 else if (!dwarf_split_debug_info)
11162 secname = ".debug_types";
11163 else
11164 secname = ".debug_types.dwo";
11165
11166 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11167 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11168 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11169 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
11170 comdat_key = get_identifier (tmp);
11171 targetm.asm_out.named_section (secname,
11172 SECTION_DEBUG | SECTION_LINKONCE,
11173 comdat_key);
11174 #else
11175 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11176 sprintf (tmp, (dwarf_version >= 5
11177 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11178 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11179 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11180 secname = tmp;
11181 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11182 #endif
11183
11184 /* Output debugging information. */
11185 output_compilation_unit_header (dwarf_split_debug_info
11186 ? DW_UT_split_type : DW_UT_type);
11187 output_signature (node->signature, "Type Signature");
11188 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
11189 "Offset to Type DIE");
11190 output_die (node->root_die);
11191
11192 unmark_dies (node->root_die);
11193 }
11194
11195 /* Return the DWARF2/3 pubname associated with a decl. */
11196
11197 static const char *
11198 dwarf2_name (tree decl, int scope)
11199 {
11200 if (DECL_NAMELESS (decl))
11201 return NULL;
11202 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11203 }
11204
11205 /* Add a new entry to .debug_pubnames if appropriate. */
11206
11207 static void
11208 add_pubname_string (const char *str, dw_die_ref die)
11209 {
11210 pubname_entry e;
11211
11212 e.die = die;
11213 e.name = xstrdup (str);
11214 vec_safe_push (pubname_table, e);
11215 }
11216
11217 static void
11218 add_pubname (tree decl, dw_die_ref die)
11219 {
11220 if (!want_pubnames ())
11221 return;
11222
11223 /* Don't add items to the table when we expect that the consumer will have
11224 just read the enclosing die. For example, if the consumer is looking at a
11225 class_member, it will either be inside the class already, or will have just
11226 looked up the class to find the member. Either way, searching the class is
11227 faster than searching the index. */
11228 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11229 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11230 {
11231 const char *name = dwarf2_name (decl, 1);
11232
11233 if (name)
11234 add_pubname_string (name, die);
11235 }
11236 }
11237
11238 /* Add an enumerator to the pubnames section. */
11239
11240 static void
11241 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11242 {
11243 pubname_entry e;
11244
11245 gcc_assert (scope_name);
11246 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11247 e.die = die;
11248 vec_safe_push (pubname_table, e);
11249 }
11250
11251 /* Add a new entry to .debug_pubtypes if appropriate. */
11252
11253 static void
11254 add_pubtype (tree decl, dw_die_ref die)
11255 {
11256 pubname_entry e;
11257
11258 if (!want_pubnames ())
11259 return;
11260
11261 if ((TREE_PUBLIC (decl)
11262 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11263 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11264 {
11265 tree scope = NULL;
11266 const char *scope_name = "";
11267 const char *sep = is_cxx () ? "::" : ".";
11268 const char *name;
11269
11270 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11271 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11272 {
11273 scope_name = lang_hooks.dwarf_name (scope, 1);
11274 if (scope_name != NULL && scope_name[0] != '\0')
11275 scope_name = concat (scope_name, sep, NULL);
11276 else
11277 scope_name = "";
11278 }
11279
11280 if (TYPE_P (decl))
11281 name = type_tag (decl);
11282 else
11283 name = lang_hooks.dwarf_name (decl, 1);
11284
11285 /* If we don't have a name for the type, there's no point in adding
11286 it to the table. */
11287 if (name != NULL && name[0] != '\0')
11288 {
11289 e.die = die;
11290 e.name = concat (scope_name, name, NULL);
11291 vec_safe_push (pubtype_table, e);
11292 }
11293
11294 /* Although it might be more consistent to add the pubinfo for the
11295 enumerators as their dies are created, they should only be added if the
11296 enum type meets the criteria above. So rather than re-check the parent
11297 enum type whenever an enumerator die is created, just output them all
11298 here. This isn't protected by the name conditional because anonymous
11299 enums don't have names. */
11300 if (die->die_tag == DW_TAG_enumeration_type)
11301 {
11302 dw_die_ref c;
11303
11304 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11305 }
11306 }
11307 }
11308
11309 /* Output a single entry in the pubnames table. */
11310
11311 static void
11312 output_pubname (dw_offset die_offset, pubname_entry *entry)
11313 {
11314 dw_die_ref die = entry->die;
11315 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11316
11317 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
11318
11319 if (debug_generate_pub_sections == 2)
11320 {
11321 /* This logic follows gdb's method for determining the value of the flag
11322 byte. */
11323 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11324 switch (die->die_tag)
11325 {
11326 case DW_TAG_typedef:
11327 case DW_TAG_base_type:
11328 case DW_TAG_subrange_type:
11329 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11330 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11331 break;
11332 case DW_TAG_enumerator:
11333 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11334 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11335 if (!is_cxx ())
11336 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11337 break;
11338 case DW_TAG_subprogram:
11339 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11340 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11341 if (!is_ada ())
11342 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11343 break;
11344 case DW_TAG_constant:
11345 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11346 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11347 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11348 break;
11349 case DW_TAG_variable:
11350 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11351 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11352 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11353 break;
11354 case DW_TAG_namespace:
11355 case DW_TAG_imported_declaration:
11356 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11357 break;
11358 case DW_TAG_class_type:
11359 case DW_TAG_interface_type:
11360 case DW_TAG_structure_type:
11361 case DW_TAG_union_type:
11362 case DW_TAG_enumeration_type:
11363 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11364 if (!is_cxx ())
11365 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11366 break;
11367 default:
11368 /* An unusual tag. Leave the flag-byte empty. */
11369 break;
11370 }
11371 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11372 "GDB-index flags");
11373 }
11374
11375 dw2_asm_output_nstring (entry->name, -1, "external name");
11376 }
11377
11378
11379 /* Output the public names table used to speed up access to externally
11380 visible names; or the public types table used to find type definitions. */
11381
11382 static void
11383 output_pubnames (vec<pubname_entry, va_gc> *names)
11384 {
11385 unsigned i;
11386 unsigned long pubnames_length = size_of_pubnames (names);
11387 pubname_entry *pub;
11388
11389 if (!XCOFF_DEBUGGING_INFO)
11390 {
11391 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11392 dw2_asm_output_data (4, 0xffffffff,
11393 "Initial length escape value indicating 64-bit DWARF extension");
11394 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
11395 "Pub Info Length");
11396 }
11397
11398 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11399 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11400
11401 if (dwarf_split_debug_info)
11402 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11403 debug_skeleton_info_section,
11404 "Offset of Compilation Unit Info");
11405 else
11406 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11407 debug_info_section,
11408 "Offset of Compilation Unit Info");
11409 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
11410 "Compilation Unit Length");
11411
11412 FOR_EACH_VEC_ELT (*names, i, pub)
11413 {
11414 if (include_pubname_in_output (names, pub))
11415 {
11416 dw_offset die_offset = pub->die->die_offset;
11417
11418 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11419 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11420 gcc_assert (pub->die->die_mark);
11421
11422 /* If we're putting types in their own .debug_types sections,
11423 the .debug_pubtypes table will still point to the compile
11424 unit (not the type unit), so we want to use the offset of
11425 the skeleton DIE (if there is one). */
11426 if (pub->die->comdat_type_p && names == pubtype_table)
11427 {
11428 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11429
11430 if (type_node != NULL)
11431 die_offset = (type_node->skeleton_die != NULL
11432 ? type_node->skeleton_die->die_offset
11433 : comp_unit_die ()->die_offset);
11434 }
11435
11436 output_pubname (die_offset, pub);
11437 }
11438 }
11439
11440 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
11441 }
11442
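/* An illustrative, out-of-build sketch of the shape of one
   .debug_pubnames / .debug_pubtypes set as emitted by output_pubnames
   above; the struct names are hypothetical and are not used anywhere in
   this file.  Assumes 32-bit DWARF.  */
#if 0
struct pub_entry_sketch
{
  unsigned int die_offset;	/* DIE offset within the CU; 0 terminates */
  unsigned char gdb_flags;	/* present only when
				   debug_generate_pub_sections == 2 */
  const char *name;		/* NUL-terminated external name */
};

struct pub_set_sketch
{
  unsigned int unit_length;	/* size of the set, excluding this field */
  unsigned short version;	/* always 2, independent of dwarf_version */
  unsigned int cu_offset;	/* offset of the CU in .debug_info */
  unsigned int cu_length;	/* size of that CU */
  /* Followed by pub_entry_sketch records and a terminating 4-byte zero
     die_offset.  */
};
#endif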
11443 /* Output public names and types tables if necessary. */
11444
11445 static void
11446 output_pubtables (void)
11447 {
11448 if (!want_pubnames () || !info_section_emitted)
11449 return;
11450
11451 switch_to_section (debug_pubnames_section);
11452 output_pubnames (pubname_table);
11453 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11454 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11455 simply won't look for the section. */
11456 switch_to_section (debug_pubtypes_section);
11457 output_pubnames (pubtype_table);
11458 }
11459
11460
11461 /* Output the information that goes into the .debug_aranges table.
11462 Namely, define the beginning and ending address range of the
11463 text section generated for this compilation unit. */
11464
11465 static void
11466 output_aranges (void)
11467 {
11468 unsigned i;
11469 unsigned long aranges_length = size_of_aranges ();
11470
11471 if (!XCOFF_DEBUGGING_INFO)
11472 {
11473 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11474 dw2_asm_output_data (4, 0xffffffff,
11475 "Initial length escape value indicating 64-bit DWARF extension");
11476 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
11477 "Length of Address Ranges Info");
11478 }
11479
11480 /* Version number for aranges is still 2, even up to DWARF5. */
11481 dw2_asm_output_data (2, 2, "DWARF aranges version");
11482 if (dwarf_split_debug_info)
11483 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11484 debug_skeleton_info_section,
11485 "Offset of Compilation Unit Info");
11486 else
11487 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11488 debug_info_section,
11489 "Offset of Compilation Unit Info");
11490 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11491 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11492
11493 /* We need to align to twice the pointer size here. */
11494 if (DWARF_ARANGES_PAD_SIZE)
11495 {
11496 	      /* Pad using 2-byte words so that the padding is correct for any
11497 		 pointer size.  */
11498 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11499 2 * DWARF2_ADDR_SIZE);
11500 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11501 dw2_asm_output_data (2, 0, NULL);
11502 }
11503
11504 /* It is necessary not to output these entries if the sections were
11505 not used; if the sections were not used, the length will be 0 and
11506 the address may end up as 0 if the section is discarded by ld
11507 --gc-sections, leaving an invalid (0, 0) entry that can be
11508 confused with the terminator. */
11509 if (text_section_used)
11510 {
11511 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11512 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11513 text_section_label, "Length");
11514 }
11515 if (cold_text_section_used)
11516 {
11517 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11518 "Address");
11519 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11520 cold_text_section_label, "Length");
11521 }
11522
11523 if (have_multiple_function_sections)
11524 {
11525 unsigned fde_idx;
11526 dw_fde_ref fde;
11527
11528 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11529 {
11530 if (DECL_IGNORED_P (fde->decl))
11531 continue;
11532 if (!fde->in_std_section)
11533 {
11534 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11535 "Address");
11536 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11537 fde->dw_fde_begin, "Length");
11538 }
11539 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11540 {
11541 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11542 "Address");
11543 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11544 fde->dw_fde_second_begin, "Length");
11545 }
11546 }
11547 }
11548
11549 /* Output the terminator words. */
11550 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11551 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11552 }
11553
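/* An illustrative, out-of-build sketch of the padding arithmetic used
   above; the helper name is hypothetical and is not used anywhere in
   this file.  Assumes 32-bit DWARF (4-byte offsets, no initial-length
   escape).  */
#if 0
static unsigned int
aranges_pad_sketch (unsigned int addr_size)
{
  /* Header: length (4) + version (2) + CU offset (4)
     + address size (1) + segment size (1).  */
  unsigned int header = 4 + 2 + 4 + 1 + 1;
  /* The address/length pairs must start on a 2 * addr_size boundary.  */
  unsigned int align = 2 * addr_size;
  unsigned int pad = (align - header % align) % align;
  /* E.g. addr_size == 8: the 12-byte header is padded up to 16, i.e. 4
     bytes of padding, emitted as two 2-byte words.  */
  return pad;
}
#endif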
11554 /* Add a new entry to .debug_ranges. Return its index into
11555 ranges_table vector. */
11556
11557 static unsigned int
11558 add_ranges_num (int num, bool maybe_new_sec)
11559 {
11560 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11561 vec_safe_push (ranges_table, r);
11562 return vec_safe_length (ranges_table) - 1;
11563 }
11564
11565 /* Add a new entry to .debug_ranges corresponding to a block, or a
11566 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11567 this entry might be in a different section from previous range. */
11568
11569 static unsigned int
11570 add_ranges (const_tree block, bool maybe_new_sec)
11571 {
11572 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11573 }
11574
11575 	/* Note that (*ranges_table)[offset] is either the head of a rnglist
11576 	   chain, or a middle entry of a chain that will be directly referred to.  */
11577
11578 static void
11579 note_rnglist_head (unsigned int offset)
11580 {
11581 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11582 return;
11583 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11584 }
11585
11586 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11587 When using dwarf_split_debug_info, address attributes in dies destined
11588 for the final executable should be direct references--setting the
11589 parameter force_direct ensures this behavior. */
11590
11591 static void
11592 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11593 bool *added, bool force_direct)
11594 {
11595 unsigned int in_use = vec_safe_length (ranges_by_label);
11596 unsigned int offset;
11597 dw_ranges_by_label rbl = { begin, end };
11598 vec_safe_push (ranges_by_label, rbl);
11599 offset = add_ranges_num (-(int)in_use - 1, true);
11600 if (!*added)
11601 {
11602 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11603 *added = true;
11604 note_rnglist_head (offset);
11605 }
11606 }
11607
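/* An illustrative, out-of-build sketch of the dw_ranges::num encoding
   established by the helpers above and decoded by output_ranges and
   output_rnglists below; the helper names are hypothetical and are not
   used anywhere in this file.  */
#if 0
static int
encode_ranges_num_sketch (int block_number, int label_pair_index,
			  int is_terminator)
{
  if (is_terminator)
    return 0;				/* end-of-list marker */
  if (block_number > 0)
    return block_number;		/* BLOCK_NUMBER of a lexical block */
  return -label_pair_index - 1;		/* index into ranges_by_label,
					   biased so that it stays negative */
}

static int
decode_label_pair_index_sketch (int num)
{
  /* Only meaningful for num < 0.  */
  return -num - 1;
}
#endif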
11608 /* Emit .debug_ranges section. */
11609
11610 static void
11611 output_ranges (void)
11612 {
11613 unsigned i;
11614 static const char *const start_fmt = "Offset %#x";
11615 const char *fmt = start_fmt;
11616 dw_ranges *r;
11617
11618 switch_to_section (debug_ranges_section);
11619 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11620 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11621 {
11622 int block_num = r->num;
11623
11624 if (block_num > 0)
11625 {
11626 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11627 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11628
11629 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11630 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11631
11632 /* If all code is in the text section, then the compilation
11633 unit base address defaults to DW_AT_low_pc, which is the
11634 base of the text section. */
11635 if (!have_multiple_function_sections)
11636 {
11637 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11638 text_section_label,
11639 fmt, i * 2 * DWARF2_ADDR_SIZE);
11640 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11641 text_section_label, NULL);
11642 }
11643
11644 /* Otherwise, the compilation unit base address is zero,
11645 which allows us to use absolute addresses, and not worry
11646 about whether the target supports cross-section
11647 arithmetic. */
11648 else
11649 {
11650 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11651 fmt, i * 2 * DWARF2_ADDR_SIZE);
11652 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11653 }
11654
11655 fmt = NULL;
11656 }
11657
11658 /* Negative block_num stands for an index into ranges_by_label. */
11659 else if (block_num < 0)
11660 {
11661 int lab_idx = - block_num - 1;
11662
11663 if (!have_multiple_function_sections)
11664 {
11665 gcc_unreachable ();
11666 #if 0
11667 /* If we ever use add_ranges_by_labels () for a single
11668 function section, all we have to do is to take out
11669 the #if 0 above. */
11670 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11671 (*ranges_by_label)[lab_idx].begin,
11672 text_section_label,
11673 fmt, i * 2 * DWARF2_ADDR_SIZE);
11674 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11675 (*ranges_by_label)[lab_idx].end,
11676 text_section_label, NULL);
11677 #endif
11678 }
11679 else
11680 {
11681 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11682 (*ranges_by_label)[lab_idx].begin,
11683 fmt, i * 2 * DWARF2_ADDR_SIZE);
11684 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11685 (*ranges_by_label)[lab_idx].end,
11686 NULL);
11687 }
11688 }
11689 else
11690 {
11691 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11692 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11693 fmt = start_fmt;
11694 }
11695 }
11696 }
11697
11698 /* Non-zero if .debug_line_str should be used for .debug_line section
11699 strings or strings that are likely shareable with those. */
11700 #define DWARF5_USE_DEBUG_LINE_STR \
11701 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11702 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11703 /* FIXME: there is no .debug_line_str.dwo section, \
11704 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11705 && !dwarf_split_debug_info)
11706
11707 /* Assign .debug_rnglists indexes. */
11708
11709 static void
11710 index_rnglists (void)
11711 {
11712 unsigned i;
11713 dw_ranges *r;
11714
11715 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11716 if (r->label)
11717 r->idx = rnglist_idx++;
11718 }
11719
11720 /* Emit .debug_rnglists section. */
11721
11722 static void
11723 output_rnglists (unsigned generation)
11724 {
11725 unsigned i;
11726 dw_ranges *r;
11727 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11728 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11729 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11730
11731 switch_to_section (debug_ranges_section);
11732 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11733 /* There are up to 4 unique ranges labels per generation.
11734 See also init_sections_and_labels. */
11735 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11736 2 + generation * 4);
11737 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11738 3 + generation * 4);
11739 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11740 dw2_asm_output_data (4, 0xffffffff,
11741 "Initial length escape value indicating "
11742 "64-bit DWARF extension");
11743 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11744 "Length of Range Lists");
11745 ASM_OUTPUT_LABEL (asm_out_file, l1);
11746 output_dwarf_version ();
11747 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11748 dw2_asm_output_data (1, 0, "Segment Size");
11749 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11750 about relocation sizes and primarily care about the size of .debug*
11751 sections in linked shared libraries and executables, then
11752 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11753 into it are usually larger than just DW_FORM_sec_offset offsets
11754 into the .debug_rnglists section. */
11755 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11756 "Offset Entry Count");
11757 if (dwarf_split_debug_info)
11758 {
11759 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11760 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11761 if (r->label)
11762 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11763 ranges_base_label, NULL);
11764 }
11765
11766 const char *lab = "";
11767 unsigned int len = vec_safe_length (ranges_table);
11768 const char *base = NULL;
11769 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11770 {
11771 int block_num = r->num;
11772
11773 if (r->label)
11774 {
11775 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11776 lab = r->label;
11777 }
11778 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11779 base = NULL;
11780 if (block_num > 0)
11781 {
11782 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11783 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11784
11785 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11786 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11787
11788 if (HAVE_AS_LEB128)
11789 {
11790 /* If all code is in the text section, then the compilation
11791 unit base address defaults to DW_AT_low_pc, which is the
11792 base of the text section. */
11793 if (!have_multiple_function_sections)
11794 {
11795 dw2_asm_output_data (1, DW_RLE_offset_pair,
11796 "DW_RLE_offset_pair (%s)", lab);
11797 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11798 "Range begin address (%s)", lab);
11799 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11800 "Range end address (%s)", lab);
11801 continue;
11802 }
11803 if (base == NULL)
11804 {
11805 dw_ranges *r2 = NULL;
11806 if (i < len - 1)
11807 r2 = &(*ranges_table)[i + 1];
11808 if (r2
11809 && r2->num != 0
11810 && r2->label == NULL
11811 && !r2->maybe_new_sec)
11812 {
11813 dw2_asm_output_data (1, DW_RLE_base_address,
11814 "DW_RLE_base_address (%s)", lab);
11815 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11816 "Base address (%s)", lab);
11817 strcpy (basebuf, blabel);
11818 base = basebuf;
11819 }
11820 }
11821 if (base)
11822 {
11823 dw2_asm_output_data (1, DW_RLE_offset_pair,
11824 "DW_RLE_offset_pair (%s)", lab);
11825 dw2_asm_output_delta_uleb128 (blabel, base,
11826 "Range begin address (%s)", lab);
11827 dw2_asm_output_delta_uleb128 (elabel, base,
11828 "Range end address (%s)", lab);
11829 continue;
11830 }
11831 dw2_asm_output_data (1, DW_RLE_start_length,
11832 "DW_RLE_start_length (%s)", lab);
11833 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11834 "Range begin address (%s)", lab);
11835 dw2_asm_output_delta_uleb128 (elabel, blabel,
11836 "Range length (%s)", lab);
11837 }
11838 else
11839 {
11840 dw2_asm_output_data (1, DW_RLE_start_end,
11841 "DW_RLE_start_end (%s)", lab);
11842 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11843 "Range begin address (%s)", lab);
11844 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11845 "Range end address (%s)", lab);
11846 }
11847 }
11848
11849 /* Negative block_num stands for an index into ranges_by_label. */
11850 else if (block_num < 0)
11851 {
11852 int lab_idx = - block_num - 1;
11853 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11854 const char *elabel = (*ranges_by_label)[lab_idx].end;
11855
11856 if (!have_multiple_function_sections)
11857 gcc_unreachable ();
11858 if (HAVE_AS_LEB128)
11859 {
11860 dw2_asm_output_data (1, DW_RLE_start_length,
11861 "DW_RLE_start_length (%s)", lab);
11862 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11863 "Range begin address (%s)", lab);
11864 dw2_asm_output_delta_uleb128 (elabel, blabel,
11865 "Range length (%s)", lab);
11866 }
11867 else
11868 {
11869 dw2_asm_output_data (1, DW_RLE_start_end,
11870 "DW_RLE_start_end (%s)", lab);
11871 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11872 "Range begin address (%s)", lab);
11873 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11874 "Range end address (%s)", lab);
11875 }
11876 }
11877 else
11878 dw2_asm_output_data (1, DW_RLE_end_of_list,
11879 "DW_RLE_end_of_list (%s)", lab);
11880 }
11881 ASM_OUTPUT_LABEL (asm_out_file, l2);
11882 }
11883
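/* An illustrative, out-of-build sketch of the operand shapes of the
   DWARF 5 range-list entry kinds that output_rnglists above emits; the
   helper name is hypothetical and is not used anywhere in this file.
   The DW_RLE_* codes come from dwarf2.h.  */
#if 0
static const char *
rle_operands_sketch (int kind)
{
  switch (kind)
    {
    case DW_RLE_offset_pair:	/* two ULEB128 offsets from the base */
      return "uleb128 begin, uleb128 end";
    case DW_RLE_base_address:	/* sets the base for later offset pairs */
      return "address base";
    case DW_RLE_start_length:	/* absolute start plus ULEB128 length */
      return "address begin, uleb128 length";
    case DW_RLE_start_end:	/* two absolute addresses */
      return "address begin, address end";
    case DW_RLE_end_of_list:	/* terminates the list, no operands */
      return "";
    default:
      return "not used here";
    }
}
#endif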
11884 /* Data structure containing information about input files. */
11885 struct file_info
11886 {
11887 const char *path; /* Complete file name. */
11888 const char *fname; /* File name part. */
11889 int length; /* Length of entire string. */
11890 struct dwarf_file_data * file_idx; /* Index in input file table. */
11891 int dir_idx; /* Index in directory table. */
11892 };
11893
11894 /* Data structure containing information about directories with source
11895 files. */
11896 struct dir_info
11897 {
11898 const char *path; /* Path including directory name. */
11899 int length; /* Path length. */
11900 int prefix; /* Index of directory entry which is a prefix. */
11901 int count; /* Number of files in this directory. */
11902 int dir_idx; /* Index of directory used as base. */
11903 };
11904
11905 /* Callback function for file_info comparison. We sort by looking at
11906 the directories in the path. */
11907
11908 static int
11909 file_info_cmp (const void *p1, const void *p2)
11910 {
11911 const struct file_info *const s1 = (const struct file_info *) p1;
11912 const struct file_info *const s2 = (const struct file_info *) p2;
11913 const unsigned char *cp1;
11914 const unsigned char *cp2;
11915
11916 	  /* Take care of file names without directories.  We need to return
11917 	     consistent values to qsort: some implementations get confused if
11918 	     swapping the operands does not flip the sign of the result.  So if
11919 	     neither has a directory, return 0; otherwise return 1 or -1 depending
11920 	     on which one has the directory.  We want the one with the directory
11921 	     to sort after the one without, so all files without a directory are
11922 	     at the start (normally only the compilation unit file).  */
11923 if ((s1->path == s1->fname || s2->path == s2->fname))
11924 return (s2->path == s2->fname) - (s1->path == s1->fname);
11925
11926 cp1 = (const unsigned char *) s1->path;
11927 cp2 = (const unsigned char *) s2->path;
11928
11929 while (1)
11930 {
11931 ++cp1;
11932 ++cp2;
11933 /* Reached the end of the first path? If so, handle like above,
11934 but now we want longer directory prefixes before shorter ones. */
11935 if ((cp1 == (const unsigned char *) s1->fname)
11936 || (cp2 == (const unsigned char *) s2->fname))
11937 return ((cp1 == (const unsigned char *) s1->fname)
11938 - (cp2 == (const unsigned char *) s2->fname));
11939
11940 /* Character of current path component the same? */
11941 else if (*cp1 != *cp2)
11942 return *cp1 - *cp2;
11943 }
11944 }
11945
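/* An illustrative, out-of-build sketch of the ordering file_info_cmp
   aims for, as a simplified standalone program with made-up file names;
   none of these types or helpers are used anywhere in this file.  Files
   without a directory sort first, and files sharing a directory end up
   adjacent, so output_file_names can build the directory table in a
   single pass over the sorted array.  */
#if 0
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct fi_sketch
{
  const char *path;	/* complete file name */
  const char *fname;	/* basename part within PATH */
};

static int
fi_cmp_sketch (const void *p1, const void *p2)
{
  const struct fi_sketch *s1 = p1;
  const struct fi_sketch *s2 = p2;

  /* Entries without a directory sort before everything else.  */
  if (s1->path == s1->fname || s2->path == s2->fname)
    return (s2->path == s2->fname) - (s1->path == s1->fname);

  /* Otherwise order by the directory part so that equal directories
     become adjacent.  */
  size_t d1 = s1->fname - s1->path, d2 = s2->fname - s2->path;
  int c = memcmp (s1->path, s2->path, d1 < d2 ? d1 : d2);
  if (c != 0)
    return c;
  /* One directory is a prefix of the other (or they are equal); the
     longer directory sorts first, matching file_info_cmp's intent.  */
  return d1 < d2 ? 1 : d1 > d2 ? -1 : 0;
}

int
main (void)
{
  const char *names[] = { "src/sub/b.c", "main.c", "include/util.h",
			  "src/sub/a.c" };
  struct fi_sketch f[4];
  for (int i = 0; i < 4; i++)
    {
      const char *slash = strrchr (names[i], '/');
      f[i].path = names[i];
      f[i].fname = slash ? slash + 1 : names[i];
    }
  qsort (f, 4, sizeof f[0], fi_cmp_sketch);
  for (int i = 0; i < 4; i++)
    printf ("%s\n", f[i].path);	/* main.c first, then grouped by directory */
  return 0;
}
#endif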
11946 struct file_name_acquire_data
11947 {
11948 struct file_info *files;
11949 int used_files;
11950 int max_files;
11951 };
11952
11953 /* Traversal function for the hash table. */
11954
11955 int
11956 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
11957 {
11958 struct dwarf_file_data *d = *slot;
11959 struct file_info *fi;
11960 const char *f;
11961
11962 gcc_assert (fnad->max_files >= d->emitted_number);
11963
11964 if (! d->emitted_number)
11965 return 1;
11966
11967 gcc_assert (fnad->max_files != fnad->used_files);
11968
11969 fi = fnad->files + fnad->used_files++;
11970
11971 /* Skip all leading "./". */
11972 f = d->filename;
11973 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
11974 f += 2;
11975
11976 /* Create a new array entry. */
11977 fi->path = f;
11978 fi->length = strlen (f);
11979 fi->file_idx = d;
11980
11981 /* Search for the file name part. */
11982 f = strrchr (f, DIR_SEPARATOR);
11983 #if defined (DIR_SEPARATOR_2)
11984 {
11985 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
11986
11987 if (g != NULL)
11988 {
11989 if (f == NULL || f < g)
11990 f = g;
11991 }
11992 }
11993 #endif
11994
11995 fi->fname = f == NULL ? fi->path : f + 1;
11996 return 1;
11997 }
11998
11999 /* Helper function for output_file_names. Emit a FORM encoded
12000 string STR, with assembly comment start ENTRY_KIND and
12001    index IDX.  */
12002
12003 static void
12004 output_line_string (enum dwarf_form form, const char *str,
12005 const char *entry_kind, unsigned int idx)
12006 {
12007 switch (form)
12008 {
12009 case DW_FORM_string:
12010 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
12011 break;
12012 case DW_FORM_line_strp:
12013 if (!debug_line_str_hash)
12014 debug_line_str_hash
12015 = hash_table<indirect_string_hasher>::create_ggc (10);
12016
12017 struct indirect_string_node *node;
12018 node = find_AT_string_in_table (str, debug_line_str_hash);
12019 set_indirect_string (node);
12020 node->form = form;
12021 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
12022 debug_line_str_section, "%s: %#x: \"%s\"",
12023 entry_kind, 0, node->str);
12024 break;
12025 default:
12026 gcc_unreachable ();
12027 }
12028 }
12029
12030 /* Output the directory table and the file name table. We try to minimize
12031 the total amount of memory needed. A heuristic is used to avoid large
12032 slowdowns with many input files. */
12033
12034 static void
12035 output_file_names (void)
12036 {
12037 struct file_name_acquire_data fnad;
12038 int numfiles;
12039 struct file_info *files;
12040 struct dir_info *dirs;
12041 int *saved;
12042 int *savehere;
12043 int *backmap;
12044 int ndirs;
12045 int idx_offset;
12046 int i;
12047
12048 if (!last_emitted_file)
12049 {
12050 if (dwarf_version >= 5)
12051 {
12052 dw2_asm_output_data (1, 0, "Directory entry format count");
12053 dw2_asm_output_data_uleb128 (0, "Directories count");
12054 dw2_asm_output_data (1, 0, "File name entry format count");
12055 dw2_asm_output_data_uleb128 (0, "File names count");
12056 }
12057 else
12058 {
12059 dw2_asm_output_data (1, 0, "End directory table");
12060 dw2_asm_output_data (1, 0, "End file name table");
12061 }
12062 return;
12063 }
12064
12065 numfiles = last_emitted_file->emitted_number;
12066
12067 /* Allocate the various arrays we need. */
12068 files = XALLOCAVEC (struct file_info, numfiles);
12069 dirs = XALLOCAVEC (struct dir_info, numfiles);
12070
12071 fnad.files = files;
12072 fnad.used_files = 0;
12073 fnad.max_files = numfiles;
12074 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12075 gcc_assert (fnad.used_files == fnad.max_files);
12076
12077 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12078
12079 /* Find all the different directories used. */
12080 dirs[0].path = files[0].path;
12081 dirs[0].length = files[0].fname - files[0].path;
12082 dirs[0].prefix = -1;
12083 dirs[0].count = 1;
12084 dirs[0].dir_idx = 0;
12085 files[0].dir_idx = 0;
12086 ndirs = 1;
12087
12088 for (i = 1; i < numfiles; i++)
12089 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12090 && memcmp (dirs[ndirs - 1].path, files[i].path,
12091 dirs[ndirs - 1].length) == 0)
12092 {
12093 /* Same directory as last entry. */
12094 files[i].dir_idx = ndirs - 1;
12095 ++dirs[ndirs - 1].count;
12096 }
12097 else
12098 {
12099 int j;
12100
12101 /* This is a new directory. */
12102 dirs[ndirs].path = files[i].path;
12103 dirs[ndirs].length = files[i].fname - files[i].path;
12104 dirs[ndirs].count = 1;
12105 dirs[ndirs].dir_idx = ndirs;
12106 files[i].dir_idx = ndirs;
12107
12108 /* Search for a prefix. */
12109 dirs[ndirs].prefix = -1;
12110 for (j = 0; j < ndirs; j++)
12111 if (dirs[j].length < dirs[ndirs].length
12112 && dirs[j].length > 1
12113 && (dirs[ndirs].prefix == -1
12114 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12115 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12116 dirs[ndirs].prefix = j;
12117
12118 ++ndirs;
12119 }
12120
12121 	  /* Now to the actual work.  We have to find a subset of the directories
12122 	     which allows expressing the file names using references to the
12123 	     directory table with the fewest characters.  We do not do an
12124 	     exhaustive search, which would require checking every combination of
12125 	     every possible prefix.  Instead we use a heuristic which provides
12126 	     nearly optimal results in most cases and is never far off.  */
12127 saved = XALLOCAVEC (int, ndirs);
12128 savehere = XALLOCAVEC (int, ndirs);
12129
12130 memset (saved, '\0', ndirs * sizeof (saved[0]));
12131 for (i = 0; i < ndirs; i++)
12132 {
12133 int j;
12134 int total;
12135
12136 /* We can always save some space for the current directory. But this
12137 does not mean it will be enough to justify adding the directory. */
12138 savehere[i] = dirs[i].length;
12139 total = (savehere[i] - saved[i]) * dirs[i].count;
12140
12141 for (j = i + 1; j < ndirs; j++)
12142 {
12143 savehere[j] = 0;
12144 if (saved[j] < dirs[i].length)
12145 {
12146 /* Determine whether the dirs[i] path is a prefix of the
12147 dirs[j] path. */
12148 int k;
12149
12150 k = dirs[j].prefix;
12151 while (k != -1 && k != (int) i)
12152 k = dirs[k].prefix;
12153
12154 if (k == (int) i)
12155 {
12156 /* Yes it is. We can possibly save some memory by
12157 writing the filenames in dirs[j] relative to
12158 dirs[i]. */
12159 savehere[j] = dirs[i].length;
12160 total += (savehere[j] - saved[j]) * dirs[j].count;
12161 }
12162 }
12163 }
12164
12165 /* Check whether we can save enough to justify adding the dirs[i]
12166 directory. */
12167 if (total > dirs[i].length + 1)
12168 {
12169 /* It's worthwhile adding. */
12170 for (j = i; j < ndirs; j++)
12171 if (savehere[j] > 0)
12172 {
12173 /* Remember how much we saved for this directory so far. */
12174 saved[j] = savehere[j];
12175
12176 /* Remember the prefix directory. */
12177 dirs[j].dir_idx = i;
12178 }
12179 }
12180 }
12181
12182 /* Emit the directory name table. */
12183 idx_offset = dirs[0].length > 0 ? 1 : 0;
12184 enum dwarf_form str_form = DW_FORM_string;
12185 enum dwarf_form idx_form = DW_FORM_udata;
12186 if (dwarf_version >= 5)
12187 {
12188 const char *comp_dir = comp_dir_string ();
12189 if (comp_dir == NULL)
12190 comp_dir = "";
12191 dw2_asm_output_data (1, 1, "Directory entry format count");
12192 if (DWARF5_USE_DEBUG_LINE_STR)
12193 str_form = DW_FORM_line_strp;
12194 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12195 dw2_asm_output_data_uleb128 (str_form, "%s",
12196 get_DW_FORM_name (str_form));
12197 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12198 if (str_form == DW_FORM_string)
12199 {
12200 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12201 for (i = 1 - idx_offset; i < ndirs; i++)
12202 dw2_asm_output_nstring (dirs[i].path,
12203 dirs[i].length
12204 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12205 "Directory Entry: %#x", i + idx_offset);
12206 }
12207 else
12208 {
12209 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12210 for (i = 1 - idx_offset; i < ndirs; i++)
12211 {
12212 const char *str
12213 = ggc_alloc_string (dirs[i].path,
12214 dirs[i].length
12215 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12216 output_line_string (str_form, str, "Directory Entry",
12217 (unsigned) i + idx_offset);
12218 }
12219 }
12220 }
12221 else
12222 {
12223 for (i = 1 - idx_offset; i < ndirs; i++)
12224 dw2_asm_output_nstring (dirs[i].path,
12225 dirs[i].length
12226 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12227 "Directory Entry: %#x", i + idx_offset);
12228
12229 dw2_asm_output_data (1, 0, "End directory table");
12230 }
12231
12232 /* We have to emit them in the order of emitted_number since that's
12233 used in the debug info generation. To do this efficiently we
12234 generate a back-mapping of the indices first. */
12235 backmap = XALLOCAVEC (int, numfiles);
12236 for (i = 0; i < numfiles; i++)
12237 backmap[files[i].file_idx->emitted_number - 1] = i;
12238
12239 if (dwarf_version >= 5)
12240 {
12241 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12242 if (filename0 == NULL)
12243 filename0 = "";
12244 	      /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12245 		 DW_FORM_data2.  Choose one based on the number of directories
12246 		 and how much space they would occupy in each encoding.
12247 		 If we have at most 256 directories, every index fits into
12248 		 a single byte, so DW_FORM_data1 is most compact (if there
12249 		 are at most 128 directories, DW_FORM_udata would be just as
12250 		 compact, but no shorter and slower to decode).  */
12251 if (ndirs + idx_offset <= 256)
12252 idx_form = DW_FORM_data1;
12253 	      /* If there are more than 65536 directories, we have to use
12254 		 DW_FORM_udata; DW_FORM_data2 can't refer to them.  Otherwise,
12255 		 compute how much space all the indexes would occupy if they
12256 		 used DW_FORM_udata (sum), compare that to the size of the
12257 		 DW_FORM_data2 encoding, and pick the more efficient one.  */
12258 else if (ndirs + idx_offset <= 65536)
12259 {
12260 unsigned HOST_WIDE_INT sum = 1;
12261 for (i = 0; i < numfiles; i++)
12262 {
12263 int file_idx = backmap[i];
12264 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12265 sum += size_of_uleb128 (dir_idx);
12266 }
12267 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12268 idx_form = DW_FORM_data2;
12269 }
12270 #ifdef VMS_DEBUGGING_INFO
12271 dw2_asm_output_data (1, 4, "File name entry format count");
12272 #else
12273 dw2_asm_output_data (1, 2, "File name entry format count");
12274 #endif
12275 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12276 dw2_asm_output_data_uleb128 (str_form, "%s",
12277 get_DW_FORM_name (str_form));
12278 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12279 "DW_LNCT_directory_index");
12280 dw2_asm_output_data_uleb128 (idx_form, "%s",
12281 get_DW_FORM_name (idx_form));
12282 #ifdef VMS_DEBUGGING_INFO
12283 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12284 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12285 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12286 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12287 #endif
12288 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12289
12290 output_line_string (str_form, filename0, "File Entry", 0);
12291
12292 /* Include directory index. */
12293 if (idx_form != DW_FORM_udata)
12294 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12295 0, NULL);
12296 else
12297 dw2_asm_output_data_uleb128 (0, NULL);
12298
12299 #ifdef VMS_DEBUGGING_INFO
12300 dw2_asm_output_data_uleb128 (0, NULL);
12301 dw2_asm_output_data_uleb128 (0, NULL);
12302 #endif
12303 }
12304
12305 /* Now write all the file names. */
12306 for (i = 0; i < numfiles; i++)
12307 {
12308 int file_idx = backmap[i];
12309 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12310
12311 #ifdef VMS_DEBUGGING_INFO
12312 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12313
12314 /* Setting these fields can lead to debugger miscomparisons,
12315 but VMS Debug requires them to be set correctly. */
12316
12317 int ver;
12318 long long cdt;
12319 long siz;
12320 int maxfilelen = (strlen (files[file_idx].path)
12321 + dirs[dir_idx].length
12322 + MAX_VMS_VERSION_LEN + 1);
12323 char *filebuf = XALLOCAVEC (char, maxfilelen);
12324
12325 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12326 snprintf (filebuf, maxfilelen, "%s;%d",
12327 files[file_idx].path + dirs[dir_idx].length, ver);
12328
12329 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12330
12331 /* Include directory index. */
12332 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12333 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12334 dir_idx + idx_offset, NULL);
12335 else
12336 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12337
12338 /* Modification time. */
12339 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12340 &cdt, 0, 0, 0) == 0)
12341 ? cdt : 0, NULL);
12342
12343 /* File length in bytes. */
12344 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12345 0, &siz, 0, 0) == 0)
12346 ? siz : 0, NULL);
12347 #else
12348 output_line_string (str_form,
12349 files[file_idx].path + dirs[dir_idx].length,
12350 "File Entry", (unsigned) i + 1);
12351
12352 /* Include directory index. */
12353 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12354 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12355 dir_idx + idx_offset, NULL);
12356 else
12357 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12358
12359 if (dwarf_version >= 5)
12360 continue;
12361
12362 /* Modification time. */
12363 dw2_asm_output_data_uleb128 (0, NULL);
12364
12365 /* File length in bytes. */
12366 dw2_asm_output_data_uleb128 (0, NULL);
12367 #endif /* VMS_DEBUGGING_INFO */
12368 }
12369
12370 if (dwarf_version < 5)
12371 dw2_asm_output_data (1, 0, "End file name table");
12372 }
12373
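/* An illustrative, out-of-build sketch of the DW_LNCT_directory_index
   form selection performed above; the helper name is hypothetical and
   is not used anywhere in this file.  NDIRS counts the directory
   entries including entry 0, and SUM is the total size the per-file
   directory indexes (plus the one for file entry 0) would need when
   encoded as ULEB128.  */
#if 0
static enum dwarf_form
dir_index_form_sketch (unsigned int ndirs, unsigned int nfiles,
		       unsigned long sum)
{
  if (ndirs <= 256)
    return DW_FORM_data1;	/* every index fits into one byte */
  if (ndirs > 65536)
    return DW_FORM_udata;	/* DW_FORM_data2 cannot reach them all */
  /* DW_FORM_data2 always costs 2 bytes per entry (the files plus file
     entry 0); prefer it only when ULEB128 would not be smaller.  */
  return sum >= 2UL * (nfiles + 1) ? DW_FORM_data2 : DW_FORM_udata;
}
#endif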
12374
12375 /* Output one line number table into the .debug_line section. */
12376
12377 static void
12378 output_one_line_info_table (dw_line_info_table *table)
12379 {
12380 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12381 unsigned int current_line = 1;
12382 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12383 dw_line_info_entry *ent, *prev_addr;
12384 size_t i;
12385 unsigned int view;
12386
12387 view = 0;
12388
12389 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12390 {
12391 switch (ent->opcode)
12392 {
12393 case LI_set_address:
12394 /* ??? Unfortunately, we have little choice here currently, and
12395 must always use the most general form. GCC does not know the
12396 address delta itself, so we can't use DW_LNS_advance_pc. Many
12397 ports do have length attributes which will give an upper bound
12398 on the address range. We could perhaps use length attributes
12399 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
12400 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12401
12402 view = 0;
12403
12404 /* This can handle any delta. This takes
12405 4+DWARF2_ADDR_SIZE bytes. */
12406 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12407 debug_variable_location_views
12408 ? ", reset view to 0" : "");
12409 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12410 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12411 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
12412
12413 prev_addr = ent;
12414 break;
12415
12416 case LI_adv_address:
12417 {
12418 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12419 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12420 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12421
12422 view++;
12423
12424 dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
12425 dw2_asm_output_delta (2, line_label, prev_label,
12426 "from %s to %s", prev_label, line_label);
12427
12428 prev_addr = ent;
12429 break;
12430 }
12431
12432 case LI_set_line:
12433 if (ent->val == current_line)
12434 {
12435 /* We still need to start a new row, so output a copy insn. */
12436 dw2_asm_output_data (1, DW_LNS_copy,
12437 "copy line %u", current_line);
12438 }
12439 else
12440 {
12441 int line_offset = ent->val - current_line;
12442 int line_delta = line_offset - DWARF_LINE_BASE;
12443
12444 current_line = ent->val;
12445 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12446 {
12447 /* This can handle deltas from -10 to 234, using the current
12448 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12449 This takes 1 byte. */
12450 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12451 "line %u", current_line);
12452 }
12453 else
12454 {
12455 /* This can handle any delta. This takes at least 4 bytes,
12456 depending on the value being encoded. */
12457 dw2_asm_output_data (1, DW_LNS_advance_line,
12458 "advance to line %u", current_line);
12459 dw2_asm_output_data_sleb128 (line_offset, NULL);
12460 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12461 }
12462 }
12463 break;
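/* As a worked example of the special-opcode arithmetic above (illustrative
   only): for the common case of advancing by a single line, line_offset is 1,
   so with the -10 DWARF_LINE_BASE mentioned above line_delta is 11 and the
   whole row is started by the single byte DWARF_LINE_OPCODE_BASE + 11,
   instead of the three bytes needed by DW_LNS_advance_line, its SLEB128
   operand and the trailing DW_LNS_copy.  */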
12464
12465 case LI_set_file:
12466 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12467 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12468 break;
12469
12470 case LI_set_column:
12471 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12472 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12473 break;
12474
12475 case LI_negate_stmt:
12476 current_is_stmt = !current_is_stmt;
12477 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12478 "is_stmt %d", current_is_stmt);
12479 break;
12480
12481 case LI_set_prologue_end:
12482 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12483 "set prologue end");
12484 break;
12485
12486 case LI_set_epilogue_begin:
12487 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12488 "set epilogue begin");
12489 break;
12490
12491 case LI_set_discriminator:
12492 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12493 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12494 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12495 dw2_asm_output_data_uleb128 (ent->val, NULL);
12496 break;
12497 }
12498 }
12499
12500 /* Emit debug info for the address of the end of the table. */
12501 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12502 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12503 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12504 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12505
12506 dw2_asm_output_data (1, 0, "end sequence");
12507 dw2_asm_output_data_uleb128 (1, NULL);
12508 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
12509 }
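/* Every extended opcode emitted above (DW_LNE_set_address, DW_LNE_end_sequence
   and DW_LNE_set_discriminator) uses the same framing: a zero byte announcing
   an extended opcode, a ULEB128 byte count, then the sub-opcode and its
   operand.  As an illustrative sketch only (not used by GCC, name made up),
   the size of each DW_LNE_set_address record produced above is:  */

static inline unsigned int
example_size_of_set_address_record (unsigned int addr_size)
{
  /* Zero byte, one-byte ULEB128 length (1 + addr_size is always < 128),
     the DW_LNE_set_address sub-opcode, then the raw address.  */
  return 1 + 1 + 1 + addr_size;
}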
12510
12511 /* Output the source line number correspondence information. This
12512 information goes into the .debug_line section. */
12513
12514 static void
12515 output_line_info (bool prologue_only)
12516 {
12517 static unsigned int generation;
12518 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12519 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12520 bool saw_one = false;
12521 int opc;
12522
12523 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
12524 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
12525 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
12526 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
12527
12528 if (!XCOFF_DEBUGGING_INFO)
12529 {
12530 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
12531 dw2_asm_output_data (4, 0xffffffff,
12532 "Initial length escape value indicating 64-bit DWARF extension");
12533 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
12534 "Length of Source Line Info");
12535 }
12536
12537 ASM_OUTPUT_LABEL (asm_out_file, l1);
12538
12539 output_dwarf_version ();
12540 if (dwarf_version >= 5)
12541 {
12542 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12543 dw2_asm_output_data (1, 0, "Segment Size");
12544 }
12545 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12546 ASM_OUTPUT_LABEL (asm_out_file, p1);
12547
12548 /* Define the architecture-dependent minimum instruction length (in bytes).
12549 In this implementation of DWARF, this field is used for information
12550 purposes only. Since GCC generates assembly language, we have no
12551 a priori knowledge of how many instruction bytes are generated for each
12552 source line, and therefore can use only the DW_LNE_set_address and
12553 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12554 this as '1', which is "correct enough" for all architectures,
12555 and don't let the target override. */
12556 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12557
12558 if (dwarf_version >= 4)
12559 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12560 "Maximum Operations Per Instruction");
12561 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12562 "Default is_stmt_start flag");
12563 dw2_asm_output_data (1, DWARF_LINE_BASE,
12564 "Line Base Value (Special Opcodes)");
12565 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12566 "Line Range Value (Special Opcodes)");
12567 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12568 "Special Opcode Base");
12569
12570 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12571 {
12572 int n_op_args;
12573 switch (opc)
12574 {
12575 case DW_LNS_advance_pc:
12576 case DW_LNS_advance_line:
12577 case DW_LNS_set_file:
12578 case DW_LNS_set_column:
12579 case DW_LNS_fixed_advance_pc:
12580 case DW_LNS_set_isa:
12581 n_op_args = 1;
12582 break;
12583 default:
12584 n_op_args = 0;
12585 break;
12586 }
12587
12588 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12589 opc, n_op_args);
12590 }
12591
12592 /* Write out the information about the files we use. */
12593 output_file_names ();
12594 ASM_OUTPUT_LABEL (asm_out_file, p2);
12595 if (prologue_only)
12596 {
12597 /* Output the marker for the end of the line number info. */
12598 ASM_OUTPUT_LABEL (asm_out_file, l2);
12599 return;
12600 }
12601
12602 if (separate_line_info)
12603 {
12604 dw_line_info_table *table;
12605 size_t i;
12606
12607 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12608 if (table->in_use)
12609 {
12610 output_one_line_info_table (table);
12611 saw_one = true;
12612 }
12613 }
12614 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12615 {
12616 output_one_line_info_table (cold_text_section_line_info);
12617 saw_one = true;
12618 }
12619
12620 /* ??? Some Darwin linkers crash on a .debug_line section with no
12621 sequences. Further, merely a DW_LNE_end_sequence entry is not
12622 sufficient -- the address column must also be initialized.
12623 Make sure to output at least one set_address/end_sequence pair,
12624 choosing .text since that section is always present. */
12625 if (text_section_line_info->in_use || !saw_one)
12626 output_one_line_info_table (text_section_line_info);
12627
12628 /* Output the marker for the end of the line number info. */
12629 ASM_OUTPUT_LABEL (asm_out_file, l2);
12630 }
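/* For orientation only: the prologue written by output_line_info corresponds,
   for 32-bit DWARF 5, roughly to the layout below.  This struct is not part
   of GCC and is never emitted as such; the field names are descriptive and
   the <stdint.h> fixed-width types are assumed to be visible.  */

struct example_debug_line_header
{
  uint32_t unit_length;			/* delta from l1 to l2 */
  uint16_t version;			/* output_dwarf_version () */
  uint8_t address_size;			/* DWARF2_ADDR_SIZE (DWARF 5 only) */
  uint8_t segment_selector_size;	/* always 0 here (DWARF 5 only) */
  uint32_t header_length;		/* delta from p1 to p2 */
  uint8_t minimum_instruction_length;	/* hard-wired to 1 above */
  uint8_t maximum_operations_per_insn;	/* DWARF 4 and later */
  uint8_t default_is_stmt;		/* DWARF_LINE_DEFAULT_IS_STMT_START */
  int8_t line_base;			/* DWARF_LINE_BASE */
  uint8_t line_range;			/* DWARF_LINE_RANGE */
  uint8_t opcode_base;			/* DWARF_LINE_OPCODE_BASE */
  /* standard_opcode_lengths[opcode_base - 1] and the directory and file
     name tables written by output_file_names follow.  */
};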
12631 \f
12632 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12633
12634 static inline bool
12635 need_endianity_attribute_p (bool reverse)
12636 {
12637 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12638 }
12639
12640 /* Given a pointer to a tree node for some base type, return a pointer to
12641 a DIE that describes the given type. REVERSE is true if the type is
12642 to be interpreted in the reverse storage order wrt the target order.
12643
12644 This routine must only be called for GCC type nodes that correspond to
12645 Dwarf base (fundamental) types. */
12646
12647 static dw_die_ref
12648 base_type_die (tree type, bool reverse)
12649 {
12650 dw_die_ref base_type_result;
12651 enum dwarf_type encoding;
12652 bool fpt_used = false;
12653 struct fixed_point_type_info fpt_info;
12654 tree type_bias = NULL_TREE;
12655
12656 /* If this is a subtype that should not be emitted as a subrange type,
12657 use the base type. See subrange_type_for_debug_p. */
12658 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12659 type = TREE_TYPE (type);
12660
12661 switch (TREE_CODE (type))
12662 {
12663 case INTEGER_TYPE:
12664 if ((dwarf_version >= 4 || !dwarf_strict)
12665 && TYPE_NAME (type)
12666 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12667 && DECL_IS_BUILTIN (TYPE_NAME (type))
12668 && DECL_NAME (TYPE_NAME (type)))
12669 {
12670 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12671 if (strcmp (name, "char16_t") == 0
12672 || strcmp (name, "char32_t") == 0)
12673 {
12674 encoding = DW_ATE_UTF;
12675 break;
12676 }
12677 }
12678 if ((dwarf_version >= 3 || !dwarf_strict)
12679 && lang_hooks.types.get_fixed_point_type_info)
12680 {
12681 memset (&fpt_info, 0, sizeof (fpt_info));
12682 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12683 {
12684 fpt_used = true;
12685 encoding = ((TYPE_UNSIGNED (type))
12686 ? DW_ATE_unsigned_fixed
12687 : DW_ATE_signed_fixed);
12688 break;
12689 }
12690 }
12691 if (TYPE_STRING_FLAG (type))
12692 {
12693 if (TYPE_UNSIGNED (type))
12694 encoding = DW_ATE_unsigned_char;
12695 else
12696 encoding = DW_ATE_signed_char;
12697 }
12698 else if (TYPE_UNSIGNED (type))
12699 encoding = DW_ATE_unsigned;
12700 else
12701 encoding = DW_ATE_signed;
12702
12703 if (!dwarf_strict
12704 && lang_hooks.types.get_type_bias)
12705 type_bias = lang_hooks.types.get_type_bias (type);
12706 break;
12707
12708 case REAL_TYPE:
12709 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12710 {
12711 if (dwarf_version >= 3 || !dwarf_strict)
12712 encoding = DW_ATE_decimal_float;
12713 else
12714 encoding = DW_ATE_lo_user;
12715 }
12716 else
12717 encoding = DW_ATE_float;
12718 break;
12719
12720 case FIXED_POINT_TYPE:
12721 if (!(dwarf_version >= 3 || !dwarf_strict))
12722 encoding = DW_ATE_lo_user;
12723 else if (TYPE_UNSIGNED (type))
12724 encoding = DW_ATE_unsigned_fixed;
12725 else
12726 encoding = DW_ATE_signed_fixed;
12727 break;
12728
12729 /* Dwarf2 doesn't know anything about complex ints, so use
12730 a user-defined type for them. */
12731 case COMPLEX_TYPE:
12732 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12733 encoding = DW_ATE_complex_float;
12734 else
12735 encoding = DW_ATE_lo_user;
12736 break;
12737
12738 case BOOLEAN_TYPE:
12739 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12740 encoding = DW_ATE_boolean;
12741 break;
12742
12743 default:
12744 /* No other TREE_CODEs are Dwarf fundamental types. */
12745 gcc_unreachable ();
12746 }
12747
12748 base_type_result = new_die_raw (DW_TAG_base_type);
12749
12750 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12751 int_size_in_bytes (type));
12752 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12753
12754 if (need_endianity_attribute_p (reverse))
12755 add_AT_unsigned (base_type_result, DW_AT_endianity,
12756 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12757
12758 add_alignment_attribute (base_type_result, type);
12759
12760 if (fpt_used)
12761 {
12762 switch (fpt_info.scale_factor_kind)
12763 {
12764 case fixed_point_scale_factor_binary:
12765 add_AT_int (base_type_result, DW_AT_binary_scale,
12766 fpt_info.scale_factor.binary);
12767 break;
12768
12769 case fixed_point_scale_factor_decimal:
12770 add_AT_int (base_type_result, DW_AT_decimal_scale,
12771 fpt_info.scale_factor.decimal);
12772 break;
12773
12774 case fixed_point_scale_factor_arbitrary:
12775 /* Arbitrary scale factors cannot be described in standard DWARF,
12776 yet. */
12777 if (!dwarf_strict)
12778 {
12779 /* Describe the scale factor as a rational constant. */
12780 const dw_die_ref scale_factor
12781 = new_die (DW_TAG_constant, comp_unit_die (), type);
12782
12783 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12784 fpt_info.scale_factor.arbitrary.numerator);
12785 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12786 fpt_info.scale_factor.arbitrary.denominator);
12787
12788 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12789 }
12790 break;
12791
12792 default:
12793 gcc_unreachable ();
12794 }
12795 }
12796
12797 if (type_bias)
12798 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12799 dw_scalar_form_constant
12800 | dw_scalar_form_exprloc
12801 | dw_scalar_form_reference,
12802 NULL);
12803
12804 return base_type_result;
12805 }
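/* A minimal usage sketch (illustrative only): for plain C `unsigned int' on a
   target with 32-bit int, the INTEGER_TYPE case above picks DW_ATE_unsigned,
   so

     base_type_die (unsigned_type_node, false)

   yields a DW_TAG_base_type DIE with DW_AT_byte_size 4 and DW_AT_encoding
   DW_ATE_unsigned, plus the alignment attribute and no DW_AT_endianity since
   REVERSE is false.  */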
12806
12807 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12808 named 'auto' in its type: return true for it, false otherwise. */
12809
12810 static inline bool
12811 is_cxx_auto (tree type)
12812 {
12813 if (is_cxx ())
12814 {
12815 tree name = TYPE_IDENTIFIER (type);
12816 if (name == get_identifier ("auto")
12817 || name == get_identifier ("decltype(auto)"))
12818 return true;
12819 }
12820 return false;
12821 }
12822
12823 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12824 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
12825
12826 static inline int
12827 is_base_type (tree type)
12828 {
12829 switch (TREE_CODE (type))
12830 {
12831 case INTEGER_TYPE:
12832 case REAL_TYPE:
12833 case FIXED_POINT_TYPE:
12834 case COMPLEX_TYPE:
12835 case BOOLEAN_TYPE:
12836 return 1;
12837
12838 case VOID_TYPE:
12839 case ARRAY_TYPE:
12840 case RECORD_TYPE:
12841 case UNION_TYPE:
12842 case QUAL_UNION_TYPE:
12843 case ENUMERAL_TYPE:
12844 case FUNCTION_TYPE:
12845 case METHOD_TYPE:
12846 case POINTER_TYPE:
12847 case REFERENCE_TYPE:
12848 case NULLPTR_TYPE:
12849 case OFFSET_TYPE:
12850 case LANG_TYPE:
12851 case VECTOR_TYPE:
12852 return 0;
12853
12854 default:
12855 if (is_cxx_auto (type))
12856 return 0;
12857 gcc_unreachable ();
12858 }
12859
12860 return 0;
12861 }
12862
12863 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12864 node, return the size in bits for the type if it is a constant, the
12865 alignment for the type if its size is not constant, zero if the type
12866 has no size at all, or BITS_PER_WORD if the type actually turns out
12867 to be an ERROR_MARK node. */
12868
12869 static inline unsigned HOST_WIDE_INT
12870 simple_type_size_in_bits (const_tree type)
12871 {
12872 if (TREE_CODE (type) == ERROR_MARK)
12873 return BITS_PER_WORD;
12874 else if (TYPE_SIZE (type) == NULL_TREE)
12875 return 0;
12876 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
12877 return tree_to_uhwi (TYPE_SIZE (type));
12878 else
12879 return TYPE_ALIGN (type);
12880 }
12881
12882 /* Similarly, but return an offset_int instead of UHWI. */
12883
12884 static inline offset_int
12885 offset_int_type_size_in_bits (const_tree type)
12886 {
12887 if (TREE_CODE (type) == ERROR_MARK)
12888 return BITS_PER_WORD;
12889 else if (TYPE_SIZE (type) == NULL_TREE)
12890 return 0;
12891 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
12892 return wi::to_offset (TYPE_SIZE (type));
12893 else
12894 return TYPE_ALIGN (type);
12895 }
12896
12897 /* Given a pointer to a tree node for a subrange type, return a pointer
12898 to a DIE that describes the given type. */
12899
12900 static dw_die_ref
12901 subrange_type_die (tree type, tree low, tree high, tree bias,
12902 dw_die_ref context_die)
12903 {
12904 dw_die_ref subrange_die;
12905 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
12906
12907 if (context_die == NULL)
12908 context_die = comp_unit_die ();
12909
12910 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
12911
12912 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
12913 {
12914 /* The size of the subrange type and its base type do not match,
12915 so we need to generate a size attribute for the subrange type. */
12916 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
12917 }
12918
12919 add_alignment_attribute (subrange_die, type);
12920
12921 if (low)
12922 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
12923 if (high)
12924 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
12925 if (bias && !dwarf_strict)
12926 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
12927 dw_scalar_form_constant
12928 | dw_scalar_form_exprloc
12929 | dw_scalar_form_reference,
12930 NULL);
12931
12932 return subrange_die;
12933 }
12934
12935 /* Returns the (const and/or volatile) cv_qualifiers associated with
12936 the decl node. This will normally be augmented with the
12937 cv_qualifiers of the underlying type in add_type_attribute. */
12938
12939 static int
12940 decl_quals (const_tree decl)
12941 {
12942 return ((TREE_READONLY (decl)
12943 /* The C++ front-end correctly marks reference-typed
12944 variables as readonly, but from a language (and debug
12945 info) standpoint they are not const-qualified. */
12946 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
12947 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
12948 | (TREE_THIS_VOLATILE (decl)
12949 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
12950 }
12951
12952 /* Determine the TYPE whose qualifiers match the largest strict subset
12953 of the given TYPE_QUALS, and return its qualifiers. Ignore all
12954 qualifiers outside QUAL_MASK. */
12955
12956 static int
12957 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
12958 {
12959 tree t;
12960 int best_rank = 0, best_qual = 0, max_rank;
12961
12962 type_quals &= qual_mask;
12963 max_rank = popcount_hwi (type_quals) - 1;
12964
12965 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
12966 t = TYPE_NEXT_VARIANT (t))
12967 {
12968 int q = TYPE_QUALS (t) & qual_mask;
12969
12970 if ((q & type_quals) == q && q != type_quals
12971 && check_base_type (t, type))
12972 {
12973 int rank = popcount_hwi (q);
12974
12975 if (rank > best_rank)
12976 {
12977 best_rank = rank;
12978 best_qual = q;
12979 }
12980 }
12981 }
12982
12983 return best_qual;
12984 }
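/* A worked example (illustrative only): if TYPE is `const volatile int' and
   QUAL_MASK covers const and volatile, the variant list of `int' is scanned
   for the most-qualified strict subset that actually exists as a variant,
   e.g. plain `const int' (rank 1); its qualifiers (TYPE_QUAL_CONST) are
   returned, so the caller only needs to wrap one extra DW_TAG_volatile_type
   around the already-existing const DIE.  */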
12985
12986 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
12987 static const dwarf_qual_info_t dwarf_qual_info[] =
12988 {
12989 { TYPE_QUAL_CONST, DW_TAG_const_type },
12990 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
12991 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
12992 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
12993 };
12994 static const unsigned int dwarf_qual_info_size
12995 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
12996
12997 /* If DIE is a qualified DIE of some base DIE with the same parent,
12998 return the base DIE, otherwise return NULL. OR into *MASK the
12999 qualifiers added compared to the returned DIE, looking through at most
DEPTH further qualified DIEs underneath. */
13000
13001 static dw_die_ref
13002 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
13003 {
13004 unsigned int i;
13005 for (i = 0; i < dwarf_qual_info_size; i++)
13006 if (die->die_tag == dwarf_qual_info[i].t)
13007 break;
13008 if (i == dwarf_qual_info_size)
13009 return NULL;
13010 if (vec_safe_length (die->die_attr) != 1)
13011 return NULL;
13012 dw_die_ref type = get_AT_ref (die, DW_AT_type);
13013 if (type == NULL || type->die_parent != die->die_parent)
13014 return NULL;
13015 *mask |= dwarf_qual_info[i].q;
13016 if (depth)
13017 {
13018 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
13019 if (ret)
13020 return ret;
13021 }
13022 return type;
13023 }
13024
13025 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
13026 entry that chains the modifiers specified by CV_QUALS in front of the
13027 given type. REVERSE is true if the type is to be interpreted in the
13028 reverse storage order wrt the target order. */
13029
13030 static dw_die_ref
13031 modified_type_die (tree type, int cv_quals, bool reverse,
13032 dw_die_ref context_die)
13033 {
13034 enum tree_code code = TREE_CODE (type);
13035 dw_die_ref mod_type_die;
13036 dw_die_ref sub_die = NULL;
13037 tree item_type = NULL;
13038 tree qualified_type;
13039 tree name, low, high;
13040 dw_die_ref mod_scope;
13041 /* Only these cv-qualifiers are currently handled. */
13042 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
13043 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
13044 ENCODE_QUAL_ADDR_SPACE(~0U));
13045 const bool reverse_base_type
13046 = need_endianity_attribute_p (reverse) && is_base_type (type);
13047
13048 if (code == ERROR_MARK)
13049 return NULL;
13050
13051 if (lang_hooks.types.get_debug_type)
13052 {
13053 tree debug_type = lang_hooks.types.get_debug_type (type);
13054
13055 if (debug_type != NULL_TREE && debug_type != type)
13056 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13057 }
13058
13059 cv_quals &= cv_qual_mask;
13060
13061 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
13062 tag modifier (and not an attribute) that old consumers won't be
13063 able to handle. */
13064 if (dwarf_version < 3)
13065 cv_quals &= ~TYPE_QUAL_RESTRICT;
13066
13067 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
13068 if (dwarf_version < 5)
13069 cv_quals &= ~TYPE_QUAL_ATOMIC;
13070
13071 /* See if we already have the appropriately qualified variant of
13072 this type. */
13073 qualified_type = get_qualified_type (type, cv_quals);
13074
13075 if (qualified_type == sizetype)
13076 {
13077 /* Try not to expose the internal sizetype type's name. */
13078 if (TYPE_NAME (qualified_type)
13079 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13080 {
13081 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13082
13083 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13084 && (TYPE_PRECISION (t)
13085 == TYPE_PRECISION (qualified_type))
13086 && (TYPE_UNSIGNED (t)
13087 == TYPE_UNSIGNED (qualified_type)));
13088 qualified_type = t;
13089 }
13090 else if (qualified_type == sizetype
13091 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13092 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13093 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13094 qualified_type = size_type_node;
13095 }
13096
13097 /* If we do, then we can just use its DIE, if it exists. */
13098 if (qualified_type)
13099 {
13100 mod_type_die = lookup_type_die (qualified_type);
13101
13102 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13103 dealt with specially: the DIE with the attribute, if it exists, is
13104 placed immediately after the regular DIE for the same base type. */
13105 if (mod_type_die
13106 && (!reverse_base_type
13107 || ((mod_type_die = mod_type_die->die_sib) != NULL
13108 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13109 return mod_type_die;
13110 }
13111
13112 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13113
13114 /* Handle C typedef types. */
13115 if (name
13116 && TREE_CODE (name) == TYPE_DECL
13117 && DECL_ORIGINAL_TYPE (name)
13118 && !DECL_ARTIFICIAL (name))
13119 {
13120 tree dtype = TREE_TYPE (name);
13121
13122 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13123 if (qualified_type == dtype && !reverse_base_type)
13124 {
13125 tree origin = decl_ultimate_origin (name);
13126
13127 /* Typedef variants that have an abstract origin don't get their own
13128 type DIE (see gen_typedef_die), so fall back on the ultimate
13129 abstract origin instead. */
13130 if (origin != NULL && origin != name)
13131 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13132 context_die);
13133
13134 /* For a named type, use the typedef. */
13135 gen_type_die (qualified_type, context_die);
13136 return lookup_type_die (qualified_type);
13137 }
13138 else
13139 {
13140 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13141 dquals &= cv_qual_mask;
13142 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13143 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13144 /* cv-unqualified version of named type. Just use
13145 the unnamed type to which it refers. */
13146 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13147 reverse, context_die);
13148 /* Else cv-qualified version of named type; fall through. */
13149 }
13150 }
13151
13152 mod_scope = scope_die_for (type, context_die);
13153
13154 if (cv_quals)
13155 {
13156 int sub_quals = 0, first_quals = 0;
13157 unsigned i;
13158 dw_die_ref first = NULL, last = NULL;
13159
13160 /* Determine a lesser qualified type that most closely matches
13161 this one. Then generate DW_TAG_* entries for the remaining
13162 qualifiers. */
13163 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13164 cv_qual_mask);
13165 if (sub_quals && use_debug_types)
13166 {
13167 bool needed = false;
13168 /* If emitting type units, make sure the order of qualifiers
13169 is canonical. Thus, start from unqualified type if
13170 an earlier qualifier is missing in sub_quals, but some later
13171 one is present there. */
13172 for (i = 0; i < dwarf_qual_info_size; i++)
13173 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13174 needed = true;
13175 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13176 {
13177 sub_quals = 0;
13178 break;
13179 }
13180 }
13181 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13182 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13183 {
13184 /* As not all intermediate qualified DIEs have corresponding
13185 tree types, ensure that qualified DIEs in the same scope
13186 as their DW_AT_type are emitted after their DW_AT_type,
13187 only with other qualified DIEs for the same type possibly
13188 in between them. Determine the range of such qualified
13189 DIEs now (first being the base type, last being corresponding
13190 last qualified DIE for it). */
13191 unsigned int count = 0;
13192 first = qualified_die_p (mod_type_die, &first_quals,
13193 dwarf_qual_info_size);
13194 if (first == NULL)
13195 first = mod_type_die;
13196 gcc_assert ((first_quals & ~sub_quals) == 0);
13197 for (count = 0, last = first;
13198 count < (1U << dwarf_qual_info_size);
13199 count++, last = last->die_sib)
13200 {
13201 int quals = 0;
13202 if (last == mod_scope->die_child)
13203 break;
13204 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13205 != first)
13206 break;
13207 }
13208 }
13209
13210 for (i = 0; i < dwarf_qual_info_size; i++)
13211 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13212 {
13213 dw_die_ref d;
13214 if (first && first != last)
13215 {
13216 for (d = first->die_sib; ; d = d->die_sib)
13217 {
13218 int quals = 0;
13219 qualified_die_p (d, &quals, dwarf_qual_info_size);
13220 if (quals == (first_quals | dwarf_qual_info[i].q))
13221 break;
13222 if (d == last)
13223 {
13224 d = NULL;
13225 break;
13226 }
13227 }
13228 if (d)
13229 {
13230 mod_type_die = d;
13231 continue;
13232 }
13233 }
13234 if (first)
13235 {
13236 d = new_die_raw (dwarf_qual_info[i].t);
13237 add_child_die_after (mod_scope, d, last);
13238 last = d;
13239 }
13240 else
13241 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13242 if (mod_type_die)
13243 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13244 mod_type_die = d;
13245 first_quals |= dwarf_qual_info[i].q;
13246 }
13247 }
13248 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13249 {
13250 dwarf_tag tag = DW_TAG_pointer_type;
13251 if (code == REFERENCE_TYPE)
13252 {
13253 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13254 tag = DW_TAG_rvalue_reference_type;
13255 else
13256 tag = DW_TAG_reference_type;
13257 }
13258 mod_type_die = new_die (tag, mod_scope, type);
13259
13260 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13261 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13262 add_alignment_attribute (mod_type_die, type);
13263 item_type = TREE_TYPE (type);
13264
13265 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13266 if (!ADDR_SPACE_GENERIC_P (as))
13267 {
13268 int action = targetm.addr_space.debug (as);
13269 if (action >= 0)
13270 {
13271 /* Positive values indicate an address_class. */
13272 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13273 }
13274 else
13275 {
13276 /* Negative values indicate an (inverted) segment base reg. */
13277 dw_loc_descr_ref d
13278 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13279 add_AT_loc (mod_type_die, DW_AT_segment, d);
13280 }
13281 }
13282 }
13283 else if (code == INTEGER_TYPE
13284 && TREE_TYPE (type) != NULL_TREE
13285 && subrange_type_for_debug_p (type, &low, &high))
13286 {
13287 tree bias = NULL_TREE;
13288 if (lang_hooks.types.get_type_bias)
13289 bias = lang_hooks.types.get_type_bias (type);
13290 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13291 item_type = TREE_TYPE (type);
13292 }
13293 else if (is_base_type (type))
13294 {
13295 mod_type_die = base_type_die (type, reverse);
13296
13297 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13298 if (reverse_base_type)
13299 {
13300 dw_die_ref after_die
13301 = modified_type_die (type, cv_quals, false, context_die);
13302 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13303 }
13304 else
13305 add_child_die (comp_unit_die (), mod_type_die);
13306
13307 add_pubtype (type, mod_type_die);
13308 }
13309 else
13310 {
13311 gen_type_die (type, context_die);
13312
13313 /* We have to get the type_main_variant here (and pass that to the
13314 `lookup_type_die' routine) because the ..._TYPE node we have
13315 might simply be a *copy* of some original type node (where the
13316 copy was created to help us keep track of typedef names) and
13317 that copy might have a different TYPE_UID from the original
13318 ..._TYPE node. */
13319 if (TREE_CODE (type) == FUNCTION_TYPE
13320 || TREE_CODE (type) == METHOD_TYPE)
13321 {
13322 /* For function/method types, can't just use type_main_variant here,
13323 because that can have different ref-qualifiers for C++,
13324 but try to canonicalize. */
13325 tree main = TYPE_MAIN_VARIANT (type);
13326 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13327 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13328 && check_base_type (t, main)
13329 && check_lang_type (t, type))
13330 return lookup_type_die (t);
13331 return lookup_type_die (type);
13332 }
13333 else if (TREE_CODE (type) != VECTOR_TYPE
13334 && TREE_CODE (type) != ARRAY_TYPE)
13335 return lookup_type_die (type_main_variant (type));
13336 else
13337 /* Vectors have the debugging information in the type,
13338 not the main variant. */
13339 return lookup_type_die (type);
13340 }
13341
13342 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13343 don't output a DW_TAG_typedef, since there isn't one in the
13344 user's program; just attach a DW_AT_name to the type.
13345 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13346 if the base type already has the same name. */
13347 if (name
13348 && ((TREE_CODE (name) != TYPE_DECL
13349 && (qualified_type == TYPE_MAIN_VARIANT (type)
13350 || (cv_quals == TYPE_UNQUALIFIED)))
13351 || (TREE_CODE (name) == TYPE_DECL
13352 && TREE_TYPE (name) == qualified_type
13353 && DECL_NAME (name))))
13354 {
13355 if (TREE_CODE (name) == TYPE_DECL)
13356 /* Could just call add_name_and_src_coords_attributes here,
13357 but since this is a builtin type it doesn't have any
13358 useful source coordinates anyway. */
13359 name = DECL_NAME (name);
13360 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13361 }
13362 /* This probably indicates a bug. */
13363 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13364 {
13365 name = TYPE_IDENTIFIER (type);
13366 add_name_attribute (mod_type_die,
13367 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13368 }
13369
13370 if (qualified_type && !reverse_base_type)
13371 equate_type_number_to_die (qualified_type, mod_type_die);
13372
13373 if (item_type)
13374 /* We must do this after the equate_type_number_to_die call, in case
13375 this is a recursive type. This ensures that the modified_type_die
13376 recursion will terminate even if the type is recursive. Recursive
13377 types are possible in Ada. */
13378 sub_die = modified_type_die (item_type,
13379 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13380 reverse,
13381 context_die);
13382
13383 if (sub_die != NULL)
13384 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13385
13386 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13387 if (TYPE_ARTIFICIAL (type))
13388 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13389
13390 return mod_type_die;
13391 }
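/* A worked example (illustrative only) of how the branches above combine: for
   the C type `const char *' with no qualifiers of its own, the POINTER_TYPE
   branch creates a DW_TAG_pointer_type DIE with DW_AT_byte_size, and the
   trailing recursion on ITEM_TYPE (`const char') goes through the cv_quals
   branch, producing a DW_TAG_const_type DIE whose DW_AT_type refers to the
   `char' DW_TAG_base_type DIE built by base_type_die; the pointer DIE's
   DW_AT_type then points at that const DIE.  */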
13392
13393 /* Generate DIEs for the generic parameters of T.
13394 T must be either a generic type or a generic function.
13395 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
13396
13397 static void
13398 gen_generic_params_dies (tree t)
13399 {
13400 tree parms, args;
13401 int parms_num, i;
13402 dw_die_ref die = NULL;
13403 int non_default;
13404
13405 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13406 return;
13407
13408 if (TYPE_P (t))
13409 die = lookup_type_die (t);
13410 else if (DECL_P (t))
13411 die = lookup_decl_die (t);
13412
13413 gcc_assert (die);
13414
13415 parms = lang_hooks.get_innermost_generic_parms (t);
13416 if (!parms)
13417 /* T has no generic parameters. It means T is neither a generic type
13418 nor a generic function. End of story. */
13419 return;
13420
13421 parms_num = TREE_VEC_LENGTH (parms);
13422 args = lang_hooks.get_innermost_generic_args (t);
13423 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13424 non_default = int_cst_value (TREE_CHAIN (args));
13425 else
13426 non_default = TREE_VEC_LENGTH (args);
13427 for (i = 0; i < parms_num; i++)
13428 {
13429 tree parm, arg, arg_pack_elems;
13430 dw_die_ref parm_die;
13431
13432 parm = TREE_VEC_ELT (parms, i);
13433 arg = TREE_VEC_ELT (args, i);
13434 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13435 gcc_assert (parm && TREE_VALUE (parm) && arg);
13436
13437 if (parm && TREE_VALUE (parm) && arg)
13438 {
13439 /* If PARM represents a template parameter pack,
13440 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13441 by DW_TAG_template_*_parameter DIEs for the argument
13442 pack elements of ARG. Note that ARG would then be
13443 an argument pack. */
13444 if (arg_pack_elems)
13445 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13446 arg_pack_elems,
13447 die);
13448 else
13449 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13450 true /* emit name */, die);
13451 if (i >= non_default)
13452 add_AT_flag (parm_die, DW_AT_default_value, 1);
13453 }
13454 }
13455 }
13456
13457 /* Create and return a DIE for PARM which should be
13458 the representation of a generic type parameter.
13459 For instance, in the C++ front end, PARM would be a template parameter.
13460 ARG is the argument to PARM.
13461 EMIT_NAME_P, if true, means the DIE will have a DW_AT_name attribute
13462 set to the name of PARM.
13463 PARENT_DIE is the parent DIE which the newly created DIE should be added to,
13464 as a child node. */
13465
13466 static dw_die_ref
13467 generic_parameter_die (tree parm, tree arg,
13468 bool emit_name_p,
13469 dw_die_ref parent_die)
13470 {
13471 dw_die_ref tmpl_die = NULL;
13472 const char *name = NULL;
13473
13474 if (!parm || !DECL_NAME (parm) || !arg)
13475 return NULL;
13476
13477 /* We support non-type generic parameters and arguments,
13478 type generic parameters and arguments, as well as
13479 generic generic parameters (a.k.a. template template parameters in C++)
13480 and arguments. */
13481 if (TREE_CODE (parm) == PARM_DECL)
13482 /* PARM is a nontype generic parameter */
13483 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13484 else if (TREE_CODE (parm) == TYPE_DECL)
13485 /* PARM is a type generic parameter. */
13486 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13487 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13488 /* PARM is a generic generic parameter.
13489 Its DIE is a GNU extension. It shall have a
13490 DW_AT_name attribute to represent the name of the template template
13491 parameter, and a DW_AT_GNU_template_name attribute to represent the
13492 name of the template template argument. */
13493 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13494 parent_die, parm);
13495 else
13496 gcc_unreachable ();
13497
13498 if (tmpl_die)
13499 {
13500 tree tmpl_type;
13501
13502 /* If PARM is a generic parameter pack, it means we are
13503 emitting debug info for a template argument pack element.
13504 In other words, ARG is a template argument pack element.
13505 In that case, we don't emit any DW_AT_name attribute for
13506 the DIE. */
13507 if (emit_name_p)
13508 {
13509 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13510 gcc_assert (name);
13511 add_AT_string (tmpl_die, DW_AT_name, name);
13512 }
13513
13514 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13515 {
13516 /* DWARF3, 5.6.8 says that if PARM is a non-type generic parameter,
13517 TMPL_DIE should have a DW_AT_type attribute that is set
13518 to the type of the argument to PARM, which is ARG.
13519 If PARM is a type generic parameter, TMPL_DIE should have a
13520 DW_AT_type attribute that is set to ARG. */
13521 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13522 add_type_attribute (tmpl_die, tmpl_type,
13523 (TREE_THIS_VOLATILE (tmpl_type)
13524 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13525 false, parent_die);
13526 }
13527 else
13528 {
13529 /* So TMPL_DIE is a DIE representing a generic generic template
13530 parameter, a.k.a. a template template parameter in C++, and ARG
13531 is a template. */
13532
13533 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13534 to the name of the argument. */
13535 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13536 if (name)
13537 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13538 }
13539
13540 if (TREE_CODE (parm) == PARM_DECL)
13541 /* So PARM is a non-type generic parameter.
13542 DWARF3 5.6.8 says we must set a DW_AT_const_value child
13543 attribute of TMPL_DIE whose value represents the value
13544 of ARG.
13545 We must be careful here:
13546 the value of ARG might reference some function decls.
13547 We might currently be emitting debug info for a generic
13548 type, and since types are emitted before function decls,
13549 we don't know whether the function decls referenced by ARG
13550 will actually be emitted once the cgraph computations are done.
13551 So we must defer the generation of the DW_AT_const_value
13552 until after cgraph is ready. */
13553 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13554 }
13555
13556 return tmpl_die;
13557 }
13558
13559 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
13560 PARM_PACK, with arguments PARM_PACK_ARGS. PARM_PACK must be a template
13561 parameter pack. The returned DIE will be a child DIE of PARENT_DIE. */
13562
13563 static dw_die_ref
13564 template_parameter_pack_die (tree parm_pack,
13565 tree parm_pack_args,
13566 dw_die_ref parent_die)
13567 {
13568 dw_die_ref die;
13569 int j;
13570
13571 gcc_assert (parent_die && parm_pack);
13572
13573 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13574 add_name_and_src_coords_attributes (die, parm_pack);
13575 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13576 generic_parameter_die (parm_pack,
13577 TREE_VEC_ELT (parm_pack_args, j),
13578 false /* Don't emit DW_AT_name */,
13579 die);
13580 return die;
13581 }
13582
13583 /* Return the DBX register number described by a given RTL node. */
13584
13585 static unsigned int
13586 dbx_reg_number (const_rtx rtl)
13587 {
13588 unsigned regno = REGNO (rtl);
13589
13590 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13591
13592 #ifdef LEAF_REG_REMAP
13593 if (crtl->uses_only_leaf_regs)
13594 {
13595 int leaf_reg = LEAF_REG_REMAP (regno);
13596 if (leaf_reg != -1)
13597 regno = (unsigned) leaf_reg;
13598 }
13599 #endif
13600
13601 regno = DBX_REGISTER_NUMBER (regno);
13602 gcc_assert (regno != INVALID_REGNUM);
13603 return regno;
13604 }
13605
13606 /* Optionally add a DW_OP_piece term to a location description expression.
13607 DW_OP_piece is only added if the location description expression
13608 doesn't already end with DW_OP_piece. */
13609
13610 static void
13611 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13612 {
13613 dw_loc_descr_ref loc;
13614
13615 if (*list_head != NULL)
13616 {
13617 /* Find the end of the chain. */
13618 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13619 ;
13620
13621 if (loc->dw_loc_opc != DW_OP_piece)
13622 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13623 }
13624 }
13625
13626 /* Return a location descriptor that designates a machine register or
13627 zero if there is none. */
13628
13629 static dw_loc_descr_ref
13630 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13631 {
13632 rtx regs;
13633
13634 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13635 return 0;
13636
13637 /* We only use "frame base" when we're sure we're talking about the
13638 post-prologue local stack frame. We do this by *not* running
13639 register elimination until this point, and recognizing the special
13640 argument pointer and soft frame pointer rtx's.
13641 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13642 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13643 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13644 {
13645 dw_loc_descr_ref result = NULL;
13646
13647 if (dwarf_version >= 4 || !dwarf_strict)
13648 {
13649 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13650 initialized);
13651 if (result)
13652 add_loc_descr (&result,
13653 new_loc_descr (DW_OP_stack_value, 0, 0));
13654 }
13655 return result;
13656 }
13657
13658 regs = targetm.dwarf_register_span (rtl);
13659
13660 if (REG_NREGS (rtl) > 1 || regs)
13661 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13662 else
13663 {
13664 unsigned int dbx_regnum = dbx_reg_number (rtl);
13665 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13666 return 0;
13667 return one_reg_loc_descriptor (dbx_regnum, initialized);
13668 }
13669 }
13670
13671 /* Return a location descriptor that designates a machine register for
13672 a given hard register number. */
13673
13674 static dw_loc_descr_ref
13675 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13676 {
13677 dw_loc_descr_ref reg_loc_descr;
13678
13679 if (regno <= 31)
13680 reg_loc_descr
13681 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13682 else
13683 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13684
13685 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13686 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13687
13688 return reg_loc_descr;
13689 }
13690
13691 /* Given an RTL of a register, return a location descriptor that
13692 designates a value that spans more than one register. */
13693
13694 static dw_loc_descr_ref
13695 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13696 enum var_init_status initialized)
13697 {
13698 int size, i;
13699 dw_loc_descr_ref loc_result = NULL;
13700
13701 /* Simple, contiguous registers. */
13702 if (regs == NULL_RTX)
13703 {
13704 unsigned reg = REGNO (rtl);
13705 int nregs;
13706
13707 #ifdef LEAF_REG_REMAP
13708 if (crtl->uses_only_leaf_regs)
13709 {
13710 int leaf_reg = LEAF_REG_REMAP (reg);
13711 if (leaf_reg != -1)
13712 reg = (unsigned) leaf_reg;
13713 }
13714 #endif
13715
13716 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13717 nregs = REG_NREGS (rtl);
13718
13719 /* At present we only track constant-sized pieces. */
13720 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13721 return NULL;
13722 size /= nregs;
13723
13724 loc_result = NULL;
13725 while (nregs--)
13726 {
13727 dw_loc_descr_ref t;
13728
13729 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13730 VAR_INIT_STATUS_INITIALIZED);
13731 add_loc_descr (&loc_result, t);
13732 add_loc_descr_op_piece (&loc_result, size);
13733 ++reg;
13734 }
13735 return loc_result;
13736 }
13737
13738 /* Now onto stupid register sets in non-contiguous locations. */
13739
13740 gcc_assert (GET_CODE (regs) == PARALLEL);
13741
13742 /* At present we only track constant-sized pieces. */
13743 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13744 return NULL;
13745 loc_result = NULL;
13746
13747 for (i = 0; i < XVECLEN (regs, 0); ++i)
13748 {
13749 dw_loc_descr_ref t;
13750
13751 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13752 VAR_INIT_STATUS_INITIALIZED);
13753 add_loc_descr (&loc_result, t);
13754 add_loc_descr_op_piece (&loc_result, size);
13755 }
13756
13757 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13758 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13759 return loc_result;
13760 }
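/* A worked example (illustrative only): for a 16-byte value living in two
   consecutive hard registers of 8 bytes each, with no target-provided span,
   the contiguous branch above emits one register location per hard register
   (DW_OP_reg0..reg31 or DW_OP_regx), each followed by the DW_OP_piece 8 term
   supplied by add_loc_descr_op_piece.  */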
13761
13762 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13763
13764 /* Return a location descriptor that designates a constant i,
13765 as a compound operation from constant (i >> shift), constant shift
13766 and DW_OP_shl. */
13767
13768 static dw_loc_descr_ref
13769 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13770 {
13771 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13772 add_loc_descr (&ret, int_loc_descriptor (shift));
13773 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13774 return ret;
13775 }
13776
13777 /* Return a location descriptor that designates constant POLY_I. */
13778
13779 static dw_loc_descr_ref
13780 int_loc_descriptor (poly_int64 poly_i)
13781 {
13782 enum dwarf_location_atom op;
13783
13784 HOST_WIDE_INT i;
13785 if (!poly_i.is_constant (&i))
13786 {
13787 /* Create location descriptions for the non-constant part and
13788 add any constant offset at the end. */
13789 dw_loc_descr_ref ret = NULL;
13790 HOST_WIDE_INT constant = poly_i.coeffs[0];
13791 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13792 {
13793 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13794 if (coeff != 0)
13795 {
13796 dw_loc_descr_ref start = ret;
13797 unsigned int factor;
13798 int bias;
13799 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13800 (j, &factor, &bias);
13801
13802 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13803 add COEFF * (REGNO / FACTOR) now and subtract
13804 COEFF * BIAS from the final constant part. */
13805 constant -= coeff * bias;
13806 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13807 if (coeff % factor == 0)
13808 coeff /= factor;
13809 else
13810 {
13811 int amount = exact_log2 (factor);
13812 gcc_assert (amount >= 0);
13813 add_loc_descr (&ret, int_loc_descriptor (amount));
13814 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13815 }
13816 if (coeff != 1)
13817 {
13818 add_loc_descr (&ret, int_loc_descriptor (coeff));
13819 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13820 }
13821 if (start)
13822 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13823 }
13824 }
13825 loc_descr_plus_const (&ret, constant);
13826 return ret;
13827 }
13828
13829 /* Pick the smallest representation of a constant, rather than just
13830 defaulting to the LEB encoding. */
13831 if (i >= 0)
13832 {
13833 int clz = clz_hwi (i);
13834 int ctz = ctz_hwi (i);
13835 if (i <= 31)
13836 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13837 else if (i <= 0xff)
13838 op = DW_OP_const1u;
13839 else if (i <= 0xffff)
13840 op = DW_OP_const2u;
13841 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13842 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13843 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13844 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13845 while DW_OP_const4u is 5 bytes. */
13846 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13847 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13848 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13849 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13850 while DW_OP_const4u is 5 bytes. */
13851 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13852
13853 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13854 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13855 <= 4)
13856 {
13857 /* As i >= 2**31, the double cast above will yield a negative number.
13858 Since wrapping is defined in DWARF expressions we can output big
13859 positive integers as small negative ones, regardless of the size
13860 of host wide ints.
13861
13862 Here, since the evaluator will handle 32-bit values and since i >=
13863 2**31, we know it's going to be interpreted as a negative literal:
13864 store it this way if we can do better than 5 bytes this way. */
13865 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13866 }
13867 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13868 op = DW_OP_const4u;
13869
13870 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
13871 least 6 bytes: see if we can do better before falling back to it. */
13872 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13873 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13874 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
13875 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13876 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13877 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
13878 >= HOST_BITS_PER_WIDE_INT)
13879 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
13880 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
13881 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
13882 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13883 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13884 && size_of_uleb128 (i) > 6)
13885 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
13886 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
13887 else
13888 op = DW_OP_constu;
13889 }
13890 else
13891 {
13892 if (i >= -0x80)
13893 op = DW_OP_const1s;
13894 else if (i >= -0x8000)
13895 op = DW_OP_const2s;
13896 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
13897 {
13898 if (size_of_int_loc_descriptor (i) < 5)
13899 {
13900 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13901 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13902 return ret;
13903 }
13904 op = DW_OP_const4s;
13905 }
13906 else
13907 {
13908 if (size_of_int_loc_descriptor (i)
13909 < (unsigned long) 1 + size_of_sleb128 (i))
13910 {
13911 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13912 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13913 return ret;
13914 }
13915 op = DW_OP_consts;
13916 }
13917 }
13918
13919 return new_loc_descr (op, i, 0);
13920 }
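/* A worked example of the size trade-offs above (illustrative only, assuming
   a 64-bit HOST_WIDE_INT): for i = 1 << 40, clz is 23 and ctz is 40, so the
   first shift-based case applies with a shift of 36 and the result is
   DW_OP_lit16 DW_OP_const1u 36 DW_OP_shl, i.e. 4 bytes, whereas a plain
   DW_OP_constu would need 1 + 6 = 7 bytes of opcode plus ULEB128.  */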
13921
13922 /* Likewise, for unsigned constants. */
13923
13924 static dw_loc_descr_ref
13925 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
13926 {
13927 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
13928 const unsigned HOST_WIDE_INT max_uint
13929 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
13930
13931 /* If possible, use the clever signed constants handling. */
13932 if (i <= max_int)
13933 return int_loc_descriptor ((HOST_WIDE_INT) i);
13934
13935 /* Here, we are left with positive numbers that cannot be represented as
13936 HOST_WIDE_INT, i.e.:
13937 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
13938
13939 Using a DW_OP_const4u/const8u operation to encode them consumes a lot of
13940 bytes, whereas it may be better to output a negative integer: thanks to
13941 integer wrapping, we know that:
13942 x = x - 2 ** (DWARF2_ADDR_SIZE * 8)
13943 = x - 2 * (max (HOST_WIDE_INT) + 1)
13944 So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
13945 small negative integers. Let's try that in cases where it will clearly improve
13946 the encoding: there is no gain turning DW_OP_const4u into
13947 DW_OP_const4s. */
13948 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
13949 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
13950 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
13951 {
13952 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
13953
13954 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
13955 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
13956 const HOST_WIDE_INT second_shift
13957 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
13958
13959 /* So we finally have:
13960 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
13961 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
13962 return int_loc_descriptor (second_shift);
13963 }
13964
13965 /* Last chance: fallback to a simple constant operation. */
13966 return new_loc_descr
13967 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13968 ? DW_OP_const4u
13969 : DW_OP_const8u,
13970 i, 0);
13971 }
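/* A worked example (illustrative only, assuming DWARF2_ADDR_SIZE == 8 and a
   64-bit HOST_WIDE_INT): for i = 0xffffffffffffff00 the wrapping trick above
   yields second_shift == -256, which int_loc_descriptor encodes as
   DW_OP_const2s -256 in 3 bytes, instead of the 9 bytes of DW_OP_const8u.  */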
13972
13973 /* Generate and return a location description that computes the unsigned
13974 comparison of the two stack top entries (a OP b where b is the top-most
13975 entry and a is the second one). The KIND of comparison can be LT_EXPR,
13976 LE_EXPR, GT_EXPR or GE_EXPR. */
13977
13978 static dw_loc_descr_ref
13979 uint_comparison_loc_list (enum tree_code kind)
13980 {
13981 enum dwarf_location_atom op, flip_op;
13982 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
13983
13984 switch (kind)
13985 {
13986 case LT_EXPR:
13987 op = DW_OP_lt;
13988 break;
13989 case LE_EXPR:
13990 op = DW_OP_le;
13991 break;
13992 case GT_EXPR:
13993 op = DW_OP_gt;
13994 break;
13995 case GE_EXPR:
13996 op = DW_OP_ge;
13997 break;
13998 default:
13999 gcc_unreachable ();
14000 }
14001
14002 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14003 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
14004
14005 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
14006 possible to perform unsigned comparisons: we just have to distinguish
14007 two cases:
14008
14009 1. when a and b have the same sign (as signed integers); then we should
14010 return: a OP(signed) b;
14011
14012 2. when a is a negative signed integer while b is a positive one, then a
14013 is a greater unsigned integer than b; likewise when a and b's roles
14014 are flipped.
14015
14016 So first, compare the sign of the two operands. */
14017 ret = new_loc_descr (DW_OP_over, 0, 0);
14018 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14019 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
14020 /* If they have different signs (i.e. they have different sign bits), then
14021 the stack top value has now the sign bit set and thus it's smaller than
14022 zero. */
14023 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
14024 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
14025 add_loc_descr (&ret, bra_node);
14026
14027 /* We are in case 1. At this point, we know both operands have the same
14028 sign, so it's safe to use the built-in signed comparison. */
14029 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14030 add_loc_descr (&ret, jmp_node);
14031
14032 /* We are in case 2. Here, we know both operands do not have the same sign,
14033 so we have to flip the signed comparison. */
14034 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
14035 tmp = new_loc_descr (flip_op, 0, 0);
14036 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14037 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
14038 add_loc_descr (&ret, tmp);
14039
14040 /* This dummy operation is necessary to make the two branches join. */
14041 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14042 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14043 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14044 add_loc_descr (&ret, tmp);
14045
14046 return ret;
14047 }
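/* For KIND == LT_EXPR the routine above therefore builds the following
   expression (illustrative layout only; the DW_OP_bra and DW_OP_skip operands
   are byte offsets resolved when the expression is sized):

     DW_OP_over  DW_OP_over  DW_OP_xor      <- sign bit set iff signs differ
     DW_OP_lit0  DW_OP_lt    DW_OP_bra L1   <- branch if they differ
     DW_OP_lt                DW_OP_skip L2  <- same sign: signed compare works
 L1: DW_OP_gt                               <- different signs: flipped compare
 L2: DW_OP_nop                              <- join point of the two branches  */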
14048
14049 /* Likewise, but takes the location description lists (might be destructive on
14050 them). Return NULL if either is NULL or if concatenation fails. */
14051
14052 static dw_loc_list_ref
14053 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14054 enum tree_code kind)
14055 {
14056 if (left == NULL || right == NULL)
14057 return NULL;
14058
14059 add_loc_list (&left, right);
14060 if (left == NULL)
14061 return NULL;
14062
14063 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14064 return left;
14065 }
14066
14067 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14068 without actually allocating it. */
14069
14070 static unsigned long
14071 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14072 {
14073 return size_of_int_loc_descriptor (i >> shift)
14074 + size_of_int_loc_descriptor (shift)
14075 + 1;
14076 }
14077
14078 /* Return size_of_locs (int_loc_descriptor (i)) without
14079 actually allocating it. */
14080
14081 static unsigned long
14082 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14083 {
14084 unsigned long s;
14085
14086 if (i >= 0)
14087 {
14088 int clz, ctz;
14089 if (i <= 31)
14090 return 1;
14091 else if (i <= 0xff)
14092 return 2;
14093 else if (i <= 0xffff)
14094 return 3;
14095 clz = clz_hwi (i);
14096 ctz = ctz_hwi (i);
14097 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14098 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14099 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14100 - clz - 5);
14101 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14102 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14103 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14104 - clz - 8);
14105 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14106 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14107 <= 4)
14108 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14109 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14110 return 5;
14111 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14112 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14113 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14114 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14115 - clz - 8);
14116 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14117 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14118 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14119 - clz - 16);
14120 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14121 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14122 && s > 6)
14123 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14124 - clz - 32);
14125 else
14126 return 1 + s;
14127 }
14128 else
14129 {
14130 if (i >= -0x80)
14131 return 2;
14132 else if (i >= -0x8000)
14133 return 3;
14134 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14135 {
14136 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14137 {
14138 s = size_of_int_loc_descriptor (-i) + 1;
14139 if (s < 5)
14140 return s;
14141 }
14142 return 5;
14143 }
14144 else
14145 {
14146 unsigned long r = 1 + size_of_sleb128 (i);
14147 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14148 {
14149 s = size_of_int_loc_descriptor (-i) + 1;
14150 if (s < r)
14151 return s;
14152 }
14153 return r;
14154 }
14155 }
14156 }
14157
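/* As a worked example of the shift heuristics above, assume a 64-bit
   HOST_WIDE_INT and i == (HOST_WIDE_INT) 1 << 43: then clz == 20 and
   ctz == 43, the first shift branch applies, and the value is sized as
   int_loc_descriptor (i >> 39) + int_loc_descriptor (39) + DW_OP_shl,
   i.e. roughly DW_OP_lit16 DW_OP_const1u 39 DW_OP_shl -- 4 bytes --
   instead of 9 bytes for DW_OP_const8u or 8 bytes for DW_OP_constu.  */
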
14158 /* Return a loc descriptor representing the "address" of an integer value.
14159 This can appear only as a top-level expression. */
14160
14161 static dw_loc_descr_ref
14162 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14163 {
14164 int litsize;
14165 dw_loc_descr_ref loc_result = NULL;
14166
14167 if (!(dwarf_version >= 4 || !dwarf_strict))
14168 return NULL;
14169
14170 litsize = size_of_int_loc_descriptor (i);
14171 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14172 is more compact. For DW_OP_stack_value we need:
14173 litsize + 1 (DW_OP_stack_value)
14174 and for DW_OP_implicit_value:
14175 1 (DW_OP_implicit_value) + 1 (length) + size. */
14176 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14177 {
14178 loc_result = int_loc_descriptor (i);
14179 add_loc_descr (&loc_result,
14180 new_loc_descr (DW_OP_stack_value, 0, 0));
14181 return loc_result;
14182 }
14183
14184 loc_result = new_loc_descr (DW_OP_implicit_value,
14185 size, 0);
14186 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14187 loc_result->dw_loc_oprnd2.v.val_int = i;
14188 return loc_result;
14189 }
14190
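/* For instance, with DWARF2_ADDR_SIZE == 4 and size == 4, i == 5 costs
   2 bytes as DW_OP_lit5 DW_OP_stack_value versus 6 bytes as
   DW_OP_implicit_value 4 <5>, so the stack-value form is chosen; a value
   needing DW_OP_const4u costs 6 bytes either way, and the test
   litsize + 1 <= 1 + 1 + size still picks DW_OP_stack_value.  */
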
14191 /* Return a location descriptor that designates a base+offset location. */
14192
14193 static dw_loc_descr_ref
14194 based_loc_descr (rtx reg, poly_int64 offset,
14195 enum var_init_status initialized)
14196 {
14197 unsigned int regno;
14198 dw_loc_descr_ref result;
14199 dw_fde_ref fde = cfun->fde;
14200
14201 /* We only use "frame base" when we're sure we're talking about the
14202 post-prologue local stack frame. We do this by *not* running
14203 register elimination until this point, and recognizing the special
14204 argument pointer and soft frame pointer rtx's. */
14205 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14206 {
14207 rtx elim = (ira_use_lra_p
14208 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14209 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14210
14211 if (elim != reg)
14212 {
14213 /* Allow the hard frame pointer here even if the frame pointer
14214 isn't used, since the hard frame pointer is encoded with
14215 DW_OP_fbreg, which uses the DW_AT_frame_base attribute,
14216 not the hard frame pointer directly. */
14217 elim = strip_offset_and_add (elim, &offset);
14218 gcc_assert (elim == hard_frame_pointer_rtx
14219 || elim == stack_pointer_rtx);
14220
14221 /* If the drap register is used to align the stack, use frame
14222 pointer + offset to access stack variables. If the stack
14223 is aligned without drap, use stack pointer + offset to
14224 access stack variables. */
14225 if (crtl->stack_realign_tried
14226 && reg == frame_pointer_rtx)
14227 {
14228 int base_reg
14229 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14230 ? HARD_FRAME_POINTER_REGNUM
14231 : REGNO (elim));
14232 return new_reg_loc_descr (base_reg, offset);
14233 }
14234
14235 gcc_assert (frame_pointer_fb_offset_valid);
14236 offset += frame_pointer_fb_offset;
14237 HOST_WIDE_INT const_offset;
14238 if (offset.is_constant (&const_offset))
14239 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14240 else
14241 {
14242 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14243 loc_descr_plus_const (&ret, offset);
14244 return ret;
14245 }
14246 }
14247 }
14248
14249 regno = REGNO (reg);
14250 #ifdef LEAF_REG_REMAP
14251 if (crtl->uses_only_leaf_regs)
14252 {
14253 int leaf_reg = LEAF_REG_REMAP (regno);
14254 if (leaf_reg != -1)
14255 regno = (unsigned) leaf_reg;
14256 }
14257 #endif
14258 regno = DWARF_FRAME_REGNUM (regno);
14259
14260 HOST_WIDE_INT const_offset;
14261 if (!optimize && fde
14262 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14263 && offset.is_constant (&const_offset))
14264 {
14265 /* Use cfa+offset to represent the location of arguments passed
14266 on the stack when drap is used to align the stack.
14267 Only do this when not optimizing; for optimized code var-tracking
14268 is supposed to track where the arguments live, and the register
14269 used as vdrap or drap in some spot might be used for something
14270 else in other parts of the routine. */
14271 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14272 }
14273
14274 result = new_reg_loc_descr (regno, offset);
14275
14276 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14277 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14278
14279 return result;
14280 }
14281
14282 /* Return true if this RTL expression describes a base+offset calculation. */
14283
14284 static inline int
14285 is_based_loc (const_rtx rtl)
14286 {
14287 return (GET_CODE (rtl) == PLUS
14288 && ((REG_P (XEXP (rtl, 0))
14289 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14290 && CONST_INT_P (XEXP (rtl, 1)))));
14291 }
14292
14293 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14294 failed. */
14295
14296 static dw_loc_descr_ref
14297 tls_mem_loc_descriptor (rtx mem)
14298 {
14299 tree base;
14300 dw_loc_descr_ref loc_result;
14301
14302 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14303 return NULL;
14304
14305 base = get_base_address (MEM_EXPR (mem));
14306 if (base == NULL
14307 || !VAR_P (base)
14308 || !DECL_THREAD_LOCAL_P (base))
14309 return NULL;
14310
14311 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14312 if (loc_result == NULL)
14313 return NULL;
14314
14315 if (maybe_ne (MEM_OFFSET (mem), 0))
14316 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14317
14318 return loc_result;
14319 }
14320
14321 /* Output debug info about the reason why we failed to expand the
14322 expression as a dwarf expression. */
14323
14324 static void
14325 expansion_failed (tree expr, rtx rtl, char const *reason)
14326 {
14327 if (dump_file && (dump_flags & TDF_DETAILS))
14328 {
14329 fprintf (dump_file, "Failed to expand as dwarf: ");
14330 if (expr)
14331 print_generic_expr (dump_file, expr, dump_flags);
14332 if (rtl)
14333 {
14334 fprintf (dump_file, "\n");
14335 print_rtl (dump_file, rtl);
14336 }
14337 fprintf (dump_file, "\nReason: %s\n", reason);
14338 }
14339 }
14340
14341 /* Helper function for const_ok_for_output. */
14342
14343 static bool
14344 const_ok_for_output_1 (rtx rtl)
14345 {
14346 if (targetm.const_not_ok_for_debug_p (rtl))
14347 {
14348 if (GET_CODE (rtl) != UNSPEC)
14349 {
14350 expansion_failed (NULL_TREE, rtl,
14351 "Expression rejected for debug by the backend.\n");
14352 return false;
14353 }
14354
14355 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14356 the target hook doesn't explicitly allow it in debug info, assume
14357 we can't express it in the debug info. */
14358 /* Don't complain about TLS UNSPECs, those are just too hard to
14359 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14360 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14361 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14362 if (flag_checking
14363 && (XVECLEN (rtl, 0) == 0
14364 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14365 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14366 inform (current_function_decl
14367 ? DECL_SOURCE_LOCATION (current_function_decl)
14368 : UNKNOWN_LOCATION,
14369 #if NUM_UNSPEC_VALUES > 0
14370 "non-delegitimized UNSPEC %s (%d) found in variable location",
14371 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14372 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14373 XINT (rtl, 1));
14374 #else
14375 "non-delegitimized UNSPEC %d found in variable location",
14376 XINT (rtl, 1));
14377 #endif
14378 expansion_failed (NULL_TREE, rtl,
14379 "UNSPEC hasn't been delegitimized.\n");
14380 return false;
14381 }
14382
14383 if (CONST_POLY_INT_P (rtl))
14384 return false;
14385
14386 if (targetm.const_not_ok_for_debug_p (rtl))
14387 {
14388 expansion_failed (NULL_TREE, rtl,
14389 "Expression rejected for debug by the backend.\n");
14390 return false;
14391 }
14392
14393 /* FIXME: Refer to PR60655. It is possible for simplification
14394 of rtl expressions in var tracking to produce such expressions.
14395 We should really identify / validate expressions
14396 enclosed in CONST that can be handled by assemblers on various
14397 targets and only handle legitimate cases here. */
14398 switch (GET_CODE (rtl))
14399 {
14400 case SYMBOL_REF:
14401 break;
14402 case NOT:
14403 case NEG:
14404 return false;
14405 default:
14406 return true;
14407 }
14408
14409 if (CONSTANT_POOL_ADDRESS_P (rtl))
14410 {
14411 bool marked;
14412 get_pool_constant_mark (rtl, &marked);
14413 /* If all references to this pool constant were optimized away,
14414 it was not output and thus we can't represent it. */
14415 if (!marked)
14416 {
14417 expansion_failed (NULL_TREE, rtl,
14418 "Constant was removed from constant pool.\n");
14419 return false;
14420 }
14421 }
14422
14423 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14424 return false;
14425
14426 /* Avoid references to external symbols in debug info; on several targets
14427 the linker might even refuse to link when linking a shared library,
14428 and in many other cases the relocations for .debug_info/.debug_loc are
14429 dropped, so the address becomes zero anyway. Hidden symbols, guaranteed
14430 to be defined within the same shared library or executable, are fine. */
14431 if (SYMBOL_REF_EXTERNAL_P (rtl))
14432 {
14433 tree decl = SYMBOL_REF_DECL (rtl);
14434
14435 if (decl == NULL || !targetm.binds_local_p (decl))
14436 {
14437 expansion_failed (NULL_TREE, rtl,
14438 "Symbol not defined in current TU.\n");
14439 return false;
14440 }
14441 }
14442
14443 return true;
14444 }
14445
14446 /* Return true if constant RTL can be emitted in DW_OP_addr or
14447 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14448 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
14449
14450 static bool
14451 const_ok_for_output (rtx rtl)
14452 {
14453 if (GET_CODE (rtl) == SYMBOL_REF)
14454 return const_ok_for_output_1 (rtl);
14455
14456 if (GET_CODE (rtl) == CONST)
14457 {
14458 subrtx_var_iterator::array_type array;
14459 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14460 if (!const_ok_for_output_1 (*iter))
14461 return false;
14462 return true;
14463 }
14464
14465 return true;
14466 }
14467
14468 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14469 if possible, NULL otherwise. */
14470
14471 static dw_die_ref
14472 base_type_for_mode (machine_mode mode, bool unsignedp)
14473 {
14474 dw_die_ref type_die;
14475 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14476
14477 if (type == NULL)
14478 return NULL;
14479 switch (TREE_CODE (type))
14480 {
14481 case INTEGER_TYPE:
14482 case REAL_TYPE:
14483 break;
14484 default:
14485 return NULL;
14486 }
14487 type_die = lookup_type_die (type);
14488 if (!type_die)
14489 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14490 comp_unit_die ());
14491 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14492 return NULL;
14493 return type_die;
14494 }
14495
14496 /* For the OP descriptor, assumed to be in unsigned MODE, convert it to an
14497 unsigned type matching MODE, or, if MODE is narrower than or as wide as
14498 DWARF2_ADDR_SIZE, to an untyped value. Return NULL if the conversion is
14499 not possible. */
14500
14501 static dw_loc_descr_ref
14502 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14503 {
14504 machine_mode outer_mode = mode;
14505 dw_die_ref type_die;
14506 dw_loc_descr_ref cvt;
14507
14508 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14509 {
14510 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14511 return op;
14512 }
14513 type_die = base_type_for_mode (outer_mode, 1);
14514 if (type_die == NULL)
14515 return NULL;
14516 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14517 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14518 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14519 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14520 add_loc_descr (&op, cvt);
14521 return op;
14522 }
14523
14524 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14525
14526 static dw_loc_descr_ref
14527 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14528 dw_loc_descr_ref op1)
14529 {
14530 dw_loc_descr_ref ret = op0;
14531 add_loc_descr (&ret, op1);
14532 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14533 if (STORE_FLAG_VALUE != 1)
14534 {
14535 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14536 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14537 }
14538 return ret;
14539 }
14540
14541 /* Subroutine of scompare_loc_descriptor for the case in which we're
14542 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14543 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14544
14545 static dw_loc_descr_ref
14546 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14547 scalar_int_mode op_mode,
14548 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14549 {
14550 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14551 dw_loc_descr_ref cvt;
14552
14553 if (type_die == NULL)
14554 return NULL;
14555 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14556 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14557 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14558 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14559 add_loc_descr (&op0, cvt);
14560 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14561 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14562 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14563 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14564 add_loc_descr (&op1, cvt);
14565 return compare_loc_descriptor (op, op0, op1);
14566 }
14567
14568 /* Subroutine of scompare_loc_descriptor for the case in which we're
14569 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14570 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14571
14572 static dw_loc_descr_ref
14573 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14574 scalar_int_mode op_mode,
14575 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14576 {
14577 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14578 /* For eq/ne, if the operands are known to be zero-extended,
14579 there is no need to do the fancy shifting up. */
14580 if (op == DW_OP_eq || op == DW_OP_ne)
14581 {
14582 dw_loc_descr_ref last0, last1;
14583 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14584 ;
14585 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14586 ;
14587 /* deref_size zero extends, and for constants we can check
14588 whether they are zero extended or not. */
14589 if (((last0->dw_loc_opc == DW_OP_deref_size
14590 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14591 || (CONST_INT_P (XEXP (rtl, 0))
14592 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14593 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14594 && ((last1->dw_loc_opc == DW_OP_deref_size
14595 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14596 || (CONST_INT_P (XEXP (rtl, 1))
14597 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14598 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14599 return compare_loc_descriptor (op, op0, op1);
14600
14601 /* EQ/NE comparison against constant in narrower type than
14602 DWARF2_ADDR_SIZE can be performed either as
14603 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14604 DW_OP_{eq,ne}
14605 or
14606 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14607 DW_OP_{eq,ne}. Pick whatever is shorter. */
14608 if (CONST_INT_P (XEXP (rtl, 1))
14609 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14610 && (size_of_int_loc_descriptor (shift) + 1
14611 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14612 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14613 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14614 & GET_MODE_MASK (op_mode))))
14615 {
14616 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14617 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14618 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14619 & GET_MODE_MASK (op_mode));
14620 return compare_loc_descriptor (op, op0, op1);
14621 }
14622 }
14623 add_loc_descr (&op0, int_loc_descriptor (shift));
14624 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14625 if (CONST_INT_P (XEXP (rtl, 1)))
14626 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14627 else
14628 {
14629 add_loc_descr (&op1, int_loc_descriptor (shift));
14630 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14631 }
14632 return compare_loc_descriptor (op, op0, op1);
14633 }
14634
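/* For example, comparing two QImode operands with DWARF2_ADDR_SIZE == 4
   uses shift == 24: 0x80 becomes 0x80000000 (negative as an address-sized
   signed value) and 0x7f becomes 0x7f000000 (positive), so the widened
   signed comparison orders the shifted operands exactly like the original
   8-bit signed values.  */
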
14635 /* Return location descriptor for signed comparison OP RTL. */
14636
14637 static dw_loc_descr_ref
14638 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14639 machine_mode mem_mode)
14640 {
14641 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14642 dw_loc_descr_ref op0, op1;
14643
14644 if (op_mode == VOIDmode)
14645 op_mode = GET_MODE (XEXP (rtl, 1));
14646 if (op_mode == VOIDmode)
14647 return NULL;
14648
14649 scalar_int_mode int_op_mode;
14650 if (dwarf_strict
14651 && dwarf_version < 5
14652 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14653 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14654 return NULL;
14655
14656 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14657 VAR_INIT_STATUS_INITIALIZED);
14658 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14659 VAR_INIT_STATUS_INITIALIZED);
14660
14661 if (op0 == NULL || op1 == NULL)
14662 return NULL;
14663
14664 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14665 {
14666 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14667 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14668
14669 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14670 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14671 }
14672 return compare_loc_descriptor (op, op0, op1);
14673 }
14674
14675 /* Return location descriptor for unsigned comparison OP RTL. */
14676
14677 static dw_loc_descr_ref
14678 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14679 machine_mode mem_mode)
14680 {
14681 dw_loc_descr_ref op0, op1;
14682
14683 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14684 if (test_op_mode == VOIDmode)
14685 test_op_mode = GET_MODE (XEXP (rtl, 1));
14686
14687 scalar_int_mode op_mode;
14688 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14689 return NULL;
14690
14691 if (dwarf_strict
14692 && dwarf_version < 5
14693 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14694 return NULL;
14695
14696 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14697 VAR_INIT_STATUS_INITIALIZED);
14698 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14699 VAR_INIT_STATUS_INITIALIZED);
14700
14701 if (op0 == NULL || op1 == NULL)
14702 return NULL;
14703
14704 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14705 {
14706 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14707 dw_loc_descr_ref last0, last1;
14708 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14709 ;
14710 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14711 ;
14712 if (CONST_INT_P (XEXP (rtl, 0)))
14713 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14714 /* deref_size zero extends, so no need to mask it again. */
14715 else if (last0->dw_loc_opc != DW_OP_deref_size
14716 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14717 {
14718 add_loc_descr (&op0, int_loc_descriptor (mask));
14719 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14720 }
14721 if (CONST_INT_P (XEXP (rtl, 1)))
14722 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14723 /* deref_size zero extends, so no need to mask it again. */
14724 else if (last1->dw_loc_opc != DW_OP_deref_size
14725 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14726 {
14727 add_loc_descr (&op1, int_loc_descriptor (mask));
14728 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14729 }
14730 }
14731 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14732 {
14733 HOST_WIDE_INT bias = 1;
14734 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14735 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14736 if (CONST_INT_P (XEXP (rtl, 1)))
14737 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14738 + INTVAL (XEXP (rtl, 1)));
14739 else
14740 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14741 bias, 0));
14742 }
14743 return compare_loc_descriptor (op, op0, op1);
14744 }
14745
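/* The DW_OP_plus_uconst bias above flips the most significant bit of both
   operands, which reduces the unsigned comparison to a signed one: e.g.
   with DWARF2_ADDR_SIZE == 4, testing 0xffffffff <u 1 becomes testing
   0x7fffffff <s 0x80000001 (2147483647 <s -2147483647), and both are
   false.  In the narrower-than-address-size case, masking with
   GET_MODE_MASK zero-extends the operands, so the plain signed comparison
   on the masked values is already the unsigned comparison.  */
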
14746 /* Return location descriptor for {U,S}{MIN,MAX}. */
14747
14748 static dw_loc_descr_ref
14749 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14750 machine_mode mem_mode)
14751 {
14752 enum dwarf_location_atom op;
14753 dw_loc_descr_ref op0, op1, ret;
14754 dw_loc_descr_ref bra_node, drop_node;
14755
14756 scalar_int_mode int_mode;
14757 if (dwarf_strict
14758 && dwarf_version < 5
14759 && (!is_a <scalar_int_mode> (mode, &int_mode)
14760 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14761 return NULL;
14762
14763 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14764 VAR_INIT_STATUS_INITIALIZED);
14765 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14766 VAR_INIT_STATUS_INITIALIZED);
14767
14768 if (op0 == NULL || op1 == NULL)
14769 return NULL;
14770
14771 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14772 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14773 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14774 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14775 {
14776 /* Checked by the caller. */
14777 int_mode = as_a <scalar_int_mode> (mode);
14778 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14779 {
14780 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14781 add_loc_descr (&op0, int_loc_descriptor (mask));
14782 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14783 add_loc_descr (&op1, int_loc_descriptor (mask));
14784 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14785 }
14786 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14787 {
14788 HOST_WIDE_INT bias = 1;
14789 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14790 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14791 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14792 }
14793 }
14794 else if (is_a <scalar_int_mode> (mode, &int_mode)
14795 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14796 {
14797 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14798 add_loc_descr (&op0, int_loc_descriptor (shift));
14799 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14800 add_loc_descr (&op1, int_loc_descriptor (shift));
14801 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14802 }
14803 else if (is_a <scalar_int_mode> (mode, &int_mode)
14804 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14805 {
14806 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14807 dw_loc_descr_ref cvt;
14808 if (type_die == NULL)
14809 return NULL;
14810 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14811 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14812 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14813 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14814 add_loc_descr (&op0, cvt);
14815 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14816 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14817 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14818 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14819 add_loc_descr (&op1, cvt);
14820 }
14821
14822 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14823 op = DW_OP_lt;
14824 else
14825 op = DW_OP_gt;
14826 ret = op0;
14827 add_loc_descr (&ret, op1);
14828 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14829 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14830 add_loc_descr (&ret, bra_node);
14831 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14832 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
14833 add_loc_descr (&ret, drop_node);
14834 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14835 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
14836 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
14837 && is_a <scalar_int_mode> (mode, &int_mode)
14838 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14839 ret = convert_descriptor_to_mode (int_mode, ret);
14840 return ret;
14841 }
14842
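/* For an address-sized SMIN with no extra conversions, the sequence built
   above evaluates as follows: the stack holds A B A B; DW_OP_lt leaves
   A B (A < B); if the flag is nonzero, DW_OP_bra jumps straight to
   DW_OP_drop, discarding B and leaving A; otherwise the fall-through
   DW_OP_swap and DW_OP_drop discard A and leave B.  */
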
14843 /* Helper function for mem_loc_descriptor. Perform the binary operation OP
14844 after converting both arguments to TYPE_DIE, then convert the result
14845 back to unsigned. */
14846
14847 static dw_loc_descr_ref
14848 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
14849 scalar_int_mode mode, machine_mode mem_mode)
14850 {
14851 dw_loc_descr_ref cvt, op0, op1;
14852
14853 if (type_die == NULL)
14854 return NULL;
14855 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14856 VAR_INIT_STATUS_INITIALIZED);
14857 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14858 VAR_INIT_STATUS_INITIALIZED);
14859 if (op0 == NULL || op1 == NULL)
14860 return NULL;
14861 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14862 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14863 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14864 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14865 add_loc_descr (&op0, cvt);
14866 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14867 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14868 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14869 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14870 add_loc_descr (&op1, cvt);
14871 add_loc_descr (&op0, op1);
14872 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
14873 return convert_descriptor_to_mode (mode, op0);
14874 }
14875
14876 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
14877 const0 is DW_OP_lit0 or corresponding typed constant,
14878 const1 is DW_OP_lit1 or corresponding typed constant
14879 and constMSB is constant with just the MSB bit set
14880 for the mode):
14881 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14882 L1: const0 DW_OP_swap
14883 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
14884 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14885 L3: DW_OP_drop
14886 L4: DW_OP_nop
14887
14888 CTZ is similar:
14889 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14890 L1: const0 DW_OP_swap
14891 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14892 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14893 L3: DW_OP_drop
14894 L4: DW_OP_nop
14895
14896 FFS is similar:
14897 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
14898 L1: const1 DW_OP_swap
14899 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14900 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14901 L3: DW_OP_drop
14902 L4: DW_OP_nop */
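
/* As a rough C model of the CLZ expression above, with X an unsigned
   address-sized value and MSB the constant with only the mode's most
   significant bit set:

     if (x == 0)
       result = constV;
     else
       {
         result = 0;
         while ((x & msb) == 0)
           {
             x <<= 1;
             result++;
           }
       }

   CTZ tests the low bit and shifts right instead; FFS does the same but
   starts the count at 1 and yields 0 for a zero input.  */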
14903
14904 static dw_loc_descr_ref
14905 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
14906 machine_mode mem_mode)
14907 {
14908 dw_loc_descr_ref op0, ret, tmp;
14909 HOST_WIDE_INT valv;
14910 dw_loc_descr_ref l1jump, l1label;
14911 dw_loc_descr_ref l2jump, l2label;
14912 dw_loc_descr_ref l3jump, l3label;
14913 dw_loc_descr_ref l4jump, l4label;
14914 rtx msb;
14915
14916 if (GET_MODE (XEXP (rtl, 0)) != mode)
14917 return NULL;
14918
14919 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14920 VAR_INIT_STATUS_INITIALIZED);
14921 if (op0 == NULL)
14922 return NULL;
14923 ret = op0;
14924 if (GET_CODE (rtl) == CLZ)
14925 {
14926 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14927 valv = GET_MODE_BITSIZE (mode);
14928 }
14929 else if (GET_CODE (rtl) == FFS)
14930 valv = 0;
14931 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14932 valv = GET_MODE_BITSIZE (mode);
14933 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
14934 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
14935 add_loc_descr (&ret, l1jump);
14936 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
14937 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
14938 VAR_INIT_STATUS_INITIALIZED);
14939 if (tmp == NULL)
14940 return NULL;
14941 add_loc_descr (&ret, tmp);
14942 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
14943 add_loc_descr (&ret, l4jump);
14944 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
14945 ? const1_rtx : const0_rtx,
14946 mode, mem_mode,
14947 VAR_INIT_STATUS_INITIALIZED);
14948 if (l1label == NULL)
14949 return NULL;
14950 add_loc_descr (&ret, l1label);
14951 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14952 l2label = new_loc_descr (DW_OP_dup, 0, 0);
14953 add_loc_descr (&ret, l2label);
14954 if (GET_CODE (rtl) != CLZ)
14955 msb = const1_rtx;
14956 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
14957 msb = GEN_INT (HOST_WIDE_INT_1U
14958 << (GET_MODE_BITSIZE (mode) - 1));
14959 else
14960 msb = immed_wide_int_const
14961 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
14962 GET_MODE_PRECISION (mode)), mode);
14963 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
14964 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
14965 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
14966 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
14967 else
14968 tmp = mem_loc_descriptor (msb, mode, mem_mode,
14969 VAR_INIT_STATUS_INITIALIZED);
14970 if (tmp == NULL)
14971 return NULL;
14972 add_loc_descr (&ret, tmp);
14973 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
14974 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
14975 add_loc_descr (&ret, l3jump);
14976 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
14977 VAR_INIT_STATUS_INITIALIZED);
14978 if (tmp == NULL)
14979 return NULL;
14980 add_loc_descr (&ret, tmp);
14981 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
14982 ? DW_OP_shl : DW_OP_shr, 0, 0));
14983 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14984 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
14985 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14986 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
14987 add_loc_descr (&ret, l2jump);
14988 l3label = new_loc_descr (DW_OP_drop, 0, 0);
14989 add_loc_descr (&ret, l3label);
14990 l4label = new_loc_descr (DW_OP_nop, 0, 0);
14991 add_loc_descr (&ret, l4label);
14992 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14993 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
14994 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14995 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
14996 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14997 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
14998 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14999 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
15000 return ret;
15001 }
15002
15003 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
15004 const1 is DW_OP_lit1 or corresponding typed constant):
15005 const0 DW_OP_swap
15006 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15007 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15008 L2: DW_OP_drop
15009
15010 PARITY is similar:
15011 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15012 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15013 L2: DW_OP_drop */
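
/* Roughly, the expression above computes, for POPCOUNT:

     result = 0;
     while (x != 0)
       {
         result += x & 1;
         x >>= 1;
       }

   and the same with += replaced by ^= for PARITY.  */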
15014
15015 static dw_loc_descr_ref
15016 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
15017 machine_mode mem_mode)
15018 {
15019 dw_loc_descr_ref op0, ret, tmp;
15020 dw_loc_descr_ref l1jump, l1label;
15021 dw_loc_descr_ref l2jump, l2label;
15022
15023 if (GET_MODE (XEXP (rtl, 0)) != mode)
15024 return NULL;
15025
15026 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15027 VAR_INIT_STATUS_INITIALIZED);
15028 if (op0 == NULL)
15029 return NULL;
15030 ret = op0;
15031 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15032 VAR_INIT_STATUS_INITIALIZED);
15033 if (tmp == NULL)
15034 return NULL;
15035 add_loc_descr (&ret, tmp);
15036 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15037 l1label = new_loc_descr (DW_OP_dup, 0, 0);
15038 add_loc_descr (&ret, l1label);
15039 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15040 add_loc_descr (&ret, l2jump);
15041 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15042 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15043 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15044 VAR_INIT_STATUS_INITIALIZED);
15045 if (tmp == NULL)
15046 return NULL;
15047 add_loc_descr (&ret, tmp);
15048 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15049 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15050 ? DW_OP_plus : DW_OP_xor, 0, 0));
15051 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15052 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15053 VAR_INIT_STATUS_INITIALIZED);
15054 add_loc_descr (&ret, tmp);
15055 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15056 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15057 add_loc_descr (&ret, l1jump);
15058 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15059 add_loc_descr (&ret, l2label);
15060 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15061 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15062 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15063 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15064 return ret;
15065 }
15066
15067 /* BSWAP (constS is initial shift count, either 56 or 24):
15068 constS const0
15069 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15070 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15071 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15072 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15073 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
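
/* In C terms, with S the initial shift count (bitsize - 8), the loop above
   builds the byte-swapped result as roughly:

     result = 0;
     for (s = S; ; s -= 8)
       {
         result |= ((x >> (S - s)) & 0xff) << s;
         if (s == 0)
           break;
       }
*/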
15074
15075 static dw_loc_descr_ref
15076 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15077 machine_mode mem_mode)
15078 {
15079 dw_loc_descr_ref op0, ret, tmp;
15080 dw_loc_descr_ref l1jump, l1label;
15081 dw_loc_descr_ref l2jump, l2label;
15082
15083 if (BITS_PER_UNIT != 8
15084 || (GET_MODE_BITSIZE (mode) != 32
15085 && GET_MODE_BITSIZE (mode) != 64))
15086 return NULL;
15087
15088 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15089 VAR_INIT_STATUS_INITIALIZED);
15090 if (op0 == NULL)
15091 return NULL;
15092
15093 ret = op0;
15094 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15095 mode, mem_mode,
15096 VAR_INIT_STATUS_INITIALIZED);
15097 if (tmp == NULL)
15098 return NULL;
15099 add_loc_descr (&ret, tmp);
15100 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15101 VAR_INIT_STATUS_INITIALIZED);
15102 if (tmp == NULL)
15103 return NULL;
15104 add_loc_descr (&ret, tmp);
15105 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15106 add_loc_descr (&ret, l1label);
15107 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15108 mode, mem_mode,
15109 VAR_INIT_STATUS_INITIALIZED);
15110 add_loc_descr (&ret, tmp);
15111 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15112 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15113 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15114 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15115 VAR_INIT_STATUS_INITIALIZED);
15116 if (tmp == NULL)
15117 return NULL;
15118 add_loc_descr (&ret, tmp);
15119 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15120 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15121 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15122 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15123 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15124 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15125 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15126 VAR_INIT_STATUS_INITIALIZED);
15127 add_loc_descr (&ret, tmp);
15128 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15129 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15130 add_loc_descr (&ret, l2jump);
15131 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15132 VAR_INIT_STATUS_INITIALIZED);
15133 add_loc_descr (&ret, tmp);
15134 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15135 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15136 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15137 add_loc_descr (&ret, l1jump);
15138 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15139 add_loc_descr (&ret, l2label);
15140 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15141 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15142 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15143 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15144 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15145 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15146 return ret;
15147 }
15148
15149 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15150 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15151 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15152 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15153
15154 ROTATERT is similar:
15155 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15156 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15157 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
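
/* In C terms, for 0 < N < BITSIZE the ROTATE sequence above computes

     ((X << N) & constMASK) | ((X & constMASK) >> (BITSIZE - N))

   and ROTATERT swaps the two shift counts; the constMASK steps are only
   emitted when the mode is narrower than DWARF2_ADDR_SIZE.  */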
15158
15159 static dw_loc_descr_ref
15160 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15161 machine_mode mem_mode)
15162 {
15163 rtx rtlop1 = XEXP (rtl, 1);
15164 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15165 int i;
15166
15167 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15168 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15169 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15170 VAR_INIT_STATUS_INITIALIZED);
15171 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15172 VAR_INIT_STATUS_INITIALIZED);
15173 if (op0 == NULL || op1 == NULL)
15174 return NULL;
15175 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15176 for (i = 0; i < 2; i++)
15177 {
15178 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15179 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15180 mode, mem_mode,
15181 VAR_INIT_STATUS_INITIALIZED);
15182 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15183 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15184 ? DW_OP_const4u
15185 : HOST_BITS_PER_WIDE_INT == 64
15186 ? DW_OP_const8u : DW_OP_constu,
15187 GET_MODE_MASK (mode), 0);
15188 else
15189 mask[i] = NULL;
15190 if (mask[i] == NULL)
15191 return NULL;
15192 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15193 }
15194 ret = op0;
15195 add_loc_descr (&ret, op1);
15196 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15197 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15198 if (GET_CODE (rtl) == ROTATERT)
15199 {
15200 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15201 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15202 GET_MODE_BITSIZE (mode), 0));
15203 }
15204 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15205 if (mask[0] != NULL)
15206 add_loc_descr (&ret, mask[0]);
15207 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15208 if (mask[1] != NULL)
15209 {
15210 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15211 add_loc_descr (&ret, mask[1]);
15212 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15213 }
15214 if (GET_CODE (rtl) == ROTATE)
15215 {
15216 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15217 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15218 GET_MODE_BITSIZE (mode), 0));
15219 }
15220 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15221 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15222 return ret;
15223 }
15224
15225 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15226 for DEBUG_PARAMETER_REF RTL. */
15227
15228 static dw_loc_descr_ref
15229 parameter_ref_descriptor (rtx rtl)
15230 {
15231 dw_loc_descr_ref ret;
15232 dw_die_ref ref;
15233
15234 if (dwarf_strict)
15235 return NULL;
15236 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15237 /* With LTO during LTRANS we get the late DIE that refers to the early
15238 DIE, thus we add another indirection here. This seems to confuse
15239 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15240 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15241 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15242 if (ref)
15243 {
15244 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15245 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15246 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15247 }
15248 else
15249 {
15250 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15251 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15252 }
15253 return ret;
15254 }
15255
15256 /* The following routine converts the RTL for a variable or parameter
15257 (resident in memory) into an equivalent Dwarf representation of a
15258 mechanism for getting the address of that same variable onto the top of a
15259 hypothetical "address evaluation" stack.
15260
15261 When creating memory location descriptors, we are effectively transforming
15262 the RTL for a memory-resident object into its Dwarf postfix expression
15263 equivalent. This routine recursively descends an RTL tree, turning
15264 it into Dwarf postfix code as it goes.
15265
15266 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15267
15268 MEM_MODE is the mode of the memory reference, needed to handle some
15269 autoincrement addressing modes.
15270
15271 Return 0 if we can't represent the location. */
15272
15273 dw_loc_descr_ref
15274 mem_loc_descriptor (rtx rtl, machine_mode mode,
15275 machine_mode mem_mode,
15276 enum var_init_status initialized)
15277 {
15278 dw_loc_descr_ref mem_loc_result = NULL;
15279 enum dwarf_location_atom op;
15280 dw_loc_descr_ref op0, op1;
15281 rtx inner = NULL_RTX;
15282 poly_int64 offset;
15283
15284 if (mode == VOIDmode)
15285 mode = GET_MODE (rtl);
15286
15287 /* Note that for a dynamically sized array, the location we will generate a
15288 description of here will be the lowest numbered location which is
15289 actually within the array. That's *not* necessarily the same as the
15290 zeroth element of the array. */
15291
15292 rtl = targetm.delegitimize_address (rtl);
15293
15294 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15295 return NULL;
15296
15297 scalar_int_mode int_mode, inner_mode, op1_mode;
15298 switch (GET_CODE (rtl))
15299 {
15300 case POST_INC:
15301 case POST_DEC:
15302 case POST_MODIFY:
15303 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15304
15305 case SUBREG:
15306 /* The case of a subreg may arise when we have a local (register)
15307 variable or a formal (register) parameter which doesn't quite fill
15308 up an entire register. For now, just assume that it is
15309 legitimate to make the Dwarf info refer to the whole register which
15310 contains the given subreg. */
15311 if (!subreg_lowpart_p (rtl))
15312 break;
15313 inner = SUBREG_REG (rtl);
15314 /* FALLTHRU */
15315 case TRUNCATE:
15316 if (inner == NULL_RTX)
15317 inner = XEXP (rtl, 0);
15318 if (is_a <scalar_int_mode> (mode, &int_mode)
15319 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15320 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15321 #ifdef POINTERS_EXTEND_UNSIGNED
15322 || (int_mode == Pmode && mem_mode != VOIDmode)
15323 #endif
15324 )
15325 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15326 {
15327 mem_loc_result = mem_loc_descriptor (inner,
15328 inner_mode,
15329 mem_mode, initialized);
15330 break;
15331 }
15332 if (dwarf_strict && dwarf_version < 5)
15333 break;
15334 if (is_a <scalar_int_mode> (mode, &int_mode)
15335 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15336 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15337 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15338 {
15339 dw_die_ref type_die;
15340 dw_loc_descr_ref cvt;
15341
15342 mem_loc_result = mem_loc_descriptor (inner,
15343 GET_MODE (inner),
15344 mem_mode, initialized);
15345 if (mem_loc_result == NULL)
15346 break;
15347 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15348 if (type_die == NULL)
15349 {
15350 mem_loc_result = NULL;
15351 break;
15352 }
15353 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15354 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15355 else
15356 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15357 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15358 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15359 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15360 add_loc_descr (&mem_loc_result, cvt);
15361 if (is_a <scalar_int_mode> (mode, &int_mode)
15362 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15363 {
15364 /* Convert it to untyped afterwards. */
15365 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15366 add_loc_descr (&mem_loc_result, cvt);
15367 }
15368 }
15369 break;
15370
15371 case REG:
15372 if (!is_a <scalar_int_mode> (mode, &int_mode)
15373 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15374 && rtl != arg_pointer_rtx
15375 && rtl != frame_pointer_rtx
15376 #ifdef POINTERS_EXTEND_UNSIGNED
15377 && (int_mode != Pmode || mem_mode == VOIDmode)
15378 #endif
15379 ))
15380 {
15381 dw_die_ref type_die;
15382 unsigned int dbx_regnum;
15383
15384 if (dwarf_strict && dwarf_version < 5)
15385 break;
15386 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
15387 break;
15388 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15389 if (type_die == NULL)
15390 break;
15391
15392 dbx_regnum = dbx_reg_number (rtl);
15393 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15394 break;
15395 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15396 dbx_regnum, 0);
15397 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15398 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15399 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15400 break;
15401 }
15402 /* Whenever a register number forms a part of the description of the
15403 method for calculating the (dynamic) address of a memory resident
15404 object, DWARF rules require the register number be referred to as
15405 a "base register". This distinction is not based in any way upon
15406 what category of register the hardware believes the given register
15407 belongs to. This is strictly DWARF terminology we're dealing with
15408 here. Note that in cases where the location of a memory-resident
15409 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15410 OP_CONST (0)) the actual DWARF location descriptor that we generate
15411 may just be OP_BASEREG (basereg). This may look deceptively like
15412 the object in question was allocated to a register (rather than in
15413 memory) so DWARF consumers need to be aware of the subtle
15414 distinction between OP_REG and OP_BASEREG. */
15415 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15416 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15417 else if (stack_realign_drap
15418 && crtl->drap_reg
15419 && crtl->args.internal_arg_pointer == rtl
15420 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15421 {
15422 /* If RTL is internal_arg_pointer, which has been optimized
15423 out, use DRAP instead. */
15424 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15425 VAR_INIT_STATUS_INITIALIZED);
15426 }
15427 break;
15428
15429 case SIGN_EXTEND:
15430 case ZERO_EXTEND:
15431 if (!is_a <scalar_int_mode> (mode, &int_mode)
15432 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15433 break;
15434 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15435 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15436 if (op0 == 0)
15437 break;
15438 else if (GET_CODE (rtl) == ZERO_EXTEND
15439 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15440 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15441 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15442 to expand zero extend as two shifts instead of
15443 masking. */
15444 && GET_MODE_SIZE (inner_mode) <= 4)
15445 {
15446 mem_loc_result = op0;
15447 add_loc_descr (&mem_loc_result,
15448 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15449 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15450 }
15451 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15452 {
15453 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15454 shift *= BITS_PER_UNIT;
15455 if (GET_CODE (rtl) == SIGN_EXTEND)
15456 op = DW_OP_shra;
15457 else
15458 op = DW_OP_shr;
15459 mem_loc_result = op0;
15460 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15461 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15462 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15463 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15464 }
15465 else if (!dwarf_strict || dwarf_version >= 5)
15466 {
15467 dw_die_ref type_die1, type_die2;
15468 dw_loc_descr_ref cvt;
15469
15470 type_die1 = base_type_for_mode (inner_mode,
15471 GET_CODE (rtl) == ZERO_EXTEND);
15472 if (type_die1 == NULL)
15473 break;
15474 type_die2 = base_type_for_mode (int_mode, 1);
15475 if (type_die2 == NULL)
15476 break;
15477 mem_loc_result = op0;
15478 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15479 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15480 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15481 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15482 add_loc_descr (&mem_loc_result, cvt);
15483 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15484 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15485 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15486 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15487 add_loc_descr (&mem_loc_result, cvt);
15488 }
15489 break;
15490
15491 case MEM:
15492 {
15493 rtx new_rtl = avoid_constant_pool_reference (rtl);
15494 if (new_rtl != rtl)
15495 {
15496 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15497 initialized);
15498 if (mem_loc_result != NULL)
15499 return mem_loc_result;
15500 }
15501 }
15502 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15503 get_address_mode (rtl), mode,
15504 VAR_INIT_STATUS_INITIALIZED);
15505 if (mem_loc_result == NULL)
15506 mem_loc_result = tls_mem_loc_descriptor (rtl);
15507 if (mem_loc_result != NULL)
15508 {
15509 if (!is_a <scalar_int_mode> (mode, &int_mode)
15510 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15511 {
15512 dw_die_ref type_die;
15513 dw_loc_descr_ref deref;
15514 HOST_WIDE_INT size;
15515
15516 if (dwarf_strict && dwarf_version < 5)
15517 return NULL;
15518 if (!GET_MODE_SIZE (mode).is_constant (&size))
15519 return NULL;
15520 type_die
15521 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15522 if (type_die == NULL)
15523 return NULL;
15524 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15525 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15526 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15527 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15528 add_loc_descr (&mem_loc_result, deref);
15529 }
15530 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15531 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15532 else
15533 add_loc_descr (&mem_loc_result,
15534 new_loc_descr (DW_OP_deref_size,
15535 GET_MODE_SIZE (int_mode), 0));
15536 }
15537 break;
15538
15539 case LO_SUM:
15540 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15541
15542 case LABEL_REF:
15543 /* Some ports can transform a symbol ref into a label ref, because
15544 the symbol ref is too far away and has to be dumped into a constant
15545 pool. */
15546 case CONST:
15547 case SYMBOL_REF:
15548 if (!is_a <scalar_int_mode> (mode, &int_mode)
15549 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15550 #ifdef POINTERS_EXTEND_UNSIGNED
15551 && (int_mode != Pmode || mem_mode == VOIDmode)
15552 #endif
15553 ))
15554 break;
15555 if (GET_CODE (rtl) == SYMBOL_REF
15556 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15557 {
15558 dw_loc_descr_ref temp;
15559
15560 /* If this is not defined, we have no way to emit the data. */
15561 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15562 break;
15563
15564 temp = new_addr_loc_descr (rtl, dtprel_true);
15565
15566 /* We check for DWARF 5 here because gdb did not implement
15567 DW_OP_form_tls_address until after 7.12. */
15568 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15569 ? DW_OP_form_tls_address
15570 : DW_OP_GNU_push_tls_address),
15571 0, 0);
15572 add_loc_descr (&mem_loc_result, temp);
15573
15574 break;
15575 }
15576
15577 if (!const_ok_for_output (rtl))
15578 {
15579 if (GET_CODE (rtl) == CONST)
15580 switch (GET_CODE (XEXP (rtl, 0)))
15581 {
15582 case NOT:
15583 op = DW_OP_not;
15584 goto try_const_unop;
15585 case NEG:
15586 op = DW_OP_neg;
15587 goto try_const_unop;
15588 try_const_unop:
15589 rtx arg;
15590 arg = XEXP (XEXP (rtl, 0), 0);
15591 if (!CONSTANT_P (arg))
15592 arg = gen_rtx_CONST (int_mode, arg);
15593 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15594 initialized);
15595 if (op0)
15596 {
15597 mem_loc_result = op0;
15598 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15599 }
15600 break;
15601 default:
15602 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15603 mem_mode, initialized);
15604 break;
15605 }
15606 break;
15607 }
15608
15609 symref:
15610 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15611 vec_safe_push (used_rtx_array, rtl);
15612 break;
15613
15614 case CONCAT:
15615 case CONCATN:
15616 case VAR_LOCATION:
15617 case DEBUG_IMPLICIT_PTR:
15618 expansion_failed (NULL_TREE, rtl,
15619 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15620 return 0;
15621
15622 case ENTRY_VALUE:
15623 if (dwarf_strict && dwarf_version < 5)
15624 return NULL;
15625 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15626 {
15627 if (!is_a <scalar_int_mode> (mode, &int_mode)
15628 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15629 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15630 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15631 else
15632 {
15633 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15634 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15635 return NULL;
15636 op0 = one_reg_loc_descriptor (dbx_regnum,
15637 VAR_INIT_STATUS_INITIALIZED);
15638 }
15639 }
15640 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15641 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15642 {
15643 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15644 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15645 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15646 return NULL;
15647 }
15648 else
15649 gcc_unreachable ();
15650 if (op0 == NULL)
15651 return NULL;
15652 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15653 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15654 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15655 break;
15656
15657 case DEBUG_PARAMETER_REF:
15658 mem_loc_result = parameter_ref_descriptor (rtl);
15659 break;
15660
15661 case PRE_MODIFY:
15662 /* Extract the PLUS expression nested inside and fall into
15663 PLUS code below. */
15664 rtl = XEXP (rtl, 1);
15665 goto plus;
15666
15667 case PRE_INC:
15668 case PRE_DEC:
15669 /* Turn these into a PLUS expression and fall into the PLUS code
15670 below. */
15671 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15672 gen_int_mode (GET_CODE (rtl) == PRE_INC
15673 ? GET_MODE_UNIT_SIZE (mem_mode)
15674 : -GET_MODE_UNIT_SIZE (mem_mode),
15675 mode));
15676
15677 /* fall through */
15678
15679 case PLUS:
15680 plus:
15681 if (is_based_loc (rtl)
15682 && is_a <scalar_int_mode> (mode, &int_mode)
15683 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15684 || XEXP (rtl, 0) == arg_pointer_rtx
15685 || XEXP (rtl, 0) == frame_pointer_rtx))
15686 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15687 INTVAL (XEXP (rtl, 1)),
15688 VAR_INIT_STATUS_INITIALIZED);
15689 else
15690 {
15691 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15692 VAR_INIT_STATUS_INITIALIZED);
15693 if (mem_loc_result == 0)
15694 break;
15695
15696 if (CONST_INT_P (XEXP (rtl, 1))
15697 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15698 <= DWARF2_ADDR_SIZE))
15699 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15700 else
15701 {
15702 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15703 VAR_INIT_STATUS_INITIALIZED);
15704 if (op1 == 0)
15705 return NULL;
15706 add_loc_descr (&mem_loc_result, op1);
15707 add_loc_descr (&mem_loc_result,
15708 new_loc_descr (DW_OP_plus, 0, 0));
15709 }
15710 }
15711 break;
15712
15713 /* If a pseudo-reg is optimized away, it is possible for it to
15714 be replaced with a MEM containing a multiply or shift. */
15715 case MINUS:
15716 op = DW_OP_minus;
15717 goto do_binop;
15718
15719 case MULT:
15720 op = DW_OP_mul;
15721 goto do_binop;
15722
15723 case DIV:
15724 if ((!dwarf_strict || dwarf_version >= 5)
15725 && is_a <scalar_int_mode> (mode, &int_mode)
15726 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15727 {
15728 mem_loc_result = typed_binop (DW_OP_div, rtl,
15729 base_type_for_mode (mode, 0),
15730 int_mode, mem_mode);
15731 break;
15732 }
15733 op = DW_OP_div;
15734 goto do_binop;
15735
15736 case UMOD:
15737 op = DW_OP_mod;
15738 goto do_binop;
15739
15740 case ASHIFT:
15741 op = DW_OP_shl;
15742 goto do_shift;
15743
15744 case ASHIFTRT:
15745 op = DW_OP_shra;
15746 goto do_shift;
15747
15748 case LSHIFTRT:
15749 op = DW_OP_shr;
15750 goto do_shift;
15751
15752 do_shift:
15753 if (!is_a <scalar_int_mode> (mode, &int_mode))
15754 break;
15755 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15756 VAR_INIT_STATUS_INITIALIZED);
15757 {
15758 rtx rtlop1 = XEXP (rtl, 1);
15759 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15760 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15761 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15762 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15763 VAR_INIT_STATUS_INITIALIZED);
15764 }
15765
15766 if (op0 == 0 || op1 == 0)
15767 break;
15768
15769 mem_loc_result = op0;
15770 add_loc_descr (&mem_loc_result, op1);
15771 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15772 break;
15773
15774 case AND:
15775 op = DW_OP_and;
15776 goto do_binop;
15777
15778 case IOR:
15779 op = DW_OP_or;
15780 goto do_binop;
15781
15782 case XOR:
15783 op = DW_OP_xor;
15784 goto do_binop;
15785
15786 do_binop:
15787 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15788 VAR_INIT_STATUS_INITIALIZED);
15789 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15790 VAR_INIT_STATUS_INITIALIZED);
15791
15792 if (op0 == 0 || op1 == 0)
15793 break;
15794
15795 mem_loc_result = op0;
15796 add_loc_descr (&mem_loc_result, op1);
15797 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15798 break;
15799
15800 case MOD:
15801 if ((!dwarf_strict || dwarf_version >= 5)
15802 && is_a <scalar_int_mode> (mode, &int_mode)
15803 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15804 {
15805 mem_loc_result = typed_binop (DW_OP_mod, rtl,
15806 base_type_for_mode (mode, 0),
15807 int_mode, mem_mode);
15808 break;
15809 }
15810
15811 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15812 VAR_INIT_STATUS_INITIALIZED);
15813 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15814 VAR_INIT_STATUS_INITIALIZED);
15815
15816 if (op0 == 0 || op1 == 0)
15817 break;
15818
15819 mem_loc_result = op0;
15820 add_loc_descr (&mem_loc_result, op1);
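/* With op0 and op1 on the DWARF stack (op1 on top), the sequence below
   computes op0 - (op0 / op1) * op1, i.e. the signed modulo:
     over over -> op0 op1 op0 op1
     div       -> op0 op1 (op0 / op1)
     mul       -> op0 (op1 * (op0 / op1))
     minus     -> op0 - op1 * (op0 / op1)  */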
15821 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15822 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15823 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
15824 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
15825 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
15826 break;
15827
15828 case UDIV:
15829 if ((!dwarf_strict || dwarf_version >= 5)
15830 && is_a <scalar_int_mode> (mode, &int_mode))
15831 {
15832 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15833 {
15834 op = DW_OP_div;
15835 goto do_binop;
15836 }
15837 mem_loc_result = typed_binop (DW_OP_div, rtl,
15838 base_type_for_mode (int_mode, 1),
15839 int_mode, mem_mode);
15840 }
15841 break;
15842
15843 case NOT:
15844 op = DW_OP_not;
15845 goto do_unop;
15846
15847 case ABS:
15848 op = DW_OP_abs;
15849 goto do_unop;
15850
15851 case NEG:
15852 op = DW_OP_neg;
15853 goto do_unop;
15854
15855 do_unop:
15856 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15857 VAR_INIT_STATUS_INITIALIZED);
15858
15859 if (op0 == 0)
15860 break;
15861
15862 mem_loc_result = op0;
15863 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15864 break;
15865
15866 case CONST_INT:
15867 if (!is_a <scalar_int_mode> (mode, &int_mode)
15868 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15869 #ifdef POINTERS_EXTEND_UNSIGNED
15870 || (int_mode == Pmode
15871 && mem_mode != VOIDmode
15872 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
15873 #endif
15874 )
15875 {
15876 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15877 break;
15878 }
15879 if ((!dwarf_strict || dwarf_version >= 5)
15880 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
15881 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
15882 {
15883 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
15884 scalar_int_mode amode;
15885 if (type_die == NULL)
15886 return NULL;
15887 if (INTVAL (rtl) >= 0
15888 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
15889 .exists (&amode))
15890 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
15891 /* const DW_OP_convert <XXX> vs.
15892 DW_OP_const_type <XXX, 1, const>. */
15893 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
15894 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
15895 {
15896 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15897 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15898 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15899 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15900 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
15901 add_loc_descr (&mem_loc_result, op0);
15902 return mem_loc_result;
15903 }
15904 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
15905 INTVAL (rtl));
15906 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15907 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15908 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15909 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
15910 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
15911 else
15912 {
15913 mem_loc_result->dw_loc_oprnd2.val_class
15914 = dw_val_class_const_double;
15915 mem_loc_result->dw_loc_oprnd2.v.val_double
15916 = double_int::from_shwi (INTVAL (rtl));
15917 }
15918 }
15919 break;
15920
15921 case CONST_DOUBLE:
15922 if (!dwarf_strict || dwarf_version >= 5)
15923 {
15924 dw_die_ref type_die;
15925
15926 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
15927 CONST_DOUBLE rtx could represent either a large integer
15928 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
15929 the value is always a floating point constant.
15930
15931 When it is an integer, a CONST_DOUBLE is used whenever
15932 the constant requires 2 HWIs to be adequately represented.
15933 We output CONST_DOUBLEs as blocks. */
15934 if (mode == VOIDmode
15935 || (GET_MODE (rtl) == VOIDmode
15936 && maybe_ne (GET_MODE_BITSIZE (mode),
15937 HOST_BITS_PER_DOUBLE_INT)))
15938 break;
15939 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15940 if (type_die == NULL)
15941 return NULL;
15942 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
15943 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15944 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15945 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15946 #if TARGET_SUPPORTS_WIDE_INT == 0
15947 if (!SCALAR_FLOAT_MODE_P (mode))
15948 {
15949 mem_loc_result->dw_loc_oprnd2.val_class
15950 = dw_val_class_const_double;
15951 mem_loc_result->dw_loc_oprnd2.v.val_double
15952 = rtx_to_double_int (rtl);
15953 }
15954 else
15955 #endif
15956 {
15957 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
15958 unsigned int length = GET_MODE_SIZE (float_mode);
15959 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
15960
15961 insert_float (rtl, array);
15962 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
15963 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
15964 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
15965 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
15966 }
15967 }
15968 break;
15969
15970 case CONST_WIDE_INT:
15971 if (!dwarf_strict || dwarf_version >= 5)
15972 {
15973 dw_die_ref type_die;
15974
15975 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15976 if (type_die == NULL)
15977 return NULL;
15978 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
15979 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15980 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15981 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15982 mem_loc_result->dw_loc_oprnd2.val_class
15983 = dw_val_class_wide_int;
15984 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
15985 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
15986 }
15987 break;
15988
15989 case CONST_POLY_INT:
15990 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
15991 break;
15992
15993 case EQ:
15994 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
15995 break;
15996
15997 case GE:
15998 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
15999 break;
16000
16001 case GT:
16002 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16003 break;
16004
16005 case LE:
16006 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16007 break;
16008
16009 case LT:
16010 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16011 break;
16012
16013 case NE:
16014 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
16015 break;
16016
16017 case GEU:
16018 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16019 break;
16020
16021 case GTU:
16022 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16023 break;
16024
16025 case LEU:
16026 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16027 break;
16028
16029 case LTU:
16030 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16031 break;
16032
16033 case UMIN:
16034 case UMAX:
16035 if (!SCALAR_INT_MODE_P (mode))
16036 break;
16037 /* FALLTHRU */
16038 case SMIN:
16039 case SMAX:
16040 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16041 break;
16042
16043 case ZERO_EXTRACT:
16044 case SIGN_EXTRACT:
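/* Extract the selected bits by shifting them up to the top of an
   address-sized value with DW_OP_shl and then back down with DW_OP_shr
   (zero fill) or DW_OP_shra (sign fill). */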
16045 if (CONST_INT_P (XEXP (rtl, 1))
16046 && CONST_INT_P (XEXP (rtl, 2))
16047 && is_a <scalar_int_mode> (mode, &int_mode)
16048 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16049 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16050 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16051 && ((unsigned) INTVAL (XEXP (rtl, 1))
16052 + (unsigned) INTVAL (XEXP (rtl, 2))
16053 <= GET_MODE_BITSIZE (int_mode)))
16054 {
16055 int shift, size;
16056 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16057 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16058 if (op0 == 0)
16059 break;
16060 if (GET_CODE (rtl) == SIGN_EXTRACT)
16061 op = DW_OP_shra;
16062 else
16063 op = DW_OP_shr;
16064 mem_loc_result = op0;
16065 size = INTVAL (XEXP (rtl, 1));
16066 shift = INTVAL (XEXP (rtl, 2));
16067 if (BITS_BIG_ENDIAN)
16068 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
16069 if (shift + size != (int) DWARF2_ADDR_SIZE)
16070 {
16071 add_loc_descr (&mem_loc_result,
16072 int_loc_descriptor (DWARF2_ADDR_SIZE
16073 - shift - size));
16074 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16075 }
16076 if (size != (int) DWARF2_ADDR_SIZE)
16077 {
16078 add_loc_descr (&mem_loc_result,
16079 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
16080 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16081 }
16082 }
16083 break;
16084
16085 case IF_THEN_ELSE:
16086 {
16087 dw_loc_descr_ref op2, bra_node, drop_node;
16088 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16089 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16090 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16091 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16092 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16093 VAR_INIT_STATUS_INITIALIZED);
16094 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16095 VAR_INIT_STATUS_INITIALIZED);
16096 if (op0 == NULL || op1 == NULL || op2 == NULL)
16097 break;
16098
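/* The expression is laid out as <then-value> <else-value> <condition>
   DW_OP_bra <to drop> DW_OP_swap DW_OP_drop: if the condition is nonzero,
   DW_OP_bra jumps straight to the final DW_OP_drop, discarding the
   else-value; otherwise the swap/drop pair discards the then-value. */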
16099 mem_loc_result = op1;
16100 add_loc_descr (&mem_loc_result, op2);
16101 add_loc_descr (&mem_loc_result, op0);
16102 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16103 add_loc_descr (&mem_loc_result, bra_node);
16104 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16105 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16106 add_loc_descr (&mem_loc_result, drop_node);
16107 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16108 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
16109 }
16110 break;
16111
16112 case FLOAT_EXTEND:
16113 case FLOAT_TRUNCATE:
16114 case FLOAT:
16115 case UNSIGNED_FLOAT:
16116 case FIX:
16117 case UNSIGNED_FIX:
16118 if (!dwarf_strict || dwarf_version >= 5)
16119 {
16120 dw_die_ref type_die;
16121 dw_loc_descr_ref cvt;
16122
16123 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16124 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16125 if (op0 == NULL)
16126 break;
16127 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16128 && (GET_CODE (rtl) == FLOAT
16129 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16130 {
16131 type_die = base_type_for_mode (int_mode,
16132 GET_CODE (rtl) == UNSIGNED_FLOAT);
16133 if (type_die == NULL)
16134 break;
16135 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16136 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16137 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16138 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16139 add_loc_descr (&op0, cvt);
16140 }
16141 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16142 if (type_die == NULL)
16143 break;
16144 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16145 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16146 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16147 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16148 add_loc_descr (&op0, cvt);
16149 if (is_a <scalar_int_mode> (mode, &int_mode)
16150 && (GET_CODE (rtl) == FIX
16151 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16152 {
16153 op0 = convert_descriptor_to_mode (int_mode, op0);
16154 if (op0 == NULL)
16155 break;
16156 }
16157 mem_loc_result = op0;
16158 }
16159 break;
16160
16161 case CLZ:
16162 case CTZ:
16163 case FFS:
16164 if (is_a <scalar_int_mode> (mode, &int_mode))
16165 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16166 break;
16167
16168 case POPCOUNT:
16169 case PARITY:
16170 if (is_a <scalar_int_mode> (mode, &int_mode))
16171 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16172 break;
16173
16174 case BSWAP:
16175 if (is_a <scalar_int_mode> (mode, &int_mode))
16176 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16177 break;
16178
16179 case ROTATE:
16180 case ROTATERT:
16181 if (is_a <scalar_int_mode> (mode, &int_mode))
16182 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16183 break;
16184
16185 case COMPARE:
16186 /* In theory, we could implement the above. */
16187 /* DWARF cannot represent the unsigned compare operations
16188 natively. */
16189 case SS_MULT:
16190 case US_MULT:
16191 case SS_DIV:
16192 case US_DIV:
16193 case SS_PLUS:
16194 case US_PLUS:
16195 case SS_MINUS:
16196 case US_MINUS:
16197 case SS_NEG:
16198 case US_NEG:
16199 case SS_ABS:
16200 case SS_ASHIFT:
16201 case US_ASHIFT:
16202 case SS_TRUNCATE:
16203 case US_TRUNCATE:
16204 case UNORDERED:
16205 case ORDERED:
16206 case UNEQ:
16207 case UNGE:
16208 case UNGT:
16209 case UNLE:
16210 case UNLT:
16211 case LTGT:
16212 case FRACT_CONVERT:
16213 case UNSIGNED_FRACT_CONVERT:
16214 case SAT_FRACT:
16215 case UNSIGNED_SAT_FRACT:
16216 case SQRT:
16217 case ASM_OPERANDS:
16218 case VEC_MERGE:
16219 case VEC_SELECT:
16220 case VEC_CONCAT:
16221 case VEC_DUPLICATE:
16222 case VEC_SERIES:
16223 case UNSPEC:
16224 case HIGH:
16225 case FMA:
16226 case STRICT_LOW_PART:
16227 case CONST_VECTOR:
16228 case CONST_FIXED:
16229 case CLRSB:
16230 case CLOBBER:
16231 case CLOBBER_HIGH:
16232 /* If delegitimize_address couldn't do anything with the UNSPEC, we
16233 can't express it in the debug info. This can happen e.g. with some
16234 TLS UNSPECs. */
16235 break;
16236
16237 case CONST_STRING:
16238 resolve_one_addr (&rtl);
16239 goto symref;
16240
16241 /* RTL sequences inside PARALLEL record a series of DWARF operations for
16242 the expression. An UNSPEC rtx represents a raw DWARF operation;
16243 new_loc_descr is called for it to build the operation directly.
16244 Otherwise mem_loc_descriptor is called recursively. */
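/* For example (illustrative only), an UNSPEC whose code is the numeric
   value of a DW_OP_* opcode and whose operand vector is
   (const_int 4, const_int 0) is turned directly into that operation with
   operands 4 and 0. */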
16245 case PARALLEL:
16246 {
16247 int index = 0;
16248 dw_loc_descr_ref exp_result = NULL;
16249
16250 for (; index < XVECLEN (rtl, 0); index++)
16251 {
16252 rtx elem = XVECEXP (rtl, 0, index);
16253 if (GET_CODE (elem) == UNSPEC)
16254 {
16255 /* Each DWARF operation UNSPEC contains two operands; if
16256 one operand is not used for the operation, const0_rtx is
16257 passed. */
16258 gcc_assert (XVECLEN (elem, 0) == 2);
16259
16260 HOST_WIDE_INT dw_op = XINT (elem, 1);
16261 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16262 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16263 exp_result
16264 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16265 oprnd2);
16266 }
16267 else
16268 exp_result
16269 = mem_loc_descriptor (elem, mode, mem_mode,
16270 VAR_INIT_STATUS_INITIALIZED);
16271
16272 if (!mem_loc_result)
16273 mem_loc_result = exp_result;
16274 else
16275 add_loc_descr (&mem_loc_result, exp_result);
16276 }
16277
16278 break;
16279 }
16280
16281 default:
16282 if (flag_checking)
16283 {
16284 print_rtl (stderr, rtl);
16285 gcc_unreachable ();
16286 }
16287 break;
16288 }
16289
16290 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16291 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16292
16293 return mem_loc_result;
16294 }
16295
16296 /* Return a descriptor that describes the concatenation of two locations.
16297 This is typically a complex variable. */
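/* For instance (purely illustrative), a complex value whose two parts live
   in two 8-byte registers would be described as
   <loc of part 0> DW_OP_piece 8 <loc of part 1> DW_OP_piece 8. */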
16298
16299 static dw_loc_descr_ref
16300 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16301 {
16302 /* At present we only track constant-sized pieces. */
16303 unsigned int size0, size1;
16304 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16305 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16306 return 0;
16307
16308 dw_loc_descr_ref cc_loc_result = NULL;
16309 dw_loc_descr_ref x0_ref
16310 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16311 dw_loc_descr_ref x1_ref
16312 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16313
16314 if (x0_ref == 0 || x1_ref == 0)
16315 return 0;
16316
16317 cc_loc_result = x0_ref;
16318 add_loc_descr_op_piece (&cc_loc_result, size0);
16319
16320 add_loc_descr (&cc_loc_result, x1_ref);
16321 add_loc_descr_op_piece (&cc_loc_result, size1);
16322
16323 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16324 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16325
16326 return cc_loc_result;
16327 }
16328
16329 /* Return a descriptor that describes the concatenation of N
16330 locations. */
16331
16332 static dw_loc_descr_ref
16333 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16334 {
16335 unsigned int i;
16336 dw_loc_descr_ref cc_loc_result = NULL;
16337 unsigned int n = XVECLEN (concatn, 0);
16338 unsigned int size;
16339
16340 for (i = 0; i < n; ++i)
16341 {
16342 dw_loc_descr_ref ref;
16343 rtx x = XVECEXP (concatn, 0, i);
16344
16345 /* At present we only track constant-sized pieces. */
16346 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16347 return NULL;
16348
16349 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16350 if (ref == NULL)
16351 return NULL;
16352
16353 add_loc_descr (&cc_loc_result, ref);
16354 add_loc_descr_op_piece (&cc_loc_result, size);
16355 }
16356
16357 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16358 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16359
16360 return cc_loc_result;
16361 }
16362
16363 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16364 for DEBUG_IMPLICIT_PTR RTL. */
16365
16366 static dw_loc_descr_ref
16367 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16368 {
16369 dw_loc_descr_ref ret;
16370 dw_die_ref ref;
16371
16372 if (dwarf_strict && dwarf_version < 5)
16373 return NULL;
16374 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16375 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16376 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16377 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16378 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16379 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16380 if (ref)
16381 {
16382 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16383 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16384 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16385 }
16386 else
16387 {
16388 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16389 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16390 }
16391 return ret;
16392 }
16393
16394 /* Output a proper Dwarf location descriptor for a variable or parameter
16395 which is either allocated in a register or in a memory location. For a
16396 register, we just generate an OP_REG and the register number. For a
16397 memory location we provide a Dwarf postfix expression describing how to
16398 generate the (dynamic) address of the object onto the address stack.
16399
16400 MODE is mode of the decl if this loc_descriptor is going to be used in
16401 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
16402 allowed, VOIDmode otherwise.
16403
16404 If we don't know how to describe it, return 0. */
16405
16406 static dw_loc_descr_ref
16407 loc_descriptor (rtx rtl, machine_mode mode,
16408 enum var_init_status initialized)
16409 {
16410 dw_loc_descr_ref loc_result = NULL;
16411 scalar_int_mode int_mode;
16412
16413 switch (GET_CODE (rtl))
16414 {
16415 case SUBREG:
16416 /* The case of a subreg may arise when we have a local (register)
16417 variable or a formal (register) parameter which doesn't quite fill
16418 up an entire register. For now, just assume that it is
16419 legitimate to make the Dwarf info refer to the whole register which
16420 contains the given subreg. */
16421 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16422 loc_result = loc_descriptor (SUBREG_REG (rtl),
16423 GET_MODE (SUBREG_REG (rtl)), initialized);
16424 else
16425 goto do_default;
16426 break;
16427
16428 case REG:
16429 loc_result = reg_loc_descriptor (rtl, initialized);
16430 break;
16431
16432 case MEM:
16433 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16434 GET_MODE (rtl), initialized);
16435 if (loc_result == NULL)
16436 loc_result = tls_mem_loc_descriptor (rtl);
16437 if (loc_result == NULL)
16438 {
16439 rtx new_rtl = avoid_constant_pool_reference (rtl);
16440 if (new_rtl != rtl)
16441 loc_result = loc_descriptor (new_rtl, mode, initialized);
16442 }
16443 break;
16444
16445 case CONCAT:
16446 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16447 initialized);
16448 break;
16449
16450 case CONCATN:
16451 loc_result = concatn_loc_descriptor (rtl, initialized);
16452 break;
16453
16454 case VAR_LOCATION:
16455 /* Single part. */
16456 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16457 {
16458 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16459 if (GET_CODE (loc) == EXPR_LIST)
16460 loc = XEXP (loc, 0);
16461 loc_result = loc_descriptor (loc, mode, initialized);
16462 break;
16463 }
16464
16465 rtl = XEXP (rtl, 1);
16466 /* FALLTHRU */
16467
16468 case PARALLEL:
16469 {
16470 rtvec par_elems = XVEC (rtl, 0);
16471 int num_elem = GET_NUM_ELEM (par_elems);
16472 machine_mode mode;
16473 int i, size;
16474
16475 /* Create the first one, so we have something to add to. */
16476 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16477 VOIDmode, initialized);
16478 if (loc_result == NULL)
16479 return NULL;
16480 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16481 /* At present we only track constant-sized pieces. */
16482 if (!GET_MODE_SIZE (mode).is_constant (&size))
16483 return NULL;
16484 add_loc_descr_op_piece (&loc_result, size);
16485 for (i = 1; i < num_elem; i++)
16486 {
16487 dw_loc_descr_ref temp;
16488
16489 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16490 VOIDmode, initialized);
16491 if (temp == NULL)
16492 return NULL;
16493 add_loc_descr (&loc_result, temp);
16494 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16495 /* At present we only track constant-sized pieces. */
16496 if (!GET_MODE_SIZE (mode).is_constant (&size))
16497 return NULL;
16498 add_loc_descr_op_piece (&loc_result, size);
16499 }
16500 }
16501 break;
16502
16503 case CONST_INT:
16504 if (mode != VOIDmode && mode != BLKmode)
16505 {
16506 int_mode = as_a <scalar_int_mode> (mode);
16507 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16508 INTVAL (rtl));
16509 }
16510 break;
16511
16512 case CONST_DOUBLE:
16513 if (mode == VOIDmode)
16514 mode = GET_MODE (rtl);
16515
16516 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16517 {
16518 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16519
16520 /* Note that a CONST_DOUBLE rtx could represent either an integer
16521 or a floating-point constant. A CONST_DOUBLE is used whenever
16522 the constant requires more than one word in order to be
16523 adequately represented. We output CONST_DOUBLEs as blocks. */
16524 scalar_mode smode = as_a <scalar_mode> (mode);
16525 loc_result = new_loc_descr (DW_OP_implicit_value,
16526 GET_MODE_SIZE (smode), 0);
16527 #if TARGET_SUPPORTS_WIDE_INT == 0
16528 if (!SCALAR_FLOAT_MODE_P (smode))
16529 {
16530 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16531 loc_result->dw_loc_oprnd2.v.val_double
16532 = rtx_to_double_int (rtl);
16533 }
16534 else
16535 #endif
16536 {
16537 unsigned int length = GET_MODE_SIZE (smode);
16538 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16539
16540 insert_float (rtl, array);
16541 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16542 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16543 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16544 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16545 }
16546 }
16547 break;
16548
16549 case CONST_WIDE_INT:
16550 if (mode == VOIDmode)
16551 mode = GET_MODE (rtl);
16552
16553 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16554 {
16555 int_mode = as_a <scalar_int_mode> (mode);
16556 loc_result = new_loc_descr (DW_OP_implicit_value,
16557 GET_MODE_SIZE (int_mode), 0);
16558 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16559 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16560 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16561 }
16562 break;
16563
16564 case CONST_VECTOR:
16565 if (mode == VOIDmode)
16566 mode = GET_MODE (rtl);
16567
16568 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16569 {
16570 unsigned int length;
16571 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16572 return NULL;
16573
16574 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16575 unsigned char *array
16576 = ggc_vec_alloc<unsigned char> (length * elt_size);
16577 unsigned int i;
16578 unsigned char *p;
16579 machine_mode imode = GET_MODE_INNER (mode);
16580
16581 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16582 switch (GET_MODE_CLASS (mode))
16583 {
16584 case MODE_VECTOR_INT:
16585 for (i = 0, p = array; i < length; i++, p += elt_size)
16586 {
16587 rtx elt = CONST_VECTOR_ELT (rtl, i);
16588 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16589 }
16590 break;
16591
16592 case MODE_VECTOR_FLOAT:
16593 for (i = 0, p = array; i < length; i++, p += elt_size)
16594 {
16595 rtx elt = CONST_VECTOR_ELT (rtl, i);
16596 insert_float (elt, p);
16597 }
16598 break;
16599
16600 default:
16601 gcc_unreachable ();
16602 }
16603
16604 loc_result = new_loc_descr (DW_OP_implicit_value,
16605 length * elt_size, 0);
16606 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16607 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16608 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16609 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16610 }
16611 break;
16612
16613 case CONST:
16614 if (mode == VOIDmode
16615 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16616 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16617 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16618 {
16619 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16620 break;
16621 }
16622 /* FALLTHROUGH */
16623 case SYMBOL_REF:
16624 if (!const_ok_for_output (rtl))
16625 break;
16626 /* FALLTHROUGH */
16627 case LABEL_REF:
16628 if (is_a <scalar_int_mode> (mode, &int_mode)
16629 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16630 && (dwarf_version >= 4 || !dwarf_strict))
16631 {
16632 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16633 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16634 vec_safe_push (used_rtx_array, rtl);
16635 }
16636 break;
16637
16638 case DEBUG_IMPLICIT_PTR:
16639 loc_result = implicit_ptr_descriptor (rtl, 0);
16640 break;
16641
16642 case PLUS:
16643 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16644 && CONST_INT_P (XEXP (rtl, 1)))
16645 {
16646 loc_result
16647 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16648 break;
16649 }
16650 /* FALLTHRU */
16651 do_default:
16652 default:
16653 if ((is_a <scalar_int_mode> (mode, &int_mode)
16654 && GET_MODE (rtl) == int_mode
16655 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16656 && dwarf_version >= 4)
16657 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16658 {
16659 /* Value expression. */
16660 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16661 if (loc_result)
16662 add_loc_descr (&loc_result,
16663 new_loc_descr (DW_OP_stack_value, 0, 0));
16664 }
16665 break;
16666 }
16667
16668 return loc_result;
16669 }
16670
16671 /* We need to figure out what section we should use as the base for the
16672 address ranges where a given location is valid.
16673 1. If this particular DECL has a section associated with it, use that.
16674 2. If this function has a section associated with it, use that.
16675 3. Otherwise, use the text section.
16676 XXX: If you split a variable across multiple sections, we won't notice. */
16677
16678 static const char *
16679 secname_for_decl (const_tree decl)
16680 {
16681 const char *secname;
16682
16683 if (VAR_OR_FUNCTION_DECL_P (decl)
16684 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16685 && DECL_SECTION_NAME (decl))
16686 secname = DECL_SECTION_NAME (decl);
16687 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16688 secname = DECL_SECTION_NAME (current_function_decl);
16689 else if (cfun && in_cold_section_p)
16690 secname = crtl->subsections.cold_section_label;
16691 else
16692 secname = text_section_label;
16693
16694 return secname;
16695 }
16696
16697 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16698
16699 static bool
16700 decl_by_reference_p (tree decl)
16701 {
16702 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16703 || VAR_P (decl))
16704 && DECL_BY_REFERENCE (decl));
16705 }
16706
16707 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16708 for VARLOC. */
16709
16710 static dw_loc_descr_ref
16711 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16712 enum var_init_status initialized)
16713 {
16714 int have_address = 0;
16715 dw_loc_descr_ref descr;
16716 machine_mode mode;
16717
16718 if (want_address != 2)
16719 {
16720 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16721 /* Single part. */
16722 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16723 {
16724 varloc = PAT_VAR_LOCATION_LOC (varloc);
16725 if (GET_CODE (varloc) == EXPR_LIST)
16726 varloc = XEXP (varloc, 0);
16727 mode = GET_MODE (varloc);
16728 if (MEM_P (varloc))
16729 {
16730 rtx addr = XEXP (varloc, 0);
16731 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16732 mode, initialized);
16733 if (descr)
16734 have_address = 1;
16735 else
16736 {
16737 rtx x = avoid_constant_pool_reference (varloc);
16738 if (x != varloc)
16739 descr = mem_loc_descriptor (x, mode, VOIDmode,
16740 initialized);
16741 }
16742 }
16743 else
16744 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16745 }
16746 else
16747 return 0;
16748 }
16749 else
16750 {
16751 if (GET_CODE (varloc) == VAR_LOCATION)
16752 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16753 else
16754 mode = DECL_MODE (loc);
16755 descr = loc_descriptor (varloc, mode, initialized);
16756 have_address = 1;
16757 }
16758
16759 if (!descr)
16760 return 0;
16761
16762 if (want_address == 2 && !have_address
16763 && (dwarf_version >= 4 || !dwarf_strict))
16764 {
16765 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16766 {
16767 expansion_failed (loc, NULL_RTX,
16768 "DWARF address size mismatch");
16769 return 0;
16770 }
16771 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16772 have_address = 1;
16773 }
16774 /* Show if we can't fill the request for an address. */
16775 if (want_address && !have_address)
16776 {
16777 expansion_failed (loc, NULL_RTX,
16778 "Want address and only have value");
16779 return 0;
16780 }
16781
16782 /* If we've got an address and don't want one, dereference. */
16783 if (!want_address && have_address)
16784 {
16785 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16786 enum dwarf_location_atom op;
16787
16788 if (size > DWARF2_ADDR_SIZE || size == -1)
16789 {
16790 expansion_failed (loc, NULL_RTX,
16791 "DWARF address size mismatch");
16792 return 0;
16793 }
16794 else if (size == DWARF2_ADDR_SIZE)
16795 op = DW_OP_deref;
16796 else
16797 op = DW_OP_deref_size;
16798
16799 add_loc_descr (&descr, new_loc_descr (op, size, 0));
16800 }
16801
16802 return descr;
16803 }
16804
16805 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
16806 if it is not possible. */
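/* E.g. a byte-aligned 32-bit piece at bit offset 0 becomes DW_OP_piece 4,
   while 3 bits at bit offset 2 need DW_OP_bit_piece 3, 2 (DWARF 3 or
   non-strict only). */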
16807
16808 static dw_loc_descr_ref
16809 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
16810 {
16811 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
16812 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
16813 else if (dwarf_version >= 3 || !dwarf_strict)
16814 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
16815 else
16816 return NULL;
16817 }
16818
16819 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16820 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
16821
16822 static dw_loc_descr_ref
16823 dw_sra_loc_expr (tree decl, rtx loc)
16824 {
16825 rtx p;
16826 unsigned HOST_WIDE_INT padsize = 0;
16827 dw_loc_descr_ref descr, *descr_tail;
16828 unsigned HOST_WIDE_INT decl_size;
16829 rtx varloc;
16830 enum var_init_status initialized;
16831
16832 if (DECL_SIZE (decl) == NULL
16833 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
16834 return NULL;
16835
16836 decl_size = tree_to_uhwi (DECL_SIZE (decl));
16837 descr = NULL;
16838 descr_tail = &descr;
16839
16840 for (p = loc; p; p = XEXP (p, 1))
16841 {
16842 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
16843 rtx loc_note = *decl_piece_varloc_ptr (p);
16844 dw_loc_descr_ref cur_descr;
16845 dw_loc_descr_ref *tail, last = NULL;
16846 unsigned HOST_WIDE_INT opsize = 0;
16847
16848 if (loc_note == NULL_RTX
16849 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
16850 {
16851 padsize += bitsize;
16852 continue;
16853 }
16854 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
16855 varloc = NOTE_VAR_LOCATION (loc_note);
16856 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
16857 if (cur_descr == NULL)
16858 {
16859 padsize += bitsize;
16860 continue;
16861 }
16862
16863 /* Check that cur_descr either doesn't use
16864 DW_OP_*piece operations, or their sum is equal
16865 to bitsize. Otherwise we can't embed it. */
16866 for (tail = &cur_descr; *tail != NULL;
16867 tail = &(*tail)->dw_loc_next)
16868 if ((*tail)->dw_loc_opc == DW_OP_piece)
16869 {
16870 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
16871 * BITS_PER_UNIT;
16872 last = *tail;
16873 }
16874 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
16875 {
16876 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
16877 last = *tail;
16878 }
16879
16880 if (last != NULL && opsize != bitsize)
16881 {
16882 padsize += bitsize;
16883 /* Discard the current piece of the descriptor and release any
16884 addr_table entries it uses. */
16885 remove_loc_list_addr_table_entries (cur_descr);
16886 continue;
16887 }
16888
16889 /* If there is a hole, add a DW_OP_*piece after an empty DWARF
16890 expression, which means that those bits are optimized out. */
16891 if (padsize)
16892 {
16893 if (padsize > decl_size)
16894 {
16895 remove_loc_list_addr_table_entries (cur_descr);
16896 goto discard_descr;
16897 }
16898 decl_size -= padsize;
16899 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
16900 if (*descr_tail == NULL)
16901 {
16902 remove_loc_list_addr_table_entries (cur_descr);
16903 goto discard_descr;
16904 }
16905 descr_tail = &(*descr_tail)->dw_loc_next;
16906 padsize = 0;
16907 }
16908 *descr_tail = cur_descr;
16909 descr_tail = tail;
16910 if (bitsize > decl_size)
16911 goto discard_descr;
16912 decl_size -= bitsize;
16913 if (last == NULL)
16914 {
16915 HOST_WIDE_INT offset = 0;
16916 if (GET_CODE (varloc) == VAR_LOCATION
16917 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16918 {
16919 varloc = PAT_VAR_LOCATION_LOC (varloc);
16920 if (GET_CODE (varloc) == EXPR_LIST)
16921 varloc = XEXP (varloc, 0);
16922 }
16923 do
16924 {
16925 if (GET_CODE (varloc) == CONST
16926 || GET_CODE (varloc) == SIGN_EXTEND
16927 || GET_CODE (varloc) == ZERO_EXTEND)
16928 varloc = XEXP (varloc, 0);
16929 else if (GET_CODE (varloc) == SUBREG)
16930 varloc = SUBREG_REG (varloc);
16931 else
16932 break;
16933 }
16934 while (1);
16935 /* The DW_OP_bit_piece offset should be zero for register
16936 or implicit location descriptions and empty location
16937 descriptions, but for memory addresses it needs a big-endian
16938 adjustment. */
16939 if (MEM_P (varloc))
16940 {
16941 unsigned HOST_WIDE_INT memsize;
16942 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
16943 goto discard_descr;
16944 memsize *= BITS_PER_UNIT;
16945 if (memsize != bitsize)
16946 {
16947 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
16948 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
16949 goto discard_descr;
16950 if (memsize < bitsize)
16951 goto discard_descr;
16952 if (BITS_BIG_ENDIAN)
16953 offset = memsize - bitsize;
16954 }
16955 }
16956
16957 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
16958 if (*descr_tail == NULL)
16959 goto discard_descr;
16960 descr_tail = &(*descr_tail)->dw_loc_next;
16961 }
16962 }
16963
16964 /* If there were any non-empty expressions, add padding till the end of
16965 the decl. */
16966 if (descr != NULL && decl_size != 0)
16967 {
16968 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
16969 if (*descr_tail == NULL)
16970 goto discard_descr;
16971 }
16972 return descr;
16973
16974 discard_descr:
16975 /* Discard the descriptor and release any addr_table entries it uses. */
16976 remove_loc_list_addr_table_entries (descr);
16977 return NULL;
16978 }
16979
16980 /* Return the dwarf representation of the location list LOC_LIST of
16981 DECL. WANT_ADDRESS has the same meaning as in loc_list_from_tree
16982 function. */
16983
16984 static dw_loc_list_ref
16985 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
16986 {
16987 const char *endname, *secname;
16988 var_loc_view endview;
16989 rtx varloc;
16990 enum var_init_status initialized;
16991 struct var_loc_node *node;
16992 dw_loc_descr_ref descr;
16993 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
16994 dw_loc_list_ref list = NULL;
16995 dw_loc_list_ref *listp = &list;
16996
16997 /* Now that we know what section we are using for a base,
16998 actually construct the list of locations.
16999 The first location information is what is passed to the
17000 function that creates the location list, and the remaining
17001 locations just get added on to that list.
17002 Note that we only know the start address for a location
17003 (i.e. location changes), so to build the range, we use
17004 the range [current location start, next location start].
17005 This means we have to special case the last node, and generate
17006 a range of [last location start, end of function label]. */
17007
17008 if (cfun && crtl->has_bb_partition)
17009 {
17010 bool save_in_cold_section_p = in_cold_section_p;
17011 in_cold_section_p = first_function_block_is_cold;
17012 if (loc_list->last_before_switch == NULL)
17013 in_cold_section_p = !in_cold_section_p;
17014 secname = secname_for_decl (decl);
17015 in_cold_section_p = save_in_cold_section_p;
17016 }
17017 else
17018 secname = secname_for_decl (decl);
17019
17020 for (node = loc_list->first; node; node = node->next)
17021 {
17022 bool range_across_switch = false;
17023 if (GET_CODE (node->loc) == EXPR_LIST
17024 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
17025 {
17026 if (GET_CODE (node->loc) == EXPR_LIST)
17027 {
17028 descr = NULL;
17029 /* This requires DW_OP_{,bit_}piece, which is not usable
17030 inside DWARF expressions. */
17031 if (want_address == 2)
17032 descr = dw_sra_loc_expr (decl, node->loc);
17033 }
17034 else
17035 {
17036 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17037 varloc = NOTE_VAR_LOCATION (node->loc);
17038 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
17039 }
17040 if (descr)
17041 {
17042 /* If section switch happens in between node->label
17043 and node->next->label (or end of function) and
17044 we can't emit it as a single entry list,
17045 emit two ranges, first one ending at the end
17046 of first partition and second one starting at the
17047 beginning of second partition. */
17048 if (node == loc_list->last_before_switch
17049 && (node != loc_list->first || loc_list->first->next
17050 /* If we are to emit a view number, we will emit
17051 a loclist rather than a single location
17052 expression for the entire function (see
17053 loc_list_has_views), so we have to split the
17054 range that straddles across partitions. */
17055 || !ZERO_VIEW_P (node->view))
17056 && current_function_decl)
17057 {
17058 endname = cfun->fde->dw_fde_end;
17059 endview = 0;
17060 range_across_switch = true;
17061 }
17062 /* The variable has a location between NODE->LABEL and
17063 NODE->NEXT->LABEL. */
17064 else if (node->next)
17065 endname = node->next->label, endview = node->next->view;
17066 /* If the variable has a location at the last label
17067 it keeps its location until the end of function. */
17068 else if (!current_function_decl)
17069 endname = text_end_label, endview = 0;
17070 else
17071 {
17072 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17073 current_function_funcdef_no);
17074 endname = ggc_strdup (label_id);
17075 endview = 0;
17076 }
17077
17078 *listp = new_loc_list (descr, node->label, node->view,
17079 endname, endview, secname);
17080 if (TREE_CODE (decl) == PARM_DECL
17081 && node == loc_list->first
17082 && NOTE_P (node->loc)
17083 && strcmp (node->label, endname) == 0)
17084 (*listp)->force = true;
17085 listp = &(*listp)->dw_loc_next;
17086 }
17087 }
17088
17089 if (cfun
17090 && crtl->has_bb_partition
17091 && node == loc_list->last_before_switch)
17092 {
17093 bool save_in_cold_section_p = in_cold_section_p;
17094 in_cold_section_p = !first_function_block_is_cold;
17095 secname = secname_for_decl (decl);
17096 in_cold_section_p = save_in_cold_section_p;
17097 }
17098
17099 if (range_across_switch)
17100 {
17101 if (GET_CODE (node->loc) == EXPR_LIST)
17102 descr = dw_sra_loc_expr (decl, node->loc);
17103 else
17104 {
17105 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17106 varloc = NOTE_VAR_LOCATION (node->loc);
17107 descr = dw_loc_list_1 (decl, varloc, want_address,
17108 initialized);
17109 }
17110 gcc_assert (descr);
17111 /* The variable has a location between NODE->LABEL and
17112 NODE->NEXT->LABEL. */
17113 if (node->next)
17114 endname = node->next->label, endview = node->next->view;
17115 else
17116 endname = cfun->fde->dw_fde_second_end, endview = 0;
17117 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17118 endname, endview, secname);
17119 listp = &(*listp)->dw_loc_next;
17120 }
17121 }
17122
17123 /* Try to avoid the overhead of a location list by emitting a location
17124 expression instead, but only if we didn't have more than one
17125 location entry in the first place. If some entries were not
17126 representable, we don't want to pretend that a single entry that was
17127 representable applies to the entire scope in which the variable is
17128 available. */
17129 if (list && loc_list->first->next)
17130 gen_llsym (list);
17131 else
17132 maybe_gen_llsym (list);
17133
17134 return list;
17135 }
17136
17137 /* Return true if the loc_list has only a single element and thus can be
17138 represented as a location description. */
17139
17140 static bool
17141 single_element_loc_list_p (dw_loc_list_ref list)
17142 {
17143 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17144 return !list->ll_symbol;
17145 }
17146
17147 /* Duplicate a single element of location list. */
17148
17149 static inline dw_loc_descr_ref
17150 copy_loc_descr (dw_loc_descr_ref ref)
17151 {
17152 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17153 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17154 return copy;
17155 }
17156
17157 /* To each location in list LIST append loc descr REF. */
17158
17159 static void
17160 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17161 {
17162 dw_loc_descr_ref copy;
17163 add_loc_descr (&list->expr, ref);
17164 list = list->dw_loc_next;
17165 while (list)
17166 {
17167 copy = copy_loc_descr (ref);
17168 add_loc_descr (&list->expr, copy);
17169 while (copy->dw_loc_next)
17170 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17171 list = list->dw_loc_next;
17172 }
17173 }
17174
17175 /* To each location in list LIST prepend loc descr REF. */
17176
17177 static void
17178 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17179 {
17180 dw_loc_descr_ref copy;
17181 dw_loc_descr_ref ref_end = list->expr;
17182 add_loc_descr (&ref, list->expr);
17183 list->expr = ref;
17184 list = list->dw_loc_next;
17185 while (list)
17186 {
17187 dw_loc_descr_ref end = list->expr;
17188 list->expr = copy = copy_loc_descr (ref);
17189 while (copy->dw_loc_next != ref_end)
17190 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17191 copy->dw_loc_next = end;
17192 list = list->dw_loc_next;
17193 }
17194 }
17195
17196 /* Given two lists RET and LIST,
17197 produce the location list that results from adding the expression in LIST
17198 to the expression in RET at each position in the program.
17199 Might be destructive on both RET and LIST.
17200
17201 TODO: We handle only the simple cases of RET or LIST having at most one
17202 element. The general case would involve sorting the lists in program order
17203 and merging them, which will need some additional work.
17204 Adding that will improve the quality of debug info, especially for SRA-ed
17205 structures. */
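/* For instance, if *RET covers two ranges and LIST is a single entry with
   expression E, the result keeps RET's ranges and appends E (a fresh copy
   for each entry after the first) to each of their expressions. */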
17206
17207 static void
17208 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17209 {
17210 if (!list)
17211 return;
17212 if (!*ret)
17213 {
17214 *ret = list;
17215 return;
17216 }
17217 if (!list->dw_loc_next)
17218 {
17219 add_loc_descr_to_each (*ret, list->expr);
17220 return;
17221 }
17222 if (!(*ret)->dw_loc_next)
17223 {
17224 prepend_loc_descr_to_each (list, (*ret)->expr);
17225 *ret = list;
17226 return;
17227 }
17228 expansion_failed (NULL_TREE, NULL_RTX,
17229 "Don't know how to merge two non-trivial"
17230 " location lists.\n");
17231 *ret = NULL;
17232 return;
17233 }
17234
17235 /* LOC is a constant expression. Try looking it up in the constant
17236 pool and return a loc_descr for its address. */
17237
17238 static dw_loc_descr_ref
17239 cst_pool_loc_descr (tree loc)
17240 {
17241 /* Get an RTL for this, if something has been emitted. */
17242 rtx rtl = lookup_constant_def (loc);
17243
17244 if (!rtl || !MEM_P (rtl))
17245 {
17246 gcc_assert (!rtl);
17247 return 0;
17248 }
17249 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17250
17251 /* TODO: We might get more coverage if we were actually delaying expansion
17252 of all expressions till the end of compilation, when constant pools are
17253 fully populated. */
17254 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17255 {
17256 expansion_failed (loc, NULL_RTX,
17257 "CST value in contant pool but not marked.");
17258 return 0;
17259 }
17260 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17261 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17262 }
17263
17264 /* Return a dw_loc_list representing the address of the addr_expr LOC
17265 by looking for an inner INDIRECT_REF expression and turning
17266 it into simple arithmetic.
17267
17268 See loc_list_from_tree for the meaning of CONTEXT. */
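/* For example (illustrative only), for &(*p).f this can yield the location
   of p followed by DW_OP_plus_uconst <byte offset of f> and
   DW_OP_stack_value, instead of giving up on the INDIRECT_REF. */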
17269
17270 static dw_loc_list_ref
17271 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17272 loc_descr_context *context)
17273 {
17274 tree obj, offset;
17275 poly_int64 bitsize, bitpos, bytepos;
17276 machine_mode mode;
17277 int unsignedp, reversep, volatilep = 0;
17278 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17279
17280 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17281 &bitsize, &bitpos, &offset, &mode,
17282 &unsignedp, &reversep, &volatilep);
17283 STRIP_NOPS (obj);
17284 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17285 {
17286 expansion_failed (loc, NULL_RTX, "bitfield access");
17287 return 0;
17288 }
17289 if (!INDIRECT_REF_P (obj))
17290 {
17291 expansion_failed (obj,
17292 NULL_RTX, "no indirect ref in inner reference");
17293 return 0;
17294 }
17295 if (!offset && known_eq (bitpos, 0))
17296 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17297 context);
17298 else if (toplev
17299 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17300 && (dwarf_version >= 4 || !dwarf_strict))
17301 {
17302 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17303 if (!list_ret)
17304 return 0;
17305 if (offset)
17306 {
17307 /* Variable offset. */
17308 list_ret1 = loc_list_from_tree (offset, 0, context);
17309 if (list_ret1 == 0)
17310 return 0;
17311 add_loc_list (&list_ret, list_ret1);
17312 if (!list_ret)
17313 return 0;
17314 add_loc_descr_to_each (list_ret,
17315 new_loc_descr (DW_OP_plus, 0, 0));
17316 }
17317 HOST_WIDE_INT value;
17318 if (bytepos.is_constant (&value) && value > 0)
17319 add_loc_descr_to_each (list_ret,
17320 new_loc_descr (DW_OP_plus_uconst, value, 0));
17321 else if (maybe_ne (bytepos, 0))
17322 loc_list_plus_const (list_ret, bytepos);
17323 add_loc_descr_to_each (list_ret,
17324 new_loc_descr (DW_OP_stack_value, 0, 0));
17325 }
17326 return list_ret;
17327 }
17328
17329 /* Set LOC to the next operation that is not a DW_OP_nop operation. If
17330 all operations from LOC are nops, move to the last one. Insert in NOPS all
17331 operations that are skipped. */
17332
17333 static void
17334 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17335 hash_set<dw_loc_descr_ref> &nops)
17336 {
17337 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17338 {
17339 nops.add (loc);
17340 loc = loc->dw_loc_next;
17341 }
17342 }
17343
17344 /* Helper for loc_descr_without_nops: free the location description operation
17345 LOC. */
17346
17347 bool
17348 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17349 {
17350 ggc_free (loc);
17351 return true;
17352 }
17353
17354 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17355 finishes LOC. */
17356
17357 static void
17358 loc_descr_without_nops (dw_loc_descr_ref &loc)
17359 {
17360 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17361 return;
17362
17363 /* Set of all DW_OP_nop operations we remove. */
17364 hash_set<dw_loc_descr_ref> nops;
17365
17366 /* First, strip all prefix NOP operations in order to keep the head of the
17367 operations list. */
17368 loc_descr_to_next_no_nop (loc, nops);
17369
17370 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17371 {
17372 /* For control flow operations: strip "prefix" nops in destination
17373 labels. */
17374 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17375 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17376 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17377 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17378
17379 /* Do the same for the operations that follow, then move to the next
17380 iteration. */
17381 if (cur->dw_loc_next != NULL)
17382 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17383 cur = cur->dw_loc_next;
17384 }
17385
17386 nops.traverse<void *, free_loc_descr> (NULL);
17387 }
17388
17389
17390 struct dwarf_procedure_info;
17391
17392 /* Helper structure for location descriptions generation. */
17393 struct loc_descr_context
17394 {
17395 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17396 NULL_TREE if DW_OP_push_object_address is invalid for this location
17397 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17398 tree context_type;
17399 /* The ..._DECL node that should be translated as a
17400 DW_OP_push_object_address operation. */
17401 tree base_decl;
17402 /* Information about the DWARF procedure we are currently generating. NULL if
17403 we are not generating a DWARF procedure. */
17404 struct dwarf_procedure_info *dpi;
17405 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
17406 by consumer. Used for DW_TAG_generic_subrange attributes. */
17407 bool placeholder_arg;
17408 /* True if PLACEHOLDER_EXPR has been seen. */
17409 bool placeholder_seen;
17410 };
17411
17412 /* DWARF procedures generation
17413
17414 DWARF expressions (a.k.a. location descriptions) are used to encode values
17415 that vary, such as sizes or offsets. Such computations can have redundant parts
17416 that can be factorized in order to reduce the size of the output debug
17417 information. This is the whole point of DWARF procedures.
17418
17419 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17420 already factorized into functions ("size functions") in order to handle very
17421 big and complex types. Such functions are quite simple: they have integral
17422 arguments, they return an integral result and their body contains only a
17423 return statement with arithmetic expressions. This is the only kind of
17424 function we are interested in translating into DWARF procedures, here.
17425
17426 DWARF expressions and DWARF procedures are executed using a stack, so we have
17427 to define some calling convention for them to interact. Let's say that:
17428
17429 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17430 all arguments in reverse order (right-to-left) so that when the DWARF
17431 procedure execution starts, the first argument is the top of the stack.
17432
17433 - Then, when returning, the DWARF procedure must have consumed all arguments
17434 on the stack, must have pushed the result and touched nothing else.
17435
17436 - Each argument and the result have integral types, so each of them can be
17437 held in a single stack slot.
17438
17439 - We call "frame offset" the number of stack slots that are "under DWARF
17440 procedure control": it includes the argument slots, the temporaries and
17441 the result slot. Thus, it is equal to the number of arguments when the
17442 procedure execution starts and must be equal to one (the result) when it
17443 returns. */
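
/* As an illustration of this convention (a hand-written sketch, not actual
compiler output; SZ, A and B are hypothetical names), consider a size
function SZ (A, B) that computes A + B. A caller evaluates the arguments
right-to-left and then transfers control:

<push B> <push A> DW_OP_call4 <DIE for SZ>

When SZ starts executing, the frame offset is 2 (the two argument slots,
with A on top of the stack); when it returns, it is 1: both arguments have
been consumed and only the result A + B remains. */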
17444
17445 /* Helper structure used when generating operations for a DWARF procedure. */
17446 struct dwarf_procedure_info
17447 {
17448 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17449 currently translated. */
17450 tree fndecl;
17451 /* The number of arguments FNDECL takes. */
17452 unsigned args_count;
17453 };
17454
17455 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17456 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17457 equate it to this DIE. */
17458
17459 static dw_die_ref
17460 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17461 dw_die_ref parent_die)
17462 {
17463 dw_die_ref dwarf_proc_die;
17464
17465 if ((dwarf_version < 3 && dwarf_strict)
17466 || location == NULL)
17467 return NULL;
17468
17469 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17470 if (fndecl)
17471 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17472 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17473 return dwarf_proc_die;
17474 }
17475
17476 /* Return whether TYPE is a supported type as a DWARF procedure argument
17477 type or return type (we handle only scalar types and pointer types that
17478 aren't wider than the DWARF expression evaluation stack). */
17479
17480 static bool
17481 is_handled_procedure_type (tree type)
17482 {
17483 return ((INTEGRAL_TYPE_P (type)
17484 || TREE_CODE (type) == OFFSET_TYPE
17485 || TREE_CODE (type) == POINTER_TYPE)
17486 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17487 }
17488
17489 /* Helper for resolve_args_picking: do the same but stop when coming across
17490 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17491 offset *before* evaluating the corresponding operation. */
17492
17493 static bool
17494 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17495 struct dwarf_procedure_info *dpi,
17496 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17497 {
17498 /* The "frame_offset" identifier is already used to name a macro... */
17499 unsigned frame_offset_ = initial_frame_offset;
17500 dw_loc_descr_ref l;
17501
17502 for (l = loc; l != NULL;)
17503 {
17504 bool existed;
17505 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17506
17507 /* If we have already met this node, there is nothing left to compute. */
17508 if (existed)
17509 {
17510 /* Make sure that the stack size is consistent wherever the execution
17511 flow comes from. */
17512 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17513 break;
17514 }
17515 l_frame_offset = frame_offset_;
17516
17517 /* If needed, relocate the picking offset with respect to the frame
17518 offset. */
17519 if (l->frame_offset_rel)
17520 {
17521 unsigned HOST_WIDE_INT off;
17522 switch (l->dw_loc_opc)
17523 {
17524 case DW_OP_pick:
17525 off = l->dw_loc_oprnd1.v.val_unsigned;
17526 break;
17527 case DW_OP_dup:
17528 off = 0;
17529 break;
17530 case DW_OP_over:
17531 off = 1;
17532 break;
17533 default:
17534 gcc_unreachable ();
17535 }
17536 /* frame_offset_ is the size of the current stack frame, including
17537 incoming arguments. Besides, the arguments are pushed
17538 right-to-left. Thus, in order to access the Nth argument from
17539 this operation node, the picking has to skip temporaries *plus*
17540 one stack slot per argument (0 for the first one, 1 for the second
17541 one, etc.).
17542
17543 The targeted argument number (N) is already set as the operand,
17544 and the number of temporaries can be computed with:
17545 frame_offset_ - dpi->args_count */
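/* For instance (purely illustrative numbers), with dpi->args_count == 2
and frame_offset_ == 3 (two arguments plus one temporary), a DW_OP_pick
targeting argument number 1 becomes off = 1 + (3 - 2) = 2, that is
DW_OP_pick 2. */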
17546 off += frame_offset_ - dpi->args_count;
17547
17548 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17549 if (off > 255)
17550 return false;
17551
17552 if (off == 0)
17553 {
17554 l->dw_loc_opc = DW_OP_dup;
17555 l->dw_loc_oprnd1.v.val_unsigned = 0;
17556 }
17557 else if (off == 1)
17558 {
17559 l->dw_loc_opc = DW_OP_over;
17560 l->dw_loc_oprnd1.v.val_unsigned = 0;
17561 }
17562 else
17563 {
17564 l->dw_loc_opc = DW_OP_pick;
17565 l->dw_loc_oprnd1.v.val_unsigned = off;
17566 }
17567 }
17568
17569 /* Update frame_offset according to the effect the current operation has
17570 on the stack. */
17571 switch (l->dw_loc_opc)
17572 {
17573 case DW_OP_deref:
17574 case DW_OP_swap:
17575 case DW_OP_rot:
17576 case DW_OP_abs:
17577 case DW_OP_neg:
17578 case DW_OP_not:
17579 case DW_OP_plus_uconst:
17580 case DW_OP_skip:
17581 case DW_OP_reg0:
17582 case DW_OP_reg1:
17583 case DW_OP_reg2:
17584 case DW_OP_reg3:
17585 case DW_OP_reg4:
17586 case DW_OP_reg5:
17587 case DW_OP_reg6:
17588 case DW_OP_reg7:
17589 case DW_OP_reg8:
17590 case DW_OP_reg9:
17591 case DW_OP_reg10:
17592 case DW_OP_reg11:
17593 case DW_OP_reg12:
17594 case DW_OP_reg13:
17595 case DW_OP_reg14:
17596 case DW_OP_reg15:
17597 case DW_OP_reg16:
17598 case DW_OP_reg17:
17599 case DW_OP_reg18:
17600 case DW_OP_reg19:
17601 case DW_OP_reg20:
17602 case DW_OP_reg21:
17603 case DW_OP_reg22:
17604 case DW_OP_reg23:
17605 case DW_OP_reg24:
17606 case DW_OP_reg25:
17607 case DW_OP_reg26:
17608 case DW_OP_reg27:
17609 case DW_OP_reg28:
17610 case DW_OP_reg29:
17611 case DW_OP_reg30:
17612 case DW_OP_reg31:
17613 case DW_OP_bregx:
17614 case DW_OP_piece:
17615 case DW_OP_deref_size:
17616 case DW_OP_nop:
17617 case DW_OP_bit_piece:
17618 case DW_OP_implicit_value:
17619 case DW_OP_stack_value:
17620 break;
17621
17622 case DW_OP_addr:
17623 case DW_OP_const1u:
17624 case DW_OP_const1s:
17625 case DW_OP_const2u:
17626 case DW_OP_const2s:
17627 case DW_OP_const4u:
17628 case DW_OP_const4s:
17629 case DW_OP_const8u:
17630 case DW_OP_const8s:
17631 case DW_OP_constu:
17632 case DW_OP_consts:
17633 case DW_OP_dup:
17634 case DW_OP_over:
17635 case DW_OP_pick:
17636 case DW_OP_lit0:
17637 case DW_OP_lit1:
17638 case DW_OP_lit2:
17639 case DW_OP_lit3:
17640 case DW_OP_lit4:
17641 case DW_OP_lit5:
17642 case DW_OP_lit6:
17643 case DW_OP_lit7:
17644 case DW_OP_lit8:
17645 case DW_OP_lit9:
17646 case DW_OP_lit10:
17647 case DW_OP_lit11:
17648 case DW_OP_lit12:
17649 case DW_OP_lit13:
17650 case DW_OP_lit14:
17651 case DW_OP_lit15:
17652 case DW_OP_lit16:
17653 case DW_OP_lit17:
17654 case DW_OP_lit18:
17655 case DW_OP_lit19:
17656 case DW_OP_lit20:
17657 case DW_OP_lit21:
17658 case DW_OP_lit22:
17659 case DW_OP_lit23:
17660 case DW_OP_lit24:
17661 case DW_OP_lit25:
17662 case DW_OP_lit26:
17663 case DW_OP_lit27:
17664 case DW_OP_lit28:
17665 case DW_OP_lit29:
17666 case DW_OP_lit30:
17667 case DW_OP_lit31:
17668 case DW_OP_breg0:
17669 case DW_OP_breg1:
17670 case DW_OP_breg2:
17671 case DW_OP_breg3:
17672 case DW_OP_breg4:
17673 case DW_OP_breg5:
17674 case DW_OP_breg6:
17675 case DW_OP_breg7:
17676 case DW_OP_breg8:
17677 case DW_OP_breg9:
17678 case DW_OP_breg10:
17679 case DW_OP_breg11:
17680 case DW_OP_breg12:
17681 case DW_OP_breg13:
17682 case DW_OP_breg14:
17683 case DW_OP_breg15:
17684 case DW_OP_breg16:
17685 case DW_OP_breg17:
17686 case DW_OP_breg18:
17687 case DW_OP_breg19:
17688 case DW_OP_breg20:
17689 case DW_OP_breg21:
17690 case DW_OP_breg22:
17691 case DW_OP_breg23:
17692 case DW_OP_breg24:
17693 case DW_OP_breg25:
17694 case DW_OP_breg26:
17695 case DW_OP_breg27:
17696 case DW_OP_breg28:
17697 case DW_OP_breg29:
17698 case DW_OP_breg30:
17699 case DW_OP_breg31:
17700 case DW_OP_fbreg:
17701 case DW_OP_push_object_address:
17702 case DW_OP_call_frame_cfa:
17703 case DW_OP_GNU_variable_value:
17704 ++frame_offset_;
17705 break;
17706
17707 case DW_OP_drop:
17708 case DW_OP_xderef:
17709 case DW_OP_and:
17710 case DW_OP_div:
17711 case DW_OP_minus:
17712 case DW_OP_mod:
17713 case DW_OP_mul:
17714 case DW_OP_or:
17715 case DW_OP_plus:
17716 case DW_OP_shl:
17717 case DW_OP_shr:
17718 case DW_OP_shra:
17719 case DW_OP_xor:
17720 case DW_OP_bra:
17721 case DW_OP_eq:
17722 case DW_OP_ge:
17723 case DW_OP_gt:
17724 case DW_OP_le:
17725 case DW_OP_lt:
17726 case DW_OP_ne:
17727 case DW_OP_regx:
17728 case DW_OP_xderef_size:
17729 --frame_offset_;
17730 break;
17731
17732 case DW_OP_call2:
17733 case DW_OP_call4:
17734 case DW_OP_call_ref:
17735 {
17736 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17737 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17738
17739 if (stack_usage == NULL)
17740 return false;
17741 frame_offset_ += *stack_usage;
17742 break;
17743 }
17744
17745 case DW_OP_implicit_pointer:
17746 case DW_OP_entry_value:
17747 case DW_OP_const_type:
17748 case DW_OP_regval_type:
17749 case DW_OP_deref_type:
17750 case DW_OP_convert:
17751 case DW_OP_reinterpret:
17752 case DW_OP_form_tls_address:
17753 case DW_OP_GNU_push_tls_address:
17754 case DW_OP_GNU_uninit:
17755 case DW_OP_GNU_encoded_addr:
17756 case DW_OP_GNU_implicit_pointer:
17757 case DW_OP_GNU_entry_value:
17758 case DW_OP_GNU_const_type:
17759 case DW_OP_GNU_regval_type:
17760 case DW_OP_GNU_deref_type:
17761 case DW_OP_GNU_convert:
17762 case DW_OP_GNU_reinterpret:
17763 case DW_OP_GNU_parameter_ref:
17764 /* loc_list_from_tree will probably not output these operations for
17765 size functions, so assume they will not appear here. */
17766 /* Fall through... */
17767
17768 default:
17769 gcc_unreachable ();
17770 }
17771
17772 /* Now, follow the control flow (except subroutine calls). */
17773 switch (l->dw_loc_opc)
17774 {
17775 case DW_OP_bra:
17776 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17777 frame_offsets))
17778 return false;
17779 /* Fall through. */
17780
17781 case DW_OP_skip:
17782 l = l->dw_loc_oprnd1.v.val_loc;
17783 break;
17784
17785 case DW_OP_stack_value:
17786 return true;
17787
17788 default:
17789 l = l->dw_loc_next;
17790 break;
17791 }
17792 }
17793
17794 return true;
17795 }
17796
17797 /* Make a DFS over operations reachable through LOC (i.e. follow branch
17798 operations) in order to resolve the operand of DW_OP_pick operations that
17799 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
17800 offset *before* LOC is executed. Return whether all relocations were
17801 successful. */
17802
17803 static bool
17804 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17805 struct dwarf_procedure_info *dpi)
17806 {
17807 /* Associate to all visited operations the frame offset *before* evaluating
17808 this operation. */
17809 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
17810
17811 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
17812 frame_offsets);
17813 }
17814
17815 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
17816 Return NULL if it is not possible. */
17817
17818 static dw_die_ref
17819 function_to_dwarf_procedure (tree fndecl)
17820 {
17821 struct loc_descr_context ctx;
17822 struct dwarf_procedure_info dpi;
17823 dw_die_ref dwarf_proc_die;
17824 tree tree_body = DECL_SAVED_TREE (fndecl);
17825 dw_loc_descr_ref loc_body, epilogue;
17826
17827 tree cursor;
17828 unsigned i;
17829
17830 /* Do not generate multiple DWARF procedures for the same function
17831 declaration. */
17832 dwarf_proc_die = lookup_decl_die (fndecl);
17833 if (dwarf_proc_die != NULL)
17834 return dwarf_proc_die;
17835
17836 /* DWARF procedures are available starting with the DWARFv3 standard. */
17837 if (dwarf_version < 3 && dwarf_strict)
17838 return NULL;
17839
17840 /* We handle only functions for which we still have a body, that return a
17841 supported type and that take arguments with supported types. Note that
17842 there is no point translating functions that return nothing. */
17843 if (tree_body == NULL_TREE
17844 || DECL_RESULT (fndecl) == NULL_TREE
17845 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
17846 return NULL;
17847
17848 for (cursor = DECL_ARGUMENTS (fndecl);
17849 cursor != NULL_TREE;
17850 cursor = TREE_CHAIN (cursor))
17851 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
17852 return NULL;
17853
17854 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
17855 if (TREE_CODE (tree_body) != RETURN_EXPR)
17856 return NULL;
17857 tree_body = TREE_OPERAND (tree_body, 0);
17858 if (TREE_CODE (tree_body) != MODIFY_EXPR
17859 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
17860 return NULL;
17861 tree_body = TREE_OPERAND (tree_body, 1);
17862
17863 /* Try to translate the body expression itself. Note that this will probably
17864 cause an infinite recursion if its call graph has a cycle. This is very
17865 unlikely for size functions, however, so don't bother with such things at
17866 the moment. */
17867 ctx.context_type = NULL_TREE;
17868 ctx.base_decl = NULL_TREE;
17869 ctx.dpi = &dpi;
17870 ctx.placeholder_arg = false;
17871 ctx.placeholder_seen = false;
17872 dpi.fndecl = fndecl;
17873 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
17874 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
17875 if (!loc_body)
17876 return NULL;
17877
17878 /* After evaluating all operands in "loc_body", we should still have on the
17879 stack all arguments plus the desired function result (top of the stack).
17880 Generate code in order to keep only the result in our stack frame. */
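/* For example, for a hypothetical two-argument procedure, the loop below
builds the epilogue DW_OP_swap; DW_OP_drop; DW_OP_swap; DW_OP_drop: each
swap/drop pair removes one argument slot from just under the result. */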
17881 epilogue = NULL;
17882 for (i = 0; i < dpi.args_count; ++i)
17883 {
17884 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
17885 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
17886 op_couple->dw_loc_next->dw_loc_next = epilogue;
17887 epilogue = op_couple;
17888 }
17889 add_loc_descr (&loc_body, epilogue);
17890 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
17891 return NULL;
17892
17893 /* Trailing nops from loc_descriptor_from_tree (if any) cannot be removed
17894 because they are considered useful. Now that there is an epilogue, they
17895 are not useful anymore, so give it another try. */
17896 loc_descr_without_nops (loc_body);
17897
17898 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
17899 a DW_TAG_dwarf_procedure, so we may have a conflict here. It's unlikely,
17900 though, given that size functions do not come from source, so they should
17901 not have a dedicated DW_TAG_subprogram DIE. */
17902 dwarf_proc_die
17903 = new_dwarf_proc_die (loc_body, fndecl,
17904 get_context_die (DECL_CONTEXT (fndecl)));
17905
17906 /* The called DWARF procedure consumes one stack slot per argument and
17907 returns one stack slot. */
17908 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
17909
17910 return dwarf_proc_die;
17911 }
17912
17913
17914 /* Generate a DWARF location list representing LOC.
17915 If WANT_ADDRESS is 0, an expression computing the value of LOC is returned;
17916 if WANT_ADDRESS is 1, an expression computing the address of LOC is returned;
17917 if WANT_ADDRESS is 2, an expression computing an address usable in a
17918 location description is returned (i.e. DW_OP_reg can be used
17919 to refer to register values).
17920
17921 CONTEXT provides information to customize the location descriptions
17922 generation. Its context_type field specifies what type is implicitly
17923 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
17924 will not be generated.
17925
17926 Its DPI field determines whether we are generating a DWARF expression for a
17927 DWARF procedure, so PARM_DECL references are processed specifically.
17928
17929 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
17930 and dpi fields were null. */
17931
17932 static dw_loc_list_ref
17933 loc_list_from_tree_1 (tree loc, int want_address,
17934 struct loc_descr_context *context)
17935 {
17936 dw_loc_descr_ref ret = NULL, ret1 = NULL;
17937 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17938 int have_address = 0;
17939 enum dwarf_location_atom op;
17940
17941 /* ??? Most of the time we do not take proper care of sign/zero
17942 extending the values. Hopefully this won't be a real
17943 problem... */
17944
17945 if (context != NULL
17946 && context->base_decl == loc
17947 && want_address == 0)
17948 {
17949 if (dwarf_version >= 3 || !dwarf_strict)
17950 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
17951 NULL, 0, NULL, 0, NULL);
17952 else
17953 return NULL;
17954 }
17955
17956 switch (TREE_CODE (loc))
17957 {
17958 case ERROR_MARK:
17959 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
17960 return 0;
17961
17962 case PLACEHOLDER_EXPR:
17963 /* This case involves extracting fields from an object to determine the
17964 position of other fields. It is supposed to appear only as the first
17965 operand of COMPONENT_REF nodes and to reference precisely the type
17966 that the context allows. */
17967 if (context != NULL
17968 && TREE_TYPE (loc) == context->context_type
17969 && want_address >= 1)
17970 {
17971 if (dwarf_version >= 3 || !dwarf_strict)
17972 {
17973 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
17974 have_address = 1;
17975 break;
17976 }
17977 else
17978 return NULL;
17979 }
17980 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
17981 the single argument passed by consumer. */
17982 else if (context != NULL
17983 && context->placeholder_arg
17984 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
17985 && want_address == 0)
17986 {
17987 ret = new_loc_descr (DW_OP_pick, 0, 0);
17988 ret->frame_offset_rel = 1;
17989 context->placeholder_seen = true;
17990 break;
17991 }
17992 else
17993 expansion_failed (loc, NULL_RTX,
17994 "PLACEHOLDER_EXPR for an unexpected type");
17995 break;
17996
17997 case CALL_EXPR:
17998 {
17999 const int nargs = call_expr_nargs (loc);
18000 tree callee = get_callee_fndecl (loc);
18001 int i;
18002 dw_die_ref dwarf_proc;
18003
18004 if (callee == NULL_TREE)
18005 goto call_expansion_failed;
18006
18007 /* We handle only functions that return a supported scalar type. */
18008 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
18009 goto call_expansion_failed;
18010
18011 dwarf_proc = function_to_dwarf_procedure (callee);
18012 if (dwarf_proc == NULL)
18013 goto call_expansion_failed;
18014
18015 /* Evaluate arguments right-to-left so that the first argument will
18016 be the top-most one on the stack. */
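/* In other words, the sequence built below looks like:
<arg N-1> ... <arg 1> <arg 0> DW_OP_call4 <procedure DIE>
(a sketch; <arg i> stands for the expression computing argument i). */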
18017 for (i = nargs - 1; i >= 0; --i)
18018 {
18019 dw_loc_descr_ref loc_descr
18020 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
18021 context);
18022
18023 if (loc_descr == NULL)
18024 goto call_expansion_failed;
18025
18026 add_loc_descr (&ret, loc_descr);
18027 }
18028
18029 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
18030 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18031 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
18032 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
18033 add_loc_descr (&ret, ret1);
18034 break;
18035
18036 call_expansion_failed:
18037 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
18038 /* We could not translate this call into a DWARF expression. */
18039 return 0;
18040 }
18041
18042 case PREINCREMENT_EXPR:
18043 case PREDECREMENT_EXPR:
18044 case POSTINCREMENT_EXPR:
18045 case POSTDECREMENT_EXPR:
18046 expansion_failed (loc, NULL_RTX, "PRE/POST INDCREMENT/DECREMENT");
18047 /* There are no opcodes for these operations. */
18048 return 0;
18049
18050 case ADDR_EXPR:
18051 /* If we already want an address, see if there is INDIRECT_REF inside
18052 e.g. for &this->field. */
18053 if (want_address)
18054 {
18055 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18056 (loc, want_address == 2, context);
18057 if (list_ret)
18058 have_address = 1;
18059 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18060 && (ret = cst_pool_loc_descr (loc)))
18061 have_address = 1;
18062 }
18063 /* Otherwise, process the argument and look for the address. */
18064 if (!list_ret && !ret)
18065 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18066 else
18067 {
18068 if (want_address)
18069 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18070 return NULL;
18071 }
18072 break;
18073
18074 case VAR_DECL:
18075 if (DECL_THREAD_LOCAL_P (loc))
18076 {
18077 rtx rtl;
18078 enum dwarf_location_atom tls_op;
18079 enum dtprel_bool dtprel = dtprel_false;
18080
18081 if (targetm.have_tls)
18082 {
18083 /* If this is not defined, we have no way to emit the
18084 data. */
18085 if (!targetm.asm_out.output_dwarf_dtprel)
18086 return 0;
18087
18088 /* The way DW_OP_GNU_push_tls_address is specified, we
18089 can only look up addresses of objects in the current
18090 module. We used DW_OP_addr as first op, but that's
18091 wrong, because DW_OP_addr is relocated by the debug
18092 info consumer, while DW_OP_GNU_push_tls_address
18093 operand shouldn't be. */
18094 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18095 return 0;
18096 dtprel = dtprel_true;
18097 /* We check for DWARF 5 here because gdb did not implement
18098 DW_OP_form_tls_address until after 7.12. */
18099 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18100 : DW_OP_GNU_push_tls_address);
18101 }
18102 else
18103 {
18104 if (!targetm.emutls.debug_form_tls_address
18105 || !(dwarf_version >= 3 || !dwarf_strict))
18106 return 0;
18107 /* We stuffed the control variable into the DECL_VALUE_EXPR
18108 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18109 no longer appear in gimple code. We used the control
18110 variable specifically so that we could pick it up here. */
18111 loc = DECL_VALUE_EXPR (loc);
18112 tls_op = DW_OP_form_tls_address;
18113 }
18114
18115 rtl = rtl_for_decl_location (loc);
18116 if (rtl == NULL_RTX)
18117 return 0;
18118
18119 if (!MEM_P (rtl))
18120 return 0;
18121 rtl = XEXP (rtl, 0);
18122 if (! CONSTANT_P (rtl))
18123 return 0;
18124
18125 ret = new_addr_loc_descr (rtl, dtprel);
18126 ret1 = new_loc_descr (tls_op, 0, 0);
18127 add_loc_descr (&ret, ret1);
18128
18129 have_address = 1;
18130 break;
18131 }
18132 /* FALLTHRU */
18133
18134 case PARM_DECL:
18135 if (context != NULL && context->dpi != NULL
18136 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18137 {
18138 /* We are generating code for a DWARF procedure and we want to access
18139 one of its arguments: find the appropriate argument offset and let
18140 the resolve_args_picking pass compute the offset that complies
18141 with the stack frame size. */
18142 unsigned i = 0;
18143 tree cursor;
18144
18145 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18146 cursor != NULL_TREE && cursor != loc;
18147 cursor = TREE_CHAIN (cursor), ++i)
18148 ;
18149 /* If we are translating a DWARF procedure, all referenced parameters
18150 must belong to the current function. */
18151 gcc_assert (cursor != NULL_TREE);
18152
18153 ret = new_loc_descr (DW_OP_pick, i, 0);
18154 ret->frame_offset_rel = 1;
18155 break;
18156 }
18157 /* FALLTHRU */
18158
18159 case RESULT_DECL:
18160 if (DECL_HAS_VALUE_EXPR_P (loc))
18161 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18162 want_address, context);
18163 /* FALLTHRU */
18164
18165 case FUNCTION_DECL:
18166 {
18167 rtx rtl;
18168 var_loc_list *loc_list = lookup_decl_loc (loc);
18169
18170 if (loc_list && loc_list->first)
18171 {
18172 list_ret = dw_loc_list (loc_list, loc, want_address);
18173 have_address = want_address != 0;
18174 break;
18175 }
18176 rtl = rtl_for_decl_location (loc);
18177 if (rtl == NULL_RTX)
18178 {
18179 if (TREE_CODE (loc) != FUNCTION_DECL
18180 && early_dwarf
18181 && current_function_decl
18182 && want_address != 1
18183 && ! DECL_IGNORED_P (loc)
18184 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18185 || POINTER_TYPE_P (TREE_TYPE (loc)))
18186 && DECL_CONTEXT (loc) == current_function_decl
18187 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18188 <= DWARF2_ADDR_SIZE))
18189 {
18190 dw_die_ref ref = lookup_decl_die (loc);
18191 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18192 if (ref)
18193 {
18194 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18195 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18196 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18197 }
18198 else
18199 {
18200 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18201 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18202 }
18203 break;
18204 }
18205 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18206 return 0;
18207 }
18208 else if (CONST_INT_P (rtl))
18209 {
18210 HOST_WIDE_INT val = INTVAL (rtl);
18211 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18212 val &= GET_MODE_MASK (DECL_MODE (loc));
18213 ret = int_loc_descriptor (val);
18214 }
18215 else if (GET_CODE (rtl) == CONST_STRING)
18216 {
18217 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18218 return 0;
18219 }
18220 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18221 ret = new_addr_loc_descr (rtl, dtprel_false);
18222 else
18223 {
18224 machine_mode mode, mem_mode;
18225
18226 /* Certain constructs can only be represented at top-level. */
18227 if (want_address == 2)
18228 {
18229 ret = loc_descriptor (rtl, VOIDmode,
18230 VAR_INIT_STATUS_INITIALIZED);
18231 have_address = 1;
18232 }
18233 else
18234 {
18235 mode = GET_MODE (rtl);
18236 mem_mode = VOIDmode;
18237 if (MEM_P (rtl))
18238 {
18239 mem_mode = mode;
18240 mode = get_address_mode (rtl);
18241 rtl = XEXP (rtl, 0);
18242 have_address = 1;
18243 }
18244 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18245 VAR_INIT_STATUS_INITIALIZED);
18246 }
18247 if (!ret)
18248 expansion_failed (loc, rtl,
18249 "failed to produce loc descriptor for rtl");
18250 }
18251 }
18252 break;
18253
18254 case MEM_REF:
18255 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18256 {
18257 have_address = 1;
18258 goto do_plus;
18259 }
18260 /* Fallthru. */
18261 case INDIRECT_REF:
18262 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18263 have_address = 1;
18264 break;
18265
18266 case TARGET_MEM_REF:
18267 case SSA_NAME:
18268 case DEBUG_EXPR_DECL:
18269 return NULL;
18270
18271 case COMPOUND_EXPR:
18272 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18273 context);
18274
18275 CASE_CONVERT:
18276 case VIEW_CONVERT_EXPR:
18277 case SAVE_EXPR:
18278 case MODIFY_EXPR:
18279 case NON_LVALUE_EXPR:
18280 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18281 context);
18282
18283 case COMPONENT_REF:
18284 case BIT_FIELD_REF:
18285 case ARRAY_REF:
18286 case ARRAY_RANGE_REF:
18287 case REALPART_EXPR:
18288 case IMAGPART_EXPR:
18289 {
18290 tree obj, offset;
18291 poly_int64 bitsize, bitpos, bytepos;
18292 machine_mode mode;
18293 int unsignedp, reversep, volatilep = 0;
18294
18295 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18296 &unsignedp, &reversep, &volatilep);
18297
18298 gcc_assert (obj != loc);
18299
18300 list_ret = loc_list_from_tree_1 (obj,
18301 want_address == 2
18302 && known_eq (bitpos, 0)
18303 && !offset ? 2 : 1,
18304 context);
18305 /* TODO: We can extract value of the small expression via shifting even
18306 for nonzero bitpos. */
18307 if (list_ret == 0)
18308 return 0;
18309 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18310 || !multiple_p (bitsize, BITS_PER_UNIT))
18311 {
18312 expansion_failed (loc, NULL_RTX,
18313 "bitfield access");
18314 return 0;
18315 }
18316
18317 if (offset != NULL_TREE)
18318 {
18319 /* Variable offset. */
18320 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18321 if (list_ret1 == 0)
18322 return 0;
18323 add_loc_list (&list_ret, list_ret1);
18324 if (!list_ret)
18325 return 0;
18326 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18327 }
18328
18329 HOST_WIDE_INT value;
18330 if (bytepos.is_constant (&value) && value > 0)
18331 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18332 value, 0));
18333 else if (maybe_ne (bytepos, 0))
18334 loc_list_plus_const (list_ret, bytepos);
18335
18336 have_address = 1;
18337 break;
18338 }
18339
18340 case INTEGER_CST:
18341 if ((want_address || !tree_fits_shwi_p (loc))
18342 && (ret = cst_pool_loc_descr (loc)))
18343 have_address = 1;
18344 else if (want_address == 2
18345 && tree_fits_shwi_p (loc)
18346 && (ret = address_of_int_loc_descriptor
18347 (int_size_in_bytes (TREE_TYPE (loc)),
18348 tree_to_shwi (loc))))
18349 have_address = 1;
18350 else if (tree_fits_shwi_p (loc))
18351 ret = int_loc_descriptor (tree_to_shwi (loc));
18352 else if (tree_fits_uhwi_p (loc))
18353 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18354 else
18355 {
18356 expansion_failed (loc, NULL_RTX,
18357 "Integer operand is not host integer");
18358 return 0;
18359 }
18360 break;
18361
18362 case CONSTRUCTOR:
18363 case REAL_CST:
18364 case STRING_CST:
18365 case COMPLEX_CST:
18366 if ((ret = cst_pool_loc_descr (loc)))
18367 have_address = 1;
18368 else if (TREE_CODE (loc) == CONSTRUCTOR)
18369 {
18370 tree type = TREE_TYPE (loc);
18371 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18372 unsigned HOST_WIDE_INT offset = 0;
18373 unsigned HOST_WIDE_INT cnt;
18374 constructor_elt *ce;
18375
18376 if (TREE_CODE (type) == RECORD_TYPE)
18377 {
18378 /* This is very limited, but it's enough to output
18379 pointers to member functions, as long as the
18380 referenced function is defined in the current
18381 translation unit. */
18382 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18383 {
18384 tree val = ce->value;
18385
18386 tree field = ce->index;
18387
18388 if (val)
18389 STRIP_NOPS (val);
18390
18391 if (!field || DECL_BIT_FIELD (field))
18392 {
18393 expansion_failed (loc, NULL_RTX,
18394 "bitfield in record type constructor");
18395 size = offset = (unsigned HOST_WIDE_INT)-1;
18396 ret = NULL;
18397 break;
18398 }
18399
18400 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18401 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18402 gcc_assert (pos + fieldsize <= size);
18403 if (pos < offset)
18404 {
18405 expansion_failed (loc, NULL_RTX,
18406 "out-of-order fields in record constructor");
18407 size = offset = (unsigned HOST_WIDE_INT)-1;
18408 ret = NULL;
18409 break;
18410 }
18411 if (pos > offset)
18412 {
18413 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18414 add_loc_descr (&ret, ret1);
18415 offset = pos;
18416 }
18417 if (val && fieldsize != 0)
18418 {
18419 ret1 = loc_descriptor_from_tree (val, want_address, context);
18420 if (!ret1)
18421 {
18422 expansion_failed (loc, NULL_RTX,
18423 "unsupported expression in field");
18424 size = offset = (unsigned HOST_WIDE_INT)-1;
18425 ret = NULL;
18426 break;
18427 }
18428 add_loc_descr (&ret, ret1);
18429 }
18430 if (fieldsize)
18431 {
18432 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18433 add_loc_descr (&ret, ret1);
18434 offset = pos + fieldsize;
18435 }
18436 }
18437
18438 if (offset != size)
18439 {
18440 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18441 add_loc_descr (&ret, ret1);
18442 offset = size;
18443 }
18444
18445 have_address = !!want_address;
18446 }
18447 else
18448 expansion_failed (loc, NULL_RTX,
18449 "constructor of non-record type");
18450 }
18451 else
18452 /* We can construct small constants here using int_loc_descriptor. */
18453 expansion_failed (loc, NULL_RTX,
18454 "constructor or constant not in constant pool");
18455 break;
18456
18457 case TRUTH_AND_EXPR:
18458 case TRUTH_ANDIF_EXPR:
18459 case BIT_AND_EXPR:
18460 op = DW_OP_and;
18461 goto do_binop;
18462
18463 case TRUTH_XOR_EXPR:
18464 case BIT_XOR_EXPR:
18465 op = DW_OP_xor;
18466 goto do_binop;
18467
18468 case TRUTH_OR_EXPR:
18469 case TRUTH_ORIF_EXPR:
18470 case BIT_IOR_EXPR:
18471 op = DW_OP_or;
18472 goto do_binop;
18473
18474 case FLOOR_DIV_EXPR:
18475 case CEIL_DIV_EXPR:
18476 case ROUND_DIV_EXPR:
18477 case TRUNC_DIV_EXPR:
18478 case EXACT_DIV_EXPR:
18479 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18480 return 0;
18481 op = DW_OP_div;
18482 goto do_binop;
18483
18484 case MINUS_EXPR:
18485 op = DW_OP_minus;
18486 goto do_binop;
18487
18488 case FLOOR_MOD_EXPR:
18489 case CEIL_MOD_EXPR:
18490 case ROUND_MOD_EXPR:
18491 case TRUNC_MOD_EXPR:
18492 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18493 {
18494 op = DW_OP_mod;
18495 goto do_binop;
18496 }
18497 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18498 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18499 if (list_ret == 0 || list_ret1 == 0)
18500 return 0;
18501
18502 add_loc_list (&list_ret, list_ret1);
18503 if (list_ret == 0)
18504 return 0;
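/* With A and B on the stack (B on top), the sequence below replaces them
with A - (A / B) * B, i.e. the signed remainder of A by B. */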
18505 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18506 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18507 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18508 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18509 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
18510 break;
18511
18512 case MULT_EXPR:
18513 op = DW_OP_mul;
18514 goto do_binop;
18515
18516 case LSHIFT_EXPR:
18517 op = DW_OP_shl;
18518 goto do_binop;
18519
18520 case RSHIFT_EXPR:
18521 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18522 goto do_binop;
18523
18524 case POINTER_PLUS_EXPR:
18525 case PLUS_EXPR:
18526 do_plus:
18527 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18528 {
18529 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18530 smarter to encode their opposite. The DW_OP_plus_uconst operation
18531 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18532 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18533 bytes, Y being the size of the operation that pushes the opposite
18534 of the addend. So let's choose the smallest representation. */
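/* For example (an illustrative case assuming a 64-bit address size), an
addend of -1 requires a 10-byte ULEB128 operand, so DW_OP_plus_uconst
would take 11 bytes, whereas pushing the opposite with DW_OP_lit1 and
then DW_OP_minus takes only 2 bytes. */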
18535 const tree tree_addend = TREE_OPERAND (loc, 1);
18536 offset_int wi_addend;
18537 HOST_WIDE_INT shwi_addend;
18538 dw_loc_descr_ref loc_naddend;
18539
18540 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18541 if (list_ret == 0)
18542 return 0;
18543
18544 /* Try to get the literal to push. It is the opposite of the addend,
18545 and since we rely on wrapping during DWARF evaluation, first decode
18546 the addend as a "DWARF-sized" signed number. */
18547 wi_addend = wi::to_offset (tree_addend);
18548 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18549 shwi_addend = wi_addend.to_shwi ();
18550 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18551 ? int_loc_descriptor (-shwi_addend)
18552 : NULL;
18553
18554 if (loc_naddend != NULL
18555 && ((unsigned) size_of_uleb128 (shwi_addend)
18556 > size_of_loc_descr (loc_naddend)))
18557 {
18558 add_loc_descr_to_each (list_ret, loc_naddend);
18559 add_loc_descr_to_each (list_ret,
18560 new_loc_descr (DW_OP_minus, 0, 0));
18561 }
18562 else
18563 {
18564 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18565 {
18566 loc_naddend = loc_cur;
18567 loc_cur = loc_cur->dw_loc_next;
18568 ggc_free (loc_naddend);
18569 }
18570 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18571 }
18572 break;
18573 }
18574
18575 op = DW_OP_plus;
18576 goto do_binop;
18577
18578 case LE_EXPR:
18579 op = DW_OP_le;
18580 goto do_comp_binop;
18581
18582 case GE_EXPR:
18583 op = DW_OP_ge;
18584 goto do_comp_binop;
18585
18586 case LT_EXPR:
18587 op = DW_OP_lt;
18588 goto do_comp_binop;
18589
18590 case GT_EXPR:
18591 op = DW_OP_gt;
18592 goto do_comp_binop;
18593
18594 do_comp_binop:
18595 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18596 {
18597 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18598 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18599 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18600 TREE_CODE (loc));
18601 break;
18602 }
18603 else
18604 goto do_binop;
18605
18606 case EQ_EXPR:
18607 op = DW_OP_eq;
18608 goto do_binop;
18609
18610 case NE_EXPR:
18611 op = DW_OP_ne;
18612 goto do_binop;
18613
18614 do_binop:
18615 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18616 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18617 if (list_ret == 0 || list_ret1 == 0)
18618 return 0;
18619
18620 add_loc_list (&list_ret, list_ret1);
18621 if (list_ret == 0)
18622 return 0;
18623 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18624 break;
18625
18626 case TRUTH_NOT_EXPR:
18627 case BIT_NOT_EXPR:
18628 op = DW_OP_not;
18629 goto do_unop;
18630
18631 case ABS_EXPR:
18632 op = DW_OP_abs;
18633 goto do_unop;
18634
18635 case NEGATE_EXPR:
18636 op = DW_OP_neg;
18637 goto do_unop;
18638
18639 do_unop:
18640 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18641 if (list_ret == 0)
18642 return 0;
18643
18644 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18645 break;
18646
18647 case MIN_EXPR:
18648 case MAX_EXPR:
18649 {
18650 const enum tree_code code =
18651 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18652
18653 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18654 build2 (code, integer_type_node,
18655 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18656 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18657 }
18658
18659 /* fall through */
18660
18661 case COND_EXPR:
18662 {
18663 dw_loc_descr_ref lhs
18664 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18665 dw_loc_list_ref rhs
18666 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18667 dw_loc_descr_ref bra_node, jump_node, tmp;
18668
18669 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18670 if (list_ret == 0 || lhs == 0 || rhs == 0)
18671 return 0;
18672
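/* A sketch of the expression generated below (<...> stands for the
corresponding sub-expressions):

<condition>
DW_OP_bra -> L1
<else value (operand 2)>
DW_OP_skip -> L2
L1: <then value (operand 1)>
L2: DW_OP_nop */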
18673 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18674 add_loc_descr_to_each (list_ret, bra_node);
18675
18676 add_loc_list (&list_ret, rhs);
18677 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18678 add_loc_descr_to_each (list_ret, jump_node);
18679
18680 add_loc_descr_to_each (list_ret, lhs);
18681 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18682 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18683
18684 /* ??? Need a node to point the skip at. Use a nop. */
18685 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18686 add_loc_descr_to_each (list_ret, tmp);
18687 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18688 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
18689 }
18690 break;
18691
18692 case FIX_TRUNC_EXPR:
18693 return 0;
18694
18695 default:
18696 /* Leave front-end specific codes as simply unknown. This comes
18697 up, for instance, with the C STMT_EXPR. */
18698 if ((unsigned int) TREE_CODE (loc)
18699 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18700 {
18701 expansion_failed (loc, NULL_RTX,
18702 "language specific tree node");
18703 return 0;
18704 }
18705
18706 /* Otherwise this is a generic code; we should just list all of
18707 these explicitly. We forgot one. */
18708 if (flag_checking)
18709 gcc_unreachable ();
18710
18711 /* In a release build, we want to degrade gracefully: better to
18712 generate incomplete debugging information than to crash. */
18713 return NULL;
18714 }
18715
18716 if (!ret && !list_ret)
18717 return 0;
18718
18719 if (want_address == 2 && !have_address
18720 && (dwarf_version >= 4 || !dwarf_strict))
18721 {
18722 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18723 {
18724 expansion_failed (loc, NULL_RTX,
18725 "DWARF address size mismatch");
18726 return 0;
18727 }
18728 if (ret)
18729 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18730 else
18731 add_loc_descr_to_each (list_ret,
18732 new_loc_descr (DW_OP_stack_value, 0, 0));
18733 have_address = 1;
18734 }
18735 /* Report a failure if we cannot fulfill the request for an address. */
18736 if (want_address && !have_address)
18737 {
18738 expansion_failed (loc, NULL_RTX,
18739 "Want address and only have value");
18740 return 0;
18741 }
18742
18743 gcc_assert (!ret || !list_ret);
18744
18745 /* If we've got an address and don't want one, dereference. */
18746 if (!want_address && have_address)
18747 {
18748 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18749
18750 if (size > DWARF2_ADDR_SIZE || size == -1)
18751 {
18752 expansion_failed (loc, NULL_RTX,
18753 "DWARF address size mismatch");
18754 return 0;
18755 }
18756 else if (size == DWARF2_ADDR_SIZE)
18757 op = DW_OP_deref;
18758 else
18759 op = DW_OP_deref_size;
18760
18761 if (ret)
18762 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18763 else
18764 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18765 }
18766 if (ret)
18767 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
18768
18769 return list_ret;
18770 }
18771
18772 /* Likewise, but strip useless DW_OP_nop operations in the resulting
18773 expressions. */
18774
18775 static dw_loc_list_ref
18776 loc_list_from_tree (tree loc, int want_address,
18777 struct loc_descr_context *context)
18778 {
18779 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
18780
18781 for (dw_loc_list_ref loc_cur = result;
18782 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
18783 loc_descr_without_nops (loc_cur->expr);
18784 return result;
18785 }
18786
18787 /* Same as above, but return only a single location expression. */
18788 static dw_loc_descr_ref
18789 loc_descriptor_from_tree (tree loc, int want_address,
18790 struct loc_descr_context *context)
18791 {
18792 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
18793 if (!ret)
18794 return NULL;
18795 if (ret->dw_loc_next)
18796 {
18797 expansion_failed (loc, NULL_RTX,
18798 "Location list where only loc descriptor needed");
18799 return NULL;
18800 }
18801 return ret->expr;
18802 }
18803
18804 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
18805 pointer to the declared type for the relevant field variable, or return
18806 `integer_type_node' if the given node turns out to be an
18807 ERROR_MARK node. */
18808
18809 static inline tree
18810 field_type (const_tree decl)
18811 {
18812 tree type;
18813
18814 if (TREE_CODE (decl) == ERROR_MARK)
18815 return integer_type_node;
18816
18817 type = DECL_BIT_FIELD_TYPE (decl);
18818 if (type == NULL_TREE)
18819 type = TREE_TYPE (decl);
18820
18821 return type;
18822 }
18823
18824 /* Given a pointer to a tree node, return the alignment in bits for
18825 it, or else return BITS_PER_WORD if the node actually turns out to
18826 be an ERROR_MARK node. */
18827
18828 static inline unsigned
18829 simple_type_align_in_bits (const_tree type)
18830 {
18831 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
18832 }
18833
18834 static inline unsigned
18835 simple_decl_align_in_bits (const_tree decl)
18836 {
18837 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
18838 }
18839
18840 /* Return the result of rounding T up to ALIGN. */
18841
18842 static inline offset_int
18843 round_up_to_align (const offset_int &t, unsigned int align)
18844 {
18845 return wi::udiv_trunc (t + align - 1, align) * align;
18846 }
18847
18848 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
18849 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
18850 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
18851 if we fail to return the size in one of these two forms. */
18852
18853 static dw_loc_descr_ref
18854 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
18855 {
18856 tree tree_size;
18857 struct loc_descr_context ctx;
18858
18859 /* Prefer to return a constant integer, if possible. */
18860 *cst_size = int_size_in_bytes (type);
18861 if (*cst_size != -1)
18862 return NULL;
18863
18864 ctx.context_type = const_cast<tree> (type);
18865 ctx.base_decl = NULL_TREE;
18866 ctx.dpi = NULL;
18867 ctx.placeholder_arg = false;
18868 ctx.placeholder_seen = false;
18869
18870 type = TYPE_MAIN_VARIANT (type);
18871 tree_size = TYPE_SIZE_UNIT (type);
18872 return ((tree_size != NULL_TREE)
18873 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
18874 : NULL);
18875 }
18876
18877 /* Helper structure for RECORD_TYPE processing. */
18878 struct vlr_context
18879 {
18880 /* Root RECORD_TYPE. It is needed to generate data member location
18881 descriptions in variable-length records (VLR), but also to cope with
18882 variants, which are composed of nested structures multiplexed with
18883 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
18884 function processing a FIELD_DECL, it is required to be non-null. */
18885 tree struct_type;
18886 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
18887 QUAL_UNION_TYPE), this holds an expression that computes the offset for
18888 this variant part as part of the root record (in storage units). For
18889 regular records, it must be NULL_TREE. */
18890 tree variant_part_offset;
18891 };
18892
18893 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
18894 addressed byte of the "containing object" for the given FIELD_DECL. If
18895 possible, return a native constant through CST_OFFSET (in which case NULL is
18896 returned); otherwise return a DWARF expression that computes the offset.
18897
18898 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
18899 that offset is, either because the argument turns out to be a pointer to an
18900 ERROR_MARK node, or because the offset expression is too complex for us.
18901
18902 CTX is required: see the comment for VLR_CONTEXT. */
18903
18904 static dw_loc_descr_ref
18905 field_byte_offset (const_tree decl, struct vlr_context *ctx,
18906 HOST_WIDE_INT *cst_offset)
18907 {
18908 tree tree_result;
18909 dw_loc_list_ref loc_result;
18910
18911 *cst_offset = 0;
18912
18913 if (TREE_CODE (decl) == ERROR_MARK)
18914 return NULL;
18915 else
18916 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
18917
18918 /* We cannot handle variable bit offsets at the moment, so give up if that
18919 is the case. */
18920 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
18921 return NULL;
18922
18923 #ifdef PCC_BITFIELD_TYPE_MATTERS
18924 /* We used to handle only constant offsets in all cases. Now, we handle
18925 dynamic byte offsets properly only when PCC_BITFIELD_TYPE_MATTERS is
18926 false. */
18927 if (PCC_BITFIELD_TYPE_MATTERS
18928 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
18929 {
18930 offset_int object_offset_in_bits;
18931 offset_int object_offset_in_bytes;
18932 offset_int bitpos_int;
18933 tree type;
18934 tree field_size_tree;
18935 offset_int deepest_bitpos;
18936 offset_int field_size_in_bits;
18937 unsigned int type_align_in_bits;
18938 unsigned int decl_align_in_bits;
18939 offset_int type_size_in_bits;
18940
18941 bitpos_int = wi::to_offset (bit_position (decl));
18942 type = field_type (decl);
18943 type_size_in_bits = offset_int_type_size_in_bits (type);
18944 type_align_in_bits = simple_type_align_in_bits (type);
18945
18946 field_size_tree = DECL_SIZE (decl);
18947
18948 /* The size could be unspecified if there was an error, or for
18949 a flexible array member. */
18950 if (!field_size_tree)
18951 field_size_tree = bitsize_zero_node;
18952
18953 /* If the size of the field is not constant, use the type size. */
18954 if (TREE_CODE (field_size_tree) == INTEGER_CST)
18955 field_size_in_bits = wi::to_offset (field_size_tree);
18956 else
18957 field_size_in_bits = type_size_in_bits;
18958
18959 decl_align_in_bits = simple_decl_align_in_bits (decl);
18960
18961 /* The GCC front-end doesn't make any attempt to keep track of the
18962 starting bit offset (relative to the start of the containing
18963 structure type) of the hypothetical "containing object" for a
18964 bit-field. Thus, when computing the byte offset value for the
18965 start of the "containing object" of a bit-field, we must deduce
18966 this information on our own. This can be rather tricky to do in
18967 some cases. For example, handling the following structure type
18968 definition when compiling for an i386/i486 target (which only
18969 aligns long long's to 32-bit boundaries) can be very tricky:
18970
18971 struct S { int field1; long long field2:31; };
18972
18973 Fortunately, there is a simple rule-of-thumb which can be used
18974 in such cases. When compiling for an i386/i486, GCC will
18975 allocate 8 bytes for the structure shown above. It decides to
18976 do this based upon one simple rule for bit-field allocation.
18977 GCC allocates each "containing object" for each bit-field at
18978 the first (i.e. lowest addressed) legitimate alignment boundary
18979 (based upon the required minimum alignment for the declared
18980 type of the field) which it can possibly use, subject to the
18981 condition that there is still enough available space remaining
18982 in the containing object (when allocated at the selected point)
18983 to fully accommodate all of the bits of the bit-field itself.
18984
18985 This simple rule makes it obvious why GCC allocates 8 bytes for
18986 each object of the structure type shown above. When looking
18987 for a place to allocate the "containing object" for `field2',
18988 the compiler simply tries to allocate a 64-bit "containing
18989 object" at each successive 32-bit boundary (starting at zero)
18990 until it finds a place to allocate that 64-bit field such that
18991 at least 31 contiguous (and previously unallocated) bits remain
18992 within that selected 64 bit field. (As it turns out, for the
18993 example above, the compiler finds it is OK to allocate the
18994 "containing object" 64-bit field at bit-offset zero within the
18995 structure type.)
18996
18997 Here we attempt to work backwards from the limited set of facts
18998 we're given, and we try to deduce from those facts, where GCC
18999 must have believed that the containing object started (within
19000 the structure type). The value we deduce is then used (by the
19001 callers of this routine) to generate DW_AT_location and
19002 DW_AT_bit_offset attributes for fields (both bit-fields and, in
19003 the case of DW_AT_location, regular fields as well). */
19004
19005 /* Figure out the bit-distance from the start of the structure to
19006 the "deepest" bit of the bit-field. */
19007 deepest_bitpos = bitpos_int + field_size_in_bits;
19008
19009 /* This is the tricky part. Use some fancy footwork to deduce
19010 where the lowest addressed bit of the containing object must
19011 be. */
19012 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19013
19014 /* Round up to type_align by default. This works best for
19015 bitfields. */
19016 object_offset_in_bits
19017 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
19018
19019 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
19020 {
19021 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19022
19023 /* Round up to decl_align instead. */
19024 object_offset_in_bits
19025 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
19026 }
19027
19028 object_offset_in_bytes
19029 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
19030 if (ctx->variant_part_offset == NULL_TREE)
19031 {
19032 *cst_offset = object_offset_in_bytes.to_shwi ();
19033 return NULL;
19034 }
19035 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
19036 }
19037 else
19038 #endif /* PCC_BITFIELD_TYPE_MATTERS */
19039 tree_result = byte_position (decl);
19040
19041 if (ctx->variant_part_offset != NULL_TREE)
19042 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19043 ctx->variant_part_offset, tree_result);
19044
19045 /* If the byte offset is a constant, it's simpler to handle a native
19046 constant rather than a DWARF expression. */
19047 if (TREE_CODE (tree_result) == INTEGER_CST)
19048 {
19049 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19050 return NULL;
19051 }
19052 struct loc_descr_context loc_ctx = {
19053 ctx->struct_type, /* context_type */
19054 NULL_TREE, /* base_decl */
19055 NULL, /* dpi */
19056 false, /* placeholder_arg */
19057 false /* placeholder_seen */
19058 };
19059 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19060
19061 /* We want a DWARF expression: give up if we only have a location list
19062 with multiple elements. */
19063 if (!loc_result || !single_element_loc_list_p (loc_result))
19064 return NULL;
19065 else
19066 return loc_result->expr;
19067 }
19068 \f
19069 /* The following routines define various Dwarf attributes and any data
19070 associated with them. */
19071
19072 /* Add a location description attribute value to a DIE.
19073
19074 This emits location attributes suitable for whole variables and
19075 whole parameters. Note that the location attributes for struct fields are
19076 generated by the routine `data_member_location_attribute' below. */
19077
19078 static inline void
19079 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19080 dw_loc_list_ref descr)
19081 {
19082 bool check_no_locviews = true;
19083 if (descr == 0)
19084 return;
19085 if (single_element_loc_list_p (descr))
19086 add_AT_loc (die, attr_kind, descr->expr);
19087 else
19088 {
19089 add_AT_loc_list (die, attr_kind, descr);
19090 gcc_assert (descr->ll_symbol);
19091 if (attr_kind == DW_AT_location && descr->vl_symbol
19092 && dwarf2out_locviews_in_attribute ())
19093 {
19094 add_AT_view_list (die, DW_AT_GNU_locviews);
19095 check_no_locviews = false;
19096 }
19097 }
19098
19099 if (check_no_locviews)
19100 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19101 }
19102
19103 /* Add DW_AT_accessibility attribute to DIE if needed. */
19104
19105 static void
19106 add_accessibility_attribute (dw_die_ref die, tree decl)
19107 {
19108 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19109 children, otherwise the default is DW_ACCESS_public. In DWARF2
19110 the default has always been DW_ACCESS_public. */
19111 if (TREE_PROTECTED (decl))
19112 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19113 else if (TREE_PRIVATE (decl))
19114 {
19115 if (dwarf_version == 2
19116 || die->die_parent == NULL
19117 || die->die_parent->die_tag != DW_TAG_class_type)
19118 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19119 }
19120 else if (dwarf_version > 2
19121 && die->die_parent
19122 && die->die_parent->die_tag == DW_TAG_class_type)
19123 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19124 }
19125
19126 /* Attach the specialized form of location attribute used for data members of
19127 struct and union types. In the special case of a FIELD_DECL node which
19128 represents a bit-field, the "offset" part of this special location
19129 descriptor must indicate the distance in bytes from the lowest-addressed
19130 byte of the containing struct or union type to the lowest-addressed byte of
19131 the "containing object" for the bit-field. (See the `field_byte_offset'
19132 function above).
19133
19134 For any given bit-field, the "containing object" is a hypothetical object
19135 (of some integral or enum type) within which the given bit-field lives. The
19136 type of this hypothetical "containing object" is always the same as the
19137 declared type of the individual bit-field itself (for GCC anyway... the
19138 DWARF spec doesn't actually mandate this). Note that it is the size (in
19139 bytes) of the hypothetical "containing object" which will be given in the
19140 DW_AT_byte_size attribute for this bit-field. (See the
19141 `byte_size_attribute' function below.) It is also used when calculating the
19142 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
19143 function below.)
19144
19145 CTX is required: see the comment for VLR_CONTEXT. */
19146
19147 static void
19148 add_data_member_location_attribute (dw_die_ref die,
19149 tree decl,
19150 struct vlr_context *ctx)
19151 {
19152 HOST_WIDE_INT offset;
19153 dw_loc_descr_ref loc_descr = 0;
19154
19155 if (TREE_CODE (decl) == TREE_BINFO)
19156 {
19157 /* We're working on the TAG_inheritance for a base class. */
19158 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19159 {
19160 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19161 aren't at a fixed offset from all (sub)objects of the same
19162 type. We need to extract the appropriate offset from our
19163 vtable. The following dwarf expression means
19164
19165 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19166
19167 This is specific to the V3 ABI, of course. */
19168
19169 dw_loc_descr_ref tmp;
19170
19171 /* Make a copy of the object address. */
19172 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19173 add_loc_descr (&loc_descr, tmp);
19174
19175 /* Extract the vtable address. */
19176 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19177 add_loc_descr (&loc_descr, tmp);
19178
19179 /* Calculate the address of the offset. */
19180 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19181 gcc_assert (offset < 0);
19182
19183 tmp = int_loc_descriptor (-offset);
19184 add_loc_descr (&loc_descr, tmp);
19185 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19186 add_loc_descr (&loc_descr, tmp);
19187
19188 /* Extract the offset. */
19189 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19190 add_loc_descr (&loc_descr, tmp);
19191
19192 /* Add it to the object address. */
19193 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19194 add_loc_descr (&loc_descr, tmp);
19195 }
19196 else
19197 offset = tree_to_shwi (BINFO_OFFSET (decl));
19198 }
19199 else
19200 {
19201 loc_descr = field_byte_offset (decl, ctx, &offset);
19202
19203 /* If loc_descr is available then we know the field offset is dynamic.
19204 However, GDB does not handle dynamic field offsets very well at the
19205 moment. */
19206 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19207 {
19208 loc_descr = NULL;
19209 offset = 0;
19210 }
19211
19212 /* Data member location evaluation starts with the base address on the
19213 stack. Compute the field offset and add it to this base address. */
19214 else if (loc_descr != NULL)
19215 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19216 }
19217
19218 if (! loc_descr)
19219 {
19220 /* While DW_AT_data_bit_offset was already added in DWARF4, GDB for
19221 example only added support for it in November 2016. For DWARF5 we
19222 need newer debug info consumers anyway. We might change this to
19223 dwarf_version >= 4 once most consumers have caught up. */
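      /* For illustration: on a little-endian target, given
         struct { int a : 3; int b : 5; }, the DIE for B would typically
         get DW_AT_data_bit_offset 3 here instead of the DWARF2-style
         DW_AT_byte_size / DW_AT_bit_offset pair.  */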
19224 if (dwarf_version >= 5
19225 && TREE_CODE (decl) == FIELD_DECL
19226 && DECL_BIT_FIELD_TYPE (decl))
19227 {
19228 tree off = bit_position (decl);
19229 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19230 {
19231 remove_AT (die, DW_AT_byte_size);
19232 remove_AT (die, DW_AT_bit_offset);
19233 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19234 return;
19235 }
19236 }
19237 if (dwarf_version > 2)
19238 {
19239 /* Don't need to output a location expression, just the constant. */
19240 if (offset < 0)
19241 add_AT_int (die, DW_AT_data_member_location, offset);
19242 else
19243 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19244 return;
19245 }
19246 else
19247 {
19248 enum dwarf_location_atom op;
19249
19250 /* The DWARF2 standard says that we should assume that the structure
19251 address is already on the stack, so we can specify a structure
19252 field address by using DW_OP_plus_uconst. */
19253 op = DW_OP_plus_uconst;
19254 loc_descr = new_loc_descr (op, offset, 0);
19255 }
19256 }
19257
19258 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19259 }
19260
19261 /* Writes integer values to dw_vec_const array. */
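   The least significant byte is stored first; e.g. insert_int (0x1234, 2, dest)
   yields dest[0] == 0x34 and dest[1] == 0x12.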
19262
19263 static void
19264 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19265 {
19266 while (size != 0)
19267 {
19268 *dest++ = val & 0xff;
19269 val >>= 8;
19270 --size;
19271 }
19272 }
19273
19274 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19275
19276 static HOST_WIDE_INT
19277 extract_int (const unsigned char *src, unsigned int size)
19278 {
19279 HOST_WIDE_INT val = 0;
19280
19281 src += size;
19282 while (size != 0)
19283 {
19284 val <<= 8;
19285 val |= *--src & 0xff;
19286 --size;
19287 }
19288 return val;
19289 }
19290
19291 /* Writes wide_int values to dw_vec_const array. */
19292
19293 static void
19294 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19295 {
19296 int i;
19297
19298 if (elt_size <= HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
19299 {
19300 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19301 return;
19302 }
19303
19304 /* We'd have to extend this code to support odd sizes. */
19305 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19306
19307 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19308
19309 if (WORDS_BIG_ENDIAN)
19310 for (i = n - 1; i >= 0; i--)
19311 {
19312 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19313 dest += sizeof (HOST_WIDE_INT);
19314 }
19315 else
19316 for (i = 0; i < n; i++)
19317 {
19318 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19319 dest += sizeof (HOST_WIDE_INT);
19320 }
19321 }
19322
19323 /* Writes floating point values to dw_vec_const array. */
19324
19325 static void
19326 insert_float (const_rtx rtl, unsigned char *array)
19327 {
19328 long val[4];
19329 int i;
19330 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19331
19332 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19333
19334 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19335 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19336 {
19337 insert_int (val[i], 4, array);
19338 array += 4;
19339 }
19340 }
19341
19342 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19343 does not have a "location" either in memory or in a register. These
19344 things can arise in GNU C when a constant is passed as an actual parameter
19345 to an inlined function. They can also arise in C++ where declared
19346 constants do not necessarily get memory "homes". */
19347
19348 static bool
19349 add_const_value_attribute (dw_die_ref die, rtx rtl)
19350 {
19351 switch (GET_CODE (rtl))
19352 {
19353 case CONST_INT:
19354 {
19355 HOST_WIDE_INT val = INTVAL (rtl);
19356
19357 if (val < 0)
19358 add_AT_int (die, DW_AT_const_value, val);
19359 else
19360 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19361 }
19362 return true;
19363
19364 case CONST_WIDE_INT:
19365 {
19366 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19367 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19368 (unsigned int) CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19369 wide_int w = wi::zext (w1, prec);
19370 add_AT_wide (die, DW_AT_const_value, w);
19371 }
19372 return true;
19373
19374 case CONST_DOUBLE:
19375 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19376 floating-point constant. A CONST_DOUBLE is used whenever the
19377 constant requires more than one word in order to be adequately
19378 represented. */
19379 if (TARGET_SUPPORTS_WIDE_INT == 0
19380 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19381 add_AT_double (die, DW_AT_const_value,
19382 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19383 else
19384 {
19385 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19386 unsigned int length = GET_MODE_SIZE (mode);
19387 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19388
19389 insert_float (rtl, array);
19390 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19391 }
19392 return true;
19393
19394 case CONST_VECTOR:
19395 {
19396 unsigned int length;
19397 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19398 return false;
19399
19400 machine_mode mode = GET_MODE (rtl);
19401 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19402 unsigned char *array
19403 = ggc_vec_alloc<unsigned char> (length * elt_size);
19404 unsigned int i;
19405 unsigned char *p;
19406 machine_mode imode = GET_MODE_INNER (mode);
19407
19408 switch (GET_MODE_CLASS (mode))
19409 {
19410 case MODE_VECTOR_INT:
19411 for (i = 0, p = array; i < length; i++, p += elt_size)
19412 {
19413 rtx elt = CONST_VECTOR_ELT (rtl, i);
19414 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19415 }
19416 break;
19417
19418 case MODE_VECTOR_FLOAT:
19419 for (i = 0, p = array; i < length; i++, p += elt_size)
19420 {
19421 rtx elt = CONST_VECTOR_ELT (rtl, i);
19422 insert_float (elt, p);
19423 }
19424 break;
19425
19426 default:
19427 gcc_unreachable ();
19428 }
19429
19430 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19431 }
19432 return true;
19433
19434 case CONST_STRING:
19435 if (dwarf_version >= 4 || !dwarf_strict)
19436 {
19437 dw_loc_descr_ref loc_result;
19438 resolve_one_addr (&rtl);
19439 rtl_addr:
19440 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19441 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19442 add_AT_loc (die, DW_AT_location, loc_result);
19443 vec_safe_push (used_rtx_array, rtl);
19444 return true;
19445 }
19446 return false;
19447
19448 case CONST:
19449 if (CONSTANT_P (XEXP (rtl, 0)))
19450 return add_const_value_attribute (die, XEXP (rtl, 0));
19451 /* FALLTHROUGH */
19452 case SYMBOL_REF:
19453 if (!const_ok_for_output (rtl))
19454 return false;
19455 /* FALLTHROUGH */
19456 case LABEL_REF:
19457 if (dwarf_version >= 4 || !dwarf_strict)
19458 goto rtl_addr;
19459 return false;
19460
19461 case PLUS:
19462 /* In cases where an inlined instance of an inline function is passed
19463 the address of an `auto' variable (which is local to the caller) we
19464 can get a situation where the DECL_RTL of the artificial local
19465 variable (for the inlining) which acts as a stand-in for the
19466 corresponding formal parameter (of the inline function) will look
19467 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19468 exactly a compile-time constant expression, but it isn't the address
19469 of the (artificial) local variable either. Rather, it represents the
19470 *value* which the artificial local variable always has during its
19471 lifetime. We currently have no way to represent such quasi-constant
19472 values in Dwarf, so for now we just punt and generate nothing. */
19473 return false;
19474
19475 case HIGH:
19476 case CONST_FIXED:
19477 return false;
19478
19479 case MEM:
19480 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19481 && MEM_READONLY_P (rtl)
19482 && GET_MODE (rtl) == BLKmode)
19483 {
19484 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19485 return true;
19486 }
19487 return false;
19488
19489 default:
19490 /* No other kinds of rtx should be possible here. */
19491 gcc_unreachable ();
19492 }
19493 return false;
19494 }
19495
19496 /* Determine whether the evaluation of EXPR references any variables
19497 or functions which aren't otherwise used (and therefore may not be
19498 output). */
19499 static tree
19500 reference_to_unused (tree * tp, int * walk_subtrees,
19501 void * data ATTRIBUTE_UNUSED)
19502 {
19503 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19504 *walk_subtrees = 0;
19505
19506 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19507 && ! TREE_ASM_WRITTEN (*tp))
19508 return *tp;
19509 /* ??? The C++ FE emits debug information for using decls, so
19510 putting gcc_unreachable here falls over. See PR31899. For now
19511 be conservative. */
19512 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
19513 return *tp;
19514 else if (VAR_P (*tp))
19515 {
19516 varpool_node *node = varpool_node::get (*tp);
19517 if (!node || !node->definition)
19518 return *tp;
19519 }
19520 else if (TREE_CODE (*tp) == FUNCTION_DECL
19521 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19522 {
19523 /* The call graph machinery must have finished analyzing,
19524 optimizing and gimplifying the CU by now.
19525 So if *TP has no call graph node associated
19526 to it, it means *TP will not be emitted. */
19527 if (!cgraph_node::get (*tp))
19528 return *tp;
19529 }
19530 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19531 return *tp;
19532
19533 return NULL_TREE;
19534 }
19535
19536 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19537 for use in a later add_const_value_attribute call. */
19538
19539 static rtx
19540 rtl_for_decl_init (tree init, tree type)
19541 {
19542 rtx rtl = NULL_RTX;
19543
19544 STRIP_NOPS (init);
19545
19546 /* If a variable is initialized with a string constant without embedded
19547 zeros, build CONST_STRING. */
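  /* For illustration: something like 'static const char msg[6] = "hello";'
     would be represented as a read-only BLKmode MEM wrapping a CONST_STRING,
     provided the string exactly fills the array and contains no embedded
     zeros.  */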
19548 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19549 {
19550 tree enttype = TREE_TYPE (type);
19551 tree domain = TYPE_DOMAIN (type);
19552 scalar_int_mode mode;
19553
19554 if (is_int_mode (TYPE_MODE (enttype), &mode)
19555 && GET_MODE_SIZE (mode) == 1
19556 && domain
19557 && TYPE_MAX_VALUE (domain)
19558 && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
19559 && integer_zerop (TYPE_MIN_VALUE (domain))
19560 && compare_tree_int (TYPE_MAX_VALUE (domain),
19561 TREE_STRING_LENGTH (init) - 1) == 0
19562 && ((size_t) TREE_STRING_LENGTH (init)
19563 == strlen (TREE_STRING_POINTER (init)) + 1))
19564 {
19565 rtl = gen_rtx_CONST_STRING (VOIDmode,
19566 ggc_strdup (TREE_STRING_POINTER (init)));
19567 rtl = gen_rtx_MEM (BLKmode, rtl);
19568 MEM_READONLY_P (rtl) = 1;
19569 }
19570 }
19571 /* Other aggregates, and complex values, could be represented using
19572 CONCAT: FIXME! */
19573 else if (AGGREGATE_TYPE_P (type)
19574 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19575 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19576 || TREE_CODE (type) == COMPLEX_TYPE)
19577 ;
19578 /* Vectors only work if their mode is supported by the target.
19579 FIXME: generic vectors ought to work too. */
19580 else if (TREE_CODE (type) == VECTOR_TYPE
19581 && !VECTOR_MODE_P (TYPE_MODE (type)))
19582 ;
19583 /* If the initializer is something that we know will expand into an
19584 immediate RTL constant, expand it now. We must be careful not to
19585 reference variables which won't be output. */
19586 else if (initializer_constant_valid_p (init, type)
19587 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19588 {
19589 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19590 possible. */
19591 if (TREE_CODE (type) == VECTOR_TYPE)
19592 switch (TREE_CODE (init))
19593 {
19594 case VECTOR_CST:
19595 break;
19596 case CONSTRUCTOR:
19597 if (TREE_CONSTANT (init))
19598 {
19599 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19600 bool constant_p = true;
19601 tree value;
19602 unsigned HOST_WIDE_INT ix;
19603
19604 /* Even when ctor is constant, it might contain non-*_CST
19605 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19606 belong into VECTOR_CST nodes. */
19607 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19608 if (!CONSTANT_CLASS_P (value))
19609 {
19610 constant_p = false;
19611 break;
19612 }
19613
19614 if (constant_p)
19615 {
19616 init = build_vector_from_ctor (type, elts);
19617 break;
19618 }
19619 }
19620 /* FALLTHRU */
19621
19622 default:
19623 return NULL;
19624 }
19625
19626 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19627
19628 /* If expand_expr returns a MEM, it wasn't immediate. */
19629 gcc_assert (!rtl || !MEM_P (rtl));
19630 }
19631
19632 return rtl;
19633 }
19634
19635 /* Generate RTL for the variable DECL to represent its location. */
19636
19637 static rtx
19638 rtl_for_decl_location (tree decl)
19639 {
19640 rtx rtl;
19641
19642 /* Here we have to decide where we are going to say the parameter "lives"
19643 (as far as the debugger is concerned). We only have a couple of
19644 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19645
19646 DECL_RTL normally indicates where the parameter lives during most of the
19647 activation of the function. If optimization is enabled however, this
19648 could be either NULL or else a pseudo-reg. Both of those cases indicate
19649 that the parameter doesn't really live anywhere (as far as the code
19650 generation parts of GCC are concerned) during most of the function's
19651 activation. That will happen (for example) if the parameter is never
19652 referenced within the function.
19653
19654 We could just generate a location descriptor here for all non-NULL
19655 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19656 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19657 where DECL_RTL is NULL or is a pseudo-reg.
19658
19659 Note however that we can only get away with using DECL_INCOMING_RTL as
19660 a backup substitute for DECL_RTL in certain limited cases. In cases
19661 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19662 we can be sure that the parameter was passed using the same type as it is
19663 declared to have within the function, and that its DECL_INCOMING_RTL
19664 points us to a place where a value of that type is passed.
19665
19666 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19667 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19668 because in these cases DECL_INCOMING_RTL points us to a value of some
19669 type which is *different* from the type of the parameter itself. Thus,
19670 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19671 such cases, the debugger would end up (for example) trying to fetch a
19672 `float' from a place which actually contains the first part of a
19673 `double'. That would lead to really incorrect and confusing
19674 output at debug-time.
19675
19676 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19677 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19678 are a couple of exceptions however. On little-endian machines we can
19679 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19680 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19681 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19682 when (on a little-endian machine) a non-prototyped function has a
19683 parameter declared to be of type `short' or `char'. In such cases,
19684 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19685 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19686 passed `int' value. If the debugger then uses that address to fetch
19687 a `short' or a `char' (on a little-endian machine) the result will be
19688 the correct data, so we allow for such exceptional cases below.
19689
19690 Note that our goal here is to describe the place where the given formal
19691 parameter lives during most of the function's activation (i.e. between the
19692 end of the prologue and the start of the epilogue). We'll do that as best
19693 as we can. Note however that if the given formal parameter is modified
19694 sometime during the execution of the function, then a stack backtrace (at
19695 debug-time) will show the function as having been called with the *new*
19696 value rather than the value which was originally passed in. This happens
19697 rarely enough that it is not a major problem, but it *is* a problem, and
19698 I'd like to fix it.
19699
19700 A future version of dwarf2out.c may generate two additional attributes for
19701 any given DW_TAG_formal_parameter DIE which will describe the "passed
19702 type" and the "passed location" for the given formal parameter in addition
19703 to the attributes we now generate to indicate the "declared type" and the
19704 "active location" for each parameter. This additional set of attributes
19705 could be used by debuggers for stack backtraces. Separately, note that
19706 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19707 This happens (for example) for inlined-instances of inline function formal
19708 parameters which are never referenced. This really shouldn't be
19709 happening. All PARM_DECL nodes should get valid non-NULL
19710 DECL_INCOMING_RTL values. FIXME. */
19711
19712 /* Use DECL_RTL as the "location" unless we find something better. */
19713 rtl = DECL_RTL_IF_SET (decl);
19714
19715 /* When generating abstract instances, ignore everything except
19716 constants, symbols living in memory, and symbols living in
19717 fixed registers. */
19718 if (! reload_completed)
19719 {
19720 if (rtl
19721 && (CONSTANT_P (rtl)
19722 || (MEM_P (rtl)
19723 && CONSTANT_P (XEXP (rtl, 0)))
19724 || (REG_P (rtl)
19725 && VAR_P (decl)
19726 && TREE_STATIC (decl))))
19727 {
19728 rtl = targetm.delegitimize_address (rtl);
19729 return rtl;
19730 }
19731 rtl = NULL_RTX;
19732 }
19733 else if (TREE_CODE (decl) == PARM_DECL)
19734 {
19735 if (rtl == NULL_RTX
19736 || is_pseudo_reg (rtl)
19737 || (MEM_P (rtl)
19738 && is_pseudo_reg (XEXP (rtl, 0))
19739 && DECL_INCOMING_RTL (decl)
19740 && MEM_P (DECL_INCOMING_RTL (decl))
19741 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19742 {
19743 tree declared_type = TREE_TYPE (decl);
19744 tree passed_type = DECL_ARG_TYPE (decl);
19745 machine_mode dmode = TYPE_MODE (declared_type);
19746 machine_mode pmode = TYPE_MODE (passed_type);
19747
19748 /* This decl represents a formal parameter which was optimized out.
19749 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19750 all cases where (rtl == NULL_RTX) just below. */
19751 if (dmode == pmode)
19752 rtl = DECL_INCOMING_RTL (decl);
19753 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19754 && SCALAR_INT_MODE_P (dmode)
19755 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
19756 && DECL_INCOMING_RTL (decl))
19757 {
19758 rtx inc = DECL_INCOMING_RTL (decl);
19759 if (REG_P (inc))
19760 rtl = inc;
19761 else if (MEM_P (inc))
19762 {
19763 if (BYTES_BIG_ENDIAN)
19764 rtl = adjust_address_nv (inc, dmode,
19765 GET_MODE_SIZE (pmode)
19766 - GET_MODE_SIZE (dmode));
19767 else
19768 rtl = inc;
19769 }
19770 }
19771 }
19772
19773 /* If the parm was passed in registers, but lives on the stack, then
19774 make a big endian correction if the mode of the type of the
19775 parameter is not the same as the mode of the rtl. */
19776 /* ??? This is the same series of checks that are made in dbxout.c before
19777 we reach the big endian correction code there. It isn't clear if all
19778 of these checks are necessary here, but keeping them all is the safe
19779 thing to do. */
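      /* For illustration: assuming UNITS_PER_WORD == 4 on a big-endian
         target, a 'char' parameter living in a word-sized stack slot would
         be addressed at the slot address plus 3, i.e. UNITS_PER_WORD
         - GET_MODE_SIZE (QImode).  */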
19780 else if (MEM_P (rtl)
19781 && XEXP (rtl, 0) != const0_rtx
19782 && ! CONSTANT_P (XEXP (rtl, 0))
19783 /* Not passed in memory. */
19784 && !MEM_P (DECL_INCOMING_RTL (decl))
19785 /* Not passed by invisible reference. */
19786 && (!REG_P (XEXP (rtl, 0))
19787 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
19788 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
19789 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
19790 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
19791 #endif
19792 )
19793 /* Big endian correction check. */
19794 && BYTES_BIG_ENDIAN
19795 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
19796 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
19797 UNITS_PER_WORD))
19798 {
19799 machine_mode addr_mode = get_address_mode (rtl);
19800 poly_int64 offset = (UNITS_PER_WORD
19801 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
19802
19803 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19804 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19805 }
19806 }
19807 else if (VAR_P (decl)
19808 && rtl
19809 && MEM_P (rtl)
19810 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
19811 {
19812 machine_mode addr_mode = get_address_mode (rtl);
19813 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
19814 GET_MODE (rtl));
19815
19816 /* If a variable is declared "register" yet is smaller than
19817 a register, then if we store the variable to memory, it
19818 looks like we're storing a register-sized value, when in
19819 fact we are not. We need to adjust the offset of the
19820 storage location to reflect the actual value's bytes,
19821 else gdb will not be able to display it. */
19822 if (maybe_ne (offset, 0))
19823 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19824 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19825 }
19826
19827 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
19828 and will have been substituted directly into all expressions that use it.
19829 C does not have such a concept, but C++ and other languages do. */
19830 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
19831 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
19832
19833 if (rtl)
19834 rtl = targetm.delegitimize_address (rtl);
19835
19836 /* If we don't look past the constant pool, we risk emitting a
19837 reference to a constant pool entry that isn't referenced from
19838 code, and thus is not emitted. */
19839 if (rtl)
19840 rtl = avoid_constant_pool_reference (rtl);
19841
19842 /* Try harder to get a rtl. If this symbol ends up not being emitted
19843 in the current CU, resolve_addr will remove the expression referencing
19844 it. */
19845 if (rtl == NULL_RTX
19846 && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
19847 && VAR_P (decl)
19848 && !DECL_EXTERNAL (decl)
19849 && TREE_STATIC (decl)
19850 && DECL_NAME (decl)
19851 && !DECL_HARD_REGISTER (decl)
19852 && DECL_MODE (decl) != VOIDmode)
19853 {
19854 rtl = make_decl_rtl_for_debug (decl);
19855 if (!MEM_P (rtl)
19856 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
19857 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
19858 rtl = NULL_RTX;
19859 }
19860
19861 return rtl;
19862 }
19863
19864 /* Check whether DECL is a Fortran COMMON symbol. If not, NULL_TREE is
19865 returned. If so, the decl for the COMMON block is returned, and
19866 *VALUE is set to the offset of the symbol into the common block. */
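   For illustration: for a Fortran 'COMMON /blk/ i, r', each member's
   DECL_VALUE_EXPR is a COMPONENT_REF into the public VAR_DECL that
   represents the whole block, so for the second member this would
   typically return that VAR_DECL and set *VALUE to the member's byte
   offset within the block.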
19867
19868 static tree
19869 fortran_common (tree decl, HOST_WIDE_INT *value)
19870 {
19871 tree val_expr, cvar;
19872 machine_mode mode;
19873 poly_int64 bitsize, bitpos;
19874 tree offset;
19875 HOST_WIDE_INT cbitpos;
19876 int unsignedp, reversep, volatilep = 0;
19877
19878 /* If the decl isn't a VAR_DECL, or if it isn't static, or if it
19879 does not have a value (the offset into the common area), or if we
19880 aren't compiling Fortran, then it isn't a COMMON symbol and shouldn't
19881 be handled as such. */
19882 if (!VAR_P (decl)
19883 || !TREE_STATIC (decl)
19884 || !DECL_HAS_VALUE_EXPR_P (decl)
19885 || !is_fortran ())
19886 return NULL_TREE;
19887
19888 val_expr = DECL_VALUE_EXPR (decl);
19889 if (TREE_CODE (val_expr) != COMPONENT_REF)
19890 return NULL_TREE;
19891
19892 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
19893 &unsignedp, &reversep, &volatilep);
19894
19895 if (cvar == NULL_TREE
19896 || !VAR_P (cvar)
19897 || DECL_ARTIFICIAL (cvar)
19898 || !TREE_PUBLIC (cvar)
19899 /* We don't expect to have to cope with variable offsets,
19900 since at present all static data must have a constant size. */
19901 || !bitpos.is_constant (&cbitpos))
19902 return NULL_TREE;
19903
19904 *value = 0;
19905 if (offset != NULL)
19906 {
19907 if (!tree_fits_shwi_p (offset))
19908 return NULL_TREE;
19909 *value = tree_to_shwi (offset);
19910 }
19911 if (cbitpos != 0)
19912 *value += cbitpos / BITS_PER_UNIT;
19913
19914 return cvar;
19915 }
19916
19917 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
19918 data attribute for a variable or a parameter. We generate the
19919 DW_AT_const_value attribute only in those cases where the given variable
19920 or parameter does not have a true "location" either in memory or in a
19921 register. This can happen (for example) when a constant is passed as an
19922 actual argument in a call to an inline function. (It's possible that
19923 these things can crop up in other ways also.) Note that one type of
19924 constant value which can be passed into an inlined function is a constant
19925 pointer. This can happen for example if an actual argument in an inlined
19926 function call evaluates to a compile-time constant address.
19927
19928 CACHE_P is true if it is worth caching the location list for DECL,
19929 so that future calls can reuse it rather than regenerate it from scratch.
19930 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
19931 since we will need to refer to them each time the function is inlined. */
19932
19933 static bool
19934 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
19935 {
19936 rtx rtl;
19937 dw_loc_list_ref list;
19938 var_loc_list *loc_list;
19939 cached_dw_loc_list *cache;
19940
19941 if (early_dwarf)
19942 return false;
19943
19944 if (TREE_CODE (decl) == ERROR_MARK)
19945 return false;
19946
19947 if (get_AT (die, DW_AT_location)
19948 || get_AT (die, DW_AT_const_value))
19949 return true;
19950
19951 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
19952 || TREE_CODE (decl) == RESULT_DECL);
19953
19954 /* Try to get some constant RTL for this decl, and use that as the value of
19955 the location. */
19956
19957 rtl = rtl_for_decl_location (decl);
19958 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
19959 && add_const_value_attribute (die, rtl))
19960 return true;
19961
19962 /* See if we have a single element location list that is equivalent to
19963 a constant value. In that case it is better to use
19964 add_const_value_attribute rather than expanding its constant value. */
19965 loc_list = lookup_decl_loc (decl);
19966 if (loc_list
19967 && loc_list->first
19968 && loc_list->first->next == NULL
19969 && NOTE_P (loc_list->first->loc)
19970 && NOTE_VAR_LOCATION (loc_list->first->loc)
19971 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
19972 {
19973 struct var_loc_node *node;
19974
19975 node = loc_list->first;
19976 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
19977 if (GET_CODE (rtl) == EXPR_LIST)
19978 rtl = XEXP (rtl, 0);
19979 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
19980 && add_const_value_attribute (die, rtl))
19981 return true;
19982 }
19983 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
19984 list several times. See if we've already cached the contents. */
19985 list = NULL;
19986 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
19987 cache_p = false;
19988 if (cache_p)
19989 {
19990 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
19991 if (cache)
19992 list = cache->loc_list;
19993 }
19994 if (list == NULL)
19995 {
19996 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
19997 NULL);
19998 /* It is usually worth caching this result if the decl is from
19999 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
20000 if (cache_p && list && list->dw_loc_next)
20001 {
20002 cached_dw_loc_list **slot
20003 = cached_dw_loc_list_table->find_slot_with_hash (decl,
20004 DECL_UID (decl),
20005 INSERT);
20006 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
20007 cache->decl_id = DECL_UID (decl);
20008 cache->loc_list = list;
20009 *slot = cache;
20010 }
20011 }
20012 if (list)
20013 {
20014 add_AT_location_description (die, DW_AT_location, list);
20015 return true;
20016 }
20017 /* None of that worked, so it must not really have a location;
20018 try adding a constant value attribute from the DECL_INITIAL. */
20019 return tree_add_const_value_attribute_for_decl (die, decl);
20020 }
20021
20022 /* Helper function for tree_add_const_value_attribute. Natively encode
20023 initializer INIT into an array. Return true if successful. */
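   For illustration: encoding the initializer of
   'struct { char c; short s; } v = { 'a', 7 };' writes the same bytes the
   variable would have in target memory; the caller
   (tree_add_const_value_attribute) zero-allocates ARRAY, so padding bytes
   stay zero.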
20024
20025 static bool
20026 native_encode_initializer (tree init, unsigned char *array, int size)
20027 {
20028 tree type;
20029
20030 if (init == NULL_TREE)
20031 return false;
20032
20033 STRIP_NOPS (init);
20034 switch (TREE_CODE (init))
20035 {
20036 case STRING_CST:
20037 type = TREE_TYPE (init);
20038 if (TREE_CODE (type) == ARRAY_TYPE)
20039 {
20040 tree enttype = TREE_TYPE (type);
20041 scalar_int_mode mode;
20042
20043 if (!is_int_mode (TYPE_MODE (enttype), &mode)
20044 || GET_MODE_SIZE (mode) != 1)
20045 return false;
20046 if (int_size_in_bytes (type) != size)
20047 return false;
20048 if (size > TREE_STRING_LENGTH (init))
20049 {
20050 memcpy (array, TREE_STRING_POINTER (init),
20051 TREE_STRING_LENGTH (init));
20052 memset (array + TREE_STRING_LENGTH (init),
20053 '\0', size - TREE_STRING_LENGTH (init));
20054 }
20055 else
20056 memcpy (array, TREE_STRING_POINTER (init), size);
20057 return true;
20058 }
20059 return false;
20060 case CONSTRUCTOR:
20061 type = TREE_TYPE (init);
20062 if (int_size_in_bytes (type) != size)
20063 return false;
20064 if (TREE_CODE (type) == ARRAY_TYPE)
20065 {
20066 HOST_WIDE_INT min_index;
20067 unsigned HOST_WIDE_INT cnt;
20068 int curpos = 0, fieldsize;
20069 constructor_elt *ce;
20070
20071 if (TYPE_DOMAIN (type) == NULL_TREE
20072 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
20073 return false;
20074
20075 fieldsize = int_size_in_bytes (TREE_TYPE (type));
20076 if (fieldsize <= 0)
20077 return false;
20078
20079 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
20080 memset (array, '\0', size);
20081 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20082 {
20083 tree val = ce->value;
20084 tree index = ce->index;
20085 int pos = curpos;
20086 if (index && TREE_CODE (index) == RANGE_EXPR)
20087 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
20088 * fieldsize;
20089 else if (index)
20090 pos = (tree_to_shwi (index) - min_index) * fieldsize;
20091
20092 if (val)
20093 {
20094 STRIP_NOPS (val);
20095 if (!native_encode_initializer (val, array + pos, fieldsize))
20096 return false;
20097 }
20098 curpos = pos + fieldsize;
20099 if (index && TREE_CODE (index) == RANGE_EXPR)
20100 {
20101 int count = tree_to_shwi (TREE_OPERAND (index, 1))
20102 - tree_to_shwi (TREE_OPERAND (index, 0));
20103 while (count-- > 0)
20104 {
20105 if (val)
20106 memcpy (array + curpos, array + pos, fieldsize);
20107 curpos += fieldsize;
20108 }
20109 }
20110 gcc_assert (curpos <= size);
20111 }
20112 return true;
20113 }
20114 else if (TREE_CODE (type) == RECORD_TYPE
20115 || TREE_CODE (type) == UNION_TYPE)
20116 {
20117 tree field = NULL_TREE;
20118 unsigned HOST_WIDE_INT cnt;
20119 constructor_elt *ce;
20120
20121 if (int_size_in_bytes (type) != size)
20122 return false;
20123
20124 if (TREE_CODE (type) == RECORD_TYPE)
20125 field = TYPE_FIELDS (type);
20126
20127 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20128 {
20129 tree val = ce->value;
20130 int pos, fieldsize;
20131
20132 if (ce->index != 0)
20133 field = ce->index;
20134
20135 if (val)
20136 STRIP_NOPS (val);
20137
20138 if (field == NULL_TREE || DECL_BIT_FIELD (field))
20139 return false;
20140
20141 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
20142 && TYPE_DOMAIN (TREE_TYPE (field))
20143 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
20144 return false;
20145 else if (DECL_SIZE_UNIT (field) == NULL_TREE
20146 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
20147 return false;
20148 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
20149 pos = int_byte_position (field);
20150 gcc_assert (pos + fieldsize <= size);
20151 if (val && fieldsize != 0
20152 && !native_encode_initializer (val, array + pos, fieldsize))
20153 return false;
20154 }
20155 return true;
20156 }
20157 return false;
20158 case VIEW_CONVERT_EXPR:
20159 case NON_LVALUE_EXPR:
20160 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
20161 default:
20162 return native_encode_expr (init, array, size) == size;
20163 }
20164 }
20165
20166 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20167 attribute is the const value T. */
20168
20169 static bool
20170 tree_add_const_value_attribute (dw_die_ref die, tree t)
20171 {
20172 tree init;
20173 tree type = TREE_TYPE (t);
20174 rtx rtl;
20175
20176 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20177 return false;
20178
20179 init = t;
20180 gcc_assert (!DECL_P (init));
20181
20182 if (TREE_CODE (init) == INTEGER_CST)
20183 {
20184 if (tree_fits_uhwi_p (init))
20185 {
20186 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20187 return true;
20188 }
20189 if (tree_fits_shwi_p (init))
20190 {
20191 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20192 return true;
20193 }
20194 }
20195 if (! early_dwarf)
20196 {
20197 rtl = rtl_for_decl_init (init, type);
20198 if (rtl)
20199 return add_const_value_attribute (die, rtl);
20200 }
20201 /* If the host and target are sane, try harder. */
20202 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20203 && initializer_constant_valid_p (init, type))
20204 {
20205 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20206 if (size > 0 && (int) size == size)
20207 {
20208 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20209
20210 if (native_encode_initializer (init, array, size))
20211 {
20212 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20213 return true;
20214 }
20215 ggc_free (array);
20216 }
20217 }
20218 return false;
20219 }
20220
20221 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20222 attribute is the const value of T, where T is an integral constant
20223 variable with static storage duration
20224 (so it can't be a PARM_DECL or a RESULT_DECL). */
20225
20226 static bool
20227 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20228 {
20229
20230 if (!decl
20231 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20232 || (VAR_P (decl) && !TREE_STATIC (decl)))
20233 return false;
20234
20235 if (TREE_READONLY (decl)
20236 && ! TREE_THIS_VOLATILE (decl)
20237 && DECL_INITIAL (decl))
20238 /* OK */;
20239 else
20240 return false;
20241
20242 /* Don't add DW_AT_const_value if abstract origin already has one. */
20243 if (get_AT (var_die, DW_AT_const_value))
20244 return false;
20245
20246 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20247 }
20248
20249 /* Convert the CFI instructions for the current function into a
20250 location list. This is used for DW_AT_frame_base when we are targeting
20251 a dwarf2 consumer that does not support the dwarf3
20252 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20253 expressions. */
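   For illustration: for a typical prologue that first defines the CFA
   relative to the stack pointer and later switches to the frame pointer,
   the resulting list would have one entry per address range over which
   the CFA computation is unchanged, e.g. [dw_fde_begin, L1) -> SP-based
   expression and [L1, dw_fde_end) -> FP-based expression, each with
   OFFSET added.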
20254
20255 static dw_loc_list_ref
20256 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20257 {
20258 int ix;
20259 dw_fde_ref fde;
20260 dw_loc_list_ref list, *list_tail;
20261 dw_cfi_ref cfi;
20262 dw_cfa_location last_cfa, next_cfa;
20263 const char *start_label, *last_label, *section;
20264 dw_cfa_location remember;
20265
20266 fde = cfun->fde;
20267 gcc_assert (fde != NULL);
20268
20269 section = secname_for_decl (current_function_decl);
20270 list_tail = &list;
20271 list = NULL;
20272
20273 memset (&next_cfa, 0, sizeof (next_cfa));
20274 next_cfa.reg = INVALID_REGNUM;
20275 remember = next_cfa;
20276
20277 start_label = fde->dw_fde_begin;
20278
20279 /* ??? Bald assumption that the CIE opcode list does not contain
20280 advance opcodes. */
20281 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20282 lookup_cfa_1 (cfi, &next_cfa, &remember);
20283
20284 last_cfa = next_cfa;
20285 last_label = start_label;
20286
20287 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20288 {
20289 /* If the first partition contained no CFI adjustments, the
20290 CIE opcodes apply to the whole first partition. */
20291 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20292 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20293 list_tail = &(*list_tail)->dw_loc_next;
20294 start_label = last_label = fde->dw_fde_second_begin;
20295 }
20296
20297 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20298 {
20299 switch (cfi->dw_cfi_opc)
20300 {
20301 case DW_CFA_set_loc:
20302 case DW_CFA_advance_loc1:
20303 case DW_CFA_advance_loc2:
20304 case DW_CFA_advance_loc4:
20305 if (!cfa_equal_p (&last_cfa, &next_cfa))
20306 {
20307 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20308 start_label, 0, last_label, 0, section);
20309
20310 list_tail = &(*list_tail)->dw_loc_next;
20311 last_cfa = next_cfa;
20312 start_label = last_label;
20313 }
20314 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20315 break;
20316
20317 case DW_CFA_advance_loc:
20318 /* The encoding is complex enough that we should never emit this. */
20319 gcc_unreachable ();
20320
20321 default:
20322 lookup_cfa_1 (cfi, &next_cfa, &remember);
20323 break;
20324 }
20325 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20326 {
20327 if (!cfa_equal_p (&last_cfa, &next_cfa))
20328 {
20329 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20330 start_label, 0, last_label, 0, section);
20331
20332 list_tail = &(*list_tail)->dw_loc_next;
20333 last_cfa = next_cfa;
20334 start_label = last_label;
20335 }
20336 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20337 start_label, 0, fde->dw_fde_end, 0, section);
20338 list_tail = &(*list_tail)->dw_loc_next;
20339 start_label = last_label = fde->dw_fde_second_begin;
20340 }
20341 }
20342
20343 if (!cfa_equal_p (&last_cfa, &next_cfa))
20344 {
20345 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20346 start_label, 0, last_label, 0, section);
20347 list_tail = &(*list_tail)->dw_loc_next;
20348 start_label = last_label;
20349 }
20350
20351 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20352 start_label, 0,
20353 fde->dw_fde_second_begin
20354 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20355 section);
20356
20357 maybe_gen_llsym (list);
20358
20359 return list;
20360 }
20361
20362 /* Compute a displacement from the "steady-state frame pointer" to the
20363 frame base (often the same as the CFA), and store it in
20364 frame_pointer_fb_offset. OFFSET is added to the displacement
20365 before the latter is negated. */
20366
20367 static void
20368 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20369 {
20370 rtx reg, elim;
20371
20372 #ifdef FRAME_POINTER_CFA_OFFSET
20373 reg = frame_pointer_rtx;
20374 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20375 #else
20376 reg = arg_pointer_rtx;
20377 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20378 #endif
20379
20380 elim = (ira_use_lra_p
20381 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20382 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20383 elim = strip_offset_and_add (elim, &offset);
20384
20385 frame_pointer_fb_offset = -offset;
20386
20387 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20388 in which to eliminate. This is because its stack pointer isn't
20389 directly accessible as a register within the ISA. To work around
20390 this, assume that while we cannot provide a proper value for
20391 frame_pointer_fb_offset, we won't need one either. We can use
20392 hard frame pointer in debug info even if frame pointer isn't used
20393 since hard frame pointer in debug info is encoded with DW_OP_fbreg
20394 which uses the DW_AT_frame_base attribute, not hard frame pointer
20395 directly. */
20396 frame_pointer_fb_offset_valid
20397 = (elim == hard_frame_pointer_rtx || elim == stack_pointer_rtx);
20398 }
20399
20400 /* Generate a DW_AT_name attribute given some string value to be included as
20401 the value of the attribute. */
20402
20403 static void
20404 add_name_attribute (dw_die_ref die, const char *name_string)
20405 {
20406 if (name_string != NULL && *name_string != 0)
20407 {
20408 if (demangle_name_func)
20409 name_string = (*demangle_name_func) (name_string);
20410
20411 add_AT_string (die, DW_AT_name, name_string);
20412 }
20413 }
20414
20415 /* Generate a DW_AT_description attribute given some string value to be included
20416 as the value of the attribute. */
20417
20418 static void
20419 add_desc_attribute (dw_die_ref die, const char *name_string)
20420 {
20421 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20422 return;
20423
20424 if (name_string == NULL || *name_string == 0)
20425 return;
20426
20427 if (demangle_name_func)
20428 name_string = (*demangle_name_func) (name_string);
20429
20430 add_AT_string (die, DW_AT_description, name_string);
20431 }
20432
20433 /* Generate a DW_AT_description attribute given some decl to be included
20434 as the value of the attribute. */
20435
20436 static void
20437 add_desc_attribute (dw_die_ref die, tree decl)
20438 {
20439 tree decl_name;
20440
20441 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20442 return;
20443
20444 if (decl == NULL_TREE || !DECL_P (decl))
20445 return;
20446 decl_name = DECL_NAME (decl);
20447
20448 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
20449 {
20450 const char *name = dwarf2_name (decl, 0);
20451 add_desc_attribute (die, name ? name : IDENTIFIER_POINTER (decl_name));
20452 }
20453 else
20454 {
20455 char *desc = print_generic_expr_to_str (decl);
20456 add_desc_attribute (die, desc);
20457 free (desc);
20458 }
20459 }
20460
20461 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
20462 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20463 of TYPE accordingly.
20464
20465 ??? This is a temporary measure until after we're able to generate
20466 regular DWARF for the complex Ada type system. */
20467
20468 static void
20469 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20470 dw_die_ref context_die)
20471 {
20472 tree dtype;
20473 dw_die_ref dtype_die;
20474
20475 if (!lang_hooks.types.descriptive_type)
20476 return;
20477
20478 dtype = lang_hooks.types.descriptive_type (type);
20479 if (!dtype)
20480 return;
20481
20482 dtype_die = lookup_type_die (dtype);
20483 if (!dtype_die)
20484 {
20485 gen_type_die (dtype, context_die);
20486 dtype_die = lookup_type_die (dtype);
20487 gcc_assert (dtype_die);
20488 }
20489
20490 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20491 }
20492
20493 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
20494
20495 static const char *
20496 comp_dir_string (void)
20497 {
20498 const char *wd;
20499 char *wd1;
20500 static const char *cached_wd = NULL;
20501
20502 if (cached_wd != NULL)
20503 return cached_wd;
20504
20505 wd = get_src_pwd ();
20506 if (wd == NULL)
20507 return NULL;
20508
20509 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20510 {
20511 int wdlen;
20512
20513 wdlen = strlen (wd);
20514 wd1 = ggc_vec_alloc<char> (wdlen + 2);
20515 strcpy (wd1, wd);
20516 wd1 [wdlen] = DIR_SEPARATOR;
20517 wd1 [wdlen + 1] = 0;
20518 wd = wd1;
20519 }
20520
20521 cached_wd = remap_debug_filename (wd);
20522 return cached_wd;
20523 }
20524
20525 /* Generate a DW_AT_comp_dir attribute for DIE. */
20526
20527 static void
20528 add_comp_dir_attribute (dw_die_ref die)
20529 {
20530 const char * wd = comp_dir_string ();
20531 if (wd != NULL)
20532 add_AT_string (die, DW_AT_comp_dir, wd);
20533 }
20534
20535 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20536 pointer computation, ...), output a representation for that value according
20537 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20538 loc_list_from_tree for the meaning of CONTEXT. */
20539
20540 static void
20541 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20542 int forms, struct loc_descr_context *context)
20543 {
20544 dw_die_ref context_die, decl_die = NULL;
20545 dw_loc_list_ref list;
20546 bool strip_conversions = true;
20547 bool placeholder_seen = false;
20548
20549 while (strip_conversions)
20550 switch (TREE_CODE (value))
20551 {
20552 case ERROR_MARK:
20553 case SAVE_EXPR:
20554 return;
20555
20556 CASE_CONVERT:
20557 case VIEW_CONVERT_EXPR:
20558 value = TREE_OPERAND (value, 0);
20559 break;
20560
20561 default:
20562 strip_conversions = false;
20563 break;
20564 }
20565
20566 /* If possible and permitted, output the attribute as a constant. */
20567 if ((forms & dw_scalar_form_constant) != 0
20568 && TREE_CODE (value) == INTEGER_CST)
20569 {
20570 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20571
20572 /* If HOST_WIDE_INT is big enough then represent the bound as
20573 a constant value. We need to choose a form based on
20574 whether the type is signed or unsigned. We cannot just
20575 call add_AT_unsigned if the value itself is positive
20576 (add_AT_unsigned might add the unsigned value encoded as
20577 DW_FORM_data[1248]). Some DWARF consumers will look up the
20578 bounds type and then sign extend any unsigned values found
20579 for signed types. This is needed only for
20580 DW_AT_{lower,upper}_bound, since for most other attributes,
20581 consumers will treat DW_FORM_data[1248] as unsigned values,
20582 regardless of the underlying type. */
20583 if (prec <= HOST_BITS_PER_WIDE_INT
20584 || tree_fits_uhwi_p (value))
20585 {
20586 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20587 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20588 else
20589 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20590 }
20591 else
20592 /* Otherwise represent the bound as an unsigned value with
20593 the precision of its type. The precision and signedness
20594 of the type will be necessary to re-interpret it
20595 unambiguously. */
20596 add_AT_wide (die, attr, wi::to_wide (value));
20597 return;
20598 }
20599
20600 /* Otherwise, if it's possible and permitted too, output a reference to
20601 another DIE. */
20602 if ((forms & dw_scalar_form_reference) != 0)
20603 {
20604 tree decl = NULL_TREE;
20605
20606 /* Some type attributes reference an outer type. For instance, the upper
20607 bound of an array may reference an embedding record (this happens in
20608 Ada). */
20609 if (TREE_CODE (value) == COMPONENT_REF
20610 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20611 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20612 decl = TREE_OPERAND (value, 1);
20613
20614 else if (VAR_P (value)
20615 || TREE_CODE (value) == PARM_DECL
20616 || TREE_CODE (value) == RESULT_DECL)
20617 decl = value;
20618
20619 if (decl != NULL_TREE)
20620 {
20621 decl_die = lookup_decl_die (decl);
20622
20623 /* ??? Can this happen, or should the variable have been bound
20624 first? Probably it can, since I imagine that we try to create
20625 the types of parameters in the order in which they exist in
20626 the list, and won't have created a forward reference to a
20627 later parameter. */
20628 if (decl_die != NULL)
20629 {
20630 if (get_AT (decl_die, DW_AT_location)
20631 || get_AT (decl_die, DW_AT_const_value))
20632 {
20633 add_AT_die_ref (die, attr, decl_die);
20634 return;
20635 }
20636 }
20637 }
20638 }
20639
20640 /* Last chance: try to create a stack operation procedure to evaluate the
20641 value. Do nothing if even that is not possible or permitted. */
20642 if ((forms & dw_scalar_form_exprloc) == 0)
20643 return;
20644
20645 list = loc_list_from_tree (value, 2, context);
20646 if (context && context->placeholder_arg)
20647 {
20648 placeholder_seen = context->placeholder_seen;
20649 context->placeholder_seen = false;
20650 }
20651 if (list == NULL || single_element_loc_list_p (list))
20652 {
20653 /* If this attribute is neither a reference nor a constant, it is
20654 a DWARF expression rather than a location description. For that,
20655 loc_list_from_tree (value, 0, &context) is needed. */
20656 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20657 if (list2 && single_element_loc_list_p (list2))
20658 {
20659 if (placeholder_seen)
20660 {
20661 struct dwarf_procedure_info dpi;
20662 dpi.fndecl = NULL_TREE;
20663 dpi.args_count = 1;
20664 if (!resolve_args_picking (list2->expr, 1, &dpi))
20665 return;
20666 }
20667 add_AT_loc (die, attr, list2->expr);
20668 return;
20669 }
20670 }
20671
20672 /* If that failed to give a single element location list, fall back to
20673 outputting this as a reference... still if permitted. */
20674 if (list == NULL
20675 || (forms & dw_scalar_form_reference) == 0
20676 || placeholder_seen)
20677 return;
20678
20679 if (!decl_die)
20680 {
20681 if (current_function_decl == 0)
20682 context_die = comp_unit_die ();
20683 else
20684 context_die = lookup_decl_die (current_function_decl);
20685
20686 decl_die = new_die (DW_TAG_variable, context_die, value);
20687 add_AT_flag (decl_die, DW_AT_artificial, 1);
20688 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20689 context_die);
20690 }
20691
20692 add_AT_location_description (decl_die, DW_AT_location, list);
20693 add_AT_die_ref (die, attr, decl_die);
20694 }
20695
20696 /* Return the default for DW_AT_lower_bound, or -1 if there is no
20697 default. */
20698
20699 static int
20700 lower_bound_default (void)
20701 {
20702 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20703 {
20704 case DW_LANG_C:
20705 case DW_LANG_C89:
20706 case DW_LANG_C99:
20707 case DW_LANG_C11:
20708 case DW_LANG_C_plus_plus:
20709 case DW_LANG_C_plus_plus_11:
20710 case DW_LANG_C_plus_plus_14:
20711 case DW_LANG_ObjC:
20712 case DW_LANG_ObjC_plus_plus:
20713 return 0;
20714 case DW_LANG_Fortran77:
20715 case DW_LANG_Fortran90:
20716 case DW_LANG_Fortran95:
20717 case DW_LANG_Fortran03:
20718 case DW_LANG_Fortran08:
20719 return 1;
20720 case DW_LANG_UPC:
20721 case DW_LANG_D:
20722 case DW_LANG_Python:
20723 return dwarf_version >= 4 ? 0 : -1;
20724 case DW_LANG_Ada95:
20725 case DW_LANG_Ada83:
20726 case DW_LANG_Cobol74:
20727 case DW_LANG_Cobol85:
20728 case DW_LANG_Modula2:
20729 case DW_LANG_PLI:
20730 return dwarf_version >= 4 ? 1 : -1;
20731 default:
20732 return -1;
20733 }
20734 }
20735
20736 /* Given a tree node describing an array bound (either lower or upper), output
20737 a representation for that bound. */
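   For instance, for the C type 'int[10]' the lower bound 0 matches the
   language default (see lower_bound_default), so only DW_AT_upper_bound
   9 needs to be emitted for the subrange.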
20738
20739 static void
20740 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20741 tree bound, struct loc_descr_context *context)
20742 {
20743 int dflt;
20744
20745 while (1)
20746 switch (TREE_CODE (bound))
20747 {
20748 /* Strip all conversions. */
20749 CASE_CONVERT:
20750 case VIEW_CONVERT_EXPR:
20751 bound = TREE_OPERAND (bound, 0);
20752 break;
20753
20754 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
20755 are even omitted when they are the default. */
20756 case INTEGER_CST:
20757 /* If the value for this bound is the default one, we can even omit the
20758 attribute. */
20759 if (bound_attr == DW_AT_lower_bound
20760 && tree_fits_shwi_p (bound)
20761 && (dflt = lower_bound_default ()) != -1
20762 && tree_to_shwi (bound) == dflt)
20763 return;
20764
20765 /* FALLTHRU */
20766
20767 default:
20768 /* Because of the complex interactions there can be with other GNAT
20769 encodings, GDB isn't ready yet to handle a proper DWARF description
20770 for self-referential subrange bounds: let the GNAT encodings do the
20771 magic in such a case. */
20772 if (is_ada ()
20773 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20774 && contains_placeholder_p (bound))
20775 return;
20776
20777 add_scalar_info (subrange_die, bound_attr, bound,
20778 dw_scalar_form_constant
20779 | dw_scalar_form_exprloc
20780 | dw_scalar_form_reference,
20781 context);
20782 return;
20783 }
20784 }
20785
20786 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20787 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
20788 Note that the block of subscript information for an array type also
20789 includes information about the element type of the given array type.
20790
20791 This function reuses previously set type and bound information if
20792 available. */
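   For instance, for 'int m[2][3]' with COLLAPSE_P set, TYPE_DIE ends up
   with two DW_TAG_subrange_type children, with upper bounds 1 and 2
   respectively.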
20793
20794 static void
20795 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20796 {
20797 unsigned dimension_number;
20798 tree lower, upper;
20799 dw_die_ref child = type_die->die_child;
20800
20801 for (dimension_number = 0;
20802 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
20803 type = TREE_TYPE (type), dimension_number++)
20804 {
20805 tree domain = TYPE_DOMAIN (type);
20806
20807 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
20808 break;
20809
20810 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
20811 and (in GNU C only) variable bounds. Handle all three forms
20812 here. */
20813
20814 /* Find and reuse a previously generated DW_TAG_subrange_type if
20815 available.
20816
20817 For multi-dimensional arrays, as we iterate through the
20818 various dimensions in the enclosing for loop above, we also
20819 iterate through the DIE children and pick at each
20820 DW_TAG_subrange_type previously generated (if available).
20821 Each child DW_TAG_subrange_type DIE describes the range of
20822 the current dimension. At this point we should have as many
20823 DW_TAG_subrange_type's as we have dimensions in the
20824 array. */
20825 dw_die_ref subrange_die = NULL;
20826 if (child)
20827 while (1)
20828 {
20829 child = child->die_sib;
20830 if (child->die_tag == DW_TAG_subrange_type)
20831 subrange_die = child;
20832 if (child == type_die->die_child)
20833 {
20834 /* If we wrapped around, stop looking next time. */
20835 child = NULL;
20836 break;
20837 }
20838 if (child->die_tag == DW_TAG_subrange_type)
20839 break;
20840 }
20841 if (!subrange_die)
20842 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
20843
20844 if (domain)
20845 {
20846 /* We have an array type with specified bounds. */
20847 lower = TYPE_MIN_VALUE (domain);
20848 upper = TYPE_MAX_VALUE (domain);
20849
20850 /* Define the index type. */
20851 if (TREE_TYPE (domain)
20852 && !get_AT (subrange_die, DW_AT_type))
20853 {
20854 /* ??? This is probably an Ada unnamed subrange type. Ignore the
20855 TREE_TYPE field. We can't emit debug info for this
20856 because it is an unnamed integral type. */
20857 if (TREE_CODE (domain) == INTEGER_TYPE
20858 && TYPE_NAME (domain) == NULL_TREE
20859 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
20860 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
20861 ;
20862 else
20863 add_type_attribute (subrange_die, TREE_TYPE (domain),
20864 TYPE_UNQUALIFIED, false, type_die);
20865 }
20866
20867 /* ??? If upper is NULL, the array has unspecified length,
20868 but it does have a lower bound. This happens with Fortran
20869 dimension arr(N:*)
20870 Since the debugger is definitely going to need to know N
20871 to produce useful results, go ahead and output the lower
20872 bound solo, and hope the debugger can cope. */
20873
20874 if (!get_AT (subrange_die, DW_AT_lower_bound))
20875 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
20876 if (!get_AT (subrange_die, DW_AT_upper_bound)
20877 && !get_AT (subrange_die, DW_AT_count))
20878 {
20879 if (upper)
20880 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
20881 else if ((is_c () || is_cxx ()) && COMPLETE_TYPE_P (type))
20882 /* Zero-length array. */
20883 add_bound_info (subrange_die, DW_AT_count,
20884 build_int_cst (TREE_TYPE (lower), 0), NULL);
20885 }
20886 }
20887
20888 /* Otherwise we have an array type with an unspecified length. The
20889 DWARF-2 spec does not say how to handle this; let's just leave out the
20890 bounds. */
20891 }
20892 }
20893
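 /* Illustration only: with COLLAPSE_P set, the C declaration

      int m[2][3];

    is described by a single DW_TAG_array_type DIE with two
    DW_TAG_subrange_type children (upper bounds 1 and 2) instead of an
    array-of-array chain, because the loop above walks through the nested
    ARRAY_TYPEs in one pass. */
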
20894 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
20895
20896 static void
20897 add_byte_size_attribute (dw_die_ref die, tree tree_node)
20898 {
20899 dw_die_ref decl_die;
20900 HOST_WIDE_INT size;
20901 dw_loc_descr_ref size_expr = NULL;
20902
20903 switch (TREE_CODE (tree_node))
20904 {
20905 case ERROR_MARK:
20906 size = 0;
20907 break;
20908 case ENUMERAL_TYPE:
20909 case RECORD_TYPE:
20910 case UNION_TYPE:
20911 case QUAL_UNION_TYPE:
20912 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
20913 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
20914 {
20915 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
20916 return;
20917 }
20918 size_expr = type_byte_size (tree_node, &size);
20919 break;
20920 case FIELD_DECL:
20921 /* For a data member of a struct or union, the DW_AT_byte_size is
20922 generally given as the number of bytes normally allocated for an
20923 object of the *declared* type of the member itself. This is true
20924 even for bit-fields. */
20925 size = int_size_in_bytes (field_type (tree_node));
20926 break;
20927 default:
20928 gcc_unreachable ();
20929 }
20930
20931 /* Support for dynamically-sized objects was introduced by DWARFv3.
20932 At the moment, GDB does not handle variable byte sizes very well,
20933 though. */
20934 if ((dwarf_version >= 3 || !dwarf_strict)
20935 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
20936 && size_expr != NULL)
20937 add_AT_loc (die, DW_AT_byte_size, size_expr);
20938
20939 /* Note that `size' might be -1 when we get to this point. If it is, that
20940 indicates that the byte size of the entity in question is variable and
20941 that we could not generate a DWARF expression that computes it. */
20942 if (size >= 0)
20943 add_AT_unsigned (die, DW_AT_byte_size, size);
20944 }
20945
20946 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
20947 alignment. */
20948
20949 static void
20950 add_alignment_attribute (dw_die_ref die, tree tree_node)
20951 {
20952 if (dwarf_version < 5 && dwarf_strict)
20953 return;
20954
20955 unsigned align;
20956
20957 if (DECL_P (tree_node))
20958 {
20959 if (!DECL_USER_ALIGN (tree_node))
20960 return;
20961
20962 align = DECL_ALIGN_UNIT (tree_node);
20963 }
20964 else if (TYPE_P (tree_node))
20965 {
20966 if (!TYPE_USER_ALIGN (tree_node))
20967 return;
20968
20969 align = TYPE_ALIGN_UNIT (tree_node);
20970 }
20971 else
20972 gcc_unreachable ();
20973
20974 add_AT_unsigned (die, DW_AT_alignment, align);
20975 }
20976
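 /* Illustration only: for a type or decl carrying an explicit alignment
    request, e.g.

      struct s { char c; } __attribute__ ((aligned (16)));

    TYPE_USER_ALIGN is set and a DW_AT_alignment attribute with value 16
    is emitted; types and decls with only their default alignment get no
    such attribute. */
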
20977 /* For a FIELD_DECL node which represents a bit-field, output an attribute
20978 which specifies the distance in bits from the highest order bit of the
20979 "containing object" for the bit-field to the highest order bit of the
20980 bit-field itself.
20981
20982 For any given bit-field, the "containing object" is a hypothetical object
20983 (of some integral or enum type) within which the given bit-field lives. The
20984 type of this hypothetical "containing object" is always the same as the
20985 declared type of the individual bit-field itself. The determination of the
20986 exact location of the "containing object" for a bit-field is rather
20987 complicated. It's handled by the `field_byte_offset' function (above).
20988
20989 CTX is required: see the comment for VLR_CONTEXT.
20990
20991 Note that it is the size (in bytes) of the hypothetical "containing object"
20992 which will be given in the DW_AT_byte_size attribute for this bit-field.
20993 (See `add_byte_size_attribute' above). */
20994
20995 static inline void
20996 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
20997 {
20998 HOST_WIDE_INT object_offset_in_bytes;
20999 tree original_type = DECL_BIT_FIELD_TYPE (decl);
21000 HOST_WIDE_INT bitpos_int;
21001 HOST_WIDE_INT highest_order_object_bit_offset;
21002 HOST_WIDE_INT highest_order_field_bit_offset;
21003 HOST_WIDE_INT bit_offset;
21004
21005 field_byte_offset (decl, ctx, &object_offset_in_bytes);
21006
21007 /* Must be a field and a bit field. */
21008 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
21009
21010 /* We can't yet handle bit-fields whose offsets are variable, so if we
21011 encounter such things, just return without generating any attribute
21012 whatsoever. Likewise for variable or too large size. */
21013 if (! tree_fits_shwi_p (bit_position (decl))
21014 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
21015 return;
21016
21017 bitpos_int = int_bit_position (decl);
21018
21019 /* Note that the bit offset is always the distance (in bits) from the
21020 highest-order bit of the "containing object" to the highest-order bit of
21021 the bit-field itself. Since the "high-order end" of any object or field
21022 is different on big-endian and little-endian machines, the computation
21023 below must take account of these differences. */
21024 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
21025 highest_order_field_bit_offset = bitpos_int;
21026
21027 if (! BYTES_BIG_ENDIAN)
21028 {
21029 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
21030 highest_order_object_bit_offset +=
21031 simple_type_size_in_bits (original_type);
21032 }
21033
21034 bit_offset
21035 = (! BYTES_BIG_ENDIAN
21036 ? highest_order_object_bit_offset - highest_order_field_bit_offset
21037 : highest_order_field_bit_offset - highest_order_object_bit_offset);
21038
21039 if (bit_offset < 0)
21040 add_AT_int (die, DW_AT_bit_offset, bit_offset);
21041 else
21042 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
21043 }
21044
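 /* Worked example, for illustration only (assuming a little-endian target
    where the containing object is a 32-bit int at byte offset 0): given

      struct s { unsigned a : 3; unsigned b : 5; };

    field B has bitpos_int == 3 and DECL_SIZE == 5, so the code above
    yields DW_AT_bit_offset == 32 - (3 + 5) == 24, i.e. the distance in
    bits from the high-order bit of the containing int to the high-order
    bit of B. On a big-endian target the result would simply be 3. */
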
21045 /* For a FIELD_DECL node which represents a bit field, output an attribute
21046 which specifies the length in bits of the given field. */
21047
21048 static inline void
21049 add_bit_size_attribute (dw_die_ref die, tree decl)
21050 {
21051 /* Must be a field and a bit field. */
21052 gcc_assert (TREE_CODE (decl) == FIELD_DECL
21053 && DECL_BIT_FIELD_TYPE (decl));
21054
21055 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
21056 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
21057 }
21058
21059 /* If the compiled language is ANSI C, then add a 'prototyped'
21060 attribute if argument types are given for the parameters of a function. */
21061
21062 static inline void
21063 add_prototyped_attribute (dw_die_ref die, tree func_type)
21064 {
21065 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
21066 {
21067 case DW_LANG_C:
21068 case DW_LANG_C89:
21069 case DW_LANG_C99:
21070 case DW_LANG_C11:
21071 case DW_LANG_ObjC:
21072 if (prototype_p (func_type))
21073 add_AT_flag (die, DW_AT_prototyped, 1);
21074 break;
21075 default:
21076 break;
21077 }
21078 }
21079
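 /* Illustration only: when the CU language is one of the C flavors above,

      int f (void);    <- prototype_p is true, DW_AT_prototyped is added
      int g ();        <- old-style declaration, no DW_AT_prototyped

    for other source languages the switch above adds nothing. */
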
21080 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
21081 by looking in the type declaration, the object declaration equate table or
21082 the block mapping. */
21083
21084 static inline dw_die_ref
21085 add_abstract_origin_attribute (dw_die_ref die, tree origin)
21086 {
21087 dw_die_ref origin_die = NULL;
21088
21089 if (DECL_P (origin))
21090 {
21091 dw_die_ref c;
21092 origin_die = lookup_decl_die (origin);
21093 /* "Unwrap" the decl's DIE which we put in the imported unit context.
21094 We are looking for the abstract copy here. */
21095 if (in_lto_p
21096 && origin_die
21097 && (c = get_AT_ref (origin_die, DW_AT_abstract_origin))
21098 /* ??? Identify this better. */
21099 && c->with_offset)
21100 origin_die = c;
21101 }
21102 else if (TYPE_P (origin))
21103 origin_die = lookup_type_die (origin);
21104 else if (TREE_CODE (origin) == BLOCK)
21105 origin_die = BLOCK_DIE (origin);
21106
21107 /* XXX: Functions that are never lowered don't always have correct block
21108 trees (in the case of Java and some other languages, they simply have
21109 no block tree). For these functions, there is nothing we can really do to
21110 output correct debug info for inlined functions in all cases. Rather
21111 than die, we'll just produce deficient debug info now, in that we will
21112 have variables without a proper abstract origin. In the future, when all
21113 functions are lowered, we should re-add a gcc_assert (origin_die)
21114 here. */
21115
21116 if (origin_die)
21117 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21118 return origin_die;
21119 }
21120
21121 /* We do not currently support the pure_virtual attribute. */
21122
21123 static inline void
21124 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21125 {
21126 if (DECL_VINDEX (func_decl))
21127 {
21128 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21129
21130 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21131 add_AT_loc (die, DW_AT_vtable_elem_location,
21132 new_loc_descr (DW_OP_constu,
21133 tree_to_shwi (DECL_VINDEX (func_decl)),
21134 0));
21135
21136 /* GNU extension: Record what type this method came from originally. */
21137 if (debug_info_level > DINFO_LEVEL_TERSE
21138 && DECL_CONTEXT (func_decl))
21139 add_AT_die_ref (die, DW_AT_containing_type,
21140 lookup_type_die (DECL_CONTEXT (func_decl)));
21141 }
21142 }
21143 \f
21144 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21145 given decl. This used to be a vendor extension until DWARF 4
21146 standardized it. */
21147
21148 static void
21149 add_linkage_attr (dw_die_ref die, tree decl)
21150 {
21151 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21152
21153 /* Mimic what assemble_name_raw does with a leading '*'. */
21154 if (name[0] == '*')
21155 name = &name[1];
21156
21157 if (dwarf_version >= 4)
21158 add_AT_string (die, DW_AT_linkage_name, name);
21159 else
21160 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21161 }
21162
21163 /* Add source coordinate attributes for the given decl. */
21164
21165 static void
21166 add_src_coords_attributes (dw_die_ref die, tree decl)
21167 {
21168 expanded_location s;
21169
21170 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21171 return;
21172 s = expand_location (DECL_SOURCE_LOCATION (decl));
21173 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21174 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21175 if (debug_column_info && s.column)
21176 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21177 }
21178
21179 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21180
21181 static void
21182 add_linkage_name_raw (dw_die_ref die, tree decl)
21183 {
21184 /* Defer until we have an assembler name set. */
21185 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21186 {
21187 limbo_die_node *asm_name;
21188
21189 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21190 asm_name->die = die;
21191 asm_name->created_for = decl;
21192 asm_name->next = deferred_asm_name;
21193 deferred_asm_name = asm_name;
21194 }
21195 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21196 add_linkage_attr (die, decl);
21197 }
21198
21199 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21200
21201 static void
21202 add_linkage_name (dw_die_ref die, tree decl)
21203 {
21204 if (debug_info_level > DINFO_LEVEL_NONE
21205 && VAR_OR_FUNCTION_DECL_P (decl)
21206 && TREE_PUBLIC (decl)
21207 && !(VAR_P (decl) && DECL_REGISTER (decl))
21208 && die->die_tag != DW_TAG_member)
21209 add_linkage_name_raw (die, decl);
21210 }
21211
21212 /* Add a DW_AT_name attribute and source coordinate attribute for the
21213 given decl, but only if it actually has a name. */
21214
21215 static void
21216 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21217 bool no_linkage_name)
21218 {
21219 tree decl_name;
21220
21221 decl_name = DECL_NAME (decl);
21222 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21223 {
21224 const char *name = dwarf2_name (decl, 0);
21225 if (name)
21226 add_name_attribute (die, name);
21227 else
21228 add_desc_attribute (die, decl);
21229
21230 if (! DECL_ARTIFICIAL (decl))
21231 add_src_coords_attributes (die, decl);
21232
21233 if (!no_linkage_name)
21234 add_linkage_name (die, decl);
21235 }
21236 else
21237 add_desc_attribute (die, decl);
21238
21239 #ifdef VMS_DEBUGGING_INFO
21240 /* Get the function's name, as described by its RTL. This may be different
21241 from the DECL_NAME name used in the source file. */
21242 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21243 {
21244 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21245 XEXP (DECL_RTL (decl), 0), false);
21246 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21247 }
21248 #endif /* VMS_DEBUGGING_INFO */
21249 }
21250
21251 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21252
21253 static void
21254 add_discr_value (dw_die_ref die, dw_discr_value *value)
21255 {
21256 dw_attr_node attr;
21257
21258 attr.dw_attr = DW_AT_discr_value;
21259 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21260 attr.dw_attr_val.val_entry = NULL;
21261 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21262 if (value->pos)
21263 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21264 else
21265 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21266 add_dwarf_attr (die, &attr);
21267 }
21268
21269 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21270
21271 static void
21272 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21273 {
21274 dw_attr_node attr;
21275
21276 attr.dw_attr = DW_AT_discr_list;
21277 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21278 attr.dw_attr_val.val_entry = NULL;
21279 attr.dw_attr_val.v.val_discr_list = discr_list;
21280 add_dwarf_attr (die, &attr);
21281 }
21282
21283 static inline dw_discr_list_ref
21284 AT_discr_list (dw_attr_node *attr)
21285 {
21286 return attr->dw_attr_val.v.val_discr_list;
21287 }
21288
21289 #ifdef VMS_DEBUGGING_INFO
21290 /* Output the debug main pointer DIE for VMS. */
21291
21292 void
21293 dwarf2out_vms_debug_main_pointer (void)
21294 {
21295 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21296 dw_die_ref die;
21297
21298 /* Allocate the VMS debug main subprogram die. */
21299 die = new_die_raw (DW_TAG_subprogram);
21300 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21301 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21302 current_function_funcdef_no);
21303 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21304
21305 /* Make it the first child of comp_unit_die (). */
21306 die->die_parent = comp_unit_die ();
21307 if (comp_unit_die ()->die_child)
21308 {
21309 die->die_sib = comp_unit_die ()->die_child->die_sib;
21310 comp_unit_die ()->die_child->die_sib = die;
21311 }
21312 else
21313 {
21314 die->die_sib = die;
21315 comp_unit_die ()->die_child = die;
21316 }
21317 }
21318 #endif /* VMS_DEBUGGING_INFO */
21319
21320 /* walk_tree helper function for uses_local_type, below. */
21321
21322 static tree
21323 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21324 {
21325 if (!TYPE_P (*tp))
21326 *walk_subtrees = 0;
21327 else
21328 {
21329 tree name = TYPE_NAME (*tp);
21330 if (name && DECL_P (name) && decl_function_context (name))
21331 return *tp;
21332 }
21333 return NULL_TREE;
21334 }
21335
21336 /* If TYPE involves a function-local type (including a local typedef to a
21337 non-local type), returns that type; otherwise returns NULL_TREE. */
21338
21339 static tree
21340 uses_local_type (tree type)
21341 {
21342 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21343 return used;
21344 }
21345
21346 /* Return the DIE for the scope that immediately contains this type.
21347 Non-named types that do not involve a function-local type get global
21348 scope. Named types nested in namespaces or other types get their
21349 containing scope. All other types (i.e. function-local named types) get
21350 the current active scope. */
21351
21352 static dw_die_ref
21353 scope_die_for (tree t, dw_die_ref context_die)
21354 {
21355 dw_die_ref scope_die = NULL;
21356 tree containing_scope;
21357
21358 /* Non-types always go in the current scope. */
21359 gcc_assert (TYPE_P (t));
21360
21361 /* Use the scope of the typedef, rather than the scope of the type
21362 it refers to. */
21363 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21364 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21365 else
21366 containing_scope = TYPE_CONTEXT (t);
21367
21368 /* Use the containing namespace if there is one. */
21369 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21370 {
21371 if (context_die == lookup_decl_die (containing_scope))
21372 /* OK */;
21373 else if (debug_info_level > DINFO_LEVEL_TERSE)
21374 context_die = get_context_die (containing_scope);
21375 else
21376 containing_scope = NULL_TREE;
21377 }
21378
21379 /* Ignore function type "scopes" from the C frontend. They mean that
21380 a tagged type is local to a parmlist of a function declarator, but
21381 that isn't useful to DWARF. */
21382 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21383 containing_scope = NULL_TREE;
21384
21385 if (SCOPE_FILE_SCOPE_P (containing_scope))
21386 {
21387 /* If T uses a local type keep it local as well, to avoid references
21388 to function-local DIEs from outside the function. */
21389 if (current_function_decl && uses_local_type (t))
21390 scope_die = context_die;
21391 else
21392 scope_die = comp_unit_die ();
21393 }
21394 else if (TYPE_P (containing_scope))
21395 {
21396 /* For types, we can just look up the appropriate DIE. */
21397 if (debug_info_level > DINFO_LEVEL_TERSE)
21398 scope_die = get_context_die (containing_scope);
21399 else
21400 {
21401 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21402 if (scope_die == NULL)
21403 scope_die = comp_unit_die ();
21404 }
21405 }
21406 else
21407 scope_die = context_die;
21408
21409 return scope_die;
21410 }
21411
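 /* Illustration only (assuming -g at a level above -g1): for the C++ code

      namespace N { struct T { }; }

    scope_die_for (T, ...) resolves the containing NAMESPACE_DECL and
    returns the DW_TAG_namespace DIE for N, so T's DIE is nested inside
    it; a file-scope struct would instead land under comp_unit_die (). */
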
21412 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21413
21414 static inline int
21415 local_scope_p (dw_die_ref context_die)
21416 {
21417 for (; context_die; context_die = context_die->die_parent)
21418 if (context_die->die_tag == DW_TAG_inlined_subroutine
21419 || context_die->die_tag == DW_TAG_subprogram)
21420 return 1;
21421
21422 return 0;
21423 }
21424
21425 /* Returns nonzero if CONTEXT_DIE is a class. */
21426
21427 static inline int
21428 class_scope_p (dw_die_ref context_die)
21429 {
21430 return (context_die
21431 && (context_die->die_tag == DW_TAG_structure_type
21432 || context_die->die_tag == DW_TAG_class_type
21433 || context_die->die_tag == DW_TAG_interface_type
21434 || context_die->die_tag == DW_TAG_union_type));
21435 }
21436
21437 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21438 whether or not to treat a DIE in this context as a declaration. */
21439
21440 static inline int
21441 class_or_namespace_scope_p (dw_die_ref context_die)
21442 {
21443 return (class_scope_p (context_die)
21444 || (context_die && context_die->die_tag == DW_TAG_namespace));
21445 }
21446
21447 /* Many forms of DIEs require a "type description" attribute. This
21448 routine locates the proper "type descriptor" die for the type given
21449 by 'type' plus any additional qualifiers given by 'cv_quals', and
21450 adds a DW_AT_type attribute below the given die. */
21451
21452 static void
21453 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21454 bool reverse, dw_die_ref context_die)
21455 {
21456 enum tree_code code = TREE_CODE (type);
21457 dw_die_ref type_die = NULL;
21458
21459 /* ??? If this type is an unnamed subrange type of an integral, floating-point
21460 or fixed-point type, use the inner type. This is because we have no
21461 support for unnamed types in base_type_die. This can happen if this is
21462 an Ada subrange type. The correct solution is to emit a subrange type DIE. */
21463 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21464 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21465 type = TREE_TYPE (type), code = TREE_CODE (type);
21466
21467 if (code == ERROR_MARK
21468 /* Handle a special case. For functions whose return type is void, we
21469 generate *no* type attribute. (Note that no object may have type
21470 `void', so this only applies to function return types). */
21471 || code == VOID_TYPE)
21472 return;
21473
21474 type_die = modified_type_die (type,
21475 cv_quals | TYPE_QUALS (type),
21476 reverse,
21477 context_die);
21478
21479 if (type_die != NULL)
21480 add_AT_die_ref (object_die, DW_AT_type, type_die);
21481 }
21482
21483 /* Given an object die, add the calling convention attribute for the
21484 function call type. */
21485 static void
21486 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21487 {
21488 enum dwarf_calling_convention value = DW_CC_normal;
21489
21490 value = ((enum dwarf_calling_convention)
21491 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21492
21493 if (is_fortran ()
21494 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21495 {
21496 /* DWARF 2 doesn't provide a way to identify a program's source-level
21497 entry point. DW_AT_calling_convention attributes are only meant
21498 to describe functions' calling conventions. However, lacking a
21499 better way to signal the Fortran main program, we used this for
21500 a long time, following existing custom. Now, DWARF 4 has
21501 DW_AT_main_subprogram, which we add below, but some tools still
21502 rely on the old way, which we thus keep. */
21503 value = DW_CC_program;
21504
21505 if (dwarf_version >= 4 || !dwarf_strict)
21506 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21507 }
21508
21509 /* Only add the attribute if the backend requests it and the value
21510 is not DW_CC_normal. */
21511 if (value && (value != DW_CC_normal))
21512 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21513 }
21514
21515 /* Given a tree pointer to a struct, class, union, or enum type node, return
21516 a pointer to the (string) tag name for the given type, or zero if the type
21517 was declared without a tag. */
21518
21519 static const char *
21520 type_tag (const_tree type)
21521 {
21522 const char *name = 0;
21523
21524 if (TYPE_NAME (type) != 0)
21525 {
21526 tree t = 0;
21527
21528 /* Find the IDENTIFIER_NODE for the type name. */
21529 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21530 && !TYPE_NAMELESS (type))
21531 t = TYPE_NAME (type);
21532
21533 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21534 a TYPE_DECL node, regardless of whether or not a `typedef' was
21535 involved. */
21536 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21537 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21538 {
21539 /* We want to be extra verbose. Don't call dwarf_name if
21540 DECL_NAME isn't set. The default hook for decl_printable_name
21541 doesn't like that, and in this context it's correct to return
21542 0, instead of "<anonymous>" or the like. */
21543 if (DECL_NAME (TYPE_NAME (type))
21544 && !DECL_NAMELESS (TYPE_NAME (type)))
21545 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21546 }
21547
21548 /* Now get the name as a string, or invent one. */
21549 if (!name && t != 0)
21550 name = IDENTIFIER_POINTER (t);
21551 }
21552
21553 return (name == 0 || *name == '\0') ? 0 : name;
21554 }
21555
21556 /* Return the type associated with a data member, make a special check
21557 for bit field types. */
21558
21559 static inline tree
21560 member_declared_type (const_tree member)
21561 {
21562 return (DECL_BIT_FIELD_TYPE (member)
21563 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21564 }
21565
21566 /* Get the decl's label, as described by its RTL. This may be different
21567 from the DECL_NAME name used in the source file. */
21568
21569 #if 0
21570 static const char *
21571 decl_start_label (tree decl)
21572 {
21573 rtx x;
21574 const char *fnname;
21575
21576 x = DECL_RTL (decl);
21577 gcc_assert (MEM_P (x));
21578
21579 x = XEXP (x, 0);
21580 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21581
21582 fnname = XSTR (x, 0);
21583 return fnname;
21584 }
21585 #endif
21586 \f
21587 /* For variable-length arrays that have been previously generated, but
21588 may be incomplete due to missing subscript info, fill the subscript
21589 info. Return TRUE if this is one of those cases. */
21590 static bool
21591 fill_variable_array_bounds (tree type)
21592 {
21593 if (TREE_ASM_WRITTEN (type)
21594 && TREE_CODE (type) == ARRAY_TYPE
21595 && variably_modified_type_p (type, NULL))
21596 {
21597 dw_die_ref array_die = lookup_type_die (type);
21598 if (!array_die)
21599 return false;
21600 add_subscript_info (array_die, type, !is_ada ());
21601 return true;
21602 }
21603 return false;
21604 }
21605
21606 /* These routines generate the internal representation of the DIE's for
21607 the compilation unit. Debugging information is collected by walking
21608 the declaration trees passed in from dwarf2out_decl(). */
21609
21610 static void
21611 gen_array_type_die (tree type, dw_die_ref context_die)
21612 {
21613 dw_die_ref array_die;
21614
21615 /* GNU compilers represent multidimensional array types as sequences of one
21616 dimensional array types whose element types are themselves array types.
21617 We sometimes squish that down to a single array_type DIE with multiple
21618 subscripts in the Dwarf debugging info. The draft Dwarf specification
21619 says that we are allowed to do this kind of compression in C, because
21620 there is no difference between an array of arrays and a multidimensional
21621 array. We don't do this for Ada to remain as close as possible to the
21622 actual representation, which is especially important given the language's
21623 flexibility wrt arrays of variable size. */
21624
21625 bool collapse_nested_arrays = !is_ada ();
21626
21627 if (fill_variable_array_bounds (type))
21628 return;
21629
21630 dw_die_ref scope_die = scope_die_for (type, context_die);
21631 tree element_type;
21632
21633 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21634 DW_TAG_string_type doesn't have DW_AT_type attribute). */
21635 if (TYPE_STRING_FLAG (type)
21636 && TREE_CODE (type) == ARRAY_TYPE
21637 && is_fortran ()
21638 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21639 {
21640 HOST_WIDE_INT size;
21641
21642 array_die = new_die (DW_TAG_string_type, scope_die, type);
21643 add_name_attribute (array_die, type_tag (type));
21644 equate_type_number_to_die (type, array_die);
21645 size = int_size_in_bytes (type);
21646 if (size >= 0)
21647 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21648 /* ??? We can't annotate types late, but for LTO we may not
21649 generate a location early either (gfortran.dg/save_6.f90). */
21650 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21651 && TYPE_DOMAIN (type) != NULL_TREE
21652 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21653 {
21654 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21655 tree rszdecl = szdecl;
21656
21657 size = int_size_in_bytes (TREE_TYPE (szdecl));
21658 if (!DECL_P (szdecl))
21659 {
21660 if (TREE_CODE (szdecl) == INDIRECT_REF
21661 && DECL_P (TREE_OPERAND (szdecl, 0)))
21662 {
21663 rszdecl = TREE_OPERAND (szdecl, 0);
21664 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21665 != DWARF2_ADDR_SIZE)
21666 size = 0;
21667 }
21668 else
21669 size = 0;
21670 }
21671 if (size > 0)
21672 {
21673 dw_loc_list_ref loc
21674 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21675 NULL);
21676 if (loc)
21677 {
21678 add_AT_location_description (array_die, DW_AT_string_length,
21679 loc);
21680 if (size != DWARF2_ADDR_SIZE)
21681 add_AT_unsigned (array_die, dwarf_version >= 5
21682 ? DW_AT_string_length_byte_size
21683 : DW_AT_byte_size, size);
21684 }
21685 }
21686 }
21687 return;
21688 }
21689
21690 array_die = new_die (DW_TAG_array_type, scope_die, type);
21691 add_name_attribute (array_die, type_tag (type));
21692 equate_type_number_to_die (type, array_die);
21693
21694 if (TREE_CODE (type) == VECTOR_TYPE)
21695 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21696
21697 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21698 if (is_fortran ()
21699 && TREE_CODE (type) == ARRAY_TYPE
21700 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21701 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21702 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21703
21704 #if 0
21705 /* We default the array ordering. Debuggers will probably do the right
21706 things even if DW_AT_ordering is not present. It's not even an issue
21707 until we start to get into multidimensional arrays anyway. If a debugger
21708 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21709 then we'll have to put the DW_AT_ordering attribute back in. (But if
21710 and when we find out that we need to put these in, we will only do so
21711 for multidimensional arrays.) */
21712 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21713 #endif
21714
21715 if (TREE_CODE (type) == VECTOR_TYPE)
21716 {
21717 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21718 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21719 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21720 add_bound_info (subrange_die, DW_AT_upper_bound,
21721 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21722 }
21723 else
21724 add_subscript_info (array_die, type, collapse_nested_arrays);
21725
21726 /* Add representation of the type of the elements of this array type and
21727 emit the corresponding DIE if we haven't done it already. */
21728 element_type = TREE_TYPE (type);
21729 if (collapse_nested_arrays)
21730 while (TREE_CODE (element_type) == ARRAY_TYPE)
21731 {
21732 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21733 break;
21734 element_type = TREE_TYPE (element_type);
21735 }
21736
21737 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21738 TREE_CODE (type) == ARRAY_TYPE
21739 && TYPE_REVERSE_STORAGE_ORDER (type),
21740 context_die);
21741
21742 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21743 if (TYPE_ARTIFICIAL (type))
21744 add_AT_flag (array_die, DW_AT_artificial, 1);
21745
21746 if (get_AT (array_die, DW_AT_name))
21747 add_pubtype (type, array_die);
21748
21749 add_alignment_attribute (array_die, type);
21750 }
21751
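 /* Illustration only of the VECTOR_TYPE path above: for

      typedef int v4si __attribute__ ((vector_size (16)));

    a DW_TAG_array_type DIE is emitted with the DW_AT_GNU_vector flag and
    one DW_TAG_subrange_type child bounded by 0 and
    TYPE_VECTOR_SUBPARTS - 1, i.e. 0 .. 3 here (assuming 32-bit int). */
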
21752 /* This routine generates a DIE for an array with a hidden descriptor; the
21753 details are filled into *info by a langhook. */
21754
21755 static void
21756 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21757 dw_die_ref context_die)
21758 {
21759 const dw_die_ref scope_die = scope_die_for (type, context_die);
21760 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21761 struct loc_descr_context context = { type, info->base_decl, NULL,
21762 false, false };
21763 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21764 int dim;
21765
21766 add_name_attribute (array_die, type_tag (type));
21767 equate_type_number_to_die (type, array_die);
21768
21769 if (info->ndimensions > 1)
21770 switch (info->ordering)
21771 {
21772 case array_descr_ordering_row_major:
21773 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21774 break;
21775 case array_descr_ordering_column_major:
21776 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21777 break;
21778 default:
21779 break;
21780 }
21781
21782 if (dwarf_version >= 3 || !dwarf_strict)
21783 {
21784 if (info->data_location)
21785 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
21786 dw_scalar_form_exprloc, &context);
21787 if (info->associated)
21788 add_scalar_info (array_die, DW_AT_associated, info->associated,
21789 dw_scalar_form_constant
21790 | dw_scalar_form_exprloc
21791 | dw_scalar_form_reference, &context);
21792 if (info->allocated)
21793 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
21794 dw_scalar_form_constant
21795 | dw_scalar_form_exprloc
21796 | dw_scalar_form_reference, &context);
21797 if (info->stride)
21798 {
21799 const enum dwarf_attribute attr
21800 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
21801 const int forms
21802 = (info->stride_in_bits)
21803 ? dw_scalar_form_constant
21804 : (dw_scalar_form_constant
21805 | dw_scalar_form_exprloc
21806 | dw_scalar_form_reference);
21807
21808 add_scalar_info (array_die, attr, info->stride, forms, &context);
21809 }
21810 }
21811 if (dwarf_version >= 5)
21812 {
21813 if (info->rank)
21814 {
21815 add_scalar_info (array_die, DW_AT_rank, info->rank,
21816 dw_scalar_form_constant
21817 | dw_scalar_form_exprloc, &context);
21818 subrange_tag = DW_TAG_generic_subrange;
21819 context.placeholder_arg = true;
21820 }
21821 }
21822
21823 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21824
21825 for (dim = 0; dim < info->ndimensions; dim++)
21826 {
21827 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
21828
21829 if (info->dimen[dim].bounds_type)
21830 add_type_attribute (subrange_die,
21831 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
21832 false, context_die);
21833 if (info->dimen[dim].lower_bound)
21834 add_bound_info (subrange_die, DW_AT_lower_bound,
21835 info->dimen[dim].lower_bound, &context);
21836 if (info->dimen[dim].upper_bound)
21837 add_bound_info (subrange_die, DW_AT_upper_bound,
21838 info->dimen[dim].upper_bound, &context);
21839 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
21840 add_scalar_info (subrange_die, DW_AT_byte_stride,
21841 info->dimen[dim].stride,
21842 dw_scalar_form_constant
21843 | dw_scalar_form_exprloc
21844 | dw_scalar_form_reference,
21845 &context);
21846 }
21847
21848 gen_type_die (info->element_type, context_die);
21849 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
21850 TREE_CODE (type) == ARRAY_TYPE
21851 && TYPE_REVERSE_STORAGE_ORDER (type),
21852 context_die);
21853
21854 if (get_AT (array_die, DW_AT_name))
21855 add_pubtype (type, array_die);
21856
21857 add_alignment_attribute (array_die, type);
21858 }
21859
21860 #if 0
21861 static void
21862 gen_entry_point_die (tree decl, dw_die_ref context_die)
21863 {
21864 tree origin = decl_ultimate_origin (decl);
21865 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
21866
21867 if (origin != NULL)
21868 add_abstract_origin_attribute (decl_die, origin);
21869 else
21870 {
21871 add_name_and_src_coords_attributes (decl_die, decl);
21872 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
21873 TYPE_UNQUALIFIED, false, context_die);
21874 }
21875
21876 if (DECL_ABSTRACT_P (decl))
21877 equate_decl_number_to_die (decl, decl_die);
21878 else
21879 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
21880 }
21881 #endif
21882
21883 /* Walk through the list of incomplete types again, trying once more to
21884 emit full debugging info for them. */
21885
21886 static void
21887 retry_incomplete_types (void)
21888 {
21889 set_early_dwarf s;
21890 int i;
21891
21892 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
21893 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
21894 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
21895 vec_safe_truncate (incomplete_types, 0);
21896 }
21897
21898 /* Determine what tag to use for a record type. */
21899
21900 static enum dwarf_tag
21901 record_type_tag (tree type)
21902 {
21903 if (! lang_hooks.types.classify_record)
21904 return DW_TAG_structure_type;
21905
21906 switch (lang_hooks.types.classify_record (type))
21907 {
21908 case RECORD_IS_STRUCT:
21909 return DW_TAG_structure_type;
21910
21911 case RECORD_IS_CLASS:
21912 return DW_TAG_class_type;
21913
21914 case RECORD_IS_INTERFACE:
21915 if (dwarf_version >= 3 || !dwarf_strict)
21916 return DW_TAG_interface_type;
21917 return DW_TAG_structure_type;
21918
21919 default:
21920 gcc_unreachable ();
21921 }
21922 }
21923
21924 /* Generate a DIE to represent an enumeration type. Note that these DIEs
21925 include all of the information about the enumeration values also. Each
21926 enumerated type name/value is listed as a child of the enumerated type
21927 DIE. */
21928
21929 static dw_die_ref
21930 gen_enumeration_type_die (tree type, dw_die_ref context_die)
21931 {
21932 dw_die_ref type_die = lookup_type_die (type);
21933 dw_die_ref orig_type_die = type_die;
21934
21935 if (type_die == NULL)
21936 {
21937 type_die = new_die (DW_TAG_enumeration_type,
21938 scope_die_for (type, context_die), type);
21939 equate_type_number_to_die (type, type_die);
21940 add_name_attribute (type_die, type_tag (type));
21941 if ((dwarf_version >= 4 || !dwarf_strict)
21942 && ENUM_IS_SCOPED (type))
21943 add_AT_flag (type_die, DW_AT_enum_class, 1);
21944 if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
21945 add_AT_flag (type_die, DW_AT_declaration, 1);
21946 if (!dwarf_strict)
21947 add_AT_unsigned (type_die, DW_AT_encoding,
21948 TYPE_UNSIGNED (type)
21949 ? DW_ATE_unsigned
21950 : DW_ATE_signed);
21951 }
21952 else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
21953 return type_die;
21954 else
21955 remove_AT (type_die, DW_AT_declaration);
21956
21957 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
21958 given enum type is incomplete, do not generate the DW_AT_byte_size
21959 attribute or the DW_AT_element_list attribute. */
21960 if (TYPE_SIZE (type))
21961 {
21962 tree link;
21963
21964 if (!ENUM_IS_OPAQUE (type))
21965 TREE_ASM_WRITTEN (type) = 1;
21966 if (!orig_type_die || !get_AT (type_die, DW_AT_byte_size))
21967 add_byte_size_attribute (type_die, type);
21968 if (!orig_type_die || !get_AT (type_die, DW_AT_alignment))
21969 add_alignment_attribute (type_die, type);
21970 if ((dwarf_version >= 3 || !dwarf_strict)
21971 && (!orig_type_die || !get_AT (type_die, DW_AT_type)))
21972 {
21973 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
21974 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
21975 context_die);
21976 }
21977 if (TYPE_STUB_DECL (type) != NULL_TREE)
21978 {
21979 if (!orig_type_die || !get_AT (type_die, DW_AT_decl_file))
21980 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
21981 if (!orig_type_die || !get_AT (type_die, DW_AT_accessibility))
21982 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
21983 }
21984
21985 /* If the first reference to this type was as the return type of an
21986 inline function, then it may not have a parent. Fix this now. */
21987 if (type_die->die_parent == NULL)
21988 add_child_die (scope_die_for (type, context_die), type_die);
21989
21990 for (link = TYPE_VALUES (type);
21991 link != NULL; link = TREE_CHAIN (link))
21992 {
21993 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
21994 tree value = TREE_VALUE (link);
21995
21996 gcc_assert (!ENUM_IS_OPAQUE (type));
21997 add_name_attribute (enum_die,
21998 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
21999
22000 if (TREE_CODE (value) == CONST_DECL)
22001 value = DECL_INITIAL (value);
22002
22003 if (simple_type_size_in_bits (TREE_TYPE (value))
22004 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
22005 {
22006 /* For constant forms created by add_AT_unsigned, DWARF
22007 consumers (GDB, elfutils, etc.) always zero-extend
22008 the value. Only when the actual value is negative
22009 do we need to use add_AT_int to generate a constant
22010 form that can represent negative values. */
22011 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
22012 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
22013 add_AT_unsigned (enum_die, DW_AT_const_value,
22014 (unsigned HOST_WIDE_INT) val);
22015 else
22016 add_AT_int (enum_die, DW_AT_const_value, val);
22017 }
22018 else
22019 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
22020 that here. TODO: This should be re-worked to use correct
22021 signed/unsigned double tags for all cases. */
22022 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
22023 }
22024
22025 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22026 if (TYPE_ARTIFICIAL (type)
22027 && (!orig_type_die || !get_AT (type_die, DW_AT_artificial)))
22028 add_AT_flag (type_die, DW_AT_artificial, 1);
22029 }
22030 else
22031 add_AT_flag (type_die, DW_AT_declaration, 1);
22032
22033 add_pubtype (type, type_die);
22034
22035 return type_die;
22036 }
22037
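 /* Illustration only of the constant-form choice above: for

      enum e { A = -1, B = 1 };

    enumerator A gets its DW_AT_const_value via add_AT_int (the value is
    negative), while B uses add_AT_unsigned, which consumers zero-extend. */
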
22038 /* Generate a DIE to represent either a real live formal parameter decl or to
22039 represent just the type of some formal parameter position in some function
22040 type.
22041
22042 Note that this routine is a bit unusual because its argument may be a
22043 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
22044 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
22045 node. If it's the former then this function is being called to output a
22046 DIE to represent a formal parameter object (or some inlining thereof). If
22047 it's the latter, then this function is only being called to output a
22048 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
22049 argument type of some subprogram type.
22050 If EMIT_NAME_P is true, name and source coordinate attributes
22051 are emitted. */
22052
22053 static dw_die_ref
22054 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
22055 dw_die_ref context_die)
22056 {
22057 tree node_or_origin = node ? node : origin;
22058 tree ultimate_origin;
22059 dw_die_ref parm_die = NULL;
22060
22061 if (DECL_P (node_or_origin))
22062 {
22063 parm_die = lookup_decl_die (node);
22064
22065 /* If the contexts differ, we may not be talking about the same
22066 thing.
22067 ??? When in LTO the DIE parent is the "abstract" copy and the
22068 context_die is the specification "copy". But this whole block
22069 should eventually be no longer needed. */
22070 if (parm_die && parm_die->die_parent != context_die && !in_lto_p)
22071 {
22072 if (!DECL_ABSTRACT_P (node))
22073 {
22074 /* This can happen when creating an inlined instance, in
22075 which case we need to create a new DIE that will get
22076 annotated with DW_AT_abstract_origin. */
22077 parm_die = NULL;
22078 }
22079 else
22080 gcc_unreachable ();
22081 }
22082
22083 if (parm_die && parm_die->die_parent == NULL)
22084 {
22085 /* Check that parm_die already has the right attributes that
22086 we would have added below. If any attributes are
22087 missing, fall through to add them. */
22088 if (! DECL_ABSTRACT_P (node_or_origin)
22089 && !get_AT (parm_die, DW_AT_location)
22090 && !get_AT (parm_die, DW_AT_const_value))
22091 /* We are missing location info, and are about to add it. */
22092 ;
22093 else
22094 {
22095 add_child_die (context_die, parm_die);
22096 return parm_die;
22097 }
22098 }
22099 }
22100
22101 /* If we have a previously generated DIE, use it, unless this is a
22102 concrete instance (origin != NULL), in which case we need a new
22103 DIE with a corresponding DW_AT_abstract_origin. */
22104 bool reusing_die;
22105 if (parm_die && origin == NULL)
22106 reusing_die = true;
22107 else
22108 {
22109 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22110 reusing_die = false;
22111 }
22112
22113 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22114 {
22115 case tcc_declaration:
22116 ultimate_origin = decl_ultimate_origin (node_or_origin);
22117 if (node || ultimate_origin)
22118 origin = ultimate_origin;
22119
22120 if (reusing_die)
22121 goto add_location;
22122
22123 if (origin != NULL)
22124 add_abstract_origin_attribute (parm_die, origin);
22125 else if (emit_name_p)
22126 add_name_and_src_coords_attributes (parm_die, node);
22127 if (origin == NULL
22128 || (! DECL_ABSTRACT_P (node_or_origin)
22129 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22130 decl_function_context
22131 (node_or_origin))))
22132 {
22133 tree type = TREE_TYPE (node_or_origin);
22134 if (decl_by_reference_p (node_or_origin))
22135 add_type_attribute (parm_die, TREE_TYPE (type),
22136 TYPE_UNQUALIFIED,
22137 false, context_die);
22138 else
22139 add_type_attribute (parm_die, type,
22140 decl_quals (node_or_origin),
22141 false, context_die);
22142 }
22143 if (origin == NULL && DECL_ARTIFICIAL (node))
22144 add_AT_flag (parm_die, DW_AT_artificial, 1);
22145 add_location:
22146 if (node && node != origin)
22147 equate_decl_number_to_die (node, parm_die);
22148 if (! DECL_ABSTRACT_P (node_or_origin))
22149 add_location_or_const_value_attribute (parm_die, node_or_origin,
22150 node == NULL);
22151
22152 break;
22153
22154 case tcc_type:
22155 /* We were called with some kind of a ..._TYPE node. */
22156 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22157 context_die);
22158 break;
22159
22160 default:
22161 gcc_unreachable ();
22162 }
22163
22164 return parm_die;
22165 }
22166
22167 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22168 children DW_TAG_formal_parameter DIEs representing the arguments of the
22169 parameter pack.
22170
22171 PARM_PACK must be a function parameter pack.
22172 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22173 must point to the subsequent arguments of the function PACK_ARG belongs to.
22174 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22175 If NEXT_ARG is non NULL, *NEXT_ARG is set to the function argument
22176 following the last one for which a DIE was generated. */
22177
22178 static dw_die_ref
22179 gen_formal_parameter_pack_die (tree parm_pack,
22180 tree pack_arg,
22181 dw_die_ref subr_die,
22182 tree *next_arg)
22183 {
22184 tree arg;
22185 dw_die_ref parm_pack_die;
22186
22187 gcc_assert (parm_pack
22188 && lang_hooks.function_parameter_pack_p (parm_pack)
22189 && subr_die);
22190
22191 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22192 add_src_coords_attributes (parm_pack_die, parm_pack);
22193
22194 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22195 {
22196 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22197 parm_pack))
22198 break;
22199 gen_formal_parameter_die (arg, NULL,
22200 false /* Don't emit name attribute. */,
22201 parm_pack_die);
22202 }
22203 if (next_arg)
22204 *next_arg = arg;
22205 return parm_pack_die;
22206 }
22207
22208 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22209 at the end of an (ANSI prototyped) formal parameter list. */
22210
22211 static void
22212 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22213 {
22214 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22215 }
22216
22217 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22218 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22219 parameters as specified in some function type specification (except for
22220 those which appear as part of a function *definition*). */
22221
22222 static void
22223 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22224 {
22225 tree link;
22226 tree formal_type = NULL;
22227 tree first_parm_type;
22228 tree arg;
22229
22230 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22231 {
22232 arg = DECL_ARGUMENTS (function_or_method_type);
22233 function_or_method_type = TREE_TYPE (function_or_method_type);
22234 }
22235 else
22236 arg = NULL_TREE;
22237
22238 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22239
22240 /* Make our first pass over the list of formal parameter types and output a
22241 DW_TAG_formal_parameter DIE for each one. */
22242 for (link = first_parm_type; link; )
22243 {
22244 dw_die_ref parm_die;
22245
22246 formal_type = TREE_VALUE (link);
22247 if (formal_type == void_type_node)
22248 break;
22249
22250 /* Output a (nameless) DIE to represent the formal parameter itself. */
22251 parm_die = gen_formal_parameter_die (formal_type, NULL,
22252 true /* Emit name attribute. */,
22253 context_die);
22254 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22255 && link == first_parm_type)
22256 {
22257 add_AT_flag (parm_die, DW_AT_artificial, 1);
22258 if (dwarf_version >= 3 || !dwarf_strict)
22259 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22260 }
22261 else if (arg && DECL_ARTIFICIAL (arg))
22262 add_AT_flag (parm_die, DW_AT_artificial, 1);
22263
22264 link = TREE_CHAIN (link);
22265 if (arg)
22266 arg = DECL_CHAIN (arg);
22267 }
22268
22269 /* If this function type has an ellipsis, add a
22270 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22271 if (formal_type != void_type_node)
22272 gen_unspecified_parameters_die (function_or_method_type, context_die);
22273
22274 /* Make our second (and final) pass over the list of formal parameter types
22275 and output DIEs to represent those types (as necessary). */
22276 for (link = TYPE_ARG_TYPES (function_or_method_type);
22277 link && TREE_VALUE (link);
22278 link = TREE_CHAIN (link))
22279 gen_type_die (TREE_VALUE (link), context_die);
22280 }
22281
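 /* Illustration only: for a function type such as

      int f (int, ...);

    the first pass above emits one nameless DW_TAG_formal_parameter DIE
    for the int argument, and because the TYPE_ARG_TYPES list is not
    terminated by void_type_node, a trailing DW_TAG_unspecified_parameters
    DIE is added to stand for the ellipsis. */
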
22282 /* We want to generate the DIE for TYPE so that we can generate the
22283 die for MEMBER, which has been defined; we will need to refer back
22284 to the member declaration nested within TYPE. If we're trying to
22285 generate minimal debug info for TYPE, processing TYPE won't do the
22286 trick; we need to attach the member declaration by hand. */
22287
22288 static void
22289 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22290 {
22291 gen_type_die (type, context_die);
22292
22293 /* If we're trying to avoid duplicate debug info, we may not have
22294 emitted the member decl for this function. Emit it now. */
22295 if (TYPE_STUB_DECL (type)
22296 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22297 && ! lookup_decl_die (member))
22298 {
22299 dw_die_ref type_die;
22300 gcc_assert (!decl_ultimate_origin (member));
22301
22302 type_die = lookup_type_die_strip_naming_typedef (type);
22303 if (TREE_CODE (member) == FUNCTION_DECL)
22304 gen_subprogram_die (member, type_die);
22305 else if (TREE_CODE (member) == FIELD_DECL)
22306 {
22307 /* Ignore the nameless fields that are used to skip bits but handle
22308 C++ anonymous unions and structs. */
22309 if (DECL_NAME (member) != NULL_TREE
22310 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22311 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22312 {
22313 struct vlr_context vlr_ctx = {
22314 DECL_CONTEXT (member), /* struct_type */
22315 NULL_TREE /* variant_part_offset */
22316 };
22317 gen_type_die (member_declared_type (member), type_die);
22318 gen_field_die (member, &vlr_ctx, type_die);
22319 }
22320 }
22321 else
22322 gen_variable_die (member, NULL_TREE, type_die);
22323 }
22324 }
22325 \f
22326 /* Forward declare these functions, because they are mutually recursive
22327 with their set_block_* pairing functions. */
22328 static void set_decl_origin_self (tree);
22329
22330 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22331 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22332 that it points to the node itself, thus indicating that the node is its
22333 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22334 the given node is NULL, recursively descend the decl/block tree which
22335 it is the root of, and for each other ..._DECL or BLOCK node contained
22336 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22337 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22338 values to point to themselves. */
22339
22340 static void
22341 set_block_origin_self (tree stmt)
22342 {
22343 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22344 {
22345 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22346
22347 {
22348 tree local_decl;
22349
22350 for (local_decl = BLOCK_VARS (stmt);
22351 local_decl != NULL_TREE;
22352 local_decl = DECL_CHAIN (local_decl))
22353 /* Do not recurse on nested functions since the inlining status
22354 of parent and child can be different as per the DWARF spec. */
22355 if (TREE_CODE (local_decl) != FUNCTION_DECL
22356 && !DECL_EXTERNAL (local_decl))
22357 set_decl_origin_self (local_decl);
22358 }
22359
22360 {
22361 tree subblock;
22362
22363 for (subblock = BLOCK_SUBBLOCKS (stmt);
22364 subblock != NULL_TREE;
22365 subblock = BLOCK_CHAIN (subblock))
22366 set_block_origin_self (subblock); /* Recurse. */
22367 }
22368 }
22369 }
22370
22371 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22372 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22373 node so that it points to the node itself, thus indicating that the
22374 node represents its own (abstract) origin. Additionally, if the
22375 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22376 the decl/block tree of which the given node is the root, and for
22377 each other ..._DECL or BLOCK node contained therein whose
22378 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22379 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22380 point to themselves. */
22381
22382 static void
22383 set_decl_origin_self (tree decl)
22384 {
22385 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22386 {
22387 DECL_ABSTRACT_ORIGIN (decl) = decl;
22388 if (TREE_CODE (decl) == FUNCTION_DECL)
22389 {
22390 tree arg;
22391
22392 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22393 DECL_ABSTRACT_ORIGIN (arg) = arg;
22394 if (DECL_INITIAL (decl) != NULL_TREE
22395 && DECL_INITIAL (decl) != error_mark_node)
22396 set_block_origin_self (DECL_INITIAL (decl));
22397 }
22398 }
22399 }
22400 \f
22401 /* Mark the early DIE for DECL as the abstract instance. */
22402
22403 static void
22404 dwarf2out_abstract_function (tree decl)
22405 {
22406 dw_die_ref old_die;
22407
22408 /* Make sure we have the actual abstract inline, not a clone. */
22409 decl = DECL_ORIGIN (decl);
22410
22411 if (DECL_IGNORED_P (decl))
22412 return;
22413
22414 old_die = lookup_decl_die (decl);
22415 /* With early debug we always have an old DIE unless we are in LTO
22416 and the user did not compile with debug info but only linked with it. */
22417 if (in_lto_p && ! old_die)
22418 return;
22419 gcc_assert (old_die != NULL);
22420 if (get_AT (old_die, DW_AT_inline)
22421 || get_AT (old_die, DW_AT_abstract_origin))
22422 /* We've already generated the abstract instance. */
22423 return;
22424
22425 /* Go ahead and put DW_AT_inline on the DIE. */
22426 if (DECL_DECLARED_INLINE_P (decl))
22427 {
22428 if (cgraph_function_possibly_inlined_p (decl))
22429 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22430 else
22431 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22432 }
22433 else
22434 {
22435 if (cgraph_function_possibly_inlined_p (decl))
22436 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22437 else
22438 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22439 }
22440
22441 if (DECL_DECLARED_INLINE_P (decl)
22442 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22443 add_AT_flag (old_die, DW_AT_artificial, 1);
22444
22445 set_decl_origin_self (decl);
22446 }
22447
22448 /* Helper function of premark_used_types() which gets called through
22449 htab_traverse.
22450
22451 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22452 marked as unused by prune_unused_types. */
22453
22454 bool
22455 premark_used_types_helper (tree const &type, void *)
22456 {
22457 dw_die_ref die;
22458
22459 die = lookup_type_die (type);
22460 if (die != NULL)
22461 die->die_perennial_p = 1;
22462 return true;
22463 }
22464
22465 /* Helper function of premark_types_used_by_global_vars which gets called
22466 through hash_table::traverse.
22467
22468 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22469 marked as unused by prune_unused_types. The DIE of the type is marked
22470 only if the global variable using the type will actually be emitted. */
22471
22472 int
22473 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22474 void *)
22475 {
22476 struct types_used_by_vars_entry *entry;
22477 dw_die_ref die;
22478
22479 entry = (struct types_used_by_vars_entry *) *slot;
22480 gcc_assert (entry->type != NULL
22481 && entry->var_decl != NULL);
22482 die = lookup_type_die (entry->type);
22483 if (die)
22484 {
22485 /* Ask cgraph if the global variable really is to be emitted.
22486 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22487 varpool_node *node = varpool_node::get (entry->var_decl);
22488 if (node && node->definition)
22489 {
22490 die->die_perennial_p = 1;
22491 /* Keep the parent DIEs as well. */
22492 while ((die = die->die_parent) && die->die_perennial_p == 0)
22493 die->die_perennial_p = 1;
22494 }
22495 }
22496 return 1;
22497 }
22498
22499 /* Mark all members of used_types_hash as perennial. */
22500
22501 static void
22502 premark_used_types (struct function *fun)
22503 {
22504 if (fun && fun->used_types_hash)
22505 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22506 }
22507
22508 /* Mark all members of types_used_by_vars_hash as perennial. */
22509
22510 static void
22511 premark_types_used_by_global_vars (void)
22512 {
22513 if (types_used_by_vars_hash)
22514 types_used_by_vars_hash
22515 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22516 }
22517
22518 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22519 for CA_LOC call arg loc node. */
22520
22521 static dw_die_ref
22522 gen_call_site_die (tree decl, dw_die_ref subr_die,
22523 struct call_arg_loc_node *ca_loc)
22524 {
22525 dw_die_ref stmt_die = NULL, die;
22526 tree block = ca_loc->block;
22527
22528 while (block
22529 && block != DECL_INITIAL (decl)
22530 && TREE_CODE (block) == BLOCK)
22531 {
22532 stmt_die = BLOCK_DIE (block);
22533 if (stmt_die)
22534 break;
22535 block = BLOCK_SUPERCONTEXT (block);
22536 }
22537 if (stmt_die == NULL)
22538 stmt_die = subr_die;
22539 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22540 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22541 if (ca_loc->tail_call_p)
22542 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22543 if (ca_loc->symbol_ref)
22544 {
22545 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22546 if (tdie)
22547 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22548 else
22549 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22550 false);
22551 }
22552 return die;
22553 }
22554
22555 /* Generate a DIE to represent a declared function (either file-scope or
22556 block-local). */
22557
22558 static void
22559 gen_subprogram_die (tree decl, dw_die_ref context_die)
22560 {
22561 tree origin = decl_ultimate_origin (decl);
22562 dw_die_ref subr_die;
22563 dw_die_ref old_die = lookup_decl_die (decl);
22564
22565 /* This function gets called multiple times for different stages of
22566 the debug process. For example, for func() in this code:
22567
22568 namespace S
22569 {
22570 void func() { ... }
22571 }
22572
22573 ...we get called 4 times. Twice in early debug and twice in
22574 late debug:
22575
22576 Early debug
22577 -----------
22578
22579 1. Once while generating func() within the namespace. This is
22580 the declaration. The declaration bit below is set, as the
22581 context is the namespace.
22582
22583 A new DIE will be generated with DW_AT_declaration set.
22584
22585 2. Once for func() itself. This is the specification. The
22586 declaration bit below is clear as the context is the CU.
22587
22588 We will use the cached DIE from (1) to create a new DIE with
22589 DW_AT_specification pointing to the declaration in (1).
22590
22591 Late debug via rest_of_handle_final()
22592 -------------------------------------
22593
22594 3. Once while generating func() within the namespace. This is also the
22595 declaration, as in (1), but this time we will early exit below
22596 as we have a cached DIE and a declaration needs no additional
22597 annotations (no locations), as the source declaration line
22598 info is enough.
22599
22600 4. Once for func() itself. As in (2), this is the specification,
22601 but this time we will re-use the cached DIE, and just annotate
22602 it with the location information that should now be available.
22603
22604 For something without namespaces, but with abstract instances, we
22605 are also called multiple times:
22606
22607 class Base
22608 {
22609 public:
22610 Base (); // constructor declaration (1)
22611 };
22612
22613 Base::Base () { } // constructor specification (2)
22614
22615 Early debug
22616 -----------
22617
22618 1. Once for the Base() constructor by virtue of it being a
22619 member of the Base class. This is done via
22620 rest_of_type_compilation.
22621
22622 This is a declaration, so a new DIE will be created with
22623 DW_AT_declaration.
22624
22625 2. Once for the Base() constructor definition, but this time
22626 while generating the abstract instance of the base
22627 constructor (__base_ctor) which is being generated via early
22628 debug of reachable functions.
22629
22630 Even though we have a cached version of the declaration (1),
22631 we will create a DW_AT_specification of the declaration DIE
22632 in (1).
22633
22634 3. Once for the __base_ctor itself, but this time, we generate
22635 a DW_AT_abstract_origin version of the DW_AT_specification in
22636 (2).
22637
22638 Late debug via rest_of_handle_final
22639 -----------------------------------
22640
22641 4. One final time for the __base_ctor (which will have a cached
22642 DIE with DW_AT_abstract_origin created in (3)). This time,
22643 we will just annotate the location information now
22644 available.
22645 */
22646 int declaration = (current_function_decl != decl
22647 || class_or_namespace_scope_p (context_die));
22648
22649 /* A declaration that has been previously dumped needs no
22650 additional information. */
22651 if (old_die && declaration)
22652 return;
22653
22654 /* Now that the C++ front end lazily declares artificial member fns, we
22655 might need to retrofit the declaration into its class. */
22656 if (!declaration && !origin && !old_die
22657 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22658 && !class_or_namespace_scope_p (context_die)
22659 && debug_info_level > DINFO_LEVEL_TERSE)
22660 old_die = force_decl_die (decl);
22661
22662 /* A concrete instance; tag a new DIE with DW_AT_abstract_origin. */
22663 if (origin != NULL)
22664 {
22665 gcc_assert (!declaration || local_scope_p (context_die));
22666
22667 /* Fixup die_parent for the abstract instance of a nested
22668 inline function. */
22669 if (old_die && old_die->die_parent == NULL)
22670 add_child_die (context_die, old_die);
22671
22672 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22673 {
22674 /* If we have a DW_AT_abstract_origin we have a working
22675 cached version. */
22676 subr_die = old_die;
22677 }
22678 else
22679 {
22680 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22681 add_abstract_origin_attribute (subr_die, origin);
22682 /* This is where the actual code for a cloned function is.
22683 Let's emit a linkage name attribute for it. This helps
22684 debuggers to, e.g., set breakpoints in
22685 constructors/destructors when the user asks "break
22686 K::K". */
22687 add_linkage_name (subr_die, decl);
22688 }
22689 }
22690 /* A cached copy, possibly from early dwarf generation. Reuse as
22691 much as possible. */
22692 else if (old_die)
22693 {
22694 if (!get_AT_flag (old_die, DW_AT_declaration)
22695 /* We can have a normal definition following an inline one in the
22696 case of redefinition of GNU C extern inlines.
22697 It seems reasonable to use AT_specification in this case. */
22698 && !get_AT (old_die, DW_AT_inline))
22699 {
22700 /* Detect and ignore this case, where we are trying to output
22701 something we have already output. */
22702 if (get_AT (old_die, DW_AT_low_pc)
22703 || get_AT (old_die, DW_AT_ranges))
22704 return;
22705
22706 /* If we have no location information, this must be a
22707 partially generated DIE from early dwarf generation.
22708 Fall through and generate it. */
22709 }
22710
22711 /* If the definition comes from the same place as the declaration,
22712 maybe use the old DIE. We always want the DIE for this function
22713 that has the *_pc attributes to be under comp_unit_die so the
22714 debugger can find it. We also need to do this for abstract
22715 instances of inlines, since the spec requires the out-of-line copy
22716 to have the same parent. For local class methods, this doesn't
22717 apply; we just use the old DIE. */
22718 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22719 struct dwarf_file_data * file_index = lookup_filename (s.file);
22720 if (((is_unit_die (old_die->die_parent)
22721 /* This condition fixes the inconsistency/ICE with the
22722 following Fortran test (or some derivative thereof) while
22723 building libgfortran:
22724
22725 module some_m
22726 contains
22727 logical function funky (FLAG)
22728 funky = .true.
22729 end function
22730 end module
22731 */
22732 || (old_die->die_parent
22733 && old_die->die_parent->die_tag == DW_TAG_module)
22734 || local_scope_p (old_die->die_parent)
22735 || context_die == NULL)
22736 && (DECL_ARTIFICIAL (decl)
22737 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22738 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22739 == (unsigned) s.line)
22740 && (!debug_column_info
22741 || s.column == 0
22742 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22743 == (unsigned) s.column)))))
22744 /* With LTO, if there's an abstract instance for
22745 the old DIE, this is a concrete instance, so
22746 re-use the DIE. */
22747 || get_AT (old_die, DW_AT_abstract_origin))
22748 {
22749 subr_die = old_die;
22750
22751 /* Clear out the declaration attribute, but leave the
22752 parameters so they can be augmented with location
22753 information later. Unless this was a declaration, in
22754 which case, wipe out the nameless parameters and recreate
22755 them further down. */
22756 if (remove_AT (subr_die, DW_AT_declaration))
22757 {
22758
22759 remove_AT (subr_die, DW_AT_object_pointer);
22760 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22761 }
22762 }
22763 /* Make a specification pointing to the previously built
22764 declaration. */
22765 else
22766 {
22767 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22768 add_AT_specification (subr_die, old_die);
22769 add_pubname (decl, subr_die);
22770 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22771 add_AT_file (subr_die, DW_AT_decl_file, file_index);
22772 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22773 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
22774 if (debug_column_info
22775 && s.column
22776 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22777 != (unsigned) s.column))
22778 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
22779
22780 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
22781 emit the real type on the definition die. */
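          /* For illustration only (a sketch, not from the surrounding code):

               auto f ();                  declaration DIE: DW_AT_type -> "auto"
               auto f () { return 0; }     definition DIE:  DW_AT_type -> "int"

             The deduced return type is only known once the definition has
             been seen, so it is attached to the definition DIE here. */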
22782 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
22783 {
22784 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
22785 if (die == auto_die || die == decltype_auto_die)
22786 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22787 TYPE_UNQUALIFIED, false, context_die);
22788 }
22789
22790 /* When we process the method declaration, we haven't seen
22791 the out-of-class defaulted definition yet, so we have to
22792 recheck now. */
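          /* For illustration only (a sketch):

               struct S { S (); };
               S::S () = default;

             The in-class declaration DIE was created before the defaulted
             out-of-class definition was seen, so DW_AT_defaulted
             (DW_DEFAULTED_out_of_class) can only be added at this point. */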
22793 if ((dwarf_version >= 5 || ! dwarf_strict)
22794 && !get_AT (subr_die, DW_AT_defaulted))
22795 {
22796 int defaulted
22797 = lang_hooks.decls.decl_dwarf_attribute (decl,
22798 DW_AT_defaulted);
22799 if (defaulted != -1)
22800 {
22801 /* Other values must have been handled before. */
22802 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
22803 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22804 }
22805 }
22806 }
22807 }
22808 /* Create a fresh DIE for anything else. */
22809 else
22810 {
22811 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22812
22813 if (TREE_PUBLIC (decl))
22814 add_AT_flag (subr_die, DW_AT_external, 1);
22815
22816 add_name_and_src_coords_attributes (subr_die, decl);
22817 add_pubname (decl, subr_die);
22818 if (debug_info_level > DINFO_LEVEL_TERSE)
22819 {
22820 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
22821 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22822 TYPE_UNQUALIFIED, false, context_die);
22823 }
22824
22825 add_pure_or_virtual_attribute (subr_die, decl);
22826 if (DECL_ARTIFICIAL (decl))
22827 add_AT_flag (subr_die, DW_AT_artificial, 1);
22828
22829 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
22830 add_AT_flag (subr_die, DW_AT_noreturn, 1);
22831
22832 add_alignment_attribute (subr_die, decl);
22833
22834 add_accessibility_attribute (subr_die, decl);
22835 }
22836
22837 /* Unless we have an existing non-declaration DIE, equate the new
22838 DIE. */
22839 if (!old_die || is_declaration_die (old_die))
22840 equate_decl_number_to_die (decl, subr_die);
22841
22842 if (declaration)
22843 {
22844 if (!old_die || !get_AT (old_die, DW_AT_inline))
22845 {
22846 add_AT_flag (subr_die, DW_AT_declaration, 1);
22847
22848 /* If this is an explicit function declaration then generate
22849 a DW_AT_explicit attribute. */
22850 if ((dwarf_version >= 3 || !dwarf_strict)
22851 && lang_hooks.decls.decl_dwarf_attribute (decl,
22852 DW_AT_explicit) == 1)
22853 add_AT_flag (subr_die, DW_AT_explicit, 1);
22854
22855 /* If this is a C++11 deleted special function member then generate
22856 a DW_AT_deleted attribute. */
22857 if ((dwarf_version >= 5 || !dwarf_strict)
22858 && lang_hooks.decls.decl_dwarf_attribute (decl,
22859 DW_AT_deleted) == 1)
22860 add_AT_flag (subr_die, DW_AT_deleted, 1);
22861
22862 /* If this is a C++11 defaulted special function member then
22863 generate a DW_AT_defaulted attribute. */
22864 if (dwarf_version >= 5 || !dwarf_strict)
22865 {
22866 int defaulted
22867 = lang_hooks.decls.decl_dwarf_attribute (decl,
22868 DW_AT_defaulted);
22869 if (defaulted != -1)
22870 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22871 }
22872
22873 /* If this is a C++11 non-static member function with & ref-qualifier
22874 then generate a DW_AT_reference attribute. */
22875 if ((dwarf_version >= 5 || !dwarf_strict)
22876 && lang_hooks.decls.decl_dwarf_attribute (decl,
22877 DW_AT_reference) == 1)
22878 add_AT_flag (subr_die, DW_AT_reference, 1);
22879
22880 /* If this is a C++11 non-static member function with &&
22881 ref-qualifier then generate a DW_AT_reference attribute. */
22882 if ((dwarf_version >= 5 || !dwarf_strict)
22883 && lang_hooks.decls.decl_dwarf_attribute (decl,
22884 DW_AT_rvalue_reference)
22885 == 1)
22886 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
22887 }
22888 }
22889 /* For non DECL_EXTERNALs, if range information is available, fill
22890 the DIE with it. */
22891 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
22892 {
22893 HOST_WIDE_INT cfa_fb_offset;
22894
22895 struct function *fun = DECL_STRUCT_FUNCTION (decl);
22896
22897 if (!crtl->has_bb_partition)
22898 {
22899 dw_fde_ref fde = fun->fde;
22900 if (fde->dw_fde_begin)
22901 {
22902 /* We have already generated the labels. */
22903 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22904 fde->dw_fde_end, false);
22905 }
22906 else
22907 {
22908 /* Create start/end labels and add the range. */
22909 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
22910 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
22911 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
22912 current_function_funcdef_no);
22913 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
22914 current_function_funcdef_no);
22915 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
22916 false);
22917 }
22918
22919 #if VMS_DEBUGGING_INFO
22920 /* HP OpenVMS Industry Standard 64: DWARF Extensions
22921 Section 2.3 Prologue and Epilogue Attributes:
22922 When a breakpoint is set on entry to a function, it is generally
22923 desirable for execution to be suspended, not on the very first
22924 instruction of the function, but rather at a point after the
22925 function's frame has been set up, after any language defined local
22926 declaration processing has been completed, and before execution of
22927 the first statement of the function begins. Debuggers generally
22928 cannot properly determine where this point is. Similarly for a
22929 breakpoint set on exit from a function. The prologue and epilogue
22930 attributes allow a compiler to communicate the location(s) to use. */
22931
22932 {
22933 if (fde->dw_fde_vms_end_prologue)
22934 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
22935 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
22936
22937 if (fde->dw_fde_vms_begin_epilogue)
22938 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
22939 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
22940 }
22941 #endif
22942
22943 }
22944 else
22945 {
22946 /* Generate pubnames entries for the split function code ranges. */
22947 dw_fde_ref fde = fun->fde;
22948
22949 if (fde->dw_fde_second_begin)
22950 {
22951 if (dwarf_version >= 3 || !dwarf_strict)
22952 {
22953 /* We should use ranges for non-contiguous code section
22954 addresses. Use the actual code range for the initial
22955 section, since the HOT/COLD labels might precede an
22956 alignment offset. */
22957 bool range_list_added = false;
22958 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
22959 fde->dw_fde_end, &range_list_added,
22960 false);
22961 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
22962 fde->dw_fde_second_end,
22963 &range_list_added, false);
22964 if (range_list_added)
22965 add_ranges (NULL);
22966 }
22967 else
22968 {
22969 /* There is no real support in DWARF 2 for this, so we use a
22970 work-around. First, emit the pub name for the segment
22971 containing the function label. Then make and emit a
22972 simplified subprogram DIE for the second segment with the
22973 name prefixed by __second_sect_of_. We use the same
22974 linkage name for the second DIE so that gdb will find both
22975 sections when given "b foo". */
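              /* For illustration only (a sketch of the resulting DIEs for a
                 split function "foo"):

                   DW_TAG_subprogram "foo"
                     low/high pc   = hot (primary) section range
                   DW_TAG_subprogram "__second_sect_of_foo"
                     linkage name  = same as for "foo"
                     low/high pc   = cold (second) section range */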
22976 const char *name = NULL;
22977 tree decl_name = DECL_NAME (decl);
22978 dw_die_ref seg_die;
22979
22980 /* Do the 'primary' section. */
22981 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22982 fde->dw_fde_end, false);
22983
22984 /* Build a minimal DIE for the secondary section. */
22985 seg_die = new_die (DW_TAG_subprogram,
22986 subr_die->die_parent, decl);
22987
22988 if (TREE_PUBLIC (decl))
22989 add_AT_flag (seg_die, DW_AT_external, 1);
22990
22991 if (decl_name != NULL
22992 && IDENTIFIER_POINTER (decl_name) != NULL)
22993 {
22994 name = dwarf2_name (decl, 1);
22995 if (! DECL_ARTIFICIAL (decl))
22996 add_src_coords_attributes (seg_die, decl);
22997
22998 add_linkage_name (seg_die, decl);
22999 }
23000 gcc_assert (name != NULL);
23001 add_pure_or_virtual_attribute (seg_die, decl);
23002 if (DECL_ARTIFICIAL (decl))
23003 add_AT_flag (seg_die, DW_AT_artificial, 1);
23004
23005 name = concat ("__second_sect_of_", name, NULL);
23006 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
23007 fde->dw_fde_second_end, false);
23008 add_name_attribute (seg_die, name);
23009 if (want_pubnames ())
23010 add_pubname_string (name, seg_die);
23011 }
23012 }
23013 else
23014 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
23015 false);
23016 }
23017
23018 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
23019
23020 /* We define the "frame base" as the function's CFA. This is more
23021 convenient for several reasons: (1) It's stable across the prologue
23022 and epilogue, which makes it better than just a frame pointer,
23023 (2) With dwarf3, there exists a one-byte encoding that allows us
23024 to reference the .debug_frame data by proxy, but failing that,
23025 (3) We can at least reuse the code inspection and interpretation
23026 code that determines the CFA position at various points in the
23027 function. */
23028 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
23029 {
23030 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
23031 add_AT_loc (subr_die, DW_AT_frame_base, op);
23032 }
23033 else
23034 {
23035 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
23036 if (list->dw_loc_next)
23037 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
23038 else
23039 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
23040 }
23041
23042 /* Compute a displacement from the "steady-state frame pointer" to
23043 the CFA. The former is what all stack slots and argument slots
23044 will reference in the rtl; the latter is what we've told the
23045 debugger about. We'll need to adjust all frame_base references
23046 by this displacement. */
23047 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
23048
23049 if (fun->static_chain_decl)
23050 {
23051 /* DWARF requires a location expression here that computes the
23052 address of the enclosing subprogram's frame base. The machinery
23053 in tree-nested.c is supposed to store this specific address in the
23054 last field of the FRAME record. */
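          /* For illustration only (a sketch for a GNU C nested function):

               void outer (void)
               {
                 int x;
                 void inner (void) { x++; }
                 ...
               }

             "inner" receives a static chain pointing at outer's FRAME
             record; the expression built below dereferences that chain and
             loads the record's last field to recover outer's frame base for
             DW_AT_static_link. */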
23055 const tree frame_type
23056 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
23057 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
23058
23059 tree fb_expr
23060 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
23061 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
23062 fb_expr, fb_decl, NULL_TREE);
23063
23064 add_AT_location_description (subr_die, DW_AT_static_link,
23065 loc_list_from_tree (fb_expr, 0, NULL));
23066 }
23067
23068 resolve_variable_values ();
23069 }
23070
23071 /* Generate child DIEs for template parameters. */
23072 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
23073 gen_generic_params_dies (decl);
23074
23075 /* Now output descriptions of the arguments for this function. This gets
23076 (unnecessarily?) complex because the DECL_ARGUMENTS list
23077 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
23078 `...' at the end of the formal parameter list. In order to find out if
23079 there was a trailing ellipsis or not, we must instead look at the type
23080 associated with the FUNCTION_DECL. This will be a node of type
23081 FUNCTION_TYPE. If the chain of type nodes hanging off of this
23082 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
23083 an ellipsis at the end. */
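  /* For illustration only (a sketch, for C declarations):

       void f (int, ...);   TYPE_ARG_TYPES: int          (varargs)
       void g (int);        TYPE_ARG_TYPES: int, void    (no varargs)
       int  h ();           unprototyped: TYPE_ARG_TYPES is NULL

     Only the first case should get a DW_TAG_unspecified_parameters child
     DIE; the unprototyped case gets one only when it is a mere declaration,
     i.e. there is no DECL_INITIAL. */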
23084
23085 /* In the case where we are describing a mere function declaration, all we
23086 need to do here (and all we *can* do here) is to describe the *types* of
23087 its formal parameters. */
23088 if (debug_info_level <= DINFO_LEVEL_TERSE)
23089 ;
23090 else if (declaration)
23091 gen_formal_types_die (decl, subr_die);
23092 else
23093 {
23094 /* Generate DIEs to represent all known formal parameters. */
23095 tree parm = DECL_ARGUMENTS (decl);
23096 tree generic_decl = early_dwarf
23097 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23098 tree generic_decl_parm = generic_decl
23099 ? DECL_ARGUMENTS (generic_decl)
23100 : NULL;
23101
23102 /* Now we want to walk the list of parameters of the function and
23103 emit their relevant DIEs.
23104
23105 We consider the case of DECL being an instance of a generic function
23106 as well as it being a normal function.
23107
23108 If DECL is an instance of a generic function we walk the
23109 parameters of the generic function declaration _and_ the parameters of
23110 DECL itself. This is useful because we want to emit specific DIEs for
23111 function parameter packs and those are declared as part of the
23112 generic function declaration. In that particular case,
23113 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23114 That DIE has children DIEs representing the set of arguments
23115 of the pack. Note that the set of pack arguments can be empty.
23116 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have
23117 any child DIEs.
23118
23119 Otherwise, we just consider the parameters of DECL. */
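      /* For illustration only (a hypothetical C++ instance, not taken from
         the surrounding code):

           template <typename... T> void f (int n, T... rest);
           template void f<int, char> (int, int, char);

         Here "n" gets an ordinary DW_TAG_formal_parameter DIE, while the
         pack "rest" of the generic declaration yields a
         DW_TAG_GNU_formal_parameter_pack DIE whose children describe the
         two parameters bound to the pack. */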
23120 while (generic_decl_parm || parm)
23121 {
23122 if (generic_decl_parm
23123 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23124 gen_formal_parameter_pack_die (generic_decl_parm,
23125 parm, subr_die,
23126 &parm);
23127 else if (parm)
23128 {
23129 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23130
23131 if (early_dwarf
23132 && parm == DECL_ARGUMENTS (decl)
23133 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23134 && parm_die
23135 && (dwarf_version >= 3 || !dwarf_strict))
23136 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23137
23138 parm = DECL_CHAIN (parm);
23139 }
23140 else if (parm)
23141 parm = DECL_CHAIN (parm);
23142
23143 if (generic_decl_parm)
23144 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23145 }
23146
23147 /* Decide whether we need an unspecified_parameters DIE at the end.
23148 There are two cases to do this for: 1) the ANSI `...' declaration -
23149 this is detectable when the end of the arg list is not a
23150 void_type_node; 2) an unprototyped function declaration (not a
23151 definition). The latter just means that we have no info about the
23152 parameters at all. */
23153 if (early_dwarf)
23154 {
23155 if (prototype_p (TREE_TYPE (decl)))
23156 {
23157 /* This is the prototyped case; check for a trailing ellipsis. */
23158 if (stdarg_p (TREE_TYPE (decl)))
23159 gen_unspecified_parameters_die (decl, subr_die);
23160 }
23161 else if (DECL_INITIAL (decl) == NULL_TREE)
23162 gen_unspecified_parameters_die (decl, subr_die);
23163 }
23164 }
23165
23166 if (subr_die != old_die)
23167 /* Add the calling convention attribute if requested. */
23168 add_calling_convention_attribute (subr_die, decl);
23169
23170 /* Output Dwarf info for all of the stuff within the body of the function
23171 (if it has one - it may be just a declaration).
23172
23173 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23174 a function. This BLOCK actually represents the outermost binding contour
23175 for the function, i.e. the contour in which the function's formal
23176 parameters and labels get declared. Curiously, it appears that the front
23177 end doesn't actually put the PARM_DECL nodes for the current function onto
23178 the BLOCK_VARS list for this outer scope; they are strung off of the
23179 DECL_ARGUMENTS list for the function instead.
23180
23181 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
23182 the LABEL_DECL nodes for the function however, and we output DWARF info
23183 for those in decls_for_scope. Just within the `outer_scope' there will be
23184 a BLOCK node representing the function's outermost pair of curly braces,
23185 and any blocks used for the base and member initializers of a C++
23186 constructor function. */
23187 tree outer_scope = DECL_INITIAL (decl);
23188 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23189 {
23190 int call_site_note_count = 0;
23191 int tail_call_site_note_count = 0;
23192
23193 /* Emit a DW_TAG_variable DIE for a named return value. */
23194 if (DECL_NAME (DECL_RESULT (decl)))
23195 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23196
23197 /* The first time through decls_for_scope we will generate the
23198 DIEs for the locals. The second time, we fill in the
23199 location info. */
23200 decls_for_scope (outer_scope, subr_die);
23201
23202 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23203 {
23204 struct call_arg_loc_node *ca_loc;
23205 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23206 {
23207 dw_die_ref die = NULL;
23208 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23209 rtx arg, next_arg;
23210 tree arg_decl = NULL_TREE;
23211
23212 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23213 ? XEXP (ca_loc->call_arg_loc_note, 0)
23214 : NULL_RTX);
23215 arg; arg = next_arg)
23216 {
23217 dw_loc_descr_ref reg, val;
23218 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23219 dw_die_ref cdie, tdie = NULL;
23220
23221 next_arg = XEXP (arg, 1);
23222 if (REG_P (XEXP (XEXP (arg, 0), 0))
23223 && next_arg
23224 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23225 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23226 && REGNO (XEXP (XEXP (arg, 0), 0))
23227 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23228 next_arg = XEXP (next_arg, 1);
23229 if (mode == VOIDmode)
23230 {
23231 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23232 if (mode == VOIDmode)
23233 mode = GET_MODE (XEXP (arg, 0));
23234 }
23235 if (mode == VOIDmode || mode == BLKmode)
23236 continue;
23237 /* Get dynamic information about call target only if we
23238 have no static information: we cannot generate both
23239 DW_AT_call_origin and DW_AT_call_target
23240 attributes. */
23241 if (ca_loc->symbol_ref == NULL_RTX)
23242 {
23243 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23244 {
23245 tloc = XEXP (XEXP (arg, 0), 1);
23246 continue;
23247 }
23248 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23249 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23250 {
23251 tlocc = XEXP (XEXP (arg, 0), 1);
23252 continue;
23253 }
23254 }
23255 reg = NULL;
23256 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23257 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23258 VAR_INIT_STATUS_INITIALIZED);
23259 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23260 {
23261 rtx mem = XEXP (XEXP (arg, 0), 0);
23262 reg = mem_loc_descriptor (XEXP (mem, 0),
23263 get_address_mode (mem),
23264 GET_MODE (mem),
23265 VAR_INIT_STATUS_INITIALIZED);
23266 }
23267 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23268 == DEBUG_PARAMETER_REF)
23269 {
23270 tree tdecl
23271 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23272 tdie = lookup_decl_die (tdecl);
23273 if (tdie == NULL)
23274 continue;
23275 arg_decl = tdecl;
23276 }
23277 else
23278 continue;
23279 if (reg == NULL
23280 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23281 != DEBUG_PARAMETER_REF)
23282 continue;
23283 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23284 VOIDmode,
23285 VAR_INIT_STATUS_INITIALIZED);
23286 if (val == NULL)
23287 continue;
23288 if (die == NULL)
23289 die = gen_call_site_die (decl, subr_die, ca_loc);
23290 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23291 NULL_TREE);
23292 add_desc_attribute (cdie, arg_decl);
23293 if (reg != NULL)
23294 add_AT_loc (cdie, DW_AT_location, reg);
23295 else if (tdie != NULL)
23296 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23297 tdie);
23298 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23299 if (next_arg != XEXP (arg, 1))
23300 {
23301 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23302 if (mode == VOIDmode)
23303 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23304 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23305 0), 1),
23306 mode, VOIDmode,
23307 VAR_INIT_STATUS_INITIALIZED);
23308 if (val != NULL)
23309 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23310 val);
23311 }
23312 }
23313 if (die == NULL
23314 && (ca_loc->symbol_ref || tloc))
23315 die = gen_call_site_die (decl, subr_die, ca_loc);
23316 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23317 {
23318 dw_loc_descr_ref tval = NULL;
23319
23320 if (tloc != NULL_RTX)
23321 tval = mem_loc_descriptor (tloc,
23322 GET_MODE (tloc) == VOIDmode
23323 ? Pmode : GET_MODE (tloc),
23324 VOIDmode,
23325 VAR_INIT_STATUS_INITIALIZED);
23326 if (tval)
23327 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23328 else if (tlocc != NULL_RTX)
23329 {
23330 tval = mem_loc_descriptor (tlocc,
23331 GET_MODE (tlocc) == VOIDmode
23332 ? Pmode : GET_MODE (tlocc),
23333 VOIDmode,
23334 VAR_INIT_STATUS_INITIALIZED);
23335 if (tval)
23336 add_AT_loc (die,
23337 dwarf_AT (DW_AT_call_target_clobbered),
23338 tval);
23339 }
23340 }
23341 if (die != NULL)
23342 {
23343 call_site_note_count++;
23344 if (ca_loc->tail_call_p)
23345 tail_call_site_note_count++;
23346 }
23347 }
23348 }
23349 call_arg_locations = NULL;
23350 call_arg_loc_last = NULL;
23351 if (tail_call_site_count >= 0
23352 && tail_call_site_count == tail_call_site_note_count
23353 && (!dwarf_strict || dwarf_version >= 5))
23354 {
23355 if (call_site_count >= 0
23356 && call_site_count == call_site_note_count)
23357 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23358 else
23359 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23360 }
23361 call_site_count = -1;
23362 tail_call_site_count = -1;
23363 }
23364
23365 /* Mark used types after we have created DIEs for the function's scopes. */
23366 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23367 }
23368
23369 /* Returns a hash value for X (which really is a die_struct). */
23370
23371 hashval_t
23372 block_die_hasher::hash (die_struct *d)
23373 {
23374 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23375 }
23376
23377 /* Return true if the decl_id and die_parent of die_struct X are the same
23378 as those of die_struct Y. */
23379
23380 bool
23381 block_die_hasher::equal (die_struct *x, die_struct *y)
23382 {
23383 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23384 }
23385
23386 /* Hold information about markers for inlined entry points. */
23387 struct GTY ((for_user)) inline_entry_data
23388 {
23389 /* The block that's the inlined_function_outer_scope for an inlined
23390 function. */
23391 tree block;
23392
23393 /* The label at the inlined entry point. */
23394 const char *label_pfx;
23395 unsigned int label_num;
23396
23397 /* The view number to be used as the inlined entry point. */
23398 var_loc_view view;
23399 };
23400
23401 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23402 {
23403 typedef tree compare_type;
23404 static inline hashval_t hash (const inline_entry_data *);
23405 static inline bool equal (const inline_entry_data *, const_tree);
23406 };
23407
23408 /* Hash table routines for inline_entry_data. */
23409
23410 inline hashval_t
23411 inline_entry_data_hasher::hash (const inline_entry_data *data)
23412 {
23413 return htab_hash_pointer (data->block);
23414 }
23415
23416 inline bool
23417 inline_entry_data_hasher::equal (const inline_entry_data *data,
23418 const_tree block)
23419 {
23420 return data->block == block;
23421 }
23422
23423 /* Inlined entry points pending DIE creation in this compilation unit. */
23424
23425 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
23426
23427
23428 /* Return TRUE if DECL, which may have been previously generated as
23429 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23430 true if decl (or its origin) is either an extern declaration or a
23431 class/namespace scoped declaration.
23432
23433 The declare_in_namespace support causes us to get two DIEs for one
23434 variable, both of which are declarations. We want to avoid
23435 considering one to be a specification, so we must test for
23436 DECLARATION and DW_AT_declaration. */
23437 static inline bool
23438 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23439 {
23440 return (old_die && TREE_STATIC (decl) && !declaration
23441 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23442 }
23443
23444 /* Return true if DECL is a local static. */
23445
23446 static inline bool
23447 local_function_static (tree decl)
23448 {
23449 gcc_assert (VAR_P (decl));
23450 return TREE_STATIC (decl)
23451 && DECL_CONTEXT (decl)
23452 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23453 }
23454
23455 /* Generate a DIE to represent a declared data object.
23456 Either DECL or ORIGIN must be non-null. */
23457
23458 static void
23459 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23460 {
23461 HOST_WIDE_INT off = 0;
23462 tree com_decl;
23463 tree decl_or_origin = decl ? decl : origin;
23464 tree ultimate_origin;
23465 dw_die_ref var_die;
23466 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23467 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23468 || class_or_namespace_scope_p (context_die));
23469 bool specialization_p = false;
23470 bool no_linkage_name = false;
23471
23472 /* Although C++ inline static data members have definitions inside the
23473 class, force the first DIE to be a declaration, then let gen_member_die
23474 reparent it to the class context and call gen_variable_die again
23475 to create the out-of-class DIE for the definition. */
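  /* For illustration only (a sketch): given

       struct S { static inline int m = 42; };

     the in-class definition of "m" is first emitted as a declaration DIE
     (which gen_member_die reparents into the class), and gen_variable_die
     is invoked again later to create the defining, out-of-class
     DW_TAG_variable DIE. */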
23476 if (!declaration
23477 && old_die == NULL
23478 && decl
23479 && DECL_CONTEXT (decl)
23480 && TYPE_P (DECL_CONTEXT (decl))
23481 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23482 {
23483 declaration = true;
23484 if (dwarf_version < 5)
23485 no_linkage_name = true;
23486 }
23487
23488 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23489 if (decl || ultimate_origin)
23490 origin = ultimate_origin;
23491 com_decl = fortran_common (decl_or_origin, &off);
23492
23493 /* A symbol in a COMMON block gets emitted as a child of the common block
23494 DIE, in the form of a data member. */
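  /* For illustration only (a sketch): a Fortran common block such as

       subroutine sub
         integer a, b
         common /blk/ a, b
       end subroutine

     yields a DW_TAG_common_block DIE for "blk" whose children are
     DW_TAG_variable DIEs for "a" and "b", each located at its offset
     within the block. */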
23495 if (com_decl)
23496 {
23497 dw_die_ref com_die;
23498 dw_loc_list_ref loc = NULL;
23499 die_node com_die_arg;
23500
23501 var_die = lookup_decl_die (decl_or_origin);
23502 if (var_die)
23503 {
23504 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23505 {
23506 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23507 if (loc)
23508 {
23509 if (off)
23510 {
23511 /* Optimize the common case. */
23512 if (single_element_loc_list_p (loc)
23513 && loc->expr->dw_loc_opc == DW_OP_addr
23514 && loc->expr->dw_loc_next == NULL
23515 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23516 == SYMBOL_REF)
23517 {
23518 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23519 loc->expr->dw_loc_oprnd1.v.val_addr
23520 = plus_constant (GET_MODE (x), x , off);
23521 }
23522 else
23523 loc_list_plus_const (loc, off);
23524 }
23525 add_AT_location_description (var_die, DW_AT_location, loc);
23526 remove_AT (var_die, DW_AT_declaration);
23527 }
23528 }
23529 return;
23530 }
23531
23532 if (common_block_die_table == NULL)
23533 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23534
23535 com_die_arg.decl_id = DECL_UID (com_decl);
23536 com_die_arg.die_parent = context_die;
23537 com_die = common_block_die_table->find (&com_die_arg);
23538 if (! early_dwarf)
23539 loc = loc_list_from_tree (com_decl, 2, NULL);
23540 if (com_die == NULL)
23541 {
23542 const char *cnam
23543 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23544 die_node **slot;
23545
23546 com_die = new_die (DW_TAG_common_block, context_die, decl);
23547 add_name_and_src_coords_attributes (com_die, com_decl);
23548 if (loc)
23549 {
23550 add_AT_location_description (com_die, DW_AT_location, loc);
23551 /* Avoid sharing the same loc descriptor between
23552 DW_TAG_common_block and DW_TAG_variable. */
23553 loc = loc_list_from_tree (com_decl, 2, NULL);
23554 }
23555 else if (DECL_EXTERNAL (decl_or_origin))
23556 add_AT_flag (com_die, DW_AT_declaration, 1);
23557 if (want_pubnames ())
23558 add_pubname_string (cnam, com_die); /* ??? needed? */
23559 com_die->decl_id = DECL_UID (com_decl);
23560 slot = common_block_die_table->find_slot (com_die, INSERT);
23561 *slot = com_die;
23562 }
23563 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23564 {
23565 add_AT_location_description (com_die, DW_AT_location, loc);
23566 loc = loc_list_from_tree (com_decl, 2, NULL);
23567 remove_AT (com_die, DW_AT_declaration);
23568 }
23569 var_die = new_die (DW_TAG_variable, com_die, decl);
23570 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23571 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23572 decl_quals (decl_or_origin), false,
23573 context_die);
23574 add_alignment_attribute (var_die, decl);
23575 add_AT_flag (var_die, DW_AT_external, 1);
23576 if (loc)
23577 {
23578 if (off)
23579 {
23580 /* Optimize the common case. */
23581 if (single_element_loc_list_p (loc)
23582 && loc->expr->dw_loc_opc == DW_OP_addr
23583 && loc->expr->dw_loc_next == NULL
23584 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23585 {
23586 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23587 loc->expr->dw_loc_oprnd1.v.val_addr
23588 = plus_constant (GET_MODE (x), x, off);
23589 }
23590 else
23591 loc_list_plus_const (loc, off);
23592 }
23593 add_AT_location_description (var_die, DW_AT_location, loc);
23594 }
23595 else if (DECL_EXTERNAL (decl_or_origin))
23596 add_AT_flag (var_die, DW_AT_declaration, 1);
23597 if (decl)
23598 equate_decl_number_to_die (decl, var_die);
23599 return;
23600 }
23601
23602 if (old_die)
23603 {
23604 if (declaration)
23605 {
23606 /* A declaration that has been previously dumped needs no
23607 further annotations, since it doesn't need location info on
23608 the second pass. */
23609 return;
23610 }
23611 else if (decl_will_get_specification_p (old_die, decl, declaration)
23612 && !get_AT (old_die, DW_AT_specification))
23613 {
23614 /* Fall through so we can make a new variable DIE along with a
23615 DW_AT_specification. */
23616 }
23617 else if (origin && old_die->die_parent != context_die)
23618 {
23619 /* If we will be creating an inlined instance, we need a
23620 new DIE that will get annotated with
23621 DW_AT_abstract_origin. */
23622 gcc_assert (!DECL_ABSTRACT_P (decl));
23623 }
23624 else
23625 {
23626 /* If a DIE was dumped early, it still needs location info.
23627 Skip to where we fill the location bits. */
23628 var_die = old_die;
23629
23630 /* ??? In LTRANS we cannot annotate early created variably
23631 modified type DIEs without copying them and adjusting all
23632 references to them, so we dump them again. Also add a
23633 reference to them, but beware of a -g0 compile and -g link,
23634 in which case the reference will already be present. */
23635 tree type = TREE_TYPE (decl_or_origin);
23636 if (in_lto_p
23637 && ! get_AT (var_die, DW_AT_type)
23638 && variably_modified_type_p
23639 (type, decl_function_context (decl_or_origin)))
23640 {
23641 if (decl_by_reference_p (decl_or_origin))
23642 add_type_attribute (var_die, TREE_TYPE (type),
23643 TYPE_UNQUALIFIED, false, context_die);
23644 else
23645 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23646 false, context_die);
23647 }
23648
23649 goto gen_variable_die_location;
23650 }
23651 }
23652
23653 /* For static data members, the declaration in the class is supposed
23654 to have the DW_TAG_member tag in DWARF 3 and 4, and we emit it that
23655 way for compatibility in DWARF 2 as well; the specification should
23656 still be a DW_TAG_variable referencing the DW_TAG_member DIE. */
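  /* For illustration only (a sketch):

       struct S { static int m; };    in-class declaration: DW_TAG_member
                                      (DW_TAG_variable from DWARF 5 on)
       int S::m;                      out-of-class definition: DW_TAG_variable
                                      whose DW_AT_specification references
                                      the declaration DIE above. */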
23657 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23658 var_die = new_die (DW_TAG_member, context_die, decl);
23659 else
23660 var_die = new_die (DW_TAG_variable, context_die, decl);
23661
23662 if (origin != NULL)
23663 add_abstract_origin_attribute (var_die, origin);
23664
23665 /* Loop unrolling can create multiple blocks that refer to the same
23666 static variable, so we must test for the DW_AT_declaration flag.
23667
23668 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23669 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23670 sharing them.
23671
23672 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23673 else if (decl_will_get_specification_p (old_die, decl, declaration))
23674 {
23675 /* This is a definition of a C++ class level static. */
23676 add_AT_specification (var_die, old_die);
23677 specialization_p = true;
23678 if (DECL_NAME (decl))
23679 {
23680 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23681 struct dwarf_file_data * file_index = lookup_filename (s.file);
23682
23683 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23684 add_AT_file (var_die, DW_AT_decl_file, file_index);
23685
23686 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23687 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23688
23689 if (debug_column_info
23690 && s.column
23691 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23692 != (unsigned) s.column))
23693 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23694
23695 if (old_die->die_tag == DW_TAG_member)
23696 add_linkage_name (var_die, decl);
23697 }
23698 }
23699 else
23700 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23701
23702 if ((origin == NULL && !specialization_p)
23703 || (origin != NULL
23704 && !DECL_ABSTRACT_P (decl_or_origin)
23705 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23706 decl_function_context
23707 (decl_or_origin))))
23708 {
23709 tree type = TREE_TYPE (decl_or_origin);
23710
23711 if (decl_by_reference_p (decl_or_origin))
23712 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23713 context_die);
23714 else
23715 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23716 context_die);
23717 }
23718
23719 if (origin == NULL && !specialization_p)
23720 {
23721 if (TREE_PUBLIC (decl))
23722 add_AT_flag (var_die, DW_AT_external, 1);
23723
23724 if (DECL_ARTIFICIAL (decl))
23725 add_AT_flag (var_die, DW_AT_artificial, 1);
23726
23727 add_alignment_attribute (var_die, decl);
23728
23729 add_accessibility_attribute (var_die, decl);
23730 }
23731
23732 if (declaration)
23733 add_AT_flag (var_die, DW_AT_declaration, 1);
23734
23735 if (decl && (DECL_ABSTRACT_P (decl)
23736 || !old_die || is_declaration_die (old_die)))
23737 equate_decl_number_to_die (decl, var_die);
23738
23739 gen_variable_die_location:
23740 if (! declaration
23741 && (! DECL_ABSTRACT_P (decl_or_origin)
23742 /* Local static vars are shared between all clones/inlines,
23743 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23744 already set. */
23745 || (VAR_P (decl_or_origin)
23746 && TREE_STATIC (decl_or_origin)
23747 && DECL_RTL_SET_P (decl_or_origin))))
23748 {
23749 if (early_dwarf)
23750 add_pubname (decl_or_origin, var_die);
23751 else
23752 add_location_or_const_value_attribute (var_die, decl_or_origin,
23753 decl == NULL);
23754 }
23755 else
23756 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23757
23758 if ((dwarf_version >= 4 || !dwarf_strict)
23759 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23760 DW_AT_const_expr) == 1
23761 && !get_AT (var_die, DW_AT_const_expr)
23762 && !specialization_p)
23763 add_AT_flag (var_die, DW_AT_const_expr, 1);
23764
23765 if (!dwarf_strict)
23766 {
23767 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23768 DW_AT_inline);
23769 if (inl != -1
23770 && !get_AT (var_die, DW_AT_inline)
23771 && !specialization_p)
23772 add_AT_unsigned (var_die, DW_AT_inline, inl);
23773 }
23774 }
23775
23776 /* Generate a DIE to represent a named constant. */
23777
23778 static void
23779 gen_const_die (tree decl, dw_die_ref context_die)
23780 {
23781 dw_die_ref const_die;
23782 tree type = TREE_TYPE (decl);
23783
23784 const_die = lookup_decl_die (decl);
23785 if (const_die)
23786 return;
23787
23788 const_die = new_die (DW_TAG_constant, context_die, decl);
23789 equate_decl_number_to_die (decl, const_die);
23790 add_name_and_src_coords_attributes (const_die, decl);
23791 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
23792 if (TREE_PUBLIC (decl))
23793 add_AT_flag (const_die, DW_AT_external, 1);
23794 if (DECL_ARTIFICIAL (decl))
23795 add_AT_flag (const_die, DW_AT_artificial, 1);
23796 tree_add_const_value_attribute_for_decl (const_die, decl);
23797 }
23798
23799 /* Generate a DIE to represent a label identifier. */
23800
23801 static void
23802 gen_label_die (tree decl, dw_die_ref context_die)
23803 {
23804 tree origin = decl_ultimate_origin (decl);
23805 dw_die_ref lbl_die = lookup_decl_die (decl);
23806 rtx insn;
23807 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23808
23809 if (!lbl_die)
23810 {
23811 lbl_die = new_die (DW_TAG_label, context_die, decl);
23812 equate_decl_number_to_die (decl, lbl_die);
23813
23814 if (origin != NULL)
23815 add_abstract_origin_attribute (lbl_die, origin);
23816 else
23817 add_name_and_src_coords_attributes (lbl_die, decl);
23818 }
23819
23820 if (DECL_ABSTRACT_P (decl))
23821 equate_decl_number_to_die (decl, lbl_die);
23822 else if (! early_dwarf)
23823 {
23824 insn = DECL_RTL_IF_SET (decl);
23825
23826 /* Deleted labels are programmer-specified labels which have been
23827 eliminated because of various optimizations. We still emit them
23828 here so that it is possible to put breakpoints on them. */
23829 if (insn
23830 && (LABEL_P (insn)
23831 || ((NOTE_P (insn)
23832 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
23833 {
23834 /* When optimization is enabled (via -O) some parts of the compiler
23835 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
23836 represent source-level labels which were explicitly declared by
23837 the user. This really shouldn't be happening though, so catch
23838 it if it ever does happen. */
23839 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
23840
23841 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
23842 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23843 }
23844 else if (insn
23845 && NOTE_P (insn)
23846 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
23847 && CODE_LABEL_NUMBER (insn) != -1)
23848 {
23849 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
23850 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23851 }
23852 }
23853 }
23854
23855 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
23856 attributes to the DIE for a block STMT, to describe where the inlined
23857 function was called from. This is similar to add_src_coords_attributes. */
23858
23859 static inline void
23860 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
23861 {
23862 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
23863
23864 if (dwarf_version >= 3 || !dwarf_strict)
23865 {
23866 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
23867 add_AT_unsigned (die, DW_AT_call_line, s.line);
23868 if (debug_column_info && s.column)
23869 add_AT_unsigned (die, DW_AT_call_column, s.column);
23870 }
23871 }
23872
23873
23874 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
23875 Add low_pc and high_pc attributes to the DIE for a block STMT. */
23876
23877 static inline void
23878 add_high_low_attributes (tree stmt, dw_die_ref die)
23879 {
23880 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23881
23882 if (inline_entry_data **iedp
23883 = !inline_entry_data_table ? NULL
23884 : inline_entry_data_table->find_slot_with_hash (stmt,
23885 htab_hash_pointer (stmt),
23886 NO_INSERT))
23887 {
23888 inline_entry_data *ied = *iedp;
23889 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
23890 gcc_assert (debug_inline_points);
23891 gcc_assert (inlined_function_outer_scope_p (stmt));
23892
23893 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
23894 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23895
23896 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
23897 && !dwarf_strict)
23898 {
23899 if (!output_asm_line_debug_info ())
23900 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
23901 else
23902 {
23903 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
23904 /* FIXME: this will resolve to a small number. Could we
23905 possibly emit smaller data? Ideally we'd emit a
23906 uleb128, but that would make the size of DIEs
23907 impossible for the compiler to compute, since it's
23908 the assembler that computes the value of the view
23909 label in this case. Ideally, we'd have a single form
23910 encompassing both the address and the view, and
23911 indirecting them through a table might make things
23912 easier, but even that would be more wasteful,
23913 space-wise, than what we have now. */
23914 add_AT_symview (die, DW_AT_GNU_entry_view, label);
23915 }
23916 }
23917
23918 inline_entry_data_table->clear_slot (iedp);
23919 }
23920
23921 if (BLOCK_FRAGMENT_CHAIN (stmt)
23922 && (dwarf_version >= 3 || !dwarf_strict))
23923 {
23924 tree chain, superblock = NULL_TREE;
23925 dw_die_ref pdie;
23926 dw_attr_node *attr = NULL;
23927
23928 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
23929 {
23930 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23931 BLOCK_NUMBER (stmt));
23932 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23933 }
23934
23935 /* Optimize duplicate .debug_ranges lists or even tails of lists.
23936 If this BLOCK has the same ranges as its supercontext, look up
23937 the DW_AT_ranges attribute in the supercontext (and recursively
23938 so), verify that the ranges_table contains the right values and
23939 use it instead of adding a new .debug_ranges entry. */
23940 for (chain = stmt, pdie = die;
23941 BLOCK_SAME_RANGE (chain);
23942 chain = BLOCK_SUPERCONTEXT (chain))
23943 {
23944 dw_attr_node *new_attr;
23945
23946 pdie = pdie->die_parent;
23947 if (pdie == NULL)
23948 break;
23949 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
23950 break;
23951 new_attr = get_AT (pdie, DW_AT_ranges);
23952 if (new_attr == NULL
23953 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
23954 break;
23955 attr = new_attr;
23956 superblock = BLOCK_SUPERCONTEXT (chain);
23957 }
23958 if (attr != NULL
23959 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
23960 == BLOCK_NUMBER (superblock))
23961 && BLOCK_FRAGMENT_CHAIN (superblock))
23962 {
23963 unsigned long off = attr->dw_attr_val.v.val_offset;
23964 unsigned long supercnt = 0, thiscnt = 0;
23965 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
23966 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
23967 {
23968 ++supercnt;
23969 gcc_checking_assert ((*ranges_table)[off + supercnt].num
23970 == BLOCK_NUMBER (chain));
23971 }
23972 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
23973 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
23974 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
23975 ++thiscnt;
23976 gcc_assert (supercnt >= thiscnt);
23977 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
23978 false);
23979 note_rnglist_head (off + supercnt - thiscnt);
23980 return;
23981 }
23982
23983 unsigned int offset = add_ranges (stmt, true);
23984 add_AT_range_list (die, DW_AT_ranges, offset, false);
23985 note_rnglist_head (offset);
23986
23987 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
23988 chain = BLOCK_FRAGMENT_CHAIN (stmt);
23989 do
23990 {
23991 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
23992 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
23993 chain = BLOCK_FRAGMENT_CHAIN (chain);
23994 }
23995 while (chain);
23996 add_ranges (NULL);
23997 }
23998 else
23999 {
24000 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
24001 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24002 BLOCK_NUMBER (stmt));
24003 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
24004 BLOCK_NUMBER (stmt));
24005 add_AT_low_high_pc (die, label, label_high, false);
24006 }
24007 }
24008
24009 /* Generate a DIE for a lexical block. */
24010
24011 static void
24012 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
24013 {
24014 dw_die_ref old_die = BLOCK_DIE (stmt);
24015 dw_die_ref stmt_die = NULL;
24016 if (!old_die)
24017 {
24018 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24019 BLOCK_DIE (stmt) = stmt_die;
24020 }
24021
24022 if (BLOCK_ABSTRACT (stmt))
24023 {
24024 if (old_die)
24025 {
24026 /* This must have been generated early and it won't even
24027 need location information since it's a DW_AT_inline
24028 function. */
24029 if (flag_checking)
24030 for (dw_die_ref c = context_die; c; c = c->die_parent)
24031 if (c->die_tag == DW_TAG_inlined_subroutine
24032 || c->die_tag == DW_TAG_subprogram)
24033 {
24034 gcc_assert (get_AT (c, DW_AT_inline));
24035 break;
24036 }
24037 return;
24038 }
24039 }
24040 else if (BLOCK_ABSTRACT_ORIGIN (stmt))
24041 {
24042 /* If this is an inlined instance, create a new lexical die for
24043 anything below to attach DW_AT_abstract_origin to. */
24044 if (old_die)
24045 {
24046 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24047 BLOCK_DIE (stmt) = stmt_die;
24048 old_die = NULL;
24049 }
24050
24051 tree origin = block_ultimate_origin (stmt);
24052 if (origin != NULL_TREE && origin != stmt)
24053 add_abstract_origin_attribute (stmt_die, origin);
24054 }
24055
24056 if (old_die)
24057 stmt_die = old_die;
24058
24059   /* A non-abstract block whose blocks have already been reordered
24060 should have the instruction range for this block. If so, set the
24061 high/low attributes. */
24062 if (!early_dwarf && !BLOCK_ABSTRACT (stmt) && TREE_ASM_WRITTEN (stmt))
24063 {
24064 gcc_assert (stmt_die);
24065 add_high_low_attributes (stmt, stmt_die);
24066 }
24067
24068 decls_for_scope (stmt, stmt_die);
24069 }
24070
24071 /* Generate a DIE for an inlined subprogram. */
24072
24073 static void
24074 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
24075 {
24076 tree decl;
24077
24078   /* The instance of the function that is effectively being inlined shall not
24079 be abstract. */
24080 gcc_assert (! BLOCK_ABSTRACT (stmt));
24081
24082 decl = block_ultimate_origin (stmt);
24083
24084 /* Make sure any inlined functions are known to be inlineable. */
24085 gcc_checking_assert (DECL_ABSTRACT_P (decl)
24086 || cgraph_function_possibly_inlined_p (decl));
24087
24088 if (! BLOCK_ABSTRACT (stmt))
24089 {
24090 dw_die_ref subr_die
24091 = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
24092
24093 if (call_arg_locations || debug_inline_points)
24094 BLOCK_DIE (stmt) = subr_die;
24095 add_abstract_origin_attribute (subr_die, decl);
24096 if (TREE_ASM_WRITTEN (stmt))
24097 add_high_low_attributes (stmt, subr_die);
24098 add_call_src_coords_attributes (stmt, subr_die);
24099
24100 decls_for_scope (stmt, subr_die);
24101 }
24102 }
24103
24104 /* Generate a DIE for a field in a record or structure.  CTX is required: see
24105 the comment for VLR_CONTEXT. */
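        /* For illustration: given

             struct s
             {
               unsigned int f : 3;
             };

           the DW_TAG_member DIE generated for F below also carries byte size,
           bit size and bit offset attributes, because DECL_BIT_FIELD_TYPE is
           set for it.  */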
24106
24107 static void
24108 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24109 {
24110 dw_die_ref decl_die;
24111
24112 if (TREE_TYPE (decl) == error_mark_node)
24113 return;
24114
24115 decl_die = new_die (DW_TAG_member, context_die, decl);
24116 add_name_and_src_coords_attributes (decl_die, decl);
24117 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24118 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24119 context_die);
24120
24121 if (DECL_BIT_FIELD_TYPE (decl))
24122 {
24123 add_byte_size_attribute (decl_die, decl);
24124 add_bit_size_attribute (decl_die, decl);
24125 add_bit_offset_attribute (decl_die, decl, ctx);
24126 }
24127
24128 add_alignment_attribute (decl_die, decl);
24129
24130 /* If we have a variant part offset, then we are supposed to process a member
24131 of a QUAL_UNION_TYPE, which is how we represent variant parts in
24132 trees. */
24133 gcc_assert (ctx->variant_part_offset == NULL_TREE
24134 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
24135 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24136 add_data_member_location_attribute (decl_die, decl, ctx);
24137
24138 if (DECL_ARTIFICIAL (decl))
24139 add_AT_flag (decl_die, DW_AT_artificial, 1);
24140
24141 add_accessibility_attribute (decl_die, decl);
24142
24143 /* Equate decl number to die, so that we can look up this decl later on. */
24144 equate_decl_number_to_die (decl, decl_die);
24145 }
24146
24147 /* Generate a DIE for a pointer to a member type. TYPE can be an
24148 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24149 pointer to member function. */
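        /* For illustration (assuming the usual C++ front-end lowering): given

             struct C { int i; };
             int C::*pm = &C::i;

           the type of PM arrives here as an OFFSET_TYPE whose
           TYPE_OFFSET_BASETYPE is C and whose TREE_TYPE is int, so the
           DW_TAG_ptr_to_member_type DIE gets DW_AT_containing_type pointing
           at C's DIE and DW_AT_type pointing at int's DIE.  */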
24150
24151 static void
24152 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24153 {
24154 if (lookup_type_die (type))
24155 return;
24156
24157 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24158 scope_die_for (type, context_die), type);
24159
24160 equate_type_number_to_die (type, ptr_die);
24161 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24162 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24163 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24164 context_die);
24165 add_alignment_attribute (ptr_die, type);
24166
24167 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24168 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24169 {
24170 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24171 add_AT_loc (ptr_die, DW_AT_use_location, op);
24172 }
24173 }
24174
24175 static char *producer_string;
24176
24177 /* Return a heap-allocated producer string including the command-line options
24178    if -grecord-gcc-switches is in effect.  */
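        /* For illustration only (the exact contents depend on the command line
           and target): with -grecord-gcc-switches the string might look like

             "GNU C17 8.2.0 -mtune=generic -march=x86-64 -O2"

           i.e. the language and version strings followed by the recorded,
           non-ignored switches.  */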
24179
24180 static char *
24181 gen_producer_string (void)
24182 {
24183 size_t j;
24184 auto_vec<const char *> switches;
24185 const char *language_string = lang_hooks.name;
24186 char *producer, *tail;
24187 const char *p;
24188 size_t len = dwarf_record_gcc_switches ? 0 : 3;
24189 size_t plen = strlen (language_string) + 1 + strlen (version_string);
24190
24191 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
24192 switch (save_decoded_options[j].opt_index)
24193 {
24194 case OPT_o:
24195 case OPT_d:
24196 case OPT_dumpbase:
24197 case OPT_dumpdir:
24198 case OPT_auxbase:
24199 case OPT_auxbase_strip:
24200 case OPT_quiet:
24201 case OPT_version:
24202 case OPT_v:
24203 case OPT_w:
24204 case OPT_L:
24205 case OPT_D:
24206 case OPT_I:
24207 case OPT_U:
24208 case OPT_SPECIAL_unknown:
24209 case OPT_SPECIAL_ignore:
24210 case OPT_SPECIAL_deprecated:
24211 case OPT_SPECIAL_program_name:
24212 case OPT_SPECIAL_input_file:
24213 case OPT_grecord_gcc_switches:
24214 case OPT__output_pch_:
24215 case OPT_fdiagnostics_show_location_:
24216 case OPT_fdiagnostics_show_option:
24217 case OPT_fdiagnostics_show_caret:
24218 case OPT_fdiagnostics_show_labels:
24219 case OPT_fdiagnostics_show_line_numbers:
24220 case OPT_fdiagnostics_color_:
24221 case OPT_fverbose_asm:
24222 case OPT____:
24223 case OPT__sysroot_:
24224 case OPT_nostdinc:
24225 case OPT_nostdinc__:
24226 case OPT_fpreprocessed:
24227 case OPT_fltrans_output_list_:
24228 case OPT_fresolution_:
24229 case OPT_fdebug_prefix_map_:
24230 case OPT_fmacro_prefix_map_:
24231 case OPT_ffile_prefix_map_:
24232 case OPT_fcompare_debug:
24233 case OPT_fchecking:
24234 case OPT_fchecking_:
24235 /* Ignore these. */
24236 continue;
24237 default:
24238 if (cl_options[save_decoded_options[j].opt_index].flags
24239 & CL_NO_DWARF_RECORD)
24240 continue;
24241 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
24242 == '-');
24243 switch (save_decoded_options[j].canonical_option[0][1])
24244 {
24245 case 'M':
24246 case 'i':
24247 case 'W':
24248 continue;
24249 case 'f':
24250 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
24251 "dump", 4) == 0)
24252 continue;
24253 break;
24254 default:
24255 break;
24256 }
24257 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
24258 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
24259 break;
24260 }
24261
24262 producer = XNEWVEC (char, plen + 1 + len + 1);
24263 tail = producer;
24264 sprintf (tail, "%s %s", language_string, version_string);
24265 tail += plen;
24266
24267 FOR_EACH_VEC_ELT (switches, j, p)
24268 {
24269 len = strlen (p);
24270 *tail = ' ';
24271 memcpy (tail + 1, p, len);
24272 tail += len + 1;
24273 }
24274
24275 *tail = '\0';
24276 return producer;
24277 }
24278
24279 /* Given two C and/or C++ language/version strings, return the "highest" one.
24280 C++ is assumed to be "higher" than C in this case. Used for merging
24281 LTO translation unit languages. */
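        /* For example:

             highest_c_language ("GNU C11", "GNU C++14")  => "GNU C++14"
             highest_c_language ("GNU C89", "GNU C99")    => "GNU C99"  */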
24282 static const char *
24283 highest_c_language (const char *lang1, const char *lang2)
24284 {
24285 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24286 return "GNU C++17";
24287 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24288 return "GNU C++14";
24289 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24290 return "GNU C++11";
24291 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24292 return "GNU C++98";
24293
24294 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24295 return "GNU C17";
24296 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24297 return "GNU C11";
24298 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24299 return "GNU C99";
24300 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24301 return "GNU C89";
24302
24303 gcc_unreachable ();
24304 }
24305
24306
24307 /* Generate the DIE for the compilation unit. */
24308
24309 static dw_die_ref
24310 gen_compile_unit_die (const char *filename)
24311 {
24312 dw_die_ref die;
24313 const char *language_string = lang_hooks.name;
24314 int language;
24315
24316 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24317
24318 if (filename)
24319 {
24320 add_name_attribute (die, filename);
24321 /* Don't add cwd for <built-in>. */
24322 if (filename[0] != '<')
24323 add_comp_dir_attribute (die);
24324 }
24325
24326 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24327
24328 /* If our producer is LTO try to figure out a common language to use
24329 from the global list of translation units. */
24330 if (strcmp (language_string, "GNU GIMPLE") == 0)
24331 {
24332 unsigned i;
24333 tree t;
24334 const char *common_lang = NULL;
24335
24336 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24337 {
24338 if (!TRANSLATION_UNIT_LANGUAGE (t))
24339 continue;
24340 if (!common_lang)
24341 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24342 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24343 ;
24344 else if (strncmp (common_lang, "GNU C", 5) == 0
24345 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24346 /* Mixing C and C++ is ok, use C++ in that case. */
24347 common_lang = highest_c_language (common_lang,
24348 TRANSLATION_UNIT_LANGUAGE (t));
24349 else
24350 {
24351 /* Fall back to C. */
24352 common_lang = NULL;
24353 break;
24354 }
24355 }
24356
24357 if (common_lang)
24358 language_string = common_lang;
24359 }
24360
24361 language = DW_LANG_C;
24362 if (strncmp (language_string, "GNU C", 5) == 0
24363 && ISDIGIT (language_string[5]))
24364 {
24365 language = DW_LANG_C89;
24366 if (dwarf_version >= 3 || !dwarf_strict)
24367 {
24368 if (strcmp (language_string, "GNU C89") != 0)
24369 language = DW_LANG_C99;
24370
24371 if (dwarf_version >= 5 /* || !dwarf_strict */)
24372 if (strcmp (language_string, "GNU C11") == 0
24373 || strcmp (language_string, "GNU C17") == 0)
24374 language = DW_LANG_C11;
24375 }
24376 }
24377 else if (strncmp (language_string, "GNU C++", 7) == 0)
24378 {
24379 language = DW_LANG_C_plus_plus;
24380 if (dwarf_version >= 5 /* || !dwarf_strict */)
24381 {
24382 if (strcmp (language_string, "GNU C++11") == 0)
24383 language = DW_LANG_C_plus_plus_11;
24384 else if (strcmp (language_string, "GNU C++14") == 0)
24385 language = DW_LANG_C_plus_plus_14;
24386 else if (strcmp (language_string, "GNU C++17") == 0)
24387 /* For now. */
24388 language = DW_LANG_C_plus_plus_14;
24389 }
24390 }
24391 else if (strcmp (language_string, "GNU F77") == 0)
24392 language = DW_LANG_Fortran77;
24393 else if (dwarf_version >= 3 || !dwarf_strict)
24394 {
24395 if (strcmp (language_string, "GNU Ada") == 0)
24396 language = DW_LANG_Ada95;
24397 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24398 {
24399 language = DW_LANG_Fortran95;
24400 if (dwarf_version >= 5 /* || !dwarf_strict */)
24401 {
24402 if (strcmp (language_string, "GNU Fortran2003") == 0)
24403 language = DW_LANG_Fortran03;
24404 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24405 language = DW_LANG_Fortran08;
24406 }
24407 }
24408 else if (strcmp (language_string, "GNU Objective-C") == 0)
24409 language = DW_LANG_ObjC;
24410 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24411 language = DW_LANG_ObjC_plus_plus;
24412 else if (dwarf_version >= 5 || !dwarf_strict)
24413 {
24414 if (strcmp (language_string, "GNU Go") == 0)
24415 language = DW_LANG_Go;
24416 }
24417 }
24418 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24419 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24420 language = DW_LANG_Fortran90;
24421 /* Likewise for Ada. */
24422 else if (strcmp (language_string, "GNU Ada") == 0)
24423 language = DW_LANG_Ada83;
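
          /* For illustration: a language_string of "GNU C++14" yields
             DW_LANG_C_plus_plus_14 above only for DWARF 5 and plain
             DW_LANG_C_plus_plus otherwise, while "GNU Fortran2008" yields
             DW_LANG_Fortran08 for DWARF 5, DW_LANG_Fortran95 for DWARF 3/4
             (or non-strict DWARF 2), and the degraded DW_LANG_Fortran90
             under strict DWARF 2.  */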
24424
24425 add_AT_unsigned (die, DW_AT_language, language);
24426
24427 switch (language)
24428 {
24429 case DW_LANG_Fortran77:
24430 case DW_LANG_Fortran90:
24431 case DW_LANG_Fortran95:
24432 case DW_LANG_Fortran03:
24433 case DW_LANG_Fortran08:
24434       /* Fortran has case-insensitive identifiers and the front-end
24435 lowercases everything. */
24436 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24437 break;
24438 default:
24439 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24440 break;
24441 }
24442 return die;
24443 }
24444
24445 /* Generate the DIE for a base class. */
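        /* For illustration: given

             class B { };
             class D : protected virtual B { };

           the DW_TAG_inheritance DIE emitted below as a child of D's DIE
           refers to B through DW_AT_type and carries DW_AT_virtuality
           DW_VIRTUALITY_virtual and DW_AT_accessibility
           DW_ACCESS_protected.  */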
24446
24447 static void
24448 gen_inheritance_die (tree binfo, tree access, tree type,
24449 dw_die_ref context_die)
24450 {
24451 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24452 struct vlr_context ctx = { type, NULL };
24453
24454 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24455 context_die);
24456 add_data_member_location_attribute (die, binfo, &ctx);
24457
24458 if (BINFO_VIRTUAL_P (binfo))
24459 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24460
24461 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24462 children, otherwise the default is DW_ACCESS_public. In DWARF2
24463 the default has always been DW_ACCESS_private. */
24464 if (access == access_public_node)
24465 {
24466 if (dwarf_version == 2
24467 || context_die->die_tag == DW_TAG_class_type)
24468 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24469 }
24470 else if (access == access_protected_node)
24471 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24472 else if (dwarf_version > 2
24473 && context_die->die_tag != DW_TAG_class_type)
24474 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24475 }
24476
24477 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24478 structure. */
24479 static bool
24480 is_variant_part (tree decl)
24481 {
24482 return (TREE_CODE (decl) == FIELD_DECL
24483 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24484 }
24485
24486 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24487 return the FIELD_DECL. Return NULL_TREE otherwise. */
24488
24489 static tree
24490 analyze_discr_in_predicate (tree operand, tree struct_type)
24491 {
24492 bool continue_stripping = true;
24493 while (continue_stripping)
24494 switch (TREE_CODE (operand))
24495 {
24496 CASE_CONVERT:
24497 operand = TREE_OPERAND (operand, 0);
24498 break;
24499 default:
24500 continue_stripping = false;
24501 break;
24502 }
24503
24504 /* Match field access to members of struct_type only. */
24505 if (TREE_CODE (operand) == COMPONENT_REF
24506 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24507 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24508 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24509 return TREE_OPERAND (operand, 1);
24510 else
24511 return NULL_TREE;
24512 }
24513
24514 /* Check that SRC is a constant integer that can be represented as a native
24515 integer constant (either signed or unsigned). If so, store it into DEST and
24516 return true. Return false otherwise. */
24517
24518 static bool
24519 get_discr_value (tree src, dw_discr_value *dest)
24520 {
24521 tree discr_type = TREE_TYPE (src);
24522
24523 if (lang_hooks.types.get_debug_type)
24524 {
24525 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24526 if (debug_type != NULL)
24527 discr_type = debug_type;
24528 }
24529
24530 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24531 return false;
24532
24533 /* Signedness can vary between the original type and the debug type. This
24534 can happen for character types in Ada for instance: the character type
24535 used for code generation can be signed, to be compatible with the C one,
24536 but from a debugger point of view, it must be unsigned. */
24537 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24538 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24539
24540 if (is_orig_unsigned != is_debug_unsigned)
24541 src = fold_convert (discr_type, src);
24542
24543 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24544 return false;
24545
24546 dest->pos = is_debug_unsigned;
24547 if (is_debug_unsigned)
24548 dest->v.uval = tree_to_uhwi (src);
24549 else
24550 dest->v.sval = tree_to_shwi (src);
24551
24552 return true;
24553 }
24554
24555 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24556 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24557 store NULL_TREE in DISCR_DECL. Otherwise:
24558
24559 - store the discriminant field in STRUCT_TYPE that controls the variant
24560 part to *DISCR_DECL
24561
24562 - put in *DISCR_LISTS_P an array where for each variant, the item
24563 represents the corresponding matching list of discriminant values.
24564
24565 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24566 the above array.
24567
24568 Note that when the array is allocated (i.e. when the analysis is
24569 successful), it is up to the caller to free the array. */
24570
24571 static void
24572 analyze_variants_discr (tree variant_part_decl,
24573 tree struct_type,
24574 tree *discr_decl,
24575 dw_discr_list_ref **discr_lists_p,
24576 unsigned *discr_lists_length)
24577 {
24578 tree variant_part_type = TREE_TYPE (variant_part_decl);
24579 tree variant;
24580 dw_discr_list_ref *discr_lists;
24581 unsigned i;
24582
24583 /* Compute how many variants there are in this variant part. */
24584 *discr_lists_length = 0;
24585 for (variant = TYPE_FIELDS (variant_part_type);
24586 variant != NULL_TREE;
24587 variant = DECL_CHAIN (variant))
24588 ++*discr_lists_length;
24589
24590 *discr_decl = NULL_TREE;
24591 *discr_lists_p
24592 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24593 sizeof (**discr_lists_p));
24594 discr_lists = *discr_lists_p;
24595
24596 /* And then analyze all variants to extract discriminant information for all
24597 of them. This analysis is conservative: as soon as we detect something we
24598 do not support, abort everything and pretend we found nothing. */
24599 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24600 variant != NULL_TREE;
24601 variant = DECL_CHAIN (variant), ++i)
24602 {
24603 tree match_expr = DECL_QUALIFIER (variant);
24604
24605 /* Now, try to analyze the predicate and deduce a discriminant for
24606 it. */
24607 if (match_expr == boolean_true_node)
24608 /* Typically happens for the default variant: it matches all cases that
24609 previous variants rejected. Don't output any matching value for
24610 this one. */
24611 continue;
24612
24613 /* The following loop tries to iterate over each discriminant
24614 possibility: single values or ranges. */
24615 while (match_expr != NULL_TREE)
24616 {
24617 tree next_round_match_expr;
24618 tree candidate_discr = NULL_TREE;
24619 dw_discr_list_ref new_node = NULL;
24620
24621 /* Possibilities are matched one after the other by nested
24622 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24623 continue with the rest at next iteration. */
24624 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24625 {
24626 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24627 match_expr = TREE_OPERAND (match_expr, 1);
24628 }
24629 else
24630 next_round_match_expr = NULL_TREE;
24631
24632 if (match_expr == boolean_false_node)
24633 /* This sub-expression matches nothing: just wait for the next
24634 one. */
24635 ;
24636
24637 else if (TREE_CODE (match_expr) == EQ_EXPR)
24638 {
24639 /* We are matching: <discr_field> == <integer_cst>
24640 This sub-expression matches a single value. */
24641 tree integer_cst = TREE_OPERAND (match_expr, 1);
24642
24643 candidate_discr
24644 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24645 struct_type);
24646
24647 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24648 if (!get_discr_value (integer_cst,
24649 &new_node->dw_discr_lower_bound))
24650 goto abort;
24651 new_node->dw_discr_range = false;
24652 }
24653
24654 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24655 {
24656 /* We are matching:
24657 <discr_field> > <integer_cst>
24658 && <discr_field> < <integer_cst>.
24659 This sub-expression matches the range of values between the
24660 two matched integer constants. Note that comparisons can be
24661 inclusive or exclusive. */
24662 tree candidate_discr_1, candidate_discr_2;
24663 tree lower_cst, upper_cst;
24664 bool lower_cst_included, upper_cst_included;
24665 tree lower_op = TREE_OPERAND (match_expr, 0);
24666 tree upper_op = TREE_OPERAND (match_expr, 1);
24667
24668 /* When the comparison is exclusive, the integer constant is not
24669 the discriminant range bound we are looking for: we will have
24670 to increment or decrement it. */
24671 if (TREE_CODE (lower_op) == GE_EXPR)
24672 lower_cst_included = true;
24673 else if (TREE_CODE (lower_op) == GT_EXPR)
24674 lower_cst_included = false;
24675 else
24676 goto abort;
24677
24678 if (TREE_CODE (upper_op) == LE_EXPR)
24679 upper_cst_included = true;
24680 else if (TREE_CODE (upper_op) == LT_EXPR)
24681 upper_cst_included = false;
24682 else
24683 goto abort;
24684
24685 /* Extract the discriminant from the first operand and check it
24686                      is consistent with the same analysis in the second
24687 operand. */
24688 candidate_discr_1
24689 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24690 struct_type);
24691 candidate_discr_2
24692 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24693 struct_type);
24694 if (candidate_discr_1 == candidate_discr_2)
24695 candidate_discr = candidate_discr_1;
24696 else
24697 goto abort;
24698
24699 /* Extract bounds from both. */
24700 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24701 lower_cst = TREE_OPERAND (lower_op, 1);
24702 upper_cst = TREE_OPERAND (upper_op, 1);
24703
24704 if (!lower_cst_included)
24705 lower_cst
24706 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24707 build_int_cst (TREE_TYPE (lower_cst), 1));
24708 if (!upper_cst_included)
24709 upper_cst
24710 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24711 build_int_cst (TREE_TYPE (upper_cst), 1));
24712
24713 if (!get_discr_value (lower_cst,
24714 &new_node->dw_discr_lower_bound)
24715 || !get_discr_value (upper_cst,
24716 &new_node->dw_discr_upper_bound))
24717 goto abort;
24718
24719 new_node->dw_discr_range = true;
24720 }
24721
24722 else
24723 /* Unsupported sub-expression: we cannot determine the set of
24724 matching discriminant values. Abort everything. */
24725 goto abort;
24726
24727           /* If the discriminant info is not consistent with what we saw so
24728 far, consider the analysis failed and abort everything. */
24729 if (candidate_discr == NULL_TREE
24730 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24731 goto abort;
24732 else
24733 *discr_decl = candidate_discr;
24734
24735 if (new_node != NULL)
24736 {
24737 new_node->dw_discr_next = discr_lists[i];
24738 discr_lists[i] = new_node;
24739 }
24740 match_expr = next_round_match_expr;
24741 }
24742 }
24743
24744 /* If we reach this point, we could match everything we were interested
24745 in. */
24746 return;
24747
24748 abort:
24749   /* Clean up all data structures and return no result.  */
24750 free (*discr_lists_p);
24751 *discr_lists_p = NULL;
24752 *discr_decl = NULL_TREE;
24753 }
24754
24755 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24756 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24757 under CONTEXT_DIE.
24758
24759 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24760 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
24761 this type, which are record types, represent the available variants and each
24762 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24763 values are inferred from these attributes.
24764
24765 In trees, the offsets for the fields inside these sub-records are relative
24766 to the variant part itself, whereas the corresponding DIEs should have
24767 offset attributes that are relative to the embedding record base address.
24768 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
24769 must be an expression that computes the offset of the variant part to
24770 describe in DWARF. */
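        /* For illustration, the DECL_QUALIFIER predicates that
           analyze_variants_discr above knows how to decode look like

             D == 1
             D >= 2 && D <= 5
             D == 1 || D == 3

           (with D a COMPONENT_REF of the discriminant field on a
           PLACEHOLDER_EXPR of the embedding record type), plus
           boolean_true_node for the default variant.  */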
24771
24772 static void
24773 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
24774 dw_die_ref context_die)
24775 {
24776 const tree variant_part_type = TREE_TYPE (variant_part_decl);
24777 tree variant_part_offset = vlr_ctx->variant_part_offset;
24778 struct loc_descr_context ctx = {
24779 vlr_ctx->struct_type, /* context_type */
24780 NULL_TREE, /* base_decl */
24781 NULL, /* dpi */
24782 false, /* placeholder_arg */
24783 false /* placeholder_seen */
24784 };
24785
24786 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
24787 NULL_TREE if there is no such field. */
24788 tree discr_decl = NULL_TREE;
24789 dw_discr_list_ref *discr_lists;
24790 unsigned discr_lists_length = 0;
24791 unsigned i;
24792
24793 dw_die_ref dwarf_proc_die = NULL;
24794 dw_die_ref variant_part_die
24795 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
24796
24797 equate_decl_number_to_die (variant_part_decl, variant_part_die);
24798
24799 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
24800 &discr_decl, &discr_lists, &discr_lists_length);
24801
24802 if (discr_decl != NULL_TREE)
24803 {
24804 dw_die_ref discr_die = lookup_decl_die (discr_decl);
24805
24806 if (discr_die)
24807 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
24808 else
24809 /* We have no DIE for the discriminant, so just discard all
24810            discriminant information in the output. */
24811 discr_decl = NULL_TREE;
24812 }
24813
24814 /* If the offset for this variant part is more complex than a constant,
24815 create a DWARF procedure for it so that we will not have to generate DWARF
24816 expressions for it for each member. */
24817 if (TREE_CODE (variant_part_offset) != INTEGER_CST
24818 && (dwarf_version >= 3 || !dwarf_strict))
24819 {
24820 const tree dwarf_proc_fndecl
24821 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
24822 build_function_type (TREE_TYPE (variant_part_offset),
24823 NULL_TREE));
24824 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
24825 const dw_loc_descr_ref dwarf_proc_body
24826 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
24827
24828 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
24829 dwarf_proc_fndecl, context_die);
24830 if (dwarf_proc_die != NULL)
24831 variant_part_offset = dwarf_proc_call;
24832 }
24833
24834 /* Output DIEs for all variants. */
24835 i = 0;
24836 for (tree variant = TYPE_FIELDS (variant_part_type);
24837 variant != NULL_TREE;
24838 variant = DECL_CHAIN (variant), ++i)
24839 {
24840 tree variant_type = TREE_TYPE (variant);
24841 dw_die_ref variant_die;
24842
24843 /* All variants (i.e. members of a variant part) are supposed to be
24844 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
24845 under these records. */
24846 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
24847
24848 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
24849 equate_decl_number_to_die (variant, variant_die);
24850
24851 /* Output discriminant values this variant matches, if any. */
24852 if (discr_decl == NULL || discr_lists[i] == NULL)
24853         /* In case we have no discriminant information at all, this is
24854 probably the default variant: as the standard says, don't
24855 output any discriminant value/list attribute. */
24856 ;
24857 else if (discr_lists[i]->dw_discr_next == NULL
24858 && !discr_lists[i]->dw_discr_range)
24859 /* If there is only one accepted value, don't bother outputting a
24860 list. */
24861 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
24862 else
24863 add_discr_list (variant_die, discr_lists[i]);
24864
24865 for (tree member = TYPE_FIELDS (variant_type);
24866 member != NULL_TREE;
24867 member = DECL_CHAIN (member))
24868 {
24869 struct vlr_context vlr_sub_ctx = {
24870 vlr_ctx->struct_type, /* struct_type */
24871 NULL /* variant_part_offset */
24872 };
24873 if (is_variant_part (member))
24874 {
24875 /* All offsets for fields inside variant parts are relative to
24876 the top-level embedding RECORD_TYPE's base address. On the
24877 other hand, offsets in GCC's types are relative to the
24878                  innermost variant part.  So we have to sum offsets each time
24879 we recurse. */
24880
24881 vlr_sub_ctx.variant_part_offset
24882 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
24883 variant_part_offset, byte_position (member));
24884 gen_variant_part (member, &vlr_sub_ctx, variant_die);
24885 }
24886 else
24887 {
24888 vlr_sub_ctx.variant_part_offset = variant_part_offset;
24889 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
24890 }
24891 }
24892 }
24893
24894 free (discr_lists);
24895 }
24896
24897 /* Generate a DIE for a class member. */
24898
24899 static void
24900 gen_member_die (tree type, dw_die_ref context_die)
24901 {
24902 tree member;
24903 tree binfo = TYPE_BINFO (type);
24904
24905 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
24906
24907 /* If this is not an incomplete type, output descriptions of each of its
24908 members. Note that as we output the DIEs necessary to represent the
24909 members of this record or union type, we will also be trying to output
24910 DIEs to represent the *types* of those members. However the `type'
24911 function (above) will specifically avoid generating type DIEs for member
24912 types *within* the list of member DIEs for this (containing) type except
24913 for those types (of members) which are explicitly marked as also being
24914     members of this (containing) type themselves.  The g++ front end can
24915 force any given type to be treated as a member of some other (containing)
24916 type by setting the TYPE_CONTEXT of the given (member) type to point to
24917 the TREE node representing the appropriate (containing) type. */
24918
24919 /* First output info about the base classes. */
24920 if (binfo)
24921 {
24922 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
24923 int i;
24924 tree base;
24925
24926 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
24927 gen_inheritance_die (base,
24928 (accesses ? (*accesses)[i] : access_public_node),
24929 type,
24930 context_die);
24931 }
24932
24933 /* Now output info about the data members and type members. */
24934 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
24935 {
24936 struct vlr_context vlr_ctx = { type, NULL_TREE };
24937 bool static_inline_p
24938 = (TREE_STATIC (member)
24939 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
24940 != -1));
24941
24942 /* Ignore clones. */
24943 if (DECL_ABSTRACT_ORIGIN (member))
24944 continue;
24945
24946 /* If we thought we were generating minimal debug info for TYPE
24947 and then changed our minds, some of the member declarations
24948 may have already been defined. Don't define them again, but
24949 do put them in the right order. */
24950
24951 if (dw_die_ref child = lookup_decl_die (member))
24952 {
24953 /* Handle inline static data members, which only have in-class
24954 declarations. */
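                /* For illustration (C++17):

                     struct S { static inline int i = 0; };

                   gives I an in-class definition only; its early DIE can end
                   up as a DW_TAG_variable child of the compile unit, which the
                   code below then reparents under the class DIE (and, before
                   DWARF 5, retags as DW_TAG_member).  */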
24955 dw_die_ref ref = NULL;
24956 if (child->die_tag == DW_TAG_variable
24957 && child->die_parent == comp_unit_die ())
24958 {
24959 ref = get_AT_ref (child, DW_AT_specification);
24960 /* For C++17 inline static data members followed by redundant
24961 out of class redeclaration, we might get here with
24962 child being the DIE created for the out of class
24963 redeclaration and with its DW_AT_specification being
24964 the DIE created for in-class definition. We want to
24965 reparent the latter, and don't want to create another
24966 DIE with DW_AT_specification in that case, because
24967 we already have one. */
24968 if (ref
24969 && static_inline_p
24970 && ref->die_tag == DW_TAG_variable
24971 && ref->die_parent == comp_unit_die ()
24972 && get_AT (ref, DW_AT_specification) == NULL)
24973 {
24974 child = ref;
24975 ref = NULL;
24976 static_inline_p = false;
24977 }
24978 }
24979
24980 if (child->die_tag == DW_TAG_variable
24981 && child->die_parent == comp_unit_die ()
24982 && ref == NULL)
24983 {
24984 reparent_child (child, context_die);
24985 if (dwarf_version < 5)
24986 child->die_tag = DW_TAG_member;
24987 }
24988 else
24989 splice_child_die (context_die, child);
24990 }
24991
24992 /* Do not generate standard DWARF for variant parts if we are generating
24993 the corresponding GNAT encodings: DIEs generated for both would
24994 conflict in our mappings. */
24995 else if (is_variant_part (member)
24996 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
24997 {
24998 vlr_ctx.variant_part_offset = byte_position (member);
24999 gen_variant_part (member, &vlr_ctx, context_die);
25000 }
25001 else
25002 {
25003 vlr_ctx.variant_part_offset = NULL_TREE;
25004 gen_decl_die (member, NULL, &vlr_ctx, context_die);
25005 }
25006
25007       /* For C++ inline static data members, immediately emit a DW_TAG_variable
25008 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
25009 DW_AT_specification. */
25010 if (static_inline_p)
25011 {
25012 int old_extern = DECL_EXTERNAL (member);
25013 DECL_EXTERNAL (member) = 0;
25014 gen_decl_die (member, NULL, NULL, comp_unit_die ());
25015 DECL_EXTERNAL (member) = old_extern;
25016 }
25017 }
25018 }
25019
25020 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
25021 is set, we pretend that the type was never defined, so we only get the
25022 member DIEs needed by later specification DIEs. */
25023
25024 static void
25025 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
25026 enum debug_info_usage usage)
25027 {
25028 if (TREE_ASM_WRITTEN (type))
25029 {
25030 /* Fill in the bound of variable-length fields in late dwarf if
25031 still incomplete. */
25032 if (!early_dwarf && variably_modified_type_p (type, NULL))
25033 for (tree member = TYPE_FIELDS (type);
25034 member;
25035 member = DECL_CHAIN (member))
25036 fill_variable_array_bounds (TREE_TYPE (member));
25037 return;
25038 }
25039
25040 dw_die_ref type_die = lookup_type_die (type);
25041 dw_die_ref scope_die = 0;
25042 int nested = 0;
25043 int complete = (TYPE_SIZE (type)
25044 && (! TYPE_STUB_DECL (type)
25045 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
25046 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
25047 complete = complete && should_emit_struct_debug (type, usage);
25048
25049 if (type_die && ! complete)
25050 return;
25051
25052 if (TYPE_CONTEXT (type) != NULL_TREE
25053 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25054 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
25055 nested = 1;
25056
25057 scope_die = scope_die_for (type, context_die);
25058
25059   /* Generate child DIEs for template parameters.  */
25060 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
25061 schedule_generic_params_dies_gen (type);
25062
25063 if (! type_die || (nested && is_cu_die (scope_die)))
25064 /* First occurrence of type or toplevel definition of nested class. */
25065 {
25066 dw_die_ref old_die = type_die;
25067
25068 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
25069 ? record_type_tag (type) : DW_TAG_union_type,
25070 scope_die, type);
25071 equate_type_number_to_die (type, type_die);
25072 if (old_die)
25073 add_AT_specification (type_die, old_die);
25074 else
25075 add_name_attribute (type_die, type_tag (type));
25076 }
25077 else
25078 remove_AT (type_die, DW_AT_declaration);
25079
25080 /* If this type has been completed, then give it a byte_size attribute and
25081 then give a list of members. */
25082 if (complete && !ns_decl)
25083 {
25084 /* Prevent infinite recursion in cases where the type of some member of
25085 this type is expressed in terms of this type itself. */
25086 TREE_ASM_WRITTEN (type) = 1;
25087 add_byte_size_attribute (type_die, type);
25088 add_alignment_attribute (type_die, type);
25089 if (TYPE_STUB_DECL (type) != NULL_TREE)
25090 {
25091 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
25092 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
25093 }
25094
25095 /* If the first reference to this type was as the return type of an
25096 inline function, then it may not have a parent. Fix this now. */
25097 if (type_die->die_parent == NULL)
25098 add_child_die (scope_die, type_die);
25099
25100 gen_member_die (type, type_die);
25101
25102 add_gnat_descriptive_type_attribute (type_die, type, context_die);
25103 if (TYPE_ARTIFICIAL (type))
25104 add_AT_flag (type_die, DW_AT_artificial, 1);
25105
25106 /* GNU extension: Record what type our vtable lives in. */
25107 if (TYPE_VFIELD (type))
25108 {
25109 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25110
25111 gen_type_die (vtype, context_die);
25112 add_AT_die_ref (type_die, DW_AT_containing_type,
25113 lookup_type_die (vtype));
25114 }
25115 }
25116 else
25117 {
25118 add_AT_flag (type_die, DW_AT_declaration, 1);
25119
25120 /* We don't need to do this for function-local types. */
25121 if (TYPE_STUB_DECL (type)
25122 && ! decl_function_context (TYPE_STUB_DECL (type)))
25123 vec_safe_push (incomplete_types, type);
25124 }
25125
25126 if (get_AT (type_die, DW_AT_name))
25127 add_pubtype (type, type_die);
25128 }
25129
25130 /* Generate a DIE for a subroutine _type_. */
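        /* For illustration: for the C type "int (*) (double)" the pointed-to
           FUNCTION_TYPE is emitted here roughly as

             DW_TAG_subroutine_type
               DW_AT_prototyped   (if the type is prototyped)
               DW_AT_type         -> int
               DW_TAG_formal_parameter
                 DW_AT_type       -> double

           (a sketch of the DIE shape, not literal assembler output).  */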
25131
25132 static void
25133 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25134 {
25135 tree return_type = TREE_TYPE (type);
25136 dw_die_ref subr_die
25137 = new_die (DW_TAG_subroutine_type,
25138 scope_die_for (type, context_die), type);
25139
25140 equate_type_number_to_die (type, subr_die);
25141 add_prototyped_attribute (subr_die, type);
25142 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25143 context_die);
25144 add_alignment_attribute (subr_die, type);
25145 gen_formal_types_die (type, subr_die);
25146
25147 if (get_AT (subr_die, DW_AT_name))
25148 add_pubtype (type, subr_die);
25149 if ((dwarf_version >= 5 || !dwarf_strict)
25150 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25151 add_AT_flag (subr_die, DW_AT_reference, 1);
25152 if ((dwarf_version >= 5 || !dwarf_strict)
25153 && lang_hooks.types.type_dwarf_attribute (type,
25154 DW_AT_rvalue_reference) != -1)
25155 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25156 }
25157
25158 /* Generate a DIE for a type definition. */
25159
25160 static void
25161 gen_typedef_die (tree decl, dw_die_ref context_die)
25162 {
25163 dw_die_ref type_die;
25164 tree type;
25165
25166 if (TREE_ASM_WRITTEN (decl))
25167 {
25168 if (DECL_ORIGINAL_TYPE (decl))
25169 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25170 return;
25171 }
25172
25173 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25174 checks in process_scope_var and modified_type_die), this should be called
25175 only for original types. */
25176 gcc_assert (decl_ultimate_origin (decl) == NULL
25177 || decl_ultimate_origin (decl) == decl);
25178
25179 TREE_ASM_WRITTEN (decl) = 1;
25180 type_die = new_die (DW_TAG_typedef, context_die, decl);
25181
25182 add_name_and_src_coords_attributes (type_die, decl);
25183 if (DECL_ORIGINAL_TYPE (decl))
25184 {
25185 type = DECL_ORIGINAL_TYPE (decl);
25186 if (type == error_mark_node)
25187 return;
25188
25189 gcc_assert (type != TREE_TYPE (decl));
25190 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25191 }
25192 else
25193 {
25194 type = TREE_TYPE (decl);
25195 if (type == error_mark_node)
25196 return;
25197
25198 if (is_naming_typedef_decl (TYPE_NAME (type)))
25199 {
25200 /* Here, we are in the case of decl being a typedef naming
25201              an anonymous type, e.g.:
25202 typedef struct {...} foo;
25203 In that case TREE_TYPE (decl) is not a typedef variant
25204 type and TYPE_NAME of the anonymous type is set to the
25205 TYPE_DECL of the typedef. This construct is emitted by
25206 the C++ FE.
25207
25208 TYPE is the anonymous struct named by the typedef
25209 DECL. As we need the DW_AT_type attribute of the
25210 DW_TAG_typedef to point to the DIE of TYPE, let's
25211 generate that DIE right away. add_type_attribute
25212 called below will then pick (via lookup_type_die) that
25213 anonymous struct DIE. */
25214 if (!TREE_ASM_WRITTEN (type))
25215 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25216
25217 /* This is a GNU Extension. We are adding a
25218 DW_AT_linkage_name attribute to the DIE of the
25219 anonymous struct TYPE. The value of that attribute
25220 is the name of the typedef decl naming the anonymous
25221 struct. This greatly eases the work of consumers of
25222 this debug info. */
25223 add_linkage_name_raw (lookup_type_die (type), decl);
25224 }
25225 }
25226
25227 add_type_attribute (type_die, type, decl_quals (decl), false,
25228 context_die);
25229
25230 if (is_naming_typedef_decl (decl))
25231 /* We want that all subsequent calls to lookup_type_die with
25232 TYPE in argument yield the DW_TAG_typedef we have just
25233 created. */
25234 equate_type_number_to_die (type, type_die);
25235
25236 add_alignment_attribute (type_die, TREE_TYPE (decl));
25237
25238 add_accessibility_attribute (type_die, decl);
25239
25240 if (DECL_ABSTRACT_P (decl))
25241 equate_decl_number_to_die (decl, type_die);
25242
25243 if (get_AT (type_die, DW_AT_name))
25244 add_pubtype (decl, type_die);
25245 }
25246
25247 /* Generate a DIE for a struct, class, enum or union type. */
25248
25249 static void
25250 gen_tagged_type_die (tree type,
25251 dw_die_ref context_die,
25252 enum debug_info_usage usage)
25253 {
25254 if (type == NULL_TREE
25255 || !is_tagged_type (type))
25256 return;
25257
25258 if (TREE_ASM_WRITTEN (type))
25259 ;
25260 /* If this is a nested type whose containing class hasn't been written
25261 out yet, writing it out will cover this one, too. This does not apply
25262 to instantiations of member class templates; they need to be added to
25263 the containing class as they are generated. FIXME: This hurts the
25264 idea of combining type decls from multiple TUs, since we can't predict
25265 what set of template instantiations we'll get. */
25266 else if (TYPE_CONTEXT (type)
25267 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25268 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25269 {
25270 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25271
25272 if (TREE_ASM_WRITTEN (type))
25273 return;
25274
25275 /* If that failed, attach ourselves to the stub. */
25276 context_die = lookup_type_die (TYPE_CONTEXT (type));
25277 }
25278 else if (TYPE_CONTEXT (type) != NULL_TREE
25279 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25280 {
25281 /* If this type is local to a function that hasn't been written
25282 out yet, use a NULL context for now; it will be fixed up in
25283 decls_for_scope. */
25284 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25285 /* A declaration DIE doesn't count; nested types need to go in the
25286 specification. */
25287 if (context_die && is_declaration_die (context_die))
25288 context_die = NULL;
25289 }
25290 else
25291 context_die = declare_in_namespace (type, context_die);
25292
25293 if (TREE_CODE (type) == ENUMERAL_TYPE)
25294 {
25295 /* This might have been written out by the call to
25296 declare_in_namespace. */
25297 if (!TREE_ASM_WRITTEN (type))
25298 gen_enumeration_type_die (type, context_die);
25299 }
25300 else
25301 gen_struct_or_union_type_die (type, context_die, usage);
25302
25303 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25304 it up if it is ever completed. gen_*_type_die will set it for us
25305 when appropriate. */
25306 }
25307
25308 /* Generate a type description DIE. */
25309
25310 static void
25311 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25312 enum debug_info_usage usage)
25313 {
25314 struct array_descr_info info;
25315
25316 if (type == NULL_TREE || type == error_mark_node)
25317 return;
25318
25319 if (flag_checking && type)
25320 verify_type (type);
25321
25322 if (TYPE_NAME (type) != NULL_TREE
25323 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25324 && is_redundant_typedef (TYPE_NAME (type))
25325 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25326 /* The DECL of this type is a typedef we don't want to emit debug
25327 info for but we want debug info for its underlying typedef.
25328 This can happen for e.g, the injected-class-name of a C++
25329 type. */
25330 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25331
25332 /* If TYPE is a typedef type variant, let's generate debug info
25333 for the parent typedef which TYPE is a type of. */
25334 if (typedef_variant_p (type))
25335 {
25336 if (TREE_ASM_WRITTEN (type))
25337 return;
25338
25339 tree name = TYPE_NAME (type);
25340 tree origin = decl_ultimate_origin (name);
25341 if (origin != NULL && origin != name)
25342 {
25343 gen_decl_die (origin, NULL, NULL, context_die);
25344 return;
25345 }
25346
25347 /* Prevent broken recursion; we can't hand off to the same type. */
25348 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25349
25350 /* Give typedefs the right scope. */
25351 context_die = scope_die_for (type, context_die);
25352
25353 TREE_ASM_WRITTEN (type) = 1;
25354
25355 gen_decl_die (name, NULL, NULL, context_die);
25356 return;
25357 }
25358
25359 /* If type is an anonymous tagged type named by a typedef, let's
25360 generate debug info for the typedef. */
25361 if (is_naming_typedef_decl (TYPE_NAME (type)))
25362 {
25363 /* Give typedefs the right scope. */
25364 context_die = scope_die_for (type, context_die);
25365
25366 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25367 return;
25368 }
25369
25370 if (lang_hooks.types.get_debug_type)
25371 {
25372 tree debug_type = lang_hooks.types.get_debug_type (type);
25373
25374 if (debug_type != NULL_TREE && debug_type != type)
25375 {
25376 gen_type_die_with_usage (debug_type, context_die, usage);
25377 return;
25378 }
25379 }
25380
25381 /* We are going to output a DIE to represent the unqualified version
25382 of this type (i.e. without any const or volatile qualifiers) so
25383 get the main variant (i.e. the unqualified version) of this type
25384 now. (Vectors and arrays are special because the debugging info is in the
25385 cloned type itself. Similarly function/method types can contain extra
25386 ref-qualification). */
25387 if (TREE_CODE (type) == FUNCTION_TYPE
25388 || TREE_CODE (type) == METHOD_TYPE)
25389 {
25390 /* For function/method types, can't use type_main_variant here,
25391 because that can have different ref-qualifiers for C++,
25392 but try to canonicalize. */
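              /* For illustration: the C++ member functions in

                   struct C { void f () &; void g () &&; };

                 have METHOD_TYPEs that differ only in ref-qualification;
                 such types are variants of a single main variant, which is
                 why TYPE_MAIN_VARIANT cannot be used blindly here.  */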
25393 tree main = TYPE_MAIN_VARIANT (type);
25394 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25395 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25396 && check_base_type (t, main)
25397 && check_lang_type (t, type))
25398 {
25399 type = t;
25400 break;
25401 }
25402 }
25403 else if (TREE_CODE (type) != VECTOR_TYPE
25404 && TREE_CODE (type) != ARRAY_TYPE)
25405 type = type_main_variant (type);
25406
25407 /* If this is an array type with hidden descriptor, handle it first. */
25408 if (!TREE_ASM_WRITTEN (type)
25409 && lang_hooks.types.get_array_descr_info)
25410 {
25411 memset (&info, 0, sizeof (info));
25412 if (lang_hooks.types.get_array_descr_info (type, &info))
25413 {
25414 /* Fortran sometimes emits array types with no dimension. */
25415 gcc_assert (info.ndimensions >= 0
25416 && (info.ndimensions
25417 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25418 gen_descr_array_type_die (type, &info, context_die);
25419 TREE_ASM_WRITTEN (type) = 1;
25420 return;
25421 }
25422 }
25423
25424 if (TREE_ASM_WRITTEN (type))
25425 {
25426 /* Variable-length types may be incomplete even if
25427 TREE_ASM_WRITTEN. For such types, fall through to
25428 gen_array_type_die() and possibly fill in
25429 DW_AT_{upper,lower}_bound attributes. */
25430 if ((TREE_CODE (type) != ARRAY_TYPE
25431 && TREE_CODE (type) != RECORD_TYPE
25432 && TREE_CODE (type) != UNION_TYPE
25433 && TREE_CODE (type) != QUAL_UNION_TYPE)
25434 || !variably_modified_type_p (type, NULL))
25435 return;
25436 }
25437
25438 switch (TREE_CODE (type))
25439 {
25440 case ERROR_MARK:
25441 break;
25442
25443 case POINTER_TYPE:
25444 case REFERENCE_TYPE:
25445 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25446 ensures that the gen_type_die recursion will terminate even if the
25447 type is recursive. Recursive types are possible in Ada. */
25448 /* ??? We could perhaps do this for all types before the switch
25449 statement. */
25450 TREE_ASM_WRITTEN (type) = 1;
25451
25452 /* For these types, all that is required is that we output a DIE (or a
25453 set of DIEs) to represent the "basis" type. */
25454 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25455 DINFO_USAGE_IND_USE);
25456 break;
25457
25458 case OFFSET_TYPE:
25459 /* This code is used for C++ pointer-to-data-member types.
25460 Output a description of the relevant class type. */
25461 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25462 DINFO_USAGE_IND_USE);
25463
25464 /* Output a description of the type of the object pointed to. */
25465 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25466 DINFO_USAGE_IND_USE);
25467
25468 /* Now output a DIE to represent this pointer-to-data-member type
25469 itself. */
25470 gen_ptr_to_mbr_type_die (type, context_die);
25471 break;
25472
25473 case FUNCTION_TYPE:
25474 /* Force out return type (in case it wasn't forced out already). */
25475 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25476 DINFO_USAGE_DIR_USE);
25477 gen_subroutine_type_die (type, context_die);
25478 break;
25479
25480 case METHOD_TYPE:
25481 /* Force out return type (in case it wasn't forced out already). */
25482 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25483 DINFO_USAGE_DIR_USE);
25484 gen_subroutine_type_die (type, context_die);
25485 break;
25486
25487 case ARRAY_TYPE:
25488 case VECTOR_TYPE:
25489 gen_array_type_die (type, context_die);
25490 break;
25491
25492 case ENUMERAL_TYPE:
25493 case RECORD_TYPE:
25494 case UNION_TYPE:
25495 case QUAL_UNION_TYPE:
25496 gen_tagged_type_die (type, context_die, usage);
25497 return;
25498
25499 case VOID_TYPE:
25500 case INTEGER_TYPE:
25501 case REAL_TYPE:
25502 case FIXED_POINT_TYPE:
25503 case COMPLEX_TYPE:
25504 case BOOLEAN_TYPE:
25505 /* No DIEs needed for fundamental types. */
25506 break;
25507
25508 case NULLPTR_TYPE:
25509 case LANG_TYPE:
25510 /* Just use DW_TAG_unspecified_type. */
25511 {
25512 dw_die_ref type_die = lookup_type_die (type);
25513 if (type_die == NULL)
25514 {
25515 tree name = TYPE_IDENTIFIER (type);
25516 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25517 type);
25518 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25519 equate_type_number_to_die (type, type_die);
25520 }
25521 }
25522 break;
25523
25524 default:
25525 if (is_cxx_auto (type))
25526 {
25527 tree name = TYPE_IDENTIFIER (type);
25528 dw_die_ref *die = (name == get_identifier ("auto")
25529 ? &auto_die : &decltype_auto_die);
25530 if (!*die)
25531 {
25532 *die = new_die (DW_TAG_unspecified_type,
25533 comp_unit_die (), NULL_TREE);
25534 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25535 }
25536 equate_type_number_to_die (type, *die);
25537 break;
25538 }
25539 gcc_unreachable ();
25540 }
25541
25542 TREE_ASM_WRITTEN (type) = 1;
25543 }
25544
25545 static void
25546 gen_type_die (tree type, dw_die_ref context_die)
25547 {
25548 if (type != error_mark_node)
25549 {
25550 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25551 if (flag_checking)
25552 {
25553 dw_die_ref die = lookup_type_die (type);
25554 if (die)
25555 check_die (die);
25556 }
25557 }
25558 }
25559
25560 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25561 things which are local to the given block. */
25562
25563 static void
25564 gen_block_die (tree stmt, dw_die_ref context_die)
25565 {
25566 int must_output_die = 0;
25567 bool inlined_func;
25568
25569 /* Ignore blocks that are NULL. */
25570 if (stmt == NULL_TREE)
25571 return;
25572
25573 inlined_func = inlined_function_outer_scope_p (stmt);
25574
25575 /* If the block is one fragment of a non-contiguous block, do not
25576 process the variables, since they will have been done by the
25577 origin block. Do process subblocks. */
25578 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25579 {
25580 tree sub;
25581
25582 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25583 gen_block_die (sub, context_die);
25584
25585 return;
25586 }
25587
25588 /* Determine if we need to output any Dwarf DIEs at all to represent this
25589 block. */
25590 if (inlined_func)
25591 /* The outer scopes for inlinings *must* always be represented. We
25592 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25593 must_output_die = 1;
25594 else if (BLOCK_DIE (stmt))
25595 /* If we already have a DIE then it was filled early. Meanwhile
25596      we might have pruned all BLOCK_VARS as optimized out, but we
25597      still want to generate high/low PC attributes, so output it. */
25598 must_output_die = 1;
25599 else if (TREE_USED (stmt)
25600 || TREE_ASM_WRITTEN (stmt)
25601 || BLOCK_ABSTRACT (stmt))
25602 {
25603 /* Determine if this block directly contains any "significant"
25604 local declarations which we will need to output DIEs for. */
25605 if (debug_info_level > DINFO_LEVEL_TERSE)
25606 {
25607 /* We are not in terse mode so any local declaration that
25608 is not ignored for debug purposes counts as being a
25609 "significant" one. */
25610 if (BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25611 must_output_die = 1;
25612 else
25613 for (tree var = BLOCK_VARS (stmt); var; var = DECL_CHAIN (var))
25614 if (!DECL_IGNORED_P (var))
25615 {
25616 must_output_die = 1;
25617 break;
25618 }
25619 }
25620 else if (!dwarf2out_ignore_block (stmt))
25621 must_output_die = 1;
25622 }
25623
25624 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25625 DIE for any block which contains no significant local declarations at
25626 all. Rather, in such cases we just call `decls_for_scope' so that any
25627 needed Dwarf info for any sub-blocks will get properly generated. Note
25628 that in terse mode, our definition of what constitutes a "significant"
25629 local declaration gets restricted to include only inlined function
25630 instances and local (nested) function definitions. */
25631 if (must_output_die)
25632 {
25633 if (inlined_func)
25634 {
25635 /* If STMT block is abstract, that means we have been called
25636 indirectly from dwarf2out_abstract_function.
25637              That function rightfully marks the descendant blocks (of
25638 the abstract function it is dealing with) as being abstract,
25639 precisely to prevent us from emitting any
25640              DW_TAG_inlined_subroutine DIE as a descendant
25641 of an abstract function instance. So in that case, we should
25642 not call gen_inlined_subroutine_die.
25643
25644 Later though, when cgraph asks dwarf2out to emit info
25645 for the concrete instance of the function decl into which
25646              the concrete instance of STMT got inlined, the latter will lead
25647 to the generation of a DW_TAG_inlined_subroutine DIE. */
25648 if (! BLOCK_ABSTRACT (stmt))
25649 gen_inlined_subroutine_die (stmt, context_die);
25650 }
25651 else
25652 gen_lexical_block_die (stmt, context_die);
25653 }
25654 else
25655 decls_for_scope (stmt, context_die);
25656 }
25657
25658 /* Process variable DECL (or variable with origin ORIGIN) within
25659 block STMT and add it to CONTEXT_DIE. */
25660 static void
25661 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25662 {
25663 dw_die_ref die;
25664 tree decl_or_origin = decl ? decl : origin;
25665
25666 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25667 die = lookup_decl_die (decl_or_origin);
25668 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25669 {
25670 if (TYPE_DECL_IS_STUB (decl_or_origin))
25671 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25672 else
25673 die = lookup_decl_die (decl_or_origin);
25674 /* Avoid re-creating the DIE late if it was optimized as unused early. */
25675 if (! die && ! early_dwarf)
25676 return;
25677 }
25678 else
25679 die = NULL;
25680
25681 /* Avoid creating DIEs for local typedefs and concrete static variables that
25682 will only be pruned later. */
25683 if ((origin || decl_ultimate_origin (decl))
25684 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25685 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25686 {
25687 origin = decl_ultimate_origin (decl_or_origin);
25688 if (decl && VAR_P (decl) && die != NULL)
25689 {
25690 die = lookup_decl_die (origin);
25691 if (die != NULL)
25692 equate_decl_number_to_die (decl, die);
25693 }
25694 return;
25695 }
25696
25697 if (die != NULL && die->die_parent == NULL)
25698 add_child_die (context_die, die);
25699 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25700 {
25701 if (early_dwarf)
25702 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25703 stmt, context_die);
25704 }
25705 else
25706 {
25707 if (decl && DECL_P (decl))
25708 {
25709 die = lookup_decl_die (decl);
25710
25711 /* Early created DIEs do not have a parent as the decls refer
25712 to the function as DECL_CONTEXT rather than the BLOCK. */
25713 if (die && die->die_parent == NULL)
25714 {
25715 gcc_assert (in_lto_p);
25716 add_child_die (context_die, die);
25717 }
25718 }
25719
25720 gen_decl_die (decl, origin, NULL, context_die);
25721 }
25722 }
25723
25724 /* Generate all of the decls declared within a given scope and (recursively)
25725 all of its sub-blocks. */
25726
25727 static void
25728 decls_for_scope (tree stmt, dw_die_ref context_die)
25729 {
25730 tree decl;
25731 unsigned int i;
25732 tree subblocks;
25733
25734 /* Ignore NULL blocks. */
25735 if (stmt == NULL_TREE)
25736 return;
25737
25738 /* Output the DIEs to represent all of the data objects and typedefs
25739 declared directly within this block but not within any nested
25740 sub-blocks. Also, nested function and tag DIEs have been
25741 generated with a parent of NULL; fix that up now. We don't
25742 have to do this if we're at -g1. */
25743 if (debug_info_level > DINFO_LEVEL_TERSE)
25744 {
25745 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25746 process_scope_var (stmt, decl, NULL_TREE, context_die);
25747 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
25748 origin - avoid doing this twice as we have no good way to see
25749 if we've done it once already. */
25750 if (! early_dwarf)
25751 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25752 {
25753 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25754 if (decl == current_function_decl)
25755 /* Ignore declarations of the current function: although they
25756 are declarations, gen_subprogram_die would treat them
25757 as definitions again (because they are equal to
25758 current_function_decl) and recurse endlessly. */;
25759 else if (TREE_CODE (decl) == FUNCTION_DECL)
25760 process_scope_var (stmt, decl, NULL_TREE, context_die);
25761 else
25762 process_scope_var (stmt, NULL_TREE, decl, context_die);
25763 }
25764 }
25765
25766 /* Even if we're at -g1, we need to process the subblocks in order to get
25767 inlined call information. */
25768
25769 /* Output the DIEs to represent all sub-blocks (and the items declared
25770 therein) of this block. */
25771 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25772 subblocks != NULL;
25773 subblocks = BLOCK_CHAIN (subblocks))
25774 gen_block_die (subblocks, context_die);
25775 }
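
/* Illustrative sketch (not part of the build): for a hypothetical source
   fragment like the one below, decls_for_scope emits DIEs for `a' directly
   from BLOCK_VARS (above -g1), then recurses through gen_block_die into the
   inner scope, which may emit a DW_TAG_lexical_block DIE (if `b' is
   significant) and a DIE for `b'.  */
#if 0
void
example (void)
{
  int a;        /* In BLOCK_VARS of the outer block.  */
  {
    int b;      /* In BLOCK_VARS of a BLOCK_SUBBLOCKS entry.  */
    b = a;
  }
}
#endif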
25776
25777 /* Is this a typedef we can avoid emitting? */
25778
25779 static bool
25780 is_redundant_typedef (const_tree decl)
25781 {
25782 if (TYPE_DECL_IS_STUB (decl))
25783 return true;
25784
25785 if (DECL_ARTIFICIAL (decl)
25786 && DECL_CONTEXT (decl)
25787 && is_tagged_type (DECL_CONTEXT (decl))
25788 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
25789 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
25790 /* Also ignore the artificial member typedef for the class name. */
25791 return true;
25792
25793 return false;
25794 }
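
/* Illustrative sketch (not part of the build): a hypothetical helper that
   filters typedefs through is_redundant_typedef before emitting
   DW_TAG_typedef DIEs.  The real emission path goes through gen_decl_die,
   which performs the same check in its TYPE_DECL case.  */
#if 0
static void
emit_block_typedefs (tree block, dw_die_ref context_die)
{
  for (tree decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
    if (TREE_CODE (decl) == TYPE_DECL && !is_redundant_typedef (decl))
      gen_typedef_die (decl, context_die);
}
#endif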
25795
25796 /* Return TRUE if DECL is a typedef that names a type for linkage
25797 purposes. This kind of typedef is produced by the C++ FE for
25798 constructs like:
25799
25800 typedef struct {...} foo;
25801
25802 In that case, there is no typedef variant type produced for foo.
25803 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
25804 struct type. */
25805
25806 static bool
25807 is_naming_typedef_decl (const_tree decl)
25808 {
25809 if (decl == NULL_TREE
25810 || TREE_CODE (decl) != TYPE_DECL
25811 || DECL_NAMELESS (decl)
25812 || !is_tagged_type (TREE_TYPE (decl))
25813 || DECL_IS_BUILTIN (decl)
25814 || is_redundant_typedef (decl)
25815 /* It looks like Ada produces TYPE_DECLs that are very similar
25816 to C++ naming typedefs but that have different
25817 semantics. Let's be specific to C++ for now. */
25818 || !is_cxx (decl))
25819 return false;
25820
25821 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
25822 && TYPE_NAME (TREE_TYPE (decl)) == decl
25823 && (TYPE_STUB_DECL (TREE_TYPE (decl))
25824 != TYPE_NAME (TREE_TYPE (decl))));
25825 }
25826
25827 /* Looks up the DIE for a context. */
25828
25829 static inline dw_die_ref
25830 lookup_context_die (tree context)
25831 {
25832 if (context)
25833 {
25834 /* Find die that represents this context. */
25835 if (TYPE_P (context))
25836 {
25837 context = TYPE_MAIN_VARIANT (context);
25838 dw_die_ref ctx = lookup_type_die (context);
25839 if (!ctx)
25840 return NULL;
25841 return strip_naming_typedef (context, ctx);
25842 }
25843 else
25844 return lookup_decl_die (context);
25845 }
25846 return comp_unit_die ();
25847 }
25848
25849 /* Returns the DIE for a context. */
25850
25851 static inline dw_die_ref
25852 get_context_die (tree context)
25853 {
25854 if (context)
25855 {
25856 /* Find die that represents this context. */
25857 if (TYPE_P (context))
25858 {
25859 context = TYPE_MAIN_VARIANT (context);
25860 return strip_naming_typedef (context, force_type_die (context));
25861 }
25862 else
25863 return force_decl_die (context);
25864 }
25865 return comp_unit_die ();
25866 }
25867
25868 /* Returns the DIE for decl. A DIE will always be returned. */
25869
25870 static dw_die_ref
25871 force_decl_die (tree decl)
25872 {
25873 dw_die_ref decl_die;
25874 unsigned saved_external_flag;
25875 tree save_fn = NULL_TREE;
25876 decl_die = lookup_decl_die (decl);
25877 if (!decl_die)
25878 {
25879 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
25880
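/* Creating the context DIE may already have created a DIE for DECL as a
   side effect (for instance when DECL is a member of its class context),
   so look it up again before generating a fresh one.  */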
25881 decl_die = lookup_decl_die (decl);
25882 if (decl_die)
25883 return decl_die;
25884
25885 switch (TREE_CODE (decl))
25886 {
25887 case FUNCTION_DECL:
25888 /* Clear current_function_decl, so that gen_subprogram_die thinks
25889 that this is a declaration. At this point, we just want to force
25890 declaration die. */
25891 save_fn = current_function_decl;
25892 current_function_decl = NULL_TREE;
25893 gen_subprogram_die (decl, context_die);
25894 current_function_decl = save_fn;
25895 break;
25896
25897 case VAR_DECL:
25898 /* Set external flag to force declaration die. Restore it after
25899 gen_decl_die() call. */
25900 saved_external_flag = DECL_EXTERNAL (decl);
25901 DECL_EXTERNAL (decl) = 1;
25902 gen_decl_die (decl, NULL, NULL, context_die);
25903 DECL_EXTERNAL (decl) = saved_external_flag;
25904 break;
25905
25906 case NAMESPACE_DECL:
25907 if (dwarf_version >= 3 || !dwarf_strict)
25908 dwarf2out_decl (decl);
25909 else
25910 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
25911 decl_die = comp_unit_die ();
25912 break;
25913
25914 case TRANSLATION_UNIT_DECL:
25915 decl_die = comp_unit_die ();
25916 break;
25917
25918 default:
25919 gcc_unreachable ();
25920 }
25921
25922 /* We should be able to find the DIE now. */
25923 if (!decl_die)
25924 decl_die = lookup_decl_die (decl);
25925 gcc_assert (decl_die);
25926 }
25927
25928 return decl_die;
25929 }
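
/* Illustrative sketch (not part of the build): force_decl_die is the
   create-on-demand counterpart of lookup_decl_die.  A hypothetical caller
   that needs a guaranteed target for a DW_AT_import reference could be
   written as below.  */
#if 0
static void
add_import_for (dw_die_ref importing_die, tree decl)
{
  /* force_decl_die looks the DIE up first and only generates it (together
     with any required context DIEs) if it does not exist yet.  */
  dw_die_ref target = force_decl_die (decl);
  add_AT_die_ref (importing_die, DW_AT_import, target);
}
#endif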
25930
25931 /* Returns the DIE for TYPE, which must not be a base type. A DIE is
25932 always returned. */
25933
25934 static dw_die_ref
25935 force_type_die (tree type)
25936 {
25937 dw_die_ref type_die;
25938
25939 type_die = lookup_type_die (type);
25940 if (!type_die)
25941 {
25942 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
25943
25944 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
25945 false, context_die);
25946 gcc_assert (type_die);
25947 }
25948 return type_die;
25949 }
25950
25951 /* Force out any required namespaces to be able to output DECL,
25952 and return the new context_die for it, if it's changed. */
25953
25954 static dw_die_ref
25955 setup_namespace_context (tree thing, dw_die_ref context_die)
25956 {
25957 tree context = (DECL_P (thing)
25958 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
25959 if (context && TREE_CODE (context) == NAMESPACE_DECL)
25960 /* Force out the namespace. */
25961 context_die = force_decl_die (context);
25962
25963 return context_die;
25964 }
25965
25966 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
25967 type) within its namespace, if appropriate.
25968
25969 For compatibility with older debuggers, namespace DIEs only contain
25970 declarations; all definitions are emitted at CU scope, with
25971 DW_AT_specification pointing to the declaration (like with class
25972 members). */
25973
25974 static dw_die_ref
25975 declare_in_namespace (tree thing, dw_die_ref context_die)
25976 {
25977 dw_die_ref ns_context;
25978
25979 if (debug_info_level <= DINFO_LEVEL_TERSE)
25980 return context_die;
25981
25982 /* External declarations in the local scope only need to be emitted
25983 once, not once in the namespace and once in the scope.
25984
25985 This avoids declaring the `extern' below in the
25986 namespace DIE as well as in the innermost scope:
25987
25988 namespace S
25989 {
25990 int i=5;
25991 int foo()
25992 {
25993 int i=8;
25994 extern int i;
25995 return i;
25996 }
25997 }
25998 */
25999 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
26000 return context_die;
26001
26002 /* If this decl is from an inlined function, then don't try to emit it in its
26003 namespace, as we will get confused. It would have already been emitted
26004 when the abstract instance of the inline function was emitted anyway. */
26005 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
26006 return context_die;
26007
26008 ns_context = setup_namespace_context (thing, context_die);
26009
26010 if (ns_context != context_die)
26011 {
26012 if (is_fortran ())
26013 return ns_context;
26014 if (DECL_P (thing))
26015 gen_decl_die (thing, NULL, NULL, ns_context);
26016 else
26017 gen_type_die (thing, ns_context);
26018 }
26019 return context_die;
26020 }
26021
26022 /* Generate a DIE for a namespace or namespace alias. */
26023
26024 static void
26025 gen_namespace_die (tree decl, dw_die_ref context_die)
26026 {
26027 dw_die_ref namespace_die;
26028
26029 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
26030 they are an alias of. */
26031 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
26032 {
26033 /* Output a real namespace or module. */
26034 context_die = setup_namespace_context (decl, comp_unit_die ());
26035 namespace_die = new_die (is_fortran ()
26036 ? DW_TAG_module : DW_TAG_namespace,
26037 context_die, decl);
26038 /* For Fortran modules defined in a different CU, don't add src coords. */
26039 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
26040 {
26041 const char *name = dwarf2_name (decl, 0);
26042 if (name)
26043 add_name_attribute (namespace_die, name);
26044 }
26045 else
26046 add_name_and_src_coords_attributes (namespace_die, decl);
26047 if (DECL_EXTERNAL (decl))
26048 add_AT_flag (namespace_die, DW_AT_declaration, 1);
26049 equate_decl_number_to_die (decl, namespace_die);
26050 }
26051 else
26052 {
26053 /* Output a namespace alias. */
26054
26055 /* Force out the namespace we are an alias of, if necessary. */
26056 dw_die_ref origin_die
26057 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
26058
26059 if (DECL_FILE_SCOPE_P (decl)
26060 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
26061 context_die = setup_namespace_context (decl, comp_unit_die ());
26062 /* Now create the namespace alias DIE. */
26063 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
26064 add_name_and_src_coords_attributes (namespace_die, decl);
26065 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
26066 equate_decl_number_to_die (decl, namespace_die);
26067 }
26068 if ((dwarf_version >= 5 || !dwarf_strict)
26069 && lang_hooks.decls.decl_dwarf_attribute (decl,
26070 DW_AT_export_symbols) == 1)
26071 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
26072
26073 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
26074 if (want_pubnames ())
26075 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
26076 }
26077
26078 /* Generate Dwarf debug information for a decl described by DECL.
26079 The return value is currently only meaningful for PARM_DECLs;
26080 for all other decls it returns NULL.
26081
26082 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
26083 It can be NULL otherwise. */
26084
26085 static dw_die_ref
26086 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
26087 dw_die_ref context_die)
26088 {
26089 tree decl_or_origin = decl ? decl : origin;
26090 tree class_origin = NULL, ultimate_origin;
26091
26092 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
26093 return NULL;
26094
26095 switch (TREE_CODE (decl_or_origin))
26096 {
26097 case ERROR_MARK:
26098 break;
26099
26100 case CONST_DECL:
26101 if (!is_fortran () && !is_ada ())
26102 {
26103 /* The individual enumerators of an enum type get output when we output
26104 the Dwarf representation of the relevant enum type itself. */
26105 break;
26106 }
26107
26108 /* Emit its type. */
26109 gen_type_die (TREE_TYPE (decl), context_die);
26110
26111 /* And its containing namespace. */
26112 context_die = declare_in_namespace (decl, context_die);
26113
26114 gen_const_die (decl, context_die);
26115 break;
26116
26117 case FUNCTION_DECL:
26118 #if 0
26119 /* FIXME */
26120 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26121 on local redeclarations of global functions. That seems broken. */
26122 if (current_function_decl != decl)
26123 /* This is only a declaration. */;
26124 #endif
26125
26126 /* We should have abstract copies already and should not generate
26127 stray type DIEs in late LTO dumping. */
26128 if (! early_dwarf)
26129 ;
26130
26131 /* If we're emitting a clone, emit info for the abstract instance. */
26132 else if (origin || DECL_ORIGIN (decl) != decl)
26133 dwarf2out_abstract_function (origin
26134 ? DECL_ORIGIN (origin)
26135 : DECL_ABSTRACT_ORIGIN (decl));
26136
26137 /* If we're emitting a possibly inlined function, emit it as
26138 an abstract instance. */
26139 else if (cgraph_function_possibly_inlined_p (decl)
26140 && ! DECL_ABSTRACT_P (decl)
26141 && ! class_or_namespace_scope_p (context_die)
26142 /* dwarf2out_abstract_function won't emit a die if this is just
26143 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26144 that case, because that works only if we have a die. */
26145 && DECL_INITIAL (decl) != NULL_TREE)
26146 dwarf2out_abstract_function (decl);
26147
26148 /* Otherwise we're emitting the primary DIE for this decl. */
26149 else if (debug_info_level > DINFO_LEVEL_TERSE)
26150 {
26151 /* Before we describe the FUNCTION_DECL itself, make sure that we
26152 have its containing type. */
26153 if (!origin)
26154 origin = decl_class_context (decl);
26155 if (origin != NULL_TREE)
26156 gen_type_die (origin, context_die);
26157
26158 /* And its return type. */
26159 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26160
26161 /* And its virtual context. */
26162 if (DECL_VINDEX (decl) != NULL_TREE)
26163 gen_type_die (DECL_CONTEXT (decl), context_die);
26164
26165 /* Make sure we have a member DIE for decl. */
26166 if (origin != NULL_TREE)
26167 gen_type_die_for_member (origin, decl, context_die);
26168
26169 /* And its containing namespace. */
26170 context_die = declare_in_namespace (decl, context_die);
26171 }
26172
26173 /* Now output a DIE to represent the function itself. */
26174 if (decl)
26175 gen_subprogram_die (decl, context_die);
26176 break;
26177
26178 case TYPE_DECL:
26179 /* If we are in terse mode, don't generate any DIEs to represent any
26180 actual typedefs. */
26181 if (debug_info_level <= DINFO_LEVEL_TERSE)
26182 break;
26183
26184 /* In the special case of a TYPE_DECL node representing the declaration
26185 of some type tag, if the given TYPE_DECL is marked as having been
26186 instantiated from some other (original) TYPE_DECL node (e.g. one which
26187 was generated within the original definition of an inline function) we
26188 used to generate a special (abbreviated) DW_TAG_structure_type,
26189 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26190 should actually be referencing those DIEs, as variable DIEs with that
26191 type would be emitted already in the abstract origin, so it was always
26192 removed during unused type pruning. Don't add anything in this
26193 case. */
26194 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26195 break;
26196
26197 if (is_redundant_typedef (decl))
26198 gen_type_die (TREE_TYPE (decl), context_die);
26199 else
26200 /* Output a DIE to represent the typedef itself. */
26201 gen_typedef_die (decl, context_die);
26202 break;
26203
26204 case LABEL_DECL:
26205 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26206 gen_label_die (decl, context_die);
26207 break;
26208
26209 case VAR_DECL:
26210 case RESULT_DECL:
26211 /* If we are in terse mode, don't generate any DIEs to represent any
26212 variable declarations or definitions. */
26213 if (debug_info_level <= DINFO_LEVEL_TERSE)
26214 break;
26215
26216 /* Avoid generating stray type DIEs during late dwarf dumping.
26217 All types have been dumped early. */
26218 if (early_dwarf
26219 /* ??? But in LTRANS we cannot annotate early created variably
26220 modified type DIEs without copying them and adjusting all
26221 references to them. Dump them again as happens for inlining
26222 which copies both the decl and the types. */
26223 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26224 in VLA bound information for example. */
26225 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26226 current_function_decl)))
26227 {
26228 /* Output any DIEs that are needed to specify the type of this data
26229 object. */
26230 if (decl_by_reference_p (decl_or_origin))
26231 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26232 else
26233 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26234 }
26235
26236 if (early_dwarf)
26237 {
26238 /* And its containing type. */
26239 class_origin = decl_class_context (decl_or_origin);
26240 if (class_origin != NULL_TREE)
26241 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26242
26243 /* And its containing namespace. */
26244 context_die = declare_in_namespace (decl_or_origin, context_die);
26245 }
26246
26247 /* Now output the DIE to represent the data object itself. This gets
26248 complicated because of the possibility that the VAR_DECL really
26249 represents an inlined instance of a formal parameter for an inline
26250 function. */
26251 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26252 if (ultimate_origin != NULL_TREE
26253 && TREE_CODE (ultimate_origin) == PARM_DECL)
26254 gen_formal_parameter_die (decl, origin,
26255 true /* Emit name attribute. */,
26256 context_die);
26257 else
26258 gen_variable_die (decl, origin, context_die);
26259 break;
26260
26261 case FIELD_DECL:
26262 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26263 /* Ignore the nameless fields that are used to skip bits but handle C++
26264 anonymous unions and structs. */
26265 if (DECL_NAME (decl) != NULL_TREE
26266 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26267 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26268 {
26269 gen_type_die (member_declared_type (decl), context_die);
26270 gen_field_die (decl, ctx, context_die);
26271 }
26272 break;
26273
26274 case PARM_DECL:
26275 /* Avoid generating stray type DIEs during late dwarf dumping.
26276 All types have been dumped early. */
26277 if (early_dwarf
26278 /* ??? But in LTRANS we cannot annotate early created variably
26279 modified type DIEs without copying them and adjusting all
26280 references to them. Dump them again as happens for inlining
26281 which copies both the decl and the types. */
26282 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26283 in VLA bound information for example. */
26284 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26285 current_function_decl)))
26286 {
26287 if (DECL_BY_REFERENCE (decl_or_origin))
26288 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26289 else
26290 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26291 }
26292 return gen_formal_parameter_die (decl, origin,
26293 true /* Emit name attribute. */,
26294 context_die);
26295
26296 case NAMESPACE_DECL:
26297 if (dwarf_version >= 3 || !dwarf_strict)
26298 gen_namespace_die (decl, context_die);
26299 break;
26300
26301 case IMPORTED_DECL:
26302 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26303 DECL_CONTEXT (decl), context_die);
26304 break;
26305
26306 case NAMELIST_DECL:
26307 gen_namelist_decl (DECL_NAME (decl), context_die,
26308 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26309 break;
26310
26311 default:
26312 /* Probably some frontend-internal decl. Assume we don't care. */
26313 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
26314 break;
26315 }
26316
26317 return NULL;
26318 }
26319 \f
26320 /* Output initial debug information for global DECL. Called at the
26321 end of the parsing process.
26322
26323 This is the initial debug generation process. As such, the DIEs
26324 generated may be incomplete. A later debug generation pass
26325 (dwarf2out_late_global_decl) will augment the information generated
26326 in this pass (e.g., with complete location info). */
26327
26328 static void
26329 dwarf2out_early_global_decl (tree decl)
26330 {
26331 set_early_dwarf s;
26332
26333 /* gen_decl_die() will set DECL_ABSTRACT because
26334 cgraph_function_possibly_inlined_p() returns true. This in
26335 turn will cause DW_AT_inline attributes to be set.
26336
26337 This happens because at early dwarf generation, there is no
26338 cgraph information, causing cgraph_function_possibly_inlined_p()
26339 to return true. Trick cgraph_function_possibly_inlined_p()
26340 while we generate dwarf early. */
26341 bool save = symtab->global_info_ready;
26342 symtab->global_info_ready = true;
26343
26344 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26345 other DECLs and they can point to template types or other things
26346 that dwarf2out can't handle when done via dwarf2out_decl. */
26347 if (TREE_CODE (decl) != TYPE_DECL
26348 && TREE_CODE (decl) != PARM_DECL)
26349 {
26350 if (TREE_CODE (decl) == FUNCTION_DECL)
26351 {
26352 tree save_fndecl = current_function_decl;
26353
26354 /* For nested functions, make sure we have DIEs for the parents first
26355 so that all nested DIEs are generated at the proper scope in the
26356 first shot. */
26357 tree context = decl_function_context (decl);
26358 if (context != NULL)
26359 {
26360 dw_die_ref context_die = lookup_decl_die (context);
26361 current_function_decl = context;
26362
26363 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26364 enough so that it lands in its own context. This avoids type
26365 pruning issues later on. */
26366 if (context_die == NULL || is_declaration_die (context_die))
26367 dwarf2out_decl (context);
26368 }
26369
26370 /* Emit an abstract origin of a function first. This happens
26371 with C++ constructor clones, for example, and keeps
26372 dwarf2out_abstract_function happy, as it requires the early
26373 DIE of the abstract instance to be present. */
26374 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26375 dw_die_ref origin_die;
26376 if (origin != NULL
26377 /* Do not emit the DIE multiple times but make sure to
26378 process it fully here in case we just saw a declaration. */
26379 && ((origin_die = lookup_decl_die (origin)) == NULL
26380 || is_declaration_die (origin_die)))
26381 {
26382 current_function_decl = origin;
26383 dwarf2out_decl (origin);
26384 }
26385
26386 /* Emit the DIE for decl but avoid doing that multiple times. */
26387 dw_die_ref old_die;
26388 if ((old_die = lookup_decl_die (decl)) == NULL
26389 || is_declaration_die (old_die))
26390 {
26391 current_function_decl = decl;
26392 dwarf2out_decl (decl);
26393 }
26394
26395 current_function_decl = save_fndecl;
26396 }
26397 else
26398 dwarf2out_decl (decl);
26399 }
26400 symtab->global_info_ready = save;
26401 }
26402
26403 /* Return whether EXPR is an expression with the following pattern:
26404 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
26405
26406 static bool
26407 is_trivial_indirect_ref (tree expr)
26408 {
26409 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26410 return false;
26411
26412 tree nop = TREE_OPERAND (expr, 0);
26413 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26414 return false;
26415
26416 tree int_cst = TREE_OPERAND (nop, 0);
26417 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26418 }
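
/* Illustrative sketch (not part of the build): the pattern accepted above
   is a dereference of a compile-time constant address, i.e. a tree of the
   shape INDIRECT_REF (NOP_EXPR (INTEGER_CST)).  A hypothetical GENERIC
   construction of such an expression, for illustration only:  */
#if 0
static tree
build_trivial_indirect_ref (HOST_WIDE_INT addr, tree type)
{
  tree cst = build_int_cst (size_type_node, addr);
  tree ptr = build1 (NOP_EXPR, build_pointer_type (type), cst);
  return build1 (INDIRECT_REF, type, ptr);   /* Matches the check above.  */
}
#endif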
26419
26420 /* Output debug information for global decl DECL. Called from
26421 toplev.c after compilation proper has finished. */
26422
26423 static void
26424 dwarf2out_late_global_decl (tree decl)
26425 {
26426 /* Fill in any location information we were unable to determine
26427 on the first pass. */
26428 if (VAR_P (decl))
26429 {
26430 dw_die_ref die = lookup_decl_die (decl);
26431
26432 /* We may have to generate early debug late for LTO in case debug
26433 was not enabled at compile-time or the target doesn't support
26434 the LTO early debug scheme. */
26435 if (! die && in_lto_p)
26436 {
26437 dwarf2out_decl (decl);
26438 die = lookup_decl_die (decl);
26439 }
26440
26441 if (die)
26442 {
26443 /* We get called via the symtab code invoking late_global_decl
26444 for symbols that are optimized out.
26445
26446 Do not add locations for those, except if they have a
26447 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26448 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26449 INDIRECT_REF expression, as this could generate relocations to
26450 text symbols in LTO object files, which is invalid. */
26451 varpool_node *node = varpool_node::get (decl);
26452 if ((! node || ! node->definition)
26453 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26454 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26455 tree_add_const_value_attribute_for_decl (die, decl);
26456 else
26457 add_location_or_const_value_attribute (die, decl, false);
26458 }
26459 }
26460 }
26461
26462 /* Output debug information for type decl DECL. Called from toplev.c
26463 and from language front ends (to record built-in types). */
26464 static void
26465 dwarf2out_type_decl (tree decl, int local)
26466 {
26467 if (!local)
26468 {
26469 set_early_dwarf s;
26470 dwarf2out_decl (decl);
26471 }
26472 }
26473
26474 /* Output debug information for imported module or decl DECL.
26475 NAME is the non-NULL name in the lexical block if the decl has been renamed.
26476 LEXICAL_BLOCK is the lexical block (whose TREE_CODE is BLOCK)
26477 that DECL belongs to.
26478 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
26479 static void
26480 dwarf2out_imported_module_or_decl_1 (tree decl,
26481 tree name,
26482 tree lexical_block,
26483 dw_die_ref lexical_block_die)
26484 {
26485 expanded_location xloc;
26486 dw_die_ref imported_die = NULL;
26487 dw_die_ref at_import_die;
26488
26489 if (TREE_CODE (decl) == IMPORTED_DECL)
26490 {
26491 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26492 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26493 gcc_assert (decl);
26494 }
26495 else
26496 xloc = expand_location (input_location);
26497
26498 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
26499 {
26500 at_import_die = force_type_die (TREE_TYPE (decl));
26501 /* For namespace N { typedef void T; } using N::T; base_type_die
26502 returns NULL, but DW_TAG_imported_declaration requires
26503 a DW_AT_import attribute. Force creation of DW_TAG_typedef. */
26504 if (!at_import_die)
26505 {
26506 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26507 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26508 at_import_die = lookup_type_die (TREE_TYPE (decl));
26509 gcc_assert (at_import_die);
26510 }
26511 }
26512 else
26513 {
26514 at_import_die = lookup_decl_die (decl);
26515 if (!at_import_die)
26516 {
26517 /* If we're trying to avoid duplicate debug info, we may not have
26518 emitted the member decl for this field. Emit it now. */
26519 if (TREE_CODE (decl) == FIELD_DECL)
26520 {
26521 tree type = DECL_CONTEXT (decl);
26522
26523 if (TYPE_CONTEXT (type)
26524 && TYPE_P (TYPE_CONTEXT (type))
26525 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26526 DINFO_USAGE_DIR_USE))
26527 return;
26528 gen_type_die_for_member (type, decl,
26529 get_context_die (TYPE_CONTEXT (type)));
26530 }
26531 if (TREE_CODE (decl) == NAMELIST_DECL)
26532 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26533 get_context_die (DECL_CONTEXT (decl)),
26534 NULL_TREE);
26535 else
26536 at_import_die = force_decl_die (decl);
26537 }
26538 }
26539
26540 if (TREE_CODE (decl) == NAMESPACE_DECL)
26541 {
26542 if (dwarf_version >= 3 || !dwarf_strict)
26543 imported_die = new_die (DW_TAG_imported_module,
26544 lexical_block_die,
26545 lexical_block);
26546 else
26547 return;
26548 }
26549 else
26550 imported_die = new_die (DW_TAG_imported_declaration,
26551 lexical_block_die,
26552 lexical_block);
26553
26554 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26555 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26556 if (debug_column_info && xloc.column)
26557 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26558 if (name)
26559 add_AT_string (imported_die, DW_AT_name,
26560 IDENTIFIER_POINTER (name));
26561 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26562 }
26563
26564 /* Output debug information for imported module or decl DECL.
26565 NAME is non-NULL name in context if the decl has been renamed.
26566 CHILD is true if decl is one of the renamed decls as part of
26567 importing whole module.
26568 IMPLICIT is set if this hook is called for an implicit import
26569 such as inline namespace. */
26570
26571 static void
26572 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26573 bool child, bool implicit)
26574 {
26575 /* dw_die_ref at_import_die; */
26576 dw_die_ref scope_die;
26577
26578 if (debug_info_level <= DINFO_LEVEL_TERSE)
26579 return;
26580
26581 gcc_assert (decl);
26582
26583 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
26584 should be enough. For DWARF4 and older, even if we emit
26585 DW_AT_export_symbols as an extension, add the implicit DW_TAG_imported_module
26586 anyway for the benefit of consumers unaware of DW_AT_export_symbols. */
26587 if (implicit
26588 && dwarf_version >= 5
26589 && lang_hooks.decls.decl_dwarf_attribute (decl,
26590 DW_AT_export_symbols) == 1)
26591 return;
26592
26593 set_early_dwarf s;
26594
26595 /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need
26596 two DIEs: the decl DIE for the reference and a scope DIE. First, get the
26597 DIE for the decl itself. */
26598
26599 /* Get the scope die for decl context. Use comp_unit_die for global module
26600 or decl. If no DIE is found for non-globals, force a new one. */
26601 if (context
26602 && TYPE_P (context)
26603 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26604 return;
26605
26606 scope_die = get_context_die (context);
26607
26608 if (child)
26609 {
26610 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26611 there is nothing we can do here. */
26612 if (dwarf_version < 3 && dwarf_strict)
26613 return;
26614
26615 gcc_assert (scope_die->die_child);
26616 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26617 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26618 scope_die = scope_die->die_child;
26619 }
26620
26621 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
26622 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26623 }
26624
26625 /* Output debug information for namelists. */
26626
26627 static dw_die_ref
26628 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26629 {
26630 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26631 tree value;
26632 unsigned i;
26633
26634 if (debug_info_level <= DINFO_LEVEL_TERSE)
26635 return NULL;
26636
26637 gcc_assert (scope_die != NULL);
26638 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26639 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26640
26641 /* If there are no item_decls, we have a nondefining namelist, e.g.
26642 with USE association; hence, set DW_AT_declaration. */
26643 if (item_decls == NULL_TREE)
26644 {
26645 add_AT_flag (nml_die, DW_AT_declaration, 1);
26646 return nml_die;
26647 }
26648
26649 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26650 {
26651 nml_item_ref_die = lookup_decl_die (value);
26652 if (!nml_item_ref_die)
26653 nml_item_ref_die = force_decl_die (value);
26654
26655 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26656 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26657 }
26658 return nml_die;
26659 }
26660
26661
26662 /* Write the debugging output for DECL. */
26663
26664 static void
26665 dwarf2out_decl (tree decl)
26666 {
26667 dw_die_ref context_die = comp_unit_die ();
26668
26669 switch (TREE_CODE (decl))
26670 {
26671 case ERROR_MARK:
26672 return;
26673
26674 case FUNCTION_DECL:
26675 /* If we're a nested function, initially use a parent of NULL; if we're
26676 a plain function, this will be fixed up in decls_for_scope. If
26677 we're a method, it will be ignored, since we already have a DIE.
26678 Avoid doing this late though since clones of class methods may
26679 otherwise end up in limbo and create type DIEs late. */
26680 if (early_dwarf
26681 && decl_function_context (decl)
26682 /* But if we're in terse mode, we don't care about scope. */
26683 && debug_info_level > DINFO_LEVEL_TERSE)
26684 context_die = NULL;
26685 break;
26686
26687 case VAR_DECL:
26688 /* For local statics, look up the proper context DIE. */
26689 if (local_function_static (decl))
26690 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26691
26692 /* If we are in terse mode, don't generate any DIEs to represent any
26693 variable declarations or definitions. */
26694 if (debug_info_level <= DINFO_LEVEL_TERSE)
26695 return;
26696 break;
26697
26698 case CONST_DECL:
26699 if (debug_info_level <= DINFO_LEVEL_TERSE)
26700 return;
26701 if (!is_fortran () && !is_ada ())
26702 return;
26703 if (TREE_STATIC (decl) && decl_function_context (decl))
26704 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26705 break;
26706
26707 case NAMESPACE_DECL:
26708 case IMPORTED_DECL:
26709 if (debug_info_level <= DINFO_LEVEL_TERSE)
26710 return;
26711 if (lookup_decl_die (decl) != NULL)
26712 return;
26713 break;
26714
26715 case TYPE_DECL:
26716 /* Don't emit stubs for types unless they are needed by other DIEs. */
26717 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26718 return;
26719
26720 /* Don't bother trying to generate any DIEs to represent any of the
26721 normal built-in types for the language we are compiling. */
26722 if (DECL_IS_BUILTIN (decl))
26723 return;
26724
26725 /* If we are in terse mode, don't generate any DIEs for types. */
26726 if (debug_info_level <= DINFO_LEVEL_TERSE)
26727 return;
26728
26729 /* If we're a function-scope tag, initially use a parent of NULL;
26730 this will be fixed up in decls_for_scope. */
26731 if (decl_function_context (decl))
26732 context_die = NULL;
26733
26734 break;
26735
26736 case NAMELIST_DECL:
26737 break;
26738
26739 default:
26740 return;
26741 }
26742
26743 gen_decl_die (decl, NULL, NULL, context_die);
26744
26745 if (flag_checking)
26746 {
26747 dw_die_ref die = lookup_decl_die (decl);
26748 if (die)
26749 check_die (die);
26750 }
26751 }
26752
26753 /* Write the debugging output for DECL. */
26754
26755 static void
26756 dwarf2out_function_decl (tree decl)
26757 {
26758 dwarf2out_decl (decl);
26759 call_arg_locations = NULL;
26760 call_arg_loc_last = NULL;
26761 call_site_count = -1;
26762 tail_call_site_count = -1;
26763 decl_loc_table->empty ();
26764 cached_dw_loc_list_table->empty ();
26765 }
26766
26767 /* Output a marker (i.e. a label) for the beginning of the generated code for
26768 a lexical block. */
26769
26770 static void
26771 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26772 unsigned int blocknum)
26773 {
26774 switch_to_section (current_function_section ());
26775 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
26776 }
26777
26778 /* Output a marker (i.e. a label) for the end of the generated code for a
26779 lexical block. */
26780
26781 static void
26782 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
26783 {
26784 switch_to_section (current_function_section ());
26785 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
26786 }
26787
26788 /* Returns true if it is appropriate not to emit any debugging
26789 information for BLOCK, because it doesn't contain any instructions.
26790
26791 Don't allow this for blocks with nested functions or local classes
26792 as we would end up with orphans, and in the presence of scheduling
26793 we may end up calling them anyway. */
26794
26795 static bool
26796 dwarf2out_ignore_block (const_tree block)
26797 {
26798 tree decl;
26799 unsigned int i;
26800
26801 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
26802 if (TREE_CODE (decl) == FUNCTION_DECL
26803 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26804 return false;
26805 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
26806 {
26807 decl = BLOCK_NONLOCALIZED_VAR (block, i);
26808 if (TREE_CODE (decl) == FUNCTION_DECL
26809 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26810 return false;
26811 }
26812
26813 return true;
26814 }
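
/* Illustrative example (not part of the build): a hypothetical source
   fragment showing the rule above.  The first inner scope may be ignored
   (it only declares a plain local), while the second may not, because it
   contains a nested function (a GNU C extension) whose DIE must not end up
   orphaned.  */
#if 0
void
example (void)
{
  {                       /* dwarf2out_ignore_block returns true.  */
    int unused_local;
  }
  {                       /* dwarf2out_ignore_block returns false.  */
    void nested (void) {}
    nested ();
  }
}
#endif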
26815
26816 /* Hash table routines for file_hash. */
26817
26818 bool
26819 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
26820 {
26821 return filename_cmp (p1->filename, p2) == 0;
26822 }
26823
26824 hashval_t
26825 dwarf_file_hasher::hash (dwarf_file_data *p)
26826 {
26827 return htab_hash_string (p->filename);
26828 }
26829
26830 /* Lookup FILE_NAME (in the list of filenames that we know about here in
26831 dwarf2out.c) and return its "index". The index of each (known) filename is
26832 just a unique number which is associated with only that one filename. We
26833 need such numbers for the sake of generating labels (in the .debug_sfnames
26834 section) and references to those file numbers (in the .debug_srcinfo
26835 and .debug_macinfo sections). If the filename given as an argument is not
26836 found in our current list, add it to the list and assign it the next
26837 available unique index number. */
26838
26839 static struct dwarf_file_data *
26840 lookup_filename (const char *file_name)
26841 {
26842 struct dwarf_file_data * created;
26843
26844 if (!file_name)
26845 return NULL;
26846
26847 dwarf_file_data **slot
26848 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
26849 INSERT);
26850 if (*slot)
26851 return *slot;
26852
26853 created = ggc_alloc<dwarf_file_data> ();
26854 created->filename = file_name;
26855 created->emitted_number = 0;
26856 *slot = created;
26857 return created;
26858 }
26859
26860 /* If the assembler will construct the file table, then translate the compiler
26861 internal file table number into the assembler file table number, and emit
26862 a .file directive if we haven't already emitted one yet. The file table
26863 numbers are different because we prune debug info for unused variables and
26864 types, which may include filenames. */
26865
26866 static int
26867 maybe_emit_file (struct dwarf_file_data * fd)
26868 {
26869 if (! fd->emitted_number)
26870 {
26871 if (last_emitted_file)
26872 fd->emitted_number = last_emitted_file->emitted_number + 1;
26873 else
26874 fd->emitted_number = 1;
26875 last_emitted_file = fd;
26876
26877 if (output_asm_line_debug_info ())
26878 {
26879 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
26880 output_quoted_string (asm_out_file,
26881 remap_debug_filename (fd->filename));
26882 fputc ('\n', asm_out_file);
26883 }
26884 }
26885
26886 return fd->emitted_number;
26887 }
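
/* Illustrative sketch (not part of the build): maybe_emit_file returns a
   stable 1-based number for FD and, when the assembler builds the line
   table, emits a directive of the form
       .file 2 "src/foo.c"
   the first time FD is seen.  A hypothetical use:  */
#if 0
static unsigned
file_number_for (const char *filename)
{
  struct dwarf_file_data *fd = lookup_filename (filename);
  return fd ? (unsigned) maybe_emit_file (fd) : 0;
}
#endif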
26888
26889 /* Schedule generation of a DW_AT_const_value attribute to DIE.
26890 That generation should happen after function debug info has been
26891 generated. The value of the attribute is the constant value of ARG. */
26892
26893 static void
26894 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
26895 {
26896 die_arg_entry entry;
26897
26898 if (!die || !arg)
26899 return;
26900
26901 gcc_assert (early_dwarf);
26902
26903 if (!tmpl_value_parm_die_table)
26904 vec_alloc (tmpl_value_parm_die_table, 32);
26905
26906 entry.die = die;
26907 entry.arg = arg;
26908 vec_safe_push (tmpl_value_parm_die_table, entry);
26909 }
26910
26911 /* Return TRUE if T is an instance of a generic type, FALSE
26912 otherwise. */
26913
26914 static bool
26915 generic_type_p (tree t)
26916 {
26917 if (t == NULL_TREE || !TYPE_P (t))
26918 return false;
26919 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
26920 }
26921
26922 /* Schedule the generation of the generic parameter dies for the
26923 instance of generic type T. The proper generation itself is later
26924 done by gen_scheduled_generic_parms_dies. */
26925
26926 static void
26927 schedule_generic_params_dies_gen (tree t)
26928 {
26929 if (!generic_type_p (t))
26930 return;
26931
26932 gcc_assert (early_dwarf);
26933
26934 if (!generic_type_instances)
26935 vec_alloc (generic_type_instances, 256);
26936
26937 vec_safe_push (generic_type_instances, t);
26938 }
26939
26940 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
26941 by append_entry_to_tmpl_value_parm_die_table. This function must
26942 be called after function DIEs have been generated. */
26943
26944 static void
26945 gen_remaining_tmpl_value_param_die_attribute (void)
26946 {
26947 if (tmpl_value_parm_die_table)
26948 {
26949 unsigned i, j;
26950 die_arg_entry *e;
26951
26952 /* We do this in two phases - first get the cases we can
26953 handle during early-finish, preserving those we cannot
26954 (containing symbolic constants where we don't yet know
26955 whether we are going to output the referenced symbols).
26956 For those we try again at late-finish. */
26957 j = 0;
26958 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
26959 {
26960 if (!e->die->removed
26961 && !tree_add_const_value_attribute (e->die, e->arg))
26962 {
26963 dw_loc_descr_ref loc = NULL;
26964 if (! early_dwarf
26965 && (dwarf_version >= 5 || !dwarf_strict))
26966 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
26967 if (loc)
26968 add_AT_loc (e->die, DW_AT_location, loc);
26969 else
26970 (*tmpl_value_parm_die_table)[j++] = *e;
26971 }
26972 }
26973 tmpl_value_parm_die_table->truncate (j);
26974 }
26975 }
26976
26977 /* Generate generic parameters DIEs for instances of generic types
26978 that have been previously scheduled by
26979 schedule_generic_params_dies_gen. This function must be called
26980 after all the types of the CU have been laid out. */
26981
26982 static void
26983 gen_scheduled_generic_parms_dies (void)
26984 {
26985 unsigned i;
26986 tree t;
26987
26988 if (!generic_type_instances)
26989 return;
26990
26991 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
26992 if (COMPLETE_TYPE_P (t))
26993 gen_generic_params_dies (t);
26994
26995 generic_type_instances = NULL;
26996 }
26997
26998
26999 /* Replace DW_AT_name for DECL with NAME. */
27000
27001 static void
27002 dwarf2out_set_name (tree decl, tree name)
27003 {
27004 dw_die_ref die;
27005 dw_attr_node *attr;
27006 const char *dname;
27007
27008 die = TYPE_SYMTAB_DIE (decl);
27009 if (!die)
27010 return;
27011
27012 dname = dwarf2_name (name, 0);
27013 if (!dname)
27014 return;
27015
27016 attr = get_AT (die, DW_AT_name);
27017 if (attr)
27018 {
27019 struct indirect_string_node *node;
27020
27021 node = find_AT_string (dname);
27022 /* replace the string. */
27023 attr->dw_attr_val.v.val_str = node;
27024 }
27025
27026 else
27027 add_name_attribute (die, dname);
27028 }
27029
27030 /* True if before or during processing of the first function being emitted. */
27031 static bool in_first_function_p = true;
27032 /* True if the loc_note seen during a dwarf2out_var_location call might
27033 still be before the first real instruction, at an address equal to .Ltext0. */
27034 static bool maybe_at_text_label_p = true;
27035 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
27036 static unsigned int first_loclabel_num_not_at_text_label;
27037
27038 /* Look ahead for a real insn, or for a begin stmt marker. */
27039
27040 static rtx_insn *
27041 dwarf2out_next_real_insn (rtx_insn *loc_note)
27042 {
27043 rtx_insn *next_real = NEXT_INSN (loc_note);
27044
27045 while (next_real)
27046 if (INSN_P (next_real))
27047 break;
27048 else
27049 next_real = NEXT_INSN (next_real);
27050
27051 return next_real;
27052 }
27053
27054 /* Called by the final INSN scan whenever we see a var location. We
27055 use it to drop labels in the right places, and throw the location in
27056 our lookup table. */
27057
27058 static void
27059 dwarf2out_var_location (rtx_insn *loc_note)
27060 {
27061 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
27062 struct var_loc_node *newloc;
27063 rtx_insn *next_real, *next_note;
27064 rtx_insn *call_insn = NULL;
27065 static const char *last_label;
27066 static const char *last_postcall_label;
27067 static bool last_in_cold_section_p;
27068 static rtx_insn *expected_next_loc_note;
27069 tree decl;
27070 bool var_loc_p;
27071 var_loc_view view = 0;
27072
27073 if (!NOTE_P (loc_note))
27074 {
27075 if (CALL_P (loc_note))
27076 {
27077 maybe_reset_location_view (loc_note, cur_line_info_table);
27078 call_site_count++;
27079 if (SIBLING_CALL_P (loc_note))
27080 tail_call_site_count++;
27081 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
27082 {
27083 call_insn = loc_note;
27084 loc_note = NULL;
27085 var_loc_p = false;
27086
27087 next_real = dwarf2out_next_real_insn (call_insn);
27088 next_note = NULL;
27089 cached_next_real_insn = NULL;
27090 goto create_label;
27091 }
27092 if (optimize == 0 && !flag_var_tracking)
27093 {
27094 /* When the var-tracking pass is not running, there is no note
27095 for indirect calls whose target is compile-time known. In this
27096 case, process such calls specifically so that we generate call
27097 sites for them anyway. */
27098 rtx x = PATTERN (loc_note);
27099 if (GET_CODE (x) == PARALLEL)
27100 x = XVECEXP (x, 0, 0);
27101 if (GET_CODE (x) == SET)
27102 x = SET_SRC (x);
27103 if (GET_CODE (x) == CALL)
27104 x = XEXP (x, 0);
27105 if (!MEM_P (x)
27106 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27107 || !SYMBOL_REF_DECL (XEXP (x, 0))
27108 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27109 != FUNCTION_DECL))
27110 {
27111 call_insn = loc_note;
27112 loc_note = NULL;
27113 var_loc_p = false;
27114
27115 next_real = dwarf2out_next_real_insn (call_insn);
27116 next_note = NULL;
27117 cached_next_real_insn = NULL;
27118 goto create_label;
27119 }
27120 }
27121 }
27122 else if (!debug_variable_location_views)
27123 gcc_unreachable ();
27124 else
27125 maybe_reset_location_view (loc_note, cur_line_info_table);
27126
27127 return;
27128 }
27129
27130 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27131 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27132 return;
27133
27134 /* Optimize processing a large consecutive sequence of location
27135 notes so we don't spend too much time in next_real_insn. If the
27136 next insn is another location note, remember the next_real_insn
27137 calculation for next time. */
27138 next_real = cached_next_real_insn;
27139 if (next_real)
27140 {
27141 if (expected_next_loc_note != loc_note)
27142 next_real = NULL;
27143 }
27144
27145 next_note = NEXT_INSN (loc_note);
27146 if (! next_note
27147 || next_note->deleted ()
27148 || ! NOTE_P (next_note)
27149 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
27150 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
27151 && NOTE_KIND (next_note) != NOTE_INSN_INLINE_ENTRY))
27152 next_note = NULL;
27153
27154 if (! next_real)
27155 next_real = dwarf2out_next_real_insn (loc_note);
27156
27157 if (next_note)
27158 {
27159 expected_next_loc_note = next_note;
27160 cached_next_real_insn = next_real;
27161 }
27162 else
27163 cached_next_real_insn = NULL;
27164
27165 /* If there are no instructions which would be affected by this note,
27166 don't do anything. */
27167 if (var_loc_p
27168 && next_real == NULL_RTX
27169 && !NOTE_DURING_CALL_P (loc_note))
27170 return;
27171
27172 create_label:
27173
27174 if (next_real == NULL_RTX)
27175 next_real = get_last_insn ();
27176
27177 /* If there were any real insns between the note we processed last time
27178 and this note (or if it is the first note), clear
27179 last_{,postcall_}label so that they are not reused this time. */
27180 if (last_var_location_insn == NULL_RTX
27181 || last_var_location_insn != next_real
27182 || last_in_cold_section_p != in_cold_section_p)
27183 {
27184 last_label = NULL;
27185 last_postcall_label = NULL;
27186 }
27187
27188 if (var_loc_p)
27189 {
27190 const char *label
27191 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27192 view = cur_line_info_table->view;
27193 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27194 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27195 if (newloc == NULL)
27196 return;
27197 }
27198 else
27199 {
27200 decl = NULL_TREE;
27201 newloc = NULL;
27202 }
27203
27204 /* If there were no real insns between the note we processed last time
27205 and this note, use the label we emitted last time. Otherwise
27206 create a new label and emit it. */
27207 if (last_label == NULL)
27208 {
27209 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27210 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27211 loclabel_num++;
27212 last_label = ggc_strdup (loclabel);
27213 /* See if loclabel might be equal to .Ltext0. If yes,
27214 bump first_loclabel_num_not_at_text_label. */
27215 if (!have_multiple_function_sections
27216 && in_first_function_p
27217 && maybe_at_text_label_p)
27218 {
27219 static rtx_insn *last_start;
27220 rtx_insn *insn;
27221 for (insn = loc_note; insn; insn = previous_insn (insn))
27222 if (insn == last_start)
27223 break;
27224 else if (!NONDEBUG_INSN_P (insn))
27225 continue;
27226 else
27227 {
27228 rtx body = PATTERN (insn);
27229 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27230 continue;
27231 /* Inline asm could occupy zero bytes. */
27232 else if (GET_CODE (body) == ASM_INPUT
27233 || asm_noperands (body) >= 0)
27234 continue;
27235 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27236 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27237 continue;
27238 #endif
27239 else
27240 {
27241 /* Assume insn has non-zero length. */
27242 maybe_at_text_label_p = false;
27243 break;
27244 }
27245 }
27246 if (maybe_at_text_label_p)
27247 {
27248 last_start = loc_note;
27249 first_loclabel_num_not_at_text_label = loclabel_num;
27250 }
27251 }
27252 }
27253
27254 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27255 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27256
27257 if (!var_loc_p)
27258 {
27259 struct call_arg_loc_node *ca_loc
27260 = ggc_cleared_alloc<call_arg_loc_node> ();
27261 rtx_insn *prev = call_insn;
27262
27263 ca_loc->call_arg_loc_note
27264 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27265 ca_loc->next = NULL;
27266 ca_loc->label = last_label;
27267 gcc_assert (prev
27268 && (CALL_P (prev)
27269 || (NONJUMP_INSN_P (prev)
27270 && GET_CODE (PATTERN (prev)) == SEQUENCE
27271 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27272 if (!CALL_P (prev))
27273 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27274 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27275
27276 /* Look for a SYMBOL_REF in the "prev" instruction. */
27277 rtx x = get_call_rtx_from (PATTERN (prev));
27278 if (x)
27279 {
27280 /* Try to get the call symbol, if any. */
27281 if (MEM_P (XEXP (x, 0)))
27282 x = XEXP (x, 0);
27283 /* First, look for a memory access to a symbol_ref. */
27284 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27285 && SYMBOL_REF_DECL (XEXP (x, 0))
27286 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27287 ca_loc->symbol_ref = XEXP (x, 0);
27288 /* Otherwise, look at a compile-time known user-level function
27289 declaration. */
27290 else if (MEM_P (x)
27291 && MEM_EXPR (x)
27292 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27293 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27294 }
27295
27296 ca_loc->block = insn_scope (prev);
27297 if (call_arg_locations)
27298 call_arg_loc_last->next = ca_loc;
27299 else
27300 call_arg_locations = ca_loc;
27301 call_arg_loc_last = ca_loc;
27302 }
27303 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27304 {
27305 newloc->label = last_label;
27306 newloc->view = view;
27307 }
27308 else
27309 {
27310 if (!last_postcall_label)
27311 {
27312 sprintf (loclabel, "%s-1", last_label);
27313 last_postcall_label = ggc_strdup (loclabel);
27314 }
27315 newloc->label = last_postcall_label;
27316 /* ??? This view is at last_label, not last_label-1, but we
27317 could only assume view at last_label-1 is zero if we could
27318 assume calls always have length greater than one. This is
27319 probably true in general, though there might be a rare
27320 exception to this rule, e.g. if a call insn is optimized out
27321 by target magic. Then, even the -1 in the label will be
27322 wrong, which might invalidate the range. Anyway, using view,
27323 though technically possibly incorrect, will work as far as
27324 ranges go: since L-1 is in the middle of the call insn,
27325 (L-1).0 and (L-1).V shouldn't make any difference, and having
27326 the loclist entry refer to the .loc entry might be useful, so
27327 leave it like this. */
27328 newloc->view = view;
27329 }
27330
27331 if (var_loc_p && flag_debug_asm)
27332 {
27333 const char *name, *sep, *patstr;
27334 if (decl && DECL_NAME (decl))
27335 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27336 else
27337 name = "";
27338 if (NOTE_VAR_LOCATION_LOC (loc_note))
27339 {
27340 sep = " => ";
27341 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27342 }
27343 else
27344 {
27345 sep = " ";
27346 patstr = "RESET";
27347 }
27348 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27349 name, sep, patstr);
27350 }
27351
27352 last_var_location_insn = next_real;
27353 last_in_cold_section_p = in_cold_section_p;
27354 }
27355
27356 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27357 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27358 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27359 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27360 BLOCK_FRAGMENT_ORIGIN links. */
27361 static bool
27362 block_within_block_p (tree block, tree outer, bool bothways)
27363 {
27364 if (block == outer)
27365 return true;
27366
27367 /* Quickly check that OUTER is up BLOCK's supercontext chain. */
27368 for (tree context = BLOCK_SUPERCONTEXT (block);
27369 context != outer;
27370 context = BLOCK_SUPERCONTEXT (context))
27371 if (!context || TREE_CODE (context) != BLOCK)
27372 return false;
27373
27374 if (!bothways)
27375 return true;
27376
27377 /* Now check that each block is actually referenced by its
27378 parent. */
27379 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27380 context = BLOCK_SUPERCONTEXT (context))
27381 {
27382 if (BLOCK_FRAGMENT_ORIGIN (context))
27383 {
27384 gcc_assert (!BLOCK_SUBBLOCKS (context));
27385 context = BLOCK_FRAGMENT_ORIGIN (context);
27386 }
27387 for (tree sub = BLOCK_SUBBLOCKS (context);
27388 sub != block;
27389 sub = BLOCK_CHAIN (sub))
27390 if (!sub)
27391 return false;
27392 if (context == outer)
27393 return true;
27394 else
27395 block = context;
27396 }
27397 }
27398
27399 /* Called during final while assembling the marker of the entry point
27400 for an inlined function. */
27401
27402 static void
27403 dwarf2out_inline_entry (tree block)
27404 {
27405 gcc_assert (debug_inline_points);
27406
27407 /* If we can't represent it, don't bother. */
27408 if (!(dwarf_version >= 3 || !dwarf_strict))
27409 return;
27410
27411 gcc_assert (DECL_P (block_ultimate_origin (block)));
27412
27413 /* Sanity check the block tree. This would catch a case in which
27414 BLOCK got removed from the tree reachable from the outermost
27415 lexical block, but got retained in markers. It would still link
27416 back to its parents, but some ancestor would be missing a link
27417 down the path to the sub BLOCK. If the block got removed, its
27418 BLOCK_NUMBER will not be a usable value. */
27419 if (flag_checking)
27420 gcc_assert (block_within_block_p (block,
27421 DECL_INITIAL (current_function_decl),
27422 true));
27423
27424 gcc_assert (inlined_function_outer_scope_p (block));
27425 gcc_assert (!BLOCK_DIE (block));
27426
27427 if (BLOCK_FRAGMENT_ORIGIN (block))
27428 block = BLOCK_FRAGMENT_ORIGIN (block);
27429 /* Can the entry point ever not be at the beginning of an
27430 unfragmented lexical block? */
27431 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27432 || (cur_line_info_table
27433 && !ZERO_VIEW_P (cur_line_info_table->view))))
27434 return;
27435
27436 if (!inline_entry_data_table)
27437 inline_entry_data_table
27438 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27439
27440
27441 inline_entry_data **iedp
27442 = inline_entry_data_table->find_slot_with_hash (block,
27443 htab_hash_pointer (block),
27444 INSERT);
27445 if (*iedp)
27446 /* ??? Ideally, we'd record all entry points for the same inlined
27447 function (some may have been duplicated by e.g. unrolling), but
27448 we have no way to represent that ATM. */
27449 return;
27450
27451 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27452 ied->block = block;
27453 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27454 ied->label_num = BLOCK_NUMBER (block);
27455 if (cur_line_info_table)
27456 ied->view = cur_line_info_table->view;
27457
27458 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27459
27460 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_INLINE_ENTRY_LABEL,
27461 BLOCK_NUMBER (block));
27462 ASM_OUTPUT_LABEL (asm_out_file, label);
27463 }
27464
27465 /* Called from finalize_size_functions for size functions so that their body
27466 can be encoded in the debug info to describe the layout of variable-length
27467 structures. */
27468
27469 static void
27470 dwarf2out_size_function (tree decl)
27471 {
27472 function_to_dwarf_procedure (decl);
27473 }
27474
27475 /* Note in one location list that text section has changed. */
27476
27477 int
27478 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27479 {
27480 var_loc_list *list = *slot;
27481 if (list->first)
27482 list->last_before_switch
27483 = list->last->next ? list->last->next : list->last;
27484 return 1;
27485 }
27486
27487 /* Note in all location lists that text section has changed. */
27488
27489 static void
27490 var_location_switch_text_section (void)
27491 {
27492 if (decl_loc_table == NULL)
27493 return;
27494
27495 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27496 }
27497
27498 /* Create a new line number table. */
27499
27500 static dw_line_info_table *
27501 new_line_info_table (void)
27502 {
27503 dw_line_info_table *table;
27504
27505 table = ggc_cleared_alloc<dw_line_info_table> ();
27506 table->file_num = 1;
27507 table->line_num = 1;
27508 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
27509 FORCE_RESET_NEXT_VIEW (table->view);
27510 table->symviews_since_reset = 0;
27511
27512 return table;
27513 }
27514
27515 /* Look up the "current" table into which we emit line info, so
27516 that we don't have to do it for every source line. */
27517
27518 static void
27519 set_cur_line_info_table (section *sec)
27520 {
27521 dw_line_info_table *table;
27522
27523 if (sec == text_section)
27524 table = text_section_line_info;
27525 else if (sec == cold_text_section)
27526 {
27527 table = cold_text_section_line_info;
27528 if (!table)
27529 {
27530 cold_text_section_line_info = table = new_line_info_table ();
27531 table->end_label = cold_end_label;
27532 }
27533 }
27534 else
27535 {
27536 const char *end_label;
27537
27538 if (crtl->has_bb_partition)
27539 {
27540 if (in_cold_section_p)
27541 end_label = crtl->subsections.cold_section_end_label;
27542 else
27543 end_label = crtl->subsections.hot_section_end_label;
27544 }
27545 else
27546 {
27547 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27548 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27549 current_function_funcdef_no);
27550 end_label = ggc_strdup (label);
27551 }
27552
27553 table = new_line_info_table ();
27554 table->end_label = end_label;
27555
27556 vec_safe_push (separate_line_info, table);
27557 }
27558
27559 if (output_asm_line_debug_info ())
27560 table->is_stmt = (cur_line_info_table
27561 ? cur_line_info_table->is_stmt
27562 : DWARF_LINE_DEFAULT_IS_STMT_START);
27563 cur_line_info_table = table;
27564 }
27565
27566
27567 /* We need to reset the locations at the beginning of each
27568 function. We can't do this in the end_function hook, because the
27569 declarations that use the locations won't have been output when
27570 that hook is called. Also compute have_multiple_function_sections here. */
27571
27572 static void
27573 dwarf2out_begin_function (tree fun)
27574 {
27575 section *sec = function_section (fun);
27576
27577 if (sec != text_section)
27578 have_multiple_function_sections = true;
27579
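/* The first function emitted with hot/cold partitioning sets up the cold
   text section and outputs the label that marks its start.  */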
27580 if (crtl->has_bb_partition && !cold_text_section)
27581 {
27582 gcc_assert (current_function_decl == fun);
27583 cold_text_section = unlikely_text_section ();
27584 switch_to_section (cold_text_section);
27585 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27586 switch_to_section (sec);
27587 }
27588
27589 dwarf2out_note_section_used ();
27590 call_site_count = 0;
27591 tail_call_site_count = 0;
27592
27593 set_cur_line_info_table (sec);
27594 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27595 }
27596
27597 /* Helper function of dwarf2out_end_function, called only after emitting
27598 the very first function into assembly. Check if some .debug_loc range
27599 might end with a .LVL* label that could be equal to .Ltext0.
27600 In that case we must force using absolute addresses in .debug_loc ranges,
27601 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27602 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27603 list terminator.
27604 Set have_multiple_function_sections to true in that case and
27605 terminate htab traversal. */
27606
27607 int
27608 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27609 {
27610 var_loc_list *entry = *slot;
27611 struct var_loc_node *node;
27612
27613 node = entry->first;
27614 if (node && node->next && node->next->label)
27615 {
27616 unsigned int i;
27617 const char *label = node->next->label;
27618 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27619
27620 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27621 {
27622 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27623 if (strcmp (label, loclabel) == 0)
27624 {
27625 have_multiple_function_sections = true;
27626 return 0;
27627 }
27628 }
27629 }
27630 return 1;
27631 }
27632
27633 /* Hook called after emitting a function into assembly.
27634 This does something only for the very first function emitted. */
27635
27636 static void
27637 dwarf2out_end_function (unsigned int)
27638 {
27639 if (in_first_function_p
27640 && !have_multiple_function_sections
27641 && first_loclabel_num_not_at_text_label
27642 && decl_loc_table)
27643 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27644 in_first_function_p = false;
27645 maybe_at_text_label_p = false;
27646 }
27647
27648 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27649 front-ends register a translation unit even before dwarf2out_init is
27650 called. */
27651 static tree main_translation_unit = NULL_TREE;
27652
27653 /* Hook called by front-ends after they built their main translation unit.
27654 Associate comp_unit_die to UNIT. */
27655
27656 static void
27657 dwarf2out_register_main_translation_unit (tree unit)
27658 {
27659 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27660 && main_translation_unit == NULL_TREE);
27661 main_translation_unit = unit;
27662 /* If dwarf2out_init has not been called yet, it will perform the association
27663 itself looking at main_translation_unit. */
27664 if (decl_die_table != NULL)
27665 equate_decl_number_to_die (unit, comp_unit_die ());
27666 }
27667
27668 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27669
27670 static void
27671 push_dw_line_info_entry (dw_line_info_table *table,
27672 enum dw_line_info_opcode opcode, unsigned int val)
27673 {
27674 dw_line_info_entry e;
27675 e.opcode = opcode;
27676 e.val = val;
27677 vec_safe_push (table->entries, e);
27678 }
27679
27680 /* Output a label to mark the beginning of a source code line entry
27681 and record information relating to this source line, in
27682 'line_info_table' for later output of the .debug_line section. */
27683 /* ??? The discriminator parameter ought to be unsigned. */
27684
27685 static void
27686 dwarf2out_source_line (unsigned int line, unsigned int column,
27687 const char *filename,
27688 int discriminator, bool is_stmt)
27689 {
27690 unsigned int file_num;
27691 dw_line_info_table *table;
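/* Monotonically increasing counter used to hand out unique ids for
   symbolic location views (the "LVU" labels) when the assembler computes
   view numbers for us.  */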
27692 static var_loc_view lvugid;
27693
27694 if (debug_info_level < DINFO_LEVEL_TERSE)
27695 return;
27696
27697 table = cur_line_info_table;
27698
27699 if (line == 0)
27700 {
27701 if (debug_variable_location_views
27702 && output_asm_line_debug_info ()
27703 && table && !RESETTING_VIEW_P (table->view))
27704 {
27705 /* If we're using the assembler to compute view numbers, we
27706 can't issue a .loc directive for line zero, so we can't
27707 get a view number at this point. We might attempt to
27708 compute it from the previous view, or equate it to a
27709 subsequent view (though it might not be there!), but
27710 since we're omitting the line number entry, we might as
27711 well omit the view number as well. That means pretending
27712 it's a view number zero, which might very well turn out
27713 to be correct. ??? Extend the assembler so that the
27714 compiler could emit e.g. ".locview .LVU#", to output a
27715 view without changing line number information. We'd then
27716 have to count it in symviews_since_reset; when it's omitted,
27717 it doesn't count. */
27718 if (!zero_view_p)
27719 zero_view_p = BITMAP_GGC_ALLOC ();
27720 bitmap_set_bit (zero_view_p, table->view);
27721 if (flag_debug_asm)
27722 {
27723 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27724 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27725 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27726 ASM_COMMENT_START);
27727 assemble_name (asm_out_file, label);
27728 putc ('\n', asm_out_file);
27729 }
27730 table->view = ++lvugid;
27731 }
27732 return;
27733 }
27734
27735 /* The discriminator column was added in DWARF 4.  Simplify the code
27736 below by clearing the discriminator when we are not supposed to output it. */
27737 if (dwarf_version < 4 && dwarf_strict)
27738 discriminator = 0;
27739
27740 if (!debug_column_info)
27741 column = 0;
27742
27743 file_num = maybe_emit_file (lookup_filename (filename));
27744
27745 /* ??? TODO: Elide duplicate line number entries. Traditionally,
27746 the debugger has used the second (possibly duplicate) line number
27747 at the beginning of the function to mark the end of the prologue.
27748 We could eliminate any other duplicates within the function. For
27749 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
27750 that second line number entry. */
27751 /* Recall that this end-of-prologue indication is *not* the same thing
27752 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
27753 to which the hook corresponds, follows the last insn that was
27754 emitted by gen_prologue. What we need is to precede the first insn
27755 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
27756 insn that corresponds to something the user wrote. These may be
27757 very different locations once scheduling is enabled. */
27758
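/* Note that the duplicate-elision check below is disabled by the leading
   "0 &&"; see the notes above.  */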
27759 if (0 && file_num == table->file_num
27760 && line == table->line_num
27761 && column == table->column_num
27762 && discriminator == table->discrim_num
27763 && is_stmt == table->is_stmt)
27764 return;
27765
27766 switch_to_section (current_function_section ());
27767
27768 /* If requested, emit something human-readable. */
27769 if (flag_debug_asm)
27770 {
27771 if (debug_column_info)
27772 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
27773 filename, line, column);
27774 else
27775 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
27776 filename, line);
27777 }
27778
27779 if (output_asm_line_debug_info ())
27780 {
27781 /* Emit the .loc directive understood by GNU as. */
27782 /* "\t.loc %u %u 0 is_stmt %u discriminator %u",
27783 file_num, line, is_stmt, discriminator */
27784 fputs ("\t.loc ", asm_out_file);
27785 fprint_ul (asm_out_file, file_num);
27786 putc (' ', asm_out_file);
27787 fprint_ul (asm_out_file, line);
27788 putc (' ', asm_out_file);
27789 fprint_ul (asm_out_file, column);
27790
27791 if (is_stmt != table->is_stmt)
27792 {
27793 #if HAVE_GAS_LOC_STMT
27794 fputs (" is_stmt ", asm_out_file);
27795 putc (is_stmt ? '1' : '0', asm_out_file);
27796 #endif
27797 }
27798 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
27799 {
27800 gcc_assert (discriminator > 0);
27801 fputs (" discriminator ", asm_out_file);
27802 fprint_ul (asm_out_file, (unsigned long) discriminator);
27803 }
27804 if (debug_variable_location_views)
27805 {
27806 if (!RESETTING_VIEW_P (table->view))
27807 {
27808 table->symviews_since_reset++;
27809 if (table->symviews_since_reset > symview_upper_bound)
27810 symview_upper_bound = table->symviews_since_reset;
27811 /* When we're using the assembler to compute view
27812 numbers, we output symbolic labels after "view" in
27813 .loc directives, and the assembler will set them for
27814 us, so that we can refer to the view numbers in
27815 location lists. The only exceptions are when we know
27816 a view will be zero: "-0" is a forced reset, used
27817 e.g. in the beginning of functions, whereas "0" tells
27818 the assembler to check that there was a PC change
27819 since the previous view, in a way that implicitly
27820 resets the next view. */
27821 fputs (" view ", asm_out_file);
27822 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27823 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27824 assemble_name (asm_out_file, label);
27825 table->view = ++lvugid;
27826 }
27827 else
27828 {
27829 table->symviews_since_reset = 0;
27830 if (FORCE_RESETTING_VIEW_P (table->view))
27831 fputs (" view -0", asm_out_file);
27832 else
27833 fputs (" view 0", asm_out_file);
27834 /* Mark the present view as a zero view. Earlier debug
27835 binds may have already added its id to loclists to be
27836 emitted later, so we can't reuse the id for something
27837 else. However, it's good to know whether a view is
27838 known to be zero, because then we may be able to
27839 optimize out locviews that are all zeros, so take
27840 note of it in zero_view_p. */
27841 if (!zero_view_p)
27842 zero_view_p = BITMAP_GGC_ALLOC ();
27843 bitmap_set_bit (zero_view_p, lvugid);
27844 table->view = ++lvugid;
27845 }
27846 }
27847 putc ('\n', asm_out_file);
27848 }
27849 else
27850 {
27851 unsigned int label_num = ++line_info_label_num;
27852
27853 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
27854
27855 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
27856 push_dw_line_info_entry (table, LI_adv_address, label_num);
27857 else
27858 push_dw_line_info_entry (table, LI_set_address, label_num);
27859 if (debug_variable_location_views)
27860 {
27861 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
27862 if (resetting)
27863 table->view = 0;
27864
27865 if (flag_debug_asm)
27866 fprintf (asm_out_file, "\t%s view %s%d\n",
27867 ASM_COMMENT_START,
27868 resetting ? "-" : "",
27869 table->view);
27870
27871 table->view++;
27872 }
27873 if (file_num != table->file_num)
27874 push_dw_line_info_entry (table, LI_set_file, file_num);
27875 if (discriminator != table->discrim_num)
27876 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
27877 if (is_stmt != table->is_stmt)
27878 push_dw_line_info_entry (table, LI_negate_stmt, 0);
27879 push_dw_line_info_entry (table, LI_set_line, line);
27880 if (debug_column_info)
27881 push_dw_line_info_entry (table, LI_set_column, column);
27882 }
27883
27884 table->file_num = file_num;
27885 table->line_num = line;
27886 table->column_num = column;
27887 table->discrim_num = discriminator;
27888 table->is_stmt = is_stmt;
27889 table->in_use = true;
27890 }
27891
27892 /* Record the beginning of a new source file. */
27893
27894 static void
27895 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
27896 {
27897 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27898 {
27899 macinfo_entry e;
27900 e.code = DW_MACINFO_start_file;
27901 e.lineno = lineno;
27902 e.info = ggc_strdup (filename);
27903 vec_safe_push (macinfo_table, e);
27904 }
27905 }
27906
27907 /* Record the end of a source file. */
27908
27909 static void
27910 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
27911 {
27912 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27913 {
27914 macinfo_entry e;
27915 e.code = DW_MACINFO_end_file;
27916 e.lineno = lineno;
27917 e.info = NULL;
27918 vec_safe_push (macinfo_table, e);
27919 }
27920 }
27921
27922 /* Called from debug_define in toplev.c. The `buffer' parameter contains
27923 the tail part of the directive line, i.e. the part which is past the
27924 initial whitespace, #, whitespace, directive-name, whitespace part. */
27925
27926 static void
27927 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
27928 const char *buffer ATTRIBUTE_UNUSED)
27929 {
27930 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27931 {
27932 macinfo_entry e;
27933 /* Insert a dummy first entry to be able to optimize the whole
27934 predefined macro block using DW_MACRO_import. */
27935 if (macinfo_table->is_empty () && lineno <= 1)
27936 {
27937 e.code = 0;
27938 e.lineno = 0;
27939 e.info = NULL;
27940 vec_safe_push (macinfo_table, e);
27941 }
27942 e.code = DW_MACINFO_define;
27943 e.lineno = lineno;
27944 e.info = ggc_strdup (buffer);
27945 vec_safe_push (macinfo_table, e);
27946 }
27947 }
27948
27949 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
27950 the tail part of the directive line, i.e. the part which is past the
27951 initial whitespace, #, whitespace, directive-name, whitespace part. */
27952
27953 static void
27954 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
27955 const char *buffer ATTRIBUTE_UNUSED)
27956 {
27957 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27958 {
27959 macinfo_entry e;
27960 /* Insert a dummy first entry to be able to optimize the whole
27961 predefined macro block using DW_MACRO_import. */
27962 if (macinfo_table->is_empty () && lineno <= 1)
27963 {
27964 e.code = 0;
27965 e.lineno = 0;
27966 e.info = NULL;
27967 vec_safe_push (macinfo_table, e);
27968 }
27969 e.code = DW_MACINFO_undef;
27970 e.lineno = lineno;
27971 e.info = ggc_strdup (buffer);
27972 vec_safe_push (macinfo_table, e);
27973 }
27974 }
27975
27976 /* Helpers to manipulate the hash table of macinfo entries. */
27977
27978 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
27979 {
27980 static inline hashval_t hash (const macinfo_entry *);
27981 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
27982 };
27983
27984 inline hashval_t
27985 macinfo_entry_hasher::hash (const macinfo_entry *entry)
27986 {
27987 return htab_hash_string (entry->info);
27988 }
27989
27990 inline bool
27991 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
27992 const macinfo_entry *entry2)
27993 {
27994 return !strcmp (entry1->info, entry2->info);
27995 }
27996
27997 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
27998
27999 /* Output a single .debug_macinfo entry. */
28000
28001 static void
28002 output_macinfo_op (macinfo_entry *ref)
28003 {
28004 int file_num;
28005 size_t len;
28006 struct indirect_string_node *node;
28007 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28008 struct dwarf_file_data *fd;
28009
28010 switch (ref->code)
28011 {
28012 case DW_MACINFO_start_file:
28013 fd = lookup_filename (ref->info);
28014 file_num = maybe_emit_file (fd);
28015 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
28016 dw2_asm_output_data_uleb128 (ref->lineno,
28017 "Included from line number %lu",
28018 (unsigned long) ref->lineno);
28019 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
28020 break;
28021 case DW_MACINFO_end_file:
28022 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
28023 break;
28024 case DW_MACINFO_define:
28025 case DW_MACINFO_undef:
28026 len = strlen (ref->info) + 1;
28027 if (!dwarf_strict
28028 && len > DWARF_OFFSET_SIZE
28029 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28030 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28031 {
28032 ref->code = ref->code == DW_MACINFO_define
28033 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
28034 output_macinfo_op (ref);
28035 return;
28036 }
28037 dw2_asm_output_data (1, ref->code,
28038 ref->code == DW_MACINFO_define
28039 ? "Define macro" : "Undefine macro");
28040 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28041 (unsigned long) ref->lineno);
28042 dw2_asm_output_nstring (ref->info, -1, "The macro");
28043 break;
28044 case DW_MACRO_define_strp:
28045 case DW_MACRO_undef_strp:
28046 node = find_AT_string (ref->info);
28047 gcc_assert (node
28048 && (node->form == DW_FORM_strp
28049 || node->form == dwarf_FORM (DW_FORM_strx)));
28050 dw2_asm_output_data (1, ref->code,
28051 ref->code == DW_MACRO_define_strp
28052 ? "Define macro strp"
28053 : "Undefine macro strp");
28054 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28055 (unsigned long) ref->lineno);
28056 if (node->form == DW_FORM_strp)
28057 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
28058 debug_str_section, "The macro: \"%s\"",
28059 ref->info);
28060 else
28061 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
28062 ref->info);
28063 break;
28064 case DW_MACRO_import:
28065 dw2_asm_output_data (1, ref->code, "Import");
28066 ASM_GENERATE_INTERNAL_LABEL (label,
28067 DEBUG_MACRO_SECTION_LABEL,
28068 ref->lineno + macinfo_label_base);
28069 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
28070 break;
28071 default:
28072 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
28073 ASM_COMMENT_START, (unsigned long) ref->code);
28074 break;
28075 }
28076 }
28077
28078 /* Attempt to make a sequence of define/undef macinfo ops shareable with
28079 other compilation units' .debug_macinfo sections.  IDX is the index of
28080 the first define/undef op in the sequence; return the number of ops that
28081 should be emitted in a comdat .debug_macinfo section and emit
28082 a DW_MACRO_import entry referencing it.
28083 If the define/undef entries should be emitted normally, return 0. */
28084
28085 static unsigned
28086 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
28087 macinfo_hash_type **macinfo_htab)
28088 {
28089 macinfo_entry *first, *second, *cur, *inc;
28090 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
28091 unsigned char checksum[16];
28092 struct md5_ctx ctx;
28093 char *grp_name, *tail;
28094 const char *base;
28095 unsigned int i, count, encoded_filename_len, linebuf_len;
28096 macinfo_entry **slot;
28097
28098 first = &(*macinfo_table)[idx];
28099 second = &(*macinfo_table)[idx + 1];
28100
28101 /* Optimize only if there are at least two consecutive define/undef ops,
28102 and either all of them are before first DW_MACINFO_start_file
28103 with lineno {0,1} (i.e. predefined macro block), or all of them are
28104 in some included header file. */
28105 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
28106 return 0;
28107 if (vec_safe_is_empty (files))
28108 {
28109 if (first->lineno > 1 || second->lineno > 1)
28110 return 0;
28111 }
28112 else if (first->lineno == 0)
28113 return 0;
28114
28115 /* Find the last define/undef entry that can be grouped together
28116 with first and at the same time compute md5 checksum of their
28117 codes, linenumbers and strings. */
28118 md5_init_ctx (&ctx);
28119 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28120 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28121 break;
28122 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28123 break;
28124 else
28125 {
28126 unsigned char code = cur->code;
28127 md5_process_bytes (&code, 1, &ctx);
28128 checksum_uleb128 (cur->lineno, &ctx);
28129 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28130 }
28131 md5_finish_ctx (&ctx, checksum);
28132 count = i - idx;
28133
28134 /* From the containing include filename (if any) pick up just
28135 usable characters from its basename. */
28136 if (vec_safe_is_empty (files))
28137 base = "";
28138 else
28139 base = lbasename (files->last ().info);
28140 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28141 if (ISIDNUM (base[i]) || base[i] == '.')
28142 encoded_filename_len++;
28143 /* Count . at the end. */
28144 if (encoded_filename_len)
28145 encoded_filename_len++;
28146
28147 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28148 linebuf_len = strlen (linebuf);
28149
28150 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
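/* For instance (hypothetical), a group whose first define/undef sits at
   line 1 of an included header foo.h would, with 32-bit DWARF offsets, be
   named "wm4.foo.h.1.<32 hex md5 digits>".  */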
28151 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28152 + 16 * 2 + 1);
28153 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
28154 tail = grp_name + 4;
28155 if (encoded_filename_len)
28156 {
28157 for (i = 0; base[i]; i++)
28158 if (ISIDNUM (base[i]) || base[i] == '.')
28159 *tail++ = base[i];
28160 *tail++ = '.';
28161 }
28162 memcpy (tail, linebuf, linebuf_len);
28163 tail += linebuf_len;
28164 *tail++ = '.';
28165 for (i = 0; i < 16; i++)
28166 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
28167
28168 /* Construct a macinfo_entry for DW_MACRO_import
28169 in the empty vector entry before the first define/undef. */
28170 inc = &(*macinfo_table)[idx - 1];
28171 inc->code = DW_MACRO_import;
28172 inc->lineno = 0;
28173 inc->info = ggc_strdup (grp_name);
28174 if (!*macinfo_htab)
28175 *macinfo_htab = new macinfo_hash_type (10);
28176 /* Avoid emitting duplicates. */
28177 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28178 if (*slot != NULL)
28179 {
28180 inc->code = 0;
28181 inc->info = NULL;
28182 /* If such an entry has been used before, just emit
28183 a DW_MACRO_import op. */
28184 inc = *slot;
28185 output_macinfo_op (inc);
28186 /* And clear all macinfo_entry in the range to avoid emitting them
28187 in the second pass. */
28188 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28189 {
28190 cur->code = 0;
28191 cur->info = NULL;
28192 }
28193 }
28194 else
28195 {
28196 *slot = inc;
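/* Reuse the lineno field to number this import; output_macinfo_op adds
   macinfo_label_base to it to form the comdat section label.  */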
28197 inc->lineno = (*macinfo_htab)->elements ();
28198 output_macinfo_op (inc);
28199 }
28200 return count;
28201 }
28202
28203 /* Save any strings needed by the macinfo table in the debug str
28204 table. All strings must be collected into the table by the time
28205 index_string is called. */
28206
28207 static void
28208 save_macinfo_strings (void)
28209 {
28210 unsigned len;
28211 unsigned i;
28212 macinfo_entry *ref;
28213
28214 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28215 {
28216 switch (ref->code)
28217 {
28218 /* Match the logic in output_macinfo_op to decide on
28219 indirect strings. */
28220 case DW_MACINFO_define:
28221 case DW_MACINFO_undef:
28222 len = strlen (ref->info) + 1;
28223 if (!dwarf_strict
28224 && len > DWARF_OFFSET_SIZE
28225 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28226 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28227 set_indirect_string (find_AT_string (ref->info));
28228 break;
28229 case DW_MACINFO_start_file:
28230 /* -gsplit-dwarf -g3 will also output filename as indirect
28231 string. */
28232 if (!dwarf_split_debug_info)
28233 break;
28234 /* Fall through. */
28235 case DW_MACRO_define_strp:
28236 case DW_MACRO_undef_strp:
28237 set_indirect_string (find_AT_string (ref->info));
28238 break;
28239 default:
28240 break;
28241 }
28242 }
28243 }
28244
28245 /* Output macinfo section(s). */
28246
28247 static void
28248 output_macinfo (const char *debug_line_label, bool early_lto_debug)
28249 {
28250 unsigned i;
28251 unsigned long length = vec_safe_length (macinfo_table);
28252 macinfo_entry *ref;
28253 vec<macinfo_entry, va_gc> *files = NULL;
28254 macinfo_hash_type *macinfo_htab = NULL;
28255 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28256
28257 if (! length)
28258 return;
28259
28260 /* output_macinfo* uses these interchangeably. */
28261 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28262 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28263 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28264 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28265
28266 /* AIX Assembler inserts the length, so adjust the reference to match the
28267 offset expected by debuggers. */
28268 strcpy (dl_section_ref, debug_line_label);
28269 if (XCOFF_DEBUGGING_INFO)
28270 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28271
28272 /* For .debug_macro emit the section header. */
28273 if (!dwarf_strict || dwarf_version >= 5)
28274 {
28275 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28276 "DWARF macro version number");
28277 if (DWARF_OFFSET_SIZE == 8)
28278 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28279 else
28280 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
28281 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
28282 debug_line_section, NULL);
28283 }
28284
28285 /* The first loop emits the primary .debug_macinfo section; after each
28286 op is emitted, its macinfo_entry is cleared.  If a longer range of
28287 define/undef ops can be optimized using DW_MACRO_import, the
28288 DW_MACRO_import op is emitted and kept in the vector slot before the
28289 first define/undef in the range, while the define/undef ops themselves
28290 are not emitted here but are kept for the second loop below. */
28291 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28292 {
28293 switch (ref->code)
28294 {
28295 case DW_MACINFO_start_file:
28296 vec_safe_push (files, *ref);
28297 break;
28298 case DW_MACINFO_end_file:
28299 if (!vec_safe_is_empty (files))
28300 files->pop ();
28301 break;
28302 case DW_MACINFO_define:
28303 case DW_MACINFO_undef:
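/* Try to share a longer run of define/undef ops via a comdat section;
   this requires the preceding table slot to be a dummy (code 0) entry
   that optimize_macinfo_range can overwrite with the DW_MACRO_import
   op.  */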
28304 if ((!dwarf_strict || dwarf_version >= 5)
28305 && HAVE_COMDAT_GROUP
28306 && vec_safe_length (files) != 1
28307 && i > 0
28308 && i + 1 < length
28309 && (*macinfo_table)[i - 1].code == 0)
28310 {
28311 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28312 if (count)
28313 {
28314 i += count - 1;
28315 continue;
28316 }
28317 }
28318 break;
28319 case 0:
28320 /* A dummy entry may be inserted at the beginning to be able
28321 to optimize the whole block of predefined macros. */
28322 if (i == 0)
28323 continue;
28324 default:
28325 break;
28326 }
28327 output_macinfo_op (ref);
28328 ref->info = NULL;
28329 ref->code = 0;
28330 }
28331
28332 if (!macinfo_htab)
28333 return;
28334
28335 /* Save the number of transparent includes so we can adjust the
28336 label number for the fat LTO object DWARF. */
28337 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28338
28339 delete macinfo_htab;
28340 macinfo_htab = NULL;
28341
28342 /* If any DW_MACRO_import entries were used, terminate the current chain
28343 at each of them, switch to a new comdat .debug_macinfo section, and
28344 emit the define/undef entries within it. */
28345 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28346 switch (ref->code)
28347 {
28348 case 0:
28349 continue;
28350 case DW_MACRO_import:
28351 {
28352 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28353 tree comdat_key = get_identifier (ref->info);
28354 /* Terminate the previous .debug_macinfo section. */
28355 dw2_asm_output_data (1, 0, "End compilation unit");
28356 targetm.asm_out.named_section (debug_macinfo_section_name,
28357 SECTION_DEBUG
28358 | SECTION_LINKONCE
28359 | (early_lto_debug
28360 ? SECTION_EXCLUDE : 0),
28361 comdat_key);
28362 ASM_GENERATE_INTERNAL_LABEL (label,
28363 DEBUG_MACRO_SECTION_LABEL,
28364 ref->lineno + macinfo_label_base);
28365 ASM_OUTPUT_LABEL (asm_out_file, label);
28366 ref->code = 0;
28367 ref->info = NULL;
28368 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28369 "DWARF macro version number");
28370 if (DWARF_OFFSET_SIZE == 8)
28371 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28372 else
28373 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28374 }
28375 break;
28376 case DW_MACINFO_define:
28377 case DW_MACINFO_undef:
28378 output_macinfo_op (ref);
28379 ref->code = 0;
28380 ref->info = NULL;
28381 break;
28382 default:
28383 gcc_unreachable ();
28384 }
28385
28386 macinfo_label_base += macinfo_label_base_adj;
28387 }
28388
28389 /* Initialize the various sections and labels for dwarf output, for
28390 either normal or early LTO debug info.  Returns the generation
28391 (zero-based count of the number of times the function has been called). */
28392
28393 static unsigned
28394 init_sections_and_labels (bool early_lto_debug)
28395 {
28396 /* As we may get called multiple times have a generation count for
28397 labels. */
28398 static unsigned generation = 0;
28399
28400 if (early_lto_debug)
28401 {
28402 if (!dwarf_split_debug_info)
28403 {
28404 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28405 SECTION_DEBUG | SECTION_EXCLUDE,
28406 NULL);
28407 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28408 SECTION_DEBUG | SECTION_EXCLUDE,
28409 NULL);
28410 debug_macinfo_section_name
28411 = ((dwarf_strict && dwarf_version < 5)
28412 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28413 debug_macinfo_section = get_section (debug_macinfo_section_name,
28414 SECTION_DEBUG
28415 | SECTION_EXCLUDE, NULL);
28416 }
28417 else
28418 {
28419 /* ??? Which of the following do we need early? */
28420 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28421 SECTION_DEBUG | SECTION_EXCLUDE,
28422 NULL);
28423 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28424 SECTION_DEBUG | SECTION_EXCLUDE,
28425 NULL);
28426 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28427 SECTION_DEBUG
28428 | SECTION_EXCLUDE, NULL);
28429 debug_skeleton_abbrev_section
28430 = get_section (DEBUG_LTO_ABBREV_SECTION,
28431 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28432 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28433 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28434 generation);
28435
28436 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28437 stay in the main .o, but the skeleton_line goes into the split
28438 off dwo. */
28439 debug_skeleton_line_section
28440 = get_section (DEBUG_LTO_LINE_SECTION,
28441 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28442 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28443 DEBUG_SKELETON_LINE_SECTION_LABEL,
28444 generation);
28445 debug_str_offsets_section
28446 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28447 SECTION_DEBUG | SECTION_EXCLUDE,
28448 NULL);
28449 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28450 DEBUG_SKELETON_INFO_SECTION_LABEL,
28451 generation);
28452 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28453 DEBUG_STR_DWO_SECTION_FLAGS,
28454 NULL);
28455 debug_macinfo_section_name
28456 = ((dwarf_strict && dwarf_version < 5)
28457 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28458 debug_macinfo_section = get_section (debug_macinfo_section_name,
28459 SECTION_DEBUG | SECTION_EXCLUDE,
28460 NULL);
28461 }
28462 /* For macro info and the file table we have to refer to a
28463 debug_line section. */
28464 debug_line_section = get_section (DEBUG_LTO_LINE_SECTION,
28465 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28466 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28467 DEBUG_LINE_SECTION_LABEL, generation);
28468
28469 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28470 DEBUG_STR_SECTION_FLAGS
28471 | SECTION_EXCLUDE, NULL);
28472 if (!dwarf_split_debug_info)
28473 debug_line_str_section
28474 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28475 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28476 }
28477 else
28478 {
28479 if (!dwarf_split_debug_info)
28480 {
28481 debug_info_section = get_section (DEBUG_INFO_SECTION,
28482 SECTION_DEBUG, NULL);
28483 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28484 SECTION_DEBUG, NULL);
28485 debug_loc_section = get_section (dwarf_version >= 5
28486 ? DEBUG_LOCLISTS_SECTION
28487 : DEBUG_LOC_SECTION,
28488 SECTION_DEBUG, NULL);
28489 debug_macinfo_section_name
28490 = ((dwarf_strict && dwarf_version < 5)
28491 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28492 debug_macinfo_section = get_section (debug_macinfo_section_name,
28493 SECTION_DEBUG, NULL);
28494 }
28495 else
28496 {
28497 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28498 SECTION_DEBUG | SECTION_EXCLUDE,
28499 NULL);
28500 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28501 SECTION_DEBUG | SECTION_EXCLUDE,
28502 NULL);
28503 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28504 SECTION_DEBUG, NULL);
28505 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28506 SECTION_DEBUG, NULL);
28507 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28508 SECTION_DEBUG, NULL);
28509 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28510 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28511 generation);
28512
28513 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28514 stay in the main .o, but the skeleton_line goes into the
28515 split off dwo. */
28516 debug_skeleton_line_section
28517 = get_section (DEBUG_DWO_LINE_SECTION,
28518 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28519 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28520 DEBUG_SKELETON_LINE_SECTION_LABEL,
28521 generation);
28522 debug_str_offsets_section
28523 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28524 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28525 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28526 DEBUG_SKELETON_INFO_SECTION_LABEL,
28527 generation);
28528 debug_loc_section = get_section (dwarf_version >= 5
28529 ? DEBUG_DWO_LOCLISTS_SECTION
28530 : DEBUG_DWO_LOC_SECTION,
28531 SECTION_DEBUG | SECTION_EXCLUDE,
28532 NULL);
28533 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28534 DEBUG_STR_DWO_SECTION_FLAGS,
28535 NULL);
28536 debug_macinfo_section_name
28537 = ((dwarf_strict && dwarf_version < 5)
28538 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28539 debug_macinfo_section = get_section (debug_macinfo_section_name,
28540 SECTION_DEBUG | SECTION_EXCLUDE,
28541 NULL);
28542 }
28543 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28544 SECTION_DEBUG, NULL);
28545 debug_line_section = get_section (DEBUG_LINE_SECTION,
28546 SECTION_DEBUG, NULL);
28547 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28548 SECTION_DEBUG, NULL);
28549 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28550 SECTION_DEBUG, NULL);
28551 debug_str_section = get_section (DEBUG_STR_SECTION,
28552 DEBUG_STR_SECTION_FLAGS, NULL);
28553 if (!dwarf_split_debug_info && !output_asm_line_debug_info ())
28554 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28555 DEBUG_STR_SECTION_FLAGS, NULL);
28556
28557 debug_ranges_section = get_section (dwarf_version >= 5
28558 ? DEBUG_RNGLISTS_SECTION
28559 : DEBUG_RANGES_SECTION,
28560 SECTION_DEBUG, NULL);
28561 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28562 SECTION_DEBUG, NULL);
28563 }
28564
28565 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28566 DEBUG_ABBREV_SECTION_LABEL, generation);
28567 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28568 DEBUG_INFO_SECTION_LABEL, generation);
28569 info_section_emitted = false;
28570 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28571 DEBUG_LINE_SECTION_LABEL, generation);
28572 /* There are up to 4 unique ranges labels per generation.
28573 See also output_rnglists. */
28574 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28575 DEBUG_RANGES_SECTION_LABEL, generation * 4);
28576 if (dwarf_version >= 5 && dwarf_split_debug_info)
28577 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28578 DEBUG_RANGES_SECTION_LABEL,
28579 1 + generation * 4);
28580 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28581 DEBUG_ADDR_SECTION_LABEL, generation);
28582 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28583 (dwarf_strict && dwarf_version < 5)
28584 ? DEBUG_MACINFO_SECTION_LABEL
28585 : DEBUG_MACRO_SECTION_LABEL, generation);
28586 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28587 generation);
28588
28589 ++generation;
28590 return generation - 1;
28591 }
28592
28593 /* Set up for Dwarf output at the start of compilation. */
28594
28595 static void
28596 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28597 {
28598 /* Allocate the file_table. */
28599 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28600
28601 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28602 /* Allocate the decl_die_table. */
28603 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28604
28605 /* Allocate the decl_loc_table. */
28606 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28607
28608 /* Allocate the cached_dw_loc_list_table. */
28609 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28610
28611 /* Allocate the initial hunk of the abbrev_die_table. */
28612 vec_alloc (abbrev_die_table, 256);
28613 /* Zero-th entry is allocated, but unused. */
28614 abbrev_die_table->quick_push (NULL);
28615
28616 /* Allocate the dwarf_proc_stack_usage_map. */
28617 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28618
28619 /* Allocate the pubtypes and pubnames vectors. */
28620 vec_alloc (pubname_table, 32);
28621 vec_alloc (pubtype_table, 32);
28622
28623 vec_alloc (incomplete_types, 64);
28624
28625 vec_alloc (used_rtx_array, 32);
28626
28627 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28628 vec_alloc (macinfo_table, 64);
28629 #endif
28630
28631 /* If front-ends already registered a main translation unit but we were not
28632 ready to perform the association, do this now. */
28633 if (main_translation_unit != NULL_TREE)
28634 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28635 }
28636
28637 /* Called before compile () starts outputting functions, variables
28638 and toplevel asms into assembly. */
28639
28640 static void
28641 dwarf2out_assembly_start (void)
28642 {
28643 if (text_section_line_info)
28644 return;
28645
28646 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28647 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28648 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28649 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28650 COLD_TEXT_SECTION_LABEL, 0);
28651 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28652
28653 switch_to_section (text_section);
28654 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28655 #endif
28656
28657 /* Make sure the line number table for .text always exists. */
28658 text_section_line_info = new_line_info_table ();
28659 text_section_line_info->end_label = text_end_label;
28660
28661 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28662 cur_line_info_table = text_section_line_info;
28663 #endif
28664
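/* When CFI is emitted through assembler directives but no .eh_frame is
   needed, ask the assembler to produce .debug_frame instead.  */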
28665 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28666 && dwarf2out_do_cfi_asm ()
28667 && !dwarf2out_do_eh_frame ())
28668 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
28669 }
28670
28671 /* A helper function for dwarf2out_finish called through
28672 htab_traverse. Assign a string its index. All strings must be
28673 collected into the table by the time index_string is called,
28674 because the indexing code relies on htab_traverse to traverse nodes
28675 in the same order for each run. */
28676
28677 int
28678 index_string (indirect_string_node **h, unsigned int *index)
28679 {
28680 indirect_string_node *node = *h;
28681
28682 find_string_form (node);
28683 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28684 {
28685 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28686 node->index = *index;
28687 *index += 1;
28688 }
28689 return 1;
28690 }
28691
28692 /* A helper function for output_indirect_strings called through
28693 htab_traverse. Output the offset to a string and update the
28694 current offset. */
28695
28696 int
28697 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28698 {
28699 indirect_string_node *node = *h;
28700
28701 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28702 {
28703 /* Assert that this node has been assigned an index. */
28704 gcc_assert (node->index != NO_INDEX_ASSIGNED
28705 && node->index != NOT_INDEXED);
28706 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
28707 "indexed string 0x%x: %s", node->index, node->str);
28708 *offset += strlen (node->str) + 1;
28709 }
28710 return 1;
28711 }
28712
28713 /* A helper function for dwarf2out_finish called through
28714 htab_traverse. Output the indexed string. */
28715
28716 int
28717 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28718 {
28719 struct indirect_string_node *node = *h;
28720
28721 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28722 {
28723 /* Assert that the strings are output in the same order as their
28724 indexes were assigned. */
28725 gcc_assert (*cur_idx == node->index);
28726 assemble_string (node->str, strlen (node->str) + 1);
28727 *cur_idx += 1;
28728 }
28729 return 1;
28730 }
28731
28732 /* A helper function for output_indirect_strings.  Counts the number
28733 of indexed string offsets.  Must match the logic of the functions
28734 output_index_string[_offset] above. */
28735 int
28736 count_index_strings (indirect_string_node **h, unsigned int *last_idx)
28737 {
28738 struct indirect_string_node *node = *h;
28739
28740 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28741 *last_idx += 1;
28742 return 1;
28743 }
28744
28745 /* A helper function for dwarf2out_finish called through
28746 htab_traverse. Emit one queued .debug_str string. */
28747
28748 int
28749 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
28750 {
28751 struct indirect_string_node *node = *h;
28752
28753 node->form = find_string_form (node);
28754 if (node->form == form && node->refcount > 0)
28755 {
28756 ASM_OUTPUT_LABEL (asm_out_file, node->label);
28757 assemble_string (node->str, strlen (node->str) + 1);
28758 }
28759
28760 return 1;
28761 }
28762
28763 /* Output the indexed string table. */
28764
28765 static void
28766 output_indirect_strings (void)
28767 {
28768 switch_to_section (debug_str_section);
28769 if (!dwarf_split_debug_info)
28770 debug_str_hash->traverse<enum dwarf_form,
28771 output_indirect_string> (DW_FORM_strp);
28772 else
28773 {
28774 unsigned int offset = 0;
28775 unsigned int cur_idx = 0;
28776
28777 if (skeleton_debug_str_hash)
28778 skeleton_debug_str_hash->traverse<enum dwarf_form,
28779 output_indirect_string> (DW_FORM_strp);
28780
28781 switch_to_section (debug_str_offsets_section);
28782 /* For DWARF5 the .debug_str_offsets[.dwo] section needs a unit
28783 header. Note that we don't need to generate a label to the
28784 actual index table following the header here, because this is
28785 for the split dwarf case only.  In a .dwo file there is only
28786 one string offsets table (and one debug info section). But
28787 if we were to start using string offset tables for the main (or
28788 skeleton) unit, then we have to add a DW_AT_str_offsets_base
28789 pointing to the actual index after the header. Split dwarf
28790 units will never have a string offsets base attribute. When
28791 a split unit is moved into a .dwp file the string offsets can
28792 be found through the .debug_cu_index section table. */
28793 if (dwarf_version >= 5)
28794 {
28795 unsigned int last_idx = 0;
28796 unsigned long str_offsets_length;
28797
28798 debug_str_hash->traverse_noresize
28799 <unsigned int *, count_index_strings> (&last_idx);
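/* The unit length must also cover the 2-byte version and the 2-byte
   padding emitted below, hence the extra 4 bytes beyond one offset per
   indexed string.  */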
28800 str_offsets_length = last_idx * DWARF_OFFSET_SIZE + 4;
28801 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
28802 dw2_asm_output_data (4, 0xffffffff,
28803 "Escape value for 64-bit DWARF extension");
28804 dw2_asm_output_data (DWARF_OFFSET_SIZE, str_offsets_length,
28805 "Length of string offsets unit");
28806 dw2_asm_output_data (2, 5, "DWARF string offsets version");
28807 dw2_asm_output_data (2, 0, "Header zero padding");
28808 }
28809 debug_str_hash->traverse_noresize
28810 <unsigned int *, output_index_string_offset> (&offset);
28811 switch_to_section (debug_str_dwo_section);
28812 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
28813 (&cur_idx);
28814 }
28815 }
28816
28817 /* Callback for htab_traverse to write an entry of the table to the
28818 .debug_addr section, checking the index previously assigned to it. */
28819
28820 int
28821 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
28822 {
28823 addr_table_entry *entry = *slot;
28824
28825 if (entry->refcount == 0)
28826 {
28827 gcc_assert (entry->index == NO_INDEX_ASSIGNED
28828 || entry->index == NOT_INDEXED);
28829 return 1;
28830 }
28831
28832 gcc_assert (entry->index == *cur_index);
28833 (*cur_index)++;
28834
28835 switch (entry->kind)
28836 {
28837 case ate_kind_rtx:
28838 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
28839 "0x%x", entry->index);
28840 break;
28841 case ate_kind_rtx_dtprel:
28842 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
28843 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
28844 DWARF2_ADDR_SIZE,
28845 entry->addr.rtl);
28846 fputc ('\n', asm_out_file);
28847 break;
28848 case ate_kind_label:
28849 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
28850 "0x%x", entry->index);
28851 break;
28852 default:
28853 gcc_unreachable ();
28854 }
28855 return 1;
28856 }
28857
28858 /* A helper function for dwarf2out_finish.  Counts the number
28859 of indexed addresses.  Must match the logic of the function
28860 output_addr_table_entry above. */
28861 int
28862 count_index_addrs (addr_table_entry **slot, unsigned int *last_idx)
28863 {
28864 addr_table_entry *entry = *slot;
28865
28866 if (entry->refcount > 0)
28867 *last_idx += 1;
28868 return 1;
28869 }
28870
28871 /* Produce the .debug_addr section. */
28872
28873 static void
28874 output_addr_table (void)
28875 {
28876 unsigned int index = 0;
28877 if (addr_index_table == NULL || addr_index_table->size () == 0)
28878 return;
28879
28880 switch_to_section (debug_addr_section);
28881 addr_index_table
28882 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
28883 }
28884
28885 #if ENABLE_ASSERT_CHECKING
28886 /* Verify that all marks are clear. */
28887
28888 static void
28889 verify_marks_clear (dw_die_ref die)
28890 {
28891 dw_die_ref c;
28892
28893 gcc_assert (! die->die_mark);
28894 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
28895 }
28896 #endif /* ENABLE_ASSERT_CHECKING */
28897
28898 /* Clear the marks for a die and its children.
28899 Don't complain if the mark isn't set. */
28900
28901 static void
28902 prune_unmark_dies (dw_die_ref die)
28903 {
28904 dw_die_ref c;
28905
28906 if (die->die_mark)
28907 die->die_mark = 0;
28908 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
28909 }
28910
28911 /* Given LOC that is referenced by a DIE we're marking as used, find all
28912 DWARF procedures it references and mark them as used as well. */
28913
28914 static void
28915 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
28916 {
28917 for (; loc != NULL; loc = loc->dw_loc_next)
28918 switch (loc->dw_loc_opc)
28919 {
28920 case DW_OP_implicit_pointer:
28921 case DW_OP_convert:
28922 case DW_OP_reinterpret:
28923 case DW_OP_GNU_implicit_pointer:
28924 case DW_OP_GNU_convert:
28925 case DW_OP_GNU_reinterpret:
28926 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
28927 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28928 break;
28929 case DW_OP_GNU_variable_value:
28930 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
28931 {
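/* Replace the decl reference with a direct reference to its DIE, when
   one exists, so that falling through below marks that DIE as used.  */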
28932 dw_die_ref ref
28933 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
28934 if (ref == NULL)
28935 break;
28936 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28937 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
28938 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
28939 }
28940 /* FALLTHRU */
28941 case DW_OP_call2:
28942 case DW_OP_call4:
28943 case DW_OP_call_ref:
28944 case DW_OP_const_type:
28945 case DW_OP_GNU_const_type:
28946 case DW_OP_GNU_parameter_ref:
28947 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
28948 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28949 break;
28950 case DW_OP_regval_type:
28951 case DW_OP_deref_type:
28952 case DW_OP_GNU_regval_type:
28953 case DW_OP_GNU_deref_type:
28954 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
28955 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
28956 break;
28957 case DW_OP_entry_value:
28958 case DW_OP_GNU_entry_value:
28959 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
28960 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
28961 break;
28962 default:
28963 break;
28964 }
28965 }
28966
28967 /* Given DIE that we're marking as used, find any other dies
28968 it references as attributes and mark them as used. */
28969
28970 static void
28971 prune_unused_types_walk_attribs (dw_die_ref die)
28972 {
28973 dw_attr_node *a;
28974 unsigned ix;
28975
28976 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
28977 {
28978 switch (AT_class (a))
28979 {
28980 /* Make sure DWARF procedures referenced by location descriptions will
28981 get emitted. */
28982 case dw_val_class_loc:
28983 prune_unused_types_walk_loc_descr (AT_loc (a));
28984 break;
28985 case dw_val_class_loc_list:
28986 for (dw_loc_list_ref list = AT_loc_list (a);
28987 list != NULL;
28988 list = list->dw_loc_next)
28989 prune_unused_types_walk_loc_descr (list->expr);
28990 break;
28991
28992 case dw_val_class_view_list:
28993 /* This points to a loc_list in another attribute, so it's
28994 already covered. */
28995 break;
28996
28997 case dw_val_class_die_ref:
28998 /* A reference to another DIE.
28999 Make sure that it will get emitted.
29000 If it was broken out into a comdat group, don't follow it. */
29001 if (! AT_ref (a)->comdat_type_p
29002 || a->dw_attr == DW_AT_specification)
29003 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
29004 break;
29005
29006 case dw_val_class_str:
29007 /* Set the string's refcount to 0 so that prune_unused_types_mark
29008 accounts properly for it. */
29009 a->dw_attr_val.v.val_str->refcount = 0;
29010 break;
29011
29012 default:
29013 break;
29014 }
29015 }
29016 }
29017
29018 /* Mark the generic parameters and arguments children DIEs of DIE. */
29019
29020 static void
29021 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
29022 {
29023 dw_die_ref c;
29024
29025 if (die == NULL || die->die_child == NULL)
29026 return;
29027 c = die->die_child;
29028 do
29029 {
29030 if (is_template_parameter (c))
29031 prune_unused_types_mark (c, 1);
29032 c = c->die_sib;
29033 } while (c && c != die->die_child);
29034 }
29035
29036 /* Mark DIE as being used. If DOKIDS is true, then walk down
29037 to DIE's children. */
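/* die_mark acts as a small state machine: 0 = not visited yet,
   1 = marked as used, 2 = marked and children already walked.  */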
29038
29039 static void
29040 prune_unused_types_mark (dw_die_ref die, int dokids)
29041 {
29042 dw_die_ref c;
29043
29044 if (die->die_mark == 0)
29045 {
29046 /* We haven't done this node yet. Mark it as used. */
29047 die->die_mark = 1;
29048 /* If this is the DIE of a generic type instantiation,
29049 mark the children DIEs that describe its generic parms and
29050 args. */
29051 prune_unused_types_mark_generic_parms_dies (die);
29052
29053 /* We also have to mark its parents as used.
29054 (But we don't want to mark our parent's kids due to this,
29055 unless it is a class.) */
29056 if (die->die_parent)
29057 prune_unused_types_mark (die->die_parent,
29058 class_scope_p (die->die_parent));
29059
29060 /* Mark any referenced nodes. */
29061 prune_unused_types_walk_attribs (die);
29062
29063 /* If this node is a declaration, also mark the
29064 definition, if it exists. */
29065 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
29066 prune_unused_types_mark (die->die_definition, 1);
29067 }
29068
29069 if (dokids && die->die_mark != 2)
29070 {
29071 /* We need to walk the children, but haven't done so yet.
29072 Remember that we've walked the kids. */
29073 die->die_mark = 2;
29074
29075 /* If this is an array type, we need to make sure our
29076 kids get marked, even if they're types. If we're
29077 breaking out types into comdat sections, do this
29078 for all type definitions. */
29079 if (die->die_tag == DW_TAG_array_type
29080 || (use_debug_types
29081 && is_type_die (die) && ! is_declaration_die (die)))
29082 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
29083 else
29084 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29085 }
29086 }
29087
29088 /* For local classes, check whether any static member functions were
29089 emitted and, if so, mark them. */
29090
29091 static void
29092 prune_unused_types_walk_local_classes (dw_die_ref die)
29093 {
29094 dw_die_ref c;
29095
29096 if (die->die_mark == 2)
29097 return;
29098
29099 switch (die->die_tag)
29100 {
29101 case DW_TAG_structure_type:
29102 case DW_TAG_union_type:
29103 case DW_TAG_class_type:
29104 break;
29105
29106 case DW_TAG_subprogram:
29107 if (!get_AT_flag (die, DW_AT_declaration)
29108 || die->die_definition != NULL)
29109 prune_unused_types_mark (die, 1);
29110 return;
29111
29112 default:
29113 return;
29114 }
29115
29116 /* Mark children. */
29117 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
29118 }
29119
29120 /* Walk the tree DIE and mark types that we actually use. */
29121
29122 static void
29123 prune_unused_types_walk (dw_die_ref die)
29124 {
29125 dw_die_ref c;
29126
29127 /* Don't do anything if this node is already marked and
29128 children have been marked as well. */
29129 if (die->die_mark == 2)
29130 return;
29131
29132 switch (die->die_tag)
29133 {
29134 case DW_TAG_structure_type:
29135 case DW_TAG_union_type:
29136 case DW_TAG_class_type:
29137 if (die->die_perennial_p)
29138 break;
29139
29140 for (c = die->die_parent; c; c = c->die_parent)
29141 if (c->die_tag == DW_TAG_subprogram)
29142 break;
29143
29144 /* Finding used static member functions inside of classes
29145 is needed just for local classes, because for other classes
29146 static member function DIEs with DW_AT_specification
29147 are emitted outside of the DW_TAG_*_type. If we ever change
29148 it, we'd need to call this even for non-local classes. */
29149 if (c)
29150 prune_unused_types_walk_local_classes (die);
29151
29152 /* It's a type node --- don't mark it. */
29153 return;
29154
29155 case DW_TAG_const_type:
29156 case DW_TAG_packed_type:
29157 case DW_TAG_pointer_type:
29158 case DW_TAG_reference_type:
29159 case DW_TAG_rvalue_reference_type:
29160 case DW_TAG_volatile_type:
29161 case DW_TAG_typedef:
29162 case DW_TAG_array_type:
29163 case DW_TAG_interface_type:
29164 case DW_TAG_friend:
29165 case DW_TAG_enumeration_type:
29166 case DW_TAG_subroutine_type:
29167 case DW_TAG_string_type:
29168 case DW_TAG_set_type:
29169 case DW_TAG_subrange_type:
29170 case DW_TAG_ptr_to_member_type:
29171 case DW_TAG_file_type:
29172 /* Type nodes are useful only when other DIEs reference them --- don't
29173 mark them. */
29174 /* FALLTHROUGH */
29175
29176 case DW_TAG_dwarf_procedure:
29177 /* Likewise for DWARF procedures. */
29178
29179 if (die->die_perennial_p)
29180 break;
29181
29182 return;
29183
29184 default:
29185 /* Mark everything else. */
29186 break;
29187 }
29188
29189 if (die->die_mark == 0)
29190 {
29191 die->die_mark = 1;
29192
29193 /* Now, mark any dies referenced from here. */
29194 prune_unused_types_walk_attribs (die);
29195 }
29196
29197 die->die_mark = 2;
29198
29199 /* Mark children. */
29200 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29201 }
29202
29203 /* Increment the string counts on strings referred to from DIE's
29204 attributes. */
29205
29206 static void
29207 prune_unused_types_update_strings (dw_die_ref die)
29208 {
29209 dw_attr_node *a;
29210 unsigned ix;
29211
29212 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29213 if (AT_class (a) == dw_val_class_str)
29214 {
29215 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29216 s->refcount++;
29217 /* Avoid unnecessarily putting strings that are used fewer than
29218 twice into the hash table. */
29219 if (s->refcount
29220 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
29221 {
29222 indirect_string_node **slot
29223 = debug_str_hash->find_slot_with_hash (s->str,
29224 htab_hash_string (s->str),
29225 INSERT);
29226 gcc_assert (*slot == NULL);
29227 *slot = s;
29228 }
29229 }
29230 }
29231
29232 /* Mark DIE and its children as removed. */
29233
29234 static void
29235 mark_removed (dw_die_ref die)
29236 {
29237 dw_die_ref c;
29238 die->removed = true;
29239 FOR_EACH_CHILD (die, c, mark_removed (c));
29240 }
29241
29242 /* Remove from the tree DIE any dies that aren't marked. */
29243
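/* A reading aid, inferred from the surrounding code: the children of a
   DIE form a circular singly linked list through die_sib, with die_child
   pointing at the last child (so die_child->die_sib is the first child).
   The unlinking loop below relies on this invariant.  */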
29244 static void
29245 prune_unused_types_prune (dw_die_ref die)
29246 {
29247 dw_die_ref c;
29248
29249 gcc_assert (die->die_mark);
29250 prune_unused_types_update_strings (die);
29251
29252 if (! die->die_child)
29253 return;
29254
29255 c = die->die_child;
29256 do {
29257 dw_die_ref prev = c, next;
29258 for (c = c->die_sib; ! c->die_mark; c = next)
29259 if (c == die->die_child)
29260 {
29261 /* No marked children between 'prev' and the end of the list. */
29262 if (prev == c)
29263 /* No marked children at all. */
29264 die->die_child = NULL;
29265 else
29266 {
29267 prev->die_sib = c->die_sib;
29268 die->die_child = prev;
29269 }
29270 c->die_sib = NULL;
29271 mark_removed (c);
29272 return;
29273 }
29274 else
29275 {
29276 next = c->die_sib;
29277 c->die_sib = NULL;
29278 mark_removed (c);
29279 }
29280
29281 if (c != prev->die_sib)
29282 prev->die_sib = c;
29283 prune_unused_types_prune (c);
29284 } while (c != die->die_child);
29285 }
29286
29287 /* Remove dies representing declarations that we never use. */
29288
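/* Roughly, this runs in three phases, as the body below shows: first mark
   everything that is actually used, starting from the compilation unit,
   limbo and comdat type DIEs, the pubname table and the marked base types;
   then prune every unmarked DIE from the tree; finally clear the marks
   again so that later passes start from a clean state.  */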
29289 static void
29290 prune_unused_types (void)
29291 {
29292 unsigned int i;
29293 limbo_die_node *node;
29294 comdat_type_node *ctnode;
29295 pubname_entry *pub;
29296 dw_die_ref base_type;
29297
29298 #if ENABLE_ASSERT_CHECKING
29299 /* All the marks should already be clear. */
29300 verify_marks_clear (comp_unit_die ());
29301 for (node = limbo_die_list; node; node = node->next)
29302 verify_marks_clear (node->die);
29303 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29304 verify_marks_clear (ctnode->root_die);
29305 #endif /* ENABLE_ASSERT_CHECKING */
29306
29307 /* Mark types that are used in global variables. */
29308 premark_types_used_by_global_vars ();
29309
29310 /* Set the mark on nodes that are actually used. */
29311 prune_unused_types_walk (comp_unit_die ());
29312 for (node = limbo_die_list; node; node = node->next)
29313 prune_unused_types_walk (node->die);
29314 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29315 {
29316 prune_unused_types_walk (ctnode->root_die);
29317 prune_unused_types_mark (ctnode->type_die, 1);
29318 }
29319
29320 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29321 are unusual in that they are pubnames that are the children of pubtypes.
29322 They should only be marked via their parent DW_TAG_enumeration_type die,
29323 not as roots in themselves. */
29324 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29325 if (pub->die->die_tag != DW_TAG_enumerator)
29326 prune_unused_types_mark (pub->die, 1);
29327 for (i = 0; base_types.iterate (i, &base_type); i++)
29328 prune_unused_types_mark (base_type, 1);
29329
29330 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
29331 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
29332 callees). */
29333 cgraph_node *cnode;
29334 FOR_EACH_FUNCTION (cnode)
29335 if (cnode->referred_to_p (false))
29336 {
29337 dw_die_ref die = lookup_decl_die (cnode->decl);
29338 if (die == NULL || die->die_mark)
29339 continue;
29340 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29341 if (e->caller != cnode
29342 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
29343 {
29344 prune_unused_types_mark (die, 1);
29345 break;
29346 }
29347 }
29348
29349 if (debug_str_hash)
29350 debug_str_hash->empty ();
29351 if (skeleton_debug_str_hash)
29352 skeleton_debug_str_hash->empty ();
29353 prune_unused_types_prune (comp_unit_die ());
29354 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29355 {
29356 node = *pnode;
29357 if (!node->die->die_mark)
29358 *pnode = node->next;
29359 else
29360 {
29361 prune_unused_types_prune (node->die);
29362 pnode = &node->next;
29363 }
29364 }
29365 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29366 prune_unused_types_prune (ctnode->root_die);
29367
29368 /* Leave the marks clear. */
29369 prune_unmark_dies (comp_unit_die ());
29370 for (node = limbo_die_list; node; node = node->next)
29371 prune_unmark_dies (node->die);
29372 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29373 prune_unmark_dies (ctnode->root_die);
29374 }
29375
29376 /* Helpers to manipulate hash table of comdat type units. */
29377
29378 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29379 {
29380 static inline hashval_t hash (const comdat_type_node *);
29381 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29382 };
29383
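/* The DWARF type signature is itself a hash of the type, so simply reuse
   its leading bytes as the hash value.  */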
29384 inline hashval_t
29385 comdat_type_hasher::hash (const comdat_type_node *type_node)
29386 {
29387 hashval_t h;
29388 memcpy (&h, type_node->signature, sizeof (h));
29389 return h;
29390 }
29391
29392 inline bool
29393 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
29394 const comdat_type_node *type_node_2)
29395 {
29396 return (! memcmp (type_node_1->signature, type_node_2->signature,
29397 DWARF_TYPE_SIGNATURE_SIZE));
29398 }
29399
29400 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
29401 to the location where it would have been added had we known its
29402 DECL_ASSEMBLER_NAME when we added the other attributes. This should
29403 improve the compactness of the debug info by removing otherwise
29404 equivalent abbrevs, and hides any differences caused by deferring the
29405 computation of the assembler name, e.g. because of PCH. */
29406
29407 static inline void
29408 move_linkage_attr (dw_die_ref die)
29409 {
29410 unsigned ix = vec_safe_length (die->die_attr);
29411 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29412
29413 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29414 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29415
29416 while (--ix > 0)
29417 {
29418 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29419
29420 if (prev->dw_attr == DW_AT_decl_line
29421 || prev->dw_attr == DW_AT_decl_column
29422 || prev->dw_attr == DW_AT_name)
29423 break;
29424 }
29425
29426 if (ix != vec_safe_length (die->die_attr) - 1)
29427 {
29428 die->die_attr->pop ();
29429 die->die_attr->quick_insert (ix, linkage);
29430 }
29431 }
29432
29433 /* Helper function for resolve_addr, mark DW_TAG_base_type nodes
29434 referenced from typed stack ops and count how often they are used. */
29435
29436 static void
29437 mark_base_types (dw_loc_descr_ref loc)
29438 {
29439 dw_die_ref base_type = NULL;
29440
29441 for (; loc; loc = loc->dw_loc_next)
29442 {
29443 switch (loc->dw_loc_opc)
29444 {
29445 case DW_OP_regval_type:
29446 case DW_OP_deref_type:
29447 case DW_OP_GNU_regval_type:
29448 case DW_OP_GNU_deref_type:
29449 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29450 break;
29451 case DW_OP_convert:
29452 case DW_OP_reinterpret:
29453 case DW_OP_GNU_convert:
29454 case DW_OP_GNU_reinterpret:
29455 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29456 continue;
29457 /* FALLTHRU */
29458 case DW_OP_const_type:
29459 case DW_OP_GNU_const_type:
29460 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29461 break;
29462 case DW_OP_entry_value:
29463 case DW_OP_GNU_entry_value:
29464 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29465 continue;
29466 default:
29467 continue;
29468 }
29469 gcc_assert (base_type->die_parent == comp_unit_die ());
29470 if (base_type->die_mark)
29471 base_type->die_mark++;
29472 else
29473 {
29474 base_types.safe_push (base_type);
29475 base_type->die_mark = 1;
29476 }
29477 }
29478 }
29479
29480 /* Comparison function for sorting marked base types. */
29481
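/* The sort keys below, in priority order: usage count (descending, so the
   most frequently referenced base types come first and get the smallest
   uleb128 DIE references), then DW_AT_byte_size, DW_AT_encoding and
   DW_AT_alignment, which only serve as tie-breakers to keep the order
   deterministic.  */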
29482 static int
29483 base_type_cmp (const void *x, const void *y)
29484 {
29485 dw_die_ref dx = *(const dw_die_ref *) x;
29486 dw_die_ref dy = *(const dw_die_ref *) y;
29487 unsigned int byte_size1, byte_size2;
29488 unsigned int encoding1, encoding2;
29489 unsigned int align1, align2;
29490 if (dx->die_mark > dy->die_mark)
29491 return -1;
29492 if (dx->die_mark < dy->die_mark)
29493 return 1;
29494 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29495 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29496 if (byte_size1 < byte_size2)
29497 return 1;
29498 if (byte_size1 > byte_size2)
29499 return -1;
29500 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29501 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29502 if (encoding1 < encoding2)
29503 return 1;
29504 if (encoding1 > encoding2)
29505 return -1;
29506 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29507 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29508 if (align1 < align2)
29509 return 1;
29510 if (align1 > align2)
29511 return -1;
29512 return 0;
29513 }
29514
29515 /* Move base types marked by mark_base_types as early as possible
29516 in the CU, sorted by decreasing usage count both to make the
29517 uleb128 references as small as possible and to make sure they
29518 will have die_offset already computed by calc_die_sizes when
29519 the sizes of typed stack loc ops are computed. */
29520
29521 static void
29522 move_marked_base_types (void)
29523 {
29524 unsigned int i;
29525 dw_die_ref base_type, die, c;
29526
29527 if (base_types.is_empty ())
29528 return;
29529
29530 /* Sort by decreasing usage count, they will be added again in that
29531 order later on. */
29532 base_types.qsort (base_type_cmp);
29533 die = comp_unit_die ();
29534 c = die->die_child;
29535 do
29536 {
29537 dw_die_ref prev = c;
29538 c = c->die_sib;
29539 while (c->die_mark)
29540 {
29541 remove_child_with_prev (c, prev);
29542 /* As base types got marked, there must be at least
29543 one node other than DW_TAG_base_type. */
29544 gcc_assert (die->die_child != NULL);
29545 c = prev->die_sib;
29546 }
29547 }
29548 while (c != die->die_child);
29549 gcc_assert (die->die_child);
29550 c = die->die_child;
29551 for (i = 0; base_types.iterate (i, &base_type); i++)
29552 {
29553 base_type->die_mark = 0;
29554 base_type->die_sib = c->die_sib;
29555 c->die_sib = base_type;
29556 c = base_type;
29557 }
29558 }
29559
29560 /* Helper function for resolve_addr: attempt to resolve one
29561 CONST_STRING and return true if successful. Similarly, verify that
29562 SYMBOL_REFs refer to variables emitted in the current CU. */
29563
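/* In outline: for CONST_STRING the code below rebuilds the STRING_CST and
   looks up its constant pool SYMBOL_REF; for SYMBOL_REF it checks that the
   referenced decl (or, for constant pool addresses, its initializer) has
   actually been written out; for CONST it recurses into every sub-rtx.  */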
29564 static bool
29565 resolve_one_addr (rtx *addr)
29566 {
29567 rtx rtl = *addr;
29568
29569 if (GET_CODE (rtl) == CONST_STRING)
29570 {
29571 size_t len = strlen (XSTR (rtl, 0)) + 1;
29572 tree t = build_string (len, XSTR (rtl, 0));
29573 tree tlen = size_int (len - 1);
29574 TREE_TYPE (t)
29575 = build_array_type (char_type_node, build_index_type (tlen));
29576 rtl = lookup_constant_def (t);
29577 if (!rtl || !MEM_P (rtl))
29578 return false;
29579 rtl = XEXP (rtl, 0);
29580 if (GET_CODE (rtl) == SYMBOL_REF
29581 && SYMBOL_REF_DECL (rtl)
29582 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29583 return false;
29584 vec_safe_push (used_rtx_array, rtl);
29585 *addr = rtl;
29586 return true;
29587 }
29588
29589 if (GET_CODE (rtl) == SYMBOL_REF
29590 && SYMBOL_REF_DECL (rtl))
29591 {
29592 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29593 {
29594 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29595 return false;
29596 }
29597 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29598 return false;
29599 }
29600
29601 if (GET_CODE (rtl) == CONST)
29602 {
29603 subrtx_ptr_iterator::array_type array;
29604 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29605 if (!resolve_one_addr (*iter))
29606 return false;
29607 }
29608
29609 return true;
29610 }
29611
29612 /* For STRING_CST, return SYMBOL_REF of its constant pool entry,
29613 if possible, and create DW_TAG_dwarf_procedure that can be referenced
29614 from DW_OP_implicit_pointer if the string hasn't been seen yet. */
29615
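/* For illustration: the DW_TAG_dwarf_procedure created below carries a
   DW_AT_location of the form DW_OP_implicit_value <len> <string bytes>, so
   a later DW_OP_implicit_pointer can refer to the string contents even
   though the literal itself ends up with no address of its own.  */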
29616 static rtx
29617 string_cst_pool_decl (tree t)
29618 {
29619 rtx rtl = output_constant_def (t, 1);
29620 unsigned char *array;
29621 dw_loc_descr_ref l;
29622 tree decl;
29623 size_t len;
29624 dw_die_ref ref;
29625
29626 if (!rtl || !MEM_P (rtl))
29627 return NULL_RTX;
29628 rtl = XEXP (rtl, 0);
29629 if (GET_CODE (rtl) != SYMBOL_REF
29630 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29631 return NULL_RTX;
29632
29633 decl = SYMBOL_REF_DECL (rtl);
29634 if (!lookup_decl_die (decl))
29635 {
29636 len = TREE_STRING_LENGTH (t);
29637 vec_safe_push (used_rtx_array, rtl);
29638 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29639 array = ggc_vec_alloc<unsigned char> (len);
29640 memcpy (array, TREE_STRING_POINTER (t), len);
29641 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29642 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29643 l->dw_loc_oprnd2.v.val_vec.length = len;
29644 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29645 l->dw_loc_oprnd2.v.val_vec.array = array;
29646 add_AT_loc (ref, DW_AT_location, l);
29647 equate_decl_number_to_die (decl, ref);
29648 }
29649 return rtl;
29650 }
29651
29652 /* Helper function of resolve_addr_in_expr. LOC is
29653 a DW_OP_addr followed by DW_OP_stack_value, either at the start
29654 of the exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
29655 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
29656 with DW_OP_implicit_pointer if possible and return true;
29657 if unsuccessful, return false. */
29658
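/* Schematically, for an otherwise unresolvable variable b this turns
   DW_OP_addr <b> DW_OP_stack_value
   into
   DW_OP_implicit_pointer <DIE of b> <offset>
   provided b's DIE has a DW_AT_location or DW_AT_const_value to refer to.  */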
29659 static bool
29660 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29661 {
29662 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29663 HOST_WIDE_INT offset = 0;
29664 dw_die_ref ref = NULL;
29665 tree decl;
29666
29667 if (GET_CODE (rtl) == CONST
29668 && GET_CODE (XEXP (rtl, 0)) == PLUS
29669 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29670 {
29671 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29672 rtl = XEXP (XEXP (rtl, 0), 0);
29673 }
29674 if (GET_CODE (rtl) == CONST_STRING)
29675 {
29676 size_t len = strlen (XSTR (rtl, 0)) + 1;
29677 tree t = build_string (len, XSTR (rtl, 0));
29678 tree tlen = size_int (len - 1);
29679
29680 TREE_TYPE (t)
29681 = build_array_type (char_type_node, build_index_type (tlen));
29682 rtl = string_cst_pool_decl (t);
29683 if (!rtl)
29684 return false;
29685 }
29686 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
29687 {
29688 decl = SYMBOL_REF_DECL (rtl);
29689 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
29690 {
29691 ref = lookup_decl_die (decl);
29692 if (ref && (get_AT (ref, DW_AT_location)
29693 || get_AT (ref, DW_AT_const_value)))
29694 {
29695 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
29696 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29697 loc->dw_loc_oprnd1.val_entry = NULL;
29698 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29699 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29700 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29701 loc->dw_loc_oprnd2.v.val_int = offset;
29702 return true;
29703 }
29704 }
29705 }
29706 return false;
29707 }
29708
29709 /* Helper function for resolve_addr: handle one location
29710 expression. Return false if at least one CONST_STRING or SYMBOL_REF
29711 in the expression couldn't be resolved. */
29712
29713 static bool
29714 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
29715 {
29716 dw_loc_descr_ref keep = NULL;
29717 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
29718 switch (loc->dw_loc_opc)
29719 {
29720 case DW_OP_addr:
29721 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29722 {
29723 if ((prev == NULL
29724 || prev->dw_loc_opc == DW_OP_piece
29725 || prev->dw_loc_opc == DW_OP_bit_piece)
29726 && loc->dw_loc_next
29727 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
29728 && (!dwarf_strict || dwarf_version >= 5)
29729 && optimize_one_addr_into_implicit_ptr (loc))
29730 break;
29731 return false;
29732 }
29733 break;
29734 case DW_OP_GNU_addr_index:
29735 case DW_OP_addrx:
29736 case DW_OP_GNU_const_index:
29737 case DW_OP_constx:
29738 if ((loc->dw_loc_opc == DW_OP_GNU_addr_index
29739 || loc->dw_loc_opc == DW_OP_addrx)
29740 || ((loc->dw_loc_opc == DW_OP_GNU_const_index
29741 || loc->dw_loc_opc == DW_OP_constx)
29742 && loc->dtprel))
29743 {
29744 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
29745 if (!resolve_one_addr (&rtl))
29746 return false;
29747 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
29748 loc->dw_loc_oprnd1.val_entry
29749 = add_addr_table_entry (rtl, ate_kind_rtx);
29750 }
29751 break;
29752 case DW_OP_const4u:
29753 case DW_OP_const8u:
29754 if (loc->dtprel
29755 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29756 return false;
29757 break;
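/* A DW_OP_plus_uconst whose uleb128 operand is large is replaced below by
   the <integer constant> DW_OP_plus sequence built by int_loc_descriptor,
   but only when that sequence is actually shorter.  */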
29758 case DW_OP_plus_uconst:
29759 if (size_of_loc_descr (loc)
29760 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
29761 + 1
29762 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
29763 {
29764 dw_loc_descr_ref repl
29765 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
29766 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
29767 add_loc_descr (&repl, loc->dw_loc_next);
29768 *loc = *repl;
29769 }
29770 break;
29771 case DW_OP_implicit_value:
29772 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
29773 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
29774 return false;
29775 break;
29776 case DW_OP_implicit_pointer:
29777 case DW_OP_GNU_implicit_pointer:
29778 case DW_OP_GNU_parameter_ref:
29779 case DW_OP_GNU_variable_value:
29780 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29781 {
29782 dw_die_ref ref
29783 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29784 if (ref == NULL)
29785 return false;
29786 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29787 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29788 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29789 }
29790 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
29791 {
29792 if (prev == NULL
29793 && loc->dw_loc_next == NULL
29794 && AT_class (a) == dw_val_class_loc)
29795 switch (a->dw_attr)
29796 {
29797 /* The following attributes allow both exprloc and reference
29798 forms, so if the whole expression is DW_OP_GNU_variable_value
29799 alone, we can transform it into a reference. */
29800 case DW_AT_byte_size:
29801 case DW_AT_bit_size:
29802 case DW_AT_lower_bound:
29803 case DW_AT_upper_bound:
29804 case DW_AT_bit_stride:
29805 case DW_AT_count:
29806 case DW_AT_allocated:
29807 case DW_AT_associated:
29808 case DW_AT_byte_stride:
29809 a->dw_attr_val.val_class = dw_val_class_die_ref;
29810 a->dw_attr_val.val_entry = NULL;
29811 a->dw_attr_val.v.val_die_ref.die
29812 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29813 a->dw_attr_val.v.val_die_ref.external = 0;
29814 return true;
29815 default:
29816 break;
29817 }
29818 if (dwarf_strict)
29819 return false;
29820 }
29821 break;
29822 case DW_OP_const_type:
29823 case DW_OP_regval_type:
29824 case DW_OP_deref_type:
29825 case DW_OP_convert:
29826 case DW_OP_reinterpret:
29827 case DW_OP_GNU_const_type:
29828 case DW_OP_GNU_regval_type:
29829 case DW_OP_GNU_deref_type:
29830 case DW_OP_GNU_convert:
29831 case DW_OP_GNU_reinterpret:
29832 while (loc->dw_loc_next
29833 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
29834 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
29835 {
29836 dw_die_ref base1, base2;
29837 unsigned enc1, enc2, size1, size2;
29838 if (loc->dw_loc_opc == DW_OP_regval_type
29839 || loc->dw_loc_opc == DW_OP_deref_type
29840 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29841 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29842 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
29843 else if (loc->dw_loc_oprnd1.val_class
29844 == dw_val_class_unsigned_const)
29845 break;
29846 else
29847 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29848 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
29849 == dw_val_class_unsigned_const)
29850 break;
29851 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
29852 gcc_assert (base1->die_tag == DW_TAG_base_type
29853 && base2->die_tag == DW_TAG_base_type);
29854 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
29855 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
29856 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
29857 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
29858 if (size1 == size2
29859 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
29860 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
29861 && loc != keep)
29862 || enc1 == enc2))
29863 {
29864 /* Optimize away next DW_OP_convert after
29865 adjusting LOC's base type die reference. */
29866 if (loc->dw_loc_opc == DW_OP_regval_type
29867 || loc->dw_loc_opc == DW_OP_deref_type
29868 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29869 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29870 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
29871 else
29872 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
29873 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29874 continue;
29875 }
29876 /* Don't change integer DW_OP_convert after e.g. floating
29877 point typed stack entry. */
29878 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
29879 keep = loc->dw_loc_next;
29880 break;
29881 }
29882 break;
29883 default:
29884 break;
29885 }
29886 return true;
29887 }
29888
29889 /* Helper function of resolve_addr. DIE had a DW_AT_location consisting
29890 of DW_OP_addr alone, whose operand referred to DECL, and that
29891 DW_OP_addr couldn't be resolved. resolve_addr has already
29892 removed the DW_AT_location attribute. This function attempts to
29893 add a new DW_AT_location attribute using DW_OP_implicit_pointer,
29894 or a DW_AT_const_value attribute, if possible. */
29895
29896 static void
29897 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
29898 {
29899 if (!VAR_P (decl)
29900 || lookup_decl_die (decl) != die
29901 || DECL_EXTERNAL (decl)
29902 || !TREE_STATIC (decl)
29903 || DECL_INITIAL (decl) == NULL_TREE
29904 || DECL_P (DECL_INITIAL (decl))
29905 || get_AT (die, DW_AT_const_value))
29906 return;
29907
29908 tree init = DECL_INITIAL (decl);
29909 HOST_WIDE_INT offset = 0;
29910 /* For variables that have been optimized away and thus
29911 don't have a memory location, see if we can emit
29912 DW_AT_const_value instead. */
29913 if (tree_add_const_value_attribute (die, init))
29914 return;
29915 if (dwarf_strict && dwarf_version < 5)
29916 return;
29917 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
29918 and ADDR_EXPR refers to a decl that has DW_AT_location or
29919 DW_AT_const_value (but isn't addressable, otherwise
29920 resolving the original DW_OP_addr wouldn't fail), see if
29921 we can add DW_OP_implicit_pointer. */
29922 STRIP_NOPS (init);
29923 if (TREE_CODE (init) == POINTER_PLUS_EXPR
29924 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
29925 {
29926 offset = tree_to_shwi (TREE_OPERAND (init, 1));
29927 init = TREE_OPERAND (init, 0);
29928 STRIP_NOPS (init);
29929 }
29930 if (TREE_CODE (init) != ADDR_EXPR)
29931 return;
29932 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
29933 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
29934 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
29935 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
29936 && TREE_OPERAND (init, 0) != decl))
29937 {
29938 dw_die_ref ref;
29939 dw_loc_descr_ref l;
29940
29941 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
29942 {
29943 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
29944 if (!rtl)
29945 return;
29946 decl = SYMBOL_REF_DECL (rtl);
29947 }
29948 else
29949 decl = TREE_OPERAND (init, 0);
29950 ref = lookup_decl_die (decl);
29951 if (ref == NULL
29952 || (!get_AT (ref, DW_AT_location)
29953 && !get_AT (ref, DW_AT_const_value)))
29954 return;
29955 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
29956 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29957 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
29958 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
29959 add_AT_loc (die, DW_AT_location, l);
29960 }
29961 }
29962
29963 /* Return NULL if L is a valid DWARF expression, or the first op
29964 that is not valid in a DWARF expression. */
29965
29966 static dw_loc_descr_ref
29967 non_dwarf_expression (dw_loc_descr_ref l)
29968 {
29969 while (l)
29970 {
29971 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
29972 return l;
29973 switch (l->dw_loc_opc)
29974 {
29975 case DW_OP_regx:
29976 case DW_OP_implicit_value:
29977 case DW_OP_stack_value:
29978 case DW_OP_implicit_pointer:
29979 case DW_OP_GNU_implicit_pointer:
29980 case DW_OP_GNU_parameter_ref:
29981 case DW_OP_piece:
29982 case DW_OP_bit_piece:
29983 return l;
29984 default:
29985 break;
29986 }
29987 l = l->dw_loc_next;
29988 }
29989 return NULL;
29990 }
29991
29992 /* Return an adjusted copy of EXPR:
29993 If it is an empty DWARF expression, return it.
29994 If it is a valid non-empty DWARF expression,
29995 return a copy of EXPR with DW_OP_deref appended to it.
29996 If it is a DWARF expression followed by DW_OP_reg{N,x}, return a
29997 copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
29998 If it is a DWARF expression followed by DW_OP_stack_value, return a
29999 copy of the DWARF expression with nothing appended.
30000 Otherwise, return NULL. */
30001
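/* A few illustrative cases, matching the rules above:
   DW_OP_fbreg <-16>              -> DW_OP_fbreg <-16> DW_OP_deref
   DW_OP_breg5 <8> DW_OP_reg3     -> DW_OP_breg5 <8> DW_OP_breg3 <0>
   DW_OP_lit0 DW_OP_stack_value   -> DW_OP_lit0
   Anything containing e.g. DW_OP_piece or DW_OP_implicit_value yields
   NULL.  */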
30002 static dw_loc_descr_ref
30003 copy_deref_exprloc (dw_loc_descr_ref expr)
30004 {
30005 dw_loc_descr_ref tail = NULL;
30006
30007 if (expr == NULL)
30008 return NULL;
30009
30010 dw_loc_descr_ref l = non_dwarf_expression (expr);
30011 if (l && l->dw_loc_next)
30012 return NULL;
30013
30014 if (l)
30015 {
30016 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30017 tail = new_loc_descr ((enum dwarf_location_atom)
30018 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
30019 0, 0);
30020 else
30021 switch (l->dw_loc_opc)
30022 {
30023 case DW_OP_regx:
30024 tail = new_loc_descr (DW_OP_bregx,
30025 l->dw_loc_oprnd1.v.val_unsigned, 0);
30026 break;
30027 case DW_OP_stack_value:
30028 break;
30029 default:
30030 return NULL;
30031 }
30032 }
30033 else
30034 tail = new_loc_descr (DW_OP_deref, 0, 0);
30035
30036 dw_loc_descr_ref ret = NULL, *p = &ret;
30037 while (expr != l)
30038 {
30039 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
30040 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
30041 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
30042 p = &(*p)->dw_loc_next;
30043 expr = expr->dw_loc_next;
30044 }
30045 *p = tail;
30046 return ret;
30047 }
30048
30049 /* For a DW_AT_string_length attribute with a DW_OP_GNU_variable_value
30050 reference to a variable or argument, adjust it if needed and return:
30051 -1 if the DW_AT_string_length attribute (and any present
30052 DW_AT_{string_length_,}byte_size attribute) should be removed,
30053 0 if the attribute should be kept, perhaps with minor modifications (no rescan needed),
30054 1 if the attribute has been successfully adjusted. */
30055
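/* Roughly, the adjustments attempted below are, depending on the DWARF
   version and on what the referenced DIE's DW_AT_location looks like:
   DW_OP_GNU_variable_value <ref> DW_OP_stack_value
     -> a plain DIE reference (DWARF 5 allows the reference class here),
     -> DW_OP_call4 <ref>, or
     -> a copy of <ref>'s DW_AT_location;
   DW_OP_GNU_variable_value <ref>
     -> DW_OP_call4 <ref> DW_OP_deref, or
     -> a copy of <ref>'s location adjusted by copy_deref_exprloc.  */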
30056 static int
30057 optimize_string_length (dw_attr_node *a)
30058 {
30059 dw_loc_descr_ref l = AT_loc (a), lv;
30060 dw_die_ref die;
30061 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30062 {
30063 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
30064 die = lookup_decl_die (decl);
30065 if (die)
30066 {
30067 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30068 l->dw_loc_oprnd1.v.val_die_ref.die = die;
30069 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30070 }
30071 else
30072 return -1;
30073 }
30074 else
30075 die = l->dw_loc_oprnd1.v.val_die_ref.die;
30076
30077 /* DWARF5 allows reference class, so we can then reference the DIE.
30078 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
30079 if (l->dw_loc_next != NULL && dwarf_version >= 5)
30080 {
30081 a->dw_attr_val.val_class = dw_val_class_die_ref;
30082 a->dw_attr_val.val_entry = NULL;
30083 a->dw_attr_val.v.val_die_ref.die = die;
30084 a->dw_attr_val.v.val_die_ref.external = 0;
30085 return 0;
30086 }
30087
30088 dw_attr_node *av = get_AT (die, DW_AT_location);
30089 dw_loc_list_ref d;
30090 bool non_dwarf_expr = false;
30091
30092 if (av == NULL)
30093 return dwarf_strict ? -1 : 0;
30094 switch (AT_class (av))
30095 {
30096 case dw_val_class_loc_list:
30097 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30098 if (d->expr && non_dwarf_expression (d->expr))
30099 non_dwarf_expr = true;
30100 break;
30101 case dw_val_class_view_list:
30102 gcc_unreachable ();
30103 case dw_val_class_loc:
30104 lv = AT_loc (av);
30105 if (lv == NULL)
30106 return dwarf_strict ? -1 : 0;
30107 if (non_dwarf_expression (lv))
30108 non_dwarf_expr = true;
30109 break;
30110 default:
30111 return dwarf_strict ? -1 : 0;
30112 }
30113
30114 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
30115 into DW_OP_call4 or DW_OP_GNU_variable_value into
30116 DW_OP_call4 DW_OP_deref, do so. */
30117 if (!non_dwarf_expr
30118 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
30119 {
30120 l->dw_loc_opc = DW_OP_call4;
30121 if (l->dw_loc_next)
30122 l->dw_loc_next = NULL;
30123 else
30124 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
30125 return 0;
30126 }
30127
30128 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
30129 copy over the DW_AT_location attribute from die to a. */
30130 if (l->dw_loc_next != NULL)
30131 {
30132 a->dw_attr_val = av->dw_attr_val;
30133 return 1;
30134 }
30135
30136 dw_loc_list_ref list, *p;
30137 switch (AT_class (av))
30138 {
30139 case dw_val_class_loc_list:
30140 p = &list;
30141 list = NULL;
30142 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30143 {
30144 lv = copy_deref_exprloc (d->expr);
30145 if (lv)
30146 {
30147 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
30148 p = &(*p)->dw_loc_next;
30149 }
30150 else if (!dwarf_strict && d->expr)
30151 return 0;
30152 }
30153 if (list == NULL)
30154 return dwarf_strict ? -1 : 0;
30155 a->dw_attr_val.val_class = dw_val_class_loc_list;
30156 gen_llsym (list);
30157 *AT_loc_list_ptr (a) = list;
30158 return 1;
30159 case dw_val_class_loc:
30160 lv = copy_deref_exprloc (AT_loc (av));
30161 if (lv == NULL)
30162 return dwarf_strict ? -1 : 0;
30163 a->dw_attr_val.v.val_loc = lv;
30164 return 1;
30165 default:
30166 gcc_unreachable ();
30167 }
30168 }
30169
30170 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30171 an address in the .rodata section if the string literal is emitted
30172 there; if it isn't found in .rodata, remove the containing location
30173 list or replace DW_AT_const_value with DW_AT_location and an empty
30174 location expression. Similarly for SYMBOL_REFs: keep only those that
30175 refer to something that has been emitted in the current CU. */
30176
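/* A rough map of what the loop below does per attribute class:
   loc_list  - resolve each list entry, dropping entries (and, if nothing
               survives, the whole attribute) whose addresses can't be
               resolved;
   view_list - drop the attribute if its location list went away or no
               longer needs views;
   loc       - resolve the single expression, trying the string-length and
               implicit-pointer optimizations along the way;
   addr      - resolve DW_AT_const_value addresses and turn call-site
               origin addresses into DIE references where possible.  */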
30177 static void
30178 resolve_addr (dw_die_ref die)
30179 {
30180 dw_die_ref c;
30181 dw_attr_node *a;
30182 dw_loc_list_ref *curr, *start, loc;
30183 unsigned ix;
30184 bool remove_AT_byte_size = false;
30185
30186 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30187 switch (AT_class (a))
30188 {
30189 case dw_val_class_loc_list:
30190 start = curr = AT_loc_list_ptr (a);
30191 loc = *curr;
30192 gcc_assert (loc);
30193 /* The same list can be referenced more than once. See if we have
30194 already recorded the result from a previous pass. */
30195 if (loc->replaced)
30196 *curr = loc->dw_loc_next;
30197 else if (!loc->resolved_addr)
30198 {
30199 /* As things stand, we do not expect or allow one die to
30200 reference a suffix of another die's location list chain.
30201 References must be identical or completely separate.
30202 There is therefore no need to cache the result of this
30203 pass on any list other than the first; doing so
30204 would lead to unnecessary writes. */
30205 while (*curr)
30206 {
30207 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30208 if (!resolve_addr_in_expr (a, (*curr)->expr))
30209 {
30210 dw_loc_list_ref next = (*curr)->dw_loc_next;
30211 dw_loc_descr_ref l = (*curr)->expr;
30212
30213 if (next && (*curr)->ll_symbol)
30214 {
30215 gcc_assert (!next->ll_symbol);
30216 next->ll_symbol = (*curr)->ll_symbol;
30217 next->vl_symbol = (*curr)->vl_symbol;
30218 }
30219 if (dwarf_split_debug_info)
30220 remove_loc_list_addr_table_entries (l);
30221 *curr = next;
30222 }
30223 else
30224 {
30225 mark_base_types ((*curr)->expr);
30226 curr = &(*curr)->dw_loc_next;
30227 }
30228 }
30229 if (loc == *start)
30230 loc->resolved_addr = 1;
30231 else
30232 {
30233 loc->replaced = 1;
30234 loc->dw_loc_next = *start;
30235 }
30236 }
30237 if (!*start)
30238 {
30239 remove_AT (die, a->dw_attr);
30240 ix--;
30241 }
30242 break;
30243 case dw_val_class_view_list:
30244 {
30245 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30246 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30247 dw_val_node *llnode
30248 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30249 /* If we no longer have a loclist, or it no longer needs
30250 views, drop this attribute. */
30251 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30252 {
30253 remove_AT (die, a->dw_attr);
30254 ix--;
30255 }
30256 break;
30257 }
30258 case dw_val_class_loc:
30259 {
30260 dw_loc_descr_ref l = AT_loc (a);
30261 /* DW_OP_GNU_variable_value DW_OP_stack_value or
30262 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30263 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
30264 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
30265 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30266 with DW_FORM_ref referencing the same DIE as
30267 DW_OP_GNU_variable_value used to reference. */
30268 if (a->dw_attr == DW_AT_string_length
30269 && l
30270 && l->dw_loc_opc == DW_OP_GNU_variable_value
30271 && (l->dw_loc_next == NULL
30272 || (l->dw_loc_next->dw_loc_next == NULL
30273 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30274 {
30275 switch (optimize_string_length (a))
30276 {
30277 case -1:
30278 remove_AT (die, a->dw_attr);
30279 ix--;
30280 /* If we drop DW_AT_string_length, we also need to drop
30281 DW_AT_{string_length_,}byte_size. */
30282 remove_AT_byte_size = true;
30283 continue;
30284 default:
30285 break;
30286 case 1:
30287 /* Even if we keep the optimized DW_AT_string_length,
30288 it might have changed AT_class, so process it again. */
30289 ix--;
30290 continue;
30291 }
30292 }
30293 /* For -gdwarf-2 don't attempt to optimize
30294 DW_AT_data_member_location containing
30295 DW_OP_plus_uconst - older consumers might
30296 rely on it being that op instead of a more complex,
30297 but shorter, location description. */
30298 if ((dwarf_version > 2
30299 || a->dw_attr != DW_AT_data_member_location
30300 || l == NULL
30301 || l->dw_loc_opc != DW_OP_plus_uconst
30302 || l->dw_loc_next != NULL)
30303 && !resolve_addr_in_expr (a, l))
30304 {
30305 if (dwarf_split_debug_info)
30306 remove_loc_list_addr_table_entries (l);
30307 if (l != NULL
30308 && l->dw_loc_next == NULL
30309 && l->dw_loc_opc == DW_OP_addr
30310 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30311 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30312 && a->dw_attr == DW_AT_location)
30313 {
30314 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30315 remove_AT (die, a->dw_attr);
30316 ix--;
30317 optimize_location_into_implicit_ptr (die, decl);
30318 break;
30319 }
30320 if (a->dw_attr == DW_AT_string_length)
30321 /* If we drop DW_AT_string_length, we also need to drop
30322 DW_AT_{string_length_,}byte_size. */
30323 remove_AT_byte_size = true;
30324 remove_AT (die, a->dw_attr);
30325 ix--;
30326 }
30327 else
30328 mark_base_types (l);
30329 }
30330 break;
30331 case dw_val_class_addr:
30332 if (a->dw_attr == DW_AT_const_value
30333 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30334 {
30335 if (AT_index (a) != NOT_INDEXED)
30336 remove_addr_table_entry (a->dw_attr_val.val_entry);
30337 remove_AT (die, a->dw_attr);
30338 ix--;
30339 }
30340 if ((die->die_tag == DW_TAG_call_site
30341 && a->dw_attr == DW_AT_call_origin)
30342 || (die->die_tag == DW_TAG_GNU_call_site
30343 && a->dw_attr == DW_AT_abstract_origin))
30344 {
30345 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30346 dw_die_ref tdie = lookup_decl_die (tdecl);
30347 dw_die_ref cdie;
30348 if (tdie == NULL
30349 && DECL_EXTERNAL (tdecl)
30350 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30351 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30352 {
30353 dw_die_ref pdie = cdie;
30354 /* Make sure we don't add these DIEs into type units.
30355 We could emit skeleton DIEs for context (namespaces,
30356 outer structs/classes) and a skeleton DIE for the
30357 innermost context with DW_AT_signature pointing to the
30358 type unit. See PR78835. */
30359 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30360 pdie = pdie->die_parent;
30361 if (pdie == NULL)
30362 {
30363 /* Creating a full DIE for tdecl is overly expensive and
30364 at this point even wrong when in the LTO phase
30365 as it can end up generating new type DIEs we didn't
30366 output and thus optimize_external_refs will crash. */
30367 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30368 add_AT_flag (tdie, DW_AT_external, 1);
30369 add_AT_flag (tdie, DW_AT_declaration, 1);
30370 add_linkage_attr (tdie, tdecl);
30371 add_name_and_src_coords_attributes (tdie, tdecl, true);
30372 equate_decl_number_to_die (tdecl, tdie);
30373 }
30374 }
30375 if (tdie)
30376 {
30377 a->dw_attr_val.val_class = dw_val_class_die_ref;
30378 a->dw_attr_val.v.val_die_ref.die = tdie;
30379 a->dw_attr_val.v.val_die_ref.external = 0;
30380 }
30381 else
30382 {
30383 if (AT_index (a) != NOT_INDEXED)
30384 remove_addr_table_entry (a->dw_attr_val.val_entry);
30385 remove_AT (die, a->dw_attr);
30386 ix--;
30387 }
30388 }
30389 break;
30390 default:
30391 break;
30392 }
30393
30394 if (remove_AT_byte_size)
30395 remove_AT (die, dwarf_version >= 5
30396 ? DW_AT_string_length_byte_size
30397 : DW_AT_byte_size);
30398
30399 FOR_EACH_CHILD (die, c, resolve_addr (c));
30400 }
30401 \f
30402 /* Helper routines for optimize_location_lists.
30403 This pass tries to share identical location lists in the
30404 .debug_loc section. */
30405
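/* The sharing scheme is simple: each location list gets a hash computed
   over its bounds, views and expressions (hash_loc_list), lists are
   entered into a hash table keyed by that hash, and any later attribute
   whose list compares equal is redirected to the copy already in the
   table (optimize_location_lists_1).  */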
30406 /* Iteratively hash operands of LOC opcode into HSTATE. */
30407
30408 static void
30409 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30410 {
30411 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30412 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30413
30414 switch (loc->dw_loc_opc)
30415 {
30416 case DW_OP_const4u:
30417 case DW_OP_const8u:
30418 if (loc->dtprel)
30419 goto hash_addr;
30420 /* FALLTHRU */
30421 case DW_OP_const1u:
30422 case DW_OP_const1s:
30423 case DW_OP_const2u:
30424 case DW_OP_const2s:
30425 case DW_OP_const4s:
30426 case DW_OP_const8s:
30427 case DW_OP_constu:
30428 case DW_OP_consts:
30429 case DW_OP_pick:
30430 case DW_OP_plus_uconst:
30431 case DW_OP_breg0:
30432 case DW_OP_breg1:
30433 case DW_OP_breg2:
30434 case DW_OP_breg3:
30435 case DW_OP_breg4:
30436 case DW_OP_breg5:
30437 case DW_OP_breg6:
30438 case DW_OP_breg7:
30439 case DW_OP_breg8:
30440 case DW_OP_breg9:
30441 case DW_OP_breg10:
30442 case DW_OP_breg11:
30443 case DW_OP_breg12:
30444 case DW_OP_breg13:
30445 case DW_OP_breg14:
30446 case DW_OP_breg15:
30447 case DW_OP_breg16:
30448 case DW_OP_breg17:
30449 case DW_OP_breg18:
30450 case DW_OP_breg19:
30451 case DW_OP_breg20:
30452 case DW_OP_breg21:
30453 case DW_OP_breg22:
30454 case DW_OP_breg23:
30455 case DW_OP_breg24:
30456 case DW_OP_breg25:
30457 case DW_OP_breg26:
30458 case DW_OP_breg27:
30459 case DW_OP_breg28:
30460 case DW_OP_breg29:
30461 case DW_OP_breg30:
30462 case DW_OP_breg31:
30463 case DW_OP_regx:
30464 case DW_OP_fbreg:
30465 case DW_OP_piece:
30466 case DW_OP_deref_size:
30467 case DW_OP_xderef_size:
30468 hstate.add_object (val1->v.val_int);
30469 break;
30470 case DW_OP_skip:
30471 case DW_OP_bra:
30472 {
30473 int offset;
30474
30475 gcc_assert (val1->val_class == dw_val_class_loc);
30476 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30477 hstate.add_object (offset);
30478 }
30479 break;
30480 case DW_OP_implicit_value:
30481 hstate.add_object (val1->v.val_unsigned);
30482 switch (val2->val_class)
30483 {
30484 case dw_val_class_const:
30485 hstate.add_object (val2->v.val_int);
30486 break;
30487 case dw_val_class_vec:
30488 {
30489 unsigned int elt_size = val2->v.val_vec.elt_size;
30490 unsigned int len = val2->v.val_vec.length;
30491
30492 hstate.add_int (elt_size);
30493 hstate.add_int (len);
30494 hstate.add (val2->v.val_vec.array, len * elt_size);
30495 }
30496 break;
30497 case dw_val_class_const_double:
30498 hstate.add_object (val2->v.val_double.low);
30499 hstate.add_object (val2->v.val_double.high);
30500 break;
30501 case dw_val_class_wide_int:
30502 hstate.add (val2->v.val_wide->get_val (),
30503 get_full_len (*val2->v.val_wide)
30504 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30505 break;
30506 case dw_val_class_addr:
30507 inchash::add_rtx (val2->v.val_addr, hstate);
30508 break;
30509 default:
30510 gcc_unreachable ();
30511 }
30512 break;
30513 case DW_OP_bregx:
30514 case DW_OP_bit_piece:
30515 hstate.add_object (val1->v.val_int);
30516 hstate.add_object (val2->v.val_int);
30517 break;
30518 case DW_OP_addr:
30519 hash_addr:
30520 if (loc->dtprel)
30521 {
30522 unsigned char dtprel = 0xd1;
30523 hstate.add_object (dtprel);
30524 }
30525 inchash::add_rtx (val1->v.val_addr, hstate);
30526 break;
30527 case DW_OP_GNU_addr_index:
30528 case DW_OP_addrx:
30529 case DW_OP_GNU_const_index:
30530 case DW_OP_constx:
30531 {
30532 if (loc->dtprel)
30533 {
30534 unsigned char dtprel = 0xd1;
30535 hstate.add_object (dtprel);
30536 }
30537 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30538 }
30539 break;
30540 case DW_OP_implicit_pointer:
30541 case DW_OP_GNU_implicit_pointer:
30542 hstate.add_int (val2->v.val_int);
30543 break;
30544 case DW_OP_entry_value:
30545 case DW_OP_GNU_entry_value:
30546 hstate.add_object (val1->v.val_loc);
30547 break;
30548 case DW_OP_regval_type:
30549 case DW_OP_deref_type:
30550 case DW_OP_GNU_regval_type:
30551 case DW_OP_GNU_deref_type:
30552 {
30553 unsigned int byte_size
30554 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30555 unsigned int encoding
30556 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30557 hstate.add_object (val1->v.val_int);
30558 hstate.add_object (byte_size);
30559 hstate.add_object (encoding);
30560 }
30561 break;
30562 case DW_OP_convert:
30563 case DW_OP_reinterpret:
30564 case DW_OP_GNU_convert:
30565 case DW_OP_GNU_reinterpret:
30566 if (val1->val_class == dw_val_class_unsigned_const)
30567 {
30568 hstate.add_object (val1->v.val_unsigned);
30569 break;
30570 }
30571 /* FALLTHRU */
30572 case DW_OP_const_type:
30573 case DW_OP_GNU_const_type:
30574 {
30575 unsigned int byte_size
30576 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30577 unsigned int encoding
30578 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30579 hstate.add_object (byte_size);
30580 hstate.add_object (encoding);
30581 if (loc->dw_loc_opc != DW_OP_const_type
30582 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30583 break;
30584 hstate.add_object (val2->val_class);
30585 switch (val2->val_class)
30586 {
30587 case dw_val_class_const:
30588 hstate.add_object (val2->v.val_int);
30589 break;
30590 case dw_val_class_vec:
30591 {
30592 unsigned int elt_size = val2->v.val_vec.elt_size;
30593 unsigned int len = val2->v.val_vec.length;
30594
30595 hstate.add_object (elt_size);
30596 hstate.add_object (len);
30597 hstate.add (val2->v.val_vec.array, len * elt_size);
30598 }
30599 break;
30600 case dw_val_class_const_double:
30601 hstate.add_object (val2->v.val_double.low);
30602 hstate.add_object (val2->v.val_double.high);
30603 break;
30604 case dw_val_class_wide_int:
30605 hstate.add (val2->v.val_wide->get_val (),
30606 get_full_len (*val2->v.val_wide)
30607 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30608 break;
30609 default:
30610 gcc_unreachable ();
30611 }
30612 }
30613 break;
30614
30615 default:
30616 /* Other codes have no operands. */
30617 break;
30618 }
30619 }
30620
30621 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30622
30623 static inline void
30624 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30625 {
30626 dw_loc_descr_ref l;
30627 bool sizes_computed = false;
30628 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
30629 size_of_locs (loc);
30630
30631 for (l = loc; l != NULL; l = l->dw_loc_next)
30632 {
30633 enum dwarf_location_atom opc = l->dw_loc_opc;
30634 hstate.add_object (opc);
30635 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30636 {
30637 size_of_locs (loc);
30638 sizes_computed = true;
30639 }
30640 hash_loc_operands (l, hstate);
30641 }
30642 }
30643
30644 /* Compute hash of the whole location list LIST_HEAD. */
30645
30646 static inline void
30647 hash_loc_list (dw_loc_list_ref list_head)
30648 {
30649 dw_loc_list_ref curr = list_head;
30650 inchash::hash hstate;
30651
30652 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30653 {
30654 hstate.add (curr->begin, strlen (curr->begin) + 1);
30655 hstate.add (curr->end, strlen (curr->end) + 1);
30656 hstate.add_object (curr->vbegin);
30657 hstate.add_object (curr->vend);
30658 if (curr->section)
30659 hstate.add (curr->section, strlen (curr->section) + 1);
30660 hash_locs (curr->expr, hstate);
30661 }
30662 list_head->hash = hstate.end ();
30663 }
30664
30665 /* Return true if X and Y opcodes have the same operands. */
30666
30667 static inline bool
30668 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30669 {
30670 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30671 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30672 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30673 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30674
30675 switch (x->dw_loc_opc)
30676 {
30677 case DW_OP_const4u:
30678 case DW_OP_const8u:
30679 if (x->dtprel)
30680 goto hash_addr;
30681 /* FALLTHRU */
30682 case DW_OP_const1u:
30683 case DW_OP_const1s:
30684 case DW_OP_const2u:
30685 case DW_OP_const2s:
30686 case DW_OP_const4s:
30687 case DW_OP_const8s:
30688 case DW_OP_constu:
30689 case DW_OP_consts:
30690 case DW_OP_pick:
30691 case DW_OP_plus_uconst:
30692 case DW_OP_breg0:
30693 case DW_OP_breg1:
30694 case DW_OP_breg2:
30695 case DW_OP_breg3:
30696 case DW_OP_breg4:
30697 case DW_OP_breg5:
30698 case DW_OP_breg6:
30699 case DW_OP_breg7:
30700 case DW_OP_breg8:
30701 case DW_OP_breg9:
30702 case DW_OP_breg10:
30703 case DW_OP_breg11:
30704 case DW_OP_breg12:
30705 case DW_OP_breg13:
30706 case DW_OP_breg14:
30707 case DW_OP_breg15:
30708 case DW_OP_breg16:
30709 case DW_OP_breg17:
30710 case DW_OP_breg18:
30711 case DW_OP_breg19:
30712 case DW_OP_breg20:
30713 case DW_OP_breg21:
30714 case DW_OP_breg22:
30715 case DW_OP_breg23:
30716 case DW_OP_breg24:
30717 case DW_OP_breg25:
30718 case DW_OP_breg26:
30719 case DW_OP_breg27:
30720 case DW_OP_breg28:
30721 case DW_OP_breg29:
30722 case DW_OP_breg30:
30723 case DW_OP_breg31:
30724 case DW_OP_regx:
30725 case DW_OP_fbreg:
30726 case DW_OP_piece:
30727 case DW_OP_deref_size:
30728 case DW_OP_xderef_size:
30729 return valx1->v.val_int == valy1->v.val_int;
30730 case DW_OP_skip:
30731 case DW_OP_bra:
30732 /* If splitting debug info, the use of DW_OP_GNU_addr_index
30733 can cause irrelevant differences in dw_loc_addr. */
30734 gcc_assert (valx1->val_class == dw_val_class_loc
30735 && valy1->val_class == dw_val_class_loc
30736 && (dwarf_split_debug_info
30737 || x->dw_loc_addr == y->dw_loc_addr));
30738 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
30739 case DW_OP_implicit_value:
30740 if (valx1->v.val_unsigned != valy1->v.val_unsigned
30741 || valx2->val_class != valy2->val_class)
30742 return false;
30743 switch (valx2->val_class)
30744 {
30745 case dw_val_class_const:
30746 return valx2->v.val_int == valy2->v.val_int;
30747 case dw_val_class_vec:
30748 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30749 && valx2->v.val_vec.length == valy2->v.val_vec.length
30750 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30751 valx2->v.val_vec.elt_size
30752 * valx2->v.val_vec.length) == 0;
30753 case dw_val_class_const_double:
30754 return valx2->v.val_double.low == valy2->v.val_double.low
30755 && valx2->v.val_double.high == valy2->v.val_double.high;
30756 case dw_val_class_wide_int:
30757 return *valx2->v.val_wide == *valy2->v.val_wide;
30758 case dw_val_class_addr:
30759 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
30760 default:
30761 gcc_unreachable ();
30762 }
30763 case DW_OP_bregx:
30764 case DW_OP_bit_piece:
30765 return valx1->v.val_int == valy1->v.val_int
30766 && valx2->v.val_int == valy2->v.val_int;
30767 case DW_OP_addr:
30768 hash_addr:
30769 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
30770 case DW_OP_GNU_addr_index:
30771 case DW_OP_addrx:
30772 case DW_OP_GNU_const_index:
30773 case DW_OP_constx:
30774 {
30775 rtx ax1 = valx1->val_entry->addr.rtl;
30776 rtx ay1 = valy1->val_entry->addr.rtl;
30777 return rtx_equal_p (ax1, ay1);
30778 }
30779 case DW_OP_implicit_pointer:
30780 case DW_OP_GNU_implicit_pointer:
30781 return valx1->val_class == dw_val_class_die_ref
30782 && valx1->val_class == valy1->val_class
30783 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
30784 && valx2->v.val_int == valy2->v.val_int;
30785 case DW_OP_entry_value:
30786 case DW_OP_GNU_entry_value:
30787 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
30788 case DW_OP_const_type:
30789 case DW_OP_GNU_const_type:
30790 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
30791 || valx2->val_class != valy2->val_class)
30792 return false;
30793 switch (valx2->val_class)
30794 {
30795 case dw_val_class_const:
30796 return valx2->v.val_int == valy2->v.val_int;
30797 case dw_val_class_vec:
30798 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30799 && valx2->v.val_vec.length == valy2->v.val_vec.length
30800 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30801 valx2->v.val_vec.elt_size
30802 * valx2->v.val_vec.length) == 0;
30803 case dw_val_class_const_double:
30804 return valx2->v.val_double.low == valy2->v.val_double.low
30805 && valx2->v.val_double.high == valy2->v.val_double.high;
30806 case dw_val_class_wide_int:
30807 return *valx2->v.val_wide == *valy2->v.val_wide;
30808 default:
30809 gcc_unreachable ();
30810 }
30811 case DW_OP_regval_type:
30812 case DW_OP_deref_type:
30813 case DW_OP_GNU_regval_type:
30814 case DW_OP_GNU_deref_type:
30815 return valx1->v.val_int == valy1->v.val_int
30816 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
30817 case DW_OP_convert:
30818 case DW_OP_reinterpret:
30819 case DW_OP_GNU_convert:
30820 case DW_OP_GNU_reinterpret:
30821 if (valx1->val_class != valy1->val_class)
30822 return false;
30823 if (valx1->val_class == dw_val_class_unsigned_const)
30824 return valx1->v.val_unsigned == valy1->v.val_unsigned;
30825 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30826 case DW_OP_GNU_parameter_ref:
30827 return valx1->val_class == dw_val_class_die_ref
30828 && valx1->val_class == valy1->val_class
30829 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30830 default:
30831 /* Other codes have no operands. */
30832 return true;
30833 }
30834 }
30835
30836 /* Return true if DWARF location expressions X and Y are the same. */
30837
30838 static inline bool
30839 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
30840 {
30841 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
30842 if (x->dw_loc_opc != y->dw_loc_opc
30843 || x->dtprel != y->dtprel
30844 || !compare_loc_operands (x, y))
30845 break;
30846 return x == NULL && y == NULL;
30847 }
30848
30849 /* Hashtable helpers. */
30850
30851 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
30852 {
30853 static inline hashval_t hash (const dw_loc_list_struct *);
30854 static inline bool equal (const dw_loc_list_struct *,
30855 const dw_loc_list_struct *);
30856 };
30857
30858 /* Return precomputed hash of location list X. */
30859
30860 inline hashval_t
30861 loc_list_hasher::hash (const dw_loc_list_struct *x)
30862 {
30863 return x->hash;
30864 }
30865
30866 /* Return true if location lists A and B are the same. */
30867
30868 inline bool
30869 loc_list_hasher::equal (const dw_loc_list_struct *a,
30870 const dw_loc_list_struct *b)
30871 {
30872 if (a == b)
30873 return 1;
30874 if (a->hash != b->hash)
30875 return 0;
30876 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
30877 if (strcmp (a->begin, b->begin) != 0
30878 || strcmp (a->end, b->end) != 0
30879 || (a->section == NULL) != (b->section == NULL)
30880 || (a->section && strcmp (a->section, b->section) != 0)
30881 || a->vbegin != b->vbegin || a->vend != b->vend
30882 || !compare_locs (a->expr, b->expr))
30883 break;
30884 return a == NULL && b == NULL;
30885 }
30886
30887 typedef hash_table<loc_list_hasher> loc_list_hash_type;
30888
30889
30890 /* Recursively optimize location lists referenced from DIE
30891 children and share them whenever possible. */
30892
30893 static void
30894 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
30895 {
30896 dw_die_ref c;
30897 dw_attr_node *a;
30898 unsigned ix;
30899 dw_loc_list_struct **slot;
30900 bool drop_locviews = false;
30901 bool has_locviews = false;
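/* If a location list that carries a location-views symbol ends up
shared with (or reduced to) an equivalent list without views, the
DW_AT_GNU_locviews attribute on this DIE would dangle; note that here
and drop the attribute below. */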
30902
30903 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30904 if (AT_class (a) == dw_val_class_loc_list)
30905 {
30906 dw_loc_list_ref list = AT_loc_list (a);
30907 /* TODO: perform some optimizations here, before hashing
30908 it and storing into the hash table. */
30909 hash_loc_list (list);
30910 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
30911 if (*slot == NULL)
30912 {
30913 *slot = list;
30914 if (loc_list_has_views (list))
30915 gcc_assert (list->vl_symbol);
30916 else if (list->vl_symbol)
30917 {
30918 drop_locviews = true;
30919 list->vl_symbol = NULL;
30920 }
30921 }
30922 else
30923 {
30924 if (list->vl_symbol && !(*slot)->vl_symbol)
30925 drop_locviews = true;
30926 a->dw_attr_val.v.val_loc_list = *slot;
30927 }
30928 }
30929 else if (AT_class (a) == dw_val_class_view_list)
30930 {
30931 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30932 has_locviews = true;
30933 }
30934
30935
30936 if (drop_locviews && has_locviews)
30937 remove_AT (die, DW_AT_GNU_locviews);
30938
30939 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
30940 }
30941
30942
30943 /* Recursively assign each location list a unique index into the debug_addr
30944 section. */
30945
30946 static void
30947 index_location_lists (dw_die_ref die)
30948 {
30949 dw_die_ref c;
30950 dw_attr_node *a;
30951 unsigned ix;
30952
30953 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30954 if (AT_class (a) == dw_val_class_loc_list)
30955 {
30956 dw_loc_list_ref list = AT_loc_list (a);
30957 dw_loc_list_ref curr;
30958 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
30959 {
30960 /* Don't index an entry that has already been indexed
30961 or won't be output. Make sure skip_loc_list_entry doesn't
30962 call size_of_locs, because that might cause a circular dependency:
30963 index_location_lists requires the address table indexes to be
30964 computed, yet it adds new indexes through add_addr_table_entry,
30965 while address table index computation requires no new additions
30966 to the hash table. In the rare case of a DWARF[234] location
30967 expression of 64KB or more, we'll just waste an unused address
30968 table entry for it. */
30969 if (curr->begin_entry != NULL
30970 || skip_loc_list_entry (curr))
30971 continue;
30972
30973 curr->begin_entry
30974 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
30975 }
30976 }
30977
30978 FOR_EACH_CHILD (die, c, index_location_lists (c));
30979 }
30980
30981 /* Optimize location lists referenced from DIE
30982 children and share them whenever possible. */
30983
30984 static void
30985 optimize_location_lists (dw_die_ref die)
30986 {
30987 loc_list_hash_type htab (500);
30988 optimize_location_lists_1 (die, &htab);
30989 }
30990 \f
30991 /* Traverse the limbo die list, and add parent/child links. The only
30992 dies without parents that should be here are concrete instances of
30993 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
30994 For concrete instances, we can get the parent die from the abstract
30995 instance. */
30996
30997 static void
30998 flush_limbo_die_list (void)
30999 {
31000 limbo_die_node *node;
31001
31002 /* get_context_die calls force_decl_die, which can put new DIEs on the
31003 limbo list in LTO mode when nested functions are put in a different
31004 partition than that of their parent function. */
31005 while ((node = limbo_die_list))
31006 {
31007 dw_die_ref die = node->die;
31008 limbo_die_list = node->next;
31009
31010 if (die->die_parent == NULL)
31011 {
31012 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
31013
31014 if (origin && origin->die_parent)
31015 add_child_die (origin->die_parent, die);
31016 else if (is_cu_die (die))
31017 ;
31018 else if (seen_error ())
31019 /* It's OK to be confused by errors in the input. */
31020 add_child_die (comp_unit_die (), die);
31021 else
31022 {
31023 /* In certain situations, the lexical block containing a
31024 nested function can be optimized away, which results
31025 in the nested function die being orphaned. Likewise
31026 with the return type of that nested function. Force
31027 this to be a child of the containing function.
31028
31029 It may happen that even the containing function got fully
31030 inlined and optimized out. In that case we are lost and
31031 assign the empty child. This should not be a big issue as
31032 the function is likely unreachable too. */
31033 gcc_assert (node->created_for);
31034
31035 if (DECL_P (node->created_for))
31036 origin = get_context_die (DECL_CONTEXT (node->created_for));
31037 else if (TYPE_P (node->created_for))
31038 origin = scope_die_for (node->created_for, comp_unit_die ());
31039 else
31040 origin = comp_unit_die ();
31041
31042 add_child_die (origin, die);
31043 }
31044 }
31045 }
31046 }
31047
31048 /* Reset DIEs so we can output them again. */
31049
31050 static void
31051 reset_dies (dw_die_ref die)
31052 {
31053 dw_die_ref c;
31054
31055 /* Remove stuff we re-generate. */
31056 die->die_mark = 0;
31057 die->die_offset = 0;
31058 die->die_abbrev = 0;
31059 remove_AT (die, DW_AT_sibling);
31060
31061 FOR_EACH_CHILD (die, c, reset_dies (c));
31062 }
31063
31064 /* Output stuff that dwarf requires at the end of every file,
31065 and generate the DWARF-2 debugging info. */
31066
31067 static void
31068 dwarf2out_finish (const char *filename)
31069 {
31070 comdat_type_node *ctnode;
31071 dw_die_ref main_comp_unit_die;
31072 unsigned char checksum[16];
31073 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31074
31075 /* Flush out any latecomers to the limbo party. */
31076 flush_limbo_die_list ();
31077
31078 if (inline_entry_data_table)
31079 gcc_assert (inline_entry_data_table->elements () == 0);
31080
31081 if (flag_checking)
31082 {
31083 verify_die (comp_unit_die ());
31084 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31085 verify_die (node->die);
31086 }
31087
31088 /* We shouldn't have any symbols with delayed asm names for
31089 DIEs generated after early finish. */
31090 gcc_assert (deferred_asm_name == NULL);
31091
31092 gen_remaining_tmpl_value_param_die_attribute ();
31093
31094 if (flag_generate_lto || flag_generate_offload)
31095 {
31096 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
31097
31098 /* Prune stuff so that dwarf2out_finish runs successfully
31099 for the fat part of the object. */
31100 reset_dies (comp_unit_die ());
31101 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31102 reset_dies (node->die);
31103
31104 hash_table<comdat_type_hasher> comdat_type_table (100);
31105 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31106 {
31107 comdat_type_node **slot
31108 = comdat_type_table.find_slot (ctnode, INSERT);
31109
31110 /* Don't reset types twice. */
31111 if (*slot != HTAB_EMPTY_ENTRY)
31112 continue;
31113
31114 /* Remove the pointer to the line table. */
31115 remove_AT (ctnode->root_die, DW_AT_stmt_list);
31116
31117 if (debug_info_level >= DINFO_LEVEL_TERSE)
31118 reset_dies (ctnode->root_die);
31119
31120 *slot = ctnode;
31121 }
31122
31123 /* Reset the CU DIE symbol so we don't output it twice. */
31124 comp_unit_die ()->die_id.die_symbol = NULL;
31125
31126 /* Remove DW_AT_macro and DW_AT_stmt_list from the early output. */
31127 remove_AT (comp_unit_die (), DW_AT_stmt_list);
31128 if (have_macinfo)
31129 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
31130
31131 /* Remove indirect string decisions. */
31132 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
31133 if (debug_line_str_hash)
31134 {
31135 debug_line_str_hash->traverse<void *, reset_indirect_string> (NULL);
31136 debug_line_str_hash = NULL;
31137 }
31138 }
31139
31140 #if ENABLE_ASSERT_CHECKING
31141 {
31142 dw_die_ref die = comp_unit_die (), c;
31143 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
31144 }
31145 #endif
31146 resolve_addr (comp_unit_die ());
31147 move_marked_base_types ();
31148
31149 if (dump_file)
31150 {
31151 fprintf (dump_file, "DWARF for %s\n", filename);
31152 print_die (comp_unit_die (), dump_file);
31153 }
31154
31155 /* Initialize sections and labels used for actual assembler output. */
31156 unsigned generation = init_sections_and_labels (false);
31157
31158 /* Traverse the DIE's and add sibling attributes to those DIE's that
31159 have children. */
31160 add_sibling_attributes (comp_unit_die ());
31161 limbo_die_node *node;
31162 for (node = cu_die_list; node; node = node->next)
31163 add_sibling_attributes (node->die);
31164 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31165 add_sibling_attributes (ctnode->root_die);
31166
31167 /* When splitting DWARF info, we put some attributes in the
31168 skeleton compile_unit DIE that remains in the .o, while
31169 most attributes go in the DWO compile_unit_die. */
31170 if (dwarf_split_debug_info)
31171 {
31172 limbo_die_node *cu;
31173 main_comp_unit_die = gen_compile_unit_die (NULL);
31174 if (dwarf_version >= 5)
31175 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
31176 cu = limbo_die_list;
31177 gcc_assert (cu->die == main_comp_unit_die);
31178 limbo_die_list = limbo_die_list->next;
31179 cu->next = cu_die_list;
31180 cu_die_list = cu;
31181 }
31182 else
31183 main_comp_unit_die = comp_unit_die ();
31184
31185 /* Output a terminator label for the .text section. */
31186 switch_to_section (text_section);
31187 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
31188 if (cold_text_section)
31189 {
31190 switch_to_section (cold_text_section);
31191 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
31192 }
31193
31194 /* We can only use the low/high_pc attributes if all of the code was
31195 in .text. */
31196 if (!have_multiple_function_sections
31197 || (dwarf_version < 3 && dwarf_strict))
31198 {
31199 /* Don't add if the CU has no associated code. */
31200 if (text_section_used)
31201 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31202 text_end_label, true);
31203 }
31204 else
31205 {
31206 unsigned fde_idx;
31207 dw_fde_ref fde;
31208 bool range_list_added = false;
31209
31210 if (text_section_used)
31211 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31212 text_end_label, &range_list_added, true);
31213 if (cold_text_section_used)
31214 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31215 cold_end_label, &range_list_added, true);
31216
31217 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31218 {
31219 if (DECL_IGNORED_P (fde->decl))
31220 continue;
31221 if (!fde->in_std_section)
31222 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31223 fde->dw_fde_end, &range_list_added,
31224 true);
31225 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31226 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31227 fde->dw_fde_second_end, &range_list_added,
31228 true);
31229 }
31230
31231 if (range_list_added)
31232 {
31233 /* We need to give .debug_loc and .debug_ranges an appropriate
31234 "base address". Use zero so that these addresses become
31235 absolute. Historically, we've emitted the unexpected
31236 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31237 Emit both to give time for other tools to adapt. */
31238 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31239 if (! dwarf_strict && dwarf_version < 4)
31240 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31241
31242 add_ranges (NULL);
31243 }
31244 }
31245
31246 /* AIX Assembler inserts the length, so adjust the reference to match the
31247 offset expected by debuggers. */
31248 strcpy (dl_section_ref, debug_line_section_label);
31249 if (XCOFF_DEBUGGING_INFO)
31250 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31251
31252 if (debug_info_level >= DINFO_LEVEL_TERSE)
31253 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31254 dl_section_ref);
31255
31256 if (have_macinfo)
31257 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31258 macinfo_section_label);
31259
31260 if (dwarf_split_debug_info)
31261 {
31262 if (have_location_lists)
31263 {
31264 /* Since we generate the loclists in the split DWARF .dwo
31265 file itself, we don't need to generate a loclists_base
31266 attribute for the split compile unit DIE. That attribute
31267 (and using relocatable sec_offset FORMs) isn't allowed
31268 for a split compile unit. Only if the .debug_loclists
31269 section was in the main file, would we need to generate a
31270 loclists_base attribute here (for the full or skeleton
31271 unit DIE). */
31272
31273 /* optimize_location_lists calculates the size of the lists,
31274 so index them first, and assign indices to the entries.
31275 Although optimize_location_lists will remove entries from
31276 the table, it only does so for duplicates, and therefore
31277 only reduces ref_counts to 1. */
31278 index_location_lists (comp_unit_die ());
31279 }
31280
31281 if (addr_index_table != NULL)
31282 {
31283 unsigned int index = 0;
31284 addr_index_table
31285 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31286 (&index);
31287 }
31288 }
31289
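/* Reset the running location-list index before the lists are
optimized and, for -gsplit-dwarf with DWARF 5, assigned indexes
below. */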
31290 loc_list_idx = 0;
31291 if (have_location_lists)
31292 {
31293 optimize_location_lists (comp_unit_die ());
31294 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31295 if (dwarf_version >= 5 && dwarf_split_debug_info)
31296 assign_location_list_indexes (comp_unit_die ());
31297 }
31298
31299 save_macinfo_strings ();
31300
31301 if (dwarf_split_debug_info)
31302 {
31303 unsigned int index = 0;
31304
31305 /* Add attributes common to skeleton compile_units and
31306 type_units. Because these attributes include strings, it
31307 must be done before freezing the string table. Top-level
31308 skeleton die attrs are added when the skeleton type unit is
31309 created, so ensure it is created by this point. */
31310 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31311 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31312 }
31313
31314 /* Output all of the compilation units. We put the main one last so that
31315 the offsets are available to output_pubnames. */
31316 for (node = cu_die_list; node; node = node->next)
31317 output_comp_unit (node->die, 0, NULL);
31318
31319 hash_table<comdat_type_hasher> comdat_type_table (100);
31320 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31321 {
31322 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31323
31324 /* Don't output duplicate types. */
31325 if (*slot != HTAB_EMPTY_ENTRY)
31326 continue;
31327
31328 /* Add a pointer to the line table for the main compilation unit
31329 so that the debugger can make sense of DW_AT_decl_file
31330 attributes. */
31331 if (debug_info_level >= DINFO_LEVEL_TERSE)
31332 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31333 (!dwarf_split_debug_info
31334 ? dl_section_ref
31335 : debug_skeleton_line_section_label));
31336
31337 output_comdat_type_unit (ctnode);
31338 *slot = ctnode;
31339 }
31340
31341 if (dwarf_split_debug_info)
31342 {
31343 int mark;
31344 struct md5_ctx ctx;
31345
31346 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31347 index_rnglists ();
31348
31349 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31350 md5_init_ctx (&ctx);
31351 mark = 0;
31352 die_checksum (comp_unit_die (), &ctx, &mark);
31353 unmark_all_dies (comp_unit_die ());
31354 md5_finish_ctx (&ctx, checksum);
31355
31356 if (dwarf_version < 5)
31357 {
31358 /* Use the first 8 bytes of the checksum as the dwo_id,
31359 and add it to both comp-unit DIEs. */
31360 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31361 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31362 }
31363
31364 /* Add the base offset of the ranges table to the skeleton
31365 comp-unit DIE. */
31366 if (!vec_safe_is_empty (ranges_table))
31367 {
31368 if (dwarf_version >= 5)
31369 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31370 ranges_base_label);
31371 else
31372 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31373 ranges_section_label);
31374 }
31375
31376 switch_to_section (debug_addr_section);
31377 /* GNU DebugFission (https://gcc.gnu.org/wiki/DebugFission),
31378 which GCC uses to implement -gsplit-dwarf as a GNU DWARF extension
31379 prior to DWARF5, didn't have a header for .debug_addr units.
31380 DWARF5 specifies a small header when address tables are used. */
31381 if (dwarf_version >= 5)
31382 {
31383 unsigned int last_idx = 0;
31384 unsigned long addrs_length;
31385
31386 addr_index_table->traverse_noresize
31387 <unsigned int *, count_index_addrs> (&last_idx);
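/* The unit length below excludes the length field itself but covers
the 2-byte version, 1-byte address size, 1-byte segment selector
size and the address entries, hence the extra 4 bytes. */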
31388 addrs_length = last_idx * DWARF2_ADDR_SIZE + 4;
31389
31390 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31391 dw2_asm_output_data (4, 0xffffffff,
31392 "Escape value for 64-bit DWARF extension");
31393 dw2_asm_output_data (DWARF_OFFSET_SIZE, addrs_length,
31394 "Length of Address Unit");
31395 dw2_asm_output_data (2, 5, "DWARF addr version");
31396 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
31397 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
31398 }
31399 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
31400 output_addr_table ();
31401 }
31402
31403 /* Output the main compilation unit if non-empty or if .debug_macinfo
31404 or .debug_macro will be emitted. */
31405 output_comp_unit (comp_unit_die (), have_macinfo,
31406 dwarf_split_debug_info ? checksum : NULL);
31407
31408 if (dwarf_split_debug_info && info_section_emitted)
31409 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31410
31411 /* Output the abbreviation table. */
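/* Slot 0 of abbrev_die_table is a dummy entry (abbrev codes start
at 1), so a length of 1 means no abbreviations were assigned and
nothing needs to be output. */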
31412 if (vec_safe_length (abbrev_die_table) != 1)
31413 {
31414 switch_to_section (debug_abbrev_section);
31415 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31416 output_abbrev_section ();
31417 }
31418
31419 /* Output location list section if necessary. */
31420 if (have_location_lists)
31421 {
31422 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31423 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31424 /* Output the location lists info. */
31425 switch_to_section (debug_loc_section);
31426 if (dwarf_version >= 5)
31427 {
31428 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 2);
31429 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 3);
31430 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31431 dw2_asm_output_data (4, 0xffffffff,
31432 "Initial length escape value indicating "
31433 "64-bit DWARF extension");
31434 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
31435 "Length of Location Lists");
31436 ASM_OUTPUT_LABEL (asm_out_file, l1);
31437 output_dwarf_version ();
31438 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31439 dw2_asm_output_data (1, 0, "Segment Size");
31440 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31441 "Offset Entry Count");
31442 }
31443 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
31444 if (dwarf_version >= 5 && dwarf_split_debug_info)
31445 {
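/* Emit the offset entry table; walking the location lists a second
time must produce exactly as many entries as the earlier indexing
pass, which the assert below verifies. */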
31446 unsigned int save_loc_list_idx = loc_list_idx;
31447 loc_list_idx = 0;
31448 output_loclists_offsets (comp_unit_die ());
31449 gcc_assert (save_loc_list_idx == loc_list_idx);
31450 }
31451 output_location_lists (comp_unit_die ());
31452 if (dwarf_version >= 5)
31453 ASM_OUTPUT_LABEL (asm_out_file, l2);
31454 }
31455
31456 output_pubtables ();
31457
31458 /* Output the address range information if a CU (.debug_info section)
31459 was emitted. We output an empty table even if we had no functions
31460 to put in it. This is because the consumer has no way to tell the
31461 difference between an empty table that we omitted and failure to
31462 generate a table that would have contained data. */
31463 if (info_section_emitted)
31464 {
31465 switch_to_section (debug_aranges_section);
31466 output_aranges ();
31467 }
31468
31469 /* Output ranges section if necessary. */
31470 if (!vec_safe_is_empty (ranges_table))
31471 {
31472 if (dwarf_version >= 5)
31473 output_rnglists (generation);
31474 else
31475 output_ranges ();
31476 }
31477
31478 /* Have to end the macro section. */
31479 if (have_macinfo)
31480 {
31481 switch_to_section (debug_macinfo_section);
31482 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31483 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31484 : debug_skeleton_line_section_label, false);
31485 dw2_asm_output_data (1, 0, "End compilation unit");
31486 }
31487
31488 /* Output the source line correspondence table. We must do this
31489 even if there is no line information. Otherwise, on an empty
31490 translation unit, we will generate a present, but empty,
31491 .debug_info section. IRIX 6.5 `nm' will then complain when
31492 examining the file. This is done late so that any filenames
31493 used by the debug_info section are marked as 'used'. */
31494 switch_to_section (debug_line_section);
31495 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31496 if (! output_asm_line_debug_info ())
31497 output_line_info (false);
31498
31499 if (dwarf_split_debug_info && info_section_emitted)
31500 {
31501 switch_to_section (debug_skeleton_line_section);
31502 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31503 output_line_info (true);
31504 }
31505
31506 /* If we emitted any indirect strings, output the string table too. */
31507 if (debug_str_hash || skeleton_debug_str_hash)
31508 output_indirect_strings ();
31509 if (debug_line_str_hash)
31510 {
31511 switch_to_section (debug_line_str_section);
31512 const enum dwarf_form form = DW_FORM_line_strp;
31513 debug_line_str_hash->traverse<enum dwarf_form,
31514 output_indirect_string> (form);
31515 }
31516
31517 /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */
31518 symview_upper_bound = 0;
31519 if (zero_view_p)
31520 bitmap_clear (zero_view_p);
31521 }
31522
31523 /* Returns a hash value for X (which really is a variable_value_struct). */
31524
31525 inline hashval_t
31526 variable_value_hasher::hash (variable_value_struct *x)
31527 {
31528 return (hashval_t) x->decl_id;
31529 }
31530
31531 /* Return nonzero if decl_id of variable_value_struct X is the same as
31532 UID of decl Y. */
31533
31534 inline bool
31535 variable_value_hasher::equal (variable_value_struct *x, tree y)
31536 {
31537 return x->decl_id == DECL_UID (y);
31538 }
31539
31540 /* Helper function for resolve_variable_value, handle
31541 DW_OP_GNU_variable_value in one location expression.
31542 Return true if exprloc has been changed into loclist. */
31543
31544 static bool
31545 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31546 {
31547 dw_loc_descr_ref next;
31548 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31549 {
31550 next = loc->dw_loc_next;
31551 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31552 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31553 continue;
31554
31555 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31556 if (DECL_CONTEXT (decl) != current_function_decl)
31557 continue;
31558
31559 dw_die_ref ref = lookup_decl_die (decl);
31560 if (ref)
31561 {
31562 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31563 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31564 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31565 continue;
31566 }
31567 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31568 if (l == NULL)
31569 continue;
31570 if (l->dw_loc_next)
31571 {
31572 if (AT_class (a) != dw_val_class_loc)
31573 continue;
31574 switch (a->dw_attr)
31575 {
31576 /* Following attributes allow both exprloc and loclist
31577 classes, so we can change them into a loclist. */
31578 case DW_AT_location:
31579 case DW_AT_string_length:
31580 case DW_AT_return_addr:
31581 case DW_AT_data_member_location:
31582 case DW_AT_frame_base:
31583 case DW_AT_segment:
31584 case DW_AT_static_link:
31585 case DW_AT_use_location:
31586 case DW_AT_vtable_elem_location:
31587 if (prev)
31588 {
31589 prev->dw_loc_next = NULL;
31590 prepend_loc_descr_to_each (l, AT_loc (a));
31591 }
31592 if (next)
31593 add_loc_descr_to_each (l, next);
31594 a->dw_attr_val.val_class = dw_val_class_loc_list;
31595 a->dw_attr_val.val_entry = NULL;
31596 a->dw_attr_val.v.val_loc_list = l;
31597 have_location_lists = true;
31598 return true;
31599 /* Following attributes allow both exprloc and reference,
31600 so if the whole expression is DW_OP_GNU_variable_value alone
31601 we could transform it into reference. */
31602 case DW_AT_byte_size:
31603 case DW_AT_bit_size:
31604 case DW_AT_lower_bound:
31605 case DW_AT_upper_bound:
31606 case DW_AT_bit_stride:
31607 case DW_AT_count:
31608 case DW_AT_allocated:
31609 case DW_AT_associated:
31610 case DW_AT_byte_stride:
31611 if (prev == NULL && next == NULL)
31612 break;
31613 /* FALLTHRU */
31614 default:
31615 if (dwarf_strict)
31616 continue;
31617 break;
31618 }
31619 /* Create DW_TAG_variable that we can refer to. */
31620 gen_decl_die (decl, NULL_TREE, NULL,
31621 lookup_decl_die (current_function_decl));
31622 ref = lookup_decl_die (decl);
31623 if (ref)
31624 {
31625 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31626 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31627 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31628 }
31629 continue;
31630 }
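/* Single-entry location description: splice l->expr in place of the
DW_OP_GNU_variable_value operation and continue scanning from the
start of the spliced expression. */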
31631 if (prev)
31632 {
31633 prev->dw_loc_next = l->expr;
31634 add_loc_descr (&prev->dw_loc_next, next);
31635 free_loc_descr (loc, NULL);
31636 next = prev->dw_loc_next;
31637 }
31638 else
31639 {
31640 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31641 add_loc_descr (&loc, next);
31642 next = loc;
31643 }
31644 loc = prev;
31645 }
31646 return false;
31647 }
31648
31649 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31650
31651 static void
31652 resolve_variable_value (dw_die_ref die)
31653 {
31654 dw_attr_node *a;
31655 dw_loc_list_ref loc;
31656 unsigned ix;
31657
31658 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31659 switch (AT_class (a))
31660 {
31661 case dw_val_class_loc:
31662 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
31663 break;
31664 /* FALLTHRU */
31665 case dw_val_class_loc_list:
31666 loc = AT_loc_list (a);
31667 gcc_assert (loc);
31668 for (; loc; loc = loc->dw_loc_next)
31669 resolve_variable_value_in_expr (a, loc->expr);
31670 break;
31671 default:
31672 break;
31673 }
31674 }
31675
31676 /* Attempt to optimize DW_OP_GNU_variable_value referring to
31677 temporaries in the current function. */
31678
31679 static void
31680 resolve_variable_values (void)
31681 {
31682 if (!variable_value_hash || !current_function_decl)
31683 return;
31684
31685 struct variable_value_struct *node
31686 = variable_value_hash->find_with_hash (current_function_decl,
31687 DECL_UID (current_function_decl));
31688
31689 if (node == NULL)
31690 return;
31691
31692 unsigned int i;
31693 dw_die_ref die;
31694 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
31695 resolve_variable_value (die);
31696 }
31697
31698 /* Helper function for note_variable_value, handle one location
31699 expression. */
31700
31701 static void
31702 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
31703 {
31704 for (; loc; loc = loc->dw_loc_next)
31705 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
31706 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
31707 {
31708 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31709 dw_die_ref ref = lookup_decl_die (decl);
31710 if (! ref && (flag_generate_lto || flag_generate_offload))
31711 {
31712 /* ??? This is somewhat of a hack: we do not create DIEs for
31713 variables not in BLOCK trees early, but when generating
31714 early LTO output we need the dw_val_class_decl_ref to be
31715 fully resolved. For fat LTO objects we'd also like to
31716 undo this after LTO dwarf output. */
31717 gcc_assert (DECL_CONTEXT (decl));
31718 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
31719 gcc_assert (ctx != NULL);
31720 gen_decl_die (decl, NULL_TREE, NULL, ctx);
31721 ref = lookup_decl_die (decl);
31722 gcc_assert (ref != NULL);
31723 }
31724 if (ref)
31725 {
31726 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31727 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31728 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31729 continue;
31730 }
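/* No DIE for DECL exists yet. Remember the referencing DIE in a
per-function table so that resolve_variable_values can retry once
the containing function's DIEs have been generated. */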
31731 if (VAR_P (decl)
31732 && DECL_CONTEXT (decl)
31733 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
31734 && lookup_decl_die (DECL_CONTEXT (decl)))
31735 {
31736 if (!variable_value_hash)
31737 variable_value_hash
31738 = hash_table<variable_value_hasher>::create_ggc (10);
31739
31740 tree fndecl = DECL_CONTEXT (decl);
31741 struct variable_value_struct *node;
31742 struct variable_value_struct **slot
31743 = variable_value_hash->find_slot_with_hash (fndecl,
31744 DECL_UID (fndecl),
31745 INSERT);
31746 if (*slot == NULL)
31747 {
31748 node = ggc_cleared_alloc<variable_value_struct> ();
31749 node->decl_id = DECL_UID (fndecl);
31750 *slot = node;
31751 }
31752 else
31753 node = *slot;
31754
31755 vec_safe_push (node->dies, die);
31756 }
31757 }
31758 }
31759
31760 /* Walk the tree DIE and note DIEs with DW_OP_GNU_variable_value still
31761 with dw_val_class_decl_ref operand. */
31762
31763 static void
31764 note_variable_value (dw_die_ref die)
31765 {
31766 dw_die_ref c;
31767 dw_attr_node *a;
31768 dw_loc_list_ref loc;
31769 unsigned ix;
31770
31771 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31772 switch (AT_class (a))
31773 {
31774 case dw_val_class_loc_list:
31775 loc = AT_loc_list (a);
31776 gcc_assert (loc);
31777 if (!loc->noted_variable_value)
31778 {
31779 loc->noted_variable_value = 1;
31780 for (; loc; loc = loc->dw_loc_next)
31781 note_variable_value_in_expr (die, loc->expr);
31782 }
31783 break;
31784 case dw_val_class_loc:
31785 note_variable_value_in_expr (die, AT_loc (a));
31786 break;
31787 default:
31788 break;
31789 }
31790
31791 /* Mark children. */
31792 FOR_EACH_CHILD (die, c, note_variable_value (c));
31793 }
31794
31795 /* Perform any cleanups needed after the early debug generation pass
31796 has run. */
31797
31798 static void
31799 dwarf2out_early_finish (const char *filename)
31800 {
31801 set_early_dwarf s;
31802 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31803
31804 /* PCH might result in the DW_AT_producer string being restored from the
31805 header compilation, so always fill it with an empty string initially
31806 and overwrite it only here. */
31807 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
31808 producer_string = gen_producer_string ();
31809 producer->dw_attr_val.v.val_str->refcount--;
31810 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
31811
31812 /* Add the name for the main input file now. We delayed this from
31813 dwarf2out_init to avoid complications with PCH. */
31814 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
31815 add_comp_dir_attribute (comp_unit_die ());
31816
31817 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
31818 DW_AT_comp_dir into .debug_line_str section. */
31819 if (!output_asm_line_debug_info ()
31820 && dwarf_version >= 5
31821 && DWARF5_USE_DEBUG_LINE_STR)
31822 {
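/* Iteration 0 handles DW_AT_name, iteration 1 DW_AT_comp_dir. */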
31823 for (int i = 0; i < 2; i++)
31824 {
31825 dw_attr_node *a = get_AT (comp_unit_die (),
31826 i ? DW_AT_comp_dir : DW_AT_name);
31827 if (a == NULL
31828 || AT_class (a) != dw_val_class_str
31829 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
31830 continue;
31831
31832 if (! debug_line_str_hash)
31833 debug_line_str_hash
31834 = hash_table<indirect_string_hasher>::create_ggc (10);
31835
31836 struct indirect_string_node *node
31837 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
31838 set_indirect_string (node);
31839 node->form = DW_FORM_line_strp;
31840 a->dw_attr_val.v.val_str->refcount--;
31841 a->dw_attr_val.v.val_str = node;
31842 }
31843 }
31844
31845 /* With LTO early dwarf was really finished at compile-time, so make
31846 sure to adjust the phase after annotating the LTRANS CU DIE. */
31847 if (in_lto_p)
31848 {
31849 early_dwarf_finished = true;
31850 if (dump_file)
31851 {
31852 fprintf (dump_file, "LTO EARLY DWARF for %s\n", filename);
31853 print_die (comp_unit_die (), dump_file);
31854 }
31855 return;
31856 }
31857
31858 /* Walk through the list of incomplete types again, trying once more to
31859 emit full debugging info for them. */
31860 retry_incomplete_types ();
31861
31862 /* The point here is to flush out the limbo list so that it is empty
31863 and we don't need to stream it for LTO. */
31864 flush_limbo_die_list ();
31865
31866 gen_scheduled_generic_parms_dies ();
31867 gen_remaining_tmpl_value_param_die_attribute ();
31868
31869 /* Add DW_AT_linkage_name for all deferred DIEs. */
31870 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
31871 {
31872 tree decl = node->created_for;
31873 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
31874 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
31875 ended up in deferred_asm_name before we knew it was
31876 constant and never written to disk. */
31877 && DECL_ASSEMBLER_NAME (decl))
31878 {
31879 add_linkage_attr (node->die, decl);
31880 move_linkage_attr (node->die);
31881 }
31882 }
31883 deferred_asm_name = NULL;
31884
31885 if (flag_eliminate_unused_debug_types)
31886 prune_unused_types ();
31887
31888 /* Generate separate COMDAT sections for type DIEs. */
31889 if (use_debug_types)
31890 {
31891 break_out_comdat_types (comp_unit_die ());
31892
31893 /* Each new type_unit DIE was added to the limbo die list when created.
31894 Since these have all been added to comdat_type_list, clear the
31895 limbo die list. */
31896 limbo_die_list = NULL;
31897
31898 /* For each new comdat type unit, copy declarations for incomplete
31899 types to make the new unit self-contained (i.e., no direct
31900 references to the main compile unit). */
31901 for (comdat_type_node *ctnode = comdat_type_list;
31902 ctnode != NULL; ctnode = ctnode->next)
31903 copy_decls_for_unworthy_types (ctnode->root_die);
31904 copy_decls_for_unworthy_types (comp_unit_die ());
31905
31906 /* In the process of copying declarations from one unit to another,
31907 we may have left some declarations behind that are no longer
31908 referenced. Prune them. */
31909 prune_unused_types ();
31910 }
31911
31912 /* Traverse the DIE's and note DIEs with DW_OP_GNU_variable_value still
31913 with dw_val_class_decl_ref operand. */
31914 note_variable_value (comp_unit_die ());
31915 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31916 note_variable_value (node->die);
31917 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
31918 ctnode = ctnode->next)
31919 note_variable_value (ctnode->root_die);
31920 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31921 note_variable_value (node->die);
31922
31923 /* The AT_pubnames attribute needs to go in all skeleton dies, including
31924 both the main_cu and all skeleton TUs. Making this call unconditional
31925 would end up either adding a second copy of the AT_pubnames attribute, or
31926 requiring a special case in add_top_level_skeleton_die_attrs. */
31927 if (!dwarf_split_debug_info)
31928 add_AT_pubnames (comp_unit_die ());
31929
31930 /* The early debug phase is now finished. */
31931 early_dwarf_finished = true;
31932 if (dump_file)
31933 {
31934 fprintf (dump_file, "EARLY DWARF for %s\n", filename);
31935 print_die (comp_unit_die (), dump_file);
31936 }
31937
31938 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
31939 if ((!flag_generate_lto && !flag_generate_offload)
31940 /* FIXME: Disable debug info generation for (PE-)COFF targets since the
31941 copy_lto_debug_sections operation of the simple object support in
31942 libiberty is not implemented for them yet. */
31943 || TARGET_PECOFF || TARGET_COFF)
31944 return;
31945
31946 /* Now, as we are going to output for LTO, initialize sections and labels
31947 to the LTO variants. We don't need a random-seed postfix as with
31948 other LTO sections, since linking the LTO debug sections into one in
31949 a partial link is fine. */
31950 init_sections_and_labels (true);
31951
31952 /* The output below is modeled after dwarf2out_finish with all
31953 location related output removed and some LTO specific changes.
31954 Some refactoring might make both smaller and easier to match up. */
31955
31956 /* Traverse the DIE's and add sibling attributes to those DIE's
31957 that have children. */
31958 add_sibling_attributes (comp_unit_die ());
31959 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31960 add_sibling_attributes (node->die);
31961 for (comdat_type_node *ctnode = comdat_type_list;
31962 ctnode != NULL; ctnode = ctnode->next)
31963 add_sibling_attributes (ctnode->root_die);
31964
31965 /* AIX Assembler inserts the length, so adjust the reference to match the
31966 offset expected by debuggers. */
31967 strcpy (dl_section_ref, debug_line_section_label);
31968 if (XCOFF_DEBUGGING_INFO)
31969 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31970
31971 if (debug_info_level >= DINFO_LEVEL_TERSE)
31972 add_AT_lineptr (comp_unit_die (), DW_AT_stmt_list, dl_section_ref);
31973
31974 if (have_macinfo)
31975 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31976 macinfo_section_label);
31977
31978 save_macinfo_strings ();
31979
31980 if (dwarf_split_debug_info)
31981 {
31982 unsigned int index = 0;
31983 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31984 }
31985
31986 /* Output all of the compilation units. We put the main one last so that
31987 the offsets are available to output_pubnames. */
31988 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31989 output_comp_unit (node->die, 0, NULL);
31990
31991 hash_table<comdat_type_hasher> comdat_type_table (100);
31992 for (comdat_type_node *ctnode = comdat_type_list;
31993 ctnode != NULL; ctnode = ctnode->next)
31994 {
31995 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31996
31997 /* Don't output duplicate types. */
31998 if (*slot != HTAB_EMPTY_ENTRY)
31999 continue;
32000
32001 /* Add a pointer to the line table for the main compilation unit
32002 so that the debugger can make sense of DW_AT_decl_file
32003 attributes. */
32004 if (debug_info_level >= DINFO_LEVEL_TERSE)
32005 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
32006 (!dwarf_split_debug_info
32007 ? debug_line_section_label
32008 : debug_skeleton_line_section_label));
32009
32010 output_comdat_type_unit (ctnode);
32011 *slot = ctnode;
32012 }
32013
32014 /* Stick a unique symbol to the main debuginfo section. */
32015 compute_comp_unit_symbol (comp_unit_die ());
32016
32017 /* Output the main compilation unit. We always need it if only for
32018 the CU symbol. */
32019 output_comp_unit (comp_unit_die (), true, NULL);
32020
32021 /* Output the abbreviation table. */
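/* As in dwarf2out_finish, slot 0 of abbrev_die_table is a dummy
entry, so a length of 1 means the table is effectively empty. */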
32022 if (vec_safe_length (abbrev_die_table) != 1)
32023 {
32024 switch_to_section (debug_abbrev_section);
32025 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
32026 output_abbrev_section ();
32027 }
32028
32029 /* Have to end the macro section. */
32030 if (have_macinfo)
32031 {
32032 /* We have to save macinfo state if we need to output it again
32033 for the FAT part of the object. */
32034 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
32035 if (flag_fat_lto_objects)
32036 macinfo_table = macinfo_table->copy ();
32037
32038 switch_to_section (debug_macinfo_section);
32039 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
32040 output_macinfo (debug_line_section_label, true);
32041 dw2_asm_output_data (1, 0, "End compilation unit");
32042
32043 if (flag_fat_lto_objects)
32044 {
32045 vec_free (macinfo_table);
32046 macinfo_table = saved_macinfo_table;
32047 }
32048 }
32049
32050 /* Emit a skeleton debug_line section. */
32051 switch_to_section (debug_line_section);
32052 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
32053 output_line_info (true);
32054
32055 /* If we emitted any indirect strings, output the string table too. */
32056 if (debug_str_hash || skeleton_debug_str_hash)
32057 output_indirect_strings ();
32058 if (debug_line_str_hash)
32059 {
32060 switch_to_section (debug_line_str_section);
32061 const enum dwarf_form form = DW_FORM_line_strp;
32062 debug_line_str_hash->traverse<enum dwarf_form,
32063 output_indirect_string> (form);
32064 }
32065
32066 /* Switch back to the text section. */
32067 switch_to_section (text_section);
32068 }
32069
32070 /* Reset all state within dwarf2out.c so that we can rerun the compiler
32071 within the same process. For use by toplev::finalize. */
32072
32073 void
32074 dwarf2out_c_finalize (void)
32075 {
32076 last_var_location_insn = NULL;
32077 cached_next_real_insn = NULL;
32078 used_rtx_array = NULL;
32079 incomplete_types = NULL;
32080 debug_info_section = NULL;
32081 debug_skeleton_info_section = NULL;
32082 debug_abbrev_section = NULL;
32083 debug_skeleton_abbrev_section = NULL;
32084 debug_aranges_section = NULL;
32085 debug_addr_section = NULL;
32086 debug_macinfo_section = NULL;
32087 debug_line_section = NULL;
32088 debug_skeleton_line_section = NULL;
32089 debug_loc_section = NULL;
32090 debug_pubnames_section = NULL;
32091 debug_pubtypes_section = NULL;
32092 debug_str_section = NULL;
32093 debug_line_str_section = NULL;
32094 debug_str_dwo_section = NULL;
32095 debug_str_offsets_section = NULL;
32096 debug_ranges_section = NULL;
32097 debug_frame_section = NULL;
32098 fde_vec = NULL;
32099 debug_str_hash = NULL;
32100 debug_line_str_hash = NULL;
32101 skeleton_debug_str_hash = NULL;
32102 dw2_string_counter = 0;
32103 have_multiple_function_sections = false;
32104 text_section_used = false;
32105 cold_text_section_used = false;
32106 cold_text_section = NULL;
32107 current_unit_personality = NULL;
32108
32109 early_dwarf = false;
32110 early_dwarf_finished = false;
32111
32112 next_die_offset = 0;
32113 single_comp_unit_die = NULL;
32114 comdat_type_list = NULL;
32115 limbo_die_list = NULL;
32116 file_table = NULL;
32117 decl_die_table = NULL;
32118 common_block_die_table = NULL;
32119 decl_loc_table = NULL;
32120 call_arg_locations = NULL;
32121 call_arg_loc_last = NULL;
32122 call_site_count = -1;
32123 tail_call_site_count = -1;
32124 cached_dw_loc_list_table = NULL;
32125 abbrev_die_table = NULL;
32126 delete dwarf_proc_stack_usage_map;
32127 dwarf_proc_stack_usage_map = NULL;
32128 line_info_label_num = 0;
32129 cur_line_info_table = NULL;
32130 text_section_line_info = NULL;
32131 cold_text_section_line_info = NULL;
32132 separate_line_info = NULL;
32133 info_section_emitted = false;
32134 pubname_table = NULL;
32135 pubtype_table = NULL;
32136 macinfo_table = NULL;
32137 ranges_table = NULL;
32138 ranges_by_label = NULL;
32139 rnglist_idx = 0;
32140 have_location_lists = false;
32141 loclabel_num = 0;
32142 poc_label_num = 0;
32143 last_emitted_file = NULL;
32144 label_num = 0;
32145 tmpl_value_parm_die_table = NULL;
32146 generic_type_instances = NULL;
32147 frame_pointer_fb_offset = 0;
32148 frame_pointer_fb_offset_valid = false;
32149 base_types.release ();
32150 XDELETEVEC (producer_string);
32151 producer_string = NULL;
32152 }
32153
32154 #include "gt-dwarf2out.h"