gcc.git/blob: gcc/lto/lto-common.cc ("Make -fwhole-program to work with incremental LTO linking")
1 /* Top-level LTO routines.
2 Copyright (C) 2009-2022 Free Software Foundation, Inc.
3 Contributed by CodeSourcery, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "function.h"
26 #include "bitmap.h"
27 #include "basic-block.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "cfghooks.h"
31 #include "alloc-pool.h"
32 #include "tree-pass.h"
33 #include "tree-streamer.h"
34 #include "cgraph.h"
35 #include "opts.h"
36 #include "toplev.h"
37 #include "stor-layout.h"
38 #include "symbol-summary.h"
39 #include "tree-vrp.h"
40 #include "ipa-prop.h"
41 #include "common.h"
42 #include "debug.h"
43 #include "lto.h"
44 #include "lto-section-names.h"
45 #include "splay-tree.h"
46 #include "lto-partition.h"
47 #include "context.h"
48 #include "pass_manager.h"
49 #include "ipa-fnsummary.h"
50 #include "ipa-utils.h"
51 #include "gomp-constants.h"
52 #include "lto-symtab.h"
53 #include "stringpool.h"
54 #include "fold-const.h"
55 #include "attribs.h"
56 #include "builtins.h"
57 #include "lto-common.h"
58 #include "tree-pretty-print.h"
59 #include "print-tree.h"
60
61 /* True when no new types are going to be streamed from the global stream. */
62
63 static bool type_streaming_finished = false;
64
65 GTY(()) tree first_personality_decl;
66
67 GTY(()) const unsigned char *lto_mode_identity_table;
68
69 /* Returns a hash code for P. */
70
71 static hashval_t
72 hash_name (const void *p)
73 {
74 const struct lto_section_slot *ds = (const struct lto_section_slot *) p;
75 return (hashval_t) htab_hash_string (ds->name);
76 }
77
78
79 /* Returns nonzero if P1 and P2 are equal. */
80
81 static int
82 eq_name (const void *p1, const void *p2)
83 {
84 const struct lto_section_slot *s1
85 = (const struct lto_section_slot *) p1;
86 const struct lto_section_slot *s2
87 = (const struct lto_section_slot *) p2;
88
89 return strcmp (s1->name, s2->name) == 0;
90 }
91
92 /* Free lto_section_slot. */
93
94 static void
95 free_with_string (void *arg)
96 {
97 struct lto_section_slot *s = (struct lto_section_slot *)arg;
98
99 free (CONST_CAST (char *, s->name));
100 free (arg);
101 }
102
103 /* Create section hash table. */
104
105 htab_t
106 lto_obj_create_section_hash_table (void)
107 {
108 return htab_create (37, hash_name, eq_name, free_with_string);
109 }
110
111 /* Delete an allocated integer KEY in the splay tree. */
112
113 static void
114 lto_splay_tree_delete_id (splay_tree_key key)
115 {
116 free ((void *) key);
117 }
118
119 /* Compare splay tree node ids A and B. */
120
121 static int
122 lto_splay_tree_compare_ids (splay_tree_key a, splay_tree_key b)
123 {
124 unsigned HOST_WIDE_INT ai;
125 unsigned HOST_WIDE_INT bi;
126
127 ai = *(unsigned HOST_WIDE_INT *) a;
128 bi = *(unsigned HOST_WIDE_INT *) b;
129
130 if (ai < bi)
131 return -1;
132 else if (ai > bi)
133 return 1;
134 return 0;
135 }
136
137 /* Look up splay tree node by ID in splay tree T. */
138
139 static splay_tree_node
140 lto_splay_tree_lookup (splay_tree t, unsigned HOST_WIDE_INT id)
141 {
142 return splay_tree_lookup (t, (splay_tree_key) &id);
143 }
144
145 /* Check if KEY has ID. */
146
147 static bool
148 lto_splay_tree_id_equal_p (splay_tree_key key, unsigned HOST_WIDE_INT id)
149 {
150 return *(unsigned HOST_WIDE_INT *) key == id;
151 }
152
153 /* Insert a splay tree node into tree T with ID as key and FILE_DATA as value.
154 The ID is allocated separately because we need HOST_WIDE_INTs which may
155 be wider than a splay_tree_key. */
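/* This is also why lto_splay_tree_compare_ids and lto_splay_tree_lookup
   above treat each splay_tree_key as a pointer to an unsigned HOST_WIDE_INT
   rather than as the ID value itself.  */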
156
157 static void
158 lto_splay_tree_insert (splay_tree t, unsigned HOST_WIDE_INT id,
159 struct lto_file_decl_data *file_data)
160 {
161 unsigned HOST_WIDE_INT *idp = XCNEW (unsigned HOST_WIDE_INT);
162 *idp = id;
163 splay_tree_insert (t, (splay_tree_key) idp, (splay_tree_value) file_data);
164 }
165
166 /* Create a splay tree. */
167
168 static splay_tree
169 lto_splay_tree_new (void)
170 {
171 return splay_tree_new (lto_splay_tree_compare_ids,
172 lto_splay_tree_delete_id,
173 NULL);
174 }
175
176 /* Decode the content of memory pointed to by DATA in the in decl
177 state object STATE. DATA_IN points to a data_in structure for
178 decoding. Return the address after the decoded object in the
179 input. */
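/* For reference, the blob decoded below has the following layout
   (as implied by the reads performed here):

	[ fn-decl cache index * 2 + compressed_bit ]
	for each of the LTO_N_DECL_STREAMS streams:
	  [ size ] [ ref_0 ] ... [ ref_{size-1} ]

   where every ref_i is an index into the streamer tree cache
   (data_in->reader_cache).  */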
180
181 static const uint32_t *
182 lto_read_in_decl_state (class data_in *data_in, const uint32_t *data,
183 struct lto_in_decl_state *state)
184 {
185 uint32_t ix;
186 tree decl;
187 uint32_t i, j;
188
189 ix = *data++;
190 state->compressed = ix & 1;
191 ix /= 2;
192 decl = streamer_tree_cache_get_tree (data_in->reader_cache, ix);
193 if (!VAR_OR_FUNCTION_DECL_P (decl))
194 {
195 gcc_assert (decl == void_type_node);
196 decl = NULL_TREE;
197 }
198 state->fn_decl = decl;
199
200 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
201 {
202 uint32_t size = *data++;
203 vec<tree, va_gc> *decls = NULL;
204 vec_alloc (decls, size);
205
206 for (j = 0; j < size; j++)
207 vec_safe_push (decls,
208 streamer_tree_cache_get_tree (data_in->reader_cache,
209 data[j]));
210
211 state->streams[i] = decls;
212 data += size;
213 }
214
215 return data;
216 }
217
218
219 /* Global canonical type table. */
220 static htab_t gimple_canonical_types;
221 static hash_map<const_tree, hashval_t> *canonical_type_hash_cache;
222 static unsigned long num_canonical_type_hash_entries;
223 static unsigned long num_canonical_type_hash_queries;
224
225 /* Types postponed for registration in the canonical type table.
226 During streaming we postpone all TYPE_CXX_ODR_P types so we can later
227 decide whether there is a conflict with a non-ODR type or not. */
228 static GTY(()) vec<tree, va_gc> *types_to_register = NULL;
229
230 static void iterative_hash_canonical_type (tree type, inchash::hash &hstate);
231 static hashval_t gimple_canonical_type_hash (const void *p);
232 static hashval_t gimple_register_canonical_type_1 (tree t, hashval_t hash);
233
234 /* Return a hash value for gimple type TYPE.
235
236 The hash value returned is equal for types considered compatible
237 by gimple_canonical_types_compatible_p. */
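238 
/* Note that neither TYPE_NAME nor field names participate in the hash;
   structurally equivalent aggregates streamed in from different
   translation units (possibly in different languages) are intended to
   hash identically so that they can be merged for canonical-type (TBAA)
   purposes.  */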
238
239 static hashval_t
240 hash_canonical_type (tree type)
241 {
242 inchash::hash hstate;
243 enum tree_code code;
244
245 /* We compute alias sets only for types that need them.
246 Be sure we do not recurse to something else, as we cannot hash incomplete
247 types in a way that would give them the same hash value as compatible
248 complete types. */
249 gcc_checking_assert (type_with_alias_set_p (type));
250
251 /* Combine a few common features of types so that types are grouped into
252 smaller sets; when searching for existing matching types to merge,
253 only existing types having the same features as the new type will be
254 checked. */
255 code = tree_code_for_canonical_type_merging (TREE_CODE (type));
256 hstate.add_int (code);
257 hstate.add_int (TYPE_MODE (type));
258
259 /* Incorporate common features of numerical types. */
260 if (INTEGRAL_TYPE_P (type)
261 || SCALAR_FLOAT_TYPE_P (type)
262 || FIXED_POINT_TYPE_P (type)
263 || TREE_CODE (type) == OFFSET_TYPE
264 || POINTER_TYPE_P (type))
265 {
266 hstate.add_int (TYPE_PRECISION (type));
267 if (!type_with_interoperable_signedness (type))
268 hstate.add_int (TYPE_UNSIGNED (type));
269 }
270
271 if (VECTOR_TYPE_P (type))
272 {
273 hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
274 hstate.add_int (TYPE_UNSIGNED (type));
275 }
276
277 if (TREE_CODE (type) == COMPLEX_TYPE)
278 hstate.add_int (TYPE_UNSIGNED (type));
279
280 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
281 interoperable with "signed char". Unless all frontends are revisited to
282 agree on these types, we must ignore the flag completely. */
283
284 /* The Fortran standard defines the C_PTR type, which is compatible with
285 every C pointer. For this reason we need to glob all pointers into one.
286 Still, pointers in different address spaces are not compatible. */
287 if (POINTER_TYPE_P (type))
288 hstate.add_int (TYPE_ADDR_SPACE (TREE_TYPE (type)));
289
290 /* For array types hash the domain bounds and the string flag. */
291 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
292 {
293 hstate.add_int (TYPE_STRING_FLAG (type));
294 /* OMP lowering can introduce error_mark_node in place of
295 random local decls in types. */
296 if (TYPE_MIN_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
297 inchash::add_expr (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), hstate);
298 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
299 inchash::add_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), hstate);
300 }
301
302 /* Recurse for aggregates with a single element type. */
303 if (TREE_CODE (type) == ARRAY_TYPE
304 || TREE_CODE (type) == COMPLEX_TYPE
305 || TREE_CODE (type) == VECTOR_TYPE)
306 iterative_hash_canonical_type (TREE_TYPE (type), hstate);
307
308 /* Incorporate function return and argument types. */
309 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
310 {
311 unsigned na;
312 tree p;
313
314 iterative_hash_canonical_type (TREE_TYPE (type), hstate);
315
316 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
317 {
318 iterative_hash_canonical_type (TREE_VALUE (p), hstate);
319 na++;
320 }
321
322 hstate.add_int (na);
323 }
324
325 if (RECORD_OR_UNION_TYPE_P (type))
326 {
327 unsigned nf;
328 tree f;
329
330 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
331 if (TREE_CODE (f) == FIELD_DECL
332 && (! DECL_SIZE (f)
333 || ! integer_zerop (DECL_SIZE (f))))
334 {
335 iterative_hash_canonical_type (TREE_TYPE (f), hstate);
336 nf++;
337 }
338
339 hstate.add_int (nf);
340 }
341
342 return hstate.end();
343 }
344
345 /* Incorporate a hash value for gimple type TYPE into HSTATE. */
346
347 static void
348 iterative_hash_canonical_type (tree type, inchash::hash &hstate)
349 {
350 hashval_t v;
351
352 /* All type variants have the same TYPE_CANONICAL. */
353 type = TYPE_MAIN_VARIANT (type);
354
355 if (!canonical_type_used_p (type))
356 v = hash_canonical_type (type);
357 /* An already processed type. */
358 else if (TYPE_CANONICAL (type))
359 {
360 type = TYPE_CANONICAL (type);
361 v = gimple_canonical_type_hash (type);
362 }
363 else
364 {
365 /* Canonical types should not be able to form SCCs by design; this
366 recursion happens only because we do not register canonical types in
367 optimal order. To avoid quadratic behavior, also register the
368 type here. */
369 v = hash_canonical_type (type);
370 v = gimple_register_canonical_type_1 (type, v);
371 }
372 hstate.merge_hash (v);
373 }
374
375 /* Returns the hash for a canonical type P. */
376
377 static hashval_t
378 gimple_canonical_type_hash (const void *p)
379 {
380 num_canonical_type_hash_queries++;
381 hashval_t *slot = canonical_type_hash_cache->get ((const_tree) p);
382 gcc_assert (slot != NULL);
383 return *slot;
384 }
385
386
387
388 /* Returns nonzero if P1 and P2 are equal. */
389
390 static int
391 gimple_canonical_type_eq (const void *p1, const void *p2)
392 {
393 const_tree t1 = (const_tree) p1;
394 const_tree t2 = (const_tree) p2;
395 return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1),
396 CONST_CAST_TREE (t2));
397 }
398
399 /* Main worker for gimple_register_canonical_type. */
400
401 static hashval_t
402 gimple_register_canonical_type_1 (tree t, hashval_t hash)
403 {
404 void **slot;
405
406 gcc_checking_assert (TYPE_P (t) && !TYPE_CANONICAL (t)
407 && type_with_alias_set_p (t)
408 && canonical_type_used_p (t));
409
410 /* ODR types for which there is no ODR violation and for which we did not
411 record a structurally equivalent non-ODR type can be treated as unique
412 by their name.
413
414 The HASH passed to gimple_register_canonical_type_1 is a structural hash
415 that we can use to look up a structurally equivalent non-ODR type.
416 In case we decide to treat the type as a unique ODR type, we recompute the
417 hash based on its name and let the TBAA machinery know about our decision. */
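  /* In summary (matching the code below): an anonymous-namespace ODR type
     is always registered as a new canonical ODR type and hashed by its
     TYPE_UID; an ODR type that structurally collides with an already
     registered non-ODR type gets that non-ODR type as its canonical type;
     otherwise the prevailing ODR type becomes canonical and the hash is
     recomputed from the mangled name.  */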
418 if (RECORD_OR_UNION_TYPE_P (t) && odr_type_p (t)
419 && TYPE_CXX_ODR_P (t) && !odr_type_violation_reported_p (t))
420 {
421 /* Anonymous namespace types never conflict with non-C++ types. */
422 if (type_with_linkage_p (t) && type_in_anonymous_namespace_p (t))
423 slot = NULL;
424 else
425 {
426 /* Here we rely on the fact that all non-ODR types were inserted into
427 the canonical type hash and thus we can safely detect conflicts between
428 ODR types and interoperable non-ODR types. */
429 gcc_checking_assert (type_streaming_finished
430 && TYPE_MAIN_VARIANT (t) == t);
431 slot = htab_find_slot_with_hash (gimple_canonical_types, t, hash,
432 NO_INSERT);
433 }
434 if (slot && !TYPE_CXX_ODR_P (*(tree *)slot))
435 {
436 tree nonodr = *(tree *)slot;
437 gcc_checking_assert (!flag_ltrans);
438 if (symtab->dump_file)
439 {
440 fprintf (symtab->dump_file,
441 "ODR and non-ODR type conflict: ");
442 print_generic_expr (symtab->dump_file, t);
443 fprintf (symtab->dump_file, " and ");
444 print_generic_expr (symtab->dump_file, nonodr);
445 fprintf (symtab->dump_file, " mangled:%s\n",
446 IDENTIFIER_POINTER
447 (DECL_ASSEMBLER_NAME (TYPE_NAME (t))));
448 }
449 /* Set canonical for T and all other ODR equivalent duplicates
450 including incomplete structures. */
451 set_type_canonical_for_odr_type (t, nonodr);
452 }
453 else
454 {
455 tree prevail = prevailing_odr_type (t);
456
457 if (symtab->dump_file)
458 {
459 fprintf (symtab->dump_file,
460 "New canonical ODR type: ");
461 print_generic_expr (symtab->dump_file, t);
462 fprintf (symtab->dump_file, " mangled:%s\n",
463 IDENTIFIER_POINTER
464 (DECL_ASSEMBLER_NAME (TYPE_NAME (t))));
465 }
466 /* Set canonical for T and all other ODR equivalent duplicates
467 including incomplete structures. */
468 set_type_canonical_for_odr_type (t, prevail);
469 enable_odr_based_tbaa (t);
470 if (!type_in_anonymous_namespace_p (t))
471 hash = htab_hash_string (IDENTIFIER_POINTER
472 (DECL_ASSEMBLER_NAME
473 (TYPE_NAME (t))));
474 else
475 hash = TYPE_UID (t);
476
477 /* All variants of t now have TYPE_CANONICAL set to prevail.
478 Update canonical type hash cache accordingly. */
479 num_canonical_type_hash_entries++;
480 bool existed_p = canonical_type_hash_cache->put (prevail, hash);
481 gcc_checking_assert (!existed_p);
482 }
483 return hash;
484 }
485
486 slot = htab_find_slot_with_hash (gimple_canonical_types, t, hash, INSERT);
487 if (*slot)
488 {
489 tree new_type = (tree)(*slot);
490 gcc_checking_assert (new_type != t);
491 TYPE_CANONICAL (t) = new_type;
492 }
493 else
494 {
495 TYPE_CANONICAL (t) = t;
496 *slot = (void *) t;
497 /* Cache the just computed hash value. */
498 num_canonical_type_hash_entries++;
499 bool existed_p = canonical_type_hash_cache->put (t, hash);
500 gcc_assert (!existed_p);
501 }
502 return hash;
503 }
504
505 /* Register type T in the global canonical type table
506 gimple_canonical_types and set TYPE_CANONICAL of T accordingly.
507 This is used by LTO to merge structurally equivalent types for
508 type-based aliasing purposes across different TUs and languages.
509
510 ??? This merging does not exactly match how the tree.cc middle-end
511 functions will assign TYPE_CANONICAL when new types are created
512 during optimization (which at least happens for pointer and array
513 types). */
514
515 static void
516 gimple_register_canonical_type (tree t)
517 {
518 if (TYPE_CANONICAL (t) || !type_with_alias_set_p (t)
519 || !canonical_type_used_p (t))
520 return;
521
522 /* Canonical types are same among all complete variants. */
523 if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (t)))
524 TYPE_CANONICAL (t) = TYPE_CANONICAL (TYPE_MAIN_VARIANT (t));
525 else
526 {
527 hashval_t h = hash_canonical_type (TYPE_MAIN_VARIANT (t));
528 gimple_register_canonical_type_1 (TYPE_MAIN_VARIANT (t), h);
529 TYPE_CANONICAL (t) = TYPE_CANONICAL (TYPE_MAIN_VARIANT (t));
530 }
531 }
532
533 /* Re-compute TYPE_CANONICAL for NODE and related types. */
534
535 static void
536 lto_register_canonical_types (tree node, bool first_p)
537 {
538 if (!node
539 || !TYPE_P (node))
540 return;
541
542 if (first_p)
543 TYPE_CANONICAL (node) = NULL_TREE;
544
545 if (POINTER_TYPE_P (node)
546 || TREE_CODE (node) == COMPLEX_TYPE
547 || TREE_CODE (node) == ARRAY_TYPE)
548 lto_register_canonical_types (TREE_TYPE (node), first_p);
549
550 if (!first_p)
551 gimple_register_canonical_type (node);
552 }
553
554 /* Finish canonical type calculation: after all units have been streamed in
555 we can check whether a given ODR type structurally conflicts with a non-ODR
556 type. In the first case we set the type canonical according to the
557 canonical type hash. In the second case we use type names. */
558
559 static void
560 lto_register_canonical_types_for_odr_types ()
561 {
562 tree t;
563 unsigned int i;
564
565 if (!types_to_register)
566 return;
567
568 type_streaming_finished = true;
569
570 /* Be sure that no types derived from ODR types were
571 inserted into the hash table. */
572 if (flag_checking)
573 FOR_EACH_VEC_ELT (*types_to_register, i, t)
574 gcc_assert (!TYPE_CANONICAL (t));
575
576 /* Register all remaining types. */
577 FOR_EACH_VEC_ELT (*types_to_register, i, t)
578 {
579 /* For pre-streamed types like va-arg it is possible that the main variant
580 is !CXX_ODR_P while the variant (which is streamed) is.
581 Copy CXX_ODR_P to make the type verifier happy. This is safe because
582 in canonical type calculation we only consider main variants.
583 However we cannot change this flag before streaming is finished,
584 so as not to affect tree merging. */
585 TYPE_CXX_ODR_P (t) = TYPE_CXX_ODR_P (TYPE_MAIN_VARIANT (t));
586 if (!TYPE_CANONICAL (t))
587 gimple_register_canonical_type (t);
588 }
589 }
590
591
592 /* Remember trees that contain references to declarations. */
593 vec <tree, va_gc> *tree_with_vars;
594
595 #define CHECK_VAR(tt) \
596 do \
597 { \
598 if ((tt) && VAR_OR_FUNCTION_DECL_P (tt) \
599 && (TREE_PUBLIC (tt) || DECL_EXTERNAL (tt))) \
600 return true; \
601 } while (0)
602
603 #define CHECK_NO_VAR(tt) \
604 gcc_checking_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
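/* CHECK_VAR makes the containing mentions_vars_p_* helper return true as
   soon as the given field may point to a public or external VAR_DECL or
   FUNCTION_DECL (i.e. something that may be replaced during symbol
   merging), while CHECK_NO_VAR merely asserts that the field can never
   hold such a decl.  */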
605
606 /* Check presence of pointers to decls in fields of a tree_typed T. */
607
608 static inline bool
609 mentions_vars_p_typed (tree t)
610 {
611 CHECK_NO_VAR (TREE_TYPE (t));
612 return false;
613 }
614
615 /* Check presence of pointers to decls in fields of a tree_common T. */
616
617 static inline bool
618 mentions_vars_p_common (tree t)
619 {
620 if (mentions_vars_p_typed (t))
621 return true;
622 CHECK_NO_VAR (TREE_CHAIN (t));
623 return false;
624 }
625
626 /* Check presence of pointers to decls in fields of a decl_minimal T. */
627
628 static inline bool
629 mentions_vars_p_decl_minimal (tree t)
630 {
631 if (mentions_vars_p_common (t))
632 return true;
633 CHECK_NO_VAR (DECL_NAME (t));
634 CHECK_VAR (DECL_CONTEXT (t));
635 return false;
636 }
637
638 /* Check presence of pointers to decls in fields of a decl_common T. */
639
640 static inline bool
641 mentions_vars_p_decl_common (tree t)
642 {
643 if (mentions_vars_p_decl_minimal (t))
644 return true;
645 CHECK_VAR (DECL_SIZE (t));
646 CHECK_VAR (DECL_SIZE_UNIT (t));
647 CHECK_VAR (DECL_INITIAL (t));
648 CHECK_NO_VAR (DECL_ATTRIBUTES (t));
649 CHECK_VAR (DECL_ABSTRACT_ORIGIN (t));
650 return false;
651 }
652
653 /* Check presence of pointers to decls in fields of a decl_with_vis T. */
654
655 static inline bool
656 mentions_vars_p_decl_with_vis (tree t)
657 {
658 if (mentions_vars_p_decl_common (t))
659 return true;
660
661 /* Accessor macro has side-effects, use field-name here. */
662 CHECK_NO_VAR (DECL_ASSEMBLER_NAME_RAW (t));
663 return false;
664 }
665
666 /* Check presence of pointers to decls in fields of a decl_non_common T. */
667
668 static inline bool
669 mentions_vars_p_decl_non_common (tree t)
670 {
671 if (mentions_vars_p_decl_with_vis (t))
672 return true;
673 CHECK_NO_VAR (DECL_RESULT_FLD (t));
674 return false;
675 }
676
677 /* Check presence of pointers to decls in fields of a decl_non_common T. */
678
679 static bool
680 mentions_vars_p_function (tree t)
681 {
682 if (mentions_vars_p_decl_non_common (t))
683 return true;
684 CHECK_NO_VAR (DECL_ARGUMENTS (t));
685 CHECK_NO_VAR (DECL_VINDEX (t));
686 CHECK_VAR (DECL_FUNCTION_PERSONALITY (t));
687 return false;
688 }
689
690 /* Check presence of pointers to decls in fields of a field_decl T. */
691
692 static bool
693 mentions_vars_p_field_decl (tree t)
694 {
695 if (mentions_vars_p_decl_common (t))
696 return true;
697 CHECK_VAR (DECL_FIELD_OFFSET (t));
698 CHECK_NO_VAR (DECL_BIT_FIELD_TYPE (t));
699 CHECK_NO_VAR (DECL_QUALIFIER (t));
700 CHECK_NO_VAR (DECL_FIELD_BIT_OFFSET (t));
701 CHECK_NO_VAR (DECL_FCONTEXT (t));
702 return false;
703 }
704
705 /* Check presence of pointers to decls in fields of a type T. */
706
707 static bool
708 mentions_vars_p_type (tree t)
709 {
710 if (mentions_vars_p_common (t))
711 return true;
712 CHECK_NO_VAR (TYPE_CACHED_VALUES (t));
713 CHECK_VAR (TYPE_SIZE (t));
714 CHECK_VAR (TYPE_SIZE_UNIT (t));
715 CHECK_NO_VAR (TYPE_ATTRIBUTES (t));
716 CHECK_NO_VAR (TYPE_NAME (t));
717
718 CHECK_VAR (TYPE_MIN_VALUE_RAW (t));
719 CHECK_VAR (TYPE_MAX_VALUE_RAW (t));
720
721 /* Accessor is for derived node types only. */
722 CHECK_NO_VAR (TYPE_LANG_SLOT_1 (t));
723
724 CHECK_VAR (TYPE_CONTEXT (t));
725 CHECK_NO_VAR (TYPE_CANONICAL (t));
726 CHECK_NO_VAR (TYPE_MAIN_VARIANT (t));
727 CHECK_NO_VAR (TYPE_NEXT_VARIANT (t));
728 return false;
729 }
730
731 /* Check presence of pointers to decls in fields of a BINFO T. */
732
733 static bool
734 mentions_vars_p_binfo (tree t)
735 {
736 unsigned HOST_WIDE_INT i, n;
737
738 if (mentions_vars_p_common (t))
739 return true;
740 CHECK_VAR (BINFO_VTABLE (t));
741 CHECK_NO_VAR (BINFO_OFFSET (t));
742 CHECK_NO_VAR (BINFO_VIRTUALS (t));
743 CHECK_NO_VAR (BINFO_VPTR_FIELD (t));
744 n = vec_safe_length (BINFO_BASE_ACCESSES (t));
745 for (i = 0; i < n; i++)
746 CHECK_NO_VAR (BINFO_BASE_ACCESS (t, i));
747 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
748 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
749 n = BINFO_N_BASE_BINFOS (t);
750 for (i = 0; i < n; i++)
751 CHECK_NO_VAR (BINFO_BASE_BINFO (t, i));
752 return false;
753 }
754
755 /* Check presence of pointers to decls in fields of a CONSTRUCTOR T. */
756
757 static bool
758 mentions_vars_p_constructor (tree t)
759 {
760 unsigned HOST_WIDE_INT idx;
761 constructor_elt *ce;
762
763 if (mentions_vars_p_typed (t))
764 return true;
765
766 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
767 {
768 CHECK_NO_VAR (ce->index);
769 CHECK_VAR (ce->value);
770 }
771 return false;
772 }
773
774 /* Check presence of pointers to decls in fields of an expression tree T. */
775
776 static bool
777 mentions_vars_p_expr (tree t)
778 {
779 int i;
780 if (mentions_vars_p_typed (t))
781 return true;
782 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
783 CHECK_VAR (TREE_OPERAND (t, i));
784 return false;
785 }
786
787 /* Check presence of pointers to decls in fields of an OMP_CLAUSE T. */
788
789 static bool
790 mentions_vars_p_omp_clause (tree t)
791 {
792 int i;
793 if (mentions_vars_p_common (t))
794 return true;
795 for (i = omp_clause_num_ops[OMP_CLAUSE_CODE (t)] - 1; i >= 0; --i)
796 CHECK_VAR (OMP_CLAUSE_OPERAND (t, i));
797 return false;
798 }
799
800 /* Check presence of pointers to decls that needs later fixup in T. */
801
802 static bool
803 mentions_vars_p (tree t)
804 {
805 switch (TREE_CODE (t))
806 {
807 case IDENTIFIER_NODE:
808 break;
809
810 case TREE_LIST:
811 CHECK_VAR (TREE_VALUE (t));
812 CHECK_VAR (TREE_PURPOSE (t));
813 CHECK_NO_VAR (TREE_CHAIN (t));
814 break;
815
816 case FIELD_DECL:
817 return mentions_vars_p_field_decl (t);
818
819 case LABEL_DECL:
820 case CONST_DECL:
821 case PARM_DECL:
822 case RESULT_DECL:
823 case IMPORTED_DECL:
824 case NAMESPACE_DECL:
825 case NAMELIST_DECL:
826 return mentions_vars_p_decl_common (t);
827
828 case VAR_DECL:
829 return mentions_vars_p_decl_with_vis (t);
830
831 case TYPE_DECL:
832 return mentions_vars_p_decl_non_common (t);
833
834 case FUNCTION_DECL:
835 return mentions_vars_p_function (t);
836
837 case TREE_BINFO:
838 return mentions_vars_p_binfo (t);
839
840 case PLACEHOLDER_EXPR:
841 return mentions_vars_p_common (t);
842
843 case BLOCK:
844 case TRANSLATION_UNIT_DECL:
845 case OPTIMIZATION_NODE:
846 case TARGET_OPTION_NODE:
847 break;
848
849 case CONSTRUCTOR:
850 return mentions_vars_p_constructor (t);
851
852 case OMP_CLAUSE:
853 return mentions_vars_p_omp_clause (t);
854
855 default:
856 if (TYPE_P (t))
857 {
858 if (mentions_vars_p_type (t))
859 return true;
860 }
861 else if (EXPR_P (t))
862 {
863 if (mentions_vars_p_expr (t))
864 return true;
865 }
866 else if (CONSTANT_CLASS_P (t))
867 CHECK_NO_VAR (TREE_TYPE (t));
868 else
869 gcc_unreachable ();
870 }
871 return false;
872 }
873
874
875 /* Return the resolution for the decl with index INDEX from DATA_IN. */
876
877 static enum ld_plugin_symbol_resolution
878 get_resolution (class data_in *data_in, unsigned index)
879 {
880 if (data_in->globals_resolution.exists ())
881 {
882 ld_plugin_symbol_resolution_t ret;
883 /* We can have references to functions that are not emitted, at least
884 in DECL_FUNCTION_PERSONALITY. So we can, and indeed have to,
885 return LDPR_UNKNOWN in some cases. */
886 if (data_in->globals_resolution.length () <= index)
887 return LDPR_UNKNOWN;
888 ret = data_in->globals_resolution[index];
889 return ret;
890 }
891 else
892 /* Delay resolution finding until decl merging. */
893 return LDPR_UNKNOWN;
894 }
895
896 /* We need to record resolutions until the symbol table is read. */
897 static void
898 register_resolution (struct lto_file_decl_data *file_data, tree decl,
899 enum ld_plugin_symbol_resolution resolution)
900 {
901 bool existed;
902 if (resolution == LDPR_UNKNOWN)
903 return;
904 if (!file_data->resolution_map)
905 file_data->resolution_map
906 = new hash_map<tree, ld_plugin_symbol_resolution>;
907 ld_plugin_symbol_resolution_t &res
908 = file_data->resolution_map->get_or_insert (decl, &existed);
909 if (!existed
910 || resolution == LDPR_PREVAILING_DEF_IRONLY
911 || resolution == LDPR_PREVAILING_DEF
912 || resolution == LDPR_PREVAILING_DEF_IRONLY_EXP)
913 res = resolution;
914 }
915
916 /* Register DECL with the global symbol table and change its
917 name if necessary to avoid name clashes for static globals across
918 different files. */
919
920 static void
921 lto_register_var_decl_in_symtab (class data_in *data_in, tree decl,
922 unsigned ix)
923 {
924 tree context;
925
926 /* Variable has file scope, not local. */
927 if (!TREE_PUBLIC (decl)
928 && !((context = decl_function_context (decl))
929 && auto_var_in_fn_p (decl, context)))
930 rest_of_decl_compilation (decl, 1, 0);
931
932 /* If this variable has already been declared, queue the
933 declaration for merging. */
934 if (TREE_PUBLIC (decl))
935 register_resolution (data_in->file_data,
936 decl, get_resolution (data_in, ix));
937 }
938
939
940 /* Register DECL with the global symbol table and change its
941 name if necessary to avoid name clashes for static globals across
942 different files. DATA_IN contains descriptors and tables for the
943 file being read. */
944
945 static void
946 lto_register_function_decl_in_symtab (class data_in *data_in, tree decl,
947 unsigned ix)
948 {
949 /* If this variable has already been declared, queue the
950 declaration for merging. */
951 if (TREE_PUBLIC (decl) && !DECL_ABSTRACT_P (decl))
952 register_resolution (data_in->file_data,
953 decl, get_resolution (data_in, ix));
954 }
955
956 /* Check if T is a decl that needs its resolution info registered and
957 if so register it. */
957
958 static void
959 lto_maybe_register_decl (class data_in *data_in, tree t, unsigned ix)
960 {
961 if (TREE_CODE (t) == VAR_DECL)
962 lto_register_var_decl_in_symtab (data_in, t, ix);
963 else if (TREE_CODE (t) == FUNCTION_DECL
964 && !fndecl_built_in_p (t))
965 lto_register_function_decl_in_symtab (data_in, t, ix);
966 }
967
968
969 /* Re-materialize type T in the type variant list and
970 in the pointer/reference-to chains. */
971
972 static void
973 lto_fixup_prevailing_type (tree t)
974 {
975 /* The following re-creates proper variant lists while fixing up
976 the variant leaders. We do not stream TYPE_NEXT_VARIANT so the
977 variant list state before fixup is broken. */
978
979 /* If we are not our own variant leader link us into our new leaders
980 variant list. */
981 if (TYPE_MAIN_VARIANT (t) != t)
982 {
983 tree mv = TYPE_MAIN_VARIANT (t);
984 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
985 TYPE_NEXT_VARIANT (mv) = t;
986 }
987
988 /* The following reconstructs the pointer chains
989 of the new pointed-to type if we are a main variant. We do
990 not stream those so they are broken before fixup. */
991 if (TREE_CODE (t) == POINTER_TYPE
992 && TYPE_MAIN_VARIANT (t) == t)
993 {
994 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (TREE_TYPE (t));
995 TYPE_POINTER_TO (TREE_TYPE (t)) = t;
996 }
997 else if (TREE_CODE (t) == REFERENCE_TYPE
998 && TYPE_MAIN_VARIANT (t) == t)
999 {
1000 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (TREE_TYPE (t));
1001 TYPE_REFERENCE_TO (TREE_TYPE (t)) = t;
1002 }
1003 }
1004
1005
1006 /* We keep prevailing tree SCCs in a hashtable with manual collision
1007 handling (in case all hashes compare the same) and keep the colliding
1008 entries in the tree_scc->next chain. */
1009
1010 struct tree_scc
1011 {
1012 tree_scc *next;
1013 /* Hash of the whole SCC. */
1014 hashval_t hash;
1015 /* Number of trees in the SCC. */
1016 unsigned len;
1017 /* Number of possible entries into the SCC (tree nodes [0..entry_len-1]
1018 which share the same individual tree hash). */
1019 unsigned entry_len;
1020 /* The members of the SCC.
1021 We only need to remember the first entry node candidate for prevailing
1022 SCCs (but of course have access to all entries for SCCs we are
1023 processing).
1024 ??? For prevailing SCCs we really only need hash and the first
1025 entry candidate, but that's too awkward to implement. */
1026 tree entries[1];
1027 };
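/* ENTRIES above is a trailing array: unify_scc allocates
   sizeof (tree_scc) + (len - 1) * sizeof (tree) so that all LEN members
   of a freshly read SCC fit, whereas the permanent copy it makes for a
   prevailing SCC copies just sizeof (tree_scc) and therefore keeps only
   the first entry.  */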
1028
1029 struct tree_scc_hasher : nofree_ptr_hash <tree_scc>
1030 {
1031 static inline hashval_t hash (const tree_scc *);
1032 static inline bool equal (const tree_scc *, const tree_scc *);
1033 };
1034
1035 hashval_t
1036 tree_scc_hasher::hash (const tree_scc *scc)
1037 {
1038 return scc->hash;
1039 }
1040
1041 bool
1042 tree_scc_hasher::equal (const tree_scc *scc1, const tree_scc *scc2)
1043 {
1044 if (scc1->hash != scc2->hash
1045 || scc1->len != scc2->len
1046 || scc1->entry_len != scc2->entry_len)
1047 return false;
1048 return true;
1049 }
1050
1051 static hash_table<tree_scc_hasher> *tree_scc_hash;
1052 static struct obstack tree_scc_hash_obstack;
1053
1054 static unsigned long num_merged_types;
1055 static unsigned long num_prevailing_types;
1056 static unsigned long num_type_scc_trees;
1057 static unsigned long total_scc_size;
1058 static unsigned long num_sccs_read;
1059 static unsigned long num_unshared_trees_read;
1060 static unsigned long total_scc_size_merged;
1061 static unsigned long num_sccs_merged;
1062 static unsigned long num_scc_compares;
1063 static unsigned long num_scc_compare_collisions;
1064
1065
1066 /* Compare the two entries T1 and T2 of two SCCs that are possibly equal,
1067 recursing through in-SCC tree edges. Returns true if the SCCs entered
1068 through T1 and T2 are equal and fills in *MAP with the pairs of
1069 SCC entries we visited, starting with (*MAP)[0] = T1 and (*MAP)[1] = T2. */
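/* Membership tests rely on two scratch bits: TREE_VISITED marks nodes
   belonging to the SCC being read in (set by unify_scc), while
   TREE_ASM_WRITTEN marks SCC members already visited during the current
   comparison.  */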
1070
1071 static bool
1072 compare_tree_sccs_1 (tree t1, tree t2, tree **map)
1073 {
1074 enum tree_code code;
1075
1076 /* Mark already visited nodes. */
1077 TREE_ASM_WRITTEN (t2) = 1;
1078
1079 /* Push the pair onto map. */
1080 (*map)[0] = t1;
1081 (*map)[1] = t2;
1082 *map = *map + 2;
1083
1084 /* Compare value-fields. */
1085 #define compare_values(X) \
1086 do { \
1087 if (X(t1) != X(t2)) \
1088 return false; \
1089 } while (0)
1090
1091 compare_values (TREE_CODE);
1092 code = TREE_CODE (t1);
1093
1094 /* If we end up comparing translation unit decls we either forgot to mark
1095 some SCC as local or we compare too much. */
1096 gcc_checking_assert (code != TRANSLATION_UNIT_DECL);
1097
1098 if (!TYPE_P (t1))
1099 {
1100 compare_values (TREE_SIDE_EFFECTS);
1101 compare_values (TREE_CONSTANT);
1102 compare_values (TREE_READONLY);
1103 compare_values (TREE_PUBLIC);
1104 }
1105 compare_values (TREE_ADDRESSABLE);
1106 compare_values (TREE_THIS_VOLATILE);
1107 if (DECL_P (t1))
1108 compare_values (DECL_UNSIGNED);
1109 else if (TYPE_P (t1))
1110 compare_values (TYPE_UNSIGNED);
1111 if (TYPE_P (t1))
1112 compare_values (TYPE_ARTIFICIAL);
1113 else
1114 compare_values (TREE_NO_WARNING);
1115 compare_values (TREE_NOTHROW);
1116 compare_values (TREE_STATIC);
1117 if (code != TREE_BINFO)
1118 compare_values (TREE_PRIVATE);
1119 compare_values (TREE_PROTECTED);
1120 compare_values (TREE_DEPRECATED);
1121 if (TYPE_P (t1))
1122 {
1123 if (AGGREGATE_TYPE_P (t1))
1124 compare_values (TYPE_REVERSE_STORAGE_ORDER);
1125 else
1126 compare_values (TYPE_SATURATING);
1127 compare_values (TYPE_ADDR_SPACE);
1128 }
1129 else if (code == SSA_NAME)
1130 compare_values (SSA_NAME_IS_DEFAULT_DEF);
1131
1132 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
1133 {
1134 if (wi::to_wide (t1) != wi::to_wide (t2))
1135 return false;
1136 }
1137
1138 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1139 {
1140 /* ??? No suitable compare routine available. */
1141 REAL_VALUE_TYPE r1 = TREE_REAL_CST (t1);
1142 REAL_VALUE_TYPE r2 = TREE_REAL_CST (t2);
1143 if (r1.cl != r2.cl
1144 || r1.decimal != r2.decimal
1145 || r1.sign != r2.sign
1146 || r1.signalling != r2.signalling
1147 || r1.canonical != r2.canonical
1148 || r1.uexp != r2.uexp)
1149 return false;
1150 for (unsigned i = 0; i < SIGSZ; ++i)
1151 if (r1.sig[i] != r2.sig[i])
1152 return false;
1153 }
1154
1155 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1156 if (!fixed_compare (EQ_EXPR,
1157 TREE_FIXED_CST_PTR (t1), TREE_FIXED_CST_PTR (t2)))
1158 return false;
1159
1160 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1161 {
1162 compare_values (VECTOR_CST_LOG2_NPATTERNS);
1163 compare_values (VECTOR_CST_NELTS_PER_PATTERN);
1164 }
1165
1166 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1167 {
1168 compare_values (DECL_MODE);
1169 compare_values (DECL_NONLOCAL);
1170 compare_values (DECL_VIRTUAL_P);
1171 compare_values (DECL_IGNORED_P);
1172 compare_values (DECL_ABSTRACT_P);
1173 compare_values (DECL_ARTIFICIAL);
1174 compare_values (DECL_USER_ALIGN);
1175 compare_values (DECL_PRESERVE_P);
1176 compare_values (DECL_EXTERNAL);
1177 compare_values (DECL_NOT_GIMPLE_REG_P);
1178 compare_values (DECL_ALIGN);
1179 if (code == LABEL_DECL)
1180 {
1181 compare_values (EH_LANDING_PAD_NR);
1182 compare_values (LABEL_DECL_UID);
1183 }
1184 else if (code == FIELD_DECL)
1185 {
1186 compare_values (DECL_PACKED);
1187 compare_values (DECL_NONADDRESSABLE_P);
1188 compare_values (DECL_PADDING_P);
1189 compare_values (DECL_FIELD_ABI_IGNORED);
1190 compare_values (DECL_FIELD_CXX_ZERO_WIDTH_BIT_FIELD);
1191 compare_values (DECL_OFFSET_ALIGN);
1192 }
1193 else if (code == VAR_DECL)
1194 {
1195 compare_values (DECL_HAS_DEBUG_EXPR_P);
1196 compare_values (DECL_NONLOCAL_FRAME);
1197 }
1198 if (code == RESULT_DECL
1199 || code == PARM_DECL
1200 || code == VAR_DECL)
1201 {
1202 compare_values (DECL_BY_REFERENCE);
1203 if (code == VAR_DECL
1204 || code == PARM_DECL)
1205 compare_values (DECL_HAS_VALUE_EXPR_P);
1206 }
1207 }
1208
1209 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1210 compare_values (DECL_REGISTER);
1211
1212 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1213 {
1214 compare_values (DECL_COMMON);
1215 compare_values (DECL_DLLIMPORT_P);
1216 compare_values (DECL_WEAK);
1217 compare_values (DECL_SEEN_IN_BIND_EXPR_P);
1218 compare_values (DECL_COMDAT);
1219 compare_values (DECL_VISIBILITY);
1220 compare_values (DECL_VISIBILITY_SPECIFIED);
1221 if (code == VAR_DECL)
1222 {
1223 compare_values (DECL_HARD_REGISTER);
1224 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
1225 compare_values (DECL_IN_CONSTANT_POOL);
1226 }
1227 }
1228
1229 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1230 {
1231 compare_values (DECL_BUILT_IN_CLASS);
1232 compare_values (DECL_STATIC_CONSTRUCTOR);
1233 compare_values (DECL_STATIC_DESTRUCTOR);
1234 compare_values (DECL_UNINLINABLE);
1235 compare_values (DECL_POSSIBLY_INLINED);
1236 compare_values (DECL_IS_NOVOPS);
1237 compare_values (DECL_IS_RETURNS_TWICE);
1238 compare_values (DECL_IS_MALLOC);
1239 compare_values (FUNCTION_DECL_DECL_TYPE);
1240 compare_values (DECL_DECLARED_INLINE_P);
1241 compare_values (DECL_STATIC_CHAIN);
1242 compare_values (DECL_NO_INLINE_WARNING_P);
1243 compare_values (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT);
1244 compare_values (DECL_NO_LIMIT_STACK);
1245 compare_values (DECL_DISREGARD_INLINE_LIMITS);
1246 compare_values (DECL_PURE_P);
1247 compare_values (DECL_LOOPING_CONST_OR_PURE_P);
1248 compare_values (DECL_IS_REPLACEABLE_OPERATOR);
1249 compare_values (DECL_FINAL_P);
1250 compare_values (DECL_CXX_CONSTRUCTOR_P);
1251 compare_values (DECL_CXX_DESTRUCTOR_P);
1252 if (DECL_BUILT_IN_CLASS (t1) != NOT_BUILT_IN)
1253 compare_values (DECL_UNCHECKED_FUNCTION_CODE);
1254 }
1255
1256 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1257 {
1258 compare_values (TYPE_MODE);
1259 compare_values (TYPE_NEEDS_CONSTRUCTING);
1260 if (RECORD_OR_UNION_TYPE_P (t1))
1261 {
1262 compare_values (TYPE_TRANSPARENT_AGGR);
1263 compare_values (TYPE_FINAL_P);
1264 compare_values (TYPE_CXX_ODR_P);
1265 }
1266 else if (code == ARRAY_TYPE)
1267 compare_values (TYPE_NONALIASED_COMPONENT);
1268 if (code == ARRAY_TYPE || code == INTEGER_TYPE)
1269 compare_values (TYPE_STRING_FLAG);
1270 if (AGGREGATE_TYPE_P (t1))
1271 compare_values (TYPE_TYPELESS_STORAGE);
1272 compare_values (TYPE_EMPTY_P);
1273 compare_values (TYPE_NO_NAMED_ARGS_STDARG_P);
1274 compare_values (TYPE_PACKED);
1275 compare_values (TYPE_RESTRICT);
1276 compare_values (TYPE_USER_ALIGN);
1277 compare_values (TYPE_READONLY);
1278 compare_values (TYPE_PRECISION);
1279 compare_values (TYPE_ALIGN);
1280 /* Do not compare TYPE_ALIAS_SET. Doing so introduce ordering issues
1281 with calls to get_alias_set which may initialize it for streamed
1282 in types. */
1283 }
1284
1285 /* We don't want to compare locations, so there is nothing to compare
1286 for TS_EXP. */
1287
1288 /* BLOCKs are function local and we don't merge anything there, so
1289 simply refuse to merge. */
1290 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
1291 return false;
1292
1293 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1294 if (strcmp (TRANSLATION_UNIT_LANGUAGE (t1),
1295 TRANSLATION_UNIT_LANGUAGE (t2)) != 0)
1296 return false;
1297
1298 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
1299 if (!cl_target_option_eq (TREE_TARGET_OPTION (t1), TREE_TARGET_OPTION (t2)))
1300 return false;
1301
1302 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1303 if (!cl_optimization_option_eq (TREE_OPTIMIZATION (t1),
1304 TREE_OPTIMIZATION (t2)))
1305 return false;
1306
1307 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1308 if (vec_safe_length (BINFO_BASE_ACCESSES (t1))
1309 != vec_safe_length (BINFO_BASE_ACCESSES (t2)))
1310 return false;
1311
1312 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1313 {
1314 compare_values (CLOBBER_KIND);
1315 compare_values (CONSTRUCTOR_NELTS);
1316 }
1317
1318 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1319 if (IDENTIFIER_LENGTH (t1) != IDENTIFIER_LENGTH (t2)
1320 || memcmp (IDENTIFIER_POINTER (t1), IDENTIFIER_POINTER (t2),
1321 IDENTIFIER_LENGTH (t1)) != 0)
1322 return false;
1323
1324 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1325 if (TREE_STRING_LENGTH (t1) != TREE_STRING_LENGTH (t2)
1326 || memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
1327 TREE_STRING_LENGTH (t1)) != 0)
1328 return false;
1329
1330 if (code == OMP_CLAUSE)
1331 {
1332 compare_values (OMP_CLAUSE_CODE);
1333 switch (OMP_CLAUSE_CODE (t1))
1334 {
1335 case OMP_CLAUSE_DEFAULT:
1336 compare_values (OMP_CLAUSE_DEFAULT_KIND);
1337 break;
1338 case OMP_CLAUSE_SCHEDULE:
1339 compare_values (OMP_CLAUSE_SCHEDULE_KIND);
1340 break;
1341 case OMP_CLAUSE_DEPEND:
1342 compare_values (OMP_CLAUSE_DEPEND_KIND);
1343 break;
1344 case OMP_CLAUSE_MAP:
1345 compare_values (OMP_CLAUSE_MAP_KIND);
1346 break;
1347 case OMP_CLAUSE_PROC_BIND:
1348 compare_values (OMP_CLAUSE_PROC_BIND_KIND);
1349 break;
1350 case OMP_CLAUSE_REDUCTION:
1351 compare_values (OMP_CLAUSE_REDUCTION_CODE);
1352 compare_values (OMP_CLAUSE_REDUCTION_GIMPLE_INIT);
1353 compare_values (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE);
1354 break;
1355 default:
1356 break;
1357 }
1358 }
1359
1360 #undef compare_values
1361
1362
1363 /* Compare pointer fields. */
1364
1365 /* Recurse. Search & Replaced from DFS_write_tree_body.
1366 Folding the early checks into the compare_tree_edges recursion
1367 macro makes debugging way quicker as you are able to break on
1368 compare_tree_sccs_1 and simply finish until a call returns false
1369 to spot the SCC members with the difference. */
1370 #define compare_tree_edges(E1, E2) \
1371 do { \
1372 tree t1_ = (E1), t2_ = (E2); \
1373 if (t1_ != t2_ \
1374 && (!t1_ || !t2_ \
1375 || !TREE_VISITED (t2_) \
1376 || (!TREE_ASM_WRITTEN (t2_) \
1377 && !compare_tree_sccs_1 (t1_, t2_, map)))) \
1378 return false; \
1379 /* Only non-NULL trees outside of the SCC may compare equal. */ \
1380 gcc_checking_assert (t1_ != t2_ || (!t2_ || !TREE_VISITED (t2_))); \
1381 } while (0)
1382
1383 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1384 {
1385 if (code != IDENTIFIER_NODE)
1386 compare_tree_edges (TREE_TYPE (t1), TREE_TYPE (t2));
1387 }
1388
1389 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1390 {
1391 /* Note that the number of elements for EXPR has already been emitted
1392 in EXPR's header (see streamer_write_tree_header). */
1393 unsigned int count = vector_cst_encoded_nelts (t1);
1394 for (unsigned int i = 0; i < count; ++i)
1395 compare_tree_edges (VECTOR_CST_ENCODED_ELT (t1, i),
1396 VECTOR_CST_ENCODED_ELT (t2, i));
1397 }
1398
1399 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1400 {
1401 compare_tree_edges (TREE_REALPART (t1), TREE_REALPART (t2));
1402 compare_tree_edges (TREE_IMAGPART (t1), TREE_IMAGPART (t2));
1403 }
1404
1405 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1406 {
1407 compare_tree_edges (DECL_NAME (t1), DECL_NAME (t2));
1408 /* ??? Global decls from different TUs have non-matching
1409 TRANSLATION_UNIT_DECLs. Only consider a small set of
1410 decls equivalent, we should not end up merging others. */
1411 if ((code == TYPE_DECL
1412 || code == NAMESPACE_DECL
1413 || code == IMPORTED_DECL
1414 || code == CONST_DECL
1415 || (VAR_OR_FUNCTION_DECL_P (t1)
1416 && (TREE_PUBLIC (t1) || DECL_EXTERNAL (t1))))
1417 && DECL_FILE_SCOPE_P (t1) && DECL_FILE_SCOPE_P (t2))
1418 ;
1419 else
1420 compare_tree_edges (DECL_CONTEXT (t1), DECL_CONTEXT (t2));
1421 }
1422
1423 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1424 {
1425 compare_tree_edges (DECL_SIZE (t1), DECL_SIZE (t2));
1426 compare_tree_edges (DECL_SIZE_UNIT (t1), DECL_SIZE_UNIT (t2));
1427 compare_tree_edges (DECL_ATTRIBUTES (t1), DECL_ATTRIBUTES (t2));
1428 compare_tree_edges (DECL_ABSTRACT_ORIGIN (t1), DECL_ABSTRACT_ORIGIN (t2));
1429 if ((code == VAR_DECL
1430 || code == PARM_DECL)
1431 && DECL_HAS_VALUE_EXPR_P (t1))
1432 compare_tree_edges (DECL_VALUE_EXPR (t1), DECL_VALUE_EXPR (t2));
1433 if (code == VAR_DECL
1434 && DECL_HAS_DEBUG_EXPR_P (t1))
1435 compare_tree_edges (DECL_DEBUG_EXPR (t1), DECL_DEBUG_EXPR (t2));
1436 /* LTO specific edges. */
1437 if (code != FUNCTION_DECL
1438 && code != TRANSLATION_UNIT_DECL)
1439 compare_tree_edges (DECL_INITIAL (t1), DECL_INITIAL (t2));
1440 }
1441
1442 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
1443 {
1444 if (code == FUNCTION_DECL)
1445 {
1446 tree a1, a2;
1447 for (a1 = DECL_ARGUMENTS (t1), a2 = DECL_ARGUMENTS (t2);
1448 a1 || a2;
1449 a1 = TREE_CHAIN (a1), a2 = TREE_CHAIN (a2))
1450 compare_tree_edges (a1, a2);
1451 compare_tree_edges (DECL_RESULT (t1), DECL_RESULT (t2));
1452 }
1453 else if (code == TYPE_DECL)
1454 compare_tree_edges (DECL_ORIGINAL_TYPE (t1), DECL_ORIGINAL_TYPE (t2));
1455 }
1456
1457 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1458 {
1459 /* Make sure we don't inadvertently set the assembler name. */
1460 if (DECL_ASSEMBLER_NAME_SET_P (t1))
1461 compare_tree_edges (DECL_ASSEMBLER_NAME (t1),
1462 DECL_ASSEMBLER_NAME (t2));
1463 }
1464
1465 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1466 {
1467 compare_tree_edges (DECL_FIELD_OFFSET (t1), DECL_FIELD_OFFSET (t2));
1468 compare_tree_edges (DECL_BIT_FIELD_TYPE (t1), DECL_BIT_FIELD_TYPE (t2));
1469 compare_tree_edges (DECL_BIT_FIELD_REPRESENTATIVE (t1),
1470 DECL_BIT_FIELD_REPRESENTATIVE (t2));
1471 compare_tree_edges (DECL_FIELD_BIT_OFFSET (t1),
1472 DECL_FIELD_BIT_OFFSET (t2));
1473 compare_tree_edges (DECL_FCONTEXT (t1), DECL_FCONTEXT (t2));
1474 }
1475
1476 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1477 {
1478 compare_tree_edges (DECL_FUNCTION_PERSONALITY (t1),
1479 DECL_FUNCTION_PERSONALITY (t2));
1480 compare_tree_edges (DECL_VINDEX (t1), DECL_VINDEX (t2));
1481 compare_tree_edges (DECL_FUNCTION_SPECIFIC_TARGET (t1),
1482 DECL_FUNCTION_SPECIFIC_TARGET (t2));
1483 compare_tree_edges (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t1),
1484 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t2));
1485 }
1486
1487 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1488 {
1489 compare_tree_edges (TYPE_SIZE (t1), TYPE_SIZE (t2));
1490 compare_tree_edges (TYPE_SIZE_UNIT (t1), TYPE_SIZE_UNIT (t2));
1491 compare_tree_edges (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2));
1492 compare_tree_edges (TYPE_NAME (t1), TYPE_NAME (t2));
1493 /* Do not compare TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
1494 reconstructed during fixup. */
1495 /* Do not compare TYPE_NEXT_VARIANT, we reconstruct the variant lists
1496 during fixup. */
1497 compare_tree_edges (TYPE_MAIN_VARIANT (t1), TYPE_MAIN_VARIANT (t2));
1498 /* ??? Global types from different TUs have non-matching
1499 TRANSLATION_UNIT_DECLs. Still merge them if they are otherwise
1500 equal. */
1501 if (TYPE_FILE_SCOPE_P (t1) && TYPE_FILE_SCOPE_P (t2))
1502 ;
1503 else
1504 compare_tree_edges (TYPE_CONTEXT (t1), TYPE_CONTEXT (t2));
1505 /* TYPE_CANONICAL is re-computed during type merging, so do not
1506 compare it here. */
1507 compare_tree_edges (TYPE_STUB_DECL (t1), TYPE_STUB_DECL (t2));
1508 }
1509
1510 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1511 {
1512 if (code == ARRAY_TYPE)
1513 compare_tree_edges (TYPE_DOMAIN (t1), TYPE_DOMAIN (t2));
1514 else if (RECORD_OR_UNION_TYPE_P (t1))
1515 {
1516 tree f1, f2;
1517 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
1518 f1 || f2;
1519 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
1520 compare_tree_edges (f1, f2);
1521 }
1522 else if (code == FUNCTION_TYPE
1523 || code == METHOD_TYPE)
1524 compare_tree_edges (TYPE_ARG_TYPES (t1), TYPE_ARG_TYPES (t2));
1525
1526 if (!POINTER_TYPE_P (t1))
1527 compare_tree_edges (TYPE_MIN_VALUE_RAW (t1), TYPE_MIN_VALUE_RAW (t2));
1528 compare_tree_edges (TYPE_MAX_VALUE_RAW (t1), TYPE_MAX_VALUE_RAW (t2));
1529 }
1530
1531 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1532 {
1533 compare_tree_edges (TREE_PURPOSE (t1), TREE_PURPOSE (t2));
1534 compare_tree_edges (TREE_VALUE (t1), TREE_VALUE (t2));
1535 compare_tree_edges (TREE_CHAIN (t1), TREE_CHAIN (t2));
1536 }
1537
1538 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1539 for (int i = 0; i < TREE_VEC_LENGTH (t1); i++)
1540 compare_tree_edges (TREE_VEC_ELT (t1, i), TREE_VEC_ELT (t2, i));
1541
1542 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1543 {
1544 for (int i = 0; i < TREE_OPERAND_LENGTH (t1); i++)
1545 compare_tree_edges (TREE_OPERAND (t1, i),
1546 TREE_OPERAND (t2, i));
1547
1548 /* BLOCKs are function local and we don't merge anything there. */
1549 if (TREE_BLOCK (t1) || TREE_BLOCK (t2))
1550 return false;
1551 }
1552
1553 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1554 {
1555 unsigned i;
1556 tree t;
1557 /* Lengths have already been compared above. */
1558 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t1), i, t)
1559 compare_tree_edges (t, BINFO_BASE_BINFO (t2, i));
1560 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t1), i, t)
1561 compare_tree_edges (t, BINFO_BASE_ACCESS (t2, i));
1562 compare_tree_edges (BINFO_OFFSET (t1), BINFO_OFFSET (t2));
1563 compare_tree_edges (BINFO_VTABLE (t1), BINFO_VTABLE (t2));
1564 compare_tree_edges (BINFO_VPTR_FIELD (t1), BINFO_VPTR_FIELD (t2));
1565 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1566 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1567 }
1568
1569 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1570 {
1571 unsigned i;
1572 tree index, value;
1573 /* Lengths have already been compared above. */
1574 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t1), i, index, value)
1575 {
1576 compare_tree_edges (index, CONSTRUCTOR_ELT (t2, i)->index);
1577 compare_tree_edges (value, CONSTRUCTOR_ELT (t2, i)->value);
1578 }
1579 }
1580
1581 if (code == OMP_CLAUSE)
1582 {
1583 int i;
1584
1585 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t1)]; i++)
1586 compare_tree_edges (OMP_CLAUSE_OPERAND (t1, i),
1587 OMP_CLAUSE_OPERAND (t2, i));
1588 compare_tree_edges (OMP_CLAUSE_CHAIN (t1), OMP_CLAUSE_CHAIN (t2));
1589 }
1590
1591 #undef compare_tree_edges
1592
1593 return true;
1594 }
1595
1596 /* Compare the tree scc SCC to the prevailing candidate PSCC, filling
1597 out MAP if they are equal. */
1598
1599 static bool
1600 compare_tree_sccs (tree_scc *pscc, tree_scc *scc,
1601 tree *map)
1602 {
1603 /* Assume SCC entry hashes are sorted after their cardinality. Which
1604 means we can simply take the first n-tuple of equal hashes
1605 (which is recorded as entry_len) and do n SCC entry candidate
1606 comparisons. */
1607 for (unsigned i = 0; i < pscc->entry_len; ++i)
1608 {
1609 tree *mapp = map;
1610 num_scc_compare_collisions++;
1611 if (compare_tree_sccs_1 (pscc->entries[0], scc->entries[i], &mapp))
1612 {
1613 /* Equal - no need to reset TREE_VISITED or TREE_ASM_WRITTEN
1614 on the scc as all trees will be freed. */
1615 return true;
1616 }
1617 /* Reset TREE_ASM_WRITTEN on scc for the next compare or in case
1618 the SCC prevails. */
1619 for (unsigned j = 0; j < scc->len; ++j)
1620 TREE_ASM_WRITTEN (scc->entries[j]) = 0;
1621 }
1622
1623 return false;
1624 }
1625
1626 /* qsort comparison function to sort a map of pointer pairs by the
1627 second pointer. */
1628
1629 static int
1630 cmp_tree (const void *p1_, const void *p2_)
1631 {
1632 tree *p1 = (tree *)(const_cast<void *>(p1_));
1633 tree *p2 = (tree *)(const_cast<void *>(p2_));
1634 if (p1[1] == p2[1])
1635 return 0;
1636 return ((uintptr_t)p1[1] < (uintptr_t)p2[1]) ? -1 : 1;
1637 }
1638
1639 /* A new SCC of size 1 containing T was streamed in from DATA_IN and not
1640 merged. Register it in the reader cache at index FROM. */
1641
1642 static void
1643 process_dref (class data_in *data_in, tree t, unsigned from)
1644 {
1645 struct streamer_tree_cache_d *cache = data_in->reader_cache;
1646 /* If we got a debug reference queued, see if the prevailing
1647 tree has a debug reference and if not, register the one
1648 for the tree we are about to throw away. */
1649 if (dref_queue.length () == 1)
1650 {
1651 dref_entry e = dref_queue.pop ();
1652 gcc_assert (e.decl
1653 == streamer_tree_cache_get_tree (cache, from));
1654 const char *sym;
1655 unsigned HOST_WIDE_INT off;
1656 if (!debug_hooks->die_ref_for_decl (t, &sym, &off))
1657 debug_hooks->register_external_die (t, e.sym, e.off);
1658 }
1659 }
1660
1661 /* Try to unify the SCC with nodes FROM to FROM + LEN in CACHE and
1662 hash value SCC_HASH with an already recorded SCC. Return true if
1663 that was successful, otherwise return false. */
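/* The overall flow is: build a temporary tree_scc on the stack for the
   just-read nodes, look up the candidate chain hashed by SCC_HASH, and try
   compare_tree_sccs against each candidate.  On a match the reader cache
   slots are redirected to the prevailing nodes and the freshly read nodes
   are freed; otherwise the SCC is copied to permanent storage and linked
   into the candidate chain.  */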
1664
1665 static bool
1666 unify_scc (class data_in *data_in, unsigned from,
1667 unsigned len, unsigned scc_entry_len, hashval_t scc_hash)
1668 {
1669 bool unified_p = false;
1670 struct streamer_tree_cache_d *cache = data_in->reader_cache;
1671 tree_scc *scc
1672 = (tree_scc *) alloca (sizeof (tree_scc) + (len - 1) * sizeof (tree));
1673 scc->next = NULL;
1674 scc->hash = scc_hash;
1675 scc->len = len;
1676 scc->entry_len = scc_entry_len;
1677 for (unsigned i = 0; i < len; ++i)
1678 {
1679 tree t = streamer_tree_cache_get_tree (cache, from + i);
1680 scc->entries[i] = t;
1681 /* These types should be streamed as unshared. */
1682 gcc_checking_assert
1683 (!(TREE_CODE (t) == TRANSLATION_UNIT_DECL
1684 || (VAR_OR_FUNCTION_DECL_P (t)
1685 && !(TREE_PUBLIC (t) || DECL_EXTERNAL (t)))
1686 || TREE_CODE (t) == LABEL_DECL
1687 || (TREE_CODE (t) == NAMESPACE_DECL && !DECL_NAME (t))
1688 || (TYPE_P (t)
1689 && type_with_linkage_p (TYPE_MAIN_VARIANT (t))
1690 && type_in_anonymous_namespace_p (TYPE_MAIN_VARIANT (t)))));
1691 }
1692
1693 /* Look for the list of candidate SCCs to compare against. */
1694 tree_scc **slot;
1695 slot = tree_scc_hash->find_slot_with_hash (scc, scc_hash, INSERT);
1696 if (*slot)
1697 {
1698 /* Try unifying against each candidate. */
1699 num_scc_compares++;
1700
1701 /* Set TREE_VISITED on the scc so we can easily identify tree nodes
1702 outside of the scc when following tree edges. Make sure
1703 that TREE_ASM_WRITTEN is unset so we can use it as 2nd bit
1704 to track whether we visited the SCC member during the compare.
1705 We cannot use TREE_VISITED on the pscc members as the extended
1706 scc and pscc can overlap. */
1707 for (unsigned i = 0; i < scc->len; ++i)
1708 {
1709 TREE_VISITED (scc->entries[i]) = 1;
1710 gcc_checking_assert (!TREE_ASM_WRITTEN (scc->entries[i]));
1711 }
1712
1713 tree *map = XALLOCAVEC (tree, 2 * len);
1714 for (tree_scc *pscc = *slot; pscc; pscc = pscc->next)
1715 {
1716 if (!compare_tree_sccs (pscc, scc, map))
1717 continue;
1718
1719 /* Found an equal SCC. */
1720 unified_p = true;
1721 num_scc_compare_collisions--;
1722 num_sccs_merged++;
1723 total_scc_size_merged += len;
1724
1725 if (flag_checking)
1726 for (unsigned i = 0; i < len; ++i)
1727 {
1728 tree t = map[2*i+1];
1729 enum tree_code code = TREE_CODE (t);
1730 /* IDENTIFIER_NODEs should be singletons and are merged by the
1731 streamer. The others should be singletons, too, and we
1732 should not merge them in any way. */
1733 gcc_assert (code != TRANSLATION_UNIT_DECL
1734 && code != IDENTIFIER_NODE);
1735 }
1736
1737 /* Fixup the streamer cache with the prevailing nodes according
1738 to the tree node mapping computed by compare_tree_sccs. */
1739 if (len == 1)
1740 {
1741 process_dref (data_in, pscc->entries[0], from);
1742 lto_maybe_register_decl (data_in, pscc->entries[0], from);
1743 streamer_tree_cache_replace_tree (cache, pscc->entries[0], from);
1744 }
1745 else
1746 {
1747 tree *map2 = XALLOCAVEC (tree, 2 * len);
1748 for (unsigned i = 0; i < len; ++i)
1749 {
1750 map2[i*2] = (tree)(uintptr_t)(from + i);
1751 map2[i*2+1] = scc->entries[i];
1752 }
1753 qsort (map2, len, 2 * sizeof (tree), cmp_tree);
1754 qsort (map, len, 2 * sizeof (tree), cmp_tree);
1755 for (unsigned i = 0; i < len; ++i)
1756 {
1757 lto_maybe_register_decl (data_in, map[2*i],
1758 (uintptr_t)map2[2*i]);
1759 streamer_tree_cache_replace_tree (cache, map[2*i],
1760 (uintptr_t)map2[2*i]);
1761 }
1762 }
1763
1764 /* Free the tree nodes from the read SCC. */
1765 data_in->location_cache.revert_location_cache ();
1766 for (unsigned i = 0; i < len; ++i)
1767 {
1768 if (TYPE_P (scc->entries[i]))
1769 num_merged_types++;
1770 free_node (scc->entries[i]);
1771 }
1772
1773 /* Drop DIE references.
1774 ??? Do as in the size-one SCC case which involves sorting
1775 the queue. */
1776 dref_queue.truncate (0);
1777
1778 break;
1779 }
1780
1781 /* Reset TREE_VISITED if we didn't unify the SCC with another. */
1782 if (!unified_p)
1783 for (unsigned i = 0; i < scc->len; ++i)
1784 TREE_VISITED (scc->entries[i]) = 0;
1785 }
1786
1787 /* If we didn't unify it with any candidate, duplicate the relevant
1788 pieces to permanent storage and link it into the chain. */
1789 if (!unified_p)
1790 {
1791 tree_scc *pscc
1792 = XOBNEWVAR (&tree_scc_hash_obstack, tree_scc, sizeof (tree_scc));
1793 memcpy (pscc, scc, sizeof (tree_scc));
1794 pscc->next = (*slot);
1795 *slot = pscc;
1796 }
1797 return unified_p;
1798 }
1799
1800 typedef int_hash<unsigned, 0, UINT_MAX> code_id_hash;
1801
1802 /* Do the registering necessary once a new tree is fully streamed in
1803 (including all the trees it refers to). */
1804
1805 static void
1806 process_new_tree (tree t, hash_map <code_id_hash, unsigned> *hm,
1807 unsigned index, unsigned *total, class data_in *data_in)
1808 {
1809 /* Reconstruct the type variant and pointer-to/reference-to
1810 chains. */
1811 if (TYPE_P (t))
1812 {
1813 /* Map the tree types to their frequencies. */
1814 if (flag_lto_dump_type_stats)
1815 {
1816 unsigned key = (unsigned) TREE_CODE (t);
1817 unsigned *countp = hm->get (key);
1818 hm->put (key, countp ? (*countp) + 1 : 1);
1819 (*total)++;
1820 }
1821
1822 num_prevailing_types++;
1823 lto_fixup_prevailing_type (t);
1824
1825 /* Compute the canonical type of all non-ODR types.
1826 Delay ODR types until the end of the merging process - the canonical
1827 type for those can be computed using the (unique) name; however,
1828 we want to do this only if units in other languages do not
1829 contain a structurally equivalent type.
1830
1831 Because SCC components are streamed in random (hash) order
1832 we may have encountered the type before while registering
1833 the canonical type of a derived type in the same SCC. */
1834 if (!TYPE_CANONICAL (t))
1835 {
1836 if (!RECORD_OR_UNION_TYPE_P (t)
1837 || !TYPE_CXX_ODR_P (t))
1838 gimple_register_canonical_type (t);
1839 else if (COMPLETE_TYPE_P (t))
1840 vec_safe_push (types_to_register, t);
1841 }
1842 if (TYPE_MAIN_VARIANT (t) == t && odr_type_p (t))
1843 register_odr_type (t);
1844 }
1845 /* Link shared INTEGER_CSTs into the TYPE_CACHED_VALUEs of their
1846 type, which is also a member of this SCC. */
1847 if (TREE_CODE (t) == INTEGER_CST
1848 && !TREE_OVERFLOW (t))
1849 cache_integer_cst (t);
1850 if (!flag_ltrans)
1851 {
1852 lto_maybe_register_decl (data_in, t, index);
1853 /* Scan the tree for references to global functions or
1854 variables and record those for later fixup. */
1855 if (mentions_vars_p (t))
1856 vec_safe_push (tree_with_vars, t);
1857 }
1858 }
1859
1860 /* Read all the symbols from buffer DATA, using descriptors in DECL_DATA.
1861 RESOLUTIONS is the set of symbols picked by the linker (read from the
1862 resolution file when the linker plugin is being used). */
1863
1864 static void
1865 lto_read_decls (struct lto_file_decl_data *decl_data, const void *data,
1866 vec<ld_plugin_symbol_resolution_t> resolutions)
1867 {
1868 const struct lto_decl_header *header = (const struct lto_decl_header *) data;
1869 const int decl_offset = sizeof (struct lto_decl_header);
1870 const int main_offset = decl_offset + header->decl_state_size;
1871 const int string_offset = main_offset + header->main_size;
1872 class data_in *data_in;
1873 unsigned int i;
1874 const uint32_t *data_ptr, *data_end;
1875 uint32_t num_decl_states;
1876
1877 lto_input_block ib_main ((const char *) data + main_offset,
1878 header->main_size, decl_data->mode_table);
1879
1880 data_in = lto_data_in_create (decl_data, (const char *) data + string_offset,
1881 header->string_size, resolutions);
1882
1883 /* We do not uniquify the pre-loaded cache entries; those are middle-end
1884 internal types that should not be merged. */
1885
1886 hash_map <code_id_hash, unsigned> hm;
1887 unsigned total = 0;
1888
1889 /* Read the global declarations and types. */
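/* Each record in the stream is either an SCC of trees (LTO_tree_scc or
   LTO_trees), which is read as a whole and - outside of LTRANS - unified
   against previously seen SCCs, or a single tree read via
   lto_input_tree_1. */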
1890 while (ib_main.p < ib_main.len)
1891 {
1892 tree t;
1893 unsigned from = data_in->reader_cache->nodes.length ();
1894 /* Read and uniquify SCCs as in the input stream. */
1895 enum LTO_tags tag = streamer_read_record_start (&ib_main);
1896 if (tag == LTO_tree_scc || tag == LTO_trees)
1897 {
1898 unsigned len_;
1899 unsigned scc_entry_len;
1900
1901 /* Because we stream in SCC order we know that all unshared trees
1902 are now fully streamed. Process them. */
1903 hashval_t scc_hash = lto_input_scc (&ib_main, data_in, &len_,
1904 &scc_entry_len,
1905 tag == LTO_tree_scc);
1906 unsigned len = data_in->reader_cache->nodes.length () - from;
1907 gcc_assert (len == len_);
1908
1909 if (tag == LTO_tree_scc)
1910 {
1911 total_scc_size += len;
1912 num_sccs_read++;
1913 }
1914 else
1915 num_unshared_trees_read += len;
1916
1917 /* We have the special case of size-1 SCCs that are pre-merged
1918 by means of identifier and string sharing for example.
1919 ??? Maybe we should avoid streaming those as SCCs. */
1920 tree first = streamer_tree_cache_get_tree (data_in->reader_cache,
1921 from);
1922 /* Identifiers and integers are shared specially; they should never
1923 go through the tree merging path. */
1924 gcc_checking_assert ((TREE_CODE (first) != IDENTIFIER_NODE
1925 && (TREE_CODE (first) != INTEGER_CST
1926 || TREE_OVERFLOW (first)))
1927 || len != 1);
1928
1929 /* Try to unify the SCC with already existing ones. */
1930 if (!flag_ltrans && tag != LTO_trees
1931 && unify_scc (data_in, from,
1932 len, scc_entry_len, scc_hash))
1933 continue;
1934
1935 /* Tree merging failed, mark entries in location cache as
1936 permanent. */
1937 data_in->location_cache.accept_location_cache ();
1938
1939 bool seen_type = false;
1940 for (unsigned i = 0; i < len; ++i)
1941 {
1942 tree t = streamer_tree_cache_get_tree (data_in->reader_cache,
1943 from + i);
1944 process_new_tree (t, &hm, from + i, &total, data_in);
1945 if (TYPE_P (t))
1946 seen_type = true;
1947 }
1948
1949 /* Register DECLs with the debuginfo machinery. */
1950 while (!dref_queue.is_empty ())
1951 {
1952 dref_entry e = dref_queue.pop ();
1953 debug_hooks->register_external_die (e.decl, e.sym, e.off);
1954 }
1955
1956 if (seen_type)
1957 num_type_scc_trees += len;
1958 }
1959 else
1960 {
1961 t = lto_input_tree_1 (&ib_main, data_in, tag, 0);
1962 gcc_assert (data_in->reader_cache->nodes.length () == from + 1);
1963 num_unshared_trees_read++;
1964 data_in->location_cache.accept_location_cache ();
1965 process_dref (data_in, t, from);
1966 if (TREE_CODE (t) == IDENTIFIER_NODE
1967 || (TREE_CODE (t) == INTEGER_CST
1968 && !TREE_OVERFLOW (t)))
1969 ;
1970 else
1971 {
1972 lto_maybe_register_decl (data_in, t, from);
1973 process_new_tree (t, &hm, from, &total, data_in);
1974 }
1975 }
1976 }
1977
1978 /* Dump type statistics. */
1979 if (flag_lto_dump_type_stats)
1980 {
1981 fprintf (stdout, " Type Frequency Percentage\n\n");
1982 for (hash_map<code_id_hash, unsigned>::iterator itr = hm.begin ();
1983 itr != hm.end ();
1984 ++itr)
1985 {
1986 std::pair<unsigned, unsigned> p = *itr;
1987 enum tree_code code = (enum tree_code) p.first;
1988 fprintf (stdout, "%14s %6d %12.2f\n", get_tree_code_name (code),
1989 p.second, float (p.second)/total*100);
1990 }
1991 }
1992
1993 data_in->location_cache.apply_location_cache ();
1994
1995 /* Read in lto_in_decl_state objects. */
1996 data_ptr = (const uint32_t *) ((const char*) data + decl_offset);
1997 data_end
1998 = (const uint32_t *) ((const char*) data_ptr + header->decl_state_size);
1999 num_decl_states = *data_ptr++;
2000
2001 gcc_assert (num_decl_states > 0);
2002 decl_data->global_decl_state = lto_new_in_decl_state ();
2003 data_ptr = lto_read_in_decl_state (data_in, data_ptr,
2004 decl_data->global_decl_state);
2005
2006 /* Read in per-function decl states and enter them in hash table. */
2007 decl_data->function_decl_states
2008 = hash_table<decl_state_hasher>::create_ggc (37);
2009
2010 for (i = 1; i < num_decl_states; i++)
2011 {
2012 struct lto_in_decl_state *state = lto_new_in_decl_state ();
2013
2014 data_ptr = lto_read_in_decl_state (data_in, data_ptr, state);
2015 lto_in_decl_state **slot
2016 = decl_data->function_decl_states->find_slot (state, INSERT);
2017 gcc_assert (*slot == NULL);
2018 *slot = state;
2019 }
2020
2021 if (data_ptr != data_end)
2022 internal_error ("bytecode stream: garbage at the end of symbols section");
2023
2024 /* Set the current decl state to be the global state. */
2025 decl_data->current_decl_state = decl_data->global_decl_state;
2026
2027 lto_data_in_delete (data_in);
2028 }
2029
2030 /* Custom version of strtoll, which is not portable. */
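/* For example, "1a2f" parses to 0x1a2f; both lower- and upper-case hex
   digits are accepted and anything else is a hard error. */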
2031
2032 static int64_t
2033 lto_parse_hex (const char *p)
2034 {
2035 int64_t ret = 0;
2036
2037 for (; *p != '\0'; ++p)
2038 {
2039 char c = *p;
2040 unsigned char part;
2041 ret <<= 4;
2042 if (c >= '0' && c <= '9')
2043 part = c - '0';
2044 else if (c >= 'a' && c <= 'f')
2045 part = c - 'a' + 10;
2046 else if (c >= 'A' && c <= 'F')
2047 part = c - 'A' + 10;
2048 else
2049 internal_error ("could not parse hex number");
2050 ret |= part;
2051 }
2052
2053 return ret;
2054 }
2055
2056 /* Read the resolution for FILE. The resolution is read from RESOLUTION;
2057 FILE_IDS maps sub-module ids to their lto_file_decl_data. */
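/* As parsed below, the per-object block in the resolution file looks
   roughly like:

     <object-file-name>[@0x<offset>]
     <number-of-symbols>
     <index> <symbol-id> <resolution-name> ...

   with one such block per object, in command-line order. */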
2058
2059 static void
2060 lto_resolution_read (splay_tree file_ids, FILE *resolution, lto_file *file)
2061 {
2062 /* We require that objects in the resolution file are in the same
2063 order as the lto1 command line. */
2064 unsigned int name_len;
2065 char *obj_name;
2066 unsigned int num_symbols;
2067 unsigned int i;
2068 struct lto_file_decl_data *file_data;
2069 splay_tree_node nd = NULL;
2070
2071 if (!resolution)
2072 return;
2073
2074 name_len = strlen (file->filename);
2075 obj_name = XNEWVEC (char, name_len + 1);
2076 fscanf (resolution, " "); /* Read white space. */
2077
2078 fread (obj_name, sizeof (char), name_len, resolution);
2079 obj_name[name_len] = '\0';
2080 if (filename_cmp (obj_name, file->filename) != 0)
2081 internal_error ("unexpected file name %s in linker resolution file. "
2082 "Expected %s", obj_name, file->filename);
2083 if (file->offset != 0)
2084 {
2085 int t;
2086 char offset_p[17];
2087 int64_t offset;
2088 t = fscanf (resolution, "@0x%16s", offset_p);
2089 if (t != 1)
2090 internal_error ("could not parse file offset");
2091 offset = lto_parse_hex (offset_p);
2092 if (offset != file->offset)
2093 internal_error ("unexpected offset");
2094 }
2095
2096 free (obj_name);
2097
2098 fscanf (resolution, "%u", &num_symbols);
2099
2100 for (i = 0; i < num_symbols; i++)
2101 {
2102 int t;
2103 unsigned index;
2104 unsigned HOST_WIDE_INT id;
2105 char r_str[27];
2106 enum ld_plugin_symbol_resolution r = (enum ld_plugin_symbol_resolution) 0;
2107 unsigned int j;
2108 unsigned int lto_resolution_str_len = ARRAY_SIZE (lto_resolution_str);
2109 res_pair rp;
2110
2111 t = fscanf (resolution, "%u " HOST_WIDE_INT_PRINT_HEX_PURE
2112 " %26s %*[^\n]\n", &index, &id, r_str);
2113 if (t != 3)
2114 internal_error ("invalid line in the resolution file");
2115
2116 for (j = 0; j < lto_resolution_str_len; j++)
2117 {
2118 if (strcmp (lto_resolution_str[j], r_str) == 0)
2119 {
2120 r = (enum ld_plugin_symbol_resolution) j;
2121 /* Incremental linking together with -fwhole-program may seem
2122 somewhat contradictory (as the point of incremental linking
2123 is to allow re-linking with more symbols later), but it is
2124 used to build the LTO kernel. We want to hide all symbols that
2125 are not explicitly marked as exported and thus turn
2126 LDPR_PREVAILING_DEF_IRONLY_EXP
2127 into LDPR_PREVAILING_DEF_IRONLY. */
2128 if (flag_whole_program
2129 && flag_incremental_link == INCREMENTAL_LINK_NOLTO
2130 && r == LDPR_PREVAILING_DEF_IRONLY_EXP)
2131 r = LDPR_PREVAILING_DEF_IRONLY;
2132 break;
2133 }
2134 }
2135 if (j == lto_resolution_str_len)
2136 internal_error ("invalid resolution in the resolution file");
2137
2138 if (!(nd && lto_splay_tree_id_equal_p (nd->key, id)))
2139 {
2140 nd = lto_splay_tree_lookup (file_ids, id);
2141 if (nd == NULL)
2142 internal_error ("resolution sub id %wx not in object file", id);
2143 }
2144
2145 file_data = (struct lto_file_decl_data *)nd->value;
2146 /* The indexes are very sparse. To save memory, save them in a compact
2147 format that is only unpacked later when the subfile is processed. */
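/* The unpacking into a full resolution vector happens in
   lto_file_finalize. */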
2148 rp.res = r;
2149 rp.index = index;
2150 file_data->respairs.safe_push (rp);
2151 if (file_data->max_index < index)
2152 file_data->max_index = index;
2153 }
2154 }
2155
2156 /* List of file_decl_datas. */
2157 struct file_data_list
2158 {
2159 struct lto_file_decl_data *first, *last;
2160 };
2161
2162 /* Is the name for an id'ed LTO section? */
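/* Id'ed names have the form <section_name_prefix><section>.<hex sub-id>;
   the hex suffix identifies the sub module the section belongs to. */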
2163
2164 static int
2165 lto_section_with_id (const char *name, unsigned HOST_WIDE_INT *id)
2166 {
2167 const char *s;
2168
2169 if (strncmp (name, section_name_prefix, strlen (section_name_prefix)))
2170 return 0;
2171 s = strrchr (name, '.');
2172 if (!s)
2173 return 0;
2174 /* If the section is not suffixed with an ID, return. */
2175 if ((size_t)(s - name) == strlen (section_name_prefix))
2176 return 0;
2177 return sscanf (s, "." HOST_WIDE_INT_PRINT_HEX_PURE, id) == 1;
2178 }
2179
2180 /* Create file_data of each sub file id. */
2181
2182 static int
2183 create_subid_section_table (struct lto_section_slot *ls, splay_tree file_ids,
2184 struct file_data_list *list)
2185 {
2186 struct lto_section_slot s_slot, *new_slot;
2187 unsigned HOST_WIDE_INT id;
2188 splay_tree_node nd;
2189 void **hash_slot;
2190 char *new_name;
2191 struct lto_file_decl_data *file_data;
2192
2193 if (!lto_section_with_id (ls->name, &id))
2194 return 1;
2195
2196 /* Find hash table of sub module id. */
2197 nd = lto_splay_tree_lookup (file_ids, id);
2198 if (nd != NULL)
2199 {
2200 file_data = (struct lto_file_decl_data *)nd->value;
2201 }
2202 else
2203 {
2204 file_data = ggc_alloc<lto_file_decl_data> ();
2205 memset(file_data, 0, sizeof (struct lto_file_decl_data));
2206 file_data->id = id;
2207 file_data->section_hash_table = lto_obj_create_section_hash_table ();
2208 lto_splay_tree_insert (file_ids, id, file_data);
2209
2210 /* Maintain list in linker order. */
2211 if (!list->first)
2212 list->first = file_data;
2213 if (list->last)
2214 list->last->next = file_data;
2215
2216 list->last = file_data;
2217 }
2218
2219 /* Copy section into sub module hash table. */
2220 new_name = XDUPVEC (char, ls->name, strlen (ls->name) + 1);
2221 s_slot.name = new_name;
2222 hash_slot = htab_find_slot (file_data->section_hash_table, &s_slot, INSERT);
2223 gcc_assert (*hash_slot == NULL);
2224
2225 new_slot = XDUP (struct lto_section_slot, ls);
2226 new_slot->name = new_name;
2227 *hash_slot = new_slot;
2228 return 1;
2229 }
2230
2231 /* Read declarations and other initializations for a FILE_DATA. */
2232
2233 static void
2234 lto_file_finalize (struct lto_file_decl_data *file_data, lto_file *file,
2235 int order)
2236 {
2237 const char *data;
2238 size_t len;
2239 vec<ld_plugin_symbol_resolution_t>
2240 resolutions = vNULL;
2241 int i;
2242 res_pair *rp;
2243
2244 /* Create vector for fast access of resolution. We do this lazily
2245 to save memory. */
2246 resolutions.safe_grow_cleared (file_data->max_index + 1, true);
2247 for (i = 0; file_data->respairs.iterate (i, &rp); i++)
2248 resolutions[rp->index] = rp->res;
2249 file_data->respairs.release ();
2250
2251 file_data->renaming_hash_table = lto_create_renaming_table ();
2252 file_data->file_name = file->filename;
2253 file_data->order = order;
2254
2255 /* Read and verify LTO section. */
2256 data = lto_get_summary_section_data (file_data, LTO_section_lto, &len);
2257 if (data == NULL)
2258 {
2259 fatal_error (input_location, "bytecode stream in file %qs generated "
2260 "with GCC compiler older than 10.0", file_data->file_name);
2261 return;
2262 }
2263
2264 memcpy (&file_data->lto_section_header, data, sizeof (lto_section));
2265 lto_check_version (file_data->lto_section_header.major_version,
2266 file_data->lto_section_header.minor_version,
2267 file_data->file_name);
2268
2269 #ifdef ACCEL_COMPILER
2270 lto_input_mode_table (file_data);
2271 #else
2272 file_data->mode_table = lto_mode_identity_table;
2273 #endif
2274
2275 data = lto_get_summary_section_data (file_data, LTO_section_decls, &len);
2276 if (data == NULL)
2277 {
2278 internal_error ("cannot read %<LTO_section_decls%> from %s",
2279 file_data->file_name);
2280 return;
2281 }
2282 /* Frees resolutions. */
2283 lto_read_decls (file_data, data, resolutions);
2284 lto_free_section_data (file_data, LTO_section_decls, NULL, data, len);
2285 }
2286
2287 /* Finalize FILE_DATA in FILE and increase COUNT. */
2288
2289 static int
2290 lto_create_files_from_ids (lto_file *file, struct lto_file_decl_data *file_data,
2291 int *count, int order)
2292 {
2293 lto_file_finalize (file_data, file, order);
2294 if (symtab->dump_file)
2295 fprintf (symtab->dump_file,
2296 "Creating file %s with sub id " HOST_WIDE_INT_PRINT_HEX "\n",
2297 file_data->file_name, file_data->id);
2298 (*count)++;
2299 return 0;
2300 }
2301
2302 /* Generate a TREE representation for all types and external decl
2303 entities in FILE.
2304
2305 Read all of the globals out of the file. Then read the cgraph
2306 and process the .o index into the cgraph nodes so that it can open
2307 the .o file to load the functions and ipa information. */
2308
2309 static struct lto_file_decl_data *
2310 lto_file_read (lto_file *file, FILE *resolution_file, int *count)
2311 {
2312 struct lto_file_decl_data *file_data = NULL;
2313 splay_tree file_ids;
2314 htab_t section_hash_table;
2315 struct lto_section_slot *section;
2316 struct file_data_list file_list;
2317 struct lto_section_list section_list;
2318
2319 memset (&section_list, 0, sizeof (struct lto_section_list));
2320 section_hash_table = lto_obj_build_section_table (file, &section_list);
2321
2322 /* Dump the details of LTO objects. */
2323 if (flag_lto_dump_objects)
2324 {
2325 int i=0;
2326 fprintf (stdout, "\n LTO Object Name: %s\n", file->filename);
2327 fprintf (stdout, "\nNo. Offset Size Section Name\n\n");
2328 for (section = section_list.first; section != NULL; section = section->next)
2329 fprintf (stdout, "%2d %8" PRId64 " %8" PRIu64 " %s\n",
2330 ++i, (int64_t) section->start, (uint64_t) section->len,
2331 section->name);
2332 }
2333
2334 /* Find all sub modules in the object and put their sections into new hash
2335 tables in a splay tree. */
2336 file_ids = lto_splay_tree_new ();
2337 memset (&file_list, 0, sizeof (struct file_data_list));
2338 for (section = section_list.first; section != NULL; section = section->next)
2339 create_subid_section_table (section, file_ids, &file_list);
2340
2341 /* Add resolutions to file ids. */
2342 lto_resolution_read (file_ids, resolution_file, file);
2343
2344 /* Finalize each lto file for each submodule in the merged object. */
2345 int order = 0;
2346 for (file_data = file_list.first; file_data != NULL;
2347 file_data = file_data->next)
2348 lto_create_files_from_ids (file, file_data, count, order++);
2349
2350 splay_tree_delete (file_ids);
2351 htab_delete (section_hash_table);
2352
2353 return file_list.first;
2354 }
2355
2356 #if HAVE_MMAP_FILE && HAVE_SYSCONF && defined _SC_PAGE_SIZE
2357 #define LTO_MMAP_IO 1
2358 #endif
2359
2360 #if LTO_MMAP_IO
2361 /* Mask derived from the machine page size, used for mmap and munmap calls. */
2362 static size_t page_mask;
2363 #endif
2364
2365 /* Get the section data of length LEN from FILE_DATA's file starting at
2366 OFFSET. The data segment must be freed by the caller when the
2367 caller is finished. Returns NULL on failure. */
2368
2369 static char *
2370 lto_read_section_data (struct lto_file_decl_data *file_data,
2371 intptr_t offset, size_t len)
2372 {
2373 char *result;
2374 static int fd = -1;
2375 static char *fd_name;
2376 #if LTO_MMAP_IO
2377 intptr_t computed_len;
2378 intptr_t computed_offset;
2379 intptr_t diff;
2380 #endif
2381
2382 /* Keep a single-entry file-descriptor cache. The last file we
2383 touched will get closed at exit.
2384 ??? Eventually we want to add a more sophisticated larger cache
2385 or rather fix function body streaming to not stream them in
2386 practically random order. */
2387 if (fd != -1
2388 && filename_cmp (fd_name, file_data->file_name) != 0)
2389 {
2390 free (fd_name);
2391 close (fd);
2392 fd = -1;
2393 }
2394 if (fd == -1)
2395 {
2396 fd = open (file_data->file_name, O_RDONLY|O_BINARY);
2397 if (fd == -1)
2398 {
2399 fatal_error (input_location, "Cannot open %s", file_data->file_name);
2400 return NULL;
2401 }
2402 fd_name = xstrdup (file_data->file_name);
2403 }
2404
2405 #if LTO_MMAP_IO
2406 if (!page_mask)
2407 {
2408 size_t page_size = sysconf (_SC_PAGE_SIZE);
2409 page_mask = ~(page_size - 1);
2410 }
2411
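/* mmap needs a page-aligned file offset: round OFFSET down to a page
   boundary and remember the difference so the returned pointer still
   refers to the requested byte. */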
2412 computed_offset = offset & page_mask;
2413 diff = offset - computed_offset;
2414 computed_len = len + diff;
2415
2416 result = (char *) mmap (NULL, computed_len, PROT_READ, MAP_PRIVATE,
2417 fd, computed_offset);
2418 if (result == MAP_FAILED)
2419 {
2420 fatal_error (input_location, "Cannot map %s", file_data->file_name);
2421 return NULL;
2422 }
2423
2424 return result + diff;
2425 #else
2426 result = (char *) xmalloc (len);
2427 if (lseek (fd, offset, SEEK_SET) != offset
2428 || read (fd, result, len) != (ssize_t) len)
2429 {
2430 free (result);
2431 fatal_error (input_location, "Cannot read %s", file_data->file_name);
2432 result = NULL;
2433 }
2434 #ifdef __MINGW32__
2435 /* Native Windows doesn't support delayed unlink of an open file. So
2436 we close the file here again. This produces higher I/O load, but at
2437 least it prevents dangling file handles from blocking the unlink. */
2438 free (fd_name);
2439 fd_name = NULL;
2440 close (fd);
2441 fd = -1;
2442 #endif
2443 return result;
2444 #endif
2445 }
2446
2447
2448 /* Get the section data from FILE_DATA of SECTION_TYPE with NAME.
2449 NAME will be NULL unless the section type is for a function
2450 body. */
2451
2452 static const char *
2453 get_section_data (struct lto_file_decl_data *file_data,
2454 enum lto_section_type section_type,
2455 const char *name, int order,
2456 size_t *len)
2457 {
2458 htab_t section_hash_table = file_data->section_hash_table;
2459 struct lto_section_slot *f_slot;
2460 struct lto_section_slot s_slot;
2461 const char *section_name = lto_get_section_name (section_type, name,
2462 order, file_data);
2463 char *data = NULL;
2464
2465 *len = 0;
2466 s_slot.name = section_name;
2467 f_slot = (struct lto_section_slot *) htab_find (section_hash_table, &s_slot);
2468 if (f_slot)
2469 {
2470 data = lto_read_section_data (file_data, f_slot->start, f_slot->len);
2471 *len = f_slot->len;
2472 }
2473
2474 free (CONST_CAST (char *, section_name));
2475 return data;
2476 }
2477
2478
2479 /* Free the section data from FILE_DATA of SECTION_TYPE with NAME that
2480 starts at OFFSET and has LEN bytes. */
2481
2482 static void
2483 free_section_data (struct lto_file_decl_data *file_data ATTRIBUTE_UNUSED,
2484 enum lto_section_type section_type ATTRIBUTE_UNUSED,
2485 const char *name ATTRIBUTE_UNUSED,
2486 const char *offset, size_t len ATTRIBUTE_UNUSED)
2487 {
2488 #if LTO_MMAP_IO
2489 intptr_t computed_len;
2490 intptr_t computed_offset;
2491 intptr_t diff;
2492 #endif
2493
2494 #if LTO_MMAP_IO
2495 computed_offset = ((intptr_t) offset) & page_mask;
2496 diff = (intptr_t) offset - computed_offset;
2497 computed_len = len + diff;
2498
2499 munmap ((caddr_t) computed_offset, computed_len);
2500 #else
2501 free (CONST_CAST(char *, offset));
2502 #endif
2503 }
2504
2505 static lto_file *current_lto_file;
2506
2507 /* If TT is a variable or function decl replace it with its
2508 prevailing variant. */
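/* As a side effect the local variable FIXED is set, which lets
   lto_fixup_prevailing_decls assert that at least one reference was
   actually replaced. */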
2509 #define LTO_SET_PREVAIL(tt) \
2510 do {\
2511 if ((tt) && VAR_OR_FUNCTION_DECL_P (tt) \
2512 && (TREE_PUBLIC (tt) || DECL_EXTERNAL (tt))) \
2513 { \
2514 tt = lto_symtab_prevailing_decl (tt); \
2515 fixed = true; \
2516 } \
2517 } while (0)
2518
2519 /* Ensure that TT isn't a replaceable var or function decl. */
2520 #define LTO_NO_PREVAIL(tt) \
2521 gcc_checking_assert (!(tt) || !VAR_OR_FUNCTION_DECL_P (tt))
2522
2523 /* Given a tree T replace all fields referring to variables or functions
2524 with their prevailing variant. */
2525 static void
2526 lto_fixup_prevailing_decls (tree t)
2527 {
2528 enum tree_code code = TREE_CODE (t);
2529 bool fixed = false;
2530
2531 gcc_checking_assert (code != TREE_BINFO);
2532 LTO_NO_PREVAIL (TREE_TYPE (t));
2533 if (CODE_CONTAINS_STRUCT (code, TS_COMMON)
2534 /* lto_symtab_prevail_decl uses TREE_CHAIN to link to the prevailing decl;
2535 in case T is a prevailed declaration we would ICE here. */
2536 && !VAR_OR_FUNCTION_DECL_P (t))
2537 LTO_NO_PREVAIL (TREE_CHAIN (t));
2538 if (DECL_P (t))
2539 {
2540 LTO_NO_PREVAIL (DECL_NAME (t));
2541 LTO_SET_PREVAIL (DECL_CONTEXT (t));
2542 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
2543 {
2544 LTO_SET_PREVAIL (DECL_SIZE (t));
2545 LTO_SET_PREVAIL (DECL_SIZE_UNIT (t));
2546 LTO_SET_PREVAIL (DECL_INITIAL (t));
2547 LTO_NO_PREVAIL (DECL_ATTRIBUTES (t));
2548 LTO_SET_PREVAIL (DECL_ABSTRACT_ORIGIN (t));
2549 }
2550 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
2551 {
2552 LTO_NO_PREVAIL (DECL_ASSEMBLER_NAME_RAW (t));
2553 }
2554 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
2555 {
2556 LTO_NO_PREVAIL (DECL_RESULT_FLD (t));
2557 }
2558 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
2559 {
2560 LTO_NO_PREVAIL (DECL_ARGUMENTS (t));
2561 LTO_SET_PREVAIL (DECL_FUNCTION_PERSONALITY (t));
2562 LTO_NO_PREVAIL (DECL_VINDEX (t));
2563 }
2564 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
2565 {
2566 LTO_SET_PREVAIL (DECL_FIELD_OFFSET (t));
2567 LTO_NO_PREVAIL (DECL_BIT_FIELD_TYPE (t));
2568 LTO_NO_PREVAIL (DECL_QUALIFIER (t));
2569 LTO_NO_PREVAIL (DECL_FIELD_BIT_OFFSET (t));
2570 LTO_NO_PREVAIL (DECL_FCONTEXT (t));
2571 }
2572 }
2573 else if (TYPE_P (t))
2574 {
2575 LTO_NO_PREVAIL (TYPE_CACHED_VALUES (t));
2576 LTO_SET_PREVAIL (TYPE_SIZE (t));
2577 LTO_SET_PREVAIL (TYPE_SIZE_UNIT (t));
2578 LTO_NO_PREVAIL (TYPE_ATTRIBUTES (t));
2579 LTO_NO_PREVAIL (TYPE_NAME (t));
2580
2581 LTO_SET_PREVAIL (TYPE_MIN_VALUE_RAW (t));
2582 LTO_SET_PREVAIL (TYPE_MAX_VALUE_RAW (t));
2583 LTO_NO_PREVAIL (TYPE_LANG_SLOT_1 (t));
2584
2585 LTO_SET_PREVAIL (TYPE_CONTEXT (t));
2586
2587 LTO_NO_PREVAIL (TYPE_CANONICAL (t));
2588 LTO_NO_PREVAIL (TYPE_MAIN_VARIANT (t));
2589 LTO_NO_PREVAIL (TYPE_NEXT_VARIANT (t));
2590 }
2591 else if (EXPR_P (t))
2592 {
2593 int i;
2594 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
2595 LTO_SET_PREVAIL (TREE_OPERAND (t, i));
2596 }
2597 else if (TREE_CODE (t) == CONSTRUCTOR)
2598 {
2599 unsigned i;
2600 tree val;
2601 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
2602 LTO_SET_PREVAIL (val);
2603 }
2604 else
2605 {
2606 switch (code)
2607 {
2608 case TREE_LIST:
2609 LTO_SET_PREVAIL (TREE_VALUE (t));
2610 LTO_SET_PREVAIL (TREE_PURPOSE (t));
2611 break;
2612 default:
2613 gcc_unreachable ();
2614 }
2615 }
2616 /* If we fixed nothing, then we missed something seen by
2617 mentions_vars_p. */
2618 gcc_checking_assert (fixed);
2619 }
2620 #undef LTO_SET_PREVAIL
2621 #undef LTO_NO_PREVAIL
2622
2623 /* Helper function of lto_fixup_decls. Walks the var and fn streams in STATE,
2624 replaces var and function decls with the corresponding prevailing def. */
2625
2626 static void
2627 lto_fixup_state (struct lto_in_decl_state *state)
2628 {
2629 unsigned i, si;
2630
2631 /* Although we only want to replace FUNCTION_DECLs and VAR_DECLs,
2632 we still need to walk from all DECLs to find the reachable
2633 FUNCTION_DECLs and VAR_DECLs. */
2634 for (si = 0; si < LTO_N_DECL_STREAMS; si++)
2635 {
2636 vec<tree, va_gc> *trees = state->streams[si];
2637 for (i = 0; i < vec_safe_length (trees); i++)
2638 {
2639 tree t = (*trees)[i];
2640 if (flag_checking && TYPE_P (t))
2641 verify_type (t);
2642 if (VAR_OR_FUNCTION_DECL_P (t)
2643 && (TREE_PUBLIC (t) || DECL_EXTERNAL (t)))
2644 (*trees)[i] = lto_symtab_prevailing_decl (t);
2645 }
2646 }
2647 }
2648
2649 /* Fix the decls from all FILES. Replaces each decl with the corresponding
2650 prevailing one. */
2651
2652 static void
2653 lto_fixup_decls (struct lto_file_decl_data **files)
2654 {
2655 unsigned int i;
2656 tree t;
2657
2658 if (tree_with_vars)
2659 FOR_EACH_VEC_ELT ((*tree_with_vars), i, t)
2660 lto_fixup_prevailing_decls (t);
2661
2662 for (i = 0; files[i]; i++)
2663 {
2664 struct lto_file_decl_data *file = files[i];
2665 struct lto_in_decl_state *state = file->global_decl_state;
2666 lto_fixup_state (state);
2667
2668 hash_table<decl_state_hasher>::iterator iter;
2669 lto_in_decl_state *elt;
2670 FOR_EACH_HASH_TABLE_ELEMENT (*file->function_decl_states, elt,
2671 lto_in_decl_state *, iter)
2672 lto_fixup_state (elt);
2673 }
2674 }
2675
2676 static GTY((length ("lto_stats.num_input_files + 1"))) struct lto_file_decl_data **all_file_decl_data;
2677
2678 /* Turn file datas for sub files into a single array, so that they look
2679 like separate files for further passes. */
2680
2681 static void
2682 lto_flatten_files (struct lto_file_decl_data **orig, int count,
2683 int last_file_ix)
2684 {
2685 struct lto_file_decl_data *n, *next;
2686 int i, k;
2687
2688 lto_stats.num_input_files = count;
2689 all_file_decl_data
2690 = ggc_cleared_vec_alloc<lto_file_decl_data_ptr> (count + 1);
2691 /* Set the hooks so that all of the ipa passes can read in their data. */
2692 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
2693 for (i = 0, k = 0; i < last_file_ix; i++)
2694 {
2695 for (n = orig[i]; n != NULL; n = next)
2696 {
2697 all_file_decl_data[k++] = n;
2698 next = n->next;
2699 n->next = NULL;
2700 }
2701 }
2702 all_file_decl_data[k] = NULL;
2703 gcc_assert (k == count);
2704 }
2705
2706 /* Input file data before flattening (i.e. splitting them into subfiles to
2707 support incremental linking). */
2708 static int real_file_count;
2709 static GTY((length ("real_file_count + 1"))) struct lto_file_decl_data **real_file_decl_data;
2710
2711 /* Read all the symbols from the input files FNAMES. NFILES is the
2712 number of files requested in the command line. Instantiate a
2713 global call graph by aggregating all the sub-graphs found in each
2714 file. */
2715
2716 void
2717 read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
2718 {
2719 unsigned int i, last_file_ix;
2720 FILE *resolution;
2721 unsigned resolution_objects = 0;
2722 int count = 0;
2723 struct lto_file_decl_data **decl_data;
2724 symtab_node *snode;
2725
2726 symtab->initialize ();
2727
2728 timevar_push (TV_IPA_LTO_DECL_IN);
2729
2730 #ifdef ACCEL_COMPILER
2731 section_name_prefix = OFFLOAD_SECTION_NAME_PREFIX;
2732 lto_stream_offload_p = true;
2733 #endif
2734
2735 real_file_decl_data
2736 = decl_data = ggc_cleared_vec_alloc<lto_file_decl_data_ptr> (nfiles + 1);
2737 real_file_count = nfiles;
2738
2739 /* Read the resolution file. */
2740 resolution = NULL;
2741 if (resolution_file_name)
2742 {
2743 int t;
2744
2745 resolution = fopen (resolution_file_name, "r");
2746 if (resolution == NULL)
2747 fatal_error (input_location,
2748 "could not open symbol resolution file: %m");
2749
2750 t = fscanf (resolution, "%u", &resolution_objects);
2751 gcc_assert (t == 1);
2752 }
2753 symtab->state = LTO_STREAMING;
2754
2755 canonical_type_hash_cache = new hash_map<const_tree, hashval_t> (251);
2756 gimple_canonical_types = htab_create (16381, gimple_canonical_type_hash,
2757 gimple_canonical_type_eq, NULL);
2758 gcc_obstack_init (&tree_scc_hash_obstack);
2759 tree_scc_hash = new hash_table<tree_scc_hasher> (4096);
2760
2761 /* Register the common node types with the canonical type machinery so
2762 we properly share alias-sets across languages and TUs. Do not
2763 expose the common nodes as type merge targets - those that should be
2764 are already exposed by pre-loading the LTO streamer caches.
2765 Do two passes - first clear TYPE_CANONICAL and then re-compute it. */
2766 for (i = 0; i < itk_none; ++i)
2767 lto_register_canonical_types (integer_types[i], true);
2768 for (i = 0; i < stk_type_kind_last; ++i)
2769 lto_register_canonical_types (sizetype_tab[i], true);
2770 for (i = 0; i < TI_MAX; ++i)
2771 lto_register_canonical_types (global_trees[i], true);
2772 for (i = 0; i < itk_none; ++i)
2773 lto_register_canonical_types (integer_types[i], false);
2774 for (i = 0; i < stk_type_kind_last; ++i)
2775 lto_register_canonical_types (sizetype_tab[i], false);
2776 for (i = 0; i < TI_MAX; ++i)
2777 lto_register_canonical_types (global_trees[i], false);
2778
2779 if (!quiet_flag)
2780 fprintf (stderr, "Reading object files:");
2781
2782 /* Read all of the object files specified on the command line. */
2783 for (i = 0, last_file_ix = 0; i < nfiles; ++i)
2784 {
2785 struct lto_file_decl_data *file_data = NULL;
2786 if (!quiet_flag)
2787 {
2788 fprintf (stderr, " %s", fnames[i]);
2789 fflush (stderr);
2790 }
2791
2792 current_lto_file = lto_obj_file_open (fnames[i], false);
2793 if (!current_lto_file)
2794 break;
2795
2796 file_data = lto_file_read (current_lto_file, resolution, &count);
2797 if (!file_data)
2798 {
2799 lto_obj_file_close (current_lto_file);
2800 free (current_lto_file);
2801 current_lto_file = NULL;
2802 break;
2803 }
2804
2805 decl_data[last_file_ix++] = file_data;
2806
2807 lto_obj_file_close (current_lto_file);
2808 free (current_lto_file);
2809 current_lto_file = NULL;
2810 }
2811
2812 lto_flatten_files (decl_data, count, last_file_ix);
2813 lto_stats.num_input_files = count;
2814 ggc_free(decl_data);
2815 real_file_decl_data = NULL;
2816
2817 lto_register_canonical_types_for_odr_types ();
2818
2819 if (resolution_file_name)
2820 {
2821 /* True, since the plugin splits the archives. */
2822 gcc_assert (resolution_objects == nfiles);
2823 fclose (resolution);
2824 }
2825
2826 /* Show the LTO report before launching LTRANS. */
2827 if (flag_lto_report || (flag_wpa && flag_lto_report_wpa))
2828 print_lto_report_1 ();
2829
2830 /* Free gimple type merging datastructures. */
2831 delete tree_scc_hash;
2832 tree_scc_hash = NULL;
2833 obstack_free (&tree_scc_hash_obstack, NULL);
2834 htab_delete (gimple_canonical_types);
2835 gimple_canonical_types = NULL;
2836 delete canonical_type_hash_cache;
2837 canonical_type_hash_cache = NULL;
2838
2839 /* At this stage we know that the majority of GGC memory is reachable.
2840 Growing the limits prevents unnecessary invocations of GGC. */
2841 ggc_grow ();
2842 report_heap_memory_use ();
2843
2844 /* Set the hooks so that all of the ipa passes can read in their data. */
2845 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
2846
2847 timevar_pop (TV_IPA_LTO_DECL_IN);
2848
2849 if (!quiet_flag)
2850 fprintf (stderr, "\nReading the symbol table:");
2851
2852 timevar_push (TV_IPA_LTO_CGRAPH_IO);
2853 /* Read the symtab. */
2854 input_symtab ();
2855
2856 input_offload_tables (!flag_ltrans);
2857
2858 /* Store resolutions into the symbol table. */
2859
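/* Skip built-in function decls and hard-register variables; for everything
   else copy the resolution recorded in the file's resolution_map, fixing up
   symbol versions along the way (binutils PR25924). */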
2860 FOR_EACH_SYMBOL (snode)
2861 if (snode->externally_visible && snode->real_symbol_p ()
2862 && snode->lto_file_data && snode->lto_file_data->resolution_map
2863 && !(TREE_CODE (snode->decl) == FUNCTION_DECL
2864 && fndecl_built_in_p (snode->decl))
2865 && !(VAR_P (snode->decl) && DECL_HARD_REGISTER (snode->decl)))
2866 {
2867 ld_plugin_symbol_resolution_t *res;
2868
2869 res = snode->lto_file_data->resolution_map->get (snode->decl);
2870 if (!res || *res == LDPR_UNKNOWN)
2871 {
2872 if (snode->output_to_lto_symbol_table_p ())
2873 fatal_error (input_location, "missing resolution data for %s",
2874 IDENTIFIER_POINTER
2875 (DECL_ASSEMBLER_NAME (snode->decl)));
2876 }
2877 /* Symbol versions are always used externally, but the linker does not
2878 report that correctly.
2879 This is binutils PR25924. */
2880 else if (snode->symver && *res == LDPR_PREVAILING_DEF_IRONLY)
2881 snode->resolution = LDPR_PREVAILING_DEF_IRONLY_EXP;
2882 else
2883 snode->resolution = *res;
2884 }
2885 for (i = 0; all_file_decl_data[i]; i++)
2886 if (all_file_decl_data[i]->resolution_map)
2887 {
2888 delete all_file_decl_data[i]->resolution_map;
2889 all_file_decl_data[i]->resolution_map = NULL;
2890 }
2891
2892 timevar_pop (TV_IPA_LTO_CGRAPH_IO);
2893
2894 if (!quiet_flag)
2895 fprintf (stderr, "\nMerging declarations:");
2896
2897 timevar_push (TV_IPA_LTO_DECL_MERGE);
2898 /* Merge global decls. In ltrans mode we read the merged cgraph, so we do
2899 not need to care about resolving symbols again; we only need to replace
2900 duplicated declarations read from the callgraph and from function
2901 sections. */
2902 if (!flag_ltrans)
2903 {
2904 lto_symtab_merge_decls ();
2905
2906 /* If there were errors during symbol merging bail out, we have no
2907 good way to recover here. */
2908 if (seen_error ())
2909 fatal_error (input_location,
2910 "errors during merging of translation units");
2911
2912 /* Fixup all decls. */
2913 lto_fixup_decls (all_file_decl_data);
2914 }
2915 if (tree_with_vars)
2916 ggc_free (tree_with_vars);
2917 tree_with_vars = NULL;
2918 /* During WPA we want to prevent ggc collecting by default. Grow limits
2919 until after the IPA summaries are streamed in. Basically all IPA memory
2920 is explicitly managed by ggc_free and ggc collect is not useful.
2921 The exception is the merged declarations. */
2922 ggc_grow ();
2923 report_heap_memory_use ();
2924
2925 timevar_pop (TV_IPA_LTO_DECL_MERGE);
2926 /* Each pass will set the appropriate timer. */
2927
2928 if (!quiet_flag)
2929 fprintf (stderr, "\nReading summaries:");
2930
2931 /* Read the IPA summary data. */
2932 if (flag_ltrans)
2933 ipa_read_optimization_summaries ();
2934 else
2935 ipa_read_summaries ();
2936
2937 ggc_grow ();
2938
2939 for (i = 0; all_file_decl_data[i]; i++)
2940 {
2941 gcc_assert (all_file_decl_data[i]->symtab_node_encoder);
2942 lto_symtab_encoder_delete (all_file_decl_data[i]->symtab_node_encoder);
2943 all_file_decl_data[i]->symtab_node_encoder = NULL;
2944 lto_in_decl_state *global_decl_state
2945 = all_file_decl_data[i]->global_decl_state;
2946 lto_free_function_in_decl_state (global_decl_state);
2947 all_file_decl_data[i]->global_decl_state = NULL;
2948 all_file_decl_data[i]->current_decl_state = NULL;
2949 }
2950
2951 if (!flag_ltrans)
2952 {
2953 /* Finally merge the cgraph according to the decl merging decisions. */
2954 timevar_push (TV_IPA_LTO_CGRAPH_MERGE);
2955
2956 if (!quiet_flag)
2957 fprintf (stderr, "\nMerging symbols:");
2958
2959 gcc_assert (!dump_file);
2960 dump_file = dump_begin (lto_link_dump_id, NULL);
2961
2962 if (dump_file)
2963 {
2964 fprintf (dump_file, "Before merging:\n");
2965 symtab->dump (dump_file);
2966 }
2967 lto_symtab_merge_symbols ();
2968 /* Removal of unreachable symbols is needed to make verify_symtab pass;
2969 we still have duplicated comdat groups containing local statics.
2970 We could also just remove them while merging. */
2971 symtab->remove_unreachable_nodes (dump_file);
2972 ggc_collect ();
2973 report_heap_memory_use ();
2974
2975 if (dump_file)
2976 dump_end (lto_link_dump_id, dump_file);
2977 dump_file = NULL;
2978 timevar_pop (TV_IPA_LTO_CGRAPH_MERGE);
2979 }
2980 symtab->state = IPA_SSA;
2981 /* All node removals happening here are useless, because
2982 WPA should not stream them. Still always perform remove_unreachable_nodes
2983 because we may reshape the clone tree, get rid of dead masters of inline
2984 clones and remove symbol entries for read-only variables we keep around
2985 only to be able to constant fold them. */
2986 if (flag_ltrans)
2987 {
2988 if (symtab->dump_file)
2989 symtab->dump (symtab->dump_file);
2990 symtab->remove_unreachable_nodes (symtab->dump_file);
2991 }
2992
2993 /* Indicate that the cgraph is built and ready. */
2994 symtab->function_flags_ready = true;
2995
2996 ggc_free (all_file_decl_data);
2997 all_file_decl_data = NULL;
2998 }
2999
3000
3001
3002 /* Show various memory usage statistics related to LTO. */
3003 void
3004 print_lto_report_1 (void)
3005 {
3006 const char *pfx = (flag_lto) ? "LTO" : (flag_wpa) ? "WPA" : "LTRANS";
3007 fprintf (stderr, "%s statistics\n", pfx);
3008
3009 fprintf (stderr, "[%s] read %lu unshared trees\n",
3010 pfx, num_unshared_trees_read);
3011 fprintf (stderr, "[%s] read %lu mergeable SCCs of average size %f\n",
3012 pfx, num_sccs_read, total_scc_size / (double)num_sccs_read);
3013 fprintf (stderr, "[%s] %lu tree bodies read in total\n", pfx,
3014 total_scc_size + num_unshared_trees_read);
3015 if (flag_wpa && tree_scc_hash && num_sccs_read)
3016 {
3017 fprintf (stderr, "[%s] tree SCC table: size %ld, %ld elements, "
3018 "collision ratio: %f\n", pfx,
3019 (long) tree_scc_hash->size (),
3020 (long) tree_scc_hash->elements (),
3021 tree_scc_hash->collisions ());
3022 hash_table<tree_scc_hasher>::iterator hiter;
3023 tree_scc *scc, *max_scc = NULL;
3024 unsigned max_length = 0;
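/* Walk all collision chains to find the longest one. */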
3025 FOR_EACH_HASH_TABLE_ELEMENT (*tree_scc_hash, scc, x, hiter)
3026 {
3027 unsigned length = 0;
3028 tree_scc *s = scc;
3029 for (; s; s = s->next)
3030 length++;
3031 if (length > max_length)
3032 {
3033 max_length = length;
3034 max_scc = scc;
3035 }
3036 }
3037 fprintf (stderr, "[%s] tree SCC max chain length %u (size %u)\n",
3038 pfx, max_length, max_scc->len);
3039 fprintf (stderr, "[%s] Compared %lu SCCs, %lu collisions (%f)\n", pfx,
3040 num_scc_compares, num_scc_compare_collisions,
3041 num_scc_compare_collisions / (double) num_scc_compares);
3042 fprintf (stderr, "[%s] Merged %lu SCCs\n", pfx, num_sccs_merged);
3043 fprintf (stderr, "[%s] Merged %lu tree bodies\n", pfx,
3044 total_scc_size_merged);
3045 fprintf (stderr, "[%s] Merged %lu types\n", pfx, num_merged_types);
3046 fprintf (stderr, "[%s] %lu types prevailed (%lu associated trees)\n",
3047 pfx, num_prevailing_types, num_type_scc_trees);
3048 fprintf (stderr, "[%s] GIMPLE canonical type table: size %ld, "
3049 "%ld elements, %ld searches, %ld collisions (ratio: %f)\n", pfx,
3050 (long) htab_size (gimple_canonical_types),
3051 (long) htab_elements (gimple_canonical_types),
3052 (long) gimple_canonical_types->searches,
3053 (long) gimple_canonical_types->collisions,
3054 htab_collisions (gimple_canonical_types));
3055 fprintf (stderr, "[%s] GIMPLE canonical type pointer-map: "
3056 "%lu elements, %ld searches\n", pfx,
3057 num_canonical_type_hash_entries,
3058 num_canonical_type_hash_queries);
3059 }
3060
3061 print_lto_report (pfx);
3062 }
3063
3064 GTY(()) tree lto_eh_personality_decl;
3065
3066 /* Return the LTO personality function decl. */
3067
3068 tree
3069 lto_eh_personality (void)
3070 {
3071 if (!lto_eh_personality_decl)
3072 {
3073 /* Use the first personality DECL for our personality if we don't
3074 support multiple ones. This ensures that we don't artificially
3075 create the need for them in a single-language program. */
3076 if (first_personality_decl && !dwarf2out_do_cfi_asm ())
3077 lto_eh_personality_decl = first_personality_decl;
3078 else
3079 lto_eh_personality_decl = lhd_gcc_personality ();
3080 }
3081
3082 return lto_eh_personality_decl;
3083 }
3084
3085 /* Set the process name based on the LTO mode. */
3086
3087 static void
3088 lto_process_name (void)
3089 {
3090 if (flag_lto)
3091 setproctitle (flag_incremental_link == INCREMENTAL_LINK_LTO
3092 ? "lto1-inclink" : "lto1-lto");
3093 if (flag_wpa)
3094 setproctitle ("lto1-wpa");
3095 if (flag_ltrans)
3096 setproctitle ("lto1-ltrans");
3097 }
3098
3099
3100 /* Initialize the LTO front end. */
3101
3102 void
3103 lto_fe_init (void)
3104 {
3105 lto_process_name ();
3106 lto_streamer_hooks_init ();
3107 lto_reader_init ();
3108 lto_set_in_hooks (NULL, get_section_data, free_section_data);
3109 memset (&lto_stats, 0, sizeof (lto_stats));
3110 bitmap_obstack_initialize (NULL);
3111 gimple_register_cfg_hooks ();
3112 #ifndef ACCEL_COMPILER
3113 unsigned char *table
3114 = ggc_vec_alloc<unsigned char> (MAX_MACHINE_MODE);
3115 for (int m = 0; m < MAX_MACHINE_MODE; m++)
3116 table[m] = m;
3117 lto_mode_identity_table = table;
3118 #endif
3119 }
3120
3121 #include "gt-lto-lto-common.h"