/* Basic IPA optimizations and utilities.
   Copyright (C) 2003-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "tm.h"
40e23961
MC
24#include "alias.h"
25#include "symtab.h"
26#include "options.h"
4d648807 27#include "tree.h"
40e23961 28#include "fold-const.h"
d8a2d370
DN
29#include "calls.h"
30#include "stringpool.h"
60393bbc
AM
31#include "predict.h"
32#include "basic-block.h"
c582198b 33#include "plugin-api.h"
c582198b 34#include "hard-reg-set.h"
c582198b
AM
35#include "function.h"
36#include "ipa-ref.h"
37#include "cgraph.h"
38#include "tree-pass.h"
2fb9a547 39#include "gimple-expr.h"
45b0be94 40#include "gimplify.h"
4a444e58 41#include "flags.h"
9e97ff61
JH
42#include "target.h"
43#include "tree-iterator.h"
af8bca3c 44#include "ipa-utils.h"
c582198b 45#include "alloc-pool.h"
dd912cb8 46#include "symbol-summary.h"
c582198b 47#include "ipa-prop.h"
04142cc3 48#include "ipa-inline.h"
0208f7da
JH
49#include "tree-inline.h"
50#include "profile.h"
51#include "params.h"
2b5f0895
XDL
52#include "internal-fn.h"
53#include "tree-ssa-alias.h"
54#include "gimple.h"
55#include "dbgcnt.h"
ca31b95f 56
/* Return true when NODE has ADDR reference.  */

static bool
has_addr_references_p (struct cgraph_node *node,
		       void *data ATTRIBUTE_UNUSED)
{
  int i;
  struct ipa_ref *ref = NULL;

  for (i = 0; node->iterate_referring (i, ref); i++)
    if (ref->use == IPA_REF_ADDR)
      return true;
  return false;
}

/* Look for all functions inlined to NODE and update their inlined_to pointers
   to INLINED_TO.  */

static void
update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->global.inlined_to)
      {
	e->callee->global.inlined_to = inlined_to;
	update_inlined_to_pointer (e->callee, inlined_to);
      }
}

/* Add symtab NODE to queue starting at FIRST.

   The queue is linked via AUX pointers and terminated by pointer to 1.
   We enqueue nodes on two occasions: when we find them reachable or when we
   find their bodies needed for further cloning.  In the second case we mark
   them by pointer to 2 after processing so they are re-queued when they
   become reachable.  */

static void
enqueue_node (symtab_node *node, symtab_node **first,
	      hash_set<symtab_node *> *reachable)
{
  /* Node is still in queue; do nothing.  */
  if (node->aux && node->aux != (void *) 2)
    return;
  /* Node was already processed as unreachable, re-enqueue
     only if it became reachable now.  */
  if (node->aux == (void *) 2 && !reachable->contains (node))
    return;
  node->aux = *first;
  *first = node;
}

/* Process references.  */

static void
process_references (symtab_node *snode,
		    symtab_node **first,
		    bool before_inlining_p,
		    hash_set<symtab_node *> *reachable)
{
  int i;
  struct ipa_ref *ref = NULL;
  for (i = 0; snode->iterate_reference (i, ref); i++)
    {
      symtab_node *node = ref->referred;
      symtab_node *body = node->ultimate_alias_target ();

      if (node->definition && !node->in_other_partition
	  && ((!DECL_EXTERNAL (node->decl) || node->alias)
	      || (((before_inlining_p
		    && ((TREE_CODE (node->decl) != FUNCTION_DECL
			 && optimize)
			|| (TREE_CODE (node->decl) == FUNCTION_DECL
			    && opt_for_fn (body->decl, optimize))
			|| (symtab->state < IPA_SSA
			    && lookup_attribute
				 ("always_inline",
				  DECL_ATTRIBUTES (body->decl))))))
		   /* We use variable constructors during late compilation for
		      constant folding.  Keep references alive so partitioning
		      knows about potential references.  */
		   || (TREE_CODE (node->decl) == VAR_DECL
		       && flag_wpa
		       && ctor_for_folding (node->decl)
			  != error_mark_node))))
	{
	  /* Be sure that we will not optimize out alias target
	     body.  */
	  if (DECL_EXTERNAL (node->decl)
	      && node->alias
	      && before_inlining_p)
	    reachable->add (body);
	  reachable->add (node);
	}
      enqueue_node (node, first, reachable);
    }
}

/* EDGE is a polymorphic call.  If BEFORE_INLINING_P is set, mark
   all its potential targets as reachable to permit later inlining if
   devirtualization happens.  After inlining still keep their declarations
   around, so we can devirtualize to a direct call.

   Also try to make trivial devirtualization when no or only one target is
   possible.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
			       struct cgraph_edge *edge,
			       symtab_node **first,
			       hash_set<symtab_node *> *reachable,
			       bool before_inlining_p)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *> targets
    = possible_polymorphic_call_targets
	(edge, &final, &cache_token);

  if (!reachable_call_targets->add (cache_token))
    {
      for (i = 0; i < targets.length (); i++)
	{
	  struct cgraph_node *n = targets[i];

	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE
	      && type_in_anonymous_namespace_p
		   (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl))))
	    continue;

	  symtab_node *body = n->function_symbol ();

	  /* Prior to inlining, keep alive the bodies of possible targets for
	     devirtualization.  */
	  if (n->definition
	      && (before_inlining_p
		  && opt_for_fn (body->decl, optimize)
		  && opt_for_fn (body->decl, flag_devirtualize)))
	    {
	      /* Be sure that we will not optimize out alias target
		 body.  */
	      if (DECL_EXTERNAL (n->decl)
		  && n->alias
		  && before_inlining_p)
		reachable->add (body);
	      reachable->add (n);
	    }
	  /* Even after inlining we want to keep the possible targets in the
	     boundary, so late passes can still produce direct call even if
	     the chance for inlining is lost.  */
	  enqueue_node (n, first, reachable);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivations)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target, *node = edge->caller;
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    target = cgraph_node::get_create
		       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

	  if (dump_enabled_p ())
	    {
	      location_t locus;
	      if (edge->call_stmt)
		locus = gimple_location (edge->call_stmt);
	      else
		locus = UNKNOWN_LOCATION;
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
			       "devirtualizing call in %s/%i to %s/%i\n",
			       edge->caller->name (), edge->caller->order,
			       target->name (),
			       target->order);
	    }
	  edge = edge->make_direct (target);
	  if (inline_summaries)
	    inline_update_overall_summary (node);
	  else if (edge->call_stmt)
	    {
	      edge->redirect_call_stmt_to_callee ();

	      /* Call to __builtin_unreachable shouldn't be instrumented.  */
	      if (!targets.length ())
		gimple_call_set_with_bounds (edge->call_stmt, false);
	    }
	}
    }
}

/* Perform reachability analysis and reclaim all unreachable nodes.

   The algorithm is basically mark&sweep but with some extra refinements:

   - reachable extern inline functions need special handling; the bodies need
     to stay in memory until inlining in the hope that they will be inlined.
     After inlining we release their bodies and turn them into unanalyzed
     nodes even when they are reachable.

   - virtual functions are kept in the callgraph even if they seem unreachable,
     in the hope that calls to them will be devirtualized.

     Again we remove them after inlining.  In late optimization some
     devirtualization may happen, but it is not important since we won't inline
     the call.  In theory early opts and IPA should work out all important cases.

   - virtual clones need the bodies of their origins for later materialization;
     this means that we want to keep the body even if the origin is unreachable
     otherwise.  To keep origins from sitting in the callgraph and being
     walked by IPA passes, we turn them into unanalyzed nodes with body
     defined.

     We maintain the set of function declarations whose bodies need to stay in
     body_needed_for_clonning.

     Inline clones represent a special case: their declaration matches the
     declaration of the origin, and cgraph_remove_node already knows how to
     reshape the callgraph and preserve the body when an offline copy of a
     function or an inline clone is being removed.

   - C++ virtual tables keyed to other units are represented as DECL_EXTERNAL
     variables with DECL_INITIAL set.  We finalize these and keep reachable
     ones around for constant folding purposes.  After inlining we however
     stop walking their references to let everything static referenced by them
     be removed when it is otherwise unreachable.

   We maintain a queue of both reachable symbols (i.e. defined symbols that need
   to stay) and symbols that are in the boundary (i.e. external symbols
   referenced by reachable symbols or origins of clones).  The queue is
   represented as a linked list by AUX pointers terminated by 1.

   At the end we keep all reachable symbols.  For symbols in the boundary we
   always turn the definition into a declaration, but we may keep the function
   body around based on body_needed_for_clonning.

   All symbols that enter the queue have AUX pointer non-zero and are in the
   boundary.  Pointer set REACHABLE is used to track reachable symbols.

   Every symbol can be visited twice - once as part of the boundary and once
   as a real reachable symbol.  enqueue_node needs to decide whether the
   node needs to be re-queued for second processing.  For this purpose
   we set the AUX pointer of processed symbols in the boundary to constant 2.  */

bool
symbol_table::remove_unreachable_nodes (FILE *file)
{
  symtab_node *first = (symtab_node *) (void *) 1;
  struct cgraph_node *node, *next;
  varpool_node *vnode, *vnext;
  bool changed = false;
  hash_set<symtab_node *> reachable;
  hash_set<tree> body_needed_for_clonning;
  hash_set<void *> reachable_call_targets;
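  /* Everything processed while the symbol table is still in an early state
     counts as "before inlining": up to IPA_SSA when not optimizing, up to
     IPA_SSA_AFTER_INLINING otherwise.  */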
  bool before_inlining_p = symtab->state < (!optimize ? IPA_SSA
					    : IPA_SSA_AFTER_INLINING);

  timevar_push (TV_IPA_UNREACHABLE);
  build_type_inheritance_graph ();
  if (file)
    fprintf (file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
  FOR_EACH_FUNCTION (node)
    gcc_assert (!node->aux);
  FOR_EACH_VARIABLE (vnode)
    gcc_assert (!vnode->aux);
#endif
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with master clone and thus
     cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on them.  */
  FOR_EACH_FUNCTION (node)
    {
      node->used_as_abstract_origin = false;
      if (node->definition
	  && !node->global.inlined_to
	  && !node->in_other_partition
	  && !node->can_remove_if_no_direct_calls_and_refs_p ())
	{
	  gcc_assert (!node->global.inlined_to);
	  reachable.add (node);
	  enqueue_node (node, &first, &reachable);
	}
      else
	gcc_assert (!node->aux);
    }

  /* Mark variables that are obviously needed.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (!vnode->can_remove_if_no_refs_p()
	&& !vnode->in_other_partition)
      {
	reachable.add (vnode);
	enqueue_node (vnode, &first, &reachable);
      }

  /* Perform reachability analysis.  */
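  /* Pop symbols off the AUX-linked worklist until only the terminating
     pointer 1 is left (see enqueue_node).  */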
  while (first != (symtab_node *) (void *) 1)
    {
      bool in_boundary_p = !reachable.contains (first);
      symtab_node *node = first;

      first = (symtab_node *) first->aux;

      /* If we are processing symbol in boundary, mark its AUX pointer for
	 possible later re-processing in enqueue_node.  */
      if (in_boundary_p)
	{
	  node->aux = (void *) 2;
	  if (node->alias && node->analyzed)
	    enqueue_node (node->get_alias_target (), &first, &reachable);
	}
      else
	{
	  if (TREE_CODE (node->decl) == FUNCTION_DECL
	      && DECL_ABSTRACT_ORIGIN (node->decl))
	    {
	      struct cgraph_node *origin_node
		= cgraph_node::get (DECL_ABSTRACT_ORIGIN (node->decl));
	      if (origin_node && !origin_node->used_as_abstract_origin)
		{
		  origin_node->used_as_abstract_origin = true;
		  gcc_assert (!origin_node->prev_sibling_clone);
		  gcc_assert (!origin_node->next_sibling_clone);
		  for (cgraph_node *n = origin_node->clones; n;
		       n = n->next_sibling_clone)
		    if (n->decl == DECL_ABSTRACT_ORIGIN (node->decl))
		      n->used_as_abstract_origin = true;
		}
	    }
	  /* If any symbol in a comdat group is reachable, force
	     all externally visible symbols in the same comdat
	     group to be reachable as well.  Comdat-local symbols
	     can be discarded if all uses were inlined.  */
	  if (node->same_comdat_group)
	    {
	      symtab_node *next;
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->comdat_local_p ()
		    && !reachable.add (next))
		  enqueue_node (next, &first, &reachable);
	    }
	  /* Mark references as reachable.  */
	  process_references (node, &first, before_inlining_p, &reachable);
	}

      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  /* Mark the callees reachable unless they are direct calls to extern
	     inline functions we decided to not inline.  */
	  if (!in_boundary_p)
	    {
	      struct cgraph_edge *e;
	      /* Keep alive possible targets for devirtualization.  */
	      if (opt_for_fn (cnode->decl, optimize)
		  && opt_for_fn (cnode->decl, flag_devirtualize))
		{
		  struct cgraph_edge *next;
		  for (e = cnode->indirect_calls; e; e = next)
		    {
		      next = e->next_callee;
		      if (e->indirect_info->polymorphic)
			walk_polymorphic_call_targets (&reachable_call_targets,
						       e, &first, &reachable,
						       before_inlining_p);
		    }
		}
	      for (e = cnode->callees; e; e = e->next_callee)
		{
		  symtab_node *body = e->callee->function_symbol ();
		  if (e->callee->definition
		      && !e->callee->in_other_partition
		      && (!e->inline_failed
			  || !DECL_EXTERNAL (e->callee->decl)
			  || e->callee->alias
			  || (before_inlining_p
			      && (opt_for_fn (body->decl, optimize)
				  || (symtab->state < IPA_SSA
				      && lookup_attribute
					   ("always_inline",
					    DECL_ATTRIBUTES (body->decl)))))))
		    {
		      /* Be sure that we will not optimize out alias target
			 body.  */
		      if (DECL_EXTERNAL (e->callee->decl)
			  && e->callee->alias
			  && before_inlining_p)
			reachable.add (body);
		      reachable.add (e->callee);
		    }
		  enqueue_node (e->callee, &first, &reachable);
		}

	      /* When an inline clone exists, mark the body to be preserved
		 so that when removing the offline copy of the function we
		 don't kill it.  */
	      if (cnode->global.inlined_to)
		body_needed_for_clonning.add (cnode->decl);

	      /* For instrumentation clones we always need original
		 function node for proper LTO privatization.  */
	      if (cnode->instrumentation_clone
		  && cnode->definition)
		{
		  gcc_assert (cnode->instrumented_version || in_lto_p);
		  if (cnode->instrumented_version)
		    {
		      enqueue_node (cnode->instrumented_version, &first,
				    &reachable);
		      reachable.add (cnode->instrumented_version);
		    }
		}

	      /* For non-inline clones, force their origins to the boundary and
		 ensure that the body is not removed.  */
	      while (cnode->clone_of)
		{
		  bool noninline = cnode->clone_of->decl != cnode->decl;
		  cnode = cnode->clone_of;
		  if (noninline)
		    {
		      body_needed_for_clonning.add (cnode->decl);
		      enqueue_node (cnode, &first, &reachable);
		    }
		}

	    }
	  else if (cnode->thunk.thunk_p)
	    enqueue_node (cnode->callees->callee, &first, &reachable);

	  /* If any reachable function has simd clones, mark them as
	     reachable as well.  */
	  if (cnode->simd_clones)
	    {
	      cgraph_node *next;
	      for (next = cnode->simd_clones;
		   next;
		   next = next->simdclone->next_clone)
		if (in_boundary_p
		    || !reachable.add (next))
		  enqueue_node (next, &first, &reachable);
	    }
	}
      /* When we see a constructor of an external variable, keep referred nodes
	 in the boundary.  This will also hold initializers of the external
	 vars NODE refers to.  */
      varpool_node *vnode = dyn_cast <varpool_node *> (node);
      if (vnode
	  && DECL_EXTERNAL (node->decl)
	  && !vnode->alias
	  && in_boundary_p)
	{
	  struct ipa_ref *ref = NULL;
	  for (int i = 0; node->iterate_reference (i, ref); i++)
	    enqueue_node (ref->referred, &first, &reachable);
	}
    }

  /* Remove unreachable functions.  */
  for (node = first_function (); node; node = next)
    {
      next = next_function (node);

      /* If node is not needed at all, remove it.  */
      if (!node->aux)
	{
	  if (file)
	    fprintf (file, " %s/%i", node->name (), node->order);
	  node->remove ();
	  changed = true;
	}
      /* If node is unreachable, remove its body.  */
      else if (!reachable.contains (node))
	{
	  /* We keep definitions of thunks and aliases in the boundary so
	     we can walk to the ultimate alias targets and function symbols
	     reliably.  */
	  if (node->alias || node->thunk.thunk_p)
	    ;
	  else if (!body_needed_for_clonning.contains (node->decl)
		   && !node->alias && !node->thunk.thunk_p)
	    node->release_body ();
	  else if (!node->clone_of)
	    gcc_assert (in_lto_p || DECL_RESULT (node->decl));
	  if (node->definition && !node->alias && !node->thunk.thunk_p)
	    {
	      if (file)
		fprintf (file, " %s/%i", node->name (), node->order);
	      node->body_removed = true;
	      node->analyzed = false;
	      node->definition = false;
	      node->cpp_implicit_alias = false;
	      node->alias = false;
	      node->thunk.thunk_p = false;
	      node->weakref = false;
	      /* After early inlining we drop always_inline attributes on
		 bodies of functions that are still referenced (have their
		 address taken).  */
	      DECL_ATTRIBUTES (node->decl)
		= remove_attribute ("always_inline",
				    DECL_ATTRIBUTES (node->decl));
	      if (!node->in_other_partition)
		node->local.local = false;
	      node->remove_callees ();
	      node->remove_all_references ();
	      changed = true;
	      if (node->thunk.thunk_p
		  && node->thunk.add_pointer_bounds_args)
		{
		  node->thunk.thunk_p = false;
		  node->thunk.add_pointer_bounds_args = false;
		}
	    }
	}
      else
	gcc_assert (node->clone_of || !node->has_gimple_body_p ()
		    || in_lto_p || DECL_RESULT (node->decl));
    }

  /* Inline clones might be kept around so that materializing them allows
     further cloning.  If the function the clone is inlined into is removed,
     we need to turn it into a normal clone.  */
  FOR_EACH_FUNCTION (node)
    {
      if (node->global.inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->global.inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      node->aux = NULL;
    }

  /* Remove unreachable variables.  */
  if (file)
    fprintf (file, "\nReclaiming variables:");
  for (vnode = first_variable (); vnode; vnode = vnext)
    {
      vnext = next_variable (vnode);
      if (!vnode->aux
	  /* For can_refer_decl_in_current_unit_p we want to track for
	     all external variables if they are defined in other partition
	     or not.  */
	  && (!flag_ltrans || !DECL_EXTERNAL (vnode->decl)))
	{
	  struct ipa_ref *ref = NULL;

	  /* First remove the aliases, so varpool::remove can possibly lookup
	     the constructor and save it for future use.  */
	  while (vnode->iterate_direct_aliases (0, ref))
	    {
	      if (file)
		fprintf (file, " %s/%i", ref->referred->name (),
			 ref->referred->order);
	      ref->referring->remove ();
	    }
	  if (file)
	    fprintf (file, " %s/%i", vnode->name (), vnode->order);
	  vnext = next_variable (vnode);
	  vnode->remove ();
	  changed = true;
	}
      else if (!reachable.contains (vnode) && !vnode->alias)
	{
	  tree init;
	  if (vnode->definition)
	    {
	      if (file)
		fprintf (file, " %s", vnode->name ());
	      changed = true;
	    }
	  /* Keep body if it may be useful for constant folding.  */
	  if ((init = ctor_for_folding (vnode->decl)) == error_mark_node
	      && !POINTER_BOUNDS_P (vnode->decl))
	    vnode->remove_initializer ();
	  else
	    DECL_INITIAL (vnode->decl) = init;
	  vnode->body_removed = true;
	  vnode->definition = false;
	  vnode->analyzed = false;
	  vnode->aux = NULL;

	  vnode->remove_from_same_comdat_group ();

	  vnode->remove_all_references ();
	}
      else
	vnode->aux = NULL;
    }

  /* Now update address_taken flags and try to promote functions to be local.  */
  if (file)
    fprintf (file, "\nClearing address taken flags:");
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->address_taken
	&& !node->used_from_other_partition)
      {
	if (!node->call_for_symbol_and_aliases
	      (has_addr_references_p, NULL, true)
	    && (!node->instrumentation_clone
		|| !node->instrumented_version
		|| !node->instrumented_version->address_taken))
	  {
	    if (file)
	      fprintf (file, " %s", node->name ());
	    node->address_taken = false;
	    changed = true;
	    if (node->local_p ())
	      {
		node->local.local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }
  if (file)
    fprintf (file, "\n");

#ifdef ENABLE_CHECKING
  symtab_node::verify_symtab_nodes ();
#endif

  /* If we removed something, perhaps profile could be improved.  */
  if (changed && optimize && inline_edge_summary_vec.exists ())
    FOR_EACH_DEFINED_FUNCTION (node)
      ipa_propagate_frequency (node);

  timevar_pop (TV_IPA_UNREACHABLE);
  return changed;
}

/* Process references to VNODE and set flags WRITTEN, ADDRESS_TAKEN, READ
   as needed; also clear EXPLICIT_REFS if the references to the given variable
   do not need to be explicit.  */

void
process_references (varpool_node *vnode,
		    bool *written, bool *address_taken,
		    bool *read, bool *explicit_refs)
{
  int i;
  struct ipa_ref *ref;

  if (!vnode->all_refs_explicit_p ()
      || TREE_THIS_VOLATILE (vnode->decl))
    *explicit_refs = false;

  for (i = 0; vnode->iterate_referring (i, ref)
	      && *explicit_refs && (!*written || !*address_taken || !*read); i++)
    switch (ref->use)
      {
      case IPA_REF_ADDR:
	*address_taken = true;
	break;
      case IPA_REF_LOAD:
	*read = true;
	break;
      case IPA_REF_STORE:
	*written = true;
	break;
      case IPA_REF_ALIAS:
	process_references (dyn_cast<varpool_node *> (ref->referring), written,
			    address_taken, read, explicit_refs);
	break;
      case IPA_REF_CHKP:
	gcc_unreachable ();
      }
}

/* Set TREE_READONLY bit.  */

bool
set_readonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  TREE_READONLY (vnode->decl) = true;
  return false;
}

/* Set writeonly bit and clear the initializer, since it will not be needed.  */

bool
set_writeonly_bit (varpool_node *vnode, void *data)
{
  vnode->writeonly = true;
  if (optimize)
    {
      DECL_INITIAL (vnode->decl) = NULL;
      if (!vnode->alias)
	{
	  if (vnode->num_references ())
	    *(bool *) data = true;
	  vnode->remove_all_references ();
	}
    }
  return false;
}

/* Clear addressable bit of VNODE.  */

bool
clear_addressable_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  vnode->address_taken = false;
  TREE_ADDRESSABLE (vnode->decl) = 0;
  return false;
}

/* Discover variables that have their address no longer taken or that are
   read-only and update their flags.

   Return true when unreachable symbol removal should be done.

   FIXME: This cannot be done in between gimplify and omp_expand since
   the readonly flag plays a role in what is shared and what is not.  Currently
   we do this transformation as part of whole program visibility and re-do it
   at the ipa-reference pass (to take cloning into account), but it would
   make sense to do it before early optimizations.  */

bool
ipa_discover_readonly_nonaddressable_vars (void)
{
  bool remove_p = false;
  varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
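  /* Skip aliases; for other variables, only bother when at least one flag can
     still be improved: the address-taken bit can be dropped, or the variable
     is not yet known to be write-only or read-only.  */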
  FOR_EACH_VARIABLE (vnode)
    if (!vnode->alias
	&& (TREE_ADDRESSABLE (vnode->decl)
	    || !vnode->writeonly
	    || !TREE_READONLY (vnode->decl)))
      {
	bool written = false;
	bool address_taken = false;
	bool read = false;
	bool explicit_refs = true;

	process_references (vnode, &written, &address_taken, &read,
			    &explicit_refs);
	if (!explicit_refs)
	  continue;
	if (!address_taken)
	  {
	    if (TREE_ADDRESSABLE (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (non-addressable)", vnode->name ());
	    vnode->call_for_symbol_and_aliases (clear_addressable_bit, NULL,
						true);
	  }
	if (!address_taken && !written
	    /* Making variable in explicit section readonly can cause section
	       type conflict.
	       See e.g. gcc.c-torture/compile/pr23237.c */
	    && vnode->get_section () == NULL)
	  {
	    if (!TREE_READONLY (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (read-only)", vnode->name ());
	    vnode->call_for_symbol_and_aliases (set_readonly_bit, NULL, true);
	  }
	if (!vnode->writeonly && !read && !address_taken && written)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (write-only)", vnode->name ());
	    vnode->call_for_symbol_and_aliases (set_writeonly_bit, &remove_p,
						true);
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
  return remove_p;
}

/* Free inline summary.  */

namespace {

const pass_data pass_data_ipa_free_inline_summary =
{
  SIMPLE_IPA_PASS, /* type */
  "free-inline-summary", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_FREE_INLINE_SUMMARY, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  /* Early optimizations may make functions unreachable.  We cannot
     remove unreachable functions as part of the early opts pass because
     TODOs are run before subpasses.  Do it here.  */
  ( TODO_remove_functions | TODO_dump_symtab ), /* todo_flags_finish */
};

class pass_ipa_free_inline_summary : public simple_ipa_opt_pass
{
public:
  pass_ipa_free_inline_summary (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_free_inline_summary, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      inline_free_summary ();
      return 0;
    }

}; // class pass_ipa_free_inline_summary

} // anon namespace

simple_ipa_opt_pass *
make_pass_ipa_free_inline_summary (gcc::context *ctxt)
{
  return new pass_ipa_free_inline_summary (ctxt);
}

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
   (for chkp static vars constructor) or 'B' (for chkp static bounds
   constructor).  BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.

   FINAL specifies whether the externally visible name for collect2 should
   be produced.  */

static void
cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  if (final)
    sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  else
    /* Produce a sane name, but one not recognizable by collect2, just in
       case we fail to inline the function.  */
    sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  decl = build_decl (input_location, FUNCTION_DECL, name,
		     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
			RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  if (!targetm.have_ctors_dtors && final)
    {
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

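  /* Encode WHICH into the tree flags: mark the new function as a static
     constructor or destructor of the requested kind and record its
     priority.  */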
  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'P':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("chkp ctor"),
					  NULL,
					  NULL_TREE);
      decl_init_priority_insert (decl, priority);
      break;
    case 'B':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("bnd_legacy"),
					  NULL,
					  NULL_TREE);
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_node::add_new_function (decl, false);

  set_cfun (NULL);
  current_function_decl = NULL;
}

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
   (for chkp static vars constructor) or 'B' (for chkp static bounds
   constructor).  BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  cgraph_build_static_cdtor_1 (which, body, priority, false);
}
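
/* For example, a pass that wants code run at program startup can hand the
   GENERIC statements it built to this function.  The call below is only an
   illustration: BODY stands for some STATEMENT_LIST built by the caller and
   DEFAULT_INIT_PRIORITY is used as the priority:

     cgraph_build_static_cdtor ('I', body, DEFAULT_INIT_PRIORITY);  */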

/* A vector of FUNCTION_DECLs declared as static constructors.  */
static vec<tree> static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors.  */
static vec<tree> static_dtors;

/* When the target does not have ctors and dtors, we call all constructors
   and destructors through a special initialization/destruction function
   recognized by collect2.

   When we are going to build this function, collect all constructors and
   destructors and turn them into normal functions.  */

static void
record_cdtor_fn (struct cgraph_node *node)
{
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    static_ctors.safe_push (node->decl);
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    static_dtors.safe_push (node->decl);
  node = cgraph_node::get (node->decl);
  DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
}

/* Define global constructors/destructor functions for the CDTORS, of
   which there are LEN.  The CDTORS are sorted by initialization
   priority.  If CTOR_P is true, these are constructors; otherwise,
   they are destructors.  */

static void
build_cdtor (bool ctor_p, vec<tree> cdtors)
{
  size_t i, j;
  size_t len = cdtors.length ();

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      j = i;
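      /* Advance J over the run of cdtors that share the same priority as
	 cdtors[i].  */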
      do
	{
	  priority_type p;
	  fn = cdtors[j];
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  if (j == i)
	    priority = p;
	  else if (p != priority)
	    break;
	  j++;
	}
      while (j < len);

      /* When there is only one cdtor and target supports them, do nothing.  */
      if (j == i + 1
	  && targetm.have_ctors_dtors)
	{
	  i++;
	  continue;
	}
      /* Output calls to the batch of constructors/destructors that share
	 this initialization priority.  */
      for (; i < j; i++)
	{
	  tree call;
	  fn = cdtors[i];
	  call = build_call_expr (fn, 0);
	  if (ctor_p)
	    DECL_STATIC_CONSTRUCTOR (fn) = 0;
	  else
	    DECL_STATIC_DESTRUCTOR (fn) = 0;
	  /* We do not want to optimize away pure/const calls here.
	     When optimizing, these should be already removed, when not
	     optimizing, we want user to be able to breakpoint in them.  */
	  TREE_SIDE_EFFECTS (call) = 1;
	  append_to_statement_list (call, &body);
	}
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the functions of like
	 priority.  */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true);
    }
}

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static constructors.  DECL_INIT_PRIORITY is
   used to determine the sort order.  */

static int
compare_ctor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *) p1;
  f2 = *(const tree *) p2;
  priority1 = DECL_INIT_PRIORITY (f1);
  priority2 = DECL_INIT_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  Constructors are executed in backward
       order to make LTO initialize libraries first.  */
    return DECL_UID (f2) - DECL_UID (f1);
}

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static destructors.  DECL_FINI_PRIORITY is
   used to determine the sort order.  */

static int
compare_dtor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *) p1;
  f2 = *(const tree *) p2;
  priority1 = DECL_FINI_PRIORITY (f1);
  priority2 = DECL_FINI_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  */
    return DECL_UID (f1) - DECL_UID (f2);
}

/* Generate functions to call static constructors and destructors
   for targets that do not support .ctors/.dtors sections.  These
   functions have magic names which are detected by collect2.  */

static void
build_cdtor_fns (void)
{
  if (!static_ctors.is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      static_ctors.qsort (compare_ctor);
      build_cdtor (/*ctor_p=*/true, static_ctors);
    }

  if (!static_dtors.is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      static_dtors.qsort (compare_dtor);
      build_cdtor (/*ctor_p=*/false, static_dtors);
    }
}

/* Look for constructors and destructors and produce functions calling them.
   This is needed for targets not supporting ctors or dtors, but we perform the
   transformation also at linktime to merge possibly numerous
   constructors/destructors into a single function to improve code locality and
   reduce size.  */

static unsigned int
ipa_cdtor_merge (void)
{
  struct cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    if (DECL_STATIC_CONSTRUCTOR (node->decl)
	|| DECL_STATIC_DESTRUCTOR (node->decl))
       record_cdtor_fn (node);
  build_cdtor_fns ();
  static_ctors.release ();
  static_dtors.release ();
  return 0;
}

namespace {

const pass_data pass_data_ipa_cdtor_merge =
{
  IPA_PASS, /* type */
  "cdtor", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_cdtor_merge : public ipa_opt_pass_d
{
public:
  pass_ipa_cdtor_merge (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_cdtor_merge, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_cdtor_merge (); }

}; // class pass_ipa_cdtor_merge

bool
pass_ipa_cdtor_merge::gate (function *)
{
  /* Perform the pass when we have no ctors/dtors support
     or at LTO time to merge multiple constructors into single
     function.  */
  return !targetm.have_ctors_dtors || (optimize && in_lto_p);
}

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_cdtor_merge (gcc::context *ctxt)
{
  return new pass_ipa_cdtor_merge (ctxt);
}

/* Invalid pointer representing BOTTOM for single user dataflow.  */
#define BOTTOM ((cgraph_node *)(size_t) 2)

/* Meet operation for single user dataflow.
   Here we want to associate each variable with a single function that may
   access it.

   FUNCTION is the current single user of the variable VAR.
   The lattice is stored in SINGLE_USER_MAP.

   We represent:
     - TOP by no entry in SINGLE_USER_MAP
     - BOTTOM by BOTTOM in AUX pointer (to save lookups)
     - known single user by cgraph pointer in SINGLE_USER_MAP.  */

cgraph_node *
meet (cgraph_node *function, varpool_node *var,
      hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  struct cgraph_node *user, **f;

  if (var->aux == BOTTOM)
    return BOTTOM;

  f = single_user_map.get (var);
  if (!f)
    return function;
  user = *f;
  if (!function)
    return user;
  else if (function != user)
    return BOTTOM;
  else
    return function;
}

/* Propagation step of single-use dataflow.

   Check all uses of VNODE and see if they all come from the single function
   FUNCTION.  SINGLE_USER_MAP represents the dataflow lattice.  */

cgraph_node *
propagate_single_user (varpool_node *vnode, cgraph_node *function,
		       hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  int i;
  struct ipa_ref *ref;

  gcc_assert (!vnode->externally_visible);

  /* If node is an alias, first meet with its target.  */
  if (vnode->alias)
    function = meet (function, vnode->get_alias_target (), single_user_map);

  /* Check all users and see if they correspond to a single function.  */
  for (i = 0; vnode->iterate_referring (i, ref) && function != BOTTOM; i++)
    {
      struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
      if (cnode)
	{
	  if (cnode->global.inlined_to)
	    cnode = cnode->global.inlined_to;
	  if (!function)
	    function = cnode;
	  else if (function != cnode)
	    function = BOTTOM;
	}
      else
	function = meet (function, dyn_cast <varpool_node *> (ref->referring),
			 single_user_map);
    }
  return function;
}

/* Pass setting used_by_single_function flag.
   This flag is set on a variable when there is only one function that may
   possibly refer to it.  */

static unsigned int
ipa_single_use (void)
{
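  /* Worklist of variables, chained through their AUX pointers and terminated
     by the sentinel pointer 1; variables whose AUX is BOTTOM are already
     known to have no provable single user.  */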
  varpool_node *first = (varpool_node *) (void *) 1;
  varpool_node *var;
  hash_map<varpool_node *, cgraph_node *> single_user_map;

  FOR_EACH_DEFINED_VARIABLE (var)
    if (!var->all_refs_explicit_p ())
      var->aux = BOTTOM;
    else
      {
	/* Enqueue symbol for dataflow.  */
	var->aux = first;
	first = var;
      }

  /* The actual dataflow.  */

  while (first != (void *) 1)
    {
      cgraph_node *user, *orig_user, **f;

      var = first;
      first = (varpool_node *) first->aux;

      f = single_user_map.get (var);
      if (f)
	orig_user = *f;
      else
	orig_user = NULL;
      user = propagate_single_user (var, orig_user, single_user_map);

      gcc_checking_assert (var->aux != BOTTOM);

      /* If user differs, enqueue all references.  */
      if (user != orig_user)
	{
	  unsigned int i;
	  ipa_ref *ref;

	  single_user_map.put (var, user);

	  /* Enqueue all aliases for re-processing.  */
	  for (i = 0; var->iterate_direct_aliases (i, ref); i++)
	    if (!ref->referring->aux)
	      {
		ref->referring->aux = first;
		first = dyn_cast <varpool_node *> (ref->referring);
	      }
	  /* Enqueue all users for re-processing.  */
	  for (i = 0; var->iterate_reference (i, ref); i++)
	    if (!ref->referred->aux
		&& ref->referred->definition
		&& is_a <varpool_node *> (ref->referred))
	      {
		ref->referred->aux = first;
		first = dyn_cast <varpool_node *> (ref->referred);
	      }

	  /* If user is BOTTOM, just punt on this var.  */
	  if (user == BOTTOM)
	    var->aux = BOTTOM;
	  else
	    var->aux = NULL;
	}
      else
	var->aux = NULL;
    }

  FOR_EACH_DEFINED_VARIABLE (var)
    {
      if (var->aux != BOTTOM)
	{
#ifdef ENABLE_CHECKING
	  /* Not having the single user known means that the VAR is
	     unreachable.  Either someone forgot to remove unreachable
	     variables or the reachability here is wrong.  */

	  gcc_assert (single_user_map.get (var));
#endif
	  if (dump_file)
	    {
	      fprintf (dump_file, "Variable %s/%i is used by single function\n",
		       var->name (), var->order);
	    }
	  var->used_by_single_function = true;
	}
      var->aux = NULL;
    }
  return 0;
}

namespace {

const pass_data pass_data_ipa_single_use =
{
  IPA_PASS, /* type */
  "single-use", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_single_use : public ipa_opt_pass_d
{
public:
  pass_ipa_single_use (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_single_use, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_single_use (); }

}; // class pass_ipa_single_use

bool
pass_ipa_single_use::gate (function *)
{
  return optimize;
}

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_single_use (gcc::context *ctxt)
{
  return new pass_ipa_single_use (ctxt);
}