[gcc.git] / gcc / cgraphunit.c
1 /* Callgraph based intraprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
21
22 /* This module implements the main driver of the compilation process as well
23 as a few basic intraprocedural optimizers.
24
25 The main scope of this file is to act as an interface between the
26 tree-based front ends and the back end (and middle end).
27
28 The front end is supposed to use the following functionality:
29
30 - cgraph_finalize_function
31
32 This function is called once the front end has parsed the whole body of a function
33 and it is certain that neither the function body nor the declaration will change.
34
35 (There is one exception needed for implementing GCC extern inline functions.)
36
37 - cgraph_varpool_finalize_variable
38
39 This function has the same behavior as the above but is used for static
40 variables.
41
42 - cgraph_finalize_compilation_unit
43
44 This function is called once the compilation unit is finalized and it will
45 no longer change.
46
47 In unit-at-a-time mode the call-graph construction and local function
48 analysis take place here. Bodies of unreachable functions are released
49 to conserve memory usage.
50
51 ??? The compilation unit in this point of view should be the compilation
52 unit as defined by the language - for instance the C front end allows multiple
53 compilation units to be parsed at once and it should call this function each
54 time parsing is done so we save memory.
55
56 - cgraph_optimize
57
58 In unit-at-a-time compilation the intraprocedural analysis takes
59 place here. In particular, static functions whose address is never
60 taken are marked as local. The back end can then use this information to
61 modify calling conventions, do better inlining or similar optimizations.
62
63 - cgraph_assemble_pending_functions
64 - cgraph_varpool_assemble_pending_variables
65
66 In non-unit-at-a-time mode these functions can be used to force compilation
67 of functions or variables that are known to be needed at a given stage
68 of compilation.
69
70 - cgraph_mark_needed_node
71 - cgraph_varpool_mark_needed_node
72
73 When a function or variable is referenced in some hidden way (for instance
74 via assembly code and marked by attribute "used"), the call-graph data structure
75 must be updated accordingly by these functions.
76
77 - analyze_expr callback
78
79 This function is responsible for lowering tree nodes not understood by
80 generic code into understandable ones or alternatively for marking the
81 callgraph and varpool nodes referenced by them as needed.
82
83 ??? On tree-ssa, genericizing should take place here and we would avoid
84 the need for these hooks (replacing them by a genericizing hook).
85
86 - expand_function callback
87
88 This function is used to expand a function and pass it to the RTL back end.
89 The front end should not make any assumptions about when this function can be
90 called. In particular cgraph_assemble_pending_functions,
91 cgraph_varpool_assemble_pending_variables, cgraph_finalize_function,
92 cgraph_varpool_finalize_function and cgraph_optimize can cause arbitrary
93 previously finalized functions to be expanded.
94
95 We implement two compilation modes.
96
97 - unit-at-a-time: In this mode analysis of all functions is deferred
98 to cgraph_finalize_compilation_unit and expansion to cgraph_optimize.
99
100 In cgraph_finalize_compilation_unit the reachable functions are
101 analyzed. During analysis the call-graph edges from reachable
102 functions are constructed and their destinations are marked as
103 reachable. References to functions and variables are discovered too,
104 and variables found to be needed are output to the assembly file. Via
105 the mark_referenced call in assemble_variable, functions referenced by
106 static variables are noticed too.
107
108 The intra-procedural information is produced and its existence is
109 indicated by global_info_ready. Once this flag is set it is impossible
110 to change a function from !reachable to reachable, and thus
111 assemble_variable no longer calls mark_referenced.
112
113 Finally the call-graph is topologically sorted and all reachable functions
114 that have not been completely inlined and are not external are output.
115
116 ??? It is possible that a reference to a function or variable is optimized
117 out. We cannot deal with this nicely because the topological order is not
118 suitable for it. For tree-ssa we may consider another pass doing
119 optimization and re-discovering reachable functions.
120
121 ??? Reorganize the code so variables are output very last and only if they
122 really have been referenced by the produced code, so we catch more cases
123 where the reference has been optimized out.
124
125 - non-unit-at-a-time
126
127 All functions and variables are output as early as possible to conserve
128 memory consumption. This may or may not result in less memory used, but
129 it is still needed for some legacy code that relies on a particular ordering
130 of things output from the compiler.
131
132 Varpool data structures are not used and variables are output directly.
133
134 Functions are output early using a call to
135 cgraph_assemble_pending_functions from cgraph_finalize_function. The
136 decision on whether a function is needed is made more conservatively so
137 uninlinable static functions are needed too. During the call-graph
138 construction the edge destinations are not marked as reachable and we
139 rely completely upon assemble_variable to mark them. */
140
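/* Illustrative sketch (not part of GCC): the driver sequence a front end is
   expected to follow with the interface documented above, assuming it has
   collected one FUNCTION_DECL per parsed function in the hypothetical array
   FNDECLS of length N_FNDECLS.  Guarded out so it is never compiled.  */
#if 0
static void
example_frontend_drive_compilation (tree *fndecls, int n_fndecls)
{
  int i;

  /* Hand each finished body to the callgraph code; in unit-at-a-time mode
     this only records the function for later analysis.  */
  for (i = 0; i < n_fndecls; i++)
    cgraph_finalize_function (fndecls[i], /*nested=*/false);

  /* Declare the unit finished; reachable functions are analyzed and
     unreachable bodies are released.  */
  cgraph_finalize_compilation_unit ();

  /* Make the IPA decisions and expand everything marked for output.  */
  cgraph_optimize ();
}
#endif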
141
142 #include "config.h"
143 #include "system.h"
144 #include "coretypes.h"
145 #include "tm.h"
146 #include "tree.h"
147 #include "rtl.h"
148 #include "tree-flow.h"
149 #include "tree-inline.h"
150 #include "langhooks.h"
151 #include "pointer-set.h"
152 #include "toplev.h"
153 #include "flags.h"
154 #include "ggc.h"
155 #include "debug.h"
156 #include "target.h"
157 #include "cgraph.h"
158 #include "diagnostic.h"
159 #include "timevar.h"
160 #include "params.h"
161 #include "fibheap.h"
162 #include "c-common.h"
163 #include "intl.h"
164 #include "function.h"
165 #include "ipa-prop.h"
166 #include "tree-gimple.h"
167 #include "tree-pass.h"
168 #include "output.h"
169
170 static void cgraph_expand_all_functions (void);
171 static void cgraph_mark_functions_to_output (void);
172 static void cgraph_expand_function (struct cgraph_node *);
173 static tree record_reference (tree *, int *, void *);
174 static void cgraph_analyze_function (struct cgraph_node *node);
175
176 /* Records tree nodes seen in record_reference. Simply using
177 walk_tree_without_duplicates doesn't guarantee each node is visited
178 once because it gets a new htab upon each recursive call from
179 record_reference itself. */
180 static struct pointer_set_t *visited_nodes;
181
182 static FILE *cgraph_dump_file;
183
184 /* Determine if function DECL is needed. That is, visible to something
185 either outside this translation unit, something magic in the system
186 configury, or (if not doing unit-at-a-time) to something we haven't
187 seen yet. */
188
189 static bool
190 decide_is_function_needed (struct cgraph_node *node, tree decl)
191 {
192 tree origin;
193 if (MAIN_NAME_P (DECL_NAME (decl))
194 && TREE_PUBLIC (decl))
195 {
196 node->local.externally_visible = true;
197 return true;
198 }
199
200 /* If the user told us it is used, then it must be so. */
201 if (node->local.externally_visible
202 || lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
203 return true;
204
205 /* ??? If the assembler name is set by hand, it is possible to assemble
206 the name later after finalizing the function and the fact is noticed
207 in assemble_name then. This is arguably a bug. */
208 if (DECL_ASSEMBLER_NAME_SET_P (decl)
209 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
210 return true;
211
212 /* If we decided it was needed before, but at the time we didn't have
213 the body of the function available, then it's still needed. We have
214 to go back and re-check its dependencies now. */
215 if (node->needed)
216 return true;
217
218 /* Externally visible functions must be output. The exception is
219 COMDAT functions that must be output only when they are needed. */
220 if ((TREE_PUBLIC (decl) && !flag_whole_program)
221 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
222 return true;
223
224 /* Constructors and destructors are reachable from the runtime by
225 some mechanism. */
226 if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
227 return true;
228
229 if (flag_unit_at_a_time)
230 return false;
231
232 /* If not doing unit at a time, then we'll only defer this function
233 if it's marked for inlining. Otherwise we want to emit it now. */
234
235 /* "extern inline" functions are never output locally. */
236 if (DECL_EXTERNAL (decl))
237 return false;
238 /* Nested functions of an extern inline function shall not be emitted unless
239 we inlined the origin. */
240 for (origin = decl_function_context (decl); origin;
241 origin = decl_function_context (origin))
242 if (DECL_EXTERNAL (origin))
243 return false;
244 /* We want to emit COMDAT functions only when absolutely necessary. */
245 if (DECL_COMDAT (decl))
246 return false;
247 if (!DECL_INLINE (decl)
248 || (!node->local.disregard_inline_limits
249 /* When declared inline, defer even the uninlinable functions.
250 This allows them to be eliminated when unused. */
251 && !DECL_DECLARED_INLINE_P (decl)
252 && (!node->local.inlinable || !cgraph_default_inline_p (node, NULL))))
253 return true;
254
255 return false;
256 }
257
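/* Illustrative sketch (not part of GCC): source-level constructs that the
   checks above classify as "needed" even though nothing in the unit calls
   them directly.  Hypothetical example, guarded out.  */
#if 0
/* Kept alive via attribute "used", e.g. because it is referenced only
   from inline assembly.  */
static void from_asm_only (void) __attribute__ ((used));
static void from_asm_only (void) {}

/* Static constructors are reachable from the runtime, hence needed.  */
static void __attribute__ ((constructor)) init_me (void) {}
#endif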
258 /* Walk the decls we marked as necessary and see if they reference new
259 variables or functions and add them into the worklists. */
260 static bool
261 cgraph_varpool_analyze_pending_decls (void)
262 {
263 bool changed = false;
264 timevar_push (TV_CGRAPH);
265
266 while (cgraph_varpool_first_unanalyzed_node)
267 {
268 tree decl = cgraph_varpool_first_unanalyzed_node->decl;
269
270 cgraph_varpool_first_unanalyzed_node->analyzed = true;
271
272 cgraph_varpool_first_unanalyzed_node = cgraph_varpool_first_unanalyzed_node->next_needed;
273
274 if (DECL_INITIAL (decl))
275 {
276 visited_nodes = pointer_set_create ();
277 walk_tree (&DECL_INITIAL (decl), record_reference, NULL, visited_nodes);
278 pointer_set_destroy (visited_nodes);
279 visited_nodes = NULL;
280 }
281 changed = true;
282 }
283 timevar_pop (TV_CGRAPH);
284 return changed;
285 }
286
287 /* Optimization of function bodies might have rendered some variables
288 unnecessary, so we want to prevent them from being compiled.
289
290 This is done by pruning the queue and keeping only the variables that
291 really appear needed (i.e. they are either externally visible or referenced
292 by a compiled function). Re-doing the reachability analysis on variables
293 brings back the remaining variables referenced by these. */
294 static void
295 cgraph_varpool_remove_unreferenced_decls (void)
296 {
297 struct cgraph_varpool_node *next, *node = cgraph_varpool_nodes_queue;
298
299 cgraph_varpool_reset_queue ();
300
301 if (errorcount || sorrycount)
302 return;
303
304 while (node)
305 {
306 tree decl = node->decl;
307 next = node->next_needed;
308 node->needed = 0;
309
310 if (node->finalized
311 && ((DECL_ASSEMBLER_NAME_SET_P (decl)
312 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
313 || node->force_output
314 || decide_is_variable_needed (node, decl)
315 /* ??? Cgraph does not yet rule the world with an iron hand,
316 and does not control the emission of debug information.
317 After a variable has its DECL_RTL set, we must assume that
318 it may be referenced by the debug information, and we can
319 no longer elide it. */
320 || DECL_RTL_SET_P (decl)))
321 cgraph_varpool_mark_needed_node (node);
322
323 node = next;
324 }
325 /* Make sure we mark alias targets as used targets. */
326 finish_aliases_1 ();
327 cgraph_varpool_analyze_pending_decls ();
328 }
329
330
331 /* When not doing unit-at-a-time, output all functions enqueued.
332 Return true when such functions were found. */
333
334 bool
335 cgraph_assemble_pending_functions (void)
336 {
337 bool output = false;
338
339 if (flag_unit_at_a_time)
340 return false;
341
342 while (cgraph_nodes_queue)
343 {
344 struct cgraph_node *n = cgraph_nodes_queue;
345
346 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
347 n->next_needed = NULL;
348 if (!n->global.inlined_to
349 && !n->alias
350 && !DECL_EXTERNAL (n->decl))
351 {
352 cgraph_expand_function (n);
353 output = true;
354 }
355 }
356
357 return output;
358 }
359 /* As a GCC extension we allow redefinition of the function. The
360 semantics when the two bodies differ are not well defined.
361 We replace the old body with the new body, so in unit-at-a-time mode
362 we always use the new body, while in normal mode we may end up with
363 the old body inlined into some functions and the new body expanded and
364 inlined in others.
365
366 ??? It may make more sense to use one body for inlining and the other
367 body for expanding the function, but this is difficult to do. */
368
369 static void
370 cgraph_reset_node (struct cgraph_node *node)
371 {
372 /* If node->output is set, then this is a unit-at-a-time compilation
373 and we have already begun whole-unit analysis. This is *not*
374 testing for whether we've already emitted the function. That
375 case can be sort-of legitimately seen with real function
376 redefinition errors. I would argue that the front end should
377 never present us with such a case, but don't enforce that for now. */
378 gcc_assert (!node->output);
379
380 /* Reset our data structures so we can analyze the function again. */
381 memset (&node->local, 0, sizeof (node->local));
382 memset (&node->global, 0, sizeof (node->global));
383 memset (&node->rtl, 0, sizeof (node->rtl));
384 node->analyzed = false;
385 node->local.redefined_extern_inline = true;
386 node->local.finalized = false;
387
388 if (!flag_unit_at_a_time)
389 {
390 struct cgraph_node *n;
391
392 for (n = cgraph_nodes; n; n = n->next)
393 if (n->global.inlined_to == node)
394 cgraph_remove_node (n);
395 }
396
397 cgraph_node_remove_callees (node);
398
399 /* We may need to re-queue the node for assembling in case
400 we already processed it and ignored it as not needed. */
401 if (node->reachable && !flag_unit_at_a_time)
402 {
403 struct cgraph_node *n;
404
405 for (n = cgraph_nodes_queue; n; n = n->next_needed)
406 if (n == node)
407 break;
408 if (!n)
409 node->reachable = 0;
410 }
411 }
412
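/* Illustrative sketch (not part of GCC): the redefinition extension the
   comment above describes, using GNU89 "extern inline" semantics.
   Hypothetical example, guarded out.  */
#if 0
extern inline int twice (int x) { return x + x; }  /* inline-only body */
int twice (int x) { return 2 * x; }                /* later redefinition;
                                                      cgraph_reset_node drops
                                                      the analysis of the old
                                                      body */
#endif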
413 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
414 logic in effect. If NESTED is true, then our caller cannot stand to have
415 the garbage collector run at the moment. We would need to either create
416 a new GC context, or just not compile right now. */
417
418 void
419 cgraph_finalize_function (tree decl, bool nested)
420 {
421 struct cgraph_node *node = cgraph_node (decl);
422
423 if (node->local.finalized)
424 cgraph_reset_node (node);
425
426 notice_global_symbol (decl);
427 node->decl = decl;
428 node->local.finalized = true;
429 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
430 if (node->nested)
431 lower_nested_functions (decl);
432 gcc_assert (!node->nested);
433
434 /* If not unit at a time, then we need to create the call graph
435 now, so that called functions can be queued and emitted now. */
436 if (!flag_unit_at_a_time)
437 {
438 cgraph_analyze_function (node);
439 cgraph_decide_inlining_incrementally (node, false);
440 }
441
442 if (decide_is_function_needed (node, decl))
443 cgraph_mark_needed_node (node);
444
445 /* Since we reclaim unreachable nodes at the end of every language
446 level unit, we need to be conservative about possible entry points
447 there. */
448 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
449 cgraph_mark_reachable_node (node);
450
451 /* If not unit at a time, go ahead and emit everything we've found
452 to be reachable at this time. */
453 if (!nested)
454 {
455 if (!cgraph_assemble_pending_functions ())
456 ggc_collect ();
457 }
458
459 /* If we've not yet emitted decl, tell the debug info about it. */
460 if (!TREE_ASM_WRITTEN (decl))
461 (*debug_hooks->deferred_inline_function) (decl);
462
463 /* Possibly warn about unused parameters. */
464 if (warn_unused_parameter)
465 do_warn_unused_parameter (decl);
466 }
467
468 void
469 cgraph_lower_function (struct cgraph_node *node)
470 {
471 if (node->lowered)
472 return;
473 tree_lowering_passes (node->decl);
474 node->lowered = true;
475 }
476
477 /* Walk the tree and mark referenced variables and functions as needed. Called via walk_tree. */
478 static tree
479 record_reference (tree *tp, int *walk_subtrees, void *data)
480 {
481 tree t = *tp;
482
483 switch (TREE_CODE (t))
484 {
485 case VAR_DECL:
486 /* ??? Really, we should mark this decl as *potentially* referenced
487 by this function and re-examine whether the decl is actually used
488 after rtl has been generated. */
489 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
490 {
491 cgraph_varpool_mark_needed_node (cgraph_varpool_node (t));
492 if (lang_hooks.callgraph.analyze_expr)
493 return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees,
494 data);
495 }
496 break;
497
498 case FDESC_EXPR:
499 case ADDR_EXPR:
500 if (flag_unit_at_a_time)
501 {
502 /* Record dereferences to the functions. This makes the
503 functions reachable unconditionally. */
504 tree decl = TREE_OPERAND (*tp, 0);
505 if (TREE_CODE (decl) == FUNCTION_DECL)
506 cgraph_mark_needed_node (cgraph_node (decl));
507 }
508 break;
509
510 default:
511 /* Save some cycles by not walking types and declarations as we
512 won't find anything useful there anyway. */
513 if (IS_TYPE_OR_DECL_P (*tp))
514 {
515 *walk_subtrees = 0;
516 break;
517 }
518
519 if ((unsigned int) TREE_CODE (t) >= LAST_AND_UNUSED_TREE_CODE)
520 return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees, data);
521 break;
522 }
523
524 return NULL;
525 }
526
527 /* Create cgraph edges for function calls inside BODY from NODE. */
528
529 static void
530 cgraph_create_edges (struct cgraph_node *node, tree body)
531 {
532 basic_block bb;
533
534 struct function *this_cfun = DECL_STRUCT_FUNCTION (body);
535 block_stmt_iterator bsi;
536 tree step;
537 visited_nodes = pointer_set_create ();
538
539 /* Reach the trees by walking over the CFG, and note the
540 enclosing basic-blocks in the call edges. */
541 FOR_EACH_BB_FN (bb, this_cfun)
542 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
543 {
544 tree stmt = bsi_stmt (bsi);
545 tree call = get_call_expr_in (stmt);
546 tree decl;
547
548 if (call && (decl = get_callee_fndecl (call)))
549 {
550 cgraph_create_edge (node, cgraph_node (decl), stmt,
551 bb->count,
552 bb->loop_depth);
553 walk_tree (&TREE_OPERAND (call, 1),
554 record_reference, node, visited_nodes);
555 if (TREE_CODE (stmt) == MODIFY_EXPR)
556 walk_tree (&TREE_OPERAND (stmt, 0),
557 record_reference, node, visited_nodes);
558 }
559 else
560 walk_tree (bsi_stmt_ptr (bsi), record_reference, node, visited_nodes);
561 }
562
563 /* Look for initializers of constant variables and private statics. */
564 for (step = DECL_STRUCT_FUNCTION (body)->unexpanded_var_list;
565 step;
566 step = TREE_CHAIN (step))
567 {
568 tree decl = TREE_VALUE (step);
569 if (TREE_CODE (decl) == VAR_DECL
570 && (TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
571 && flag_unit_at_a_time)
572 cgraph_varpool_finalize_decl (decl);
573 else if (TREE_CODE (decl) == VAR_DECL && DECL_INITIAL (decl))
574 walk_tree (&DECL_INITIAL (decl), record_reference, node, visited_nodes);
575 }
576
577 pointer_set_destroy (visited_nodes);
578 visited_nodes = NULL;
579 }
580
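/* Illustrative sketch (not part of GCC): the kinds of statements the walk
   above turns into call edges and varpool references.  Hypothetical
   example, guarded out.  */
#if 0
static int counter;                 /* referenced from a body -> varpool node
                                       marked needed via record_reference */
static int helper (int x) { return x + counter; }

int
example_caller (void)
{
  int (*fp) (int) = helper;  /* ADDR_EXPR in the stmt -> helper marked needed */
  return helper (1)          /* direct call -> cgraph edge with this block's
                                count and loop depth */
         + fp (2);           /* indirect call: no callee fndecl, so no edge */
}
#endif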
581 /* Give initial reasons why inlining would fail. Those get
582 either NULLified or, usually, overwritten by a more precise reason
583 later. */
584 static void
585 initialize_inline_failed (struct cgraph_node *node)
586 {
587 struct cgraph_edge *e;
588
589 for (e = node->callers; e; e = e->next_caller)
590 {
591 gcc_assert (!e->callee->global.inlined_to);
592 gcc_assert (e->inline_failed);
593 if (node->local.redefined_extern_inline)
594 e->inline_failed = N_("redefined extern inline functions are not "
595 "considered for inlining");
596 else if (!node->local.inlinable)
597 e->inline_failed = N_("function not inlinable");
598 else
599 e->inline_failed = N_("function not considered for inlining");
600 }
601 }
602
603 /* Rebuild call edges of the current function after passes that are not aware
604 of cgraph updating. */
605 static void
606 rebuild_cgraph_edges (void)
607 {
608 basic_block bb;
609 struct cgraph_node *node = cgraph_node (current_function_decl);
610 block_stmt_iterator bsi;
611
612 cgraph_node_remove_callees (node);
613
614 node->count = ENTRY_BLOCK_PTR->count;
615
616 FOR_EACH_BB (bb)
617 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
618 {
619 tree stmt = bsi_stmt (bsi);
620 tree call = get_call_expr_in (stmt);
621 tree decl;
622
623 if (call && (decl = get_callee_fndecl (call)))
624 cgraph_create_edge (node, cgraph_node (decl), stmt,
625 bb->count,
626 bb->loop_depth);
627 }
628 initialize_inline_failed (node);
629 gcc_assert (!node->global.inlined_to);
630 }
631
632 struct tree_opt_pass pass_rebuild_cgraph_edges =
633 {
634 NULL, /* name */
635 NULL, /* gate */
636 rebuild_cgraph_edges, /* execute */
637 NULL, /* sub */
638 NULL, /* next */
639 0, /* static_pass_number */
640 0, /* tv_id */
641 PROP_cfg, /* properties_required */
642 0, /* properties_provided */
643 0, /* properties_destroyed */
644 0, /* todo_flags_start */
645 0, /* todo_flags_finish */
646 0 /* letter */
647 };
648
649 /* Verify consistency of the given cgraph node. */
650 void
651 verify_cgraph_node (struct cgraph_node *node)
652 {
653 struct cgraph_edge *e;
654 struct cgraph_node *main_clone;
655 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
656 basic_block this_block;
657 block_stmt_iterator bsi;
658 bool error_found = false;
659
660 timevar_push (TV_CGRAPH_VERIFY);
661 for (e = node->callees; e; e = e->next_callee)
662 if (e->aux)
663 {
664 error ("aux field set for edge %s->%s",
665 cgraph_node_name (e->caller), cgraph_node_name (e->callee));
666 error_found = true;
667 }
668 if (node->count < 0)
669 {
670 error ("Execution count is negative");
671 error_found = true;
672 }
673 for (e = node->callers; e; e = e->next_caller)
674 {
675 if (e->count < 0)
676 {
677 error ("caller edge count is negative");
678 error_found = true;
679 }
680 if (!e->inline_failed)
681 {
682 if (node->global.inlined_to
683 != (e->caller->global.inlined_to
684 ? e->caller->global.inlined_to : e->caller))
685 {
686 error ("inlined_to pointer is wrong");
687 error_found = true;
688 }
689 if (node->callers->next_caller)
690 {
691 error ("multiple inline callers");
692 error_found = true;
693 }
694 }
695 else
696 if (node->global.inlined_to)
697 {
698 error ("inlined_to pointer set for noninline callers");
699 error_found = true;
700 }
701 }
702 if (!node->callers && node->global.inlined_to)
703 {
704 error ("inlined_to pointer is set but no predecesors found");
705 error_found = true;
706 }
707 if (node->global.inlined_to == node)
708 {
709 error ("inlined_to pointer refers to itself");
710 error_found = true;
711 }
712
713 for (main_clone = cgraph_node (node->decl); main_clone;
714 main_clone = main_clone->next_clone)
715 if (main_clone == node)
716 break;
717 if (!main_clone)
718 {
719 error ("node not found in DECL_ASSEMBLER_NAME hash");
720 error_found = true;
721 }
722
723 if (node->analyzed
724 && DECL_SAVED_TREE (node->decl) && !TREE_ASM_WRITTEN (node->decl)
725 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
726 {
727 if (this_cfun->cfg)
728 {
729 /* The nodes we're interested in are never shared, so walk
730 the tree ignoring duplicates. */
731 visited_nodes = pointer_set_create ();
732 /* Reach the trees by walking over the CFG, and note the
733 enclosing basic-blocks in the call edges. */
734 FOR_EACH_BB_FN (this_block, this_cfun)
735 for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
736 {
737 tree stmt = bsi_stmt (bsi);
738 tree call = get_call_expr_in (stmt);
739 tree decl;
740 if (call && (decl = get_callee_fndecl (call)))
741 {
742 struct cgraph_edge *e = cgraph_edge (node, stmt);
743 if (e)
744 {
745 if (e->aux)
746 {
747 error ("shared call_stmt:");
748 debug_generic_stmt (stmt);
749 error_found = true;
750 }
751 if (e->callee->decl != cgraph_node (decl)->decl
752 && e->inline_failed)
753 {
754 error ("edge points to wrong declaration:");
755 debug_tree (e->callee->decl);
756 fprintf (stderr," Instead of:");
757 debug_tree (decl);
758 }
759 e->aux = (void *)1;
760 }
761 else
762 {
763 error ("missing callgraph edge for call stmt:");
764 debug_generic_stmt (stmt);
765 error_found = true;
766 }
767 }
768 }
769 pointer_set_destroy (visited_nodes);
770 visited_nodes = NULL;
771 }
772 else
773 /* No CFG available?! */
774 gcc_unreachable ();
775
776 for (e = node->callees; e; e = e->next_callee)
777 {
778 if (!e->aux)
779 {
780 error ("edge %s->%s has no corresponding call_stmt",
781 cgraph_node_name (e->caller),
782 cgraph_node_name (e->callee));
783 debug_generic_stmt (e->call_stmt);
784 error_found = true;
785 }
786 e->aux = 0;
787 }
788 }
789 if (error_found)
790 {
791 dump_cgraph_node (stderr, node);
792 internal_error ("verify_cgraph_node failed");
793 }
794 timevar_pop (TV_CGRAPH_VERIFY);
795 }
796
797 /* Verify whole cgraph structure. */
798 void
799 verify_cgraph (void)
800 {
801 struct cgraph_node *node;
802
803 if (sorrycount || errorcount)
804 return;
805
806 for (node = cgraph_nodes; node; node = node->next)
807 verify_cgraph_node (node);
808 }
809
810
811 /* Output all variables enqueued to be assembled. */
812 bool
813 cgraph_varpool_assemble_pending_decls (void)
814 {
815 bool changed = false;
816
817 if (errorcount || sorrycount)
818 return false;
819
820 /* EH might mark decls as needed during expansion. This should be safe since
821 we don't create references to new functions, but it should not be used
822 elsewhere. */
823 cgraph_varpool_analyze_pending_decls ();
824
825 while (cgraph_varpool_nodes_queue)
826 {
827 tree decl = cgraph_varpool_nodes_queue->decl;
828 struct cgraph_varpool_node *node = cgraph_varpool_nodes_queue;
829
830 cgraph_varpool_nodes_queue = cgraph_varpool_nodes_queue->next_needed;
831 if (!TREE_ASM_WRITTEN (decl) && !node->alias && !DECL_EXTERNAL (decl))
832 {
833 assemble_variable (decl, 0, 1, 0);
834 /* Local static variables are never seen by check_global_declarations
835 so we need to output debug info by hand. */
836 if (DECL_CONTEXT (decl)
837 && (TREE_CODE (DECL_CONTEXT (decl)) == BLOCK
838 || TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
839 && errorcount == 0 && sorrycount == 0)
840 {
841 timevar_push (TV_SYMOUT);
842 (*debug_hooks->global_decl) (decl);
843 timevar_pop (TV_SYMOUT);
844 }
845 changed = true;
846 }
847 node->next_needed = NULL;
848 }
849 return changed;
850 }
851
852 /* Analyze the function scheduled to be output. */
853 static void
854 cgraph_analyze_function (struct cgraph_node *node)
855 {
856 tree decl = node->decl;
857
858 current_function_decl = decl;
859 push_cfun (DECL_STRUCT_FUNCTION (decl));
860 cgraph_lower_function (node);
861
862 /* First kill forward declaration so reverse inlining works properly. */
863 cgraph_create_edges (node, decl);
864
865 node->local.inlinable = tree_inlinable_function_p (decl);
866 node->local.self_insns = estimate_num_insns (decl);
867 if (node->local.inlinable)
868 node->local.disregard_inline_limits
869 = lang_hooks.tree_inlining.disregard_inline_limits (decl);
870 initialize_inline_failed (node);
871 if (flag_really_no_inline && !node->local.disregard_inline_limits)
872 node->local.inlinable = 0;
873 /* Inlining characteristics are maintained by the cgraph_mark_inline. */
874 node->global.insns = node->local.self_insns;
875
876 node->analyzed = true;
877 pop_cfun ();
878 current_function_decl = NULL;
879 }
880
881 /* Analyze the whole compilation unit once it is parsed completely. */
882
883 void
884 cgraph_finalize_compilation_unit (void)
885 {
886 struct cgraph_node *node;
887 /* Keep track of already processed nodes when called multiple times for
888 intermodule optimization. */
889 static struct cgraph_node *first_analyzed;
890
891 finish_aliases_1 ();
892
893 if (!flag_unit_at_a_time)
894 {
895 cgraph_assemble_pending_functions ();
896 return;
897 }
898
899 if (!quiet_flag)
900 {
901 fprintf (stderr, "\nAnalyzing compilation unit");
902 fflush (stderr);
903 }
904
905 timevar_push (TV_CGRAPH);
906 cgraph_varpool_analyze_pending_decls ();
907 if (cgraph_dump_file)
908 {
909 fprintf (cgraph_dump_file, "Initial entry points:");
910 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
911 if (node->needed && DECL_SAVED_TREE (node->decl))
912 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
913 fprintf (cgraph_dump_file, "\n");
914 }
915
916 /* Propagate reachability flag and lower representation of all reachable
917 functions. In the future, lowering will introduce new functions and
918 new entry points on the way (by template instantiation and virtual
919 method table generation for instance). */
920 while (cgraph_nodes_queue)
921 {
922 struct cgraph_edge *edge;
923 tree decl = cgraph_nodes_queue->decl;
924
925 node = cgraph_nodes_queue;
926 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
927 node->next_needed = NULL;
928
929 /* ??? It is possible to create an extern inline function and later use the
930 weak alias attribute to kill its body. See
931 gcc.c-torture/compile/20011119-1.c */
932 if (!DECL_SAVED_TREE (decl))
933 {
934 cgraph_reset_node (node);
935 continue;
936 }
937
938 gcc_assert (!node->analyzed && node->reachable);
939 gcc_assert (DECL_SAVED_TREE (decl));
940
941 cgraph_analyze_function (node);
942
943 for (edge = node->callees; edge; edge = edge->next_callee)
944 if (!edge->callee->reachable)
945 cgraph_mark_reachable_node (edge->callee);
946
947 cgraph_varpool_analyze_pending_decls ();
948 }
949
950 /* Collect entry points to the unit. */
951
952 if (cgraph_dump_file)
953 {
954 fprintf (cgraph_dump_file, "Unit entry points:");
955 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
956 if (node->needed && DECL_SAVED_TREE (node->decl))
957 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
958 fprintf (cgraph_dump_file, "\n\nInitial ");
959 dump_cgraph (cgraph_dump_file);
960 }
961
962 if (cgraph_dump_file)
963 fprintf (cgraph_dump_file, "\nReclaiming functions:");
964
965 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
966 {
967 tree decl = node->decl;
968
969 if (node->local.finalized && !DECL_SAVED_TREE (decl))
970 cgraph_reset_node (node);
971
972 if (!node->reachable && DECL_SAVED_TREE (decl))
973 {
974 if (cgraph_dump_file)
975 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
976 cgraph_remove_node (node);
977 continue;
978 }
979 else
980 node->next_needed = NULL;
981 gcc_assert (!node->local.finalized || DECL_SAVED_TREE (decl));
982 gcc_assert (node->analyzed == node->local.finalized);
983 }
984 if (cgraph_dump_file)
985 {
986 fprintf (cgraph_dump_file, "\n\nReclaimed ");
987 dump_cgraph (cgraph_dump_file);
988 }
989 first_analyzed = cgraph_nodes;
990 ggc_collect ();
991 timevar_pop (TV_CGRAPH);
992 }
993 /* Figure out what functions we want to assemble. */
994
995 static void
996 cgraph_mark_functions_to_output (void)
997 {
998 struct cgraph_node *node;
999
1000 for (node = cgraph_nodes; node; node = node->next)
1001 {
1002 tree decl = node->decl;
1003 struct cgraph_edge *e;
1004
1005 gcc_assert (!node->output);
1006
1007 for (e = node->callers; e; e = e->next_caller)
1008 if (e->inline_failed)
1009 break;
1010
1011 /* We need to output all local functions that are used and not
1012 always inlined, as well as those that are reachable from
1013 outside the current compilation unit. */
1014 if (DECL_SAVED_TREE (decl)
1015 && !node->global.inlined_to
1016 && (node->needed
1017 || (e && node->reachable))
1018 && !TREE_ASM_WRITTEN (decl)
1019 && !DECL_EXTERNAL (decl))
1020 node->output = 1;
1021 else
1022 {
1023 /* We should've reclaimed all functions that are not needed. */
1024 #ifdef ENABLE_CHECKING
1025 if (!node->global.inlined_to && DECL_SAVED_TREE (decl)
1026 && !DECL_EXTERNAL (decl))
1027 {
1028 dump_cgraph_node (stderr, node);
1029 internal_error ("failed to reclaim unneeded function");
1030 }
1031 #endif
1032 gcc_assert (node->global.inlined_to || !DECL_SAVED_TREE (decl)
1033 || DECL_EXTERNAL (decl));
1034
1035 }
1036
1037 }
1038 }
1039
1040 /* Expand function specified by NODE. */
1041
1042 static void
1043 cgraph_expand_function (struct cgraph_node *node)
1044 {
1045 tree decl = node->decl;
1046
1047 /* We ought to not compile any inline clones. */
1048 gcc_assert (!node->global.inlined_to);
1049
1050 if (flag_unit_at_a_time)
1051 announce_function (decl);
1052
1053 cgraph_lower_function (node);
1054
1055 /* Generate RTL for the body of DECL. */
1056 lang_hooks.callgraph.expand_function (decl);
1057
1058 /* Make sure that the back end didn't give up on compiling. */
1059 /* ??? Can happen with nested functions of extern inline functions. */
1060 gcc_assert (TREE_ASM_WRITTEN (node->decl));
1061
1062 current_function_decl = NULL;
1063 if (!cgraph_preserve_function_body_p (node->decl))
1064 {
1065 DECL_SAVED_TREE (node->decl) = NULL;
1066 DECL_STRUCT_FUNCTION (node->decl) = NULL;
1067 DECL_INITIAL (node->decl) = error_mark_node;
1068 /* Eliminate all call edges. This is important so the call_expr no longer
1069 points to the dead function body. */
1070 cgraph_node_remove_callees (node);
1071 }
1072
1073 cgraph_function_flags_ready = true;
1074 }
1075
1076 /* Return true when the call represented by edge E should be inlined; otherwise *REASON is set to why inlining failed. */
1077
1078 bool
1079 cgraph_inline_p (struct cgraph_edge *e, const char **reason)
1080 {
1081 *reason = e->inline_failed;
1082 return !e->inline_failed;
1083 }
1084
1085
1086
1087 /* Expand all functions that must be output.
1088
1089 Attempt to topologically sort the nodes so a function is output when
1090 all called functions are already assembled to allow data to be
1091 propagated across the callgraph. Use a stack to get smaller distance
1092 between a function and its callees (later we may choose to use a more
1093 sophisticated algorithm for function reordering; we will likely want
1094 to use subsections to make the output functions appear in top-down
1095 order). */
1096
1097 static void
1098 cgraph_expand_all_functions (void)
1099 {
1100 struct cgraph_node *node;
1101 struct cgraph_node **order =
1102 xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
1103 int order_pos = 0, new_order_pos = 0;
1104 int i;
1105
1106 order_pos = cgraph_postorder (order);
1107 gcc_assert (order_pos == cgraph_n_nodes);
1108
1109 /* Garbage collector may remove inline clones we eliminate during
1110 optimization, so we must be sure not to reference them. */
1111 for (i = 0; i < order_pos; i++)
1112 if (order[i]->output)
1113 order[new_order_pos++] = order[i];
1114
1115 for (i = new_order_pos - 1; i >= 0; i--)
1116 {
1117 node = order[i];
1118 if (node->output)
1119 {
1120 gcc_assert (node->reachable);
1121 node->output = 0;
1122 cgraph_expand_function (node);
1123 }
1124 }
1125 free (order);
1126 }
1127
1128 /* Mark visibility of all functions.
1129
1130 A local function is one whose calls can occur only in the current
1131 compilation unit and all its calls are explicit, so we can change
1132 its calling convention. We simply mark all static functions whose
1133 address is not taken as local.
1134
1135 We also change the TREE_PUBLIC flag of all declarations that are public
1136 from the language point of view but whose default we want to overwrite
1137 via visibilities from the back end's point of view. */
1138
1139 static void
1140 cgraph_function_and_variable_visibility (void)
1141 {
1142 struct cgraph_node *node;
1143 struct cgraph_varpool_node *vnode;
1144
1145 for (node = cgraph_nodes; node; node = node->next)
1146 {
1147 if (node->reachable
1148 && (DECL_COMDAT (node->decl)
1149 || (!flag_whole_program
1150 && TREE_PUBLIC (node->decl) && !DECL_EXTERNAL (node->decl))))
1151 node->local.externally_visible = true;
1152 if (!node->local.externally_visible && node->analyzed
1153 && !DECL_EXTERNAL (node->decl))
1154 {
1155 gcc_assert (flag_whole_program || !TREE_PUBLIC (node->decl));
1156 TREE_PUBLIC (node->decl) = 0;
1157 }
1158 node->local.local = (!node->needed
1159 && node->analyzed
1160 && !DECL_EXTERNAL (node->decl)
1161 && !node->local.externally_visible);
1162 }
1163 for (vnode = cgraph_varpool_nodes_queue; vnode; vnode = vnode->next_needed)
1164 {
1165 if (vnode->needed
1166 && !flag_whole_program
1167 && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl)))
1168 vnode->externally_visible = 1;
1169 if (!vnode->externally_visible)
1170 {
1171 gcc_assert (flag_whole_program || !TREE_PUBLIC (vnode->decl));
1172 TREE_PUBLIC (vnode->decl) = 0;
1173 }
1174 gcc_assert (TREE_STATIC (vnode->decl));
1175 }
1176
1177 /* Because we have to be conservative on the boundaries of source
1178 level units, it is possible that we marked some functions as
1179 reachable just because they might be used later via external
1180 linkage, but after making them local it turns out they are really
1181 unreachable now. */
1182 cgraph_remove_unreachable_nodes (true, cgraph_dump_file);
1183
1184 if (cgraph_dump_file)
1185 {
1186 fprintf (cgraph_dump_file, "\nMarking local functions:");
1187 for (node = cgraph_nodes; node; node = node->next)
1188 if (node->local.local)
1189 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1190 fprintf (cgraph_dump_file, "\n\n");
1191 fprintf (cgraph_dump_file, "\nMarking externally visible functions:");
1192 for (node = cgraph_nodes; node; node = node->next)
1193 if (node->local.externally_visible)
1194 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1195 fprintf (cgraph_dump_file, "\n\n");
1196 }
1197 cgraph_function_flags_ready = true;
1198 }
1199
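/* Illustrative sketch (not part of GCC): how the marking above classifies
   simple functions.  Hypothetical example, guarded out.  */
#if 0
static int add (int a, int b) { return a + b; }   /* static, address never
                                                     taken -> local */
static int sub (int a, int b) { return a - b; }
static int (*op) (int, int) = sub;                /* address taken -> sub is
                                                     needed, hence not local */
int api (int a, int b) { return add (a, b); }     /* TREE_PUBLIC -> externally
                                                     visible unless
                                                     -fwhole-program */
#endif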
1200 /* Return true when function body of DECL still needs to be kept around
1201 for later re-use. */
1202 bool
1203 cgraph_preserve_function_body_p (tree decl)
1204 {
1205 struct cgraph_node *node;
1206 if (!cgraph_global_info_ready)
1207 return (DECL_INLINE (decl) && !flag_really_no_inline);
1208 /* Look if there is any clone around. */
1209 for (node = cgraph_node (decl); node; node = node->next_clone)
1210 if (node->global.inlined_to)
1211 return true;
1212 return false;
1213 }
1214
1215 static void
1216 ipa_passes (void)
1217 {
1218 cfun = NULL;
1219 tree_register_cfg_hooks ();
1220 bitmap_obstack_initialize (NULL);
1221 execute_ipa_pass_list (all_ipa_passes);
1222 bitmap_obstack_release (NULL);
1223 }
1224
1225 /* Perform simple optimizations based on callgraph. */
1226
1227 void
1228 cgraph_optimize (void)
1229 {
1230 #ifdef ENABLE_CHECKING
1231 verify_cgraph ();
1232 #endif
1233 if (!flag_unit_at_a_time)
1234 {
1235 cgraph_varpool_assemble_pending_decls ();
1236 return;
1237 }
1238
1239 process_pending_assemble_externals ();
1240
1241 /* The front end may output common variables after the unit has been finalized.
1242 It is safe to deal with them here as they are always zero initialized. */
1243 cgraph_varpool_analyze_pending_decls ();
1244
1245 timevar_push (TV_CGRAPHOPT);
1246 if (!quiet_flag)
1247 fprintf (stderr, "Performing intraprocedural optimizations\n");
1248
1249 cgraph_function_and_variable_visibility ();
1250 if (cgraph_dump_file)
1251 {
1252 fprintf (cgraph_dump_file, "Marked ");
1253 dump_cgraph (cgraph_dump_file);
1254 }
1255 ipa_passes ();
1256 /* This pass removes bodies of extern inline functions we never inlined.
1257 Do this later so other IPA passes see what is really going on. */
1258 cgraph_remove_unreachable_nodes (false, dump_file);
1259 cgraph_global_info_ready = true;
1260 if (cgraph_dump_file)
1261 {
1262 fprintf (cgraph_dump_file, "Optimized ");
1263 dump_cgraph (cgraph_dump_file);
1264 dump_varpool (cgraph_dump_file);
1265 }
1266 timevar_pop (TV_CGRAPHOPT);
1267
1268 /* Output everything. */
1269 if (!quiet_flag)
1270 fprintf (stderr, "Assembling functions:\n");
1271 #ifdef ENABLE_CHECKING
1272 verify_cgraph ();
1273 #endif
1274
1275 cgraph_mark_functions_to_output ();
1276 cgraph_expand_all_functions ();
1277 cgraph_varpool_remove_unreferenced_decls ();
1278
1279 cgraph_varpool_assemble_pending_decls ();
1280
1281 if (cgraph_dump_file)
1282 {
1283 fprintf (cgraph_dump_file, "\nFinal ");
1284 dump_cgraph (cgraph_dump_file);
1285 }
1286 #ifdef ENABLE_CHECKING
1287 verify_cgraph ();
1288 /* Double check that all inline clones are gone and that all
1289 function bodies have been released from memory. */
1290 if (flag_unit_at_a_time
1291 && !dump_enabled_p (TDI_tree_all)
1292 && !(sorrycount || errorcount))
1293 {
1294 struct cgraph_node *node;
1295 bool error_found = false;
1296
1297 for (node = cgraph_nodes; node; node = node->next)
1298 if (node->analyzed
1299 && (node->global.inlined_to
1300 || DECL_SAVED_TREE (node->decl)))
1301 {
1302 error_found = true;
1303 dump_cgraph_node (stderr, node);
1304 }
1305 if (error_found)
1306 internal_error ("nodes with no released memory found");
1307 }
1308 #endif
1309 }
1310
1311 /* Generate and emit a static constructor or destructor. WHICH must be
1312 one of 'I' or 'D'. BODY should be a STATEMENT_LIST containing
1313 GENERIC statements. */
1314
1315 void
1316 cgraph_build_static_cdtor (char which, tree body, int priority)
1317 {
1318 static int counter = 0;
1319 char which_buf[16];
1320 tree decl, name, resdecl;
1321
1322 sprintf (which_buf, "%c_%d", which, counter++);
1323 name = get_file_function_name_long (which_buf);
1324
1325 decl = build_decl (FUNCTION_DECL, name,
1326 build_function_type (void_type_node, void_list_node));
1327 current_function_decl = decl;
1328
1329 resdecl = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
1330 DECL_ARTIFICIAL (resdecl) = 1;
1331 DECL_IGNORED_P (resdecl) = 1;
1332 DECL_RESULT (decl) = resdecl;
1333
1334 allocate_struct_function (decl);
1335
1336 TREE_STATIC (decl) = 1;
1337 TREE_USED (decl) = 1;
1338 DECL_ARTIFICIAL (decl) = 1;
1339 DECL_IGNORED_P (decl) = 1;
1340 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
1341 DECL_SAVED_TREE (decl) = body;
1342 TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
1343 DECL_UNINLINABLE (decl) = 1;
1344
1345 DECL_INITIAL (decl) = make_node (BLOCK);
1346 TREE_USED (DECL_INITIAL (decl)) = 1;
1347
1348 DECL_SOURCE_LOCATION (decl) = input_location;
1349 cfun->function_end_locus = input_location;
1350
1351 switch (which)
1352 {
1353 case 'I':
1354 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1355 break;
1356 case 'D':
1357 DECL_STATIC_DESTRUCTOR (decl) = 1;
1358 break;
1359 default:
1360 gcc_unreachable ();
1361 }
1362
1363 gimplify_function_tree (decl);
1364
1365 /* ??? We will get called LATE in the compilation process. */
1366 if (cgraph_global_info_ready)
1367 {
1368 tree_lowering_passes (decl);
1369 tree_rest_of_compilation (decl);
1370 }
1371 else
1372 cgraph_finalize_function (decl, 0);
1373
1374 if (targetm.have_ctors_dtors)
1375 {
1376 void (*fn) (rtx, int);
1377
1378 if (which == 'I')
1379 fn = targetm.asm_out.constructor;
1380 else
1381 fn = targetm.asm_out.destructor;
1382 fn (XEXP (DECL_RTL (decl), 0), priority);
1383 }
1384 }
1385
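/* Illustrative sketch (not part of GCC): one way a caller might build the
   BODY argument for cgraph_build_static_cdtor.  INIT_FN is a hypothetical
   FUNCTION_DECL taking no arguments; alloc_stmt_list,
   append_to_statement_list and build_function_call_expr are the usual
   GENERIC helpers.  Guarded out.  */
#if 0
static void
example_emit_static_ctor (tree init_fn)
{
  tree body = alloc_stmt_list ();
  tree call = build_function_call_expr (init_fn, NULL_TREE);

  append_to_statement_list (call, &body);
  cgraph_build_static_cdtor ('I', body, DEFAULT_INIT_PRIORITY);
}
#endif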
1386 void
1387 init_cgraph (void)
1388 {
1389 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1390 }
1391
1392 /* The edges representing the callers of the NEW_VERSION node were
1393 fixed by cgraph_function_versioning (); now the call_expr in their
1394 respective call statements should be updated to call the NEW_VERSION. */
1395
1396 static void
1397 update_call_expr (struct cgraph_node *new_version)
1398 {
1399 struct cgraph_edge *e;
1400
1401 gcc_assert (new_version);
1402 for (e = new_version->callers; e; e = e->next_caller)
1403 /* Update the call expr on the edges
1404 to call the new version. */
1405 TREE_OPERAND (TREE_OPERAND (get_call_expr_in (e->call_stmt), 0), 0) = new_version->decl;
1406 }
1407
1408
1409 /* Create a new cgraph node which is the new version of
1410 OLD_VERSION node. REDIRECT_CALLERS holds the caller
1411 edges which should be redirected to point to
1412 NEW_VERSION. All the callee edges of OLD_VERSION
1413 are cloned to the new version node. Return the new
1414 version node. */
1415
1416 static struct cgraph_node *
1417 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
1418 tree new_decl, varray_type redirect_callers)
1419 {
1420 struct cgraph_node *new_version;
1421 struct cgraph_edge *e, *new_e;
1422 struct cgraph_edge *next_callee;
1423 unsigned i;
1424
1425 gcc_assert (old_version);
1426
1427 new_version = cgraph_node (new_decl);
1428
1429 new_version->analyzed = true;
1430 new_version->local = old_version->local;
1431 new_version->global = old_version->global;
1432 new_version->rtl = old_version->rtl;
1433 new_version->reachable = true;
1434 new_version->count = old_version->count;
1435
1436 /* Clone the old node callees. Recursive calls are
1437 also cloned. */
1438 for (e = old_version->callees;e; e=e->next_callee)
1439 {
1440 new_e = cgraph_clone_edge (e, new_version, e->call_stmt, 0, e->loop_nest, true);
1441 new_e->count = e->count;
1442 }
1443 /* Fix recursive calls.
1444 If OLD_VERSION has a recursive call after the
1445 previous edge cloning, the new version will have an edge
1446 pointing to the old version, which is wrong;
1447 Redirect it to point to the new version. */
1448 for (e = new_version->callees ; e; e = next_callee)
1449 {
1450 next_callee = e->next_callee;
1451 if (e->callee == old_version)
1452 cgraph_redirect_edge_callee (e, new_version);
1453
1454 if (!next_callee)
1455 break;
1456 }
1457 if (redirect_callers)
1458 for (i = 0; i < VARRAY_ACTIVE_SIZE (redirect_callers); i++)
1459 {
1460 e = VARRAY_GENERIC_PTR (redirect_callers, i);
1461 /* Redirect calls to the old version node
1462 to point to its new version. */
1463 cgraph_redirect_edge_callee (e, new_version);
1464 }
1465
1466 return new_version;
1467 }
1468
1469 /* Perform function versioning.
1470 Function versioning includes copying of the tree and
1471 a callgraph update (creating a new cgraph node and updating
1472 its callees and callers).
1473
1474 REDIRECT_CALLERS varray includes the edges to be redirected
1475 to the new version.
1476
1477 TREE_MAP is a mapping of tree nodes we want to replace with
1478 new ones (according to results of prior analysis).
1479 OLD_VERSION_NODE is the node that is versioned.
1480 It returns the new version's cgraph node. */
1481
1482 struct cgraph_node *
1483 cgraph_function_versioning (struct cgraph_node *old_version_node,
1484 varray_type redirect_callers,
1485 varray_type tree_map)
1486 {
1487 tree old_decl = old_version_node->decl;
1488 struct cgraph_node *new_version_node = NULL;
1489 tree new_decl;
1490
1491 if (!tree_versionable_function_p (old_decl))
1492 return NULL;
1493
1494 /* Make a new FUNCTION_DECL tree node for the
1495 new version. */
1496 new_decl = copy_node (old_decl);
1497
1498 /* Create the new version's call-graph node
1499 and update the edges of the new node. */
1500 new_version_node =
1501 cgraph_copy_node_for_versioning (old_version_node, new_decl,
1502 redirect_callers);
1503
1504 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1505 tree_function_versioning (old_decl, new_decl, tree_map, false);
1506 /* Update the call_expr on the edges to call the new version node. */
1507 update_call_expr (new_version_node);
1508
1509 /* Update the new version's properties.
1510 Make the new version visible only within this translation unit.
1511 ??? We cannot use COMDAT linkage because there is no
1512 ABI support for this. */
1513 DECL_EXTERNAL (new_version_node->decl) = 0;
1514 DECL_ONE_ONLY (new_version_node->decl) = 0;
1515 TREE_PUBLIC (new_version_node->decl) = 0;
1516 DECL_COMDAT (new_version_node->decl) = 0;
1517 new_version_node->local.externally_visible = 0;
1518 new_version_node->local.local = 1;
1519 new_version_node->lowered = true;
1520 return new_version_node;
1521 }
1522
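/* Illustrative sketch (not part of GCC): versioning NODE and redirecting a
   single caller edge E to the new version; E and NODE are assumed to have
   been obtained elsewhere, and no tree replacements are requested.
   Guarded out.  */
#if 0
static struct cgraph_node *
example_make_version (struct cgraph_node *node, struct cgraph_edge *e)
{
  varray_type redirect_callers;

  VARRAY_GENERIC_PTR_NOGC_INIT (redirect_callers, 1, "redirect_callers");
  VARRAY_PUSH_GENERIC_PTR (redirect_callers, e);

  return cgraph_function_versioning (node, redirect_callers, NULL);
}
#endif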
1523 /* Produce a separate function body for inline clones so the offline copy can be
1524 modified without affecting them. */
1525 struct cgraph_node *
1526 save_inline_function_body (struct cgraph_node *node)
1527 {
1528 struct cgraph_node *first_clone;
1529
1530 gcc_assert (node == cgraph_node (node->decl));
1531
1532 cgraph_lower_function (node);
1533
1534 /* In non-unit-at-a-time mode we construct a full-fledged clone we never output to
1535 the assembly file. This clone is pointed to by the inline_decl of the original function
1536 and the inlining infrastructure knows how to deal with this. */
1537 if (!flag_unit_at_a_time)
1538 {
1539 struct cgraph_edge *e;
1540
1541 first_clone = cgraph_clone_node (node, node->count, 0, false);
1542 first_clone->needed = 0;
1543 first_clone->reachable = 1;
1544 /* Recursively clone all bodies. */
1545 for (e = first_clone->callees; e; e = e->next_callee)
1546 if (!e->inline_failed)
1547 cgraph_clone_inlined_nodes (e, true, false);
1548 }
1549 else
1550 first_clone = node->next_clone;
1551
1552 first_clone->decl = copy_node (node->decl);
1553 node->next_clone = NULL;
1554 if (!flag_unit_at_a_time)
1555 node->inline_decl = first_clone->decl;
1556 first_clone->prev_clone = NULL;
1557 cgraph_insert_node_to_hashtable (first_clone);
1558 gcc_assert (first_clone == cgraph_node (first_clone->decl));
1559
1560 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1561 tree_function_versioning (node->decl, first_clone->decl, NULL, true);
1562
1563 DECL_EXTERNAL (first_clone->decl) = 0;
1564 DECL_ONE_ONLY (first_clone->decl) = 0;
1565 TREE_PUBLIC (first_clone->decl) = 0;
1566 DECL_COMDAT (first_clone->decl) = 0;
1567
1568 for (node = first_clone->next_clone; node; node = node->next_clone)
1569 node->decl = first_clone->decl;
1570 #ifdef ENABLE_CHECKING
1571 verify_cgraph_node (first_clone);
1572 #endif
1573 return first_clone;
1574 }
1575