1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "tree-inline.h"
28 #include "langhooks.h"
29 #include "hashtab.h"
30 #include "toplev.h"
31 #include "flags.h"
32 #include "ggc.h"
33 #include "debug.h"
34 #include "target.h"
35 #include "cgraph.h"
36 #include "diagnostic.h"
37 #include "timevar.h"
38 #include "params.h"
39 #include "fibheap.h"
40 #include "c-common.h"
41 #include "intl.h"
42
43 #define INSNS_PER_CALL 10
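/* INSNS_PER_CALL is the estimated insn overhead of a single call; each
   call eliminated by inlining is assumed to save this many insns (see
   cgraph_estimate_size_after_inlining below).  */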
44
45 static void cgraph_expand_all_functions (void);
46 static void cgraph_mark_functions_to_output (void);
47 static void cgraph_expand_function (struct cgraph_node *);
48 static tree record_call_1 (tree *, int *, void *);
49 static void cgraph_mark_local_functions (void);
50 static void cgraph_optimize_function (struct cgraph_node *);
51 static bool cgraph_default_inline_p (struct cgraph_node *n);
52 static void cgraph_analyze_function (struct cgraph_node *node);
53 static void cgraph_decide_inlining_incrementally (struct cgraph_node *);
54
55 /* Statistics we collect about inlining algorithm. */
56 static int ncalls_inlined;
57 static int nfunctions_inlined;
58 static int initial_insns;
59 static int overall_insns;
60
61 /* Records tree nodes seen in cgraph_create_edges. Simply using
62 walk_tree_without_duplicates doesn't guarantee each node is visited
63 once because it gets a new htab upon each recursive call from
64 record_call_1. */
65 static htab_t visited_nodes;
66
67 /* Determine if function DECL is needed. That is, visible to something
68 either outside this translation unit, to something magic in the system
69 configury, or (if not doing unit-at-a-time) to something we haven't
70 seen yet. */
71
72 static bool
73 decide_is_function_needed (struct cgraph_node *node, tree decl)
74 {
75 /* If we decided it was needed before, but at the time we didn't have
76 the body of the function available, then it's still needed. We have
77 to go back and re-check its dependencies now. */
78 if (node->needed)
79 return true;
80
81 /* Externally visible functions must be output. The exception is
82 COMDAT functions that must be output only when they are needed. */
83 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
84 return true;
85
86 /* Constructors and destructors are reachable from the runtime by
87 some mechanism. */
88 if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
89 return true;
90
91 /* If the user told us it is used, then it must be so. */
92 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
93 return true;
94
95 /* ??? If the assembler name is set by hand, it is possible that the name
96 is assembled later, after the function is finalized, and the fact is
97 only noticed in assemble_name then. This is arguably a bug. */
98 if (DECL_ASSEMBLER_NAME_SET_P (decl)
99 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
100 return true;
101
102 if (flag_unit_at_a_time)
103 return false;
104
105 /* If not doing unit at a time, then we'll only defer this function
106 if it's marked for inlining. Otherwise we want to emit it now. */
107
108 /* "extern inline" functions are never output locally. */
109 if (DECL_EXTERNAL (decl))
110 return false;
111 /* We want to emit COMDAT functions only when absolutely necessary. */
112 if (DECL_COMDAT (decl))
113 return false;
114 if (!DECL_INLINE (decl)
115 || (!node->local.disregard_inline_limits
116 /* When declared inline, defer even the uninlinable functions.
117 This allows them to be eliminated when unused. */
118 && !DECL_DECLARED_INLINE_P (decl)
119 && (!node->local.inlinable || !cgraph_default_inline_p (node))))
120 return true;
121
122 return false;
123 }
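/* For example, an externally visible non-COMDAT function with a body is
   always needed, while a static function declared inline can be deferred
   and eliminated entirely if every use of it ends up being inlined.  */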
124
125 /* When not doing unit-at-a-time, output all functions enqueued.
126 Return true when such functions were found. */
127
128 bool
129 cgraph_assemble_pending_functions (void)
130 {
131 bool output = false;
132
133 if (flag_unit_at_a_time)
134 return false;
135
136 while (cgraph_nodes_queue)
137 {
138 struct cgraph_node *n = cgraph_nodes_queue;
139
140 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
141 if (!n->origin && !DECL_EXTERNAL (n->decl))
142 {
143 cgraph_expand_function (n);
144 output = true;
145 }
146 }
147
148 return output;
149 }
150
151 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
152 logic in effect. If NESTED is true, then our caller cannot stand to have
153 the garbage collector run at the moment. We would need to either create
154 a new GC context, or just not compile right now. */
155
156 void
157 cgraph_finalize_function (tree decl, bool nested)
158 {
159 struct cgraph_node *node = cgraph_node (decl);
160
161 if (node->local.finalized)
162 {
163 /* As a GCC extension we allow redefinition of the function. The
164 semantics when the two bodies differ are not well defined.
165 We replace the old body with the new body, so in unit-at-a-time
166 mode we always use the new body, while in normal mode we may end
167 up with the old body inlined into some functions and the new body
168 expanded and inlined in others.
169
170 ??? It may make more sense to use one body for inlining and the
171 other body for expanding the function, but this is difficult to do. */
172
173 /* If node->output is set, then this is a unit-at-a-time compilation
174 and we have already begun whole-unit analysis. This is *not*
175 testing for whether we've already emitted the function. That
176 case can be sort-of legitimately seen with real function
177 redefinition errors. I would argue that the front end should
178 never present us with such a case, but don't enforce that for now. */
179 if (node->output)
180 abort ();
181
182 /* Reset our data structures so we can analyze the function again. */
183 memset (&node->local, 0, sizeof (node->local));
184 memset (&node->global, 0, sizeof (node->global));
185 memset (&node->rtl, 0, sizeof (node->rtl));
186 node->analyzed = false;
187 node->local.redefined_extern_inline = true;
188 while (node->callees)
189 cgraph_remove_edge (node, node->callees->callee);
190
191 /* We may need to re-queue the node for assembling in case
192 we already processed it and ignored it as not needed. */
193 if (node->reachable && !flag_unit_at_a_time)
194 {
195 struct cgraph_node *n;
196
197 for (n = cgraph_nodes_queue; n; n = n->next_needed)
198 if (n == node)
199 break;
200 if (!n)
201 node->reachable = 0;
202 }
203 }
204
205 notice_global_symbol (decl);
206 node->decl = decl;
207 node->local.finalized = true;
208
209 /* If not unit at a time, then we need to create the call graph
210 now, so that called functions can be queued and emitted now. */
211 if (!flag_unit_at_a_time)
212 {
213 cgraph_analyze_function (node);
214 cgraph_decide_inlining_incrementally (node);
215 }
216
217 if (decide_is_function_needed (node, decl))
218 cgraph_mark_needed_node (node);
219
220 /* If not unit at a time, go ahead and emit everything we've found
221 to be reachable at this time. */
222 if (!nested)
223 {
224 if (!cgraph_assemble_pending_functions ())
225 ggc_collect ();
226 }
227
228 /* If we've not yet emitted decl, tell the debug info about it. */
229 if (!TREE_ASM_WRITTEN (decl))
230 (*debug_hooks->deferred_inline_function) (decl);
231 }
232
233 /* Walk tree and record all calls. Called via walk_tree. */
234 static tree
235 record_call_1 (tree *tp, int *walk_subtrees, void *data)
236 {
237 tree t = *tp;
238
239 switch (TREE_CODE (t))
240 {
241 case VAR_DECL:
242 /* ??? Really, we should mark this decl as *potentially* referenced
243 by this function and re-examine whether the decl is actually used
244 after rtl has been generated. */
245 if (TREE_STATIC (t))
246 cgraph_varpool_mark_needed_node (cgraph_varpool_node (t));
247 break;
248
249 case ADDR_EXPR:
250 if (flag_unit_at_a_time)
251 {
252 /* Record references to the functions. This makes the
253 functions reachable unconditionally. */
254 tree decl = TREE_OPERAND (*tp, 0);
255 if (TREE_CODE (decl) == FUNCTION_DECL)
256 cgraph_mark_needed_node (cgraph_node (decl));
257 }
258 break;
259
260 case CALL_EXPR:
261 {
262 tree decl = get_callee_fndecl (*tp);
263 if (decl && TREE_CODE (decl) == FUNCTION_DECL)
264 {
265 cgraph_record_call (data, decl);
266
267 /* When we see a function call, we don't want to look at the
268 function reference in the ADDR_EXPR that is hanging from
269 the CALL_EXPR we're examining here, because we would
270 conclude incorrectly that the function's address could be
271 taken by something that is not a function call. So only
272 walk the function parameter list, skip the other subtrees. */
273
274 walk_tree (&TREE_OPERAND (*tp, 1), record_call_1, data,
275 visited_nodes);
276 *walk_subtrees = 0;
277 }
278 break;
279 }
280
281 default:
282 /* Save some cycles by not walking types and declarations as we
283 won't find anything useful there anyway. */
284 if (DECL_P (*tp) || TYPE_P (*tp))
285 {
286 *walk_subtrees = 0;
287 break;
288 }
289
290 if ((unsigned int) TREE_CODE (t) >= LAST_AND_UNUSED_TREE_CODE)
291 return (*lang_hooks.callgraph.analyze_expr) (tp, walk_subtrees, data);
292 break;
293 }
294
295 return NULL;
296 }
297
298 /* Create cgraph edges for function calls inside BODY from DECL. */
299
300 void
301 cgraph_create_edges (tree decl, tree body)
302 {
303 /* The nodes we're interested in are never shared, so walk
304 the tree ignoring duplicates. */
305 visited_nodes = htab_create (37, htab_hash_pointer,
306 htab_eq_pointer, NULL);
307 walk_tree (&body, record_call_1, decl, visited_nodes);
308 htab_delete (visited_nodes);
309 visited_nodes = NULL;
310 }
311
312 /* Analyze the function scheduled to be output. */
313 static void
314 cgraph_analyze_function (struct cgraph_node *node)
315 {
316 tree decl = node->decl;
317 struct cgraph_edge *e;
318
319 current_function_decl = decl;
320
321 /* First kill forward declaration so reverse inlining works properly. */
322 cgraph_create_edges (decl, DECL_SAVED_TREE (decl));
323
324 node->local.inlinable = tree_inlinable_function_p (decl);
325 if (!DECL_ESTIMATED_INSNS (decl))
326 DECL_ESTIMATED_INSNS (decl)
327 = (*lang_hooks.tree_inlining.estimate_num_insns) (decl);
328 node->local.self_insns = DECL_ESTIMATED_INSNS (decl);
329 if (node->local.inlinable)
330 node->local.disregard_inline_limits
331 = (*lang_hooks.tree_inlining.disregard_inline_limits) (decl);
332 for (e = node->callers; e; e = e->next_caller)
333 if (e->inline_failed)
334 {
335 if (node->local.redefined_extern_inline)
336 e->inline_failed = N_("redefined extern inline functions are not "
337 "considered for inlining");
338 else if (!node->local.inlinable)
339 e->inline_failed = N_("function not inlinable");
340 else
341 e->inline_failed = N_("function not considered for inlining");
342 }
343 if (flag_really_no_inline && !node->local.disregard_inline_limits)
344 node->local.inlinable = 0;
345 /* Inlining characteristics are maintained by cgraph_mark_inline. */
346 node->global.insns = node->local.self_insns;
347 if (!DECL_EXTERNAL (decl))
348 {
349 node->global.cloned_times = 1;
350 node->global.will_be_output = true;
351 }
352
353 node->analyzed = true;
354 current_function_decl = NULL;
355 }
356
357 /* Analyze the whole compilation unit once it is parsed completely. */
358
359 void
360 cgraph_finalize_compilation_unit (void)
361 {
362 struct cgraph_node *node;
363
364 if (!flag_unit_at_a_time)
365 {
366 cgraph_assemble_pending_functions ();
367 return;
368 }
369
370 cgraph_varpool_assemble_pending_decls ();
371 if (!quiet_flag)
372 fprintf (stderr, "\nAnalyzing compilation unit\n");
373
374 timevar_push (TV_CGRAPH);
375 if (cgraph_dump_file)
376 {
377 fprintf (cgraph_dump_file, "Initial entry points:");
378 for (node = cgraph_nodes; node; node = node->next)
379 if (node->needed && DECL_SAVED_TREE (node->decl))
380 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
381 fprintf (cgraph_dump_file, "\n");
382 }
383
384 /* Propagate reachability flag and lower representation of all reachable
385 functions. In the future, lowering will introduce new functions and
386 new entry points on the way (by template instantiation and virtual
387 method table generation for instance). */
388 while (cgraph_nodes_queue)
389 {
390 struct cgraph_edge *edge;
391 tree decl = cgraph_nodes_queue->decl;
392
393 node = cgraph_nodes_queue;
394 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
395
396 /* ??? It is possible to create an extern inline function and later
397 use the weak alias attribute to kill its body. See
398 gcc.c-torture/compile/20011119-1.c */
399 if (!DECL_SAVED_TREE (decl))
400 continue;
401
402 if (node->analyzed || !node->reachable || !DECL_SAVED_TREE (decl))
403 abort ();
404
405 cgraph_analyze_function (node);
406
407 for (edge = node->callees; edge; edge = edge->next_callee)
408 if (!edge->callee->reachable)
409 cgraph_mark_reachable_node (edge->callee);
410
411 cgraph_varpool_assemble_pending_decls ();
412 }
413
414 /* Collect entry points to the unit. */
415
416 if (cgraph_dump_file)
417 {
418 fprintf (cgraph_dump_file, "Unit entry points:");
419 for (node = cgraph_nodes; node; node = node->next)
420 if (node->needed && DECL_SAVED_TREE (node->decl))
421 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
422 fprintf (cgraph_dump_file, "\n\nInitial ");
423 dump_cgraph (cgraph_dump_file);
424 }
425
426 if (cgraph_dump_file)
427 fprintf (cgraph_dump_file, "\nReclaiming functions:");
428
429 for (node = cgraph_nodes; node; node = node->next)
430 {
431 tree decl = node->decl;
432
433 if (!node->reachable && DECL_SAVED_TREE (decl))
434 {
435 cgraph_remove_node (node);
436 if (cgraph_dump_file)
437 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
438 }
439 else
440 node->next_needed = NULL;
441 }
442 if (cgraph_dump_file)
443 {
444 fprintf (cgraph_dump_file, "\n\nReclaimed ");
445 dump_cgraph (cgraph_dump_file);
446 }
447 ggc_collect ();
448 timevar_pop (TV_CGRAPH);
449 }
450
451 /* Figure out what functions we want to assemble. */
452
453 static void
454 cgraph_mark_functions_to_output (void)
455 {
456 struct cgraph_node *node;
457
458 for (node = cgraph_nodes; node; node = node->next)
459 {
460 tree decl = node->decl;
461 struct cgraph_edge *e;
462
463 if (node->output)
464 abort ();
465
466 for (e = node->callers; e; e = e->next_caller)
467 if (e->inline_failed)
468 break;
469
470 /* We need to output all local functions that are used and not
471 always inlined, as well as those that are reachable from
472 outside the current compilation unit. */
473 if (DECL_SAVED_TREE (decl)
474 && (node->needed
475 || (e && node->reachable))
476 && !TREE_ASM_WRITTEN (decl) && !node->origin
477 && !DECL_EXTERNAL (decl))
478 node->output = 1;
479 }
480 }
481
482 /* Optimize the function before expansion. */
483
484 static void
485 cgraph_optimize_function (struct cgraph_node *node)
486 {
487 tree decl = node->decl;
488
489 timevar_push (TV_INTEGRATION);
490 /* optimize_inline_calls avoids inlining of current_function_decl. */
491 current_function_decl = decl;
492 if (flag_inline_trees)
493 {
494 struct cgraph_edge *e;
495
496 for (e = node->callees; e; e = e->next_callee)
497 if (!e->inline_failed || warn_inline
498 || (DECL_DECLARED_INLINE_P (e->callee->decl)
499 && lookup_attribute ("always_inline",
500 DECL_ATTRIBUTES (e->callee->decl))))
501 break;
502 if (e)
503 optimize_inline_calls (decl);
504 }
505 if (node->nested)
506 {
507 for (node = node->nested; node; node = node->next_nested)
508 cgraph_optimize_function (node);
509 }
510 timevar_pop (TV_INTEGRATION);
511 }
512
513 /* Expand function specified by NODE. */
514
515 static void
516 cgraph_expand_function (struct cgraph_node *node)
517 {
518 tree decl = node->decl;
519
520 if (flag_unit_at_a_time)
521 announce_function (decl);
522
523 cgraph_optimize_function (node);
524
525 /* Generate RTL for the body of DECL. Nested functions are expanded
526 via lang_expand_decl_stmt. */
527 (*lang_hooks.callgraph.expand_function) (decl);
528
529 if (!cgraph_function_possibly_inlined_p (decl))
530 DECL_SAVED_TREE (decl) = NULL;
531 current_function_decl = NULL;
532 }
533
534 /* Fill the array ORDER with all nodes in reverse topological
535 order. */
536
537 static int
538 cgraph_postorder (struct cgraph_node **order)
539 {
540 struct cgraph_node *node, *node2;
541 int stack_size = 0;
542 int order_pos = 0;
543 struct cgraph_edge *edge, last;
544
545 struct cgraph_node **stack =
546 xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
547
548 /* We have to deal with cycles nicely, so use a depth first traversal
549 output algorithm. Ignore the fact that some functions won't need
550 to be output and put them into order as well, so that we get
551 dependencies right through inline functions. */
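/* While walking, each node's AUX field holds the next caller edge still
   to be visited, or the address of the dummy LAST once every caller has
   been processed and the node can be emitted to ORDER.  */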
552 for (node = cgraph_nodes; node; node = node->next)
553 node->aux = NULL;
554 for (node = cgraph_nodes; node; node = node->next)
555 if (!node->aux)
556 {
557 node2 = node;
558 if (!node->callers)
559 node->aux = &last;
560 else
561 node->aux = node->callers;
562 while (node2)
563 {
564 while (node2->aux != &last)
565 {
566 edge = node2->aux;
567 if (edge->next_caller)
568 node2->aux = edge->next_caller;
569 else
570 node2->aux = &last;
571 if (!edge->caller->aux)
572 {
573 if (!edge->caller->callers)
574 edge->caller->aux = &last;
575 else
576 edge->caller->aux = edge->caller->callers;
577 stack[stack_size++] = node2;
578 node2 = edge->caller;
579 break;
580 }
581 }
582 if (node2->aux == &last)
583 {
584 order[order_pos++] = node2;
585 if (stack_size)
586 node2 = stack[--stack_size];
587 else
588 node2 = NULL;
589 }
590 }
591 }
592 free (stack);
593 return order_pos;
594 }
595
596 #define INLINED_TIMES(node) ((size_t)(node)->aux)
597 #define SET_INLINED_TIMES(node,times) ((node)->aux = (void *)(times))
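/* These macros reuse a node's AUX field to count how many times its body
   is duplicated by the inlining decisions being examined; callers clear
   AUX again once they are done with the array.  */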
598
599 /* Return the list of nodes we decided to inline NODE into, set their
600 output flag and compute INLINED_TIMES.
601
602 We do simple backtracking to get INLINED_TIMES right. This should not
603 be expensive as we limit the amount of inlining. Alternatively we
604 could first discover the set of nodes, topologically sort them and
605 propagate INLINED_TIMES. */
606
607 static int
608 cgraph_inlined_into (struct cgraph_node *node, struct cgraph_node **array)
609 {
610 int nfound = 0;
611 struct cgraph_edge **stack;
612 struct cgraph_edge *e, *e1;
613 int sp;
614 int i;
615
616 /* Fast path: since we traverse in mostly topological order, we will likely
617 find no edges. */
618 for (e = node->callers; e; e = e->next_caller)
619 if (!e->inline_failed)
620 break;
621
622 if (!e)
623 return 0;
624
625 /* Allocate a stack for backtracking up the callgraph. */
626 stack = xmalloc ((cgraph_n_nodes + 1) * sizeof (struct cgraph_edge));
627 sp = 0;
628
629 /* Push the first edge on to the stack. */
630 stack[sp++] = e;
631
632 while (sp)
633 {
634 struct cgraph_node *caller;
635
636 /* Look at the edge on the top of the stack. */
637 e = stack[sp - 1];
638 caller = e->caller;
639
640 /* Check if the caller destination has been visited yet. */
641 if (!caller->output)
642 {
643 array[nfound++] = e->caller;
644 /* Mark that we have visited the destination. */
645 caller->output = true;
646 SET_INLINED_TIMES (caller, 0);
647 }
648 SET_INLINED_TIMES (caller, INLINED_TIMES (caller) + 1);
649
650 for (e1 = caller->callers; e1; e1 = e1->next_caller)
651 if (!e1->inline_failed)
652 break;
653
654 if (e1)
655 stack[sp++] = e1;
656 else
657 {
658 while (true)
659 {
660 for (e1 = e->next_caller; e1; e1 = e1->next_caller)
661 if (!e1->inline_failed)
662 break;
663
664 if (e1)
665 {
666 stack[sp - 1] = e1;
667 break;
668 }
669 else
670 {
671 sp--;
672 if (!sp)
673 break;
674 e = stack[sp - 1];
675 }
676 }
677 }
678 }
679
680 free (stack);
681
682
683 if (cgraph_dump_file)
684 {
685 fprintf (cgraph_dump_file, " Found inline predecessors of %s:",
686 cgraph_node_name (node));
687 for (i = 0; i < nfound; i++)
688 {
689 fprintf (cgraph_dump_file, " %s", cgraph_node_name (array[i]));
690 if (INLINED_TIMES (array[i]) != 1)
691 fprintf (cgraph_dump_file, " (%i times)",
692 (int)INLINED_TIMES (array[i]));
693 }
694 fprintf (cgraph_dump_file, "\n");
695 }
696
697 return nfound;
698 }
699
700 /* Return list of nodes we decided to inline into NODE, set their output
701 flag and compute INLINED_TIMES.
702
703 This function is identical to cgraph_inlined_into with caller and
704 callee nodes swapped. */
705
706 static int
707 cgraph_inlined_callees (struct cgraph_node *node, struct cgraph_node **array)
708 {
709 int nfound = 0;
710 struct cgraph_edge **stack;
711 struct cgraph_edge *e, *e1;
712 int sp;
713 int i;
714
715 /* Fast path: since we traverse in mostly topological order, we will likely
716 find no edges. */
717 for (e = node->callees; e; e = e->next_callee)
718 if (!e->inline_failed)
719 break;
720
721 if (!e)
722 return 0;
723
724 /* Allocate a stack for backtracking up the callgraph. */
725 stack = xmalloc ((cgraph_n_nodes + 1) * sizeof (struct cgraph_edge));
726 sp = 0;
727
728 /* Push the first edge on to the stack. */
729 stack[sp++] = e;
730
731 while (sp)
732 {
733 struct cgraph_node *callee;
734
735 /* Look at the edge on the top of the stack. */
736 e = stack[sp - 1];
737 callee = e->callee;
738
739 /* Check if the callee destination has been visited yet. */
740 if (!callee->output)
741 {
742 array[nfound++] = e->callee;
743 /* Mark that we have visited the destination. */
744 callee->output = true;
745 SET_INLINED_TIMES (callee, 0);
746 }
747 SET_INLINED_TIMES (callee, INLINED_TIMES (callee) + 1);
748
749 for (e1 = callee->callees; e1; e1 = e1->next_callee)
750 if (!e1->inline_failed)
751 break;
752 if (e1)
753 stack[sp++] = e1;
754 else
755 {
756 while (true)
757 {
758 for (e1 = e->next_callee; e1; e1 = e1->next_callee)
759 if (!e1->inline_failed)
760 break;
761
762 if (e1)
763 {
764 stack[sp - 1] = e1;
765 break;
766 }
767 else
768 {
769 sp--;
770 if (!sp)
771 break;
772 e = stack[sp - 1];
773 }
774 }
775 }
776 }
777
778 free (stack);
779
780 if (cgraph_dump_file)
781 {
782 fprintf (cgraph_dump_file, " Found inline successors of %s:",
783 cgraph_node_name (node));
784 for (i = 0; i < nfound; i++)
785 {
786 fprintf (cgraph_dump_file, " %s", cgraph_node_name (array[i]));
787 if (INLINED_TIMES (array[i]) != 1)
788 fprintf (cgraph_dump_file, " (%i times)",
789 (int)INLINED_TIMES (array[i]));
790 }
791 fprintf (cgraph_dump_file, "\n");
792 }
793
794 return nfound;
795 }
796
797 /* Perform reachability analysis and reclaim all unreachable nodes.
798 This function also removes unneeded bodies of extern inline functions
799 and thus needs to be run only after inlining decisions have been made. */
800 static bool
801 cgraph_remove_unreachable_nodes (void)
802 {
803 struct cgraph_node *first = (void *) 1;
804 struct cgraph_node *node;
805 bool changed = false;
806 int insns = 0;
807
808 if (cgraph_dump_file)
809 fprintf (cgraph_dump_file, "\nReclaiming functions:");
810 #ifdef ENABLE_CHECKING
811 for (node = cgraph_nodes; node; node = node->next)
812 if (node->aux)
813 abort ();
814 #endif
815 for (node = cgraph_nodes; node; node = node->next)
816 if (node->needed && (!DECL_EXTERNAL (node->decl) || !node->analyzed))
817 {
818 node->aux = first;
819 first = node;
820 }
821 else if (node->aux)
822 abort ();
823
824 /* Perform reachability analysis. As a special case, do not consider
825 extern inline functions that were not inlined as live, because we
826 won't output them at all. */
827 while (first != (void *) 1)
828 {
829 struct cgraph_edge *e;
830 node = first;
831 first = first->aux;
832
833 for (e = node->callees; e; e = e->next_callee)
834 if (!e->callee->aux
835 && node->analyzed
836 && (!e->inline_failed || !e->callee->analyzed
837 || !DECL_EXTERNAL (e->callee->decl)))
838 {
839 e->callee->aux = first;
840 first = e->callee;
841 }
842 }
843
844 /* Remove unreachable nodes. Extern inline functions need special care;
845 unreachable extern inline functions shall be removed.
846 Reachable extern inline functions we never inlined shall get their
847 bodies eliminated.
848 Reachable extern inline functions we sometimes inlined will be turned
849 into unanalyzed nodes so that they look like true extern functions
850 to the rest of the code. */
851 for (node = cgraph_nodes; node; node = node->next)
852 {
853 if (!node->aux)
854 {
855 int local_insns;
856 tree decl = node->decl;
857
858 if (DECL_SAVED_INSNS (decl))
859 local_insns = node->local.self_insns;
860 else
861 local_insns = 0;
862 if (cgraph_dump_file)
863 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
864 if (!node->analyzed || !DECL_EXTERNAL (node->decl))
865 cgraph_remove_node (node);
866 else
867 {
868 struct cgraph_edge *e;
869
870 for (e = node->callers; e; e = e->next_caller)
871 if (e->caller->aux)
872 break;
873 if (e || node->needed)
874 {
875 DECL_SAVED_TREE (node->decl) = NULL_TREE;
876 while (node->callees)
877 cgraph_remove_edge (node, node->callees->callee);
878 node->analyzed = false;
879 }
880 else
881 cgraph_remove_node (node);
882 }
883 if (!DECL_SAVED_TREE (decl))
884 insns += local_insns;
885 changed = true;
886 }
887 }
888 for (node = cgraph_nodes; node; node = node->next)
889 node->aux = NULL;
890 if (cgraph_dump_file)
891 fprintf (cgraph_dump_file, "\nReclaimed %i insns", insns);
892 return changed;
893 }
894
895
896 /* Estimate size of the function after inlining WHAT into TO. */
897
898 static int
899 cgraph_estimate_size_after_inlining (int times, struct cgraph_node *to,
900 struct cgraph_node *what)
901 {
902 return (what->global.insns - INSNS_PER_CALL) * times + to->global.insns;
903 }
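/* For example, with INSNS_PER_CALL being 10, inlining a 30 insn callee
   (WHAT) into two call sites (TIMES == 2) of a 70 insn caller (TO) is
   estimated as (30 - 10) * 2 + 70 == 110 insns.  */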
904
905 /* Estimate the growth caused by inlining NODE into all callers. */
906
907 static int
908 cgraph_estimate_growth (struct cgraph_node *node)
909 {
910 int growth = 0;
911 int calls_saved = 0;
912 int clones_added = 0;
913 struct cgraph_edge *e;
914
915 for (e = node->callers; e; e = e->next_caller)
916 if (e->inline_failed)
917 {
918 growth += ((cgraph_estimate_size_after_inlining (1, e->caller, node)
919 -
920 e->caller->global.insns) *e->caller->global.cloned_times);
921 calls_saved += e->caller->global.cloned_times;
922 clones_added += e->caller->global.cloned_times;
923 }
924
925 /* ??? Wrong for self recursive functions or cases where we decide not
926 to inline for different reasons, but it is not a big deal as in that
927 case we will keep the body around, but we will also avoid some inlining. */
928 if (!node->needed && !node->origin && !DECL_EXTERNAL (node->decl))
929 growth -= node->global.insns, clones_added--;
930
931 if (!calls_saved)
932 calls_saved = 1;
933
934 return growth;
935 }
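/* For example, a 30 insn static function that is not otherwise needed and
   has two non-cloned call sites grows the unit by (30 - 10) * 2 == 40
   insns, minus the 30 insns of the offline copy that can then be
   eliminated, for a net growth of 10 insns.  */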
936
937 /* Update insn sizes after inlining WHAT into TO, which is already
938 inlined into all nodes in the INLINED array. */
939
940 static void
941 cgraph_mark_inline (struct cgraph_node *to, struct cgraph_node *what,
942 struct cgraph_node **inlined, int ninlined,
943 struct cgraph_node **inlined_callees,
944 int ninlined_callees)
945 {
946 int i;
947 int times = 0;
948 int clones = 0;
949 struct cgraph_edge *e;
950 bool called = false;
951 int new_insns;
952
953 what->global.inlined = 1;
954 for (e = what->callers; e; e = e->next_caller)
955 {
956 if (e->caller == to)
957 {
958 if (!e->inline_failed)
959 continue;
960 e->inline_failed = NULL;
961 times++;
962 clones += e->caller->global.cloned_times;
963 }
964 else if (e->inline_failed)
965 called = true;
966 }
967 if (!times)
968 abort ();
969 ncalls_inlined += times;
970
971 new_insns = cgraph_estimate_size_after_inlining (times, to, what);
972 if (to->global.will_be_output)
973 overall_insns += new_insns - to->global.insns;
974 to->global.insns = new_insns;
975
976 if (!called && !what->needed && !what->origin
977 && flag_unit_at_a_time
978 && !DECL_EXTERNAL (what->decl))
979 {
980 if (!what->global.will_be_output)
981 abort ();
982 clones--;
983 nfunctions_inlined++;
984 what->global.will_be_output = 0;
985 overall_insns -= what->global.insns;
986 }
987 what->global.cloned_times += clones;
988 for (i = 0; i < ninlined; i++)
989 {
990 new_insns =
991 cgraph_estimate_size_after_inlining (INLINED_TIMES (inlined[i]) *
992 times, inlined[i], what);
993 if (inlined[i]->global.will_be_output)
994 overall_insns += new_insns - inlined[i]->global.insns;
995 inlined[i]->global.insns = new_insns;
996 }
997 for (i = 0; i < ninlined_callees; i++)
998 {
999 inlined_callees[i]->global.cloned_times +=
1000 INLINED_TIMES (inlined_callees[i]) * clones;
1001 }
1002 }
1003
1004 /* Return false when inlining WHAT into TO is not a good idea as it would
1005 cause too large a growth of function bodies. */
1006
1007 static bool
1008 cgraph_check_inline_limits (struct cgraph_node *to, struct cgraph_node *what,
1009 struct cgraph_node **inlined, int ninlined,
1010 const char **reason)
1011 {
1012 int i;
1013 int times = 0;
1014 struct cgraph_edge *e;
1015 int newsize;
1016 int limit;
1017
1018 for (e = to->callees; e; e = e->next_callee)
1019 if (e->callee == what)
1020 times++;
1021
1022 /* When inlining a large function body called once into a small function,
1023 take the inlined function as the base for limiting the growth. */
1024 if (to->local.self_insns > what->local.self_insns)
1025 limit = to->local.self_insns;
1026 else
1027 limit = what->local.self_insns;
1028
1029 limit += limit * PARAM_VALUE (PARAM_LARGE_FUNCTION_GROWTH) / 100;
1030
1031 newsize = cgraph_estimate_size_after_inlining (times, to, what);
1032 if (newsize > PARAM_VALUE (PARAM_LARGE_FUNCTION_INSNS)
1033 && newsize > limit)
1034 {
1035 *reason = N_("--param large-function-growth limit reached");
1036 return false;
1037 }
1038 for (i = 0; i < ninlined; i++)
1039 {
1040 newsize =
1041 cgraph_estimate_size_after_inlining (INLINED_TIMES (inlined[i]) *
1042 times, inlined[i], what);
1043 if (newsize > PARAM_VALUE (PARAM_LARGE_FUNCTION_INSNS)
1044 && newsize >
1045 inlined[i]->local.self_insns *
1046 (100 + PARAM_VALUE (PARAM_LARGE_FUNCTION_GROWTH)) / 100)
1047 {
1048 *reason = N_("--param large-function-growth limit reached while inlining the caller");
1049 return false;
1050 }
1051 }
1052 return true;
1053 }
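/* For example, with --param large-function-growth set to 100, inlining a
   60 insn function into all three of its calls in a 100 insn caller is
   estimated as (60 - 10) * 3 + 100 == 250 insns; that exceeds the
   100 + 100 * 100 / 100 == 200 insn limit, so it is rejected whenever
   250 also exceeds --param large-function-insns.  */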
1054
1055 /* Return true when function N is small enough to be inlined. */
1056
1057 static bool
1058 cgraph_default_inline_p (struct cgraph_node *n)
1059 {
1060 if (!DECL_INLINE (n->decl) || !DECL_SAVED_TREE (n->decl))
1061 return false;
1062 if (DECL_DECLARED_INLINE_P (n->decl))
1063 return n->global.insns < MAX_INLINE_INSNS_SINGLE;
1064 else
1065 return n->global.insns < MAX_INLINE_INSNS_AUTO;
1066 }
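/* So a function explicitly declared inline is measured against --param
   max-inline-insns-single, while one considered only because of
   -finline-functions is measured against the typically smaller --param
   max-inline-insns-auto.  */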
1067
1068 /* Set inline_failed for all callers of given function to REASON. */
1069
1070 static void
1071 cgraph_set_inline_failed (struct cgraph_node *node, const char *reason)
1072 {
1073 struct cgraph_edge *e;
1074
1075 if (cgraph_dump_file)
1076 fprintf (cgraph_dump_file, "Inlining failed: %s\n", reason);
1077 for (e = node->callers; e; e = e->next_caller)
1078 if (e->inline_failed)
1079 e->inline_failed = reason;
1080 }
1081
1082 /* We use a greedy algorithm for inlining small functions: all inline
1083 candidates are put into a prioritized heap based on the estimated
1084 growth of the overall number of instructions, and the estimates are
1085 updated as inlining proceeds.
1086 INLINED and INLINED_CALLEES are just pointers to arrays large enough
1087 to be passed to cgraph_inlined_into and cgraph_inlined_callees. */
1088
1089 static void
1090 cgraph_decide_inlining_of_small_functions (struct cgraph_node **inlined,
1091 struct cgraph_node **inlined_callees)
1092 {
1093 int i;
1094 struct cgraph_node *node;
1095 fibheap_t heap = fibheap_new ();
1096 struct fibnode **heap_node =
1097 xcalloc (cgraph_max_uid, sizeof (struct fibnode *));
1098 int ninlined, ninlined_callees;
1099 int max_insns = ((HOST_WIDEST_INT) initial_insns
1100 * (100 + PARAM_VALUE (PARAM_INLINE_UNIT_GROWTH)) / 100);
1101
1102 /* Put all inline candidates into the heap. */
1103
1104 for (node = cgraph_nodes; node; node = node->next)
1105 {
1106 if (!node->local.inlinable || !node->callers
1107 || node->local.disregard_inline_limits)
1108 continue;
1109
1110 if (!cgraph_default_inline_p (node))
1111 {
1112 cgraph_set_inline_failed (node,
1113 N_("--param max-inline-insns-single limit reached"));
1114 continue;
1115 }
1116 heap_node[node->uid] =
1117 fibheap_insert (heap, cgraph_estimate_growth (node), node);
1118 }
1119
1120 if (cgraph_dump_file)
1121 fprintf (cgraph_dump_file, "\nDeciding on smaller functions:\n");
1122 while (overall_insns <= max_insns && (node = fibheap_extract_min (heap)))
1123 {
1124 struct cgraph_edge *e;
1125 int old_insns = overall_insns;
1126
1127 heap_node[node->uid] = NULL;
1128 if (cgraph_dump_file)
1129 fprintf (cgraph_dump_file,
1130 "\nConsidering %s with %i insns\n"
1131 " Estimated growth is %+i insns.\n",
1132 cgraph_node_name (node), node->global.insns,
1133 cgraph_estimate_growth (node));
1134 if (!cgraph_default_inline_p (node))
1135 {
1136 cgraph_set_inline_failed (node,
1137 N_("--param max-inline-insns-single limit reached after inlining into the callee"));
1138 continue;
1139 }
1140 ninlined_callees = cgraph_inlined_callees (node, inlined_callees);
1141 for (e = node->callers; e; e = e->next_caller)
1142 if (e->inline_failed)
1143 {
1144 /* Marking a recursive function inline has sane semantics and
1145 thus we should not warn about it. */
1146 if (e->caller == node)
1147 {
1148 e->inline_failed = "";
1149 continue;
1150 }
1151 ninlined = cgraph_inlined_into (e->caller, inlined);
1152 if (e->callee->output)
1153 e->inline_failed = "";
1154 if (e->callee->output
1155 || !cgraph_check_inline_limits (e->caller, node, inlined,
1156 ninlined, &e->inline_failed))
1157 {
1158 for (i = 0; i < ninlined; i++)
1159 inlined[i]->output = 0, inlined[i]->aux = 0;
1160 if (cgraph_dump_file)
1161 fprintf (cgraph_dump_file, " Not inlining into %s.\n",
1162 cgraph_node_name (e->caller));
1163 continue;
1164 }
1165 cgraph_mark_inline (e->caller, node, inlined, ninlined,
1166 inlined_callees, ninlined_callees);
1167 if (heap_node[e->caller->uid])
1168 fibheap_replace_key (heap, heap_node[e->caller->uid],
1169 cgraph_estimate_growth (e->caller));
1170
1171 /* The sizes of the functions we inlined into have changed, so
1172 update the keys. */
1173 for (i = 0; i < ninlined; i++)
1174 {
1175 inlined[i]->output = 0, inlined[i]->aux = 0;
1176 if (heap_node[inlined[i]->uid])
1177 fibheap_replace_key (heap, heap_node[inlined[i]->uid],
1178 cgraph_estimate_growth (inlined[i]));
1179 }
1180 if (cgraph_dump_file)
1181 fprintf (cgraph_dump_file,
1182 " Inlined into %s which now has %i insns.\n",
1183 cgraph_node_name (e->caller),
1184 e->caller->global.insns);
1185 }
1186
1187 /* Similarly all functions called by the function we just inlined
1188 are now called more times; update keys. */
1189
1190 for (e = node->callees; e; e = e->next_callee)
1191 if (e->inline_failed && heap_node[e->callee->uid])
1192 fibheap_replace_key (heap, heap_node[e->callee->uid],
1193 cgraph_estimate_growth (e->callee));
1194
1195 for (i = 0; i < ninlined_callees; i++)
1196 {
1197 struct cgraph_edge *e;
1198
1199 for (e = inlined_callees[i]->callees; e; e = e->next_callee)
1200 if (e->inline_failed && heap_node[e->callee->uid])
1201 fibheap_replace_key (heap, heap_node[e->callee->uid],
1202 cgraph_estimate_growth (e->callee));
1203
1204 inlined_callees[i]->output = 0;
1205 inlined_callees[i]->aux = 0;
1206 }
1207 if (cgraph_dump_file)
1208 fprintf (cgraph_dump_file,
1209 " Inlined %i times for a net change of %+i insns.\n",
1210 node->global.cloned_times, overall_insns - old_insns);
1211 }
1212 while ((node = fibheap_extract_min (heap)) != NULL)
1213 if (!node->local.disregard_inline_limits)
1214 cgraph_set_inline_failed (node, N_("--param inline-unit-growth limit reached"));
1215 fibheap_delete (heap);
1216 free (heap_node);
1217 }
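/* For example, with --param inline-unit-growth set to 50 and a unit of
   1000 insns initially, the loop above stops pulling candidates off the
   heap once overall_insns exceeds 1000 * (100 + 50) / 100 == 1500.  */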
1218
1219 /* Decide on the inlining. We do so in topological order to avoid the
1220 expense of updating the data structures. */
1221
1222 static void
1223 cgraph_decide_inlining (void)
1224 {
1225 struct cgraph_node *node;
1226 int nnodes;
1227 struct cgraph_node **order =
1228 xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
1229 struct cgraph_node **inlined =
1230 xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
1231 struct cgraph_node **inlined_callees =
1232 xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
1233 int ninlined;
1234 int ninlined_callees;
1235 int old_insns = 0;
1236 int i, y;
1237
1238 for (node = cgraph_nodes; node; node = node->next)
1239 initial_insns += node->local.self_insns;
1240 overall_insns = initial_insns;
1241
1242 nnodes = cgraph_postorder (order);
1243
1244 if (cgraph_dump_file)
1245 fprintf (cgraph_dump_file,
1246 "\nDeciding on inlining. Starting with %i insns.\n",
1247 initial_insns);
1248
1249 for (node = cgraph_nodes; node; node = node->next)
1250 node->aux = 0;
1251
1252 if (cgraph_dump_file)
1253 fprintf (cgraph_dump_file, "\nInlining always_inline functions:\n");
1254 #ifdef ENABLE_CHECKING
1255 for (node = cgraph_nodes; node; node = node->next)
1256 if (node->aux || node->output)
1257 abort ();
1258 #endif
1259
1260 /* In the first pass mark all always_inline edges. Do this with a priority
1261 so none of our later choices will make this impossible. */
1262 for (i = nnodes - 1; i >= 0; i--)
1263 {
1264 struct cgraph_edge *e;
1265
1266 node = order[i];
1267
1268 for (e = node->callees; e; e = e->next_callee)
1269 if (e->callee->local.disregard_inline_limits)
1270 break;
1271 if (!e)
1272 continue;
1273 if (cgraph_dump_file)
1274 fprintf (cgraph_dump_file,
1275 "\nConsidering %s %i insns (always inline)\n",
1276 cgraph_node_name (e->callee), e->callee->global.insns);
1277 ninlined = cgraph_inlined_into (order[i], inlined);
1278 for (; e; e = e->next_callee)
1279 {
1280 old_insns = overall_insns;
1281 if (!e->inline_failed || !e->callee->local.inlinable
1282 || !e->callee->local.disregard_inline_limits)
1283 continue;
1284 if (e->callee->output || e->callee == node)
1285 {
1286 e->inline_failed = N_("recursive inlining");
1287 continue;
1288 }
1289 ninlined_callees =
1290 cgraph_inlined_callees (e->callee, inlined_callees);
1291 cgraph_mark_inline (node, e->callee, inlined, ninlined,
1292 inlined_callees, ninlined_callees);
1293 for (y = 0; y < ninlined_callees; y++)
1294 inlined_callees[y]->output = 0, inlined_callees[y]->aux = 0;
1295 if (cgraph_dump_file)
1296 fprintf (cgraph_dump_file,
1297 " Inlined into %s which now has %i insns.\n",
1298 cgraph_node_name (node->callees->caller),
1299 node->callees->caller->global.insns);
1300 }
1301 if (cgraph_dump_file && node->global.cloned_times > 0)
1302 fprintf (cgraph_dump_file,
1303 " Inlined %i times for a net change of %+i insns.\n",
1304 node->global.cloned_times, overall_insns - old_insns);
1305 for (y = 0; y < ninlined; y++)
1306 inlined[y]->output = 0, inlined[y]->aux = 0;
1307 }
1308 #ifdef ENABLE_CHECKING
1309 for (node = cgraph_nodes; node; node = node->next)
1310 if (node->aux || node->output)
1311 abort ();
1312 #endif
1313
1314 if (!flag_really_no_inline)
1315 {
1316 cgraph_decide_inlining_of_small_functions (inlined, inlined_callees);
1317 #ifdef ENABLE_CHECKING
1318 for (node = cgraph_nodes; node; node = node->next)
1319 if (node->aux || node->output)
1320 abort ();
1321 #endif
1322
1323 if (cgraph_dump_file)
1324 fprintf (cgraph_dump_file, "\nDeciding on functions called once:\n");
1325
1326 /* And finally decide what functions are called once. */
1327
1328 for (i = nnodes - 1; i >= 0; i--)
1329 {
1330 node = order[i];
1331
1332 if (node->callers && !node->callers->next_caller && !node->needed
1333 && node->local.inlinable && node->callers->inline_failed
1334 && !DECL_EXTERNAL (node->decl) && !DECL_COMDAT (node->decl))
1335 {
1336 bool ok = true;
1337 struct cgraph_node *node1;
1338
1339 /* Verify that we won't duplicate the caller. */
1340 for (node1 = node->callers->caller;
1341 node1->callers && !node1->callers->inline_failed
1342 && ok; node1 = node1->callers->caller)
1343 if (node1->callers->next_caller || node1->needed)
1344 ok = false;
1345 if (ok)
1346 {
1347 const char *dummy_reason;
1348 if (cgraph_dump_file)
1349 fprintf (cgraph_dump_file,
1350 "\nConsidering %s %i insns.\n"
1351 " Called once from %s %i insns.\n",
1352 cgraph_node_name (node), node->global.insns,
1353 cgraph_node_name (node->callers->caller),
1354 node->callers->caller->global.insns);
1355 ninlined = cgraph_inlined_into (node->callers->caller,
1356 inlined);
1357 old_insns = overall_insns;
1358
1359 /* Inlining a function called once never causes inlining warnings. */
1360 if (cgraph_check_inline_limits
1361 (node->callers->caller, node, inlined, ninlined,
1362 &dummy_reason))
1363 {
1364 ninlined_callees =
1365 cgraph_inlined_callees (node, inlined_callees);
1366 cgraph_mark_inline (node->callers->caller, node, inlined,
1367 ninlined, inlined_callees,
1368 ninlined_callees);
1369 for (y = 0; y < ninlined_callees; y++)
1370 inlined_callees[y]->output = 0, inlined_callees[y]->aux = 0;
1371 if (cgraph_dump_file)
1372 fprintf (cgraph_dump_file,
1373 " Inlined into %s which now has %i insns"
1374 " for a net change of %+i insns.\n",
1375 cgraph_node_name (node->callers->caller),
1376 node->callers->caller->global.insns,
1377 overall_insns - old_insns);
1378 }
1379 else
1380 {
1381 if (cgraph_dump_file)
1382 fprintf (cgraph_dump_file,
1383 " Inline limit reached, not inlined.\n");
1384 }
1385 for (y = 0; y < ninlined; y++)
1386 inlined[y]->output = 0, inlined[y]->aux = 0;
1387 }
1388 }
1389 }
1390 }
1391 cgraph_remove_unreachable_nodes ();
1392
1393 if (cgraph_dump_file)
1394 fprintf (cgraph_dump_file,
1395 "\nInlined %i calls, eliminated %i functions, "
1396 "%i insns turned to %i insns.\n\n",
1397 ncalls_inlined, nfunctions_inlined, initial_insns,
1398 overall_insns);
1399 free (order);
1400 free (inlined);
1401 free (inlined_callees);
1402 }
1403
1404 /* Decide on inlining of calls in NODE incrementally, as functions are
1405 finalized; used when not compiling unit-at-a-time. */
1406
1407 static void
1408 cgraph_decide_inlining_incrementally (struct cgraph_node *node)
1409 {
1410 struct cgraph_edge *e;
1411 struct cgraph_node **inlined =
1412 xmalloc (sizeof (struct cgraph_node *) * cgraph_n_nodes);
1413 struct cgraph_node **inlined_callees =
1414 xmalloc (sizeof (struct cgraph_node *) * cgraph_n_nodes);
1415 int ninlined;
1416 int ninlined_callees;
1417 int y;
1418
1419 ninlined = cgraph_inlined_into (node, inlined);
1420
1421 /* First of all look for always inline functions. */
1422 for (e = node->callees; e; e = e->next_callee)
1423 if (e->callee->local.disregard_inline_limits && e->inline_failed
1424 /* ??? It is possible that renaming a variable removed the function body
1425 in duplicate_decls. See gcc.c-torture/compile/20011119-2.c */
1426 && DECL_SAVED_TREE (e->callee->decl))
1427 {
1428 if (e->callee->output || e->callee == node)
1429 {
1430 e->inline_failed = N_("recursive inlining");
1431 continue;
1432 }
1433 ninlined_callees = cgraph_inlined_callees (e->callee, inlined_callees);
1434 cgraph_mark_inline (node, e->callee, inlined, ninlined,
1435 inlined_callees, ninlined_callees);
1436 for (y = 0; y < ninlined_callees; y++)
1437 inlined_callees[y]->output = 0, inlined_callees[y]->aux = 0;
1438 }
1439
1440 if (!flag_really_no_inline)
1441 {
1442 /* Now do the automatic inlining. */
1443 for (e = node->callees; e; e = e->next_callee)
1444 if (e->callee->local.inlinable && e->inline_failed
1445 && cgraph_default_inline_p (e->callee)
1446 && cgraph_check_inline_limits (node, e->callee, inlined,
1447 ninlined, &e->inline_failed)
1448 && DECL_SAVED_TREE (e->callee->decl))
1449 {
1450 /* Marking a recursive function inline has sane semantics and thus
1451 we should not warn about it. */
1452 if (e->callee->output || e->callee == node)
1453 {
1454 e->inline_failed = "";
1455 continue;
1456 }
1457 ninlined_callees = cgraph_inlined_callees (e->callee,
1458 inlined_callees);
1459 cgraph_mark_inline (node, e->callee, inlined, ninlined,
1460 inlined_callees, ninlined_callees);
1461 for (y = 0; y < ninlined_callees; y++)
1462 inlined_callees[y]->output = 0, inlined_callees[y]->aux = 0;
1463 }
1464 }
1465
1466 /* Clear the flags set by cgraph_inlined_into. */
1467 for (y = 0; y < ninlined; y++)
1468 inlined[y]->output = 0, inlined[y]->aux = 0;
1469
1470 free (inlined);
1471 free (inlined_callees);
1472 }
1473
1474
1475 /* Return true when CALLEE_DECL should be inlined into CALLER_DECL.
1476 When false is returned and REASON is non-NULL, set it to the reason
1477 why the call was not inlined. */
1478
1479 bool
1480 cgraph_inline_p (tree caller_decl, tree callee_decl, const char **reason)
1481 {
1482 struct cgraph_node *caller = cgraph_node (caller_decl);
1483 struct cgraph_node *callee = cgraph_node (callee_decl);
1484 struct cgraph_edge *e;
1485
1486 for (e = caller->callees; e; e = e->next_callee)
1487 if (e->callee == callee)
1488 {
1489 if (e->inline_failed && reason)
1490 *reason = e->inline_failed;
1491 return !e->inline_failed;
1492 }
1493 /* We do not record builtins in the callgraph. Perhaps it would make more
1494 sense to do so and then prune out those not overridden by an explicit
1495 function body. */
1496 if (reason)
1497 *reason = "originally indirect function calls never inlined";
1498 return false;
1499 }
1500 /* Expand all functions that must be output.
1501
1502 Attempt to topologically sort the nodes so that a function is output
1503 when all called functions are already assembled, to allow data to be
1504 propagated across the callgraph. Use a stack to get smaller distance
1505 between a function and its callees (later we may choose to use a more
1506 sophisticated algorithm for function reordering; we will likely want
1507 to use subsections to make the output functions appear in top-down
1508 order). */
1509
1510 static void
1511 cgraph_expand_all_functions (void)
1512 {
1513 struct cgraph_node *node;
1514 struct cgraph_node **order =
1515 xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
1516 int order_pos = 0;
1517 int i;
1518
1519 cgraph_mark_functions_to_output ();
1520
1521 order_pos = cgraph_postorder (order);
1522
1523 for (i = order_pos - 1; i >= 0; i--)
1524 {
1525 node = order[i];
1526 if (node->output)
1527 {
1528 if (!node->reachable)
1529 abort ();
1530 node->output = 0;
1531 cgraph_expand_function (node);
1532 }
1533 }
1534 free (order);
1535 }
1536
1537 /* Mark all local functions.
1538
1539 A local function is one whose calls can occur only in the
1540 current compilation unit and all of whose calls are explicit,
1541 so we can change its calling convention.
1542 We simply mark all static functions whose address is not taken
1543 as local. */
1544
1545 static void
1546 cgraph_mark_local_functions (void)
1547 {
1548 struct cgraph_node *node;
1549
1550 if (cgraph_dump_file)
1551 fprintf (cgraph_dump_file, "\nMarking local functions:");
1552
1553 /* Figure out which functions we want to mark as local. */
1554 for (node = cgraph_nodes; node; node = node->next)
1555 {
1556 node->local.local = (!node->needed
1557 && DECL_SAVED_TREE (node->decl)
1558 && !TREE_PUBLIC (node->decl));
1559 if (cgraph_dump_file && node->local.local)
1560 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1561 }
1562 if (cgraph_dump_file)
1563 fprintf (cgraph_dump_file, "\n\n");
1564 }
1565
1566 /* Perform simple optimizations based on the callgraph. */
1567
1568 void
1569 cgraph_optimize (void)
1570 {
1571 if (!flag_unit_at_a_time)
1572 return;
1573 timevar_push (TV_CGRAPHOPT);
1574 if (!quiet_flag)
1575 fprintf (stderr, "Performing interprocedural optimizations\n");
1576
1577 cgraph_mark_local_functions ();
1578 if (cgraph_dump_file)
1579 {
1580 fprintf (cgraph_dump_file, "Marked ");
1581 dump_cgraph (cgraph_dump_file);
1582 }
1583
1584 cgraph_decide_inlining ();
1585 cgraph_global_info_ready = true;
1586 if (cgraph_dump_file)
1587 {
1588 fprintf (cgraph_dump_file, "Optimized ");
1589 dump_cgraph (cgraph_dump_file);
1590 }
1591 timevar_pop (TV_CGRAPHOPT);
1592
1593 /* Output everything. */
1594 if (!quiet_flag)
1595 fprintf (stderr, "Assembling functions:\n");
1596 cgraph_expand_all_functions ();
1597 if (cgraph_dump_file)
1598 {
1599 fprintf (cgraph_dump_file, "\nFinal ");
1600 dump_cgraph (cgraph_dump_file);
1601 }
1602 }