]> gcc.gnu.org Git - gcc.git/blame - gcc/cgraphunit.c
c-common.h: Fix comment typos.
[gcc.git] / gcc / cgraphunit.c
CommitLineData
b58b1157 1/* Callgraph based intraprocedural optimizations.
b684a3df 2 Copyright (C) 2003, 2004 Free Software Foundation, Inc.
1c4a429a
JH
3 Contributed by Jan Hubicka
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING. If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA. */
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "tree.h"
27#include "tree-inline.h"
28#include "langhooks.h"
29#include "hashtab.h"
30#include "toplev.h"
31#include "flags.h"
32#include "ggc.h"
33#include "debug.h"
34#include "target.h"
35#include "cgraph.h"
dafc5b82 36#include "diagnostic.h"
a194aa56 37#include "timevar.h"
b58b1157
JH
38#include "params.h"
39#include "fibheap.h"
40#include "c-common.h"
dc0bfe6a 41#include "intl.h"
b58b1157
JH
42
43#define INSNS_PER_CALL 10
1c4a429a 44
a20af5b8 45static void cgraph_expand_all_functions (void);
db0e878d
AJ
46static void cgraph_mark_functions_to_output (void);
47static void cgraph_expand_function (struct cgraph_node *);
48static tree record_call_1 (tree *, int *, void *);
49static void cgraph_mark_local_functions (void);
50static void cgraph_optimize_function (struct cgraph_node *);
4a46cbfb
JH
51static bool cgraph_default_inline_p (struct cgraph_node *n);
52static void cgraph_analyze_function (struct cgraph_node *node);
d4d1ebc1 53static void cgraph_decide_inlining_incrementally (struct cgraph_node *);
1c4a429a 54
b58b1157
JH
55/* Statistics we collect about inlining algorithm. */
56static int ncalls_inlined;
57static int nfunctions_inlined;
58static int initial_insns;
59static int overall_insns;
60
7dff32e6
JS
61/* Records tree nodes seen in cgraph_create_edges. Simply using
62 walk_tree_without_duplicates doesn't guarantee each node is visited
63 once because it gets a new htab upon each recursive call from
64 record_calls_1. */
65static htab_t visited_nodes;
66
8dafba3c
RH
67/* Determine if function DECL is needed. That is, visible to something
68 either outside this translation unit, something magic in the system
69 configury, or (if not doing unit-at-a-time) to something we havn't
70 seen yet. */
71
72static bool
73decide_is_function_needed (struct cgraph_node *node, tree decl)
74{
75 /* If we decided it was needed before, but at the time we didn't have
76 the body of the function available, then it's still needed. We have
77 to go back and re-check its dependencies now. */
78 if (node->needed)
79 return true;
80
81 /* Externally visible functions must be output. The exception is
82 COMDAT functions that must be output only when they are needed. */
83 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
84 return true;
85
86 /* Constructors and destructors are reachable from the runtime by
87 some mechanism. */
88 if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
89 return true;
90
91 /* If the user told us it is used, then it must be so. */
92 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
93 return true;
94
95 /* ??? If the assembler name is set by hand, it is possible to assemble
96 the name later after finalizing the function and the fact is noticed
97 in assemble_name then. This is arguably a bug. */
98 if (DECL_ASSEMBLER_NAME_SET_P (decl)
99 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
100 return true;
101
102 if (flag_unit_at_a_time)
103 return false;
104
105 /* If not doing unit at a time, then we'll only defer this function
106 if its marked for inlining. Otherwise we want to emit it now. */
107
108 /* "extern inline" functions are never output locally. */
109 if (DECL_EXTERNAL (decl))
110 return false;
2067c116 111 /* We want to emit COMDAT functions only when absolutely necessary. */
d853a20e 112 if (DECL_COMDAT (decl))
8dafba3c
RH
113 return false;
114 if (!DECL_INLINE (decl)
115 || (!node->local.disregard_inline_limits
116 /* When declared inline, defer even the uninlinable functions.
7d82fe7c 117 This allows them to be eliminated when unused. */
8dafba3c 118 && !DECL_DECLARED_INLINE_P (decl)
d4d1ebc1 119 && (!node->local.inlinable || !cgraph_default_inline_p (node))))
8dafba3c
RH
120 return true;
121
122 return false;
123}
124
d853a20e
JH
125/* When not doing unit-at-a-time, output all functions enqueued.
126 Return true when such a functions were found. */
f6d1b84a
RH
127
128bool
d853a20e
JH
129cgraph_assemble_pending_functions (void)
130{
131 bool output = false;
132
133 if (flag_unit_at_a_time)
134 return false;
135
136 while (cgraph_nodes_queue)
137 {
138 struct cgraph_node *n = cgraph_nodes_queue;
139
140 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
141 if (!n->origin && !DECL_EXTERNAL (n->decl))
f6d1b84a
RH
142 {
143 cgraph_expand_function (n);
144 output = true;
145 }
d853a20e 146 }
f6d1b84a 147
d853a20e
JH
148 return output;
149}
150
6b00c969
RH
151/* DECL has been parsed. Take it, queue it, compile it at the whim of the
152 logic in effect. If NESTED is true, then our caller cannot stand to have
153 the garbage collector run at the moment. We would need to either create
154 a new GC context, or just not compile right now. */
1c4a429a
JH
155
156void
6b00c969 157cgraph_finalize_function (tree decl, bool nested)
1c4a429a
JH
158{
159 struct cgraph_node *node = cgraph_node (decl);
160
d853a20e
JH
161 if (node->local.finalized)
162 {
163 /* As an GCC extension we allow redefinition of the function. The
6b00c969
RH
164 semantics when both copies of bodies differ is not well defined.
165 We replace the old body with new body so in unit at a time mode
166 we always use new body, while in normal mode we may end up with
167 old body inlined into some functions and new body expanded and
168 inlined in others.
d853a20e 169
6b00c969 170 ??? It may make more sense to use one body for inlining and other
2067c116 171 body for expanding the function but this is difficult to do. */
6b00c969 172
f6d1b84a
RH
173 /* If node->output is set, then this is a unit-at-a-time compilation
174 and we have already begun whole-unit analysis. This is *not*
175 testing for whether we've already emitted the function. That
176 case can be sort-of legitimately seen with real function
177 redefinition errors. I would argue that the front end should
178 never present us with such a case, but don't enforce that for now. */
179 if (node->output)
6b00c969
RH
180 abort ();
181
182 /* Reset our datastructures so we can analyze the function again. */
cd4dea62
JH
183 memset (&node->local, 0, sizeof (node->local));
184 memset (&node->global, 0, sizeof (node->global));
185 memset (&node->rtl, 0, sizeof (node->rtl));
25c84396 186 node->analyzed = false;
95c755e9 187 node->local.redefined_extern_inline = true;
cd4dea62 188 while (node->callees)
cb967da5 189 cgraph_remove_edge (node, node->callees->callee);
6b00c969 190
cd4dea62
JH
191 /* We may need to re-queue the node for assembling in case
192 we already proceeded it and ignored as not needed. */
193 if (node->reachable && !flag_unit_at_a_time)
d853a20e 194 {
cd4dea62
JH
195 struct cgraph_node *n;
196
197 for (n = cgraph_nodes_queue; n; n = n->next_needed)
198 if (n == node)
199 break;
200 if (!n)
201 node->reachable = 0;
d853a20e 202 }
d853a20e 203 }
6b00c969 204
d853a20e 205 notice_global_symbol (decl);
1c4a429a 206 node->decl = decl;
f6981e16 207 node->local.finalized = true;
1c4a429a 208
8dafba3c
RH
209 /* If not unit at a time, then we need to create the call graph
210 now, so that called functions can be queued and emitted now. */
4a46cbfb 211 if (!flag_unit_at_a_time)
d4d1ebc1
JH
212 {
213 cgraph_analyze_function (node);
214 cgraph_decide_inlining_incrementally (node);
215 }
4a46cbfb 216
8dafba3c
RH
217 if (decide_is_function_needed (node, decl))
218 cgraph_mark_needed_node (node);
219
6b00c969
RH
220 /* If not unit at a time, go ahead and emit everything we've found
221 to be reachable at this time. */
222 if (!nested)
d34cb6a1
JH
223 {
224 if (!cgraph_assemble_pending_functions ())
225 ggc_collect ();
226 }
1668aabc 227
8dafba3c 228 /* If we've not yet emitted decl, tell the debug info about it. */
6b00c969 229 if (!TREE_ASM_WRITTEN (decl))
8dafba3c 230 (*debug_hooks->deferred_inline_function) (decl);
1c4a429a
JH
231}
232
1c4a429a
JH
233/* Walk tree and record all calls. Called via walk_tree. */
234static tree
db0e878d 235record_call_1 (tree *tp, int *walk_subtrees, void *data)
1c4a429a 236{
25c84396
RH
237 tree t = *tp;
238
239 switch (TREE_CODE (t))
1c4a429a 240 {
25c84396
RH
241 case VAR_DECL:
242 /* ??? Really, we should mark this decl as *potentially* referenced
243 by this function and re-examine whether the decl is actually used
244 after rtl has been generated. */
245 if (TREE_STATIC (t))
246 cgraph_varpool_mark_needed_node (cgraph_varpool_node (t));
247 break;
248
249 case ADDR_EXPR:
250 if (flag_unit_at_a_time)
251 {
252 /* Record dereferences to the functions. This makes the
253 functions reachable unconditionally. */
254 tree decl = TREE_OPERAND (*tp, 0);
255 if (TREE_CODE (decl) == FUNCTION_DECL)
256 cgraph_mark_needed_node (cgraph_node (decl));
257 }
258 break;
259
260 case CALL_EXPR:
261 {
262 tree decl = get_callee_fndecl (*tp);
263 if (decl && TREE_CODE (decl) == FUNCTION_DECL)
264 {
25c84396
RH
265 cgraph_record_call (data, decl);
266
267 /* When we see a function call, we don't want to look at the
268 function reference in the ADDR_EXPR that is hanging from
269 the CALL_EXPR we're examining here, because we would
270 conclude incorrectly that the function's address could be
271 taken by something that is not a function call. So only
272 walk the function parameter list, skip the other subtrees. */
273
274 walk_tree (&TREE_OPERAND (*tp, 1), record_call_1, data,
275 visited_nodes);
276 *walk_subtrees = 0;
277 }
278 break;
279 }
280
281 default:
282 /* Save some cycles by not walking types and declaration as we
283 won't find anything useful there anyway. */
284 if (DECL_P (*tp) || TYPE_P (*tp))
1c4a429a 285 {
1c4a429a 286 *walk_subtrees = 0;
25c84396 287 break;
1c4a429a 288 }
25c84396
RH
289
290 if ((unsigned int) TREE_CODE (t) >= LAST_AND_UNUSED_TREE_CODE)
291 return (*lang_hooks.callgraph.analyze_expr) (tp, walk_subtrees, data);
292 break;
1c4a429a 293 }
25c84396 294
1c4a429a
JH
295 return NULL;
296}
297
7660e67e 298/* Create cgraph edges for function calls inside BODY from DECL. */
1c4a429a
JH
299
300void
db0e878d 301cgraph_create_edges (tree decl, tree body)
1c4a429a 302{
7660e67e
SB
303 /* The nodes we're interested in are never shared, so walk
304 the tree ignoring duplicates. */
7dff32e6
JS
305 visited_nodes = htab_create (37, htab_hash_pointer,
306 htab_eq_pointer, NULL);
307 walk_tree (&body, record_call_1, decl, visited_nodes);
308 htab_delete (visited_nodes);
309 visited_nodes = NULL;
1c4a429a
JH
310}
311
e767b5be
JH
312/* Analyze the function scheduled to be output. */
313static void
314cgraph_analyze_function (struct cgraph_node *node)
315{
316 tree decl = node->decl;
dc0bfe6a 317 struct cgraph_edge *e;
e767b5be 318
25c84396 319 current_function_decl = decl;
e767b5be
JH
320
321 /* First kill forward declaration so reverse inlining works properly. */
322 cgraph_create_edges (decl, DECL_SAVED_TREE (decl));
323
324 node->local.inlinable = tree_inlinable_function_p (decl);
325 if (!DECL_ESTIMATED_INSNS (decl))
326 DECL_ESTIMATED_INSNS (decl)
327 = (*lang_hooks.tree_inlining.estimate_num_insns) (decl);
328 node->local.self_insns = DECL_ESTIMATED_INSNS (decl);
329 if (node->local.inlinable)
330 node->local.disregard_inline_limits
331 = (*lang_hooks.tree_inlining.disregard_inline_limits) (decl);
dc0bfe6a
JH
332 for (e = node->callers; e; e = e->next_caller)
333 if (e->inline_failed)
95c755e9
JH
334 {
335 if (node->local.redefined_extern_inline)
336 e->inline_failed = N_("redefined extern inline functions are not "
337 "considered for inlining");
338 else if (!node->local.inlinable)
339 e->inline_failed = N_("function not inlinable");
340 else
341 e->inline_failed = N_("function not considered for inlining");
342 }
b684a3df
JH
343 if (flag_really_no_inline && !node->local.disregard_inline_limits)
344 node->local.inlinable = 0;
e767b5be
JH
345 /* Inlining characteristics are maintained by the cgraph_mark_inline. */
346 node->global.insns = node->local.self_insns;
25c84396 347 if (!DECL_EXTERNAL (decl))
e767b5be
JH
348 {
349 node->global.cloned_times = 1;
350 node->global.will_be_output = true;
351 }
352
25c84396 353 node->analyzed = true;
d853a20e 354 current_function_decl = NULL;
e767b5be
JH
355}
356
1c4a429a
JH
357/* Analyze the whole compilation unit once it is parsed completely. */
358
359void
db0e878d 360cgraph_finalize_compilation_unit (void)
1c4a429a
JH
361{
362 struct cgraph_node *node;
1c4a429a 363
4a46cbfb 364 if (!flag_unit_at_a_time)
d853a20e
JH
365 {
366 cgraph_assemble_pending_functions ();
367 return;
368 }
4a46cbfb 369
e69529cd 370 cgraph_varpool_assemble_pending_decls ();
b58b1157
JH
371 if (!quiet_flag)
372 fprintf (stderr, "\nAnalyzing compilation unit\n");
e69529cd 373
a194aa56
JH
374 timevar_push (TV_CGRAPH);
375 if (cgraph_dump_file)
1c4a429a 376 {
7d82fe7c 377 fprintf (cgraph_dump_file, "Initial entry points:");
1668aabc
JH
378 for (node = cgraph_nodes; node; node = node->next)
379 if (node->needed && DECL_SAVED_TREE (node->decl))
a194aa56
JH
380 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
381 fprintf (cgraph_dump_file, "\n");
1c4a429a
JH
382 }
383
7660e67e
SB
384 /* Propagate reachability flag and lower representation of all reachable
385 functions. In the future, lowering will introduce new functions and
386 new entry points on the way (by template instantiation and virtual
387 method table generation for instance). */
1668aabc 388 while (cgraph_nodes_queue)
1c4a429a 389 {
e767b5be 390 struct cgraph_edge *edge;
1668aabc
JH
391 tree decl = cgraph_nodes_queue->decl;
392
393 node = cgraph_nodes_queue;
8bd87c4e 394 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
1c4a429a 395
cd4dea62
JH
396 /* ??? It is possible to create extern inline function and later using
397 weak alas attribute to kill it's body. See
398 gcc.c-torture/compile/20011119-1.c */
399 if (!DECL_SAVED_TREE (decl))
400 continue;
401
25c84396 402 if (node->analyzed || !node->reachable || !DECL_SAVED_TREE (decl))
1c4a429a
JH
403 abort ();
404
e767b5be 405 cgraph_analyze_function (node);
8dafba3c 406
1c4a429a 407 for (edge = node->callees; edge; edge = edge->next_callee)
e767b5be 408 if (!edge->callee->reachable)
8dafba3c
RH
409 cgraph_mark_reachable_node (edge->callee);
410
e69529cd 411 cgraph_varpool_assemble_pending_decls ();
1c4a429a 412 }
8dafba3c 413
1668aabc
JH
414 /* Collect entry points to the unit. */
415
a194aa56 416 if (cgraph_dump_file)
1668aabc 417 {
7d82fe7c 418 fprintf (cgraph_dump_file, "Unit entry points:");
1668aabc
JH
419 for (node = cgraph_nodes; node; node = node->next)
420 if (node->needed && DECL_SAVED_TREE (node->decl))
a194aa56 421 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
7d82fe7c 422 fprintf (cgraph_dump_file, "\n\nInitial ");
e767b5be 423 dump_cgraph (cgraph_dump_file);
1668aabc 424 }
7660e67e 425
a194aa56
JH
426 if (cgraph_dump_file)
427 fprintf (cgraph_dump_file, "\nReclaiming functions:");
1c4a429a
JH
428
429 for (node = cgraph_nodes; node; node = node->next)
430 {
431 tree decl = node->decl;
432
433 if (!node->reachable && DECL_SAVED_TREE (decl))
434 {
18d13f34 435 cgraph_remove_node (node);
a194aa56
JH
436 if (cgraph_dump_file)
437 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1c4a429a 438 }
9b0436b7
JH
439 else
440 node->next_needed = NULL;
1c4a429a 441 }
a194aa56 442 if (cgraph_dump_file)
7d82fe7c
KC
443 {
444 fprintf (cgraph_dump_file, "\n\nReclaimed ");
445 dump_cgraph (cgraph_dump_file);
446 }
1c4a429a 447 ggc_collect ();
a194aa56 448 timevar_pop (TV_CGRAPH);
1c4a429a
JH
449}
450
451/* Figure out what functions we want to assemble. */
452
453static void
db0e878d 454cgraph_mark_functions_to_output (void)
1c4a429a
JH
455{
456 struct cgraph_node *node;
457
1c4a429a
JH
458 for (node = cgraph_nodes; node; node = node->next)
459 {
460 tree decl = node->decl;
b58b1157 461 struct cgraph_edge *e;
dc0bfe6a 462
b58b1157
JH
463 if (node->output)
464 abort ();
465
466 for (e = node->callers; e; e = e->next_caller)
dc0bfe6a 467 if (e->inline_failed)
b58b1157 468 break;
1c4a429a 469
7660e67e
SB
470 /* We need to output all local functions that are used and not
471 always inlined, as well as those that are reachable from
472 outside the current compilation unit. */
1c4a429a
JH
473 if (DECL_SAVED_TREE (decl)
474 && (node->needed
b58b1157 475 || (e && node->reachable))
1c4a429a
JH
476 && !TREE_ASM_WRITTEN (decl) && !node->origin
477 && !DECL_EXTERNAL (decl))
478 node->output = 1;
479 }
480}
481
18d13f34 482/* Optimize the function before expansion. */
7660e67e 483
18d13f34 484static void
db0e878d 485cgraph_optimize_function (struct cgraph_node *node)
18d13f34
JH
486{
487 tree decl = node->decl;
488
a194aa56 489 timevar_push (TV_INTEGRATION);
b58b1157 490 /* optimize_inline_calls avoids inlining of current_function_decl. */
64384568 491 current_function_decl = decl;
18d13f34 492 if (flag_inline_trees)
b684a3df
JH
493 {
494 struct cgraph_edge *e;
495
496 for (e = node->callees; e; e = e->next_callee)
2d327012
JH
497 if (!e->inline_failed || warn_inline
498 || (DECL_DECLARED_INLINE_P (e->callee->decl)
499 && lookup_attribute ("always_inline",
500 DECL_ATTRIBUTES (e->callee->decl))))
b684a3df
JH
501 break;
502 if (e)
503 optimize_inline_calls (decl);
504 }
18d13f34
JH
505 if (node->nested)
506 {
507 for (node = node->nested; node; node = node->next_nested)
508 cgraph_optimize_function (node);
509 }
a194aa56 510 timevar_pop (TV_INTEGRATION);
18d13f34
JH
511}
512
1c4a429a 513/* Expand function specified by NODE. */
7660e67e 514
1c4a429a 515static void
db0e878d 516cgraph_expand_function (struct cgraph_node *node)
1c4a429a
JH
517{
518 tree decl = node->decl;
519
6b00c969
RH
520 if (flag_unit_at_a_time)
521 announce_function (decl);
18d13f34
JH
522
523 cgraph_optimize_function (node);
dafc5b82 524
7660e67e
SB
525 /* Generate RTL for the body of DECL. Nested functions are expanded
526 via lang_expand_decl_stmt. */
1c4a429a 527 (*lang_hooks.callgraph.expand_function) (decl);
18d13f34 528
bd04ab32
JH
529 if (!cgraph_function_possibly_inlined_p (decl))
530 DECL_SAVED_TREE (decl) = NULL;
1c4a429a
JH
531 current_function_decl = NULL;
532}
533
b58b1157
JH
534/* Fill array order with all nodes with output flag set in the reverse
535 topological order. */
dc0bfe6a 536
b58b1157
JH
537static int
538cgraph_postorder (struct cgraph_node **order)
1c4a429a
JH
539{
540 struct cgraph_node *node, *node2;
1c4a429a
JH
541 int stack_size = 0;
542 int order_pos = 0;
543 struct cgraph_edge *edge, last;
1c4a429a 544
b58b1157 545 struct cgraph_node **stack =
b3c3af2f 546 xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
1c4a429a 547
7660e67e
SB
548 /* We have to deal with cycles nicely, so use a depth first traversal
549 output algorithm. Ignore the fact that some functions won't need
550 to be output and put them into order as well, so we get dependencies
551 right through intline functions. */
1c4a429a
JH
552 for (node = cgraph_nodes; node; node = node->next)
553 node->aux = NULL;
554 for (node = cgraph_nodes; node; node = node->next)
1668aabc 555 if (!node->aux)
1c4a429a
JH
556 {
557 node2 = node;
558 if (!node->callers)
559 node->aux = &last;
560 else
561 node->aux = node->callers;
562 while (node2)
563 {
564 while (node2->aux != &last)
565 {
566 edge = node2->aux;
567 if (edge->next_caller)
568 node2->aux = edge->next_caller;
569 else
570 node2->aux = &last;
571 if (!edge->caller->aux)
572 {
573 if (!edge->caller->callers)
574 edge->caller->aux = &last;
575 else
576 edge->caller->aux = edge->caller->callers;
577 stack[stack_size++] = node2;
578 node2 = edge->caller;
579 break;
580 }
581 }
582 if (node2->aux == &last)
583 {
584 order[order_pos++] = node2;
585 if (stack_size)
586 node2 = stack[--stack_size];
587 else
588 node2 = NULL;
589 }
590 }
591 }
b58b1157
JH
592 free (stack);
593 return order_pos;
594}
595
596#define INLINED_TIMES(node) ((size_t)(node)->aux)
597#define SET_INLINED_TIMES(node,times) ((node)->aux = (void *)(times))
598
599/* Return list of nodes we decided to inline NODE into, set their output
db0e878d 600 flag and compute INLINED_TIMES.
b58b1157
JH
601
602 We do simple backtracing to get INLINED_TIMES right. This should not be
603 expensive as we limit the amount of inlining. Alternatively we may first
604 discover set of nodes, topologically sort these and propagate
605 INLINED_TIMES */
606
607static int
608cgraph_inlined_into (struct cgraph_node *node, struct cgraph_node **array)
609{
610 int nfound = 0;
611 struct cgraph_edge **stack;
612 struct cgraph_edge *e, *e1;
613 int sp;
614 int i;
615
616 /* Fast path: since we traverse in mostly topological order, we will likely
617 find no edges. */
618 for (e = node->callers; e; e = e->next_caller)
dc0bfe6a 619 if (!e->inline_failed)
b58b1157
JH
620 break;
621
622 if (!e)
623 return 0;
624
625 /* Allocate stack for back-tracking up callgraph. */
626 stack = xmalloc ((cgraph_n_nodes + 1) * sizeof (struct cgraph_edge));
627 sp = 0;
628
629 /* Push the first edge on to the stack. */
630 stack[sp++] = e;
631
632 while (sp)
1c4a429a 633 {
b58b1157
JH
634 struct cgraph_node *caller;
635
636 /* Look at the edge on the top of the stack. */
637 e = stack[sp - 1];
638 caller = e->caller;
639
640 /* Check if the caller destination has been visited yet. */
641 if (!caller->output)
1c4a429a 642 {
b58b1157
JH
643 array[nfound++] = e->caller;
644 /* Mark that we have visited the destination. */
645 caller->output = true;
646 SET_INLINED_TIMES (caller, 0);
647 }
648 SET_INLINED_TIMES (caller, INLINED_TIMES (caller) + 1);
649
650 for (e1 = caller->callers; e1; e1 = e1->next_caller)
dc0bfe6a 651 if (!e1->inline_failed)
b58b1157 652 break;
dc0bfe6a 653
b58b1157
JH
654 if (e1)
655 stack[sp++] = e1;
656 else
657 {
658 while (true)
659 {
660 for (e1 = e->next_caller; e1; e1 = e1->next_caller)
dc0bfe6a 661 if (!e1->inline_failed)
b58b1157
JH
662 break;
663
664 if (e1)
665 {
666 stack[sp - 1] = e1;
667 break;
668 }
669 else
670 {
671 sp--;
672 if (!sp)
673 break;
674 e = stack[sp - 1];
675 }
676 }
1c4a429a
JH
677 }
678 }
b58b1157 679
1c4a429a 680 free (stack);
b58b1157
JH
681
682
683 if (cgraph_dump_file)
684 {
7d82fe7c 685 fprintf (cgraph_dump_file, " Found inline predecesors of %s:",
b58b1157
JH
686 cgraph_node_name (node));
687 for (i = 0; i < nfound; i++)
688 {
689 fprintf (cgraph_dump_file, " %s", cgraph_node_name (array[i]));
690 if (INLINED_TIMES (array[i]) != 1)
691 fprintf (cgraph_dump_file, " (%i times)",
1a5c5701 692 (int)INLINED_TIMES (array[i]));
b58b1157
JH
693 }
694 fprintf (cgraph_dump_file, "\n");
695 }
696
697 return nfound;
1c4a429a
JH
698}
699
b58b1157 700/* Return list of nodes we decided to inline into NODE, set their output
db0e878d 701 flag and compute INLINED_TIMES.
b58b1157
JH
702
703 This function is identical to cgraph_inlined_into with callers and callees
704 nodes swapped. */
705
706static int
707cgraph_inlined_callees (struct cgraph_node *node, struct cgraph_node **array)
708{
709 int nfound = 0;
710 struct cgraph_edge **stack;
711 struct cgraph_edge *e, *e1;
712 int sp;
713 int i;
714
715 /* Fast path: since we traverse in mostly topological order, we will likely
716 find no edges. */
717 for (e = node->callees; e; e = e->next_callee)
dc0bfe6a 718 if (!e->inline_failed)
b58b1157
JH
719 break;
720
721 if (!e)
722 return 0;
723
724 /* Allocate stack for back-tracking up callgraph. */
725 stack = xmalloc ((cgraph_n_nodes + 1) * sizeof (struct cgraph_edge));
726 sp = 0;
727
728 /* Push the first edge on to the stack. */
729 stack[sp++] = e;
730
731 while (sp)
732 {
733 struct cgraph_node *callee;
734
735 /* Look at the edge on the top of the stack. */
736 e = stack[sp - 1];
737 callee = e->callee;
738
739 /* Check if the callee destination has been visited yet. */
740 if (!callee->output)
741 {
742 array[nfound++] = e->callee;
743 /* Mark that we have visited the destination. */
744 callee->output = true;
745 SET_INLINED_TIMES (callee, 0);
746 }
747 SET_INLINED_TIMES (callee, INLINED_TIMES (callee) + 1);
748
749 for (e1 = callee->callees; e1; e1 = e1->next_callee)
dc0bfe6a 750 if (!e1->inline_failed)
b58b1157
JH
751 break;
752 if (e1)
753 stack[sp++] = e1;
754 else
755 {
756 while (true)
757 {
758 for (e1 = e->next_callee; e1; e1 = e1->next_callee)
dc0bfe6a 759 if (!e1->inline_failed)
b58b1157
JH
760 break;
761
762 if (e1)
763 {
764 stack[sp - 1] = e1;
765 break;
766 }
767 else
768 {
769 sp--;
770 if (!sp)
771 break;
772 e = stack[sp - 1];
773 }
774 }
775 }
776 }
777
778 free (stack);
779
780 if (cgraph_dump_file)
781 {
7d82fe7c 782 fprintf (cgraph_dump_file, " Found inline successors of %s:",
b58b1157
JH
783 cgraph_node_name (node));
784 for (i = 0; i < nfound; i++)
785 {
786 fprintf (cgraph_dump_file, " %s", cgraph_node_name (array[i]));
787 if (INLINED_TIMES (array[i]) != 1)
788 fprintf (cgraph_dump_file, " (%i times)",
1a5c5701 789 (int)INLINED_TIMES (array[i]));
b58b1157
JH
790 }
791 fprintf (cgraph_dump_file, "\n");
792 }
793
794 return nfound;
795}
796
9b0436b7
JH
797/* Perform reachability analysis and reclaim all unreachable nodes.
798 This function also remove unneeded bodies of extern inline functions
799 and thus needs to be done only after inlining decisions has been made. */
800static bool
801cgraph_remove_unreachable_nodes (void)
802{
803 struct cgraph_node *first = (void *) 1;
804 struct cgraph_node *node;
805 bool changed = false;
806 int insns = 0;
807
808 if (cgraph_dump_file)
809 fprintf (cgraph_dump_file, "\nReclaiming functions:");
810#ifdef ENABLE_CHECKING
811 for (node = cgraph_nodes; node; node = node->next)
812 if (node->aux)
813 abort ();
814#endif
815 for (node = cgraph_nodes; node; node = node->next)
816 if (node->needed && (!DECL_EXTERNAL (node->decl) || !node->analyzed))
817 {
818 node->aux = first;
819 first = node;
820 }
821 else if (node->aux)
822 abort ();
823
824 /* Perform reachability analysis. As a special case do not consider
825 extern inline functions not inlined as live because we won't output
826 them at all. */
827 while (first != (void *) 1)
828 {
829 struct cgraph_edge *e;
830 node = first;
831 first = first->aux;
832
833 for (e = node->callees; e; e = e->next_callee)
834 if (!e->callee->aux
835 && node->analyzed
836 && (!e->inline_failed || !e->callee->analyzed
837 || !DECL_EXTERNAL (e->callee->decl)))
838 {
839 e->callee->aux = first;
840 first = e->callee;
841 }
842 }
843
844 /* Remove unreachable nodes. Extern inline functions need special care;
845 Unreachable extern inline functions shall be removed.
846 Reachable extern inline functions we never inlined shall get their bodies
1f52178b 847 eliminated.
9b0436b7
JH
848 Reachable extern inline functions we sometimes inlined will be turned into
849 unanalyzed nodes so they look like for true extern functions to the rest
850 of code. */
851 for (node = cgraph_nodes; node; node = node->next)
852 {
853 if (!node->aux)
854 {
855 int local_insns;
856 tree decl = node->decl;
857
858 if (DECL_SAVED_INSNS (decl))
859 local_insns = node->local.self_insns;
860 else
861 local_insns = 0;
862 if (cgraph_dump_file)
863 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
864 if (!node->analyzed || !DECL_EXTERNAL (node->decl))
865 cgraph_remove_node (node);
866 else
867 {
868 struct cgraph_edge *e;
869
870 for (e = node->callers; e; e = e->next_caller)
871 if (e->caller->aux)
872 break;
873 if (e || node->needed)
874 {
875 DECL_SAVED_TREE (node->decl) = NULL_TREE;
876 while (node->callees)
877 cgraph_remove_edge (node, node->callees->callee);
878 node->analyzed = false;
879 }
880 else
881 cgraph_remove_node (node);
882 }
883 if (!DECL_SAVED_TREE (decl))
884 insns += local_insns;
885 changed = true;
886 }
887 }
888 for (node = cgraph_nodes; node; node = node->next)
889 node->aux = NULL;
890 if (cgraph_dump_file)
891 fprintf (cgraph_dump_file, "\nReclaimed %i insns", insns);
892 return changed;
893}
894
895
b58b1157
JH
896/* Estimate size of the function after inlining WHAT into TO. */
897
898static int
db0e878d 899cgraph_estimate_size_after_inlining (int times, struct cgraph_node *to,
b58b1157
JH
900 struct cgraph_node *what)
901{
7d82fe7c 902 return (what->global.insns - INSNS_PER_CALL) * times + to->global.insns;
b58b1157
JH
903}
904
905/* Estimate the growth caused by inlining NODE into all callees. */
906
907static int
908cgraph_estimate_growth (struct cgraph_node *node)
909{
910 int growth = 0;
911 int calls_saved = 0;
912 int clones_added = 0;
913 struct cgraph_edge *e;
914
915 for (e = node->callers; e; e = e->next_caller)
dc0bfe6a 916 if (e->inline_failed)
b58b1157
JH
917 {
918 growth += ((cgraph_estimate_size_after_inlining (1, e->caller, node)
919 -
920 e->caller->global.insns) *e->caller->global.cloned_times);
921 calls_saved += e->caller->global.cloned_times;
922 clones_added += e->caller->global.cloned_times;
923 }
924
925 /* ??? Wrong for self recursive functions or cases where we decide to not
926 inline for different reasons, but it is not big deal as in that case
927 we will keep the body around, but we will also avoid some inlining. */
928 if (!node->needed && !node->origin && !DECL_EXTERNAL (node->decl))
929 growth -= node->global.insns, clones_added--;
930
931 if (!calls_saved)
932 calls_saved = 1;
933
934 return growth;
935}
936
937/* Update insn sizes after inlining WHAT into TO that is already inlined into
938 all nodes in INLINED array. */
dafc5b82
JH
939
940static void
db0e878d 941cgraph_mark_inline (struct cgraph_node *to, struct cgraph_node *what,
b58b1157
JH
942 struct cgraph_node **inlined, int ninlined,
943 struct cgraph_node **inlined_callees,
944 int ninlined_callees)
945{
946 int i;
947 int times = 0;
948 int clones = 0;
949 struct cgraph_edge *e;
950 bool called = false;
951 int new_insns;
952
1bb17c21 953 what->global.inlined = 1;
b58b1157
JH
954 for (e = what->callers; e; e = e->next_caller)
955 {
956 if (e->caller == to)
957 {
dc0bfe6a
JH
958 if (!e->inline_failed)
959 continue;
960 e->inline_failed = NULL;
b58b1157
JH
961 times++;
962 clones += e->caller->global.cloned_times;
963 }
dc0bfe6a 964 else if (e->inline_failed)
b58b1157
JH
965 called = true;
966 }
967 if (!times)
968 abort ();
969 ncalls_inlined += times;
970
971 new_insns = cgraph_estimate_size_after_inlining (times, to, what);
972 if (to->global.will_be_output)
973 overall_insns += new_insns - to->global.insns;
974 to->global.insns = new_insns;
975
b58b1157 976 if (!called && !what->needed && !what->origin
d4d1ebc1 977 && flag_unit_at_a_time
b58b1157
JH
978 && !DECL_EXTERNAL (what->decl))
979 {
980 if (!what->global.will_be_output)
981 abort ();
982 clones--;
983 nfunctions_inlined++;
984 what->global.will_be_output = 0;
985 overall_insns -= what->global.insns;
986 }
987 what->global.cloned_times += clones;
b58b1157
JH
988 for (i = 0; i < ninlined; i++)
989 {
990 new_insns =
991 cgraph_estimate_size_after_inlining (INLINED_TIMES (inlined[i]) *
992 times, inlined[i], what);
993 if (inlined[i]->global.will_be_output)
994 overall_insns += new_insns - inlined[i]->global.insns;
995 inlined[i]->global.insns = new_insns;
b58b1157
JH
996 }
997 for (i = 0; i < ninlined_callees; i++)
998 {
999 inlined_callees[i]->global.cloned_times +=
1000 INLINED_TIMES (inlined_callees[i]) * clones;
1001 }
1002}
1003
1004/* Return false when inlining WHAT into TO is not good idea as it would cause
1005 too large growth of function bodies. */
1006
1007static bool
db0e878d 1008cgraph_check_inline_limits (struct cgraph_node *to, struct cgraph_node *what,
dc0bfe6a
JH
1009 struct cgraph_node **inlined, int ninlined,
1010 const char **reason)
dafc5b82 1011{
b58b1157
JH
1012 int i;
1013 int times = 0;
1014 struct cgraph_edge *e;
1015 int newsize;
1016 int limit;
1017
1018 for (e = to->callees; e; e = e->next_callee)
1019 if (e->callee == what)
1020 times++;
1021
1022 /* When inlining large function body called once into small function,
1023 take the inlined function as base for limiting the growth. */
1024 if (to->local.self_insns > what->local.self_insns)
1025 limit = to->local.self_insns;
1026 else
1027 limit = what->local.self_insns;
1028
1029 limit += limit * PARAM_VALUE (PARAM_LARGE_FUNCTION_GROWTH) / 100;
1030
1031 newsize = cgraph_estimate_size_after_inlining (times, to, what);
1032 if (newsize > PARAM_VALUE (PARAM_LARGE_FUNCTION_INSNS)
1033 && newsize > limit)
dc0bfe6a
JH
1034 {
1035 *reason = N_("--param large-function-growth limit reached");
1036 return false;
1037 }
b58b1157
JH
1038 for (i = 0; i < ninlined; i++)
1039 {
1040 newsize =
1041 cgraph_estimate_size_after_inlining (INLINED_TIMES (inlined[i]) *
1042 times, inlined[i], what);
1043 if (newsize > PARAM_VALUE (PARAM_LARGE_FUNCTION_INSNS)
1044 && newsize >
1045 inlined[i]->local.self_insns *
1046 (100 + PARAM_VALUE (PARAM_LARGE_FUNCTION_GROWTH)) / 100)
dc0bfe6a
JH
1047 {
1048 *reason = N_("--param large-function-growth limit reached while inlining the caller");
1049 return false;
1050 }
b58b1157
JH
1051 }
1052 return true;
1053}
1054
7d82fe7c 1055/* Return true when function N is small enough to be inlined. */
b58b1157
JH
1056
1057static bool
1058cgraph_default_inline_p (struct cgraph_node *n)
1059{
1060 if (!DECL_INLINE (n->decl) || !DECL_SAVED_TREE (n->decl))
1061 return false;
b3c3af2f 1062 if (DECL_DECLARED_INLINE_P (n->decl))
b58b1157 1063 return n->global.insns < MAX_INLINE_INSNS_SINGLE;
b3c3af2f
SB
1064 else
1065 return n->global.insns < MAX_INLINE_INSNS_AUTO;
b58b1157
JH
1066}
1067
dc0bfe6a
JH
1068/* Set inline_failed for all callers of given function to REASON. */
1069
1070static void
1071cgraph_set_inline_failed (struct cgraph_node *node, const char *reason)
1072{
1073 struct cgraph_edge *e;
1074
1075 if (cgraph_dump_file)
1076 fprintf (cgraph_dump_file, "Inlining failed: %s\n", reason);
1077 for (e = node->callers; e; e = e->next_caller)
1078 if (e->inline_failed)
1079 e->inline_failed = reason;
1080}
1081
/* We use greedy algorithm for inlining of small functions:
   All inline candidates are put into prioritized heap based on estimated
   growth of the overall number of instructions and then update the estimates.

   INLINED and INLINED_CALLEES are just pointers to arrays large enough
   to be passed to cgraph_inlined_into and cgraph_inlined_callees.  */

static void
cgraph_decide_inlining_of_small_functions (struct cgraph_node **inlined,
					   struct cgraph_node **inlined_callees)
{
  int i;
  struct cgraph_node *node;
  fibheap_t heap = fibheap_new ();
  /* Map from node uid to its heap entry so keys can be updated when size
     estimates change; NULL once the node has been extracted.  */
  struct fibnode **heap_node =
    xcalloc (cgraph_max_uid, sizeof (struct fibnode *));
  int ninlined, ninlined_callees;
  /* Overall unit growth budget: initial size plus
     --param inline-unit-growth percent.  */
  int max_insns = ((HOST_WIDEST_INT) initial_insns
		   * (100 + PARAM_VALUE (PARAM_INLINE_UNIT_GROWTH)) / 100);

  /* Put all inline candidates into the heap, keyed by estimated growth
     (smallest growth extracted first).  */

  for (node = cgraph_nodes; node; node = node->next)
    {
      if (!node->local.inlinable || !node->callers
	  || node->local.disregard_inline_limits)
	continue;

      if (!cgraph_default_inline_p (node))
	{
	  cgraph_set_inline_failed (node,
	    N_("--param max-inline-insns-single limit reached"));
	  continue;
	}
      heap_node[node->uid] =
	fibheap_insert (heap, cgraph_estimate_growth (node), node);
    }

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\nDeciding on smaller functions:\n");
  /* Greedily inline the cheapest candidate until the unit growth budget
     is exhausted or the heap runs dry.  */
  while (overall_insns <= max_insns && (node = fibheap_extract_min (heap)))
    {
      struct cgraph_edge *e;
      int old_insns = overall_insns;

      heap_node[node->uid] = NULL;
      if (cgraph_dump_file)
	fprintf (cgraph_dump_file,
		 "\nConsidering %s with %i insns\n"
		 " Estimated growth is %+i insns.\n",
		 cgraph_node_name (node), node->global.insns,
		 cgraph_estimate_growth (node));
      /* Re-check: earlier inlining into NODE may have pushed it over the
	 single-function size limit.  */
      if (!cgraph_default_inline_p (node))
	{
	  cgraph_set_inline_failed (node,
	    N_("--param max-inline-insns-single limit reached after inlining into the callee"));
	  continue;
	}
      ninlined_callees = cgraph_inlined_callees (node, inlined_callees);
      /* Try to inline NODE into each of its remaining (not yet inlined)
	 callers.  */
      for (e = node->callers; e; e = e->next_caller)
	if (e->inline_failed)
	  {
	    /* Marking recursive function inline has sane semantic and
	       thus we should not warn on it.  */
	    if (e->caller == node)
	      {
		e->inline_failed = "";
		continue;
	      }
	    ninlined = cgraph_inlined_into (e->caller, inlined);
	    if (e->callee->output)
	      e->inline_failed = "";
	    if (e->callee->output
		|| !cgraph_check_inline_limits (e->caller, node, inlined,
						ninlined, &e->inline_failed))
	      {
		/* Clear the scratch flags set by cgraph_inlined_into
		   before giving up on this caller.  */
		for (i = 0; i < ninlined; i++)
		  inlined[i]->output = 0, inlined[i]->aux = 0;
		if (cgraph_dump_file)
		  fprintf (cgraph_dump_file, " Not inlining into %s.\n",
			   cgraph_node_name (e->caller));
		continue;
	      }
	    cgraph_mark_inline (e->caller, node, inlined, ninlined,
				inlined_callees, ninlined_callees);
	    /* The caller's own growth estimate changed; update its key if
	       it is still queued.  */
	    if (heap_node[e->caller->uid])
	      fibheap_replace_key (heap, heap_node[e->caller->uid],
				   cgraph_estimate_growth (e->caller));

	    /* Size of the functions we updated into has changed, so update
	       the keys.  */
	    for (i = 0; i < ninlined; i++)
	      {
		inlined[i]->output = 0, inlined[i]->aux = 0;
		if (heap_node[inlined[i]->uid])
		  fibheap_replace_key (heap, heap_node[inlined[i]->uid],
				       cgraph_estimate_growth (inlined[i]));
	      }
	    if (cgraph_dump_file)
	      fprintf (cgraph_dump_file,
		       " Inlined into %s which now has %i insns.\n",
		       cgraph_node_name (e->caller),
		       e->caller->global.insns);
	  }

      /* Similarly all functions called by the function we just inlined
         are now called more times; update keys.  */

      for (e = node->callees; e; e = e->next_callee)
	if (e->inline_failed && heap_node[e->callee->uid])
	  fibheap_replace_key (heap, heap_node[e->callee->uid],
			       cgraph_estimate_growth (e->callee));

      for (i = 0; i < ninlined_callees; i++)
	{
	  struct cgraph_edge *e;

	  for (e = inlined_callees[i]->callees; e; e = e->next_callee)
	    if (e->inline_failed && heap_node[e->callee->uid])
	      fibheap_replace_key (heap, heap_node[e->callee->uid],
				   cgraph_estimate_growth (e->callee));

	  /* Clear the scratch flags set by cgraph_inlined_callees.  */
	  inlined_callees[i]->output = 0;
	  inlined_callees[i]->aux = 0;
	}
      if (cgraph_dump_file)
	fprintf (cgraph_dump_file,
		 " Inlined %i times for a net change of %+i insns.\n",
		 node->global.cloned_times, overall_insns - old_insns);
    }
  /* Whatever is left in the heap did not fit in the unit growth budget;
     record that as the failure reason on its call edges.  */
  while ((node = fibheap_extract_min (heap)) != NULL)
    if (!node->local.disregard_inline_limits)
      cgraph_set_inline_failed (node, N_("--param inline-unit-growth limit reached"));
  fibheap_delete (heap);
  free (heap_node);
}
1218
/* Decide on the inlining.  We do so in the topological order to avoid
   expenses on updating datastructures.  */

static void
cgraph_decide_inlining (void)
{
  struct cgraph_node *node;
  int nnodes;
  struct cgraph_node **order =
    xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
  /* Scratch arrays reused by every pass below; sized for the whole graph
     so they can be handed to cgraph_inlined_into/cgraph_inlined_callees.  */
  struct cgraph_node **inlined =
    xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
  struct cgraph_node **inlined_callees =
    xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
  int ninlined;
  int ninlined_callees;
  int old_insns = 0;
  int i, y;

  /* Establish the unit-wide size estimate all passes account against.  */
  for (node = cgraph_nodes; node; node = node->next)
    initial_insns += node->local.self_insns;
  overall_insns = initial_insns;

  nnodes = cgraph_postorder (order);

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file,
	     "\nDeciding on inlining.  Starting with %i insns.\n",
	     initial_insns);

  for (node = cgraph_nodes; node; node = node->next)
    node->aux = 0;

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\nInlining always_inline functions:\n");
#ifdef ENABLE_CHECKING
  /* The aux and output fields are used as scratch space and must be clear
     between passes.  */
  for (node = cgraph_nodes; node; node = node->next)
    if (node->aux || node->output)
      abort ();
#endif

  /* In the first pass mark all always_inline edges.  Do this with a priority
     so none of our later choices will make this impossible.  */
  for (i = nnodes - 1; i >= 0; i--)
    {
      struct cgraph_edge *e;

      node = order[i];

      /* Skip nodes without any always_inline callee.  */
      for (e = node->callees; e; e = e->next_callee)
	if (e->callee->local.disregard_inline_limits)
	  break;
      if (!e)
	continue;
      if (cgraph_dump_file)
	fprintf (cgraph_dump_file,
		 "\nConsidering %s %i insns (always inline)\n",
		 cgraph_node_name (e->callee), e->callee->global.insns);
      ninlined = cgraph_inlined_into (order[i], inlined);
      for (; e; e = e->next_callee)
	{
	  old_insns = overall_insns;
	  if (!e->inline_failed || !e->callee->local.inlinable
	      || !e->callee->local.disregard_inline_limits)
	    continue;
	  /* Refuse direct and indirect self-inlining.  */
	  if (e->callee->output || e->callee == node)
	    {
	      e->inline_failed = N_("recursive inlining");
	      continue;
	    }
	  ninlined_callees =
	    cgraph_inlined_callees (e->callee, inlined_callees);
	  cgraph_mark_inline (node, e->callee, inlined, ninlined,
			      inlined_callees, ninlined_callees);
	  /* Clear scratch flags set by cgraph_inlined_callees.  */
	  for (y = 0; y < ninlined_callees; y++)
	    inlined_callees[y]->output = 0, inlined_callees[y]->aux = 0;
	  if (cgraph_dump_file)
	    fprintf (cgraph_dump_file,
		     " Inlined into %s which now has %i insns.\n",
		     cgraph_node_name (node->callees->caller),
		     node->callees->caller->global.insns);
	}
      if (cgraph_dump_file && node->global.cloned_times > 0)
	fprintf (cgraph_dump_file,
		 " Inlined %i times for a net change of %+i insns.\n",
		 node->global.cloned_times, overall_insns - old_insns);
      /* Clear scratch flags set by cgraph_inlined_into.  */
      for (y = 0; y < ninlined; y++)
	inlined[y]->output = 0, inlined[y]->aux = 0;
    }
#ifdef ENABLE_CHECKING
  for (node = cgraph_nodes; node; node = node->next)
    if (node->aux || node->output)
      abort ();
#endif

  if (!flag_really_no_inline)
    {
      /* Second pass: greedy inlining of small functions within the unit
	 growth budget.  */
      cgraph_decide_inlining_of_small_functions (inlined, inlined_callees);
#ifdef ENABLE_CHECKING
      for (node = cgraph_nodes; node; node = node->next)
	if (node->aux || node->output)
	  abort ();
#endif

      if (cgraph_dump_file)
	fprintf (cgraph_dump_file, "\nDeciding on functions called once:\n");

      /* And finally decide what functions are called once.  */

      for (i = nnodes - 1; i >= 0; i--)
	{
	  node = order[i];

	  /* Candidate: exactly one caller, not needed otherwise, inlinable,
	     call not yet inlined, and not external/COMDAT.  */
	  if (node->callers && !node->callers->next_caller && !node->needed
	      && node->local.inlinable && node->callers->inline_failed
	      && !DECL_EXTERNAL (node->decl) && !DECL_COMDAT (node->decl))
	    {
	      bool ok = true;
	      struct cgraph_node *node1;

	      /* Verify that we won't duplicate the caller.  */
	      for (node1 = node->callers->caller;
		   node1->callers && !node1->callers->inline_failed
		   && ok; node1 = node1->callers->caller)
		if (node1->callers->next_caller || node1->needed)
		  ok = false;
	      if (ok)
		{
		  const char *dummy_reason;
		  if (cgraph_dump_file)
		    fprintf (cgraph_dump_file,
			     "\nConsidering %s %i insns.\n"
			     " Called once from %s %i insns.\n",
			     cgraph_node_name (node), node->global.insns,
			     cgraph_node_name (node->callers->caller),
			     node->callers->caller->global.insns);
		  ninlined = cgraph_inlined_into (node->callers->caller,
						  inlined);
		  old_insns = overall_insns;

		  /* Inlining functions once would never cause inlining warnings.  */
		  if (cgraph_check_inline_limits
		      (node->callers->caller, node, inlined, ninlined,
		       &dummy_reason))
		    {
		      ninlined_callees =
			cgraph_inlined_callees (node, inlined_callees);
		      cgraph_mark_inline (node->callers->caller, node, inlined,
					  ninlined, inlined_callees,
					  ninlined_callees);
		      for (y = 0; y < ninlined_callees; y++)
			inlined_callees[y]->output = 0, inlined_callees[y]->aux = 0;
		      if (cgraph_dump_file)
			fprintf (cgraph_dump_file,
				 " Inlined into %s which now has %i insns"
				 " for a net change of %+i insns.\n",
				 cgraph_node_name (node->callers->caller),
				 node->callers->caller->global.insns,
				 overall_insns - old_insns);
		    }
		  else
		    {
		      if (cgraph_dump_file)
			fprintf (cgraph_dump_file,
				 " Inline limit reached, not inlined.\n");
		    }
		  for (y = 0; y < ninlined; y++)
		    inlined[y]->output = 0, inlined[y]->aux = 0;
		}
	    }
	}
    }
  /* Functions fully inlined everywhere may now be unreachable; drop them.  */
  cgraph_remove_unreachable_nodes ();

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file,
	     "\nInlined %i calls, eliminated %i functions, "
	     "%i insns turned to %i insns.\n\n",
	     ncalls_inlined, nfunctions_inlined, initial_insns,
	     overall_insns);
  free (order);
  free (inlined);
  free (inlined_callees);
}
1403
/* Decide on inlining of calls within single function NODE: first inline
   all always_inline callees, then (unless -fno-inline forbids it) the
   callees that fit the default inline limits.  NOTE(review): presumably
   used when whole-unit analysis is unavailable — confirm against the
   caller of this function.  */

static void
cgraph_decide_inlining_incrementally (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  /* Scratch arrays for cgraph_inlined_into/cgraph_inlined_callees.  */
  struct cgraph_node **inlined =
    xmalloc (sizeof (struct cgraph_node *) * cgraph_n_nodes);
  struct cgraph_node **inlined_callees =
    xmalloc (sizeof (struct cgraph_node *) * cgraph_n_nodes);
  int ninlined;
  int ninlined_callees;
  int y;

  ninlined = cgraph_inlined_into (node, inlined);

  /* First of all look for always inline functions.  */
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->local.disregard_inline_limits && e->inline_failed
	/* ??? It is possible that renaming variable removed the function body
	   in duplicate_decls.  See gcc.c-torture/compile/20011119-2.c  */
	&& DECL_SAVED_TREE (e->callee->decl))
      {
	/* Refuse direct and indirect self-inlining.  */
	if (e->callee->output || e->callee == node)
	  {
	    e->inline_failed = N_("recursive inlining");
	    continue;
	  }
	ninlined_callees = cgraph_inlined_callees (e->callee, inlined_callees);
	cgraph_mark_inline (node, e->callee, inlined, ninlined,
			    inlined_callees, ninlined_callees);
	/* Clear scratch flags set by cgraph_inlined_callees.  */
	for (y = 0; y < ninlined_callees; y++)
	  inlined_callees[y]->output = 0, inlined_callees[y]->aux = 0;
      }

  if (!flag_really_no_inline)
    {
      /* Now do the automatic inlining.  */
      for (e = node->callees; e; e = e->next_callee)
	if (e->callee->local.inlinable && e->inline_failed
	    && cgraph_default_inline_p (e->callee)
	    && cgraph_check_inline_limits (node, e->callee, inlined,
					   ninlined, &e->inline_failed)
	    && DECL_SAVED_TREE (e->callee->decl))
	  {
	    /* Marking recursive function inline has sane semantic and thus
	       we should not warn on it.  */
	    if (e->callee->output || e->callee == node)
	      {
		e->inline_failed = "";
		continue;
	      }
	    ninlined_callees = cgraph_inlined_callees (e->callee,
						       inlined_callees);
	    cgraph_mark_inline (node, e->callee, inlined, ninlined,
				inlined_callees, ninlined_callees);
	    for (y = 0; y < ninlined_callees; y++)
	      inlined_callees[y]->output = 0, inlined_callees[y]->aux = 0;
	  }
    }

  /* Clear the flags set by cgraph_inlined_into.  */
  for (y = 0; y < ninlined; y++)
    inlined[y]->output = 0, inlined[y]->aux = 0;

  free (inlined);
  free (inlined_callees);
}
1473
1474
dc0bfe6a
JH
1475/* Return true when CALLER_DECL should be inlined into CALLEE_DECL.
1476 When returned false and reason is non-NULL, set it to the reason
1477 why the call was not inlined. */
b58b1157
JH
1478
1479bool
dc0bfe6a 1480cgraph_inline_p (tree caller_decl, tree callee_decl, const char **reason)
b58b1157
JH
1481{
1482 struct cgraph_node *caller = cgraph_node (caller_decl);
1483 struct cgraph_node *callee = cgraph_node (callee_decl);
1484 struct cgraph_edge *e;
1485
1486 for (e = caller->callees; e; e = e->next_callee)
1487 if (e->callee == callee)
dc0bfe6a
JH
1488 {
1489 if (e->inline_failed && reason)
1490 *reason = e->inline_failed;
1491 return !e->inline_failed;
1492 }
b58b1157 1493 /* We do not record builtins in the callgraph. Perhaps it would make more
a98ebe2e 1494 sense to do so and then prune out those not overwritten by explicit
b58b1157 1495 function body. */
dc0bfe6a
JH
1496 if (reason)
1497 *reason = "originally indirect function calls never inlined";
b58b1157
JH
1498 return false;
1499}
db0e878d
AJ
1500/* Expand all functions that must be output.
1501
b58b1157
JH
1502 Attempt to topologically sort the nodes so function is output when
1503 all called functions are already assembled to allow data to be
a98ebe2e 1504 propagated across the callgraph. Use a stack to get smaller distance
b58b1157
JH
1505 between a function and it's callees (later we may choose to use a more
1506 sophisticated algorithm for function reordering; we will likely want
1507 to use subsections to make the output functions appear in top-down
1508 order). */
1509
1510static void
a20af5b8 1511cgraph_expand_all_functions (void)
b58b1157
JH
1512{
1513 struct cgraph_node *node;
1514 struct cgraph_node **order =
b3c3af2f 1515 xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
b58b1157
JH
1516 int order_pos = 0;
1517 int i;
1518
1519 cgraph_mark_functions_to_output ();
1520
1521 order_pos = cgraph_postorder (order);
1522
1523 for (i = order_pos - 1; i >= 0; i--)
1524 {
1525 node = order[i];
1526 if (node->output)
1527 {
1528 if (!node->reachable)
1529 abort ();
1530 node->output = 0;
1531 cgraph_expand_function (node);
1532 }
1533 }
1534 free (order);
1535}
1536
1537/* Mark all local functions.
1538
a98ebe2e 1539 A local function is one whose calls can occur only in the
dc0bfe6a
JH
1540 current compilation unit and all it's calls are explicit,
1541 so we can change its calling convention.
b58b1157
JH
1542 We simply mark all static functions whose address is not taken
1543 as local. */
1544
1545static void
db0e878d 1546cgraph_mark_local_functions (void)
b58b1157
JH
1547{
1548 struct cgraph_node *node;
1549
1550 if (cgraph_dump_file)
7d82fe7c 1551 fprintf (cgraph_dump_file, "\nMarking local functions:");
b58b1157
JH
1552
1553 /* Figure out functions we want to assemble. */
1554 for (node = cgraph_nodes; node; node = node->next)
1555 {
1556 node->local.local = (!node->needed
1557 && DECL_SAVED_TREE (node->decl)
1558 && !TREE_PUBLIC (node->decl));
1559 if (cgraph_dump_file && node->local.local)
1560 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1561 }
1562 if (cgraph_dump_file)
7d82fe7c 1563 fprintf (cgraph_dump_file, "\n\n");
b58b1157 1564}
dafc5b82 1565
/* Perform simple optimizations based on callgraph.  */

void
cgraph_optimize (void)
{
  /* The whole pass only makes sense in unit-at-a-time mode.  */
  if (!flag_unit_at_a_time)
    return;
  timevar_push (TV_CGRAPHOPT);
  if (!quiet_flag)
    fprintf (stderr, "Performing intraprocedural optimizations\n");

  /* Phase 1: figure out which static functions are local so their calling
     conventions may be changed.  */
  cgraph_mark_local_functions ();
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Marked ");
      dump_cgraph (cgraph_dump_file);
    }

  /* Phase 2: make all inlining decisions; after this the global info on
     each node is final.  */
  cgraph_decide_inlining ();
  cgraph_global_info_ready = true;
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_cgraph (cgraph_dump_file);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
  cgraph_expand_all_functions ();
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\nFinal ");
      dump_cgraph (cgraph_dump_file);
    }
}
This page took 0.46506 seconds and 5 git commands to generate.