]> gcc.gnu.org Git - gcc.git/blob - gcc/cgraphbuild.c
Update Copyright years for files modified in 2010.
[gcc.git] / gcc / cgraphbuild.c
1 /* Callgraph construction.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "tree-flow.h"
28 #include "langhooks.h"
29 #include "pointer-set.h"
30 #include "cgraph.h"
31 #include "intl.h"
32 #include "gimple.h"
33 #include "tree-pass.h"
34 #include "ipa-utils.h"
35 #include "except.h"
36
/* Context of record_reference.  */
struct record_reference_ctx
{
  /* When true, only variables are marked as needed; functions are
     still recorded as references but not marked address-taken.  */
  bool only_vars;
  /* The variable whose DECL_INITIAL is being walked; used as the
     referring node for every IPA reference we record.  */
  struct varpool_node *varpool_node;
};
43
44 /* Walk tree and record all calls and references to functions/variables.
45 Called via walk_tree: TP is pointer to tree to be examined.
46 When DATA is non-null, record references to callgraph.
47 */
48
49 static tree
50 record_reference (tree *tp, int *walk_subtrees, void *data)
51 {
52 tree t = *tp;
53 tree decl;
54 struct record_reference_ctx *ctx = (struct record_reference_ctx *)data;
55
56 switch (TREE_CODE (t))
57 {
58 case VAR_DECL:
59 case FUNCTION_DECL:
60 gcc_unreachable ();
61 break;
62
63 case FDESC_EXPR:
64 case ADDR_EXPR:
65 /* Record dereferences to the functions. This makes the
66 functions reachable unconditionally. */
67 decl = get_base_var (*tp);
68 if (TREE_CODE (decl) == FUNCTION_DECL)
69 {
70 if (!ctx->only_vars)
71 cgraph_mark_address_taken_node (cgraph_node (decl));
72 ipa_record_reference (NULL, ctx->varpool_node,
73 cgraph_node (decl), NULL,
74 IPA_REF_ADDR, NULL);
75 }
76
77 if (TREE_CODE (decl) == VAR_DECL)
78 {
79 struct varpool_node *vnode = varpool_node (decl);
80 if (lang_hooks.callgraph.analyze_expr)
81 lang_hooks.callgraph.analyze_expr (&decl, walk_subtrees);
82 varpool_mark_needed_node (vnode);
83 if (vnode->alias && vnode->extra_name)
84 vnode = vnode->extra_name;
85 ipa_record_reference (NULL, ctx->varpool_node,
86 NULL, vnode,
87 IPA_REF_ADDR, NULL);
88 }
89 *walk_subtrees = 0;
90 break;
91
92 default:
93 /* Save some cycles by not walking types and declaration as we
94 won't find anything useful there anyway. */
95 if (IS_TYPE_OR_DECL_P (*tp))
96 {
97 *walk_subtrees = 0;
98 break;
99 }
100
101 if ((unsigned int) TREE_CODE (t) >= LAST_AND_UNUSED_TREE_CODE)
102 return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees);
103 break;
104 }
105
106 return NULL_TREE;
107 }
108
109 /* Record references to typeinfos in the type list LIST. */
110
111 static void
112 record_type_list (struct cgraph_node *node, tree list)
113 {
114 for (; list; list = TREE_CHAIN (list))
115 {
116 tree type = TREE_VALUE (list);
117
118 if (TYPE_P (type))
119 type = lookup_type_for_runtime (type);
120 STRIP_NOPS (type);
121 if (TREE_CODE (type) == ADDR_EXPR)
122 {
123 type = TREE_OPERAND (type, 0);
124 if (TREE_CODE (type) == VAR_DECL)
125 {
126 struct varpool_node *vnode = varpool_node (type);
127 varpool_mark_needed_node (vnode);
128 ipa_record_reference (node, NULL,
129 NULL, vnode,
130 IPA_REF_ADDR, NULL);
131 }
132 }
133 }
134 }
135
/* Record all references we will introduce by producing EH tables
   for NODE.  FUN is the function whose EH region tree is walked.  */

static void
record_eh_tables (struct cgraph_node *node, struct function *fun)
{
  eh_region i;

  i = fun->eh->region_tree;
  if (!i)
    return;

  /* Walk the region tree in pre-order: visit a region, then its
     children, then its peers, backtracking through ->outer when a
     subtree is exhausted.  */
  while (1)
    {
      switch (i->type)
	{
	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  /* These region kinds carry no typeinfo lists.  */
	  break;

	case ERT_TRY:
	  {
	    eh_catch c;
	    for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
	      record_type_list (node, c->type_list);
	  }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  record_type_list (node, i->u.allowed.type_list);
	  break;
	}
      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do
	    {
	      i = i->outer;
	      if (i == NULL)
		return;
	    }
	  while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
188
189 /* Reset inlining information of all incoming call edges of NODE. */
190
191 void
192 reset_inline_failed (struct cgraph_node *node)
193 {
194 struct cgraph_edge *e;
195
196 for (e = node->callers; e; e = e->next_caller)
197 {
198 e->callee->global.inlined_to = NULL;
199 if (!node->analyzed)
200 e->inline_failed = CIF_BODY_NOT_AVAILABLE;
201 else if (node->local.redefined_extern_inline)
202 e->inline_failed = CIF_REDEFINED_EXTERN_INLINE;
203 else if (!node->local.inlinable)
204 e->inline_failed = CIF_FUNCTION_NOT_INLINABLE;
205 else if (e->call_stmt_cannot_inline_p)
206 e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
207 else
208 e->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
209 }
210 }
211
212 /* Computes the frequency of the call statement so that it can be stored in
213 cgraph_edge. BB is the basic block of the call statement. */
214 int
215 compute_call_stmt_bb_frequency (tree decl, basic_block bb)
216 {
217 int entry_freq = ENTRY_BLOCK_PTR_FOR_FUNCTION
218 (DECL_STRUCT_FUNCTION (decl))->frequency;
219 int freq = bb->frequency;
220
221 if (profile_status_for_function (DECL_STRUCT_FUNCTION (decl)) == PROFILE_ABSENT)
222 return CGRAPH_FREQ_BASE;
223
224 if (!entry_freq)
225 entry_freq = 1, freq++;
226
227 freq = freq * CGRAPH_FREQ_BASE / entry_freq;
228 if (freq > CGRAPH_FREQ_MAX)
229 freq = CGRAPH_FREQ_MAX;
230
231 return freq;
232 }
233
234 /* Mark address taken in STMT. */
235
236 static bool
237 mark_address (gimple stmt ATTRIBUTE_UNUSED, tree addr,
238 void *data ATTRIBUTE_UNUSED)
239 {
240 addr = get_base_address (addr);
241 if (TREE_CODE (addr) == FUNCTION_DECL)
242 {
243 struct cgraph_node *node = cgraph_node (addr);
244 cgraph_mark_address_taken_node (node);
245 ipa_record_reference ((struct cgraph_node *)data, NULL,
246 node, NULL,
247 IPA_REF_ADDR, stmt);
248 }
249 else if (addr && TREE_CODE (addr) == VAR_DECL
250 && (TREE_STATIC (addr) || DECL_EXTERNAL (addr)))
251 {
252 struct varpool_node *vnode = varpool_node (addr);
253 int walk_subtrees;
254
255 if (lang_hooks.callgraph.analyze_expr)
256 lang_hooks.callgraph.analyze_expr (&addr, &walk_subtrees);
257 varpool_mark_needed_node (vnode);
258 if (vnode->alias && vnode->extra_name)
259 vnode = vnode->extra_name;
260 ipa_record_reference ((struct cgraph_node *)data, NULL,
261 NULL, vnode,
262 IPA_REF_ADDR, stmt);
263 }
264
265 return false;
266 }
267
268 /* Mark load of T. */
269
270 static bool
271 mark_load (gimple stmt ATTRIBUTE_UNUSED, tree t,
272 void *data ATTRIBUTE_UNUSED)
273 {
274 t = get_base_address (t);
275 if (t && TREE_CODE (t) == VAR_DECL
276 && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
277 {
278 struct varpool_node *vnode = varpool_node (t);
279 int walk_subtrees;
280
281 if (lang_hooks.callgraph.analyze_expr)
282 lang_hooks.callgraph.analyze_expr (&t, &walk_subtrees);
283 varpool_mark_needed_node (vnode);
284 if (vnode->alias && vnode->extra_name)
285 vnode = vnode->extra_name;
286 ipa_record_reference ((struct cgraph_node *)data, NULL,
287 NULL, vnode,
288 IPA_REF_LOAD, stmt);
289 }
290 return false;
291 }
292
293 /* Mark store of T. */
294
295 static bool
296 mark_store (gimple stmt ATTRIBUTE_UNUSED, tree t,
297 void *data ATTRIBUTE_UNUSED)
298 {
299 t = get_base_address (t);
300 if (t && TREE_CODE (t) == VAR_DECL
301 && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
302 {
303 struct varpool_node *vnode = varpool_node (t);
304 int walk_subtrees;
305
306 if (lang_hooks.callgraph.analyze_expr)
307 lang_hooks.callgraph.analyze_expr (&t, &walk_subtrees);
308 varpool_mark_needed_node (vnode);
309 if (vnode->alias && vnode->extra_name)
310 vnode = vnode->extra_name;
311 ipa_record_reference ((struct cgraph_node *)data, NULL,
312 NULL, vnode,
313 IPA_REF_STORE, NULL);
314 }
315 return false;
316 }
317
318 /* Create cgraph edges for function calls.
319 Also look for functions and variables having addresses taken. */
320
321 static unsigned int
322 build_cgraph_edges (void)
323 {
324 basic_block bb;
325 struct cgraph_node *node = cgraph_node (current_function_decl);
326 struct pointer_set_t *visited_nodes = pointer_set_create ();
327 gimple_stmt_iterator gsi;
328 tree decl;
329 unsigned ix;
330
331 /* Create the callgraph edges and record the nodes referenced by the function.
332 body. */
333 FOR_EACH_BB (bb)
334 {
335 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
336 {
337 gimple stmt = gsi_stmt (gsi);
338 tree decl;
339
340 if (is_gimple_call (stmt))
341 {
342 int freq = compute_call_stmt_bb_frequency (current_function_decl,
343 bb);
344 decl = gimple_call_fndecl (stmt);
345 if (decl)
346 cgraph_create_edge (node, cgraph_node (decl), stmt,
347 bb->count, freq,
348 bb->loop_depth);
349 else
350 cgraph_create_indirect_edge (node, stmt,
351 gimple_call_flags (stmt),
352 bb->count, freq,
353 bb->loop_depth);
354 }
355 walk_stmt_load_store_addr_ops (stmt, node, mark_load,
356 mark_store, mark_address);
357 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
358 && gimple_omp_parallel_child_fn (stmt))
359 {
360 tree fn = gimple_omp_parallel_child_fn (stmt);
361 ipa_record_reference (node, NULL, cgraph_node (fn),
362 NULL, IPA_REF_ADDR, stmt);
363 }
364 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
365 {
366 tree fn = gimple_omp_task_child_fn (stmt);
367 if (fn)
368 ipa_record_reference (node, NULL, cgraph_node (fn),
369 NULL, IPA_REF_ADDR, stmt);
370 fn = gimple_omp_task_copy_fn (stmt);
371 if (fn)
372 ipa_record_reference (node, NULL, cgraph_node (fn),
373 NULL, IPA_REF_ADDR, stmt);
374 }
375 }
376 for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
377 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), node,
378 mark_load, mark_store, mark_address);
379 }
380
381 /* Look for initializers of constant variables and private statics. */
382 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
383 if (TREE_CODE (decl) == VAR_DECL
384 && (TREE_STATIC (decl) && !DECL_EXTERNAL (decl)))
385 varpool_finalize_decl (decl);
386 record_eh_tables (node, cfun);
387
388 pointer_set_destroy (visited_nodes);
389 return 0;
390 }
391
/* Pass descriptor: builds callgraph edges and records references for
   each function via build_cgraph_edges.  Requires a CFG.  */
struct gimple_opt_pass pass_build_cgraph_edges =
{
 {
  GIMPLE_PASS,
  "*build_cgraph_edges",			/* name */
  NULL,					/* gate */
  build_cgraph_edges,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};
410
411 /* Record references to functions and other variables present in the
412 initial value of DECL, a variable.
413 When ONLY_VARS is true, we mark needed only variables, not functions. */
414
415 void
416 record_references_in_initializer (tree decl, bool only_vars)
417 {
418 struct pointer_set_t *visited_nodes = pointer_set_create ();
419 struct varpool_node *node = varpool_node (decl);
420 struct record_reference_ctx ctx = {false, NULL};
421
422 ctx.varpool_node = node;
423 ctx.only_vars = only_vars;
424 walk_tree (&DECL_INITIAL (decl), record_reference,
425 &ctx, visited_nodes);
426 pointer_set_destroy (visited_nodes);
427 }
428
/* Rebuild cgraph edges for current function node.  This needs to be run after
   passes that don't update the cgraph.  Returns 0 (pass execute hook).  */

unsigned int
rebuild_cgraph_edges (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_node (current_function_decl);
  gimple_stmt_iterator gsi;

  /* Throw away stale edges and references; both are recreated from
     the current function body below.  */
  cgraph_node_remove_callees (node);
  ipa_remove_all_references (&node->ref_list);

  node->count = ENTRY_BLOCK_PTR->count;

  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree decl;

	  if (is_gimple_call (stmt))
	    {
	      /* Direct calls get a regular edge; calls through a
		 pointer get an indirect edge.  */
	      int freq = compute_call_stmt_bb_frequency (current_function_decl,
							 bb);
	      decl = gimple_call_fndecl (stmt);
	      if (decl)
		cgraph_create_edge (node, cgraph_node (decl), stmt,
				    bb->count, freq,
				    bb->loop_depth);
	      else
		cgraph_create_indirect_edge (node, stmt,
					     gimple_call_flags (stmt),
					     bb->count, freq,
					     bb->loop_depth);
	    }
	  /* Record loads, stores and taken addresses of statics.  */
	  walk_stmt_load_store_addr_ops (stmt, node, mark_load,
					 mark_store, mark_address);

	}
      for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
	walk_stmt_load_store_addr_ops (gsi_stmt (gsi), node,
				       mark_load, mark_store, mark_address);
    }
  record_eh_tables (node, cfun);
  /* Edge rebuilding is only valid on a function that is not inlined
     into another one.  */
  gcc_assert (!node->global.inlined_to);

  return 0;
}
479
/* Rebuild references of the current function node from its body.
   Unlike rebuild_cgraph_edges, call edges are left untouched.  This
   needs to be run after passes that don't update the cgraph.  */

void
cgraph_rebuild_references (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_node (current_function_decl);
  gimple_stmt_iterator gsi;

  /* Drop stale references; they are recreated below.  */
  ipa_remove_all_references (&node->ref_list);

  node->count = ENTRY_BLOCK_PTR->count;

  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  /* Record loads, stores and taken addresses of statics.  */
	  walk_stmt_load_store_addr_ops (stmt, node, mark_load,
					 mark_store, mark_address);

	}
      for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
	walk_stmt_load_store_addr_ops (gsi_stmt (gsi), node,
				       mark_load, mark_store, mark_address);
    }
  record_eh_tables (node, cfun);
}
510
/* Pass descriptor: rebuilds callgraph edges and references via
   rebuild_cgraph_edges after passes that do not keep them updated.
   Requires a CFG.  */
struct gimple_opt_pass pass_rebuild_cgraph_edges =
{
 {
  GIMPLE_PASS,
  "*rebuild_cgraph_edges",		/* name */
  NULL,					/* gate */
  rebuild_cgraph_edges,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPH,				/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0,					/* todo_flags_finish */
 }
};
529
530
/* Remove all callee edges from the callgraph node of the current
   function.  Pass execute hook; always returns 0.  */

static unsigned int
remove_cgraph_callee_edges (void)
{
  cgraph_node_remove_callees (cgraph_node (current_function_decl));
  return 0;
}
537
/* Pass descriptor: drops the current function's callee edges via
   remove_cgraph_callee_edges.  */
struct gimple_opt_pass pass_remove_cgraph_callee_edges =
{
 {
  GIMPLE_PASS,
  "*remove_cgraph_callee_edges",		/* name */
  NULL,					/* gate */
  remove_cgraph_callee_edges,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0,					/* todo_flags_finish */
 }
};
This page took 0.064581 seconds and 5 git commands to generate.