[gcc.git] / gcc / tree-inline.c
1 /* Tree inlining.
2 Copyright 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "toplev.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "rtl.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "insn-config.h"
35 #include "integrate.h"
36 #include "varray.h"
37 #include "hashtab.h"
38 #include "splay-tree.h"
39 #include "langhooks.h"
40 #include "cgraph.h"
41 #include "intl.h"
42 #include "tree-mudflap.h"
43 #include "function.h"
44 #include "diagnostic.h"
45
46 /* I'm not real happy about this, but we need to handle gimple and
47 non-gimple trees. */
48 #include "tree-iterator.h"
49 #include "tree-gimple.h"
50
51 /* 0 if we should not perform inlining.
52 1 if we should expand function calls inline at the tree level.
53 2 if we should consider *all* functions to be inline
54 candidates. */
55
56 int flag_inline_trees = 0;
57
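/* Note (illustrative, not from the original sources): the option handling
   elsewhere in the compiler selects among these values; roughly, ordinary
   inlining corresponds to 1, while -finline-functions corresponds to 2, in
   which case even a helper never marked `inline', e.g.

     static int add1 (int x) { return x + 1; }

   becomes an inline candidate.  The exact option-to-value mapping lives
   outside this file.  */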
58 /* To Do:
59
60 o In order to make inlining-on-trees work, we pessimized
61 function-local static constants. In particular, they are now
62 always output, even when not addressed. Fix this by treating
63 function-local static constants just like global static
64 constants; the back-end already knows not to output them if they
65 are not needed.
66
67 o Provide heuristics to clamp inlining of recursive template
68 calls? */
69
70 /* Data required for function inlining. */
71
72 typedef struct inline_data
73 {
74 /* A stack of the functions we are inlining. For example, if we are
75 compiling `f', which calls `g', which calls `h', and we are
76 inlining the body of `h', the stack will contain `h', followed
77 by `g', followed by `f'. The first few elements of the stack may
78 contain other functions that we know we should not recurse into,
79 even though they are not directly being inlined. */
80 varray_type fns;
81 /* The index of the first element of FNS that really represents an
82 inlined function. */
83 unsigned first_inlined_fn;
84 /* The label to jump to when a return statement is encountered. If
85 this value is NULL, then return statements will simply be
86 remapped as return statements, rather than as jumps. */
87 tree ret_label;
88 /* The VAR_DECL for the return value. */
89 tree retvar;
90 /* The map from local declarations in the inlined function to
91 equivalents in the function into which it is being inlined. */
92 splay_tree decl_map;
93 /* Nonzero if we are currently within the cleanup for a
94 TARGET_EXPR. */
95 int in_target_cleanup_p;
96 /* A list of the functions the current function has inlined. */
97 varray_type inlined_fns;
98 /* We use the same mechanism to build clones that we do to perform
99 inlining. However, there are a few places where we need to
100 distinguish between those two situations. This flag is true if
101 we are cloning, rather than inlining. */
102 bool cloning_p;
103 /* Similarly for saving function body. */
104 bool saving_p;
105 /* Hash table used to prevent walk_tree from visiting the same node
106 umpteen million times. */
107 htab_t tree_pruner;
108 /* Callgraph node of function we are inlining into. */
109 struct cgraph_node *node;
110 /* Callgraph node of currently inlined function. */
111 struct cgraph_node *current_node;
112 /* Statement iterator. We need this so we can keep the tree in
113 gimple form when we insert the inlined function. It is not
114 used when we are not dealing with gimple trees. */
115 tree_stmt_iterator tsi;
116 } inline_data;
117
118 /* Prototypes. */
119
120 /* The approximate number of instructions per statement. This number
121 need not be particularly accurate; it is used only to make
122 decisions about when a function is too big to inline. */
123 #define INSNS_PER_STMT (10)
124
125 static tree declare_return_variable (inline_data *, tree, tree *);
126 static tree copy_body_r (tree *, int *, void *);
127 static tree copy_body (inline_data *);
128 static tree expand_call_inline (tree *, int *, void *);
129 static void expand_calls_inline (tree *, inline_data *);
130 static bool inlinable_function_p (tree);
131 static tree remap_decl (tree, inline_data *);
132 static tree remap_type (tree, inline_data *);
133 static tree initialize_inlined_parameters (inline_data *, tree,
134 tree, tree, tree);
135 static void remap_block (tree *, inline_data *);
136 static tree remap_decls (tree, inline_data *);
137 static void copy_bind_expr (tree *, int *, inline_data *);
138 static tree mark_local_for_remap_r (tree *, int *, void *);
139 static tree unsave_r (tree *, int *, void *);
140 static void declare_inline_vars (tree bind_expr, tree vars);
141
142 /* Insert a tree->tree mapping for ID. Although the name suggests
143 that the trees should be variables, it is used for more than that. */
144
145 static void
146 insert_decl_map (inline_data *id, tree key, tree value)
147 {
148 splay_tree_insert (id->decl_map, (splay_tree_key) key,
149 (splay_tree_value) value);
150
151 /* Always insert an identity map as well. If we see this same new
152 node again, we won't want to duplicate it a second time. */
153 if (key != value)
154 splay_tree_insert (id->decl_map, (splay_tree_key) value,
155 (splay_tree_value) value);
156 }
157
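/* Usage sketch (illustrative only): when a PARM_DECL P from the function
   being inlined is replaced by a fresh VAR_DECL V in the caller, both
   P -> V and the identity V -> V are recorded, so that a later lookup of
   either tree yields V:

     insert_decl_map (id, p, v);
     splay_tree_lookup (id->decl_map, (splay_tree_key) p);   (value is v)
     splay_tree_lookup (id->decl_map, (splay_tree_key) v);   (value is v)

   The identity entry keeps the copying machinery from duplicating V a
   second time if it is encountered again.  */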
158 /* Remap DECL during the copying of the BLOCK tree for the function. */
159
160 static tree
161 remap_decl (tree decl, inline_data *id)
162 {
163 splay_tree_node n;
164 tree fn;
165
166 /* We only remap local variables in the current function. */
167 fn = VARRAY_TOP_TREE (id->fns);
168 #if 0
169 /* We need to remap statics, too, so that they get expanded even if the
170 inline function is never emitted out of line. We might as well also
171 remap extern decls so that they show up in the debug info. */
172 if (! lang_hooks.tree_inlining.auto_var_in_fn_p (decl, fn))
173 return NULL_TREE;
174 #endif
175
176 /* See if we have remapped this declaration. */
177 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
178
179 /* If we didn't already have an equivalent for this declaration,
180 create one now. */
181 if (!n)
182 {
183 tree t;
184
185 /* Make a copy of the variable or label. */
186 t = copy_decl_for_inlining (decl, fn, VARRAY_TREE (id->fns, 0));
187
188 /* Remap types, if necessary. */
189 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
190 if (TREE_CODE (t) == TYPE_DECL)
191 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
192 else if (TREE_CODE (t) == PARM_DECL)
193 DECL_ARG_TYPE_AS_WRITTEN (t)
194 = remap_type (DECL_ARG_TYPE_AS_WRITTEN (t), id);
195
196 /* Remap sizes as necessary. */
197 walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
198 walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);
199
200 #if 0
201 /* FIXME handle anon aggrs. */
202 if (! DECL_NAME (t) && TREE_TYPE (t)
203 && lang_hooks.tree_inlining.anon_aggr_type_p (TREE_TYPE (t)))
204 {
205 /* For a VAR_DECL of anonymous type, we must also copy the
206 member VAR_DECLS here and rechain the DECL_ANON_UNION_ELEMS. */
207 tree members = NULL;
208 tree src;
209
210 for (src = DECL_ANON_UNION_ELEMS (t); src;
211 src = TREE_CHAIN (src))
212 {
213 tree member = remap_decl (TREE_VALUE (src), id);
214
215 if (TREE_PURPOSE (src))
216 abort ();
217 members = tree_cons (NULL, member, members);
218 }
219 DECL_ANON_UNION_ELEMS (t) = nreverse (members);
220 }
221 #endif
222
223 /* Remember it, so that if we encounter this local entity
224 again we can reuse this copy. */
225 insert_decl_map (id, decl, t);
226 return t;
227 }
228
229 return unshare_expr ((tree) n->value);
230 }
231
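/* Remap TYPE for the copied body.  Types that are not variably modified are
   reused as-is and entered into the map as identities; variably modified
   types get a fresh node whose size expressions are rewritten via
   copy_body_r.  For example (sketch), given

     void f (int n) { int a[n]; ... }

   the domain and size of the type of `a' refer to f's PARM_DECL `n', and
   after inlining they must instead refer to the caller's copy of `n'.  */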
232 static tree
233 remap_type (tree type, inline_data *id)
234 {
235 splay_tree_node node;
236 tree new, t;
237
238 if (type == NULL)
239 return type;
240
241 /* See if we have remapped this type. */
242 node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
243 if (node)
244 return (tree) node->value;
245
246 /* The type only needs remapping if it's variably modified. */
247 if (! variably_modified_type_p (type))
248 {
249 insert_decl_map (id, type, type);
250 return type;
251 }
252
253 /* We do need a copy. Build and register it now. */
254 new = copy_node (type);
255 insert_decl_map (id, type, new);
256
257 /* This is a new type, not a copy of an old type. Need to reassociate
258 variants. We can handle everything except the main variant lazily. */
259 t = TYPE_MAIN_VARIANT (type);
260 if (type != t)
261 {
262 t = remap_type (t, id);
263 TYPE_MAIN_VARIANT (new) = t;
264 TYPE_NEXT_VARIANT (new) = TYPE_MAIN_VARIANT (t);
265 TYPE_NEXT_VARIANT (t) = new;
266 }
267 else
268 {
269 TYPE_MAIN_VARIANT (new) = new;
270 TYPE_NEXT_VARIANT (new) = NULL;
271 }
272
273 /* Lazily create pointer and reference types. */
274 TYPE_POINTER_TO (new) = NULL;
275 TYPE_REFERENCE_TO (new) = NULL;
276
277 switch (TREE_CODE (new))
278 {
279 case INTEGER_TYPE:
280 case REAL_TYPE:
281 case ENUMERAL_TYPE:
282 case BOOLEAN_TYPE:
283 case CHAR_TYPE:
284 t = TYPE_MIN_VALUE (new);
285 if (t && TREE_CODE (t) != INTEGER_CST)
286 walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);
287
288 t = TYPE_MAX_VALUE (new);
289 if (t && TREE_CODE (t) != INTEGER_CST)
290 walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
291 return new;
292
293 case POINTER_TYPE:
294 TREE_TYPE (new) = t = remap_type (TREE_TYPE (new), id);
295 TYPE_NEXT_PTR_TO (new) = TYPE_POINTER_TO (t);
296 TYPE_POINTER_TO (t) = new;
297 return new;
298
299 case REFERENCE_TYPE:
300 TREE_TYPE (new) = t = remap_type (TREE_TYPE (new), id);
301 TYPE_NEXT_REF_TO (new) = TYPE_REFERENCE_TO (t);
302 TYPE_REFERENCE_TO (t) = new;
303 return new;
304
305 case METHOD_TYPE:
306 case FUNCTION_TYPE:
307 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
308 walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
309 return new;
310
311 case ARRAY_TYPE:
312 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
313 TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
314 break;
315
316 case RECORD_TYPE:
317 case UNION_TYPE:
318 case QUAL_UNION_TYPE:
319 walk_tree (&TYPE_FIELDS (new), copy_body_r, id, NULL);
320 break;
321
322 case FILE_TYPE:
323 case SET_TYPE:
324 case OFFSET_TYPE:
325 default:
326 /* These types should not have been considered variably modified. */
327 abort ();
328 }
329
330 walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
331 walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);
332
333 return new;
334 }
335
336 static tree
337 remap_decls (tree decls, inline_data *id)
338 {
339 tree old_var;
340 tree new_decls = NULL_TREE;
341
342 /* Remap its variables. */
343 for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
344 {
345 tree new_var;
346
347 /* Remap the variable. */
348 new_var = remap_decl (old_var, id);
349
350 /* If we didn't remap this variable, we can't mess with its
351 TREE_CHAIN. If we remapped this variable to the return slot, it's
352 already declared somewhere else, so don't declare it here. */
353 if (!new_var || new_var == id->retvar)
354 ;
355 #ifdef ENABLE_CHECKING
356 else if (!DECL_P (new_var))
357 abort ();
358 #endif
359 else
360 {
361 TREE_CHAIN (new_var) = new_decls;
362 new_decls = new_var;
363 }
364 }
365
366 return nreverse (new_decls);
367 }
368
369 /* Copy the BLOCK to contain remapped versions of the variables
370 therein. And hook the new block into the block-tree. */
371
372 static void
373 remap_block (tree *block, inline_data *id)
374 {
375 tree old_block;
376 tree new_block;
377 tree fn;
378
379 /* Make the new block. */
380 old_block = *block;
381 new_block = make_node (BLOCK);
382 TREE_USED (new_block) = TREE_USED (old_block);
383 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
384 *block = new_block;
385
386 /* Remap its variables. */
387 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);
388
389 fn = VARRAY_TREE (id->fns, 0);
390 #if 1
391 /* FIXME! It shouldn't be so hard to manage blocks. Rebuilding them in
392 rest_of_compilation is a good start. */
393 if (id->cloning_p)
394 /* We're building a clone; DECL_INITIAL is still
395 error_mark_node, and current_binding_level is the parm
396 binding level. */
397 lang_hooks.decls.insert_block (new_block);
398 else
399 {
400 /* Attach this new block after the DECL_INITIAL block for the
401 function into which this block is being inlined. In
402 rest_of_compilation we will straighten out the BLOCK tree. */
403 tree *first_block;
404 if (DECL_INITIAL (fn))
405 first_block = &BLOCK_CHAIN (DECL_INITIAL (fn));
406 else
407 first_block = &DECL_INITIAL (fn);
408 BLOCK_CHAIN (new_block) = *first_block;
409 *first_block = new_block;
410 }
411 #endif
412 /* Remember the remapped block. */
413 insert_decl_map (id, old_block, new_block);
414 }
415
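/* Replace the STATEMENT_LIST at *TP with a freshly allocated list linking
   the same statements; the statements themselves are copied separately by
   the normal tree walk.  */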
416 static void
417 copy_statement_list (tree *tp)
418 {
419 tree_stmt_iterator oi, ni;
420 tree new;
421
422 new = alloc_stmt_list ();
423 ni = tsi_start (new);
424 oi = tsi_start (*tp);
425 *tp = new;
426
427 for (; !tsi_end_p (oi); tsi_next (&oi))
428 tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
429 }
430
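/* Copy the BIND_EXPR at *TP, remapping its BLOCK via remap_block and its
   local declarations via remap_decls.  */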
431 static void
432 copy_bind_expr (tree *tp, int *walk_subtrees, inline_data *id)
433 {
434 tree block = BIND_EXPR_BLOCK (*tp);
435 /* Copy (and replace) the statement. */
436 copy_tree_r (tp, walk_subtrees, NULL);
437 if (block)
438 {
439 remap_block (&block, id);
440 BIND_EXPR_BLOCK (*tp) = block;
441 }
442
443 if (BIND_EXPR_VARS (*tp))
444 /* This will remap a lot of the same decls again, but this should be
445 harmless. */
446 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
447 }
448
449 /* Called from copy_body via walk_tree. DATA is really an
450 `inline_data *'. */
451 static tree
452 copy_body_r (tree *tp, int *walk_subtrees, void *data)
453 {
454 inline_data* id;
455 tree fn;
456
457 /* Set up. */
458 id = (inline_data *) data;
459 fn = VARRAY_TOP_TREE (id->fns);
460
461 #if 0
462 /* All automatic variables should have a DECL_CONTEXT indicating
463 what function they come from. */
464 if ((TREE_CODE (*tp) == VAR_DECL || TREE_CODE (*tp) == LABEL_DECL)
465 && DECL_NAMESPACE_SCOPE_P (*tp))
466 if (! DECL_EXTERNAL (*tp) && ! TREE_STATIC (*tp))
467 abort ();
468 #endif
469
470 /* If this is a RETURN_EXPR, change it into a MODIFY_EXPR and a
471 GOTO_EXPR with the RET_LABEL as its target. */
472 if (TREE_CODE (*tp) == RETURN_EXPR && id->ret_label)
473 {
474 tree return_stmt = *tp;
475 tree goto_stmt;
476
477 /* Build the GOTO_EXPR. */
478 tree assignment = TREE_OPERAND (return_stmt, 0);
479 goto_stmt = build1 (GOTO_EXPR, void_type_node, id->ret_label);
480 TREE_USED (id->ret_label) = 1;
481
482 /* If we're returning something, just turn that into an
483 assignment into the equivalent of the original
484 RESULT_DECL. */
485 if (assignment)
486 {
487 /* Do not create a statement containing a naked RESULT_DECL. */
488 if (lang_hooks.gimple_before_inlining)
489 if (TREE_CODE (assignment) == RESULT_DECL)
490 gimplify_stmt (&assignment);
491
492 *tp = build (BIND_EXPR, void_type_node, NULL, NULL, NULL);
493 append_to_statement_list (assignment, &BIND_EXPR_BODY (*tp));
494 append_to_statement_list (goto_stmt, &BIND_EXPR_BODY (*tp));
495 }
496 /* If we're not returning anything just do the jump. */
497 else
498 *tp = goto_stmt;
499 }
500 /* Local variables and labels need to be replaced by equivalent
501 variables. We don't want to copy static variables; there's only
502 one of those, no matter how many times we inline the containing
503 function. */
504 else if (lang_hooks.tree_inlining.auto_var_in_fn_p (*tp, fn))
505 {
506 tree new_decl;
507
508 /* Remap the declaration. */
509 new_decl = remap_decl (*tp, id);
510 if (! new_decl)
511 abort ();
512 /* Replace this variable with the copy. */
513 STRIP_TYPE_NOPS (new_decl);
514 *tp = new_decl;
515 }
516 #if 0
517 else if (nonstatic_local_decl_p (*tp)
518 && DECL_CONTEXT (*tp) != VARRAY_TREE (id->fns, 0))
519 abort ();
520 #endif
521 else if (TREE_CODE (*tp) == STATEMENT_LIST)
522 copy_statement_list (tp);
523 else if (TREE_CODE (*tp) == SAVE_EXPR)
524 remap_save_expr (tp, id->decl_map, VARRAY_TREE (id->fns, 0),
525 walk_subtrees);
526 else if (TREE_CODE (*tp) == UNSAVE_EXPR)
527 /* UNSAVE_EXPRs should not be generated until expansion time. */
528 abort ();
529 else if (TREE_CODE (*tp) == BIND_EXPR)
530 copy_bind_expr (tp, walk_subtrees, id);
531 else if (TREE_CODE (*tp) == LABELED_BLOCK_EXPR)
532 {
533 /* We need a new copy of this labeled block; the EXIT_BLOCK_EXPR
534 will refer to it, so save a copy ready for remapping. We
535 save it in the decl_map, although it isn't a decl. */
536 tree new_block = copy_node (*tp);
537 insert_decl_map (id, *tp, new_block);
538 *tp = new_block;
539 }
540 else if (TREE_CODE (*tp) == EXIT_BLOCK_EXPR)
541 {
542 splay_tree_node n
543 = splay_tree_lookup (id->decl_map,
544 (splay_tree_key) TREE_OPERAND (*tp, 0));
545 /* We _must_ have seen the enclosing LABELED_BLOCK_EXPR. */
546 if (! n)
547 abort ();
548 *tp = copy_node (*tp);
549 TREE_OPERAND (*tp, 0) = (tree) n->value;
550 }
551 /* Types may need remapping as well. */
552 else if (TYPE_P (*tp))
553 *tp = remap_type (*tp, id);
554
555 /* Otherwise, just copy the node. Note that copy_tree_r already
556 knows not to copy VAR_DECLs, etc., so this is safe. */
557 else
558 {
559 tree old_node = *tp;
560
561 if (TREE_CODE (*tp) == MODIFY_EXPR
562 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
563 && (lang_hooks.tree_inlining.auto_var_in_fn_p
564 (TREE_OPERAND (*tp, 0), fn)))
565 {
566 /* Some assignments VAR = VAR; don't generate any rtl code
567 and thus don't count as variable modification. Avoid
568 keeping bogosities like 0 = 0. */
569 tree decl = TREE_OPERAND (*tp, 0), value;
570 splay_tree_node n;
571
572 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
573 if (n)
574 {
575 value = (tree) n->value;
576 STRIP_TYPE_NOPS (value);
577 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
578 {
579 *tp = value;
580 return copy_body_r (tp, walk_subtrees, data);
581 }
582 }
583 }
584 else if (TREE_CODE (*tp) == ADDR_EXPR
585 && (lang_hooks.tree_inlining.auto_var_in_fn_p
586 (TREE_OPERAND (*tp, 0), fn)))
587 {
588 /* Get rid of &* from inline substitutions. It can occur when
589 someone takes the address of a parm or return slot passed by
590 invisible reference. */
591 tree decl = TREE_OPERAND (*tp, 0), value;
592 splay_tree_node n;
593
594 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
595 if (n)
596 {
597 value = (tree) n->value;
598 if (TREE_CODE (value) == INDIRECT_REF)
599 {
600 /* Assume that the argument types properly match the
601 parameter types. We can't compare them well enough
602 without a comptypes langhook, and we don't want to
603 call convert and introduce a NOP_EXPR to convert
604 between two equivalent types (i.e. that only differ
605 in use of typedef names). */
606 *tp = TREE_OPERAND (value, 0);
607 return copy_body_r (tp, walk_subtrees, data);
608 }
609 }
610 }
611 else if (TREE_CODE (*tp) == INDIRECT_REF)
612 {
613 /* Get rid of *& from inline substitutions that can happen when a
614 pointer argument is an ADDR_EXPR. */
615 tree decl = TREE_OPERAND (*tp, 0), value;
616 splay_tree_node n;
617
618 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
619 if (n)
620 {
621 value = (tree) n->value;
622 STRIP_NOPS (value);
623 if (TREE_CODE (value) == ADDR_EXPR)
624 {
625 *tp = TREE_OPERAND (value, 0);
626 return copy_body_r (tp, walk_subtrees, data);
627 }
628 }
629 }
630
631 copy_tree_r (tp, walk_subtrees, NULL);
632
633 if (TREE_CODE (*tp) == CALL_EXPR && id->node && get_callee_fndecl (*tp))
634 {
635 if (id->saving_p)
636 {
637 struct cgraph_node *node;
638 struct cgraph_edge *edge;
639
640 for (node = id->node->next_clone; node; node = node->next_clone)
641 {
642 edge = cgraph_edge (node, old_node);
643 if (edge)
644 edge->call_expr = *tp;
645 else
646 abort ();
647 }
648 }
649 else
650 {
651 struct cgraph_edge *edge;
652
653 edge = cgraph_edge (id->current_node, old_node);
654 if (edge)
655 cgraph_clone_edge (edge, id->node, *tp);
656 }
657 }
658
659 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
660
661 /* The copied TARGET_EXPR has never been expanded, even if the
662 original node was expanded already. */
663 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
664 {
665 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
666 TREE_OPERAND (*tp, 3) = NULL_TREE;
667 }
668 }
669
670 /* Keep iterating. */
671 return NULL_TREE;
672 }
673
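/* To make the rewrites above concrete, an illustrative sketch (not part of
   the compiler): with id->ret_label set, a body statement

     return a + b;

   is rewritten, in terms of the caller's copies, roughly as

     retvar = a.1 + b.2;
     goto ret_label;

   and the &* / *& cleanups mean that `*&x' collapses back to `x' and `&*p'
   back to `p' when parameter substitution introduces such pairs.  */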
674 /* Make a copy of the body of FN so that it can be inserted inline in
675 another function. */
676
677 static tree
678 copy_body (inline_data *id)
679 {
680 tree body;
681 tree fndecl = VARRAY_TOP_TREE (id->fns);
682
683 if (fndecl == current_function_decl
684 && cfun->saved_tree)
685 body = cfun->saved_tree;
686 else
687 body = DECL_SAVED_TREE (fndecl);
688 walk_tree (&body, copy_body_r, id, NULL);
689
690 return body;
691 }
692
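/* Set up a single parameter P of the function being inlined: either map P
   directly to the argument VALUE when that is known to be safe, or create
   an equivalent VAR_DECL in the function we are inlining into, map P to it,
   and append an initializing assignment to *INIT_STMTS.
   *GIMPLIFY_INIT_STMTS_P is set when the generated initializer still needs
   gimplification.  */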
693 static void
694 setup_one_parameter (inline_data *id, tree p, tree value,
695 tree fn, tree *init_stmts, tree *vars,
696 bool *gimplify_init_stmts_p)
697 {
698 tree init_stmt;
699 tree var;
700 tree var_sub;
701
702 /* If the parameter is never assigned to, we may not need to
703 create a new variable here at all. Instead, we may be able
704 to just use the argument value. */
705 if (TREE_READONLY (p)
706 && !TREE_ADDRESSABLE (p)
707 && value && !TREE_SIDE_EFFECTS (value))
708 {
709 /* We can't risk substituting complex expressions. They
710 might contain variables that will be assigned to later.
711 Theoretically, we could check the expression to see if
712 all of the variables that determine its value are
713 read-only, but we don't bother. */
714 if ((TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
715 /* We may produce non-gimple trees by adding NOPs or introduce
716 invalid sharing when the operand is not really constant.
717 It is not a big deal to prohibit constant propagation here as
718 we will constant propagate in the DOM1 pass anyway. */
719 && (!lang_hooks.gimple_before_inlining
720 || (is_gimple_min_invariant (value)
721 && TREE_TYPE (value) == TREE_TYPE (p))))
722 {
723 /* If this is a declaration, wrap it in a NOP_EXPR so that
724 we don't try to put the VALUE on the list of BLOCK_VARS. */
725 if (DECL_P (value))
726 value = build1 (NOP_EXPR, TREE_TYPE (value), value);
727
728 /* If this is a constant, make sure it has the right type. */
729 else if (TREE_TYPE (value) != TREE_TYPE (p))
730 value = fold (build1 (NOP_EXPR, TREE_TYPE (p), value));
731
732 insert_decl_map (id, p, value);
733 return;
734 }
735 }
736
737 /* Make an equivalent VAR_DECL. */
738 var = copy_decl_for_inlining (p, fn, VARRAY_TREE (id->fns, 0));
739
740 /* See if the frontend wants to pass this by invisible reference. If
741 so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
742 replace uses of the PARM_DECL with dereferences. */
743 if (TREE_TYPE (var) != TREE_TYPE (p)
744 && POINTER_TYPE_P (TREE_TYPE (var))
745 && TREE_TYPE (TREE_TYPE (var)) == TREE_TYPE (p))
746 {
747 insert_decl_map (id, var, var);
748 var_sub = build1 (INDIRECT_REF, TREE_TYPE (p), var);
749 }
750 else
751 var_sub = var;
752
753 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
754 that way, when the PARM_DECL is encountered, it will be
755 automatically replaced by the VAR_DECL. */
756 insert_decl_map (id, p, var_sub);
757
758 /* Declare this new variable. */
759 TREE_CHAIN (var) = *vars;
760 *vars = var;
761
762 /* Make the gimplifier happy about this variable. */
763 var->decl.seen_in_bind_expr = lang_hooks.gimple_before_inlining;
764
765 /* Even if P was TREE_READONLY, the new VAR should not be.
766 In the original code, we would have constructed a
767 temporary, and then the function body would never have
768 changed the value of P. However, now, we will be
769 constructing VAR directly. The constructor body may
770 change its value multiple times as it is being
771 constructed. Therefore, it must not be TREE_READONLY;
772 the back-end assumes that a TREE_READONLY variable is
773 assigned to only once. */
774 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
775 TREE_READONLY (var) = 0;
776
777 /* Initialize this VAR_DECL from the equivalent argument. Convert
778 the argument to the proper type in case it was promoted. */
779 if (value)
780 {
781 tree rhs = fold_convert (TREE_TYPE (var), value);
782
783 if (rhs == error_mark_node)
784 return;
785
786 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
787 keep our trees in gimple form. */
788 init_stmt = build (MODIFY_EXPR, TREE_TYPE (var), var, rhs);
789 append_to_statement_list (init_stmt, init_stmts);
790
791 /* If we did not create a gimple value and we did not create a gimple
792 cast of a gimple value, then we will need to gimplify INIT_STMTS
793 at the end. Note that is_gimple_cast only checks the outer
794 tree code, not its operand. Thus the explicit check that its
795 operand is a gimple value. */
796 if (!is_gimple_val (rhs)
797 && (!is_gimple_cast (rhs)
798 || !is_gimple_val (TREE_OPERAND (rhs, 0))))
799 *gimplify_init_stmts_p = true;
800 }
801 }
802
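/* Illustrative sketch of the effect of the parameter setup: inlining

     static inline int sq (int x) { return x * x; }
     ...
     y = sq (a + b);

   produces, roughly,

     int x.1;
     x.1 = a + b;           (emitted via setup_one_parameter)
     ... copied body using x.1 ...

   whereas when the parameter is read-only and the argument is a suitable
   constant, the constant is substituted for `x' directly and no temporary
   is created at all.  */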
803 /* Generate code to initialize the parameters of the function at the
804 top of the stack in ID from the ARGS (presented as a TREE_LIST). */
805
806 static tree
807 initialize_inlined_parameters (inline_data *id, tree args, tree static_chain,
808 tree fn, tree bind_expr)
809 {
810 tree init_stmts = NULL_TREE;
811 tree parms;
812 tree a;
813 tree p;
814 tree vars = NULL_TREE;
815 bool gimplify_init_stmts_p = false;
816 int argnum = 0;
817
818 /* Figure out what the parameters are. */
819 parms = DECL_ARGUMENTS (fn);
820 if (fn == current_function_decl)
821 parms = cfun->saved_args;
822
823 /* Loop through the parameter declarations, replacing each with an
824 equivalent VAR_DECL, appropriately initialized. */
825 for (p = parms, a = args; p;
826 a = a ? TREE_CHAIN (a) : a, p = TREE_CHAIN (p))
827 {
828 tree value;
829
830 ++argnum;
831
832 /* Find the initializer. */
833 value = lang_hooks.tree_inlining.convert_parm_for_inlining
834 (p, a ? TREE_VALUE (a) : NULL_TREE, fn, argnum);
835
836 setup_one_parameter (id, p, value, fn, &init_stmts, &vars,
837 &gimplify_init_stmts_p);
838 }
839
840 /* Evaluate trailing arguments. */
841 for (; a; a = TREE_CHAIN (a))
842 {
843 tree value = TREE_VALUE (a);
844 append_to_statement_list (value, &init_stmts);
845 }
846
847 /* Initialize the static chain. */
848 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
849 if (p)
850 {
851 /* No static chain? Seems like a bug in tree-nested.c. */
852 if (!static_chain)
853 abort ();
854
855 setup_one_parameter (id, p, static_chain, fn, &init_stmts, &vars,
856 &gimplify_init_stmts_p);
857 }
858
859 if (gimplify_init_stmts_p && lang_hooks.gimple_before_inlining)
860 gimplify_body (&init_stmts, fn);
861
862 declare_inline_vars (bind_expr, vars);
863 return init_stmts;
864 }
865
866 /* Declare a return variable to replace the RESULT_DECL for the
867 function we are calling. The new declaration is returned if one is needed.
868 *USE_P is filled in to contain a use of the declaration to
869 indicate the return value of the function. */
870
871 static tree
872 declare_return_variable (inline_data *id, tree return_slot_addr, tree *use_p)
873 {
874 tree fn = VARRAY_TOP_TREE (id->fns);
875 tree result = DECL_RESULT (fn);
876 int need_return_decl = 1;
877 tree var;
878
879 /* We don't need to do anything for functions that don't return
880 anything. */
881 if (!result || VOID_TYPE_P (TREE_TYPE (result)))
882 {
883 *use_p = NULL_TREE;
884 return NULL_TREE;
885 }
886
887 var = (lang_hooks.tree_inlining.copy_res_decl_for_inlining
888 (result, fn, VARRAY_TREE (id->fns, 0), id->decl_map,
889 &need_return_decl, return_slot_addr));
890
891 /* Do not have the rest of GCC warn about this variable as it should
892 not be visible to the user. */
893 TREE_NO_WARNING (var) = 1;
894
895 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
896 way, when the RESULT_DECL is encountered, it will be
897 automatically replaced by the VAR_DECL. */
898 insert_decl_map (id, result, var);
899
900 /* Remember this so we can ignore it in remap_decls. */
901 id->retvar = var;
902
903 /* Build the use expr. If the return type of the function was
904 promoted, convert it back to the expected type. */
905 if (return_slot_addr)
906 /* The function returns through an explicit return slot, not a normal
907 return value. */
908 *use_p = NULL_TREE;
909 else if (TREE_TYPE (var) == TREE_TYPE (TREE_TYPE (fn)))
910 *use_p = var;
911 else if (TREE_CODE (var) == INDIRECT_REF)
912 *use_p = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (fn)),
913 TREE_OPERAND (var, 0));
914 else if (TREE_ADDRESSABLE (TREE_TYPE (var)))
915 abort ();
916 else
917 *use_p = build1 (NOP_EXPR, TREE_TYPE (TREE_TYPE (fn)), var);
918
919 /* Build the declaration statement if FN does not return an
920 aggregate. */
921 if (need_return_decl)
922 return var;
923 /* If FN does return an aggregate, there's no need to declare the
924 return variable; we're using a variable in our caller's frame. */
925 else
926 return NULL_TREE;
927 }
928
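/* Illustrative sketch: for a call `y = f ()' where f returns int, a new
   VAR_DECL stands in for f's RESULT_DECL, return statements in the copied
   body assign to it, and *USE_P refers to it so the result can still reach
   `y'.  When the call supplies a return slot (CALL_EXPR_HAS_RETURN_SLOT_ADDR),
   the copied body writes directly into the caller's slot and *USE_P is
   NULL_TREE.  */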
929 /* Returns nonzero if a function can be inlined as a tree. */
930
931 bool
932 tree_inlinable_function_p (tree fn)
933 {
934 return inlinable_function_p (fn);
935 }
936
937 static const char *inline_forbidden_reason;
938
939 static tree
940 inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
941 void *fnp)
942 {
943 tree node = *nodep;
944 tree fn = (tree) fnp;
945 tree t;
946
947 switch (TREE_CODE (node))
948 {
949 case CALL_EXPR:
950 /* Refuse to inline an alloca call unless the user explicitly forced it, as
951 this may change the program's memory overhead drastically when the
952 function using alloca is called in a loop. In the copy of GCC present in
953 SPEC2000, inlining into schedule_block caused it to require 2GB of
954 RAM instead of 256MB. */
955 if (alloca_call_p (node)
956 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
957 {
958 inline_forbidden_reason
959 = N_("%Jfunction '%F' can never be inlined because it uses "
960 "alloca (override using the always_inline attribute)");
961 return node;
962 }
963 t = get_callee_fndecl (node);
964 if (! t)
965 break;
966
967
968 /* We cannot inline functions that call setjmp. */
969 if (setjmp_call_p (t))
970 {
971 inline_forbidden_reason
972 = N_("%Jfunction '%F' can never be inlined because it uses setjmp");
973 return node;
974 }
975
976 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
977 switch (DECL_FUNCTION_CODE (t))
978 {
979 /* We cannot inline functions that take a variable number of
980 arguments. */
981 case BUILT_IN_VA_START:
982 case BUILT_IN_STDARG_START:
983 case BUILT_IN_NEXT_ARG:
984 case BUILT_IN_VA_END:
985 inline_forbidden_reason
986 = N_("%Jfunction '%F' can never be inlined because it "
987 "uses variable argument lists");
988 return node;
989
990 case BUILT_IN_LONGJMP:
991 /* We can't inline functions that call __builtin_longjmp at
992 all. The non-local goto machinery really requires the
993 destination be in a different function. If we allow the
994 function calling __builtin_longjmp to be inlined into the
995 function calling __builtin_setjmp, Things will Go Awry. */
996 inline_forbidden_reason
997 = N_("%Jfunction '%F' can never be inlined because "
998 "it uses setjmp-longjmp exception handling");
999 return node;
1000
1001 case BUILT_IN_NONLOCAL_GOTO:
1002 /* Similarly. */
1003 inline_forbidden_reason
1004 = N_("%Jfunction '%F' can never be inlined because "
1005 "it uses non-local goto");
1006 return node;
1007
1008 default:
1009 break;
1010 }
1011 break;
1012
1013 case BIND_EXPR:
1014 for (t = BIND_EXPR_VARS (node); t ; t = TREE_CHAIN (t))
1015 {
1016 /* We cannot inline functions that contain other functions. */
1017 if (TREE_CODE (t) == FUNCTION_DECL && DECL_INITIAL (t))
1018 {
1019 inline_forbidden_reason
1020 = N_("%Jfunction '%F' can never be inlined "
1021 "because it contains a nested function");
1022 return node;
1023 }
1024 }
1025 break;
1026
1027 case GOTO_EXPR:
1028 t = TREE_OPERAND (node, 0);
1029
1030 /* We will not inline a function which uses computed goto. The
1031 addresses of its local labels, which may be tucked into
1032 global storage, are of course not constant across
1033 instantiations, which causes unexpected behavior. */
1034 if (TREE_CODE (t) != LABEL_DECL)
1035 {
1036 inline_forbidden_reason
1037 = N_("%Jfunction '%F' can never be inlined "
1038 "because it contains a computed goto");
1039 return node;
1040 }
1041 break;
1042
1043 case LABEL_EXPR:
1044 t = TREE_OPERAND (node, 0);
1045 if (DECL_NONLOCAL (t))
1046 {
1047 /* We cannot inline a function that receives a non-local goto
1048 because we cannot remap the destination label used in the
1049 function that is performing the non-local goto. */
1050 inline_forbidden_reason
1051 = N_("%Jfunction '%F' can never be inlined "
1052 "because it receives a non-local goto");
1053 }
1054 break;
1055
1056 case RECORD_TYPE:
1057 case UNION_TYPE:
1058 /* We cannot inline a function of the form
1059
1060 void F (int i) { struct S { int ar[i]; } s; }
1061
1062 Attempting to do so produces a catch-22.
1063 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
1064 UNION_TYPE nodes, then it goes into infinite recursion on a
1065 structure containing a pointer to its own type. If it doesn't,
1066 then the type node for S doesn't get adjusted properly when
1067 F is inlined, and we abort in find_function_data. */
1068 for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
1069 if (variably_modified_type_p (TREE_TYPE (t)))
1070 {
1071 inline_forbidden_reason
1072 = N_("%Jfunction '%F' can never be inlined "
1073 "because it uses variable sized variables");
1074 return node;
1075 }
1076
1077 default:
1078 break;
1079 }
1080
1081 return NULL_TREE;
1082 }
1083
1084 /* Return a subexpression that forbids inlining of FNDECL, if any. */
1085 static tree
1086 inline_forbidden_p (tree fndecl)
1087 {
1088 location_t saved_loc = input_location;
1089 tree ret = walk_tree_without_duplicates
1090 (&DECL_SAVED_TREE (fndecl), inline_forbidden_p_1, fndecl);
1091 input_location = saved_loc;
1092 return ret;
1093 }
1094
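/* For example (sketch), a function such as

     static inline char *dup_on_stack (const char *s)
     {
       char *p = alloca (strlen (s) + 1);
       return strcpy (p, s);
     }

   is flagged here as uninlinable because of the alloca call, unless it is
   marked with the always_inline attribute.  */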
1095 /* Returns nonzero if FN is a function that does not have any
1096 fundamental inline blocking properties. */
1097
1098 static bool
1099 inlinable_function_p (tree fn)
1100 {
1101 bool inlinable = true;
1102
1103 /* If we've already decided this function shouldn't be inlined,
1104 there's no need to check again. */
1105 if (DECL_UNINLINABLE (fn))
1106 return false;
1107
1108 /* See if there is any language-specific reason it cannot be
1109 inlined. (It is important that this hook be called early because
1110 in C++ it may result in template instantiation.)
1111 If the function is not inlinable for language-specific reasons,
1112 it is left up to the langhook to explain why. */
1113 inlinable = !lang_hooks.tree_inlining.cannot_inline_tree_fn (&fn);
1114
1115 /* If we don't have the function body available, we can't inline it.
1116 However, this should not be recorded since we also get here for
1117 forward declared inline functions. Therefore, return at once. */
1118 if (!DECL_SAVED_TREE (fn))
1119 return false;
1120
1121 /* If we're not inlining at all, then we cannot inline this function. */
1122 else if (!flag_inline_trees)
1123 inlinable = false;
1124
1125 /* Only try to inline functions if DECL_INLINE is set. This should be
1126 true for all functions declared `inline', and for all other functions
1127 as well with -finline-functions.
1128
1129 Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
1130 it's the front-end that must set DECL_INLINE in this case, because
1131 dwarf2out loses if a function that does not have DECL_INLINE set is
1132 inlined anyway. That is why we have both DECL_INLINE and
1133 DECL_DECLARED_INLINE_P. */
1134 /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
1135 here should be redundant. */
1136 else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
1137 inlinable = false;
1138
1139 #ifdef INLINER_FOR_JAVA
1140 /* Synchronized methods can't be inlined. This is a bug. */
1141 else if (METHOD_SYNCHRONIZED (fn))
1142 inlinable = false;
1143 #endif /* INLINER_FOR_JAVA */
1144
1145 else if (inline_forbidden_p (fn))
1146 {
1147 /* See if we should warn about uninlinable functions. Previously,
1148 some of these warnings would be issued while trying to expand
1149 the function inline, but that would cause multiple warnings
1150 about functions that would for example call alloca. But since
1151 this is a property of the function, just one warning is enough.
1152 As a bonus we can now give more details about the reason why a
1153 function is not inlinable.
1154 We only warn for functions declared `inline' by the user. */
1155 bool do_warning = (warn_inline
1156 && DECL_INLINE (fn)
1157 && DECL_DECLARED_INLINE_P (fn)
1158 && !DECL_IN_SYSTEM_HEADER (fn));
1159
1160 if (lookup_attribute ("always_inline",
1161 DECL_ATTRIBUTES (fn)))
1162 sorry (inline_forbidden_reason, fn, fn);
1163 else if (do_warning)
1164 warning (inline_forbidden_reason, fn, fn);
1165
1166 inlinable = false;
1167 }
1168
1169 /* Squirrel away the result so that we don't have to check again. */
1170 DECL_UNINLINABLE (fn) = !inlinable;
1171
1172 return inlinable;
1173 }
1174
1175 /* Used by estimate_num_insns. Estimate the number of instructions
1176 contributed by the given statement. */
1177 static tree
1178 estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
1179 {
1180 int *count = data;
1181 tree x = *tp;
1182
1183 if (TYPE_P (x) || DECL_P (x))
1184 {
1185 *walk_subtrees = 0;
1186 return NULL;
1187 }
1188 /* Assume that constants and references count as nothing. Their cost
1189 should be dominated by the operations using them, which we count later,
1190 and they are a common target of CSE and similar optimizations. */
1191 if (TREE_CODE_CLASS (TREE_CODE (x)) == 'c'
1192 || TREE_CODE_CLASS (TREE_CODE (x)) == 'r')
1193 return NULL;
1194 switch (TREE_CODE (x))
1195 {
1196 /* Containers have no cost. */
1197 case TREE_LIST:
1198 case TREE_VEC:
1199 case BLOCK:
1200 case COMPONENT_REF:
1201 case BIT_FIELD_REF:
1202 case INDIRECT_REF:
1203 case BUFFER_REF:
1204 case ARRAY_REF:
1205 case ARRAY_RANGE_REF:
1206 case VTABLE_REF:
1207 case EXC_PTR_EXPR: /* ??? */
1208 case FILTER_EXPR: /* ??? */
1209 case COMPOUND_EXPR:
1210 case BIND_EXPR:
1211 case LABELED_BLOCK_EXPR:
1212 case WITH_CLEANUP_EXPR:
1213 case NOP_EXPR:
1214 case VIEW_CONVERT_EXPR:
1215 case SAVE_EXPR:
1216 case UNSAVE_EXPR:
1217 case ADDR_EXPR:
1218 case REFERENCE_EXPR:
1219 case COMPLEX_EXPR:
1220 case REALPART_EXPR:
1221 case IMAGPART_EXPR:
1222 case EXIT_BLOCK_EXPR:
1223 case CASE_LABEL_EXPR:
1224 case SSA_NAME:
1225 case CATCH_EXPR:
1226 case EH_FILTER_EXPR:
1227 case STATEMENT_LIST:
1228 case ERROR_MARK:
1229 case NON_LVALUE_EXPR:
1230 case ENTRY_VALUE_EXPR:
1231 case FDESC_EXPR:
1232 case VA_ARG_EXPR:
1233 case TRY_CATCH_EXPR:
1234 case TRY_FINALLY_EXPR:
1235 case LABEL_EXPR:
1236 case GOTO_EXPR:
1237 case RETURN_EXPR:
1238 case EXIT_EXPR:
1239 case LOOP_EXPR:
1240 case PHI_NODE:
1241 break;
1242 /* We don't account constants for now. Assume that the cost is amortized
1243 by operations that do use them. We may reconsider this decision once
1244 we are able to optimize the tree before estimating its size and break
1245 out static initializers. */
1246 case IDENTIFIER_NODE:
1247 case INTEGER_CST:
1248 case REAL_CST:
1249 case COMPLEX_CST:
1250 case VECTOR_CST:
1251 case STRING_CST:
1252 *walk_subtrees = 0;
1253 return NULL;
1254 /* Recognize assignments of large structures and constructors of
1255 big arrays. */
1256 case INIT_EXPR:
1257 case TARGET_EXPR:
1258 case MODIFY_EXPR:
1259 case CONSTRUCTOR:
1260 {
1261 HOST_WIDE_INT size;
1262
1263 size = int_size_in_bytes (TREE_TYPE (x));
1264
1265 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
1266 *count += 10;
1267 else
1268 *count += ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
1269 }
1270 break;
1271
1272 /* Assign cost of 1 to usual operations.
1273 ??? We may consider mapping RTL costs to this. */
1274 case COND_EXPR:
1275
1276 case PLUS_EXPR:
1277 case MINUS_EXPR:
1278 case MULT_EXPR:
1279
1280 case FIX_TRUNC_EXPR:
1281 case FIX_CEIL_EXPR:
1282 case FIX_FLOOR_EXPR:
1283 case FIX_ROUND_EXPR:
1284
1285 case NEGATE_EXPR:
1286 case FLOAT_EXPR:
1287 case MIN_EXPR:
1288 case MAX_EXPR:
1289 case ABS_EXPR:
1290
1291 case LSHIFT_EXPR:
1292 case RSHIFT_EXPR:
1293 case LROTATE_EXPR:
1294 case RROTATE_EXPR:
1295
1296 case BIT_IOR_EXPR:
1297 case BIT_XOR_EXPR:
1298 case BIT_AND_EXPR:
1299 case BIT_NOT_EXPR:
1300
1301 case TRUTH_ANDIF_EXPR:
1302 case TRUTH_ORIF_EXPR:
1303 case TRUTH_AND_EXPR:
1304 case TRUTH_OR_EXPR:
1305 case TRUTH_XOR_EXPR:
1306 case TRUTH_NOT_EXPR:
1307
1308 case LT_EXPR:
1309 case LE_EXPR:
1310 case GT_EXPR:
1311 case GE_EXPR:
1312 case EQ_EXPR:
1313 case NE_EXPR:
1314 case ORDERED_EXPR:
1315 case UNORDERED_EXPR:
1316
1317 case UNLT_EXPR:
1318 case UNLE_EXPR:
1319 case UNGT_EXPR:
1320 case UNGE_EXPR:
1321 case UNEQ_EXPR:
1322 case LTGT_EXPR:
1323
1324 case CONVERT_EXPR:
1325
1326 case CONJ_EXPR:
1327
1328 case PREDECREMENT_EXPR:
1329 case PREINCREMENT_EXPR:
1330 case POSTDECREMENT_EXPR:
1331 case POSTINCREMENT_EXPR:
1332
1333 case SWITCH_EXPR:
1334
1335 case ASM_EXPR:
1336
1337 case RESX_EXPR:
1338 (*count)++;
1339 break;
1340
1341 /* A few special cases of expensive operations. This is useful
1342 to avoid inlining functions having too many of these. */
1343 case TRUNC_DIV_EXPR:
1344 case CEIL_DIV_EXPR:
1345 case FLOOR_DIV_EXPR:
1346 case ROUND_DIV_EXPR:
1347 case EXACT_DIV_EXPR:
1348 case TRUNC_MOD_EXPR:
1349 case CEIL_MOD_EXPR:
1350 case FLOOR_MOD_EXPR:
1351 case ROUND_MOD_EXPR:
1352 case RDIV_EXPR:
1353 *count += 10;
1354 break;
1355 case CALL_EXPR:
1356 {
1357 tree decl = get_callee_fndecl (x);
1358
1359 if (decl && DECL_BUILT_IN (decl))
1360 switch (DECL_FUNCTION_CODE (decl))
1361 {
1362 case BUILT_IN_CONSTANT_P:
1363 *walk_subtrees = 0;
1364 return NULL_TREE;
1365 case BUILT_IN_EXPECT:
1366 return NULL_TREE;
1367 default:
1368 break;
1369 }
1370 *count += 10;
1371 break;
1372 }
1373 default:
1374 /* Abort here so we know we don't miss any nodes. */
1375 abort ();
1376 }
1377 return NULL;
1378 }
1379
1380 /* Estimate number of instructions that will be created by expanding EXPR. */
1381 int
1382 estimate_num_insns (tree expr)
1383 {
1384 int num = 0;
1385 walk_tree_without_duplicates (&expr, estimate_num_insns_1, &num);
1386 return num;
1387 }
1388
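/* Rough worked example (illustrative): for the statement

     a = b + c / d;

   the walk above charges about 1 for the word-sized assignment, 1 for the
   addition and 10 for the division, i.e. roughly 12 in total; a CALL_EXPR
   to a non-builtin likewise contributes 10.  */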
1389 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
1390
1391 static tree
1392 expand_call_inline (tree *tp, int *walk_subtrees, void *data)
1393 {
1394 inline_data *id;
1395 tree t;
1396 tree expr;
1397 tree stmt;
1398 tree use_retvar;
1399 tree decl;
1400 tree fn;
1401 tree arg_inits;
1402 tree *inlined_body;
1403 tree inline_result;
1404 splay_tree st;
1405 tree args;
1406 tree return_slot_addr;
1407 location_t saved_location;
1408 struct cgraph_edge *edge;
1409 const char *reason;
1410
1411 /* See what we've got. */
1412 id = (inline_data *) data;
1413 t = *tp;
1414
1415 /* Set input_location here so we get the right instantiation context
1416 if we call instantiate_decl from inlinable_function_p. */
1417 saved_location = input_location;
1418 if (EXPR_HAS_LOCATION (t))
1419 input_location = EXPR_LOCATION (t);
1420
1421 /* Recurse, but letting recursive invocations know that we are
1422 inside the body of a TARGET_EXPR. */
1423 if (TREE_CODE (*tp) == TARGET_EXPR)
1424 {
1425 #if 0
1426 int i, len = first_rtl_op (TARGET_EXPR);
1427
1428 /* We're walking our own subtrees. */
1429 *walk_subtrees = 0;
1430
1431 /* Actually walk over them. This loop is the body of
1432 walk_trees, omitting the case where the TARGET_EXPR
1433 itself is handled. */
1434 for (i = 0; i < len; ++i)
1435 {
1436 if (i == 2)
1437 ++id->in_target_cleanup_p;
1438 walk_tree (&TREE_OPERAND (*tp, i), expand_call_inline, data,
1439 id->tree_pruner);
1440 if (i == 2)
1441 --id->in_target_cleanup_p;
1442 }
1443
1444 goto egress;
1445 #endif
1446 }
1447
1448 if (TYPE_P (t))
1449 /* Because types were not copied in copy_body, CALL_EXPRs beneath
1450 them should not be expanded. This can happen if the type is a
1451 dynamic array type, for example. */
1452 *walk_subtrees = 0;
1453
1454 /* From here on, we're only interested in CALL_EXPRs. */
1455 if (TREE_CODE (t) != CALL_EXPR)
1456 goto egress;
1457
1458 /* First, see if we can figure out what function is being called.
1459 If we cannot, then there is no hope of inlining the function. */
1460 fn = get_callee_fndecl (t);
1461 if (!fn)
1462 goto egress;
1463
1464 /* Turn forward declarations into real ones. */
1465 fn = cgraph_node (fn)->decl;
1466
1467 /* If fn is a declaration of a function in a nested scope that was
1468 globally declared inline, we don't set its DECL_INITIAL.
1469 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
1470 C++ front-end uses it for cdtors to refer to their internal
1471 declarations, which are not real functions. Fortunately those
1472 don't have trees to be saved, so we can tell by checking their
1473 DECL_SAVED_TREE. */
1474 if (! DECL_INITIAL (fn)
1475 && DECL_ABSTRACT_ORIGIN (fn)
1476 && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
1477 fn = DECL_ABSTRACT_ORIGIN (fn);
1478
1479 /* Objective-C and Fortran still call tree_rest_of_compilation directly.
1480 Kill this check once this is fixed. */
1481 if (!id->current_node->analyzed)
1482 goto egress;
1483
1484 edge = cgraph_edge (id->current_node, t);
1485
1486 /* Constant propagation on arguments done during previous inlining
1487 may create a new direct call. Produce an edge for it. */
1488 if (!edge)
1489 {
1490 struct cgraph_node *dest = cgraph_node (fn);
1491
1492 /* We have a missing edge in the callgraph. This can happen in the one case
1493 where previous inlining turned an indirect call into a direct call by
1494 constant propagating arguments. In all other cases we hit a bug
1495 (incorrect node sharing is the most common reason for missing edges). */
1496 if (!dest->needed)
1497 abort ();
1498 cgraph_create_edge (id->node, dest, t)->inline_failed
1499 = N_("originally indirect function call not considered for inlining");
1500 goto egress;
1501 }
1502
1503 /* Don't try to inline functions that are not well-suited to
1504 inlining. */
1505 if (!cgraph_inline_p (edge, &reason))
1506 {
1507 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1508 {
1509 sorry ("%Jinlining failed in call to '%F': %s", fn, fn, reason);
1510 sorry ("called from here");
1511 }
1512 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
1513 && !DECL_IN_SYSTEM_HEADER (fn)
1514 && strlen (reason))
1515 {
1516 warning ("%Jinlining failed in call to '%F': %s", fn, fn, reason);
1517 warning ("called from here");
1518 }
1519 goto egress;
1520 }
1521
1522 #ifdef ENABLE_CHECKING
1523 if (edge->callee->decl != id->node->decl)
1524 verify_cgraph_node (edge->callee);
1525 #endif
1526
1527 if (! lang_hooks.tree_inlining.start_inlining (fn))
1528 goto egress;
1529
1530 /* Build a block containing code to initialize the arguments, the
1531 actual inline expansion of the body, and a label for the return
1532 statements within the function to jump to. The type of the
1533 statement expression is the return type of the function call. */
1534 stmt = NULL;
1535 expr = build (BIND_EXPR, TREE_TYPE (TREE_TYPE (fn)), NULL_TREE,
1536 stmt, make_node (BLOCK));
1537 BLOCK_ABSTRACT_ORIGIN (BIND_EXPR_BLOCK (expr)) = fn;
1538
1539 /* Local declarations will be replaced by their equivalents in this
1540 map. */
1541 st = id->decl_map;
1542 id->decl_map = splay_tree_new (splay_tree_compare_pointers,
1543 NULL, NULL);
1544
1545 /* Initialize the parameters. */
1546 args = TREE_OPERAND (t, 1);
1547 return_slot_addr = NULL_TREE;
1548 if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (t))
1549 {
1550 return_slot_addr = TREE_VALUE (args);
1551 args = TREE_CHAIN (args);
1552 TREE_TYPE (expr) = void_type_node;
1553 }
1554
1555 arg_inits = initialize_inlined_parameters (id, args, TREE_OPERAND (t, 2),
1556 fn, expr);
1557 if (arg_inits)
1558 {
1559 /* Expand any inlined calls in the initializers. Do this before we
1560 push FN on the stack of functions we are inlining; we want to
1561 inline calls to FN that appear in the initializers for the
1562 parameters.
1563
1564 Note we need to save and restore the statement iterator
1565 to avoid having it clobbered by expand_calls_inline. */
1566 tree_stmt_iterator save_tsi;
1567
1568 save_tsi = id->tsi;
1569 expand_calls_inline (&arg_inits, id);
1570 id->tsi = save_tsi;
1571
1572 /* And add them to the tree. */
1573 append_to_statement_list (arg_inits, &BIND_EXPR_BODY (expr));
1574 }
1575
1576 /* Record the function we are about to inline so that we can avoid
1577 recursing into it. */
1578 VARRAY_PUSH_TREE (id->fns, fn);
1579
1580 /* Record the function we are about to inline if optimize_function
1581 has not been called on it yet and we don't have it in the list. */
1582 if (! DECL_INLINED_FNS (fn))
1583 {
1584 int i;
1585
1586 for (i = VARRAY_ACTIVE_SIZE (id->inlined_fns) - 1; i >= 0; i--)
1587 if (VARRAY_TREE (id->inlined_fns, i) == fn)
1588 break;
1589 if (i < 0)
1590 VARRAY_PUSH_TREE (id->inlined_fns, fn);
1591 }
1592
1593 /* Return statements in the function body will be replaced by jumps
1594 to the RET_LABEL. */
1595 id->ret_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
1596 DECL_ARTIFICIAL (id->ret_label) = 1;
1597 DECL_CONTEXT (id->ret_label) = VARRAY_TREE (id->fns, 0);
1598 insert_decl_map (id, id->ret_label, id->ret_label);
1599
1600 if (! DECL_INITIAL (fn)
1601 || TREE_CODE (DECL_INITIAL (fn)) != BLOCK)
1602 abort ();
1603
1604 /* Declare the return variable for the function. */
1605 decl = declare_return_variable (id, return_slot_addr, &use_retvar);
1606 if (decl)
1607 declare_inline_vars (expr, decl);
1608
1609 /* After we've initialized the parameters, we insert the body of the
1610 function itself. */
1611 {
1612 struct cgraph_node *old_node = id->current_node;
1613
1614 id->current_node = edge->callee;
1615 append_to_statement_list (copy_body (id), &BIND_EXPR_BODY (expr));
1616 id->current_node = old_node;
1617 }
1618 inlined_body = &BIND_EXPR_BODY (expr);
1619
1620 /* After the body of the function comes the RET_LABEL. This must come
1621 before we evaluate the returned value below, because that evaluation
1622 may cause RTL to be generated. */
1623 if (TREE_USED (id->ret_label))
1624 {
1625 tree label = build1 (LABEL_EXPR, void_type_node, id->ret_label);
1626 append_to_statement_list (label, &BIND_EXPR_BODY (expr));
1627 }
1628
1629 /* Finally, mention the returned value so that the value of the
1630 statement-expression is the returned value of the function. */
1631 if (use_retvar)
1632 /* Set TREE_TYPE on BIND_EXPR? */
1633 append_to_statement_list_force (use_retvar, &BIND_EXPR_BODY (expr));
1634
1635 /* Clean up. */
1636 splay_tree_delete (id->decl_map);
1637 id->decl_map = st;
1638
1639 /* The new expression has side-effects if the old one did. */
1640 TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (t);
1641
1642 /* If we are working with gimple form, then we need to keep the tree
1643 in gimple form. If we are not in gimple form, we can just replace
1644 *tp with the new BIND_EXPR. */
1645 if (lang_hooks.gimple_before_inlining)
1646 {
1647 tree save_decl;
1648
1649 /* Keep the new trees in gimple form. */
1650 BIND_EXPR_BODY (expr)
1651 = rationalize_compound_expr (BIND_EXPR_BODY (expr));
1652
1653 /* We want to create a new variable to hold the result of the
1654 inlined body. This new variable needs to be added to the
1655 function which we are inlining into, thus the saving and
1656 restoring of current_function_decl. */
1657 save_decl = current_function_decl;
1658 current_function_decl = id->node->decl;
1659 inline_result = voidify_wrapper_expr (expr, NULL);
1660 current_function_decl = save_decl;
1661
1662 /* If the inlined function returns a result that we care about,
1663 then we're going to need to splice in a MODIFY_EXPR. Otherwise
1664 the call was a standalone statement and we can just replace it
1665 with the BIND_EXPR inline representation of the called function. */
1666 if (TREE_CODE (tsi_stmt (id->tsi)) != CALL_EXPR)
1667 {
1668 tsi_link_before (&id->tsi, expr, TSI_SAME_STMT);
1669 *tp = inline_result;
1670 }
1671 else
1672 *tp = expr;
1673
1674 /* When we gimplify a function call, we may clear TREE_SIDE_EFFECTS
1675 on the call if it is to a "const" function. Thus the copy of
1676 TREE_SIDE_EFFECTS from the CALL_EXPR to the BIND_EXPR above
1677 will result in TREE_SIDE_EFFECTS not being set for the inlined
1678 copy of a "const" function.
1679
1680 Unfortunately, that is wrong as inlining the function
1681 can create/expose interesting side effects (such as setting
1682 of a return value).
1683
1684 The easiest solution is to simply recalculate TREE_SIDE_EFFECTS
1685 for the toplevel expression. */
1686 recalculate_side_effects (expr);
1687 }
1688 else
1689 *tp = expr;
1690
1691 /* If the value of the new expression is ignored, that's OK. We
1692 don't warn about this for CALL_EXPRs, so we shouldn't warn about
1693 the equivalent inlined version either. */
1694 TREE_USED (*tp) = 1;
1695
1696 /* Update callgraph if needed. */
1697 cgraph_remove_node (edge->callee);
1698
1699 /* Recurse into the body of the just inlined function. */
1700 expand_calls_inline (inlined_body, id);
1701 VARRAY_POP (id->fns);
1702
1703 /* Don't walk into subtrees. We've already handled them above. */
1704 *walk_subtrees = 0;
1705
1706 lang_hooks.tree_inlining.end_inlining (fn);
1707
1708 /* Keep iterating. */
1709 egress:
1710 input_location = saved_location;
1711 return NULL_TREE;
1712 }
1713
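/* End-to-end sketch (illustrative): given

     static inline int add1 (int a) { return a + 1; }

   a statement `y = add1 (x);' is replaced, roughly, by a BIND_EXPR of the
   form

     {
       int a.1;
       a.1 = x;
       retvar = a.1 + 1;
       ret_label:;
     }

   followed by a use of the return variable in place of the original
   CALL_EXPR, so that `y' still receives the result.  */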
1714 static void
1715 gimple_expand_calls_inline (tree *stmt_p, inline_data *id)
1716 {
1717 tree stmt = *stmt_p;
1718 enum tree_code code = TREE_CODE (stmt);
1719 int dummy;
1720
1721 switch (code)
1722 {
1723 case STATEMENT_LIST:
1724 {
1725 tree_stmt_iterator i;
1726 tree new;
1727
1728 for (i = tsi_start (stmt); !tsi_end_p (i); )
1729 {
1730 id->tsi = i;
1731 gimple_expand_calls_inline (tsi_stmt_ptr (i), id);
1732
1733 new = tsi_stmt (i);
1734 if (TREE_CODE (new) == STATEMENT_LIST)
1735 {
1736 tsi_link_before (&i, new, TSI_SAME_STMT);
1737 tsi_delink (&i);
1738 }
1739 else
1740 tsi_next (&i);
1741 }
1742 }
1743 break;
1744
1745 case COND_EXPR:
1746 gimple_expand_calls_inline (&COND_EXPR_THEN (stmt), id);
1747 gimple_expand_calls_inline (&COND_EXPR_ELSE (stmt), id);
1748 break;
1749 case CATCH_EXPR:
1750 gimple_expand_calls_inline (&CATCH_BODY (stmt), id);
1751 break;
1752 case EH_FILTER_EXPR:
1753 gimple_expand_calls_inline (&EH_FILTER_FAILURE (stmt), id);
1754 break;
1755 case TRY_CATCH_EXPR:
1756 case TRY_FINALLY_EXPR:
1757 gimple_expand_calls_inline (&TREE_OPERAND (stmt, 0), id);
1758 gimple_expand_calls_inline (&TREE_OPERAND (stmt, 1), id);
1759 break;
1760 case BIND_EXPR:
1761 gimple_expand_calls_inline (&BIND_EXPR_BODY (stmt), id);
1762 break;
1763
1764 case COMPOUND_EXPR:
1765 /* We're gimple. We should have gotten rid of all these. */
1766 abort ();
1767
1768 case RETURN_EXPR:
1769 stmt_p = &TREE_OPERAND (stmt, 0);
1770 stmt = *stmt_p;
1771 if (!stmt || TREE_CODE (stmt) != MODIFY_EXPR)
1772 break;
1773 /* FALLTHRU */
1774 case MODIFY_EXPR:
1775 stmt_p = &TREE_OPERAND (stmt, 1);
1776 stmt = *stmt_p;
1777 if (TREE_CODE (stmt) != CALL_EXPR)
1778 break;
1779 /* FALLTHRU */
1780 case CALL_EXPR:
1781 expand_call_inline (stmt_p, &dummy, id);
1782 break;
1783
1784 default:
1785 break;
1786 }
1787 }
1788
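/* In GIMPLE form a call can appear in only a few positions, which is what
   the dispatch above relies on (illustrative examples):

     foo (x);                       CALL_EXPR as a whole statement
     y = foo (x);                   MODIFY_EXPR with the call on the RHS
     return <retval> = foo (x);     RETURN_EXPR wrapping such a MODIFY_EXPR

   Control constructs (COND_EXPR, BIND_EXPR, TRY_*, etc.) are simply
   descended into until one of these forms is found.  */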
1789 /* Walk over the entire tree *TP, replacing CALL_EXPRs with inline
1790 expansions as appropriate. */
1791
1792 static void
1793 expand_calls_inline (tree *tp, inline_data *id)
1794 {
1795 /* If we are not in gimple form, then we want to walk the tree
1796 recursively as we do not know anything about the structure
1797 of the tree. */
1798
1799 if (!lang_hooks.gimple_before_inlining)
1800 {
1801 walk_tree (tp, expand_call_inline, id, id->tree_pruner);
1802 return;
1803 }
1804
1805 /* We are in gimple form. We want to stay in gimple form. Walk
1806 the statements, inlining calls in each statement. By walking
1807 the statements, we have enough information to keep the tree
1808 in gimple form as we insert inline bodies. */
1809
1810 gimple_expand_calls_inline (tp, id);
1811 }
1812
1813 /* Expand calls to inline functions in the body of FN. */
1814
1815 void
1816 optimize_inline_calls (tree fn)
1817 {
1818 inline_data id;
1819 tree prev_fn;
1820
1821 /* There is no point in performing inlining if errors have already
1822 occurred -- and we might crash if we try to inline invalid
1823 code. */
1824 if (errorcount || sorrycount)
1825 return;
1826
1827 /* Clear out ID. */
1828 memset (&id, 0, sizeof (id));
1829
1830 id.current_node = id.node = cgraph_node (fn);
1831 /* Don't allow recursion into FN. */
1832 VARRAY_TREE_INIT (id.fns, 32, "fns");
1833 VARRAY_PUSH_TREE (id.fns, fn);
1834 /* Or any functions that aren't finished yet. */
1835 prev_fn = NULL_TREE;
1836 if (current_function_decl)
1837 {
1838 VARRAY_PUSH_TREE (id.fns, current_function_decl);
1839 prev_fn = current_function_decl;
1840 }
1841
1842 prev_fn = (lang_hooks.tree_inlining.add_pending_fn_decls
1843 (&id.fns, prev_fn));
1844
1845 /* Create the list of functions this call will inline. */
1846 VARRAY_TREE_INIT (id.inlined_fns, 32, "inlined_fns");
1847
1848 /* Keep track of the low-water mark, i.e., the point where the first
1849 real inlining is represented in ID.FNS. */
1850 id.first_inlined_fn = VARRAY_ACTIVE_SIZE (id.fns);
1851
1852 /* Replace all calls to inline functions with the bodies of those
1853 functions. */
1854 id.tree_pruner = htab_create (37, htab_hash_pointer,
1855 htab_eq_pointer, NULL);
1856 expand_calls_inline (&DECL_SAVED_TREE (fn), &id);
1857
1858 /* Clean up. */
1859 htab_delete (id.tree_pruner);
1860 if (DECL_LANG_SPECIFIC (fn))
1861 {
1862 tree ifn = make_tree_vec (VARRAY_ACTIVE_SIZE (id.inlined_fns));
1863
1864 if (VARRAY_ACTIVE_SIZE (id.inlined_fns))
1865 memcpy (&TREE_VEC_ELT (ifn, 0), &VARRAY_TREE (id.inlined_fns, 0),
1866 VARRAY_ACTIVE_SIZE (id.inlined_fns) * sizeof (tree));
1867 DECL_INLINED_FNS (fn) = ifn;
1868 }
1869
1870 #ifdef ENABLE_CHECKING
1871 {
1872 struct cgraph_edge *e;
1873
1874 verify_cgraph_node (id.node);
1875
1876 /* Double check that we inlined everything we are supposed to inline. */
1877 for (e = id.node->callees; e; e = e->next_callee)
1878 if (!e->inline_failed)
1879 abort ();
1880 }
1881 #endif
1882 }
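
/* Illustrative sketch (not part of the original file): a front end would
   invoke optimize_inline_calls once per function body when tree-level
   inlining is enabled. The driver below is hypothetical and guarded out;
   it assumes only flag_inline_trees, declared earlier in this file. */
#if 0
static void
expand_body_sketch (tree fndecl)
{
  /* Only worth doing when tree-level inlining is enabled. */
  if (flag_inline_trees)
    optimize_inline_calls (fndecl);
}
#endif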
1883
1884 /* FN is a function that has a complete body, and CLONE is a function
1885 whose body is to be set to a copy of FN, mapping argument
1886 declarations according to the ARG_MAP splay_tree. */
1887
1888 void
1889 clone_body (tree clone, tree fn, void *arg_map)
1890 {
1891 inline_data id;
1892
1893 /* Clone the body, as if we were making an inline call. But, remap
1894 the parameters in the callee to the parameters of the caller. If
1895 there's an in-charge parameter, map it to an appropriate
1896 constant. */
1897 memset (&id, 0, sizeof (id));
1898 VARRAY_TREE_INIT (id.fns, 2, "fns");
1899 VARRAY_PUSH_TREE (id.fns, clone);
1900 VARRAY_PUSH_TREE (id.fns, fn);
1901 id.decl_map = (splay_tree)arg_map;
1902
1903 /* Cloning is treated slightly differently from inlining. Set
1904 CLONING_P so that it's clear which operation we're performing. */
1905 id.cloning_p = true;
1906
1907 /* Actually copy the body. */
1908 append_to_statement_list_force (copy_body (&id), &DECL_SAVED_TREE (clone));
1909 }
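
/* Illustrative sketch (not part of the original file): the ARG_MAP passed
   to clone_body is a splay tree keyed on FN's PARM_DECLs whose values are
   the corresponding PARM_DECLs (or constants) to use in CLONE. The helper
   below is hypothetical and guarded out; it assumes the two functions have
   matching parameter lists. */
#if 0
static void
clone_with_mapped_parms (tree clone, tree fn)
{
  splay_tree arg_map
    = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
  tree from, to;

  /* Map each parameter of FN to the matching parameter of CLONE. */
  for (from = DECL_ARGUMENTS (fn), to = DECL_ARGUMENTS (clone);
       from && to;
       from = TREE_CHAIN (from), to = TREE_CHAIN (to))
    splay_tree_insert (arg_map, (splay_tree_key) from,
                       (splay_tree_value) to);

  clone_body (clone, fn, arg_map);
  splay_tree_delete (arg_map);
}
#endif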
1910
1911 /* Save a duplicate of the body of FN. The copied PARM_DECL chain used by the
1912 duplicate is passed back through *ARG_COPY for later argument restoration. */
1913 tree
1914 save_body (tree fn, tree *arg_copy)
1915 {
1916 inline_data id;
1917 tree body, *parg;
1918
1919 memset (&id, 0, sizeof (id));
1920 VARRAY_TREE_INIT (id.fns, 1, "fns");
1921 VARRAY_PUSH_TREE (id.fns, fn);
1922 id.node = cgraph_node (fn);
1923 id.saving_p = true;
1924 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
1925 *arg_copy = DECL_ARGUMENTS (fn);
1926 for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
1927 {
1928 tree new = copy_node (*parg);
1929 lang_hooks.dup_lang_specific_decl (new);
1930 DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*parg);
1931 insert_decl_map (&id, *parg, new);
1932 TREE_CHAIN (new) = TREE_CHAIN (*parg);
1933 *parg = new;
1934 }
1935 insert_decl_map (&id, DECL_RESULT (fn), DECL_RESULT (fn));
1936
1937 /* Actually copy the body. */
1938 body = copy_body (&id);
1939
1940 /* Clean up. */
1941 splay_tree_delete (id.decl_map);
1942 return body;
1943 }
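
/* Illustrative sketch (not part of the original file): save_body returns a
   duplicate of FN's body in which the parameters have been remapped to
   freshly copied PARM_DECLs, and that copied chain comes back through the
   second argument. The wrapper below is hypothetical and guarded out. */
#if 0
static void
stash_body_sketch (tree fn, tree *saved_body, tree *saved_args)
{
  /* *SAVED_ARGS receives the copied PARM_DECL chain; *SAVED_BODY the
     duplicate body that refers to those copies. */
  *saved_body = save_body (fn, saved_args);
}
#endif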
1944
1945 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal.
1946 FUNC is called with the DATA and the address of each sub-tree. If
1947 FUNC returns a non-NULL value, the traversal is aborted, and the
1948 value returned by FUNC is returned. If HTAB is non-NULL it is used
1949 to record the nodes visited, and to avoid visiting a node more than
1950 once. */
1951
1952 tree
1953 walk_tree (tree *tp, walk_tree_fn func, void *data, void *htab_)
1954 {
1955 htab_t htab = (htab_t) htab_;
1956 enum tree_code code;
1957 int walk_subtrees;
1958 tree result;
1959
1960 #define WALK_SUBTREE(NODE) \
1961 do \
1962 { \
1963 result = walk_tree (&(NODE), func, data, htab); \
1964 if (result) \
1965 return result; \
1966 } \
1967 while (0)
1968
1969 #define WALK_SUBTREE_TAIL(NODE) \
1970 do \
1971 { \
1972 tp = & (NODE); \
1973 goto tail_recurse; \
1974 } \
1975 while (0)
1976
1977 tail_recurse:
1978 /* Skip empty subtrees. */
1979 if (!*tp)
1980 return NULL_TREE;
1981
1982 if (htab)
1983 {
1984 void **slot;
1985
1986 /* Don't walk the same tree twice, if the user has requested
1987 that we avoid doing so. */
1988 slot = htab_find_slot (htab, *tp, INSERT);
1989 if (*slot)
1990 return NULL_TREE;
1991 *slot = *tp;
1992 }
1993
1994 /* Call the function. */
1995 walk_subtrees = 1;
1996 result = (*func) (tp, &walk_subtrees, data);
1997
1998 /* If we found something, return it. */
1999 if (result)
2000 return result;
2001
2002 code = TREE_CODE (*tp);
2003
2004 /* Even if we didn't, FUNC may have decided that there was nothing
2005 interesting below this point in the tree. */
2006 if (!walk_subtrees)
2007 {
2008 if (code == TREE_LIST)
2009 /* But we still need to check our siblings. */
2010 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
2011 else
2012 return NULL_TREE;
2013 }
2014
2015 result = lang_hooks.tree_inlining.walk_subtrees (tp, &walk_subtrees, func,
2016 data, htab);
2017 if (result || ! walk_subtrees)
2018 return result;
2019
2020 if (code != EXIT_BLOCK_EXPR
2021 && code != SAVE_EXPR
2022 && code != BIND_EXPR
2023 && IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
2024 {
2025 int i, len;
2026
2027 /* Walk over all the sub-trees of this operand. */
2028 len = first_rtl_op (code);
2029 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
2030 But, we only want to walk once. */
2031 if (code == TARGET_EXPR
2032 && TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1))
2033 --len;
2034 /* Go through the subtrees. We need to do this in forward order so
2035 that the scope of a FOR_EXPR is handled properly. */
2036 #ifdef DEBUG_WALK_TREE
2037 for (i = 0; i < len; ++i)
2038 WALK_SUBTREE (TREE_OPERAND (*tp, i));
2039 #else
2040 for (i = 0; i < len - 1; ++i)
2041 WALK_SUBTREE (TREE_OPERAND (*tp, i));
2042
2043 if (len)
2044 {
2045 /* The common case is that we may tail recurse here. */
2046 if (code != BIND_EXPR
2047 && !TREE_CHAIN (*tp))
2048 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
2049 else
2050 WALK_SUBTREE (TREE_OPERAND (*tp, len - 1));
2051 }
2052 #endif
2053 }
2054 else if (TREE_CODE_CLASS (code) == 'd')
2055 {
2056 WALK_SUBTREE_TAIL (TREE_TYPE (*tp));
2057 }
2058 else
2059 {
2060 if (TREE_CODE_CLASS (code) == 't')
2061 {
2062 WALK_SUBTREE (TYPE_SIZE (*tp));
2063 WALK_SUBTREE (TYPE_SIZE_UNIT (*tp));
2064 /* Also examine various special fields, below. */
2065 }
2066
2067 /* Not one of the easy cases. We must explicitly go through the
2068 children. */
2069 switch (code)
2070 {
2071 case ERROR_MARK:
2072 case IDENTIFIER_NODE:
2073 case INTEGER_CST:
2074 case REAL_CST:
2075 case VECTOR_CST:
2076 case STRING_CST:
2077 case REAL_TYPE:
2078 case COMPLEX_TYPE:
2079 case VECTOR_TYPE:
2080 case VOID_TYPE:
2081 case BOOLEAN_TYPE:
2082 case UNION_TYPE:
2083 case ENUMERAL_TYPE:
2084 case BLOCK:
2085 case RECORD_TYPE:
2086 case PLACEHOLDER_EXPR:
2087 case SSA_NAME:
2088 /* None of these have subtrees other than those already walked
2089 above. */
2090 break;
2091
2092 case POINTER_TYPE:
2093 case REFERENCE_TYPE:
2094 WALK_SUBTREE_TAIL (TREE_TYPE (*tp));
2095 break;
2096
2097 case TREE_LIST:
2098 WALK_SUBTREE (TREE_VALUE (*tp));
2099 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
2100 break;
2101
2102 case TREE_VEC:
2103 {
2104 int len = TREE_VEC_LENGTH (*tp);
2105
2106 if (len == 0)
2107 break;
2108
2109 /* Walk all elements but the first. */
2110 while (--len)
2111 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
2112
2113 /* Now walk the first one as a tail call. */
2114 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
2115 }
2116
2117 case COMPLEX_CST:
2118 WALK_SUBTREE (TREE_REALPART (*tp));
2119 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
2120
2121 case CONSTRUCTOR:
2122 WALK_SUBTREE_TAIL (CONSTRUCTOR_ELTS (*tp));
2123
2124 case METHOD_TYPE:
2125 WALK_SUBTREE (TYPE_METHOD_BASETYPE (*tp));
2126 /* Fall through. */
2127
2128 case FUNCTION_TYPE:
2129 WALK_SUBTREE (TREE_TYPE (*tp));
2130 {
2131 tree arg = TYPE_ARG_TYPES (*tp);
2132
2133 /* We never want to walk into default arguments. */
2134 for (; arg; arg = TREE_CHAIN (arg))
2135 WALK_SUBTREE (TREE_VALUE (arg));
2136 }
2137 break;
2138
2139 case ARRAY_TYPE:
2140 WALK_SUBTREE (TREE_TYPE (*tp));
2141 WALK_SUBTREE_TAIL (TYPE_DOMAIN (*tp));
2142
2143 case INTEGER_TYPE:
2144 case CHAR_TYPE:
2145 WALK_SUBTREE (TYPE_MIN_VALUE (*tp));
2146 WALK_SUBTREE_TAIL (TYPE_MAX_VALUE (*tp));
2147
2148 case OFFSET_TYPE:
2149 WALK_SUBTREE (TREE_TYPE (*tp));
2150 WALK_SUBTREE_TAIL (TYPE_OFFSET_BASETYPE (*tp));
2151
2152 case EXIT_BLOCK_EXPR:
2153 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 1));
2154
2155 case SAVE_EXPR:
2156 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
2157
2158 case BIND_EXPR:
2159 {
2160 tree decl;
2161 for (decl = BIND_EXPR_VARS (*tp); decl; decl = TREE_CHAIN (decl))
2162 {
2163 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
2164 into declarations that are just mentioned, rather than
2165 declared; they don't really belong to this part of the tree.
2166 And, we can see cycles: the initializer for a declaration can
2167 refer to the declaration itself. */
2168 WALK_SUBTREE (DECL_INITIAL (decl));
2169 WALK_SUBTREE (DECL_SIZE (decl));
2170 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
2171 WALK_SUBTREE (TREE_TYPE (decl));
2172 }
2173 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
2174 }
2175
2176 case STATEMENT_LIST:
2177 {
2178 tree_stmt_iterator i;
2179 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
2180 WALK_SUBTREE (*tsi_stmt_ptr (i));
2181 }
2182 break;
2183
2184 default:
2185 abort ();
2186 }
2187 }
2188
2189 /* We didn't find what we were looking for. */
2190 return NULL_TREE;
2191
2192 #undef WALK_SUBTREE
2193 #undef WALK_SUBTREE_TAIL
2194 }
2195
2196 /* Like walk_tree, but does not walk duplicate nodes more than
2197 once. */
2198
2199 tree
2200 walk_tree_without_duplicates (tree *tp, walk_tree_fn func, void *data)
2201 {
2202 tree result;
2203 htab_t htab;
2204
2205 htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
2206 result = walk_tree (tp, func, data, htab);
2207 htab_delete (htab);
2208 return result;
2209 }
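
/* Illustrative sketch (not part of the original file): a minimal walk_tree
   callback that counts CALL_EXPR nodes, plus a hypothetical driver that
   walks a function body once using walk_tree_without_duplicates. Both are
   guarded out. */
#if 0
static tree
count_calls_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  /* DATA points to an int accumulator supplied by the caller. */
  if (TREE_CODE (*tp) == CALL_EXPR)
    (*(int *) data)++;

  /* Returning NULL_TREE keeps the traversal going. */
  return NULL_TREE;
}

static int
count_calls (tree fn)
{
  int count = 0;
  walk_tree_without_duplicates (&DECL_SAVED_TREE (fn), count_calls_r, &count);
  return count;
}
#endif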
2210
2211 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
2212
2213 tree
2214 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2215 {
2216 enum tree_code code = TREE_CODE (*tp);
2217
2218 /* We make copies of most nodes. */
2219 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
2220 || TREE_CODE_CLASS (code) == 'c'
2221 || code == TREE_LIST
2222 || code == TREE_VEC
2223 || code == TYPE_DECL)
2224 {
2225 /* Because the chain gets clobbered when we make a copy, we save it
2226 here. */
2227 tree chain = TREE_CHAIN (*tp);
2228 tree new;
2229
2230 /* Copy the node. */
2231 new = copy_node (*tp);
2232
2233 /* Propagate mudflap marked-ness. */
2234 if (flag_mudflap && mf_marked_p (*tp))
2235 mf_mark (new);
2236
2237 *tp = new;
2238
2239 /* Now, restore the chain, if appropriate. That will cause
2240 walk_tree to walk into the chain as well. */
2241 if (code == PARM_DECL || code == TREE_LIST)
2242 TREE_CHAIN (*tp) = chain;
2243
2244 /* For now, we don't update BLOCKs when we make copies. So, we
2245 have to nullify all BIND_EXPRs. */
2246 if (TREE_CODE (*tp) == BIND_EXPR)
2247 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
2248 }
2249 else if (TREE_CODE_CLASS (code) == 't')
2250 *walk_subtrees = 0;
2251 else if (TREE_CODE_CLASS (code) == 'd')
2252 *walk_subtrees = 0;
2253 else if (code == STATEMENT_LIST)
2254 abort ();
2255
2256 return NULL_TREE;
2257 }
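
/* Illustrative sketch (not part of the original file): copy_tree_r is meant
   to be used as a walk_tree callback. A caller that wants a structural copy
   of an expression might use it roughly as below; the helper name is
   hypothetical and the code is guarded out. Note that types and decls are
   shared rather than copied, since copy_tree_r does not walk into them. */
#if 0
static tree
copy_expr_sketch (tree expr)
{
  /* Replace each expression node reachable from EXPR with a copy,
     in place. */
  walk_tree (&expr, copy_tree_r, NULL, NULL);
  return expr;
}
#endif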
2258
2259 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
2260 information indicating to what new SAVE_EXPR this one should be
2261 mapped, use that one. Otherwise, create a new node and enter it in
2262 ST. FN is the function into which the copy will be placed. */
2263
2264 void
2265 remap_save_expr (tree *tp, void *st_, tree fn, int *walk_subtrees)
2266 {
2267 splay_tree st = (splay_tree) st_;
2268 splay_tree_node n;
2269 tree t;
2270
2271 /* See if we already encountered this SAVE_EXPR. */
2272 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2273
2274 /* If we didn't already remap this SAVE_EXPR, do so now. */
2275 if (!n)
2276 {
2277 t = copy_node (*tp);
2278
2279 /* The SAVE_EXPR is now part of the function into which we
2280 are inlining this body. */
2281 SAVE_EXPR_CONTEXT (t) = fn;
2282 /* And we haven't evaluated it yet. */
2283 SAVE_EXPR_RTL (t) = NULL_RTX;
2284 /* Remember this SAVE_EXPR. */
2285 splay_tree_insert (st, (splay_tree_key) *tp, (splay_tree_value) t);
2286 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
2287 splay_tree_insert (st, (splay_tree_key) t, (splay_tree_value) t);
2288 }
2289 else
2290 {
2291 /* We've already walked into this SAVE_EXPR; don't do it again. */
2292 *walk_subtrees = 0;
2293 t = (tree) n->value;
2294 }
2295
2296 /* Replace this SAVE_EXPR with the copy. */
2297 *tp = t;
2298 }
2299
2300 /* Called via walk_tree. If *TP is a LABEL_EXPR for a local label,
2301 copies the label declaration and enters it in the splay_tree in
2302 DATA (which is really an `inline_data *'). */
2303
2304 static tree
2305 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
2306 void *data)
2307 {
2308 tree t = *tp;
2309 inline_data *id = (inline_data *) data;
2310 tree decl;
2311
2312 /* Don't walk into types. */
2313 if (TYPE_P (t))
2314 {
2315 *walk_subtrees = 0;
2316 return NULL_TREE;
2317 }
2318
2319 if (TREE_CODE (t) == LABEL_EXPR)
2320 decl = TREE_OPERAND (t, 0);
2321 else
2322 /* We don't need to handle anything else ahead of time. */
2323 decl = NULL_TREE;
2324
2325 if (decl)
2326 {
2327 tree copy;
2328
2329 /* Make a copy. */
2330 copy = copy_decl_for_inlining (decl,
2331 DECL_CONTEXT (decl),
2332 DECL_CONTEXT (decl));
2333
2334 /* Remember the copy. */
2335 insert_decl_map (id, decl, copy);
2336 }
2337
2338 return NULL_TREE;
2339 }
2340
2341 /* Called via walk_tree when an expression is unsaved. Using the
2342 decl_map splay tree in DATA (which is really an `inline_data *'),
2343 remaps all local declarations to appropriate replacements. */
2344
2345 static tree
2346 unsave_r (tree *tp, int *walk_subtrees, void *data)
2347 {
2348 inline_data *id = (inline_data *) data;
2349 splay_tree st = id->decl_map;
2350 splay_tree_node n;
2351
2352 /* Only remap a local declaration (a variable or a label). */
2353 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
2354 || TREE_CODE (*tp) == LABEL_DECL)
2355 {
2356 /* Lookup the declaration. */
2357 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2358
2359 /* If it's there, remap it. */
2360 if (n)
2361 *tp = (tree) n->value;
2362 }
2363 else if (TREE_CODE (*tp) == STATEMENT_LIST)
2364 copy_statement_list (tp);
2365 else if (TREE_CODE (*tp) == BIND_EXPR)
2366 copy_bind_expr (tp, walk_subtrees, id);
2367 else if (TREE_CODE (*tp) == SAVE_EXPR)
2368 remap_save_expr (tp, st, current_function_decl, walk_subtrees);
2369 else
2370 {
2371 copy_tree_r (tp, walk_subtrees, NULL);
2372
2373 /* Do whatever unsaving is required. */
2374 unsave_expr_1 (*tp);
2375 }
2376
2377 /* Keep iterating. */
2378 return NULL_TREE;
2379 }
2380
2381 /* Default lang hook for "unsave_expr_now". Copies everything in EXPR and
2382 replaces variables, labels and SAVE_EXPRs local to EXPR. */
2383
2384 tree
2385 lhd_unsave_expr_now (tree expr)
2386 {
2387 inline_data id;
2388
2389 /* There's nothing to do for NULL_TREE. */
2390 if (expr == 0)
2391 return expr;
2392
2393 /* Set up ID. */
2394 memset (&id, 0, sizeof (id));
2395 VARRAY_TREE_INIT (id.fns, 1, "fns");
2396 VARRAY_PUSH_TREE (id.fns, current_function_decl);
2397 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
2398
2399 /* Walk the tree once to find local labels. */
2400 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
2401
2402 /* Walk the tree again, copying, remapping, and unsaving. */
2403 walk_tree (&expr, unsave_r, &id, NULL);
2404
2405 /* Clean up. */
2406 splay_tree_delete (id.decl_map);
2407
2408 return expr;
2409 }
2410
2411 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
2412 static tree
2413 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
2414 {
2415 if (*tp == data)
2416 return (tree) data;
2417 else
2418 return NULL;
2419 }
2420
2421 extern bool debug_find_tree (tree top, tree search);
2422
2423 bool
2424 debug_find_tree (tree top, tree search)
2425 {
2426 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
2427 }
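
/* Illustrative note (not part of the original file): debug_find_tree is
   meant to be called by hand from the debugger, e.g.

     (gdb) print debug_find_tree (outer_tree, suspect_node)

   which prints true when SUSPECT_NODE occurs somewhere below OUTER_TREE;
   the variable names are, of course, whatever happens to be in scope. */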
2428
2429
2430 /* Declare the variables created by the inliner. Add all the variables in
2431 VARS to BIND_EXPR. */
2432
2433 static void
2434 declare_inline_vars (tree bind_expr, tree vars)
2435 {
2436 if (lang_hooks.gimple_before_inlining)
2437 {
2438 tree t;
2439 for (t = vars; t; t = TREE_CHAIN (t))
2440 t->decl.seen_in_bind_expr = 1;
2441 }
2442
2443 add_var_to_bind_expr (bind_expr, vars);
2444 }