gcc.gnu.org Git - gcc.git/blob - gcc/cp/optimize.c
optimize.c: Include toplev.h for note_deferral_of_defined_inline_function prototype.
1 /* Perform optimizations on tree structure.
2 Copyright (C) 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
3 Written by Mark Mitchell (mark@codesourcery.com).
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "tree.h"
25 #include "cp-tree.h"
26 #include "rtl.h"
27 #include "insn-config.h"
28 #include "input.h"
29 #include "integrate.h"
30 #include "toplev.h"
31 #include "varray.h"
32 #include "ggc.h"
33
34 /* To Do:
35
36 o In order to make inlining-on-trees work, we pessimized
37 function-local static constants. In particular, they are now
38 always output, even when not addressed. Fix this by treating
39 function-local static constants just like global static
40 constants; the back-end already knows not to output them if they
41 are not needed.
42
43 o Provide heuristics to clamp inlining of recursive template
44 calls? */
45
46 /* Data required for function inlining. */
47
48 typedef struct inline_data
49 {
50 /* A stack of the functions we are inlining. For example, if we are
51 compiling `f', which calls `g', which calls `h', and we are
52 inlining the body of `h', the stack will contain `h', followed
53 by `g', followed by `f'. */
54 varray_type fns;
55 /* The label to jump to when a return statement is encountered. If
56 this value is NULL, then return statements will simply be
57 remapped as return statements, rather than as jumps. */
58 tree ret_label;
59 /* The map from local declarations in the inlined function to
60 equivalents in the function into which it is being inlined. */
61 splay_tree decl_map;
62 /* Nonzero if we are currently within the cleanup for a
63 TARGET_EXPR. */
64 int in_target_cleanup_p;
65 /* A stack of the TARGET_EXPRs that we are currently processing. */
66 varray_type target_exprs;
67 /* A list of the functions the current function has inlined. */
68 varray_type inlined_fns;
69 } inline_data;
70
71 /* Prototypes. */
72
73 static tree initialize_inlined_parameters PARAMS ((inline_data *, tree, tree));
74 static tree declare_return_variable PARAMS ((inline_data *, tree *));
75 static tree copy_body_r PARAMS ((tree *, int *, void *));
76 static tree copy_body PARAMS ((inline_data *));
77 static tree expand_call_inline PARAMS ((tree *, int *, void *));
78 static void expand_calls_inline PARAMS ((tree *, inline_data *));
79 static int inlinable_function_p PARAMS ((tree, inline_data *));
80 static tree remap_decl PARAMS ((tree, inline_data *));
81 static void remap_block PARAMS ((tree, tree, inline_data *));
82 static void copy_scope_stmt PARAMS ((tree *, int *, inline_data *));
83 static tree calls_setjmp_r PARAMS ((tree *, int *, void *));
84
85 /* Remap DECL during the copying of the BLOCK tree for the function.
86 ID points to the `inline_data' for the inlining in progress. */
87
88 static tree
89 remap_decl (decl, id)
90 tree decl;
91 inline_data *id;
92 {
93 splay_tree_node n;
94 tree fn;
95
96 /* We only remap local variables in the current function. */
97 fn = VARRAY_TOP_TREE (id->fns);
98 if (!nonstatic_local_decl_p (decl) || DECL_CONTEXT (decl) != fn)
99 return NULL_TREE;
100
101 /* See if we have remapped this declaration. */
102 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
103 /* If we didn't already have an equivalent for this declaration,
104 create one now. */
105 if (!n)
106 {
107 tree t;
108
109 /* Make a copy of the variable or label. */
110 t = copy_decl_for_inlining (decl, fn,
111 VARRAY_TREE (id->fns, 0));
112
113 /* The decl T could be a dynamic array or other variable size type,
114 in which case some fields need to be remapped because they may
115 contain SAVE_EXPRs. */
116 walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
117 walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);
118 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE
119 && TYPE_DOMAIN (TREE_TYPE (t)))
120 {
121 TREE_TYPE (t) = copy_node (TREE_TYPE (t));
122 TYPE_DOMAIN (TREE_TYPE (t))
123 = copy_node (TYPE_DOMAIN (TREE_TYPE (t)));
124 walk_tree (&TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (t))),
125 copy_body_r, id, NULL);
126 }
127
128 /* Remember it, so that if we encounter this local entity
129 again we can reuse this copy. */
130 n = splay_tree_insert (id->decl_map,
131 (splay_tree_key) decl,
132 (splay_tree_value) t);
133 }
134
135 return (tree) n->value;
136 }
137
138 /* Copy the SCOPE_STMT_BLOCK associated with SCOPE_STMT to contain
139 remapped versions of the variables therein, and hook the new block
140 into the block-tree. If non-NULL, DECLS is a list of declarations to
141 use instead of the BLOCK_VARS in the old block. */
142
143 static void
144 remap_block (scope_stmt, decls, id)
145 tree scope_stmt;
146 tree decls;
147 inline_data *id;
148 {
149 /* We cannot do this in the cleanup for a TARGET_EXPR since we do
150 not know whether or not expand_expr will actually write out the
151 code we put there. If it does not, then we'll have more BLOCKs
152 than block-notes, and things will go awry. At some point, we
153 should make the back-end handle BLOCK notes in a tidier way,
154 without requiring a strict correspondence to the block-tree; then
155 this check can go. */
156 if (id->in_target_cleanup_p)
157 {
158 SCOPE_STMT_BLOCK (scope_stmt) = NULL_TREE;
159 return;
160 }
161
162 /* If this is the beginning of a scope, remap the associated BLOCK. */
163 if (SCOPE_BEGIN_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
164 {
165 tree old_block;
166 tree new_block;
167 tree old_var;
168 tree fn;
169
170 /* Make the new block. */
171 old_block = SCOPE_STMT_BLOCK (scope_stmt);
172 new_block = make_node (BLOCK);
173 TREE_USED (new_block) = TREE_USED (old_block);
174 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
175 SCOPE_STMT_BLOCK (scope_stmt) = new_block;
176
177 /* Remap its variables. */
178 for (old_var = decls ? decls : BLOCK_VARS (old_block);
179 old_var;
180 old_var = TREE_CHAIN (old_var))
181 {
182 tree new_var;
183
184 /* Remap the variable. */
185 new_var = remap_decl (old_var, id);
186 /* If we didn't remap this variable, we can't mess with
187 its TREE_CHAIN. If we remapped this variable to
188 something other than a declaration (say, if we mapped it
189 to a constant), then we must similarly omit any mention
190 of it here. */
191 if (!new_var || !DECL_P (new_var))
192 ;
193 else
194 {
195 TREE_CHAIN (new_var) = BLOCK_VARS (new_block);
196 BLOCK_VARS (new_block) = new_var;
197 }
198 }
199 /* We put the BLOCK_VARS in reverse order; fix that now. */
200 BLOCK_VARS (new_block) = nreverse (BLOCK_VARS (new_block));
201 fn = VARRAY_TREE (id->fns, 0);
202 if (fn == current_function_decl)
203 /* We're building a clone; DECL_INITIAL is still error_mark_node, and
204 current_binding_level is the parm binding level. */
205 insert_block (new_block);
206 else
207 {
208 /* Attach this new block after the DECL_INITIAL block for the
209 function into which this block is being inlined. In
210 rest_of_compilation we will straighten out the BLOCK tree. */
211 tree *first_block;
212 if (DECL_INITIAL (fn))
213 first_block = &BLOCK_CHAIN (DECL_INITIAL (fn));
214 else
215 first_block = &DECL_INITIAL (fn);
216 BLOCK_CHAIN (new_block) = *first_block;
217 *first_block = new_block;
218 }
219 /* Remember the remapped block. */
220 splay_tree_insert (id->decl_map,
221 (splay_tree_key) old_block,
222 (splay_tree_value) new_block);
223 }
224 /* If this is the end of a scope, set the SCOPE_STMT_BLOCK to be the
225 remapped block. */
226 else if (SCOPE_END_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
227 {
228 splay_tree_node n;
229
230 /* Find this block in the table of remapped things. */
231 n = splay_tree_lookup (id->decl_map,
232 (splay_tree_key) SCOPE_STMT_BLOCK (scope_stmt));
233 my_friendly_assert (n != NULL, 19991203);
234 SCOPE_STMT_BLOCK (scope_stmt) = (tree) n->value;
235 }
236 }
237
238 /* Copy the SCOPE_STMT pointed to by TP. */
239
240 static void
241 copy_scope_stmt (tp, walk_subtrees, id)
242 tree *tp;
243 int *walk_subtrees;
244 inline_data *id;
245 {
246 tree block;
247
248 /* Remember whether or not this statement was nullified. When
249 making a copy, copy_tree_r always sets SCOPE_NULLIFIED_P (and
250 doesn't copy the SCOPE_STMT_BLOCK) to free callers from having to
251 deal with copying BLOCKs if they do not wish to do so. */
252 block = SCOPE_STMT_BLOCK (*tp);
253 /* Copy (and replace) the statement. */
254 copy_tree_r (tp, walk_subtrees, NULL);
255 /* Restore the SCOPE_STMT_BLOCK. */
256 SCOPE_STMT_BLOCK (*tp) = block;
257
258 /* Remap the associated block. */
259 remap_block (*tp, NULL_TREE, id);
260 }
261
262 /* Called from copy_body via walk_tree. DATA is really an
263 `inline_data *'. */
264
265 static tree
266 copy_body_r (tp, walk_subtrees, data)
267 tree *tp;
268 int *walk_subtrees;
269 void *data;
270 {
271 inline_data* id;
272 tree fn;
273
274 /* Set up. */
275 id = (inline_data *) data;
276 fn = VARRAY_TOP_TREE (id->fns);
277
278 /* All automatic variables should have a DECL_CONTEXT indicating
279 what function they come from. */
280 if ((TREE_CODE (*tp) == VAR_DECL || TREE_CODE (*tp) == LABEL_DECL)
281 && DECL_NAMESPACE_SCOPE_P (*tp))
282 my_friendly_assert (DECL_EXTERNAL (*tp) || TREE_STATIC (*tp),
283 19991113);
284
285 /* If this is a RETURN_STMT, change it into an EXPR_STMT and a
286 GOTO_STMT with the RET_LABEL as its target. */
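/* For example, assuming the inlined function ends with `return x + 1;'
   (so its RETURN_EXPR is the assignment of `x + 1' to the RESULT_DECL),
   the rewritten sequence is conceptually

       <return variable> = x + 1;
       goto RET_LABEL;

   where the return variable is the one registered for the RESULT_DECL
   by declare_return_variable.  This is only an illustrative sketch; the
   nodes actually built here are an EXPR_STMT and a GOTO_STMT.  */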
287 if (TREE_CODE (*tp) == RETURN_STMT && id->ret_label)
288 {
289 tree return_stmt = *tp;
290 tree goto_stmt;
291
292 /* Build the GOTO_STMT. */
293 goto_stmt = build_stmt (GOTO_STMT, id->ret_label);
294 TREE_CHAIN (goto_stmt) = TREE_CHAIN (return_stmt);
295
296 /* If we're returning something, just turn that into an
297 assignment to the equivalent of the original
298 RESULT_DECL. */
299 if (RETURN_EXPR (return_stmt))
300 {
301 *tp = build_stmt (EXPR_STMT,
302 RETURN_EXPR (return_stmt));
303 STMT_IS_FULL_EXPR_P (*tp) = 1;
304 /* And then jump to the end of the function. */
305 TREE_CHAIN (*tp) = goto_stmt;
306 }
307 /* If we're not returning anything, just do the jump. */
308 else
309 *tp = goto_stmt;
310 }
311 /* Local variables and labels need to be replaced by equivalent
312 variables. We don't want to copy static variables; there's only
313 one of those, no matter how many times we inline the containing
314 function. */
315 else if (nonstatic_local_decl_p (*tp) && DECL_CONTEXT (*tp) == fn)
316 {
317 tree new_decl;
318
319 /* Remap the declaration. */
320 new_decl = remap_decl (*tp, id);
321 my_friendly_assert (new_decl != NULL_TREE, 19991203);
322 /* Replace this variable with the copy. */
323 STRIP_TYPE_NOPS (new_decl);
324 *tp = new_decl;
325 }
326 else if (nonstatic_local_decl_p (*tp)
327 && DECL_CONTEXT (*tp) != VARRAY_TREE (id->fns, 0))
328 my_friendly_abort (0);
329 else if (TREE_CODE (*tp) == SAVE_EXPR)
330 remap_save_expr (tp, id->decl_map, VARRAY_TREE (id->fns, 0),
331 walk_subtrees);
332 else if (TREE_CODE (*tp) == UNSAVE_EXPR)
333 /* UNSAVE_EXPRs should not be generated until expansion time. */
334 my_friendly_abort (19991113);
335 /* For a SCOPE_STMT, we must copy the associated block so that we
336 can write out debugging information for the inlined variables. */
337 else if (TREE_CODE (*tp) == SCOPE_STMT && !id->in_target_cleanup_p)
338 copy_scope_stmt (tp, walk_subtrees, id);
339 /* Otherwise, just copy the node. Note that copy_tree_r already
340 knows not to copy VAR_DECLs, etc., so this is safe. */
341 else
342 {
343 copy_tree_r (tp, walk_subtrees, NULL);
344
345 /* The copied TARGET_EXPR has never been expanded, even if the
346 original node was expanded already. */
347 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
348 {
349 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
350 TREE_OPERAND (*tp, 3) = NULL_TREE;
351 }
352 else if (TREE_CODE (*tp) == MODIFY_EXPR
353 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
354 && nonstatic_local_decl_p (TREE_OPERAND (*tp, 0))
355 && DECL_CONTEXT (TREE_OPERAND (*tp, 0)) == fn)
356 {
357 /* Some assignments, such as VAR = VAR, don't generate any rtl code
358 and thus don't count as variable modification. Avoid
359 keeping bogosities like 0 = 0. */
360 tree decl = TREE_OPERAND (*tp, 0), value;
361 splay_tree_node n;
362
363 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
364 if (n)
365 {
366 value = (tree) n->value;
367 STRIP_TYPE_NOPS (value);
368 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
369 *tp = value;
370 }
371 }
372 }
373
374 /* Keep iterating. */
375 return NULL_TREE;
376 }
377
378 /* Make a copy of the body of FN so that it can be inserted inline in
379 another function. */
380
381 static tree
382 copy_body (id)
383 inline_data *id;
384 {
385 tree body;
386
387 body = DECL_SAVED_TREE (VARRAY_TOP_TREE (id->fns));
388 walk_tree (&body, copy_body_r, id, NULL);
389
390 return body;
391 }
392
393 /* Generate code to initialize the parameters of the function at the
394 top of the stack in ID from the ARGS (presented as a TREE_LIST). */
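/* For example, when `inline int f (int a, int b)' is called as
   `f (i, j + 1)', the statements built here amount to, roughly,

       int a.1 = i;
       int b.2 = j + 1;

   except that a read-only, non-addressable parameter whose argument has
   no side-effects may simply be mapped to the argument value instead of
   getting a variable of its own (see the TREE_READONLY check below).
   The names `a.1' and `b.2' are illustrative only.  */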
395
396 static tree
397 initialize_inlined_parameters (id, args, fn)
398 inline_data *id;
399 tree args;
400 tree fn;
401 {
402 tree init_stmts;
403 tree parms;
404 tree a;
405 tree p;
406
407 /* Figure out what the parameters are. */
408 parms = DECL_ARGUMENTS (fn);
409
410 /* Start with no initializations whatsoever. */
411 init_stmts = NULL_TREE;
412
413 /* Loop through the parameter declarations, replacing each with an
414 equivalent VAR_DECL, appropriately initialized. */
415 for (p = parms, a = args; p; a = TREE_CHAIN (a), p = TREE_CHAIN (p))
416 {
417 tree init_stmt;
418 tree var;
419 tree value;
420
421 /* Find the initializer. */
422 value = TREE_VALUE (a);
423 /* If the parameter is never assigned to, we may not need to
424 create a new variable here at all. Instead, we may be able
425 to just use the argument value. */
426 if (TREE_READONLY (p)
427 && !TREE_ADDRESSABLE (p)
428 && !TREE_SIDE_EFFECTS (value))
429 {
430 /* Simplify the value, if possible. */
431 value = fold (decl_constant_value (value));
432
433 /* We can't risk substituting complex expressions. They
434 might contain variables that will be assigned to later.
435 Theoretically, we could check the expression to see if
436 all of the variables that determine its value are
437 read-only, but we don't bother. */
438 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
439 {
440 /* If this is a declaration, wrap it in a NOP_EXPR so that
441 we don't try to put the VALUE on the list of
442 BLOCK_VARS. */
443 if (DECL_P (value))
444 value = build1 (NOP_EXPR, TREE_TYPE (value), value);
445
446 splay_tree_insert (id->decl_map,
447 (splay_tree_key) p,
448 (splay_tree_value) value);
449 continue;
450 }
451 }
452
453 /* Make an equivalent VAR_DECL. */
454 var = copy_decl_for_inlining (p, fn, VARRAY_TREE (id->fns, 0));
455 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
456 that way, when the PARM_DECL is encountered, it will be
457 automatically replaced by the VAR_DECL. */
458 splay_tree_insert (id->decl_map,
459 (splay_tree_key) p,
460 (splay_tree_value) var);
461
462 /* Declare this new variable. */
463 init_stmt = build_stmt (DECL_STMT, var);
464 TREE_CHAIN (init_stmt) = init_stmts;
465 init_stmts = init_stmt;
466
467 /* Initialize this VAR_DECL from the equivalent argument. If
468 the argument is an object, created via a constructor or copy,
469 this will not result in an extra copy: the TARGET_EXPR
470 representing the argument will be bound to VAR, and the
471 object will be constructed in VAR. */
472 if (! TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
473 DECL_INITIAL (var) = value;
474 else
475 {
476 init_stmt = build_stmt (EXPR_STMT,
477 build (INIT_EXPR, TREE_TYPE (p),
478 var, value));
479 /* Add this initialization to the list. Note that we want the
480 declaration *after* the initialization because we are going
481 to reverse all the initialization statements below. */
482 TREE_CHAIN (init_stmt) = init_stmts;
483 init_stmts = init_stmt;
484 }
485 }
486
487 /* The initialization statements have been built up in reverse
488 order. Straighten them out now. */
489 return nreverse (init_stmts);
490 }
491
492 /* Declare a return variable to replace the RESULT_DECL for the
493 function we are calling. An appropriate DECL_STMT is returned.
494 The USE_STMT is filled in to contain a use of the declaration to
495 indicate the return value of the function. */
496
497 static tree
498 declare_return_variable (id, use_stmt)
499 struct inline_data *id;
500 tree *use_stmt;
501 {
502 tree fn = VARRAY_TOP_TREE (id->fns);
503 tree result = DECL_RESULT (fn);
504 tree var;
505 int aggregate_return_p;
506
507 /* We don't need to do anything for functions that don't return
508 anything. */
509 if (!result || VOID_TYPE_P (TREE_TYPE (result)))
510 {
511 *use_stmt = NULL_TREE;
512 return NULL_TREE;
513 }
514
515 /* Figure out whether or not FN returns an aggregate. */
516 aggregate_return_p = IS_AGGR_TYPE (TREE_TYPE (result));
517
518 /* If FN returns an aggregate then the caller will always create the
519 temporary (using a TARGET_EXPR) and the call will be the
520 initializing expression for the TARGET_EXPR. If we were just to
521 create a new VAR_DECL here, then the result of this function
522 would be copied (bitwise) into the variable initialized by the
523 TARGET_EXPR. That's incorrect, so we must transform any
524 references to the RESULT into references to the target. */
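/* Concretely, for something like

       struct S f ();
       struct S s = f ();

   the caller already has a TARGET_EXPR whose slot will become `s', so
   references to the RESULT_DECL of `f' are redirected to that slot (the
   topmost entry on id->target_exprs) rather than to a fresh VAR_DECL.
   This is an illustrative sketch of the common case.  */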
525 if (aggregate_return_p)
526 {
527 my_friendly_assert (VARRAY_ACTIVE_SIZE (id->target_exprs) != 0,
528 20000430);
529 var = TREE_OPERAND (VARRAY_TOP_TREE (id->target_exprs), 0);
530 my_friendly_assert
531 (same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (var),
532 TREE_TYPE (result)),
533 20000430);
534 }
535 /* Otherwise, make an appropriate copy. */
536 else
537 var = copy_decl_for_inlining (result, fn, VARRAY_TREE (id->fns, 0));
538
539 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
540 way, when the RESULT_DECL is encountered, it will be
541 automatically replaced by the VAR_DECL. */
542 splay_tree_insert (id->decl_map,
543 (splay_tree_key) result,
544 (splay_tree_value) var);
545
546 /* Build the USE_STMT. */
547 *use_stmt = build_stmt (EXPR_STMT, var);
548
549 /* Build the declaration statement if FN does not return an
550 aggregate. */
551 if (!aggregate_return_p)
552 return build_stmt (DECL_STMT, var);
553 /* If FN does return an aggregate, there's no need to declare the
554 return variable; we're using a variable in our caller's frame. */
555 else
556 return NULL_TREE;
557 }
558
559 /* Returns non-zero if FN is a function that can be inlined. */
560
561 static int
562 inlinable_function_p (fn, id)
563 tree fn;
564 inline_data *id;
565 {
566 int inlinable;
567
568 /* If we've already decided this function shouldn't be inlined,
569 there's no need to check again. */
570 if (DECL_UNINLINABLE (fn))
571 return 0;
572
573 /* Assume it is not inlinable. */
574 inlinable = 0;
575
576 /* If we're not inlining things, then nothing is inlinable. */
577 if (!flag_inline_trees)
578 ;
579 /* If the function was not declared `inline', then we don't inline
580 it. */
581 else if (!DECL_INLINE (fn))
582 ;
583 /* We can't inline varargs functions. */
584 else if (varargs_function_p (fn))
585 ;
586 /* All is well. We can inline this function. Traditionally, GCC
587 has refused to inline functions using alloca, or functions whose
588 values are returned in a PARALLEL, and a few other such obscure
589 conditions. We are not equally constrained at the tree level. */
590 else
591 inlinable = 1;
592
593 /* Squirrel away the result so that we don't have to check again. */
594 DECL_UNINLINABLE (fn) = !inlinable;
595
596 /* We can inline a template instantiation only if it's fully
597 instantiated. */
598 if (inlinable
599 && DECL_TEMPLATE_INFO (fn)
600 && TI_PENDING_TEMPLATE_FLAG (DECL_TEMPLATE_INFO (fn)))
601 {
602 fn = instantiate_decl (fn, /*defer_ok=*/0);
603 inlinable = !TI_PENDING_TEMPLATE_FLAG (DECL_TEMPLATE_INFO (fn));
604 }
605
606 /* If we don't have the function body available, we can't inline
607 it. */
608 if (!DECL_SAVED_TREE (fn))
609 inlinable = 0;
610
611 /* Don't do recursive inlining, either. We don't record this in
612 DECL_UNINLINABLE; we may be able to inline this function later. */
613 if (inlinable)
614 {
615 size_t i;
616
617 for (i = 0; i < VARRAY_ACTIVE_SIZE (id->fns); ++i)
618 if (VARRAY_TREE (id->fns, i) == fn)
619 return 0;
620
621 if (inlinable && DECL_LANG_SPECIFIC (fn) && DECL_INLINED_FNS (fn))
622 {
623 struct lang_decl_inlined_fns *ifn = DECL_INLINED_FNS (fn);
624
625 for (i = 0; i < ifn->num_fns; ++i)
626 if (ifn->fns [i] == VARRAY_TREE (id->fns, 0))
627 return 0;
628 }
629 }
630
631 /* Return the result. */
632 return inlinable;
633 }
634
635 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
636
637 static tree
638 expand_call_inline (tp, walk_subtrees, data)
639 tree *tp;
640 int *walk_subtrees;
641 void *data;
642 {
643 inline_data *id;
644 tree t;
645 tree expr;
646 tree chain;
647 tree fn;
648 tree scope_stmt;
649 tree use_stmt;
650 tree arg_inits;
651 tree *inlined_body;
652 splay_tree st;
653
654 /* See what we've got. */
655 id = (inline_data *) data;
656 t = *tp;
657
658 /* Recurse, but letting recursive invocations know that we are
659 inside the body of a TARGET_EXPR. */
660 if (TREE_CODE (*tp) == TARGET_EXPR)
661 {
662 int i, len = first_rtl_op (TARGET_EXPR);
663
664 /* We're walking our own subtrees. */
665 *walk_subtrees = 0;
666
667 /* Push *TP on the stack of pending TARGET_EXPRs. */
668 VARRAY_PUSH_TREE (id->target_exprs, *tp);
669
670 /* Actually walk over them. This loop is the body of
671 walk_tree, omitting the case where the TARGET_EXPR
672 itself is handled. */
673 for (i = 0; i < len; ++i)
674 {
675 if (i == 2)
676 ++id->in_target_cleanup_p;
677 walk_tree (&TREE_OPERAND (*tp, i), expand_call_inline, data,
678 NULL);
679 if (i == 2)
680 --id->in_target_cleanup_p;
681 }
682
683 /* We're done with this TARGET_EXPR now. */
684 VARRAY_POP (id->target_exprs);
685
686 return NULL_TREE;
687 }
688
689 if (TREE_CODE_CLASS (TREE_CODE (t)) == 't')
690 /* Because types were not copied in copy_body, CALL_EXPRs beneath
691 them should not be expanded. This can happen if the type is a
692 dynamic array type, for example. */
693 *walk_subtrees = 0;
694
695 /* From here on, we're only interested in CALL_EXPRs. */
696 if (TREE_CODE (t) != CALL_EXPR)
697 return NULL_TREE;
698
699 /* First, see if we can figure out what function is being called.
700 If we cannot, then there is no hope of inlining the function. */
701 fn = get_callee_fndecl (t);
702 if (!fn)
703 return NULL_TREE;
704
705 /* Don't try to inline functions that are not well-suited to
706 inlining. */
707 if (!inlinable_function_p (fn, id))
708 return NULL_TREE;
709
710 /* Set the current filename and line number to the function we are
711 inlining so that when we create new _STMT nodes here they get
712 line numbers corresponding to the function we are calling. We
713 wrap the whole inlined body in an EXPR_WITH_FILE_LOCATION as well
714 because individual statements don't record the filename. */
715 push_srcloc (fn->decl.filename, fn->decl.linenum);
716
717 /* Build a statement-expression containing code to initialize the
718 arguments, the actual inline expansion of the body, and a label
719 for the return statements within the function to jump to. The
720 type of the statement expression is the return type of the
721 function call. */
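/* Schematically, for a call `f (i)' to `inline int f (int a) { return a + 1; }',
   the statement-expression assembled below looks roughly like:

       ({ int a.1 = i;          <- initialize_inlined_parameters
          int <retvar>;         <- declare_return_variable
          <retvar> = a.1 + 1;   <- copy_body, with the RETURN_STMT
          goto RET_LABEL;          rewritten by copy_body_r
        RET_LABEL:;
          <retvar>; });         <- use_stmt

   The names are illustrative only, and the SCOPE_STMTs that bracket the
   parameter block are omitted from this sketch.  */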
722 expr = build_min (STMT_EXPR, TREE_TYPE (TREE_TYPE (fn)), NULL_TREE);
723
724 /* Local declarations will be replaced by their equivalents in this
725 map. */
726 st = id->decl_map;
727 id->decl_map = splay_tree_new (splay_tree_compare_pointers,
728 NULL, NULL);
729
730 /* Initialize the parameters. */
731 arg_inits = initialize_inlined_parameters (id, TREE_OPERAND (t, 1), fn);
732 /* Expand any inlined calls in the initializers. Do this before we
733 push FN on the stack of functions we are inlining; we want to
734 inline calls to FN that appear in the initializers for the
735 parameters. */
736 expand_calls_inline (&arg_inits, id);
737 /* And add them to the tree. */
738 STMT_EXPR_STMT (expr) = chainon (STMT_EXPR_STMT (expr), arg_inits);
739
740 /* Record the function we are about to inline so that we can avoid
741 recursing into it. */
742 VARRAY_PUSH_TREE (id->fns, fn);
743
744 /* Record the function we are about to inline if optimize_function
745 has not been called on it yet and we don't have it in the list. */
746 if (DECL_LANG_SPECIFIC (fn) && !DECL_INLINED_FNS (fn))
747 {
748 int i;
749
750 for (i = VARRAY_ACTIVE_SIZE (id->inlined_fns) - 1; i >= 0; i--)
751 if (VARRAY_TREE (id->inlined_fns, i) == fn)
752 break;
753 if (i < 0)
754 VARRAY_PUSH_TREE (id->inlined_fns, fn);
755 }
756
757 /* Return statements in the function body will be replaced by jumps
758 to the RET_LABEL. */
759 id->ret_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
760 DECL_CONTEXT (id->ret_label) = VARRAY_TREE (id->fns, 0);
761
762 /* Create a block to put the parameters in. We have to do this
763 after the parameters have been remapped because remapping
764 parameters is different from remapping ordinary variables. */
765 scope_stmt = build_stmt (SCOPE_STMT, DECL_INITIAL (fn));
766 SCOPE_BEGIN_P (scope_stmt) = 1;
767 SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
768 remap_block (scope_stmt, DECL_ARGUMENTS (fn), id);
769 TREE_CHAIN (scope_stmt) = STMT_EXPR_STMT (expr);
770 STMT_EXPR_STMT (expr) = scope_stmt;
771
772 /* Tell the debugging backends that this block represents the
773 outermost scope of the inlined function. */
774 if (SCOPE_STMT_BLOCK (scope_stmt))
775 BLOCK_ABSTRACT_ORIGIN (SCOPE_STMT_BLOCK (scope_stmt)) = DECL_ORIGIN (fn);
776
777 /* Declare the return variable for the function. */
778 STMT_EXPR_STMT (expr)
779 = chainon (STMT_EXPR_STMT (expr),
780 declare_return_variable (id, &use_stmt));
781
782 /* After we've initialized the parameters, we insert the body of the
783 function itself. */
784 inlined_body = &STMT_EXPR_STMT (expr);
785 while (*inlined_body)
786 inlined_body = &TREE_CHAIN (*inlined_body);
787 *inlined_body = copy_body (id);
788
789 /* Close the block for the parameters. */
790 scope_stmt = build_stmt (SCOPE_STMT, DECL_INITIAL (fn));
791 SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
792 my_friendly_assert (DECL_INITIAL (fn)
793 && TREE_CODE (DECL_INITIAL (fn)) == BLOCK,
794 19991203);
795 remap_block (scope_stmt, NULL_TREE, id);
796 STMT_EXPR_STMT (expr)
797 = chainon (STMT_EXPR_STMT (expr), scope_stmt);
798
799 /* After the body of the function comes the RET_LABEL. This must come
800 before we evaluate the returned value below, because that evaluation
801 may cause RTL to be generated. */
802 STMT_EXPR_STMT (expr)
803 = chainon (STMT_EXPR_STMT (expr),
804 build_stmt (LABEL_STMT, id->ret_label));
805
806 /* Finally, mention the returned value so that the value of the
807 statement-expression is the returned value of the function. */
808 STMT_EXPR_STMT (expr) = chainon (STMT_EXPR_STMT (expr), use_stmt);
809
810 /* Clean up. */
811 splay_tree_delete (id->decl_map);
812 id->decl_map = st;
813
814 /* The new expression has side-effects if the old one did. */
815 TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (t);
816
817 /* Replace the call by the inlined body. Wrap it in an
818 EXPR_WITH_FILE_LOCATION so that we'll get debugging line notes
819 pointing to the right place. */
820 chain = TREE_CHAIN (*tp);
821 *tp = build_expr_wfl (expr, DECL_SOURCE_FILE (fn), DECL_SOURCE_LINE (fn),
822 /*col=*/0);
823 EXPR_WFL_EMIT_LINE_NOTE (*tp) = 1;
824 TREE_CHAIN (*tp) = chain;
825 pop_srcloc ();
826
827 /* If the value of the new expression is ignored, that's OK. We
828 don't warn about this for CALL_EXPRs, so we shouldn't warn about
829 the equivalent inlined version either. */
830 TREE_USED (*tp) = 1;
831
832 /* Recurse into the body of the just inlined function. */
833 expand_calls_inline (inlined_body, id);
834 VARRAY_POP (id->fns);
835
836 /* Don't walk into subtrees. We've already handled them above. */
837 *walk_subtrees = 0;
838
839 /* Keep iterating. */
840 return NULL_TREE;
841 }
842
843 /* Walk over the entire tree *TP, replacing CALL_EXPRs with inline
844 expansions as appropriate. */
845
846 static void
847 expand_calls_inline (tp, id)
848 tree *tp;
849 inline_data *id;
850 {
851 /* Search through *TP, replacing all calls to inline functions by
852 appropriate equivalents. */
853 walk_tree (tp, expand_call_inline, id, NULL);
854 }
855
856 /* Optimize the body of FN. */
857
858 void
859 optimize_function (fn)
860 tree fn;
861 {
862 /* While in this function, we may choose to go off and compile
863 another function. For example, we might instantiate a function
864 in the hopes of inlining it. Normally, that wouldn't trigger any
865 actual RTL code-generation -- but it will if the template is
866 actually needed. (For example, if its address is taken, or if
867 some other function already refers to the template.) If
868 code-generation occurs, then garbage collection will occur, so we
869 must protect ourselves, just as we do while building up the body
870 of the function. */
871 ++function_depth;
872
873 /* Expand calls to inline functions. */
874 if (flag_inline_trees)
875 {
876 inline_data id;
877 tree prev_fn;
878 struct saved_scope *s;
879
880 /* Clear out ID. */
881 memset (&id, 0, sizeof (id));
882
883 /* Don't allow recursion into FN. */
884 VARRAY_TREE_INIT (id.fns, 32, "fns");
885 VARRAY_PUSH_TREE (id.fns, fn);
886 /* Or any functions that aren't finished yet. */
887 prev_fn = NULL_TREE;
888 if (current_function_decl)
889 {
890 VARRAY_PUSH_TREE (id.fns, current_function_decl);
891 prev_fn = current_function_decl;
892 }
893 for (s = scope_chain; s; s = s->prev)
894 if (s->function_decl && s->function_decl != prev_fn)
895 {
896 VARRAY_PUSH_TREE (id.fns, s->function_decl);
897 prev_fn = s->function_decl;
898 }
899
900 /* Create the stack of TARGET_EXPRs. */
901 VARRAY_TREE_INIT (id.target_exprs, 32, "target_exprs");
902
903 /* Create the list of functions this call will inline. */
904 VARRAY_TREE_INIT (id.inlined_fns, 32, "inlined_fns");
905
906 /* Replace all calls to inline functions with the bodies of those
907 functions. */
908 expand_calls_inline (&DECL_SAVED_TREE (fn), &id);
909
910 /* Clean up. */
911 VARRAY_FREE (id.fns);
912 VARRAY_FREE (id.target_exprs);
913 if (DECL_LANG_SPECIFIC (fn))
914 {
915 struct lang_decl_inlined_fns *ifn;
916
917 ifn = ggc_alloc (sizeof (struct lang_decl_inlined_fns)
918 + (VARRAY_ACTIVE_SIZE (id.inlined_fns) - 1)
919 * sizeof (tree));
920 ifn->num_fns = VARRAY_ACTIVE_SIZE (id.inlined_fns);
921 memcpy (&ifn->fns[0], &VARRAY_TREE (id.inlined_fns, 0),
922 ifn->num_fns * sizeof (tree));
923 DECL_INLINED_FNS (fn) = ifn;
924 }
925 VARRAY_FREE (id.inlined_fns);
926 }
927
928 /* Undo the increment of function_depth above. */
929 --function_depth;
930 }
931
932 /* Called from calls_setjmp_p via walk_tree. */
933
934 static tree
935 calls_setjmp_r (tp, walk_subtrees, data)
936 tree *tp;
937 int *walk_subtrees ATTRIBUTE_UNUSED;
938 void *data ATTRIBUTE_UNUSED;
939 {
940 /* We're only interested in FUNCTION_DECLS. */
941 if (TREE_CODE (*tp) != FUNCTION_DECL)
942 return NULL_TREE;
943
944 return setjmp_call_p (*tp) ? *tp : NULL_TREE;
945 }
946
947 /* Returns non-zero if FN calls `setjmp' or some other function that
948 can return more than once. This function is conservative; it may
949 occasionally return a non-zero value even when FN does not actually
950 call `setjmp'. */
951
952 int
953 calls_setjmp_p (fn)
954 tree fn;
955 {
956 return walk_tree_without_duplicates (&DECL_SAVED_TREE (fn),
957 calls_setjmp_r,
958 NULL) != NULL_TREE;
959 }
960
961 /* FN is a function that has a complete body. Clone the body as
962 necessary. Returns non-zero if there's no longer any need to
963 process the main body. */
964
965 int
966 maybe_clone_body (fn)
967 tree fn;
968 {
969 inline_data id;
970 tree clone;
971
972 /* We only clone constructors and destructors. */
973 if (!DECL_MAYBE_IN_CHARGE_CONSTRUCTOR_P (fn)
974 && !DECL_MAYBE_IN_CHARGE_DESTRUCTOR_P (fn))
975 return 0;
976
977 /* Emit the DWARF1 abstract instance. */
978 note_deferral_of_defined_inline_function (fn);
979
980 /* We know that any clones immediately follow FN in the TYPE_METHODS
981 list. */
982 for (clone = TREE_CHAIN (fn);
983 clone && DECL_CLONED_FUNCTION_P (clone);
984 clone = TREE_CHAIN (clone))
985 {
986 tree parm;
987 tree clone_parm;
988 int parmno;
989
990 /* Update CLONE's source position information to match FN's. */
991 DECL_SOURCE_FILE (clone) = DECL_SOURCE_FILE (fn);
992 DECL_SOURCE_LINE (clone) = DECL_SOURCE_LINE (fn);
993 DECL_INLINE (clone) = DECL_INLINE (fn);
994 DECL_THIS_INLINE (clone) = DECL_THIS_INLINE (fn);
995 DECL_COMDAT (clone) = DECL_COMDAT (fn);
996 DECL_WEAK (clone) = DECL_WEAK (fn);
997 DECL_ONE_ONLY (clone) = DECL_ONE_ONLY (fn);
998 DECL_SECTION_NAME (clone) = DECL_SECTION_NAME (fn);
999 DECL_USE_TEMPLATE (clone) = DECL_USE_TEMPLATE (fn);
1000 DECL_EXTERNAL (clone) = DECL_EXTERNAL (fn);
1001 DECL_INTERFACE_KNOWN (clone) = DECL_INTERFACE_KNOWN (fn);
1002 DECL_NOT_REALLY_EXTERN (clone) = DECL_NOT_REALLY_EXTERN (fn);
1003
1004 /* Start processing the function. */
1005 push_to_top_level ();
1006 start_function (NULL_TREE, clone, NULL_TREE, SF_PRE_PARSED);
1007
1008 /* Just clone the body, as if we were making an inline call.
1009 But, remap the parameters in the callee to the parameters of
1010 the caller. If there's an in-charge parameter, map it to an
1011 appropriate constant. */
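/* For instance, when FN is a maybe-in-charge constructor `S::S (int)',
   its clones are the complete-object and base-object constructors.
   While cloning the body for one of them, the in-charge parameter is
   mapped to the constant that in_charge_arg_for_name returns for that
   clone's name, so tests on it in the copied body can later be folded
   away.  (Illustrative summary of the remapping loop below.)  */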
1012 memset (&id, 0, sizeof (id));
1013 VARRAY_TREE_INIT (id.fns, 2, "fns");
1014 VARRAY_PUSH_TREE (id.fns, clone);
1015 VARRAY_PUSH_TREE (id.fns, fn);
1016
1017 /* Remap the parameters. */
1018 id.decl_map = splay_tree_new (splay_tree_compare_pointers,
1019 NULL, NULL);
1020 for (parmno = 0,
1021 parm = DECL_ARGUMENTS (fn),
1022 clone_parm = DECL_ARGUMENTS (clone);
1023 parm;
1024 ++parmno,
1025 parm = TREE_CHAIN (parm))
1026 {
1027 /* Map the in-charge parameter to an appropriate constant. */
1028 if (DECL_HAS_IN_CHARGE_PARM_P (fn) && parmno == 1)
1029 {
1030 tree in_charge;
1031 in_charge = in_charge_arg_for_name (DECL_NAME (clone));
1032 splay_tree_insert (id.decl_map,
1033 (splay_tree_key) parm,
1034 (splay_tree_value) in_charge);
1035 }
1036 else if (DECL_ARTIFICIAL (parm)
1037 && DECL_NAME (parm) == vtt_parm_identifier)
1038 {
1039 /* For a subobject constructor or destructor, the next
1040 argument is the VTT parameter. Remap the VTT_PARM
1041 from the CLONE to this parameter. */
1042 if (DECL_HAS_VTT_PARM_P (clone))
1043 {
1044 DECL_ABSTRACT_ORIGIN (clone_parm) = parm;
1045 splay_tree_insert (id.decl_map,
1046 (splay_tree_key) parm,
1047 (splay_tree_value) clone_parm);
1048 clone_parm = TREE_CHAIN (clone_parm);
1049 }
1050 /* Otherwise, map the VTT parameter to `NULL'. */
1051 else
1052 {
1053 splay_tree_insert (id.decl_map,
1054 (splay_tree_key) parm,
1055 (splay_tree_value) null_pointer_node);
1056 }
1057 }
1058 /* Map other parameters to their equivalents in the cloned
1059 function. */
1060 else
1061 {
1062 DECL_ABSTRACT_ORIGIN (clone_parm) = parm;
1063 splay_tree_insert (id.decl_map,
1064 (splay_tree_key) parm,
1065 (splay_tree_value) clone_parm);
1066 clone_parm = TREE_CHAIN (clone_parm);
1067 }
1068 }
1069
1070 /* Actually copy the body. */
1071 TREE_CHAIN (DECL_SAVED_TREE (clone)) = copy_body (&id);
1072
1073 /* Clean up. */
1074 splay_tree_delete (id.decl_map);
1075 VARRAY_FREE (id.fns);
1076
1077 /* Now, expand this function into RTL, if appropriate. */
1078 function_name_declared_p = 1;
1079 finish_function (0);
1080 BLOCK_ABSTRACT_ORIGIN (DECL_INITIAL (clone)) = DECL_INITIAL (fn);
1081 expand_body (clone);
1082 pop_from_top_level ();
1083 }
1084
1085 /* We don't need to process the original function any further. */
1086 return 1;
1087 }