1 /* Perform optimizations on tree structure.
2 Copyright (C) 1998, 1999, 2000 Free Software Foundation, Inc.
3 Written by Mark Mitchell (mark@codesourcery.com).
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "tree.h"
25 #include "cp-tree.h"
26 #include "rtl.h"
27 #include "insn-config.h"
28 #include "input.h"
29 #include "integrate.h"
30 #include "varray.h"
31
32 /* To Do:
33
34 o In order to make inlining-on-trees work, we pessimized
35 function-local static constants. In particular, they are now
36 always output, even when not addressed. Fix this by treating
37 function-local static constants just like global static
38 constants; the back-end already knows not to output them if they
39 are not needed.
40
41 o Provide heuristics to clamp inlining of recursive template
42 calls? */
43
44 /* Data required for function inlining. */
45
46 typedef struct inline_data
47 {
48 /* A stack of the functions we are inlining. For example, if we are
49 compiling `f', which calls `g', which calls `h', and we are
50 inlining the body of `h', the stack will contain `h', followed
51 by `g', followed by `f'. */
52 varray_type fns;
53 /* The label to jump to when a return statement is encountered. If
54 this value is NULL, then return statements will simply be
55 remapped as return statements, rather than as jumps. */
56 tree ret_label;
57 /* The map from local declarations in the inlined function to
58 equivalents in the function into which it is being inlined. */
59 splay_tree decl_map;
60 /* Nonzero if we are currently within the cleanup for a
61 TARGET_EXPR. */
62 int in_target_cleanup_p;
63 /* A stack of the TARGET_EXPRs that we are currently processing. */
64 varray_type target_exprs;
65 } inline_data;
66
67 /* Prototypes. */
68
69 static tree initialize_inlined_parameters PARAMS ((inline_data *, tree, tree));
70 static tree declare_return_variable PARAMS ((inline_data *, tree *));
71 static tree copy_body_r PARAMS ((tree *, int *, void *));
72 static tree copy_body PARAMS ((inline_data *));
73 static tree expand_call_inline PARAMS ((tree *, int *, void *));
74 static void expand_calls_inline PARAMS ((tree *, inline_data *));
75 static int inlinable_function_p PARAMS ((tree, inline_data *));
76 static tree remap_decl PARAMS ((tree, inline_data *));
77 static void remap_block PARAMS ((tree, tree, inline_data *));
78 static void copy_scope_stmt PARAMS ((tree *, int *, inline_data *));
79 static tree calls_setjmp_r PARAMS ((tree *, int *, void *));
80
81 /* Remap DECL during the copying of the BLOCK tree for the function.
82 ID describes the inlining in progress. */
83
84 static tree
85 remap_decl (decl, id)
86 tree decl;
87 inline_data *id;
88 {
89 splay_tree_node n;
90 tree fn;
91
92 /* We only remap local variables in the current function. */
93 fn = VARRAY_TOP_TREE (id->fns);
94 if (!nonstatic_local_decl_p (decl) || DECL_CONTEXT (decl) != fn)
95 return NULL_TREE;
96
97 /* See if we have remapped this declaration. */
98 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
99 /* If we didn't already have an equivalent for this declaration,
100 create one now. */
101 if (!n)
102 {
103 tree t;
104
105 /* Make a copy of the variable or label. */
106 t = copy_decl_for_inlining (decl, fn,
107 VARRAY_TREE (id->fns, 0));
108
109 /* The decl T could be a dynamic array or other variable size type,
110 in which case some fields need to be remapped because they may
111 contain SAVE_EXPRs. */
112 walk_tree (&DECL_SIZE (t), copy_body_r, id);
113 walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id);
114 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE
115 && TYPE_DOMAIN (TREE_TYPE (t)))
116 {
117 TREE_TYPE (t) = copy_node (TREE_TYPE (t));
118 TYPE_DOMAIN (TREE_TYPE (t))
119 = copy_node (TYPE_DOMAIN (TREE_TYPE (t)));
120 walk_tree (&TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (t))),
121 copy_body_r, id);
122 }
123
124 /* Remember it, so that if we encounter this local entity
125 again we can reuse this copy. */
126 n = splay_tree_insert (id->decl_map,
127 (splay_tree_key) decl,
128 (splay_tree_value) t);
129 }
130
131 return (tree) n->value;
132 }
133
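/* Illustration (a sketch, not part of the compiler and not meant to be
   compiled): if we inline

       inline int f (int n) { int i = n * 2; return i; }

   into some caller, the local `i' cannot simply be reused, since the
   caller may inline `f' more than once.  remap_decl hands back a fresh
   VAR_DECL -- call it `i.0'; the name is purely illustrative -- made by
   copy_decl_for_inlining for the outermost function on the stack (the
   one we are inlining into), and records decl_map[i] = i.0 so that
   every later reference to `i' in the copied body finds the same
   replacement.  */
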
134 /* Copy the SCOPE_STMT_BLOCK associated with SCOPE_STMT to contain
135 remapped versions of the variables therein. And hook the new block
136 into the block-tree. If non-NULL, the DECLS are declarations to
137 use instead of the BLOCK_VARS in the old block. */
138
139 static void
140 remap_block (scope_stmt, decls, id)
141 tree scope_stmt;
142 tree decls;
143 inline_data *id;
144 {
145 /* We cannot do this in the cleanup for a TARGET_EXPR since we do
146 not know whether or not expand_expr will actually write out the
147 code we put there. If it does not, then we'll have more BLOCKs
148 than block-notes, and things will go awry. At some point, we
149 should make the back-end handle BLOCK notes in a tidier way,
150 without requiring a strict correspondence to the block-tree; then
151 this check can go. */
152 if (id->in_target_cleanup_p)
153 {
154 SCOPE_STMT_BLOCK (scope_stmt) = NULL_TREE;
155 return;
156 }
157
158 /* If this is the beginning of a scope, remap the associated BLOCK. */
159 if (SCOPE_BEGIN_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
160 {
161 tree old_block;
162 tree new_block;
163 tree old_var;
164 tree *first_block;
165 tree fn;
166
167 /* Make the new block. */
168 old_block = SCOPE_STMT_BLOCK (scope_stmt);
169 new_block = make_node (BLOCK);
170 TREE_USED (new_block) = TREE_USED (old_block);
171 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
172 SCOPE_STMT_BLOCK (scope_stmt) = new_block;
173
174 /* Remap its variables. */
175 for (old_var = decls ? decls : BLOCK_VARS (old_block);
176 old_var;
177 old_var = TREE_CHAIN (old_var))
178 {
179 tree new_var;
180
181 /* Remap the variable. */
182 new_var = remap_decl (old_var, id);
183 /* If we didn't remap this variable, we can't mess with
184 its TREE_CHAIN. If we remapped this variable to
185 something other than a declaration (say, if we mapped it
186 to a constant), then we must similarly omit any mention
187 of it here. */
188 if (!new_var || !DECL_P (new_var))
189 ;
190 else
191 {
192 TREE_CHAIN (new_var) = BLOCK_VARS (new_block);
193 BLOCK_VARS (new_block) = new_var;
194 }
195 }
196 /* We put the BLOCK_VARS in reverse order; fix that now. */
197 BLOCK_VARS (new_block) = nreverse (BLOCK_VARS (new_block));
198 /* Attach this new block after the DECL_INITIAL block for the
199 function into which this block is being inlined. In
200 rest_of_compilation we will straighten out the BLOCK tree. */
201 fn = VARRAY_TREE (id->fns, 0);
202 if (DECL_INITIAL (fn))
203 first_block = &BLOCK_CHAIN (DECL_INITIAL (fn));
204 else
205 first_block = &DECL_INITIAL (fn);
206 BLOCK_CHAIN (new_block) = *first_block;
207 *first_block = new_block;
208 /* Remember the remapped block. */
209 splay_tree_insert (id->decl_map,
210 (splay_tree_key) old_block,
211 (splay_tree_value) new_block);
212 }
213 /* If this is the end of a scope, set the SCOPE_STMT_BLOCK to be the
214 remapped block. */
215 else if (SCOPE_END_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
216 {
217 splay_tree_node n;
218
219 /* Find this block in the table of remapped things. */
220 n = splay_tree_lookup (id->decl_map,
221 (splay_tree_key) SCOPE_STMT_BLOCK (scope_stmt));
222 my_friendly_assert (n != NULL, 19991203);
223 SCOPE_STMT_BLOCK (scope_stmt) = (tree) n->value;
224 }
225 }
226
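/* Illustration (a sketch, not compiled): each scope copied out of the
   inlinee gets a brand-new BLOCK whose BLOCK_ABSTRACT_ORIGIN points
   back at the original, and the new BLOCK is spliced in directly after
   the caller's outermost block:

       DECL_INITIAL (caller) --BLOCK_CHAIN--> [new inlinee block] --> ...

   As the comment above notes, the BLOCK tree is only straightened into
   proper nesting later, in rest_of_compilation.  */
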
227 /* Copy the SCOPE_STMT pointed to by TP. */
228
229 static void
230 copy_scope_stmt (tp, walk_subtrees, id)
231 tree *tp;
232 int *walk_subtrees;
233 inline_data *id;
234 {
235 tree block;
236
237 /* Remember the SCOPE_STMT_BLOCK before making the copy. When
238 making a copy, copy_tree_r always sets SCOPE_NULLIFIED_P (and
239 doesn't copy the SCOPE_STMT_BLOCK) to free callers from having to
240 deal with copying BLOCKs if they do not wish to do so. */
241 block = SCOPE_STMT_BLOCK (*tp);
242 /* Copy (and replace) the statement. */
243 copy_tree_r (tp, walk_subtrees, NULL);
244 /* Restore the SCOPE_STMT_BLOCK. */
245 SCOPE_STMT_BLOCK (*tp) = block;
246
247 /* Remap the associated block. */
248 remap_block (*tp, NULL_TREE, id);
249 }
250
251 /* Called from copy_body via walk_tree. DATA is really an
252 `inline_data *'. */
253
254 static tree
255 copy_body_r (tp, walk_subtrees, data)
256 tree *tp;
257 int *walk_subtrees;
258 void *data;
259 {
260 inline_data* id;
261 tree fn;
262
263 /* Set up. */
264 id = (inline_data *) data;
265 fn = VARRAY_TOP_TREE (id->fns);
266
267 /* All automatic variables should have a DECL_CONTEXT indicating
268 what function they come from. */
269 if ((TREE_CODE (*tp) == VAR_DECL || TREE_CODE (*tp) == LABEL_DECL)
270 && DECL_NAMESPACE_SCOPE_P (*tp))
271 my_friendly_assert (DECL_EXTERNAL (*tp) || TREE_STATIC (*tp),
272 19991113);
273
274 /* If this is a RETURN_STMT, change it into an EXPR_STMT and a
275 GOTO_STMT with the RET_LABEL as its target. */
276 if (TREE_CODE (*tp) == RETURN_STMT && id->ret_label)
277 {
278 tree return_stmt = *tp;
279 tree goto_stmt;
280
281 /* Build the GOTO_STMT. */
282 goto_stmt = build_min_nt (GOTO_STMT, id->ret_label);
283 TREE_CHAIN (goto_stmt) = TREE_CHAIN (return_stmt);
284
285 /* If we're returning something, just turn that into an
286 assignment into the equivalent of the original
287 RESULT_DECL. */
288 if (RETURN_EXPR (return_stmt))
289 {
290 *tp = build_min_nt (EXPR_STMT,
291 RETURN_EXPR (return_stmt));
292 /* And then jump to the end of the function. */
293 TREE_CHAIN (*tp) = goto_stmt;
294 }
295 /* If we're not returning anything just do the jump. */
296 else
297 *tp = goto_stmt;
298 }
299 /* Local variables and labels need to be replaced by equivalent
300 variables. We don't want to copy static variables; there's only
301 one of those, no matter how many times we inline the containing
302 function. */
303 else if (nonstatic_local_decl_p (*tp) && DECL_CONTEXT (*tp) == fn)
304 {
305 tree new_decl;
306
307 /* Remap the declaration. */
308 new_decl = remap_decl (*tp, id);
309 my_friendly_assert (new_decl != NULL_TREE, 19991203);
310 /* Replace this variable with the copy. */
311 STRIP_TYPE_NOPS (new_decl);
312 *tp = new_decl;
313 }
314 else if (nonstatic_local_decl_p (*tp)
315 && DECL_CONTEXT (*tp) != VARRAY_TREE (id->fns, 0))
316 my_friendly_abort (0);
317 else if (TREE_CODE (*tp) == SAVE_EXPR)
318 remap_save_expr (tp, id->decl_map, VARRAY_TREE (id->fns, 0),
319 walk_subtrees);
320 else if (TREE_CODE (*tp) == UNSAVE_EXPR)
321 my_friendly_abort (19991113);
322 /* For a SCOPE_STMT, we must copy the associated block so that we
323 can write out debugging information for the inlined variables. */
324 else if (TREE_CODE (*tp) == SCOPE_STMT && !id->in_target_cleanup_p)
325 copy_scope_stmt (tp, walk_subtrees, id);
326 /* Otherwise, just copy the node. Note that copy_tree_r already
327 knows not to copy VAR_DECLs, etc., so this is safe. */
328 else
329 {
330 copy_tree_r (tp, walk_subtrees, NULL);
331
332 /* The copied TARGET_EXPR has never been expanded, even if the
333 original node was expanded already. */
334 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
335 {
336 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
337 TREE_OPERAND (*tp, 3) = NULL_TREE;
338 }
339 /* Similarly, if we're copying a CALL_EXPR, the RTL for the
340 result is no longer valid. */
341 else if (TREE_CODE (*tp) == CALL_EXPR)
342 CALL_EXPR_RTL (*tp) = NULL_RTX;
343 }
344
345 /* Keep iterating. */
346 return NULL_TREE;
347 }
348
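/* Illustration (a sketch, not compiled): with a non-NULL ret_label,
   copy_body_r rewrites each

       return <expr>;

   in the inlinee into the pair of statements

       <expr>;                     // an EXPR_STMT; the RESULT_DECL
                                   //   inside <expr> has already been
                                   //   remapped to the return variable,
                                   //   so this is really `retvar = ...'
       goto <ret_label>;

   so that all returns in the copied body converge on the single label
   emitted by expand_call_inline.  A plain `return;' becomes just the
   goto.  */
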
349 /* Make a copy of the body of FN so that it can be inserted inline in
350 another function. */
351
352 static tree
353 copy_body (id)
354 inline_data *id;
355 {
356 tree body;
357
358 body = DECL_SAVED_TREE (VARRAY_TOP_TREE (id->fns));
359 walk_tree (&body, copy_body_r, id);
360
361 return body;
362 }
363
364 /* Generate code to initialize the parameters of the function at the
365 top of the stack in ID from the ARGS (presented as a TREE_LIST). */
366
367 static tree
368 initialize_inlined_parameters (id, args, fn)
369 inline_data *id;
370 tree args;
371 tree fn;
372 {
373 tree init_stmts;
374 tree parms;
375 tree a;
376 tree p;
377
378 /* Figure out what the parameters are. */
379 parms = DECL_ARGUMENTS (fn);
380
381 /* Start with no initializations whatsoever. */
382 init_stmts = NULL_TREE;
383
384 /* Loop through the parameter declarations, replacing each with an
385 equivalent VAR_DECL, appropriately initialized. */
386 for (p = parms, a = args; p; a = TREE_CHAIN (a), p = TREE_CHAIN (p))
387 {
388 tree init_stmt;
389 tree var;
390 tree value;
391
392 /* Find the initializer. */
393 value = TREE_VALUE (a);
394 /* If the parameter is never assigned to, we may not need to
395 create a new variable here at all. Instead, we may be able
396 to just use the argument value. */
397 if (TREE_READONLY (p)
398 && !TREE_ADDRESSABLE (p)
399 && !TREE_SIDE_EFFECTS (value))
400 {
401 /* Simplify the value, if possible. */
402 value = fold (decl_constant_value (value));
403
404 /* We can't risk substituting complex expressions. They
405 might contain variables that will be assigned to later.
406 Theoretically, we could check the expression to see if
407 all of the variables that determine its value are
408 read-only, but we don't bother. */
409 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
410 {
411 /* If this is a declaration, wrap it in a NOP_EXPR so that
412 we don't try to put the VALUE on the list of
413 BLOCK_VARS. */
414 if (DECL_P (value))
415 value = build1 (NOP_EXPR, TREE_TYPE (value), value);
416
417 splay_tree_insert (id->decl_map,
418 (splay_tree_key) p,
419 (splay_tree_value) value);
420 continue;
421 }
422 }
423
424 /* Make an equivalent VAR_DECL. */
425 var = copy_decl_for_inlining (p, fn, VARRAY_TREE (id->fns, 0));
426 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
427 that way, when the PARM_DECL is encountered, it will be
428 automatically replaced by the VAR_DECL. */
429 splay_tree_insert (id->decl_map,
430 (splay_tree_key) p,
431 (splay_tree_value) var);
432
433 /* Declare this new variable. */
434 init_stmt = build_min_nt (DECL_STMT, var);
435 TREE_CHAIN (init_stmt) = init_stmts;
436 init_stmts = init_stmt;
437
438 /* Initialize this VAR_DECL from the equivalent argument. If
439 the argument is an object, created via a constructor or copy,
440 this will not result in an extra copy: the TARGET_EXPR
441 representing the argument will be bound to VAR, and the
442 object will be constructed in VAR. */
443 if (! TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
444 DECL_INITIAL (var) = value;
445 else
446 {
447 init_stmt = build_min_nt (EXPR_STMT,
448 build (INIT_EXPR, TREE_TYPE (p),
449 var, value));
450 /* Add this initialization to the list. Note that we want the
451 declaration *after* the initialization because we are going
452 to reverse all the initialization statements below. */
453 TREE_CHAIN (init_stmt) = init_stmts;
454 init_stmts = init_stmt;
455 }
456 }
457
458 /* The initialization statements have been built up in reverse
459 order. Straighten them out now. */
460 return nreverse (init_stmts);
461 }
462
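/* Illustration (a sketch, not compiled): for a call such as `f (3, x)'
   to `inline int f (const int a, int b)', the constant 3 can be bound
   to `a' directly through the decl_map -- no new variable, no copy --
   because `a' is read-only, not addressable, and the argument has no
   side effects.  `b', on the other hand, gets a fresh VAR_DECL,
   roughly

       int b.1 = x;    // DECL_STMT plus initialization, as built above

   where the name `b.1' is purely illustrative.  */
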
463 /* Declare a return variable to replace the RESULT_DECL for the
464 function we are calling. An appropriate DECL_STMT is returned.
465 The USE_STMT is filled in to contain a use of the declaration to
466 indicate the return value of the function. */
467
468 static tree
469 declare_return_variable (id, use_stmt)
470 struct inline_data *id;
471 tree *use_stmt;
472 {
473 tree fn = VARRAY_TOP_TREE (id->fns);
474 tree result = DECL_RESULT (fn);
475 tree var;
476 int aggregate_return_p;
477
478 /* We don't need to do anything for functions that don't return
479 anything. */
480 if (!result || same_type_p (TYPE_MAIN_VARIANT (TREE_TYPE (result)),
481 void_type_node))
482 {
483 *use_stmt = NULL_TREE;
484 return NULL_TREE;
485 }
486
487 /* Figure out whether or not FN returns an aggregate. */
488 aggregate_return_p = IS_AGGR_TYPE (TREE_TYPE (result));
489
490 /* If FN returns an aggregate then the caller will always create the
491 temporary (using a TARGET_EXPR) and the call will be the
492 initializing expression for the TARGET_EXPR. If we were just to
493 create a new VAR_DECL here, then the result of this function
494 would be copied (bitwise) into the variable initialized by the
495 TARGET_EXPR. That's incorrect, so we must transform any
496 references to the RESULT into references to the target. */
497 if (aggregate_return_p)
498 {
499 my_friendly_assert (id->target_exprs->elements_used != 0,
500 20000430);
501 var = TREE_OPERAND (VARRAY_TOP_TREE (id->target_exprs), 0);
502 my_friendly_assert
503 (same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (var),
504 TREE_TYPE (result)),
505 20000430);
506 }
507 /* Otherwise, make an appropriate copy. */
508 else
509 var = copy_decl_for_inlining (result, fn, VARRAY_TREE (id->fns, 0));
510
511 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
512 way, when the RESULT_DECL is encountered, it will be
513 automatically replaced by the VAR_DECL. */
514 splay_tree_insert (id->decl_map,
515 (splay_tree_key) result,
516 (splay_tree_value) var);
517
518 /* Build the USE_STMT. */
519 *use_stmt = build_min_nt (EXPR_STMT, var);
520
521 /* Build the declaration statement if FN does not return an
522 aggregate. */
523 if (!aggregate_return_p)
524 return build_min_nt (DECL_STMT, var);
525 /* If FN does return an aggregate, there's no need to declare the
526 return variable; we're using a variable in our caller's frame. */
527 else
528 return NULL_TREE;
529 }
530
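/* Illustration (a sketch, not compiled): for `inline int f ()' we
   create a fresh variable, say `retval.2' (name illustrative), map the
   RESULT_DECL to it, and let the statement-expression end with
   `retval.2;' so that it yields the return value.  For a function
   returning a class type, the caller has already wrapped the call in a
   TARGET_EXPR; we map the RESULT_DECL to that TARGET_EXPR's slot
   instead and emit no declaration at all, so the inlined body
   constructs its result directly in the caller's temporary.  */
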
531 /* Returns non-zero if FN is a function that can be inlined. */
532
533 static int
534 inlinable_function_p (fn, id)
535 tree fn;
536 inline_data *id;
537 {
538 int inlinable;
539
540 /* If we've already decided this function shouldn't be inlined,
541 there's no need to check again. */
542 if (DECL_UNINLINABLE (fn))
543 return 0;
544
545 /* Assume it is not inlinable. */
546 inlinable = 0;
547
548 /* If we're not inlining things, then nothing is inlinable. */
549 if (!flag_inline_trees)
550 ;
551 /* If the function was not declared `inline', then we don't inline
552 it. */
553 else if (!DECL_INLINE (fn))
554 ;
555 /* We can't inline varargs functions. */
556 else if (varargs_function_p (fn))
557 ;
558 /* All is well. We can inline this function. Traditionally, GCC
559 has refused to inline functions using alloca, or functions whose
560 values are returned in a PARALLEL, and a few other such obscure
561 conditions. We are not equally constrained at the tree level. */
562 else
563 inlinable = 1;
564
565 /* Squirrel away the result so that we don't have to check again. */
566 DECL_UNINLINABLE (fn) = !inlinable;
567
568 /* We can inline a template instantiation only if it's fully
569 instantiated. */
570 if (inlinable
571 && DECL_TEMPLATE_INFO (fn)
572 && TI_PENDING_TEMPLATE_FLAG (DECL_TEMPLATE_INFO (fn)))
573 {
574 fn = instantiate_decl (fn, /*defer_ok=*/0);
575 inlinable = !TI_PENDING_TEMPLATE_FLAG (DECL_TEMPLATE_INFO (fn));
576 }
577
578 /* If we don't have the function body available, we can't inline
579 it. */
580 if (!DECL_SAVED_TREE (fn))
581 inlinable = 0;
582
583 /* Don't do recursive inlining, either. We don't record this in
584 DECL_UNINLINABLE; we may be able to inline this function later. */
585 if (inlinable)
586 {
587 size_t i;
588
589 for (i = 0; i < id->fns->elements_used; ++i)
590 if (VARRAY_TREE (id->fns, i) == fn)
591 inlinable = 0;
592 }
593
594 /* Return the result. */
595 return inlinable;
596 }
597
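/* Illustration (a sketch, not compiled): with flag_inline_trees set,
   something like

       inline int min (int a, int b) { return a < b ? a : b; }

   passes the tests above, whereas a varargs function is rejected
   outright, and a function whose body was never saved (no
   DECL_SAVED_TREE) or that would be inlined into itself is skipped for
   this call only, without marking it DECL_UNINLINABLE.  */
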
598 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
599
600 static tree
601 expand_call_inline (tp, walk_subtrees, data)
602 tree *tp;
603 int *walk_subtrees;
604 void *data;
605 {
606 inline_data *id;
607 tree t;
608 tree expr;
609 tree chain;
610 tree fn;
611 tree scope_stmt;
612 tree use_stmt;
613 tree arg_inits;
614 splay_tree st;
615
616 /* See what we've got. */
617 id = (inline_data *) data;
618 t = *tp;
619
620 /* Recurse, but let recursive invocations know that we are
621 inside the body of a TARGET_EXPR. */
622 if (TREE_CODE (*tp) == TARGET_EXPR)
623 {
624 int i, len = first_rtl_op (TARGET_EXPR);
625
626 /* We're walking our own subtrees. */
627 *walk_subtrees = 0;
628
629 /* Push *TP on the stack of pending TARGET_EXPRs. */
630 VARRAY_PUSH_TREE (id->target_exprs, *tp);
631
632 /* Actually walk over them. This loop is the body of
633 walk_tree, omitting the case where the TARGET_EXPR
634 itself is handled. */
635 for (i = 0; i < len; ++i)
636 {
637 if (i == 2)
638 ++id->in_target_cleanup_p;
639 walk_tree (&TREE_OPERAND (*tp, i), expand_call_inline, data);
640 if (i == 2)
641 --id->in_target_cleanup_p;
642 }
643
644 /* We're done with this TARGET_EXPR now. */
645 VARRAY_POP (id->target_exprs);
646
647 return NULL_TREE;
648 }
649
650 /* From here on, we're only interested in CALL_EXPRs. */
651 if (TREE_CODE (t) != CALL_EXPR)
652 return NULL_TREE;
653
654 /* First, see if we can figure out what function is being called.
655 If we cannot, then there is no hope of inlining the function. */
656 fn = get_callee_fndecl (t);
657 if (!fn)
658 return NULL_TREE;
659
660 /* Don't try to inline functions that are not well-suited to
661 inlining. */
662 if (!inlinable_function_p (fn, id))
663 return NULL_TREE;
664
665 /* Set the current filename and line number to the function we are
666 inlining so that when we create new _STMT nodes here they get
667 line numbers corresponding to the function we are calling. We
668 wrap the whole inlined body in an EXPR_WITH_FILE_AND_LINE as well
669 because individual statements don't record the filename. */
670 push_srcloc (fn->decl.filename, fn->decl.linenum);
671
672 /* Build a statement-expression containing code to initialize the
673 arguments, the actual inline expansion of the body, and a label
674 for the return statements within the function to jump to. The
675 type of the statement expression is the return type of the
676 function call. */
677 expr = build_min (STMT_EXPR, TREE_TYPE (TREE_TYPE (fn)), NULL_TREE);
678
679 /* Local declarations will be replaced by their equivalents in this
680 map. */
681 st = id->decl_map;
682 id->decl_map = splay_tree_new (splay_tree_compare_pointers,
683 NULL, NULL);
684
685 /* Initialize the parameters. */
686 arg_inits = initialize_inlined_parameters (id, TREE_OPERAND (t, 1), fn);
687 /* Expand any inlined calls in the initializers. Do this before we
688 push FN on the stack of functions we are inlining; we want to
689 inline calls to FN that appear in the initializers for the
690 parameters. */
691 expand_calls_inline (&arg_inits, id);
692 /* And add them to the tree. */
693 STMT_EXPR_STMT (expr) = chainon (STMT_EXPR_STMT (expr), arg_inits);
694
695 /* Record the function we are about to inline so that we can avoid
696 recursing into it. */
697 VARRAY_PUSH_TREE (id->fns, fn);
698
699 /* Return statements in the function body will be replaced by jumps
700 to the RET_LABEL. */
701 id->ret_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
702 DECL_CONTEXT (id->ret_label) = VARRAY_TREE (id->fns, 0);
703
704 /* Create a block to put the parameters in. We have to do this
705 after the parameters have been remapped because remapping
706 parameters is different from remapping ordinary variables. */
707 scope_stmt = build_min_nt (SCOPE_STMT, DECL_INITIAL (fn));
708 SCOPE_BEGIN_P (scope_stmt) = 1;
709 SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
710 remap_block (scope_stmt, DECL_ARGUMENTS (fn), id);
711 TREE_CHAIN (scope_stmt) = STMT_EXPR_STMT (expr);
712 STMT_EXPR_STMT (expr) = scope_stmt;
713
714 /* Tell the debugging backends that this block represents the
715 outermost scope of the inlined function. */
716 if (SCOPE_STMT_BLOCK (scope_stmt))
717 BLOCK_ABSTRACT_ORIGIN (SCOPE_STMT_BLOCK (scope_stmt)) = DECL_ORIGIN (fn);
718
719 /* Declare the return variable for the function. */
720 STMT_EXPR_STMT (expr)
721 = chainon (STMT_EXPR_STMT (expr),
722 declare_return_variable (id, &use_stmt));
723
724 /* After we've initialized the parameters, we insert the body of the
725 function itself. */
726 STMT_EXPR_STMT (expr)
727 = chainon (STMT_EXPR_STMT (expr), copy_body (id));
728
729 /* Close the block for the parameters. */
730 scope_stmt = build_min_nt (SCOPE_STMT, DECL_INITIAL (fn));
731 SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
732 my_friendly_assert (DECL_INITIAL (fn)
733 && TREE_CODE (DECL_INITIAL (fn)) == BLOCK,
734 19991203);
735 remap_block (scope_stmt, NULL_TREE, id);
736 STMT_EXPR_STMT (expr)
737 = chainon (STMT_EXPR_STMT (expr), scope_stmt);
738
739 /* After the body of the function comes the RET_LABEL. This must come
740 before we evaluate the returned value below, because that evaluation
741 may cause RTL to be generated. */
742 STMT_EXPR_STMT (expr)
743 = chainon (STMT_EXPR_STMT (expr),
744 build_min_nt (LABEL_STMT, id->ret_label));
745
746 /* Finally, mention the returned value so that the value of the
747 statement-expression is the returned value of the function. */
748 STMT_EXPR_STMT (expr) = chainon (STMT_EXPR_STMT (expr), use_stmt);
749
750 /* Clean up. */
751 splay_tree_delete (id->decl_map);
752 id->decl_map = st;
753
754 /* The new expression has side-effects if the old one did. */
755 TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (t);
756
757 /* Replace the call by the inlined body. Wrap it in an
758 EXPR_WITH_FILE_LOCATION so that we'll get debugging line notes
759 pointing to the right place. */
760 chain = TREE_CHAIN (*tp);
761 *tp = build_expr_wfl (expr, DECL_SOURCE_FILE (fn), DECL_SOURCE_LINE (fn),
762 /*col=*/0);
763 EXPR_WFL_EMIT_LINE_NOTE (*tp) = 1;
764 TREE_CHAIN (*tp) = chain;
765 pop_srcloc ();
766
767 /* If the value of the new expression is ignored, that's OK. We
768 don't warn about this for CALL_EXPRs, so we shouldn't warn about
769 the equivalent inlined version either. */
770 TREE_USED (*tp) = 1;
771
772 /* Recurse into the body of the just inlined function. */
773 expand_calls_inline (tp, id);
774 VARRAY_POP (id->fns);
775
776 /* Don't walk into subtrees. We've already handled them above. */
777 *walk_subtrees = 0;
778
779 /* Keep iterating. */
780 return NULL_TREE;
781 }
782
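/* Illustration (a sketch, not compiled): putting the pieces above
   together, a call like

       g (f (a));    where    inline int f (int x) { return x + 1; }

   is replaced, conceptually, by a GNU statement-expression:

       g (({ {                        // SCOPE_STMT: parameter scope
               int x.3 = a;           // initialize_inlined_parameters
               int retval.4;          // declare_return_variable
               retval.4 = x.3 + 1;    // copy_body: `return' turns into
               goto ret.5;            //   an assignment plus a jump
             }                        // closing SCOPE_STMT
           ret.5:;                    // LABEL_STMT for id->ret_label
             retval.4; }));           // use_stmt gives the value

   all wrapped in an EXPR_WITH_FILE_LOCATION carrying f's source
   position.  The `.N' names are purely illustrative; the real trees
   are STMT_EXPR, DECL_STMT, EXPR_STMT and friends built with
   build_min_nt above.  */
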
783 /* Walk over the entire tree *TP, replacing CALL_EXPRs with inline
784 expansions as appropriate. */
785
786 static void
787 expand_calls_inline (tp, id)
788 tree *tp;
789 inline_data *id;
790 {
791 /* Search through *TP, replacing all calls to inline functions by
792 appropriate equivalents. */
793 walk_tree (tp, expand_call_inline, id);
794 }
795
796 /* Optimize the body of FN. */
797
798 void
799 optimize_function (fn)
800 tree fn;
801 {
802 /* Expand calls to inline functions. */
803 if (flag_inline_trees)
804 {
805 inline_data id;
806 tree prev_fn;
807 struct saved_scope *s;
808
809 /* Clear out ID. */
810 memset (&id, 0, sizeof (id));
811
812 /* Don't allow recursion into FN. */
813 VARRAY_TREE_INIT (id.fns, 32, "fns");
814 VARRAY_PUSH_TREE (id.fns, fn);
815 /* Or any functions that aren't finished yet. */
816 prev_fn = NULL_TREE;
817 if (current_function_decl)
818 {
819 VARRAY_PUSH_TREE (id.fns, current_function_decl);
820 prev_fn = current_function_decl;
821 }
822 for (s = scope_chain; s; s = s->prev)
823 if (s->function_decl && s->function_decl != prev_fn)
824 {
825 VARRAY_PUSH_TREE (id.fns, s->function_decl);
826 prev_fn = s->function_decl;
827 }
828
829 /* Create the stack of TARGET_EXPRs. */
830 VARRAY_TREE_INIT (id.target_exprs, 32, "target_exprs");
831
832 /* Replace all calls to inline functions with the bodies of those
833 functions. */
834 expand_calls_inline (&DECL_SAVED_TREE (fn), &id);
835
836 /* Clean up. */
837 VARRAY_FREE (id.fns);
838 VARRAY_FREE (id.target_exprs);
839 }
840 }
841
842 /* Called from calls_setjmp_p via walk_tree. */
843
844 static tree
845 calls_setjmp_r (tp, walk_subtrees, data)
846 tree *tp;
847 int *walk_subtrees ATTRIBUTE_UNUSED;
848 void *data ATTRIBUTE_UNUSED;
849 {
850 /* We're only interested in FUNCTION_DECLS. */
851 if (TREE_CODE (*tp) != FUNCTION_DECL)
852 return NULL_TREE;
853
854 return setjmp_call_p (*tp) ? *tp : NULL_TREE;
855 }
856
857 /* Returns non-zero if FN calls `setjmp' or some other function that
858 can return more than once. This function is conservative; it may
859 occasionally return a non-zero value even when FN does not actually
860 call `setjmp'. */
861
862 int
863 calls_setjmp_p (fn)
864 tree fn;
865 {
866 return (walk_tree (&DECL_SAVED_TREE (fn), calls_setjmp_r, NULL)
867 != NULL_TREE);
868 }
869
870 /* FN is a function that has a complete body. Clone the body as
871 necessary. Returns non-zero if there's no longer any need to
872 process the main body. */
873
874 int
875 maybe_clone_body (fn)
876 tree fn;
877 {
878 inline_data id;
879 tree clone;
880
881 /* We don't clone constructors and destructors under the old ABI. */
882 if (!flag_new_abi)
883 return 0;
884
885 /* We only clone constructors and destructors. */
886 if (!DECL_MAYBE_IN_CHARGE_CONSTRUCTOR_P (fn)
887 && !DECL_MAYBE_IN_CHARGE_DESTRUCTOR_P (fn))
888 return 0;
889
890 /* We know that any clones immediately follow FN in the TYPE_METHODS
891 list. */
892 for (clone = TREE_CHAIN (fn);
893 clone && DECL_CLONED_FUNCTION_P (clone);
894 clone = TREE_CHAIN (clone))
895 {
896 tree parm;
897 tree clone_parm;
898 int parmno;
899
900 /* Update CLONE's source position information to match FN's. */
901 DECL_SOURCE_FILE (clone) = DECL_SOURCE_FILE (fn);
902 DECL_SOURCE_LINE (clone) = DECL_SOURCE_LINE (fn);
903
904 /* Start processing the function. */
905 push_to_top_level ();
906 start_function (NULL_TREE, clone, NULL_TREE, SF_PRE_PARSED);
907 store_parm_decls ();
908
909 /* Just clone the body, as if we were making an inline call.
910 But, remap the parameters in the callee to the parameters of
911 the caller. If there's an in-charge parameter, map it to an
912 appropriate constant. */
913 memset (&id, 0, sizeof (id));
914 VARRAY_TREE_INIT (id.fns, 2, "fns");
915 VARRAY_PUSH_TREE (id.fns, clone);
916 VARRAY_PUSH_TREE (id.fns, fn);
917
918 /* Remap the parameters. */
919 id.decl_map = splay_tree_new (splay_tree_compare_pointers,
920 NULL, NULL);
921 for (parmno = 0,
922 parm = DECL_ARGUMENTS (fn),
923 clone_parm = DECL_ARGUMENTS (clone);
924 parm;
925 ++parmno,
926 parm = TREE_CHAIN (parm))
927 {
928 /* Map the in-charge parameter to an appropriate constant. */
929 if (DECL_HAS_IN_CHARGE_PARM_P (fn) && parmno == 1)
930 {
931 tree in_charge;
932 in_charge = in_charge_arg_for_name (DECL_NAME (clone));
933 splay_tree_insert (id.decl_map,
934 (splay_tree_key) parm,
935 (splay_tree_value) in_charge);
936 }
937 /* Map other parameters to their equivalents in the cloned
938 function. */
939 else
940 {
941 splay_tree_insert (id.decl_map,
942 (splay_tree_key) parm,
943 (splay_tree_value) clone_parm);
944 clone_parm = TREE_CHAIN (clone_parm);
945 }
946 }
947
948 /* Actually copy the body. */
949 TREE_CHAIN (DECL_SAVED_TREE (clone)) = copy_body (&id);
950
951 /* Clean up. */
952 splay_tree_delete (id.decl_map);
953 VARRAY_FREE (id.fns);
954
955 /* Now, expand this function into RTL, if appropriate. */
956 current_function_name_declared = 1;
957 expand_body (finish_function (0));
958 pop_from_top_level ();
959 }
960
961 /* We don't need to process the original function any further. */
962 return 1;
963 }
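
/* Illustration (a sketch, not compiled): under the new ABI a
   constructor such as

       struct S : B { S (); };

   is emitted as clones -- a complete-object constructor and a
   base-object constructor (destructors additionally get a deleting
   variant) -- which immediately follow the original on the TYPE_METHODS
   list and are marked DECL_CLONED_FUNCTION_P.  maybe_clone_body gives
   each clone a copy of the one user-written body, with the implicit
   in-charge parameter, when there is one, replaced by the constant
   returned by in_charge_arg_for_name for that clone.  */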