]> gcc.gnu.org Git - gcc.git/blob - gcc/tree-nested.c
re PR middle-end/35130 (OpenMP: Private variable passed to subroutine)
[gcc.git] / gcc / tree-nested.c
1 /* Nested function decomposition for trees.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "rtl.h"
26 #include "tm_p.h"
27 #include "function.h"
28 #include "tree-dump.h"
29 #include "tree-inline.h"
30 #include "tree-gimple.h"
31 #include "tree-iterator.h"
32 #include "tree-flow.h"
33 #include "cgraph.h"
34 #include "expr.h"
35 #include "langhooks.h"
36 #include "pointer-set.h"
37 #include "ggc.h"
38
39
40 /* The object of this pass is to lower the representation of a set of nested
41 functions in order to expose all of the gory details of the various
42 nonlocal references. We want to do this sooner rather than later, in
43 order to give us more freedom in emitting all of the functions in question.
44
45 Back in olden times, when gcc was young, we developed an insanely
46 complicated scheme whereby variables which were referenced nonlocally
47 were forced to live in the stack of the declaring function, and then
48 the nested functions magically discovered where these variables were
49 placed. In order for this scheme to function properly, it required
50 that the outer function be partially expanded, then we switch to
51 compiling the inner function, and once done with those we switch back
52 to compiling the outer function. Such delicate ordering requirements
 53 make it difficult to do whole translation unit optimizations
54 involving such functions.
55
56 The implementation here is much more direct. Everything that can be
57 referenced by an inner function is a member of an explicitly created
58 structure herein called the "nonlocal frame struct". The incoming
59 static chain for a nested function is a pointer to this struct in
60 the parent. In this way, we settle on known offsets from a known
61 base, and so are decoupled from the logic that places objects in the
62 function's stack frame. More importantly, we don't have to wait for
63 that to happen -- since the compilation of the inner function is no
64 longer tied to a real stack frame, the nonlocal frame struct can be
65 allocated anywhere. Which means that the outer function is now
66 inlinable.
67
68 Theory of operation here is very simple. Iterate over all the
69 statements in all the functions (depth first) several times,
70 allocating structures and fields on demand. In general we want to
71 examine inner functions first, so that we can avoid making changes
72 to outer functions which are unnecessary.
73
74 The order of the passes matters a bit, in that later passes will be
75 skipped if it is discovered that the functions don't actually interact
76 at all. That is, they're nested in the lexical sense but could have
77 been written as independent functions without change. */
78
79
/* Per-function bookkeeping for the nested-function lowering pass.  One
   of these is created for every function in the nesting tree.  */

struct nesting_info
{
  struct nesting_info *outer;		/* Lexically enclosing function, or NULL.  */
  struct nesting_info *inner;		/* Head of list of directly nested functions.  */
  struct nesting_info *next;		/* Next sibling at the same nesting level.  */

  struct pointer_map_t *field_map;	/* Maps DECLs to FIELD_DECLs in frame_type.  */
  struct pointer_map_t *var_map;	/* Maps DECLs to trampoline fields or local
					   debug decls.  */
  bitmap suppress_expansion;		/* DECL_UIDs whose frame expansion is
					   suppressed (used for OpenMP clauses).  */

  tree context;				/* The FUNCTION_DECL this info describes.  */
  tree new_local_var_chain;		/* Chain of new temporaries to register.  */
  tree debug_var_chain;			/* Chain of debug VAR_DECLs created by
					   get_nonlocal_debug_decl.  */
  tree frame_type;			/* RECORD_TYPE of the non-local frame struct.  */
  tree frame_decl;			/* Local VAR_DECL instantiating frame_type.  */
  tree chain_field;			/* "__chain" FIELD_DECL within frame_type.  */
  tree chain_decl;			/* Incoming static chain, a PARM_DECL.  */
  tree nl_goto_field;			/* "__nl_goto_buf" FIELD_DECL for non-local
					   gotos.  */

  bool any_parm_remapped;		/* True if a PARM_DECL was moved into the
					   frame (needs a copy at function entry).  */
  bool any_tramp_created;		/* True if a trampoline field was created.  */
  char static_chain_added;		/* NOTE(review): presumably a small bitmask
					   recording how the static chain was added;
					   set/used in code outside this view.  */
};
103
104
105 /* Obstack used for the bitmaps in the struct above. */
106 static struct bitmap_obstack nesting_info_bitmap_obstack;
107
108
109 /* We're working in so many different function contexts simultaneously,
110 that create_tmp_var is dangerous. Prevent mishap. */
111 #define create_tmp_var cant_use_create_tmp_var_here_dummy
112
113 /* Like create_tmp_var, except record the variable for registration at
114 the given nesting level. */
115
116 static tree
117 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
118 {
119 tree tmp_var;
120
121 /* If the type is of variable size or a type which must be created by the
122 frontend, something is wrong. Note that we explicitly allow
123 incomplete types here, since we create them ourselves here. */
124 gcc_assert (!TREE_ADDRESSABLE (type));
125 gcc_assert (!TYPE_SIZE_UNIT (type)
126 || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
127
128 tmp_var = create_tmp_var_raw (type, prefix);
129 DECL_CONTEXT (tmp_var) = info->context;
130 TREE_CHAIN (tmp_var) = info->new_local_var_chain;
131 DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
132 if (TREE_CODE (type) == COMPLEX_TYPE
133 || TREE_CODE (type) == VECTOR_TYPE)
134 DECL_GIMPLE_REG_P (tmp_var) = 1;
135
136 info->new_local_var_chain = tmp_var;
137
138 return tmp_var;
139 }
140
141 /* Take the address of EXP to be used within function CONTEXT.
142 Mark it for addressability as necessary. */
143
144 tree
145 build_addr (tree exp, tree context)
146 {
147 tree base = exp;
148 tree save_context;
149 tree retval;
150
151 while (handled_component_p (base))
152 base = TREE_OPERAND (base, 0);
153
154 if (DECL_P (base))
155 TREE_ADDRESSABLE (base) = 1;
156
157 /* Building the ADDR_EXPR will compute a set of properties for
158 that ADDR_EXPR. Those properties are unfortunately context
159 specific. ie, they are dependent on CURRENT_FUNCTION_DECL.
160
161 Temporarily set CURRENT_FUNCTION_DECL to the desired context,
162 build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL. That
163 way the properties are for the ADDR_EXPR are computed properly. */
164 save_context = current_function_decl;
165 current_function_decl = context;
166 retval = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
167 current_function_decl = save_context;
168 return retval;
169 }
170
171 /* Insert FIELD into TYPE, sorted by alignment requirements. */
172
173 void
174 insert_field_into_struct (tree type, tree field)
175 {
176 tree *p;
177
178 DECL_CONTEXT (field) = type;
179
180 for (p = &TYPE_FIELDS (type); *p ; p = &TREE_CHAIN (*p))
181 if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
182 break;
183
184 TREE_CHAIN (field) = *p;
185 *p = field;
186 }
187
188 /* Build or return the RECORD_TYPE that describes the frame state that is
189 shared between INFO->CONTEXT and its nested functions. This record will
190 not be complete until finalize_nesting_tree; up until that point we'll
191 be adding fields as necessary.
192
193 We also build the DECL that represents this frame in the function. */
194
195 static tree
196 get_frame_type (struct nesting_info *info)
197 {
198 tree type = info->frame_type;
199 if (!type)
200 {
201 char *name;
202
203 type = make_node (RECORD_TYPE);
204
205 name = concat ("FRAME.",
206 IDENTIFIER_POINTER (DECL_NAME (info->context)),
207 NULL);
208 TYPE_NAME (type) = get_identifier (name);
209 free (name);
210
211 info->frame_type = type;
212 info->frame_decl = create_tmp_var_for (info, type, "FRAME");
213
214 /* ??? Always make it addressable for now, since it is meant to
215 be pointed to by the static chain pointer. This pessimizes
216 when it turns out that no static chains are needed because
217 the nested functions referencing non-local variables are not
218 reachable, but the true pessimization is to create the non-
219 local frame structure in the first place. */
220 TREE_ADDRESSABLE (info->frame_decl) = 1;
221 }
222 return type;
223 }
224
225 /* Return true if DECL should be referenced by pointer in the non-local
226 frame structure. */
227
228 static bool
229 use_pointer_in_frame (tree decl)
230 {
231 if (TREE_CODE (decl) == PARM_DECL)
232 {
233 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
234 sized decls, and inefficient to copy large aggregates. Don't bother
235 moving anything but scalar variables. */
236 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
237 }
238 else
239 {
240 /* Variable sized types make things "interesting" in the frame. */
241 return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
242 }
243 }
244
/* Given DECL, a non-locally accessed variable, find or create a field
   in the non-local frame structure for the given nesting context.
   With NO_INSERT, return the existing field or NULL; with INSERT,
   create and record a new field on first use.  */

static tree
lookup_field_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  void **slot;

  if (insert == NO_INSERT)
    {
      slot = pointer_map_contains (info->field_map, decl);
      return slot ? *slot : NULL;
    }

  slot = pointer_map_insert (info->field_map, decl);
  if (!*slot)
    {
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);

      if (use_pointer_in_frame (decl))
	{
	  /* Referenced indirectly: the field holds DECL's address.  */
	  TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	  DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
	  DECL_NONADDRESSABLE_P (field) = 1;
	}
      else
	{
	  /* Stored directly: mirror DECL's type, alignment,
	     addressability and volatility onto the field.  */
	  TREE_TYPE (field) = TREE_TYPE (decl);
	  DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
	  DECL_ALIGN (field) = DECL_ALIGN (decl);
	  DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
	  TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
	  DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
	  TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
	}

      insert_field_into_struct (get_frame_type (info), field);
      *slot = field;

      /* A remapped parameter must be copied into the frame at
	 function entry; remember that some parameter needs this.  */
      if (TREE_CODE (decl) == PARM_DECL)
	info->any_parm_remapped = true;
    }

  return *slot;
}
292
293 /* Build or return the variable that holds the static chain within
294 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
295
296 static tree
297 get_chain_decl (struct nesting_info *info)
298 {
299 tree decl = info->chain_decl;
300 if (!decl)
301 {
302 tree type;
303
304 type = get_frame_type (info->outer);
305 type = build_pointer_type (type);
306
307 /* Note that this variable is *not* entered into any BIND_EXPR;
308 the construction of this variable is handled specially in
309 expand_function_start and initialize_inlined_parameters.
310 Note also that it's represented as a parameter. This is more
311 close to the truth, since the initial value does come from
312 the caller. */
313 decl = build_decl (PARM_DECL, create_tmp_var_name ("CHAIN"), type);
314 DECL_ARTIFICIAL (decl) = 1;
315 DECL_IGNORED_P (decl) = 1;
316 TREE_USED (decl) = 1;
317 DECL_CONTEXT (decl) = info->context;
318 DECL_ARG_TYPE (decl) = type;
319
320 /* Tell tree-inline.c that we never write to this variable, so
321 it can copy-prop the replacement value immediately. */
322 TREE_READONLY (decl) = 1;
323
324 info->chain_decl = decl;
325 }
326 return decl;
327 }
328
329 /* Build or return the field within the non-local frame state that holds
330 the static chain for INFO->CONTEXT. This is the way to walk back up
331 multiple nesting levels. */
332
333 static tree
334 get_chain_field (struct nesting_info *info)
335 {
336 tree field = info->chain_field;
337 if (!field)
338 {
339 tree type = build_pointer_type (get_frame_type (info->outer));
340
341 field = make_node (FIELD_DECL);
342 DECL_NAME (field) = get_identifier ("__chain");
343 TREE_TYPE (field) = type;
344 DECL_ALIGN (field) = TYPE_ALIGN (type);
345 DECL_NONADDRESSABLE_P (field) = 1;
346
347 insert_field_into_struct (get_frame_type (info), field);
348
349 info->chain_field = field;
350 }
351 return field;
352 }
353
354 /* Copy EXP into a temporary. Allocate the temporary in the context of
355 INFO and insert the initialization statement before TSI. */
356
357 static tree
358 init_tmp_var (struct nesting_info *info, tree exp, tree_stmt_iterator *tsi)
359 {
360 tree t, stmt;
361
362 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
363 stmt = build_gimple_modify_stmt (t, exp);
364 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (tsi_stmt (*tsi)));
365 tsi_link_before (tsi, stmt, TSI_SAME_STMT);
366
367 return t;
368 }
369
370 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
371
372 static tree
373 tsi_gimplify_val (struct nesting_info *info, tree exp, tree_stmt_iterator *tsi)
374 {
375 if (is_gimple_val (exp))
376 return exp;
377 else
378 return init_tmp_var (info, exp, tsi);
379 }
380
381 /* Similarly, but copy from the temporary and insert the statement
382 after the iterator. */
383
384 static tree
385 save_tmp_var (struct nesting_info *info, tree exp,
386 tree_stmt_iterator *tsi)
387 {
388 tree t, stmt;
389
390 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
391 stmt = build_gimple_modify_stmt (exp, t);
392 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (tsi_stmt (*tsi)));
393 tsi_link_after (tsi, stmt, TSI_SAME_STMT);
394
395 return t;
396 }
397
/* Build or return the type used to represent a nested function trampoline.  */

static GTY(()) tree trampoline_type;

static tree
get_trampoline_type (void)
{
  unsigned align, size;
  tree t;

  /* The type is target-dependent but identical for every nested
     function, so it is built only once and cached.  */
  if (trampoline_type)
    return trampoline_type;

  align = TRAMPOLINE_ALIGNMENT;
  size = TRAMPOLINE_SIZE;

  /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
     then allocate extra space so that we can do dynamic alignment.  */
  if (align > STACK_BOUNDARY)
    {
      size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
      align = STACK_BOUNDARY;
    }

  /* The trampoline is a struct containing a single char array of
     SIZE bytes ("__data") aligned to ALIGN.  */
  t = build_index_type (build_int_cst (NULL_TREE, size - 1));
  t = build_array_type (char_type_node, t);
  t = build_decl (FIELD_DECL, get_identifier ("__data"), t);
  DECL_ALIGN (t) = align;
  DECL_USER_ALIGN (t) = 1;

  trampoline_type = make_node (RECORD_TYPE);
  TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
  TYPE_FIELDS (trampoline_type) = t;
  layout_type (trampoline_type);

  return trampoline_type;
}
435
436 /* Given DECL, a nested function, find or create a field in the non-local
437 frame structure for a trampoline for this function. */
438
439 static tree
440 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
441 enum insert_option insert)
442 {
443 void **slot;
444
445 if (insert == NO_INSERT)
446 {
447 slot = pointer_map_contains (info->var_map, decl);
448 return slot ? *slot : NULL;
449 }
450
451 slot = pointer_map_insert (info->var_map, decl);
452 if (!*slot)
453 {
454 tree field = make_node (FIELD_DECL);
455 DECL_NAME (field) = DECL_NAME (decl);
456 TREE_TYPE (field) = get_trampoline_type ();
457 TREE_ADDRESSABLE (field) = 1;
458
459 insert_field_into_struct (get_frame_type (info), field);
460 *slot = field;
461
462 info->any_tramp_created = true;
463 }
464
465 return *slot;
466 }
467
/* Build or return the field within the non-local frame state that holds
   the non-local goto "jmp_buf".  The buffer itself is maintained by the
   rtl middle-end as dynamic stack space is allocated.  */

static tree
get_nl_goto_field (struct nesting_info *info)
{
  tree field = info->nl_goto_field;
  if (!field)
    {
      unsigned size;
      tree type;

      /* For __builtin_nonlocal_goto, we need N words.  The first is the
	 frame pointer, the rest is for the target's stack pointer save
	 area.  The number of words is controlled by STACK_SAVEAREA_MODE;
	 not the best interface, but it'll do for now.  */
      if (Pmode == ptr_mode)
	type = ptr_type_node;
      else
	type = lang_hooks.types.type_for_mode (Pmode, 1);

      /* Save-area words plus one for the frame pointer.  */
      size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
      size = size / GET_MODE_SIZE (Pmode);
      size = size + 1;

      type = build_array_type
	(type, build_index_type (build_int_cst (NULL_TREE, size)));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__nl_goto_buf");
      TREE_TYPE (field) = type;
      DECL_ALIGN (field) = TYPE_ALIGN (type);
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->nl_goto_field = field;
    }

  return field;
}
510 \f
/* Helper function for walk_stmts.  Walk output operands of an ASM_EXPR,
   then its input operands, adjusting WI's val_only/is_lhs flags per the
   constraints before invoking WI->CALLBACK via walk_tree.  */

static void
walk_asm_expr (struct walk_stmt_info *wi, tree stmt)
{
  int noutputs = list_length (ASM_OUTPUTS (stmt));
  const char **oconstraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  wi->is_lhs = true;
  for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      /* Remember each output constraint; parse_input_constraint needs
	 them to resolve matching digits in input constraints.  */
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
			       &allows_reg, &is_inout);

      /* An operand that must be in memory is an lvalue, not a value.  */
      wi->val_only = (allows_reg || !allows_mem);
      walk_tree (&TREE_VALUE (link), wi->callback, wi, NULL);
    }

  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      wi->val_only = (allows_reg || !allows_mem);
      /* Although input "m" is not really a LHS, we need a lvalue.  */
      wi->is_lhs = !wi->val_only;
      walk_tree (&TREE_VALUE (link), wi->callback, wi, NULL);
    }

  /* Restore default flags for subsequent walks.  */
  wi->is_lhs = false;
  wi->val_only = true;
}
551
/* Iterate over all sub-statements of *TP calling walk_tree with
   WI->CALLBACK for every sub-expression in each statement found.  */

void
walk_stmts (struct walk_stmt_info *wi, tree *tp)
{
  tree t = *tp;
  int walk_subtrees;

  if (!t)
    return;

  if (wi->want_locations && EXPR_HAS_LOCATION (t))
    input_location = EXPR_LOCATION (t);

  switch (TREE_CODE (t))
    {
    case STATEMENT_LIST:
      {
	tree_stmt_iterator i;
	for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
	  {
	    /* Record the iterator so callbacks can insert statements
	       relative to the one being walked.  */
	    wi->tsi = i;
	    walk_stmts (wi, tsi_stmt_ptr (i));
	  }
      }
      break;

    case COND_EXPR:
      walk_tree (&COND_EXPR_COND (t), wi->callback, wi, NULL);
      walk_stmts (wi, &COND_EXPR_THEN (t));
      walk_stmts (wi, &COND_EXPR_ELSE (t));
      break;
    case CATCH_EXPR:
      walk_stmts (wi, &CATCH_BODY (t));
      break;
    case EH_FILTER_EXPR:
      walk_stmts (wi, &EH_FILTER_FAILURE (t));
      break;
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
      walk_stmts (wi, &TREE_OPERAND (t, 0));
      walk_stmts (wi, &TREE_OPERAND (t, 1));
      break;

    case BIND_EXPR:
      /* Optionally let the callback see the BIND_EXPR itself; it may
	 clear walk_subtrees to cut off the walk below it.  */
      if (wi->want_bind_expr)
	{
	  walk_subtrees = 1;
	  wi->callback (tp, &walk_subtrees, wi);
	  if (!walk_subtrees)
	    break;
	}
      walk_stmts (wi, &BIND_EXPR_BODY (t));
      break;

    case RETURN_EXPR:
      /* Likewise for RETURN_EXPR.  */
      if (wi->want_return_expr)
	{
	  walk_subtrees = 1;
	  wi->callback (tp, &walk_subtrees, wi);
	  if (!walk_subtrees)
	    break;
	}
      walk_stmts (wi, &TREE_OPERAND (t, 0));
      break;

    case GIMPLE_MODIFY_STMT:
      /* A formal temporary lhs may use a COMPONENT_REF rhs.  */
      wi->val_only = !is_gimple_formal_tmp_var (GIMPLE_STMT_OPERAND (t, 0));
      walk_tree (&GIMPLE_STMT_OPERAND (t, 1), wi->callback, wi, NULL);

      /* If the rhs is appropriate for a memory, we may use a
	 COMPONENT_REF on the lhs.  */
      wi->val_only = !is_gimple_mem_rhs (GIMPLE_STMT_OPERAND (t, 1));
      wi->is_lhs = true;
      walk_tree (&GIMPLE_STMT_OPERAND (t, 0), wi->callback, wi, NULL);

      /* Restore default flags.  */
      wi->val_only = true;
      wi->is_lhs = false;
      break;

    case ASM_EXPR:
      walk_asm_expr (wi, *tp);
      break;

    default:
      /* Anything else is a plain expression: walk the whole tree.  */
      wi->val_only = true;
      walk_tree (tp, wi->callback, wi, NULL);
      break;
    }
}
644
645 /* Invoke CALLBACK on all statements of *STMT_P. */
646
647 static void
648 walk_body (walk_tree_fn callback, struct nesting_info *info, tree *stmt_p)
649 {
650 struct walk_stmt_info wi;
651
652 memset (&wi, 0, sizeof (wi));
653 wi.callback = callback;
654 wi.info = info;
655 wi.val_only = true;
656
657 walk_stmts (&wi, stmt_p);
658 }
659
/* Invoke CALLBACK on all statements of INFO->CONTEXT, i.e. walk the
   entire saved body of that function.  */

static inline void
walk_function (walk_tree_fn callback, struct nesting_info *info)
{
  walk_body (callback, info, &DECL_SAVED_TREE (info->context));
}
667
/* Invoke CALLBACK on OMP_FOR init, cond, incr and pre-body.  */

static void
walk_omp_for (walk_tree_fn callback, struct nesting_info *info, tree for_stmt)
{
  struct walk_stmt_info wi;
  tree t, list = NULL, empty;

  walk_body (callback, info, &OMP_FOR_PRE_BODY (for_stmt));

  /* Put an empty statement in a fresh list and point WI.TSI at it, so
     the callback has a valid iterator to insert new statements before;
     the list is later appended to the pre-body.  */
  empty = build_empty_stmt ();
  append_to_statement_list_force (empty, &list);
  memset (&wi, 0, sizeof (wi));
  wi.callback = callback;
  wi.info = info;
  wi.tsi = tsi_last (list);

  /* Walk the init statement: lhs as an lvalue, rhs as a value.  */
  t = OMP_FOR_INIT (for_stmt);
  gcc_assert (TREE_CODE (t) == GIMPLE_MODIFY_STMT);
  SET_EXPR_LOCUS (empty, EXPR_LOCUS (t));
  wi.val_only = false;
  walk_tree (&GIMPLE_STMT_OPERAND (t, 0), callback, &wi, NULL);
  wi.val_only = true;
  wi.is_lhs = false;
  walk_tree (&GIMPLE_STMT_OPERAND (t, 1), callback, &wi, NULL);

  /* Walk both operands of the controlling comparison.  */
  t = OMP_FOR_COND (for_stmt);
  gcc_assert (COMPARISON_CLASS_P (t));
  SET_EXPR_LOCUS (empty, EXPR_LOCUS (t));
  wi.val_only = false;
  walk_tree (&TREE_OPERAND (t, 0), callback, &wi, NULL);
  wi.val_only = true;
  wi.is_lhs = false;
  walk_tree (&TREE_OPERAND (t, 1), callback, &wi, NULL);

  /* Walk the increment: lhs, then both operands of its binary rhs.  */
  t = OMP_FOR_INCR (for_stmt);
  gcc_assert (TREE_CODE (t) == GIMPLE_MODIFY_STMT);
  SET_EXPR_LOCUS (empty, EXPR_LOCUS (t));
  wi.val_only = false;
  walk_tree (&GIMPLE_STMT_OPERAND (t, 0), callback, &wi, NULL);
  t = GIMPLE_STMT_OPERAND (t, 1);
  gcc_assert (BINARY_CLASS_P (t));
  wi.val_only = false;
  walk_tree (&TREE_OPERAND (t, 0), callback, &wi, NULL);
  wi.val_only = true;
  wi.is_lhs = false;
  walk_tree (&TREE_OPERAND (t, 1), callback, &wi, NULL);

  /* Remove empty statement added above from the end of statement list.  */
  tsi_delink (&wi.tsi);
  append_to_statement_list (list, &OMP_FOR_PRE_BODY (for_stmt));
}
720
721 /* Similarly for ROOT and all functions nested underneath, depth first. */
722
723 static void
724 walk_all_functions (walk_tree_fn callback, struct nesting_info *root)
725 {
726 do
727 {
728 if (root->inner)
729 walk_all_functions (callback, root->inner);
730 walk_function (callback, root);
731 root = root->next;
732 }
733 while (root);
734 }
735 \f
736 /* We have to check for a fairly pathological case. The operands of function
737 nested function are to be interpreted in the context of the enclosing
738 function. So if any are variably-sized, they will get remapped when the
739 enclosing function is inlined. But that remapping would also have to be
740 done in the types of the PARM_DECLs of the nested function, meaning the
741 argument types of that function will disagree with the arguments in the
742 calls to that function. So we'd either have to make a copy of the nested
743 function corresponding to each time the enclosing function was inlined or
744 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
745 function. The former is not practical. The latter would still require
746 detecting this case to know when to add the conversions. So, for now at
747 least, we don't inline such an enclosing function.
748
749 We have to do that check recursively, so here return indicating whether
750 FNDECL has such a nested function. ORIG_FN is the function we were
751 trying to inline to use for checking whether any argument is variably
752 modified by anything in it.
753
754 It would be better to do this in tree-inline.c so that we could give
755 the appropriate warning for why a function can't be inlined, but that's
756 too late since the nesting structure has already been flattened and
757 adding a flag just to record this fact seems a waste of a flag. */
758
759 static bool
760 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
761 {
762 struct cgraph_node *cgn = cgraph_node (fndecl);
763 tree arg;
764
765 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
766 {
767 for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = TREE_CHAIN (arg))
768 if (variably_modified_type_p (TREE_TYPE (arg), 0), orig_fndecl)
769 return true;
770
771 if (check_for_nested_with_variably_modified (cgn->decl, orig_fndecl))
772 return true;
773 }
774
775 return false;
776 }
777
778 /* Construct our local datastructure describing the function nesting
779 tree rooted by CGN. */
780
781 static struct nesting_info *
782 create_nesting_tree (struct cgraph_node *cgn)
783 {
784 struct nesting_info *info = XCNEW (struct nesting_info);
785 info->field_map = pointer_map_create ();
786 info->var_map = pointer_map_create ();
787 info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
788 info->context = cgn->decl;
789
790 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
791 {
792 struct nesting_info *sub = create_nesting_tree (cgn);
793 sub->outer = info;
794 sub->next = info->inner;
795 info->inner = sub;
796 }
797
798 /* See discussion at check_for_nested_with_variably_modified for a
799 discussion of why this has to be here. */
800 if (check_for_nested_with_variably_modified (info->context, info->context))
801 DECL_UNINLINABLE (info->context) = true;
802
803 return info;
804 }
805
/* Return an expression computing the static chain for TARGET_CONTEXT
   from INFO->CONTEXT.  Insert any necessary computations before TSI.  */

static tree
get_static_chain (struct nesting_info *info, tree target_context,
		  tree_stmt_iterator *tsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* The chain for our own context is the address of our local
	 frame struct.  */
      x = build_addr (info->frame_decl, target_context);
    }
  else
    {
      /* Otherwise start from the incoming static chain and follow the
	 __chain fields up one nesting level at a time, materializing
	 each step into a temporary inserted before TSI.  */
      x = get_chain_decl (info);

      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, tsi);
	}
    }

  return x;
}
836
/* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
   frame as seen from INFO->CONTEXT.  Insert any necessary computations
   before TSI.  */

static tree
get_frame_field (struct nesting_info *info, tree target_context,
		 tree field, tree_stmt_iterator *tsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
    }
  else
    {
      /* Follow the static chain up through __chain fields until X is a
	 pointer to TARGET_CONTEXT's frame, materializing each step in
	 a temporary inserted before TSI, then dereference it.  */
      x = get_chain_decl (info);

      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, tsi);
	}

      x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
    }

  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  return x;
}
873
/* A subroutine of convert_nonlocal_reference.  Create a local variable
   in the nested function with DECL_VALUE_EXPR set to reference the true
   variable in the parent function.  This is used both for debug info
   and in OpenMP lowering.  */

static tree
get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
{
  tree target_context;
  struct nesting_info *i;
  tree x, field, new_decl;
  void **slot;

  slot = pointer_map_insert (info->var_map, decl);

  /* Reuse a previously created debug decl for DECL.  */
  if (*slot)
    return *slot;

  target_context = decl_function_context (decl);

  /* A copy of the code in get_frame_field, but without the temporaries.  */
  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      i = info;
    }
  else
    {
      /* Walk the static chain up to the frame owning DECL.  */
      x = get_chain_decl (info);
      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  field = get_chain_field (i);
	  x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	}
      x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
    }

  field = lookup_field_for_decl (i, decl, INSERT);
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  /* If the frame field only holds DECL's address, dereference it.  */
  if (use_pointer_in_frame (decl))
    x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);

  /* ??? We should be remapping types as well, surely.  */
  new_decl = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_SOURCE_LOCATION (new_decl) = DECL_SOURCE_LOCATION (decl);
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;

  /* The new decl evaluates to the frame access built above.  */
  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;

  /* Cache the decl and chain it for later registration.  */
  *slot = new_decl;
  TREE_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  return new_decl;
}
940
/* Called via walk_function+walk_tree, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.

   The rewrite will involve some number of structure accesses back up
   the static chain.  E.g. for a variable FOO up one nesting level it'll
   be CHAIN->FOO.  For two levels it'll be CHAIN->__chain->FOO.  Further
   indirections apply to decls for which use_pointer_in_frame is true.

   TP points at the tree being visited, WALK_SUBTREES controls whether
   walk_tree descends further, and DATA is the walk_stmt_info carrying
   the current nesting_info plus lvalue/rvalue context flags.  Always
   returns NULL_TREE (the walk is never cut short).  */

static bool convert_nonlocal_omp_clauses (tree *, struct walk_stmt_info *);

static tree
convert_nonlocal_reference (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *info = wi->info;
  tree t = *tp;
  tree save_local_var_chain;
  bitmap save_suppress;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) != info->context)
	{
	  tree x;
	  wi->changed = true;

	  /* Rewrite to the debug decl by default; build the explicit
	     chain-walking frame access only when expansion of this decl
	     is not suppressed by an enclosing OpenMP data clause.  */
	  x = get_nonlocal_debug_decl (info, t);
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    {
	      tree target_context = decl_function_context (t);
	      struct nesting_info *i;
	      /* Walk up the nesting tree to the function that declares T.  */
	      for (i = info->outer; i->context != target_context; i = i->outer)
		continue;
	      x = lookup_field_for_decl (i, t, INSERT);
	      x = get_frame_field (info, target_context, x, &wi->tsi);
	      /* Decls stored in the frame by pointer need one more
		 indirection to reach the object itself.  */
	      if (use_pointer_in_frame (t))
		{
		  x = init_tmp_var (info, x, &wi->tsi);
		  x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
		}
	    }

	  /* In value-only contexts, spill the reference to a temporary;
	     save_tmp_var for stores, init_tmp_var for loads.  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->tsi);
	      else
		x = init_tmp_var (info, x, &wi->tsi);
	    }

	  *tp = x;
	}
      break;

    case GOTO_EXPR:
      /* Don't walk non-local gotos for now.  */
      if (TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL)
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;

    case LABEL_DECL:
      /* We're taking the address of a label from a parent function, but
	 this is not itself a non-local goto.  Mark the label such that it
	 will not be deleted, much as we would with a label address in
	 static storage.  */
      if (decl_function_context (t) != info->context)
	FORCED_LABEL (t) = 1;
      break;

    case ADDR_EXPR:
      {
	bool save_val_only = wi->val_only;

	/* Walk the operand as an address operand, not a value.  */
	wi->val_only = false;
	wi->is_lhs = false;
	wi->changed = false;
	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference, wi, NULL);
	wi->val_only = true;

	if (wi->changed)
	  {
	    tree save_context;

	    /* If we changed anything, then TREE_INVARIANT is wrong,
	       since we're no longer directly referencing a decl.  */
	    save_context = current_function_decl;
	    current_function_decl = info->context;
	    recompute_tree_invariant_for_addr_expr (t);
	    current_function_decl = save_context;

	    /* If the callback converted the address argument in a context
	       where we only accept variables (and min_invariant, presumably),
	       then compute the address into a temporary.  */
	    if (save_val_only)
	      *tp = tsi_gimplify_val (wi->info, t, &wi->tsi);
	  }
      }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference, wi,
			 NULL);
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference, wi,
			 NULL);
	    }
	}
      /* Finally rewrite the innermost base object itself, in place.  */
      wi->val_only = false;
      walk_tree (tp, convert_nonlocal_reference, wi, NULL);
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    case OMP_PARALLEL:
      save_suppress = info->suppress_expansion;
      /* If any clause needed the static chain, pass the chain decl to
	 the parallel region via a firstprivate clause.  */
      if (convert_nonlocal_omp_clauses (&OMP_PARALLEL_CLAUSES (t), wi))
	{
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (OMP_CLAUSE_FIRSTPRIVATE);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (t);
	  OMP_PARALLEL_CLAUSES (t) = c;
	}

      /* Collect locals created while walking the parallel body on a
	 fresh chain, so they get declared inside that body.  */
      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference, info, &OMP_PARALLEL_BODY (t));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain, OMP_PARALLEL_BODY (t), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (&OMP_FOR_CLAUSES (t), wi);
      walk_omp_for (convert_nonlocal_reference, info, t);
      walk_body (convert_nonlocal_reference, info, &OMP_FOR_BODY (t));
      info->suppress_expansion = save_suppress;
      break;

    case OMP_SECTIONS:
    case OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (&OMP_CLAUSES (t), wi);
      walk_body (convert_nonlocal_reference, info, &OMP_BODY (t));
      info->suppress_expansion = save_suppress;
      break;

    case OMP_SECTION:
    case OMP_MASTER:
    case OMP_ORDERED:
      /* These constructs have no clauses; just walk the body.  */
      walk_body (convert_nonlocal_reference, info, &OMP_BODY (t));
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1154
/* Rewrite the decls and expressions referenced by the OpenMP clause
   list *PCLAUSES for use in WI->info->context.  Data-clause decls
   declared in outer functions are replaced by their nonlocal debug
   decls, and marked in suppress_expansion so the subsequent body walk
   leaves those references alone.  Returns true if any clause required
   access up the static chain.  */

static bool
convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *info = wi->info;
  bool need_chain = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  /* Work on a copy so the suppression only applies within this
     construct; the caller restores the previous bitmap afterwards.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LASTPRIVATE:
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (clause);
	  if (decl_function_context (decl) != info->context)
	    {
	      bitmap_set_bit (new_suppress, DECL_UID (decl));
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	  /* These clauses carry an expression operand that may itself
	     reference outer decls; rewrite it as an rvalue.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_nonlocal_reference (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
				      wi);
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	  /* Nothing to rewrite in these clauses.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  return need_chain;
}
1213
1214 /* A subroutine of convert_local_reference. Create a local variable
1215 in the parent function with DECL_VALUE_EXPR set to reference the
1216 field in FRAME. This is used both for debug info and in OpenMP
1217 lowering. */
1218
1219 static tree
1220 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1221 {
1222 tree x, new_decl;
1223 void **slot;
1224
1225 slot = pointer_map_insert (info->var_map, decl);
1226 if (*slot)
1227 return *slot;
1228
1229 /* Make sure frame_decl gets created. */
1230 (void) get_frame_type (info);
1231 x = info->frame_decl;
1232 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1233
1234 new_decl = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1235 DECL_CONTEXT (new_decl) = info->context;
1236 DECL_SOURCE_LOCATION (new_decl) = DECL_SOURCE_LOCATION (decl);
1237 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1238 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1239 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1240 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1241 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1242 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1243 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1244
1245 SET_DECL_VALUE_EXPR (new_decl, x);
1246 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1247 *slot = new_decl;
1248
1249 TREE_CHAIN (new_decl) = info->debug_var_chain;
1250 info->debug_var_chain = new_decl;
1251
1252 /* Do not emit debug info twice. */
1253 DECL_IGNORED_P (decl) = 1;
1254
1255 return new_decl;
1256 }
1257
/* Called via walk_function+walk_tree, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.

   This is the counterpart of convert_nonlocal_reference, run in the
   *declaring* function: decls that moved into the frame object are
   rewritten as FRAME.FOO accesses.  Always returns NULL_TREE.  */

static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);

static tree
convert_local_reference (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *info = wi->info;
  tree t = *tp, field, x;
  bool save_val_only;
  tree save_local_var_chain;
  bitmap save_suppress;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) == info->context)
	{
	  /* If we copied a pointer to the frame, then the original decl
	     is used unchanged in the parent function.  */
	  if (use_pointer_in_frame (t))
	    break;

	  /* No need to transform anything if no child references the
	     variable.  */
	  field = lookup_field_for_decl (info, t, NO_INSERT);
	  if (!field)
	    break;
	  wi->changed = true;

	  /* Use the debug stand-in by default; replace with an explicit
	     frame field access unless expansion of this decl is
	     suppressed by an enclosing OpenMP data clause.  */
	  x = get_local_debug_decl (info, t, field);
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    x = get_frame_field (info, info->context, field, &wi->tsi);

	  /* In value-only contexts, spill to a temporary; save_tmp_var
	     for stores, init_tmp_var for loads.  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->tsi);
	      else
		x = init_tmp_var (info, x, &wi->tsi);
	    }

	  *tp = x;
	}
      break;

    case ADDR_EXPR:
      save_val_only = wi->val_only;
      wi->val_only = false;
      wi->is_lhs = false;
      wi->changed = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference, wi, NULL);
      wi->val_only = save_val_only;

      /* If we converted anything ... */
      if (wi->changed)
	{
	  tree save_context;

	  /* Then the frame decl is now addressable.  */
	  TREE_ADDRESSABLE (info->frame_decl) = 1;

	  save_context = current_function_decl;
	  current_function_decl = info->context;
	  recompute_tree_invariant_for_addr_expr (t);
	  current_function_decl = save_context;

	  /* If we are in a context where we only accept values, then
	     compute the address into a temporary.  */
	  if (save_val_only)
	    *tp = tsi_gimplify_val (wi->info, t, &wi->tsi);
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_local_reference, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_local_reference, wi,
			 NULL);
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference, wi,
			 NULL);
	    }
	}
      /* Finally rewrite the innermost base object itself, in place.  */
      wi->val_only = false;
      walk_tree (tp, convert_local_reference, wi, NULL);
      wi->val_only = save_val_only;
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    case OMP_PARALLEL:
      save_suppress = info->suppress_expansion;
      /* If any clause ended up referencing the frame, share the whole
	 frame object with the parallel region.  */
      if (convert_local_omp_clauses (&OMP_PARALLEL_CLAUSES (t), wi))
	{
	  tree c;
	  (void) get_frame_type (info);
	  c = build_omp_clause (OMP_CLAUSE_SHARED);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (t);
	  OMP_PARALLEL_CLAUSES (t) = c;
	}

      /* Collect locals created while walking the parallel body on a
	 fresh chain, so they get declared inside that body.  */
      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_local_reference, info, &OMP_PARALLEL_BODY (t));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain, OMP_PARALLEL_BODY (t), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (&OMP_FOR_CLAUSES (t), wi);
      walk_omp_for (convert_local_reference, info, t);
      walk_body (convert_local_reference, info, &OMP_FOR_BODY (t));
      info->suppress_expansion = save_suppress;
      break;

    case OMP_SECTIONS:
    case OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (&OMP_CLAUSES (t), wi);
      walk_body (convert_local_reference, info, &OMP_BODY (t));
      info->suppress_expansion = save_suppress;
      break;

    case OMP_SECTION:
    case OMP_MASTER:
    case OMP_ORDERED:
      /* These constructs have no clauses; just walk the body.  */
      walk_body (convert_local_reference, info, &OMP_BODY (t));
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1446
/* Rewrite the decls referenced by the OpenMP clause list *PCLAUSES for
   use in WI->info->context after frame conversion.  Data-clause decls
   that were moved into the local frame are replaced by their local
   debug decls and marked in suppress_expansion so the subsequent body
   walk leaves those references alone.  Returns true if any clause
   needs the frame object itself.  */

static bool
convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *info = wi->info;
  bool need_frame = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  /* Work on a copy so the suppression only applies within this
     construct; the caller restores the previous bitmap afterwards.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LASTPRIVATE:
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (clause);
	  /* By-pointer decls keep using the original in the parent,
	     so only by-value frame members need rewriting.  */
	  if (decl_function_context (decl) == info->context
	      && !use_pointer_in_frame (decl))
	    {
	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
	      if (field)
		{
		  bitmap_set_bit (new_suppress, DECL_UID (decl));
		  OMP_CLAUSE_DECL (clause)
		    = get_local_debug_decl (info, decl, field);
		  need_frame = true;
		}
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	  /* These clauses carry an expression operand that may itself
	     contain frame references; rewrite it as an rvalue.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_local_reference (&OMP_CLAUSE_OPERAND (clause, 0), &dummy, wi);
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	  /* Nothing to rewrite in these clauses.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  return need_frame;
}
1510
/* Called via walk_function+walk_tree, rewrite all GOTO_EXPRs that
   reference labels from outer functions.  The rewrite will be a
   call to __builtin_nonlocal_goto.  */

static tree
convert_nl_goto_reference (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *info = wi->info, *i;
  tree t = *tp, label, new_label, target_context, x, field;
  void **slot;

  *walk_subtrees = 0;
  /* Only GOTO_EXPRs targeting a LABEL_DECL declared in some other
     (outer) function need rewriting; everything else is left alone.  */
  if (TREE_CODE (t) != GOTO_EXPR)
    return NULL_TREE;
  label = GOTO_DESTINATION (t);
  if (TREE_CODE (label) != LABEL_DECL)
    return NULL_TREE;
  target_context = decl_function_context (label);
  if (target_context == info->context)
    return NULL_TREE;

  /* Find the nesting_info of the function that declares the label.  */
  for (i = info->outer; target_context != i->context; i = i->outer)
    continue;

  /* The original user label may also be used for a normal goto, therefore
     we must create a new label that will actually receive the abnormal
     control transfer.  This new label will be marked LABEL_NONLOCAL; this
     mark will trigger proper behavior in the cfg, as well as cause the
     (hairy target-specific) non-local goto receiver code to be generated
     when we expand rtl.  Enter this association into var_map so that we
     can insert the new label into the IL during a second pass.  */
  slot = pointer_map_insert (i->var_map, label);
  if (*slot == NULL)
    {
      new_label = create_artificial_label ();
      DECL_NONLOCAL (new_label) = 1;
      *slot = new_label;
    }
  else
    new_label = *slot;

  /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field).  */
  field = get_nl_goto_field (i);
  x = get_frame_field (info, target_context, field, &wi->tsi);
  x = build_addr (x, target_context);
  x = tsi_gimplify_val (info, x, &wi->tsi);
  x = build_call_expr (implicit_built_in_decls[BUILT_IN_NONLOCAL_GOTO], 2,
		       build_addr (new_label, target_context), x);

  /* Replace the goto statement itself with the builtin call, keeping
     the original statement's source location.  */
  SET_EXPR_LOCUS (x, EXPR_LOCUS (tsi_stmt (wi->tsi)));
  *tsi_stmt_ptr (wi->tsi) = x;

  return NULL_TREE;
}
1566
/* Called via walk_function+walk_tree, rewrite all LABEL_EXPRs that
   are referenced via nonlocal goto from a nested function.  The rewrite
   will involve installing a newly generated DECL_NONLOCAL label, and
   (potentially) a branch around the rtl gunk that is assumed to be
   attached to such a label.  */

static tree
convert_nl_goto_receiver (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *info = wi->info;
  tree t = *tp, label, new_label, x;
  tree_stmt_iterator tmp_tsi;
  void **slot;

  *walk_subtrees = 0;
  if (TREE_CODE (t) != LABEL_EXPR)
    return NULL_TREE;
  label = LABEL_EXPR_LABEL (t);

  /* convert_nl_goto_reference recorded in var_map the labels that need
     a nonlocal receiver; skip all other labels.  */
  slot = pointer_map_contains (info->var_map, label);
  if (!slot)
    return NULL_TREE;

  /* If there's any possibility that the previous statement falls through,
     then we must branch around the new non-local label.  */
  tmp_tsi = wi->tsi;
  tsi_prev (&tmp_tsi);
  if (tsi_end_p (tmp_tsi) || block_may_fallthru (tsi_stmt (tmp_tsi)))
    {
      x = build1 (GOTO_EXPR, void_type_node, label);
      tsi_link_before (&wi->tsi, x, TSI_SAME_STMT);
    }

  /* Install the new DECL_NONLOCAL label just before the original one.  */
  new_label = (tree) *slot;
  x = build1 (LABEL_EXPR, void_type_node, new_label);
  tsi_link_before (&wi->tsi, x, TSI_SAME_STMT);

  return NULL_TREE;
}
1607
/* Called via walk_function+walk_tree, rewrite all references to addresses
   of nested functions that require the use of trampolines.  The rewrite
   will involve a reference to a trampoline generated for the occasion.  */

static tree
convert_tramp_reference (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *info = wi->info, *i;
  tree t = *tp, decl, target_context, x;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case ADDR_EXPR:
      /* Build
	   T.1 = &CHAIN->tramp;
	   T.2 = __builtin_adjust_trampoline (T.1);
	   T.3 = (func_type)T.2;
      */

      decl = TREE_OPERAND (t, 0);
      if (TREE_CODE (decl) != FUNCTION_DECL)
	break;

      /* Only need to process nested functions.  */
      target_context = decl_function_context (decl);
      if (!target_context)
	break;

      /* If the nested function doesn't use a static chain, then
	 it doesn't need a trampoline.  */
      if (DECL_NO_STATIC_CHAIN (decl))
	break;

      /* Lookup the immediate parent of the callee, as that's where
	 we need to insert the trampoline.  */
      for (i = info; i->context != target_context; i = i->outer)
	continue;
      x = lookup_tramp_for_decl (i, decl, INSERT);

      /* Compute the address of the field holding the trampoline.  */
      x = get_frame_field (info, target_context, x, &wi->tsi);
      x = build_addr (x, target_context);
      x = tsi_gimplify_val (info, x, &wi->tsi);

      /* Do machine-specific ugliness.  Normally this will involve
	 computing extra alignment, but it can really be anything.  */
      x = build_call_expr (implicit_built_in_decls[BUILT_IN_ADJUST_TRAMPOLINE],
			   1, x);
      x = init_tmp_var (info, x, &wi->tsi);

      /* Cast back to the proper function type.  */
      x = build1 (NOP_EXPR, TREE_TYPE (t), x);
      x = init_tmp_var (info, x, &wi->tsi);

      /* Replace the original &FUNC with the trampoline pointer.  */
      *tp = x;
      break;

    case CALL_EXPR:
      /* Only walk call arguments, lest we generate trampolines for
	 direct calls.  */
      {
	int nargs = call_expr_nargs (t);
	int i;   /* NOTE: shadows the outer nesting_info *i.  */
	for (i = 0; i < nargs; i++)
	  walk_tree (&CALL_EXPR_ARG (t, i), convert_tramp_reference, wi, NULL);
      }
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	*walk_subtrees = 1;
      break;
    }

  return NULL_TREE;
}
1686
/* Called via walk_function+walk_tree, rewrite all CALL_EXPRs that
   reference nested functions to make sure that the static chain is
   set up properly for the call.  */

static tree
convert_call_expr (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *info = wi->info;
  tree t = *tp, decl, target_context;
  char save_static_chain_added;
  int i;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case CALL_EXPR:
      decl = get_callee_fndecl (t);
      if (!decl)
	break;
      target_context = decl_function_context (decl);
      if (target_context && !DECL_NO_STATIC_CHAIN (decl))
	{
	  CALL_EXPR_STATIC_CHAIN (t)
	    = get_static_chain (info, target_context, &wi->tsi);
	  /* Record what the chain was built from: bit 0 when the callee
	     is our own nested child (frame address), bit 1 when the
	     callee belongs to an outer context (chain pointer).  */
	  info->static_chain_added
	    |= (1 << (info->context != target_context));
	}
      break;

    case RETURN_EXPR:
    case GIMPLE_MODIFY_STMT:
    case WITH_SIZE_EXPR:
      /* Only return modify and with_size_expr may contain calls.  */
      *walk_subtrees = 1;
      break;

    case OMP_PARALLEL:
      /* Track chain/frame use inside the parallel body separately, so
	 the corresponding decls can be added to its clause list.  */
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_call_expr, info, &OMP_PARALLEL_BODY (t));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  /* Bit 1 -> chain decl (firstprivate), bit 0 -> frame decl
	     (shared); see the CALL_EXPR case above.  */
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = OMP_PARALLEL_CLAUSES (t); c; c = OMP_CLAUSE_CHAIN (c))
	    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      c = build_omp_clause (i ? OMP_CLAUSE_FIRSTPRIVATE
				    : OMP_CLAUSE_SHARED);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (t);
	      OMP_PARALLEL_CLAUSES (t) = c;
	    }
	}
      /* Propagate the body's chain use to the enclosing context.  */
      info->static_chain_added |= save_static_chain_added;
      break;

    case OMP_FOR:
      walk_body (convert_call_expr, info, &OMP_FOR_PRE_BODY (t));
      /* FALLTHRU */
    case OMP_SECTIONS:
    case OMP_SECTION:
    case OMP_SINGLE:
    case OMP_MASTER:
    case OMP_ORDERED:
    case OMP_CRITICAL:
      walk_body (convert_call_expr, info, &OMP_BODY (t));
      break;

    default:
      break;
    }

  return NULL_TREE;
}
1770
1771 /* Walk the nesting tree starting with ROOT, depth first. Convert all
1772 trampolines and call expressions. On the way back up, determine if
1773 a nested function actually uses its static chain; if not, remember that. */
1774
1775 static void
1776 convert_all_function_calls (struct nesting_info *root)
1777 {
1778 do
1779 {
1780 if (root->inner)
1781 convert_all_function_calls (root->inner);
1782
1783 walk_function (convert_tramp_reference, root);
1784 walk_function (convert_call_expr, root);
1785
1786 /* If the function does not use a static chain, then remember that. */
1787 if (root->outer && !root->chain_decl && !root->chain_field)
1788 DECL_NO_STATIC_CHAIN (root->context) = 1;
1789 else
1790 gcc_assert (!DECL_NO_STATIC_CHAIN (root->context));
1791
1792 root = root->next;
1793 }
1794 while (root);
1795 }
1796
/* Do "everything else" to clean up or complete state collected by the
   various walking passes -- lay out the types and decls, generate code
   to initialize the frame decl, store critical expressions in the
   struct function for rtl to find.  */

static void
finalize_nesting_tree_1 (struct nesting_info *root)
{
  tree stmt_list = NULL;
  tree context = root->context;
  struct function *sf;

  /* If we created a non-local frame type or decl, we need to lay them
     out at this time.  */
  if (root->frame_type)
    {
      /* In some cases the frame type will trigger the -Wpadded warning.
	 This is not helpful; suppress it.  */
      int save_warn_padded = warn_padded;
      warn_padded = 0;
      layout_type (root->frame_type);
      warn_padded = save_warn_padded;
      layout_decl (root->frame_decl, 0);
    }

  /* If any parameters were referenced non-locally, then we need to
     insert a copy.  Likewise, if any variables were referenced by
     pointer, we need to initialize the address.  */
  if (root->any_parm_remapped)
    {
      tree p;
      for (p = DECL_ARGUMENTS (context); p ; p = TREE_CHAIN (p))
	{
	  tree field, x, y;

	  field = lookup_field_for_decl (root, p, NO_INSERT);
	  if (!field)
	    continue;

	  /* Store either the parameter's value or, for by-pointer
	     entries, its address into the frame field.  */
	  if (use_pointer_in_frame (p))
	    x = build_addr (p, context);
	  else
	    x = p;

	  y = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  x = build_gimple_modify_stmt (y, x);
	  append_to_statement_list (x, &stmt_list);
	}
    }

  /* If a chain_field was created, then it needs to be initialized
     from chain_decl.  */
  if (root->chain_field)
    {
      tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
		       root->frame_decl, root->chain_field, NULL_TREE);
      x = build_gimple_modify_stmt (x, get_chain_decl (root));
      append_to_statement_list (x, &stmt_list);
    }

  /* If trampolines were created, then we need to initialize them.  */
  if (root->any_tramp_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree arg1, arg2, arg3, x, field;

	  field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  /* Static chain value for the trampoline: the address of our
	     frame, or NULL when the nested function needs no chain.  */
	  if (DECL_NO_STATIC_CHAIN (i->context))
	    arg3 = null_pointer_node;
	  else
	    arg3 = build_addr (root->frame_decl, context);

	  /* Address of the nested function itself.  */
	  arg2 = build_addr (i->context, context);

	  /* Address of the trampoline field within the frame.  */
	  x = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  arg1 = build_addr (x, context);

	  x = implicit_built_in_decls[BUILT_IN_INIT_TRAMPOLINE];
	  x = build_call_expr (x, 3, arg1, arg2, arg3);
	  append_to_statement_list (x, &stmt_list);
	}
    }

  /* If we created initialization statements, insert them.  */
  if (stmt_list)
    {
      annotate_all_with_locus (&stmt_list,
			       DECL_SOURCE_LOCATION (context));
      /* Prepend the initializations to the existing function body.  */
      append_to_statement_list (BIND_EXPR_BODY (DECL_SAVED_TREE (context)),
				&stmt_list);
      BIND_EXPR_BODY (DECL_SAVED_TREE (context)) = stmt_list;
    }

  /* If a chain_decl was created, then it needs to be registered with
     struct function so that it gets initialized from the static chain
     register at the beginning of the function.  */
  sf = DECL_STRUCT_FUNCTION (root->context);
  sf->static_chain_decl = root->chain_decl;

  /* Similarly for the non-local goto save area.  */
  if (root->nl_goto_field)
    {
      sf->nonlocal_goto_save_area
	= get_frame_field (root, context, root->nl_goto_field, NULL);
      sf->has_nonlocal_label = 1;
    }

  /* Make sure all new local variables get inserted into the
     proper BIND_EXPR.  */
  if (root->new_local_var_chain)
    declare_vars (root->new_local_var_chain, DECL_SAVED_TREE (root->context),
		  false);
  if (root->debug_var_chain)
    declare_vars (root->debug_var_chain, DECL_SAVED_TREE (root->context),
		  true);

  /* Dump the translated tree function.  */
  dump_function (TDI_nested, root->context);
}
1923
1924 static void
1925 finalize_nesting_tree (struct nesting_info *root)
1926 {
1927 do
1928 {
1929 if (root->inner)
1930 finalize_nesting_tree (root->inner);
1931 finalize_nesting_tree_1 (root);
1932 root = root->next;
1933 }
1934 while (root);
1935 }
1936
1937 /* Unnest the nodes and pass them to cgraph. */
1938
1939 static void
1940 unnest_nesting_tree_1 (struct nesting_info *root)
1941 {
1942 struct cgraph_node *node = cgraph_node (root->context);
1943
1944 /* For nested functions update the cgraph to reflect unnesting.
1945 We also delay finalizing of these functions up to this point. */
1946 if (node->origin)
1947 {
1948 cgraph_unnest_node (cgraph_node (root->context));
1949 cgraph_finalize_function (root->context, true);
1950 }
1951 }
1952
1953 static void
1954 unnest_nesting_tree (struct nesting_info *root)
1955 {
1956 do
1957 {
1958 if (root->inner)
1959 unnest_nesting_tree (root->inner);
1960 unnest_nesting_tree_1 (root);
1961 root = root->next;
1962 }
1963 while (root);
1964 }
1965
1966 /* Free the data structures allocated during this pass. */
1967
1968 static void
1969 free_nesting_tree (struct nesting_info *root)
1970 {
1971 struct nesting_info *next;
1972 do
1973 {
1974 if (root->inner)
1975 free_nesting_tree (root->inner);
1976 pointer_map_destroy (root->var_map);
1977 pointer_map_destroy (root->field_map);
1978 next = root->next;
1979 free (root);
1980 root = next;
1981 }
1982 while (root);
1983 }
1984
/* Main entry point for this pass.  Process FNDECL and all of its nested
   subroutines and turn them into something less tightly bound.  */

void
lower_nested_functions (tree fndecl)
{
  struct cgraph_node *cgn;
  struct nesting_info *root;

  /* If there are no nested functions, there's nothing to do.  */
  cgn = cgraph_node (fndecl);
  if (!cgn->nested)
    return;

  bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
  root = create_nesting_tree (cgn);
  /* First rewrite references into outer frames, then the references
     that inner functions forced into each local frame.  */
  walk_all_functions (convert_nonlocal_reference, root);
  walk_all_functions (convert_local_reference, root);
  /* Lower non-local gotos: the goto sites record labels in var_map,
     which the receiver pass then consumes.  */
  walk_all_functions (convert_nl_goto_reference, root);
  walk_all_functions (convert_nl_goto_receiver, root);
  convert_all_function_calls (root);
  finalize_nesting_tree (root);
  unnest_nesting_tree (root);
  free_nesting_tree (root);
  bitmap_obstack_release (&nesting_info_bitmap_obstack);
}
2011
2012 #include "gt-tree-nested.h"
This page took 0.131346 seconds and 6 git commands to generate.