]> gcc.gnu.org Git - gcc.git/blob - gcc/tree-nested.c
cgraphunit.c (cgraph_lower_function): Revert last change.
[gcc.git] / gcc / tree-nested.c
1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "function.h"
29 #include "tree-dump.h"
30 #include "tree-inline.h"
31 #include "gimple.h"
32 #include "tree-iterator.h"
33 #include "tree-flow.h"
34 #include "cgraph.h"
35 #include "expr.h"
36 #include "langhooks.h"
37 #include "pointer-set.h"
38 #include "ggc.h"
39
40
41 /* The object of this pass is to lower the representation of a set of nested
42 functions in order to expose all of the gory details of the various
43 nonlocal references. We want to do this sooner rather than later, in
44 order to give us more freedom in emitting all of the functions in question.
45
46 Back in olden times, when gcc was young, we developed an insanely
47 complicated scheme whereby variables which were referenced nonlocally
48 were forced to live in the stack of the declaring function, and then
49 the nested functions magically discovered where these variables were
50 placed. In order for this scheme to function properly, it required
51 that the outer function be partially expanded, then we switch to
52 compiling the inner function, and once done with those we switch back
53 to compiling the outer function. Such delicate ordering requirements
54 makes it difficult to do whole translation unit optimizations
55 involving such functions.
56
57 The implementation here is much more direct. Everything that can be
58 referenced by an inner function is a member of an explicitly created
59 structure herein called the "nonlocal frame struct". The incoming
60 static chain for a nested function is a pointer to this struct in
61 the parent. In this way, we settle on known offsets from a known
62 base, and so are decoupled from the logic that places objects in the
63 function's stack frame. More importantly, we don't have to wait for
64 that to happen -- since the compilation of the inner function is no
65 longer tied to a real stack frame, the nonlocal frame struct can be
66 allocated anywhere. Which means that the outer function is now
67 inlinable.
68
69 Theory of operation here is very simple. Iterate over all the
70 statements in all the functions (depth first) several times,
71 allocating structures and fields on demand. In general we want to
72 examine inner functions first, so that we can avoid making changes
73 to outer functions which are unnecessary.
74
75 The order of the passes matters a bit, in that later passes will be
76 skipped if it is discovered that the functions don't actually interact
77 at all. That is, they're nested in the lexical sense but could have
78 been written as independent functions without change. */
79
80
/* Per-function bookkeeping for the nesting tree built by
   create_nesting_tree; one node per (possibly nested) function.  */

struct nesting_info
{
  struct nesting_info *outer;       /* Enclosing function's node, or NULL.  */
  struct nesting_info *inner;       /* Head of list of directly nested fns.  */
  struct nesting_info *next;        /* Next sibling at the same depth.  */

  struct pointer_map_t *field_map;  /* DECL -> FIELD_DECL in frame_type.  */
  struct pointer_map_t *var_map;    /* DECL -> local replacement/tramp field.  */
  bitmap suppress_expansion;        /* DECL_UIDs whose frame expansion is
                                       suppressed (see the walkers).  */

  tree context;                     /* The FUNCTION_DECL this node describes.  */
  tree new_local_var_chain;         /* Temporaries created for CONTEXT, chained
                                       via TREE_CHAIN (see create_tmp_var_for).  */
  tree debug_var_chain;             /* Debug decls with DECL_VALUE_EXPR set
                                       (see get_nonlocal_debug_decl).  */
  tree frame_type;                  /* The "nonlocal frame struct" RECORD_TYPE.  */
  tree frame_decl;                  /* CONTEXT's local instance of frame_type.  */
  tree chain_field;                 /* "__chain" field inside frame_type.  */
  tree chain_decl;                  /* Incoming static-chain PARM_DECL.  */
  tree nl_goto_field;               /* "__nl_goto_buf" field, if needed.  */

  bool any_parm_remapped;           /* Some PARM_DECL was moved into the frame.  */
  bool any_tramp_created;           /* Some trampoline field was created.  */
  char static_chain_added;          /* Flags recording static-chain additions;
                                       NOTE(review): exact bit meanings set by
                                       code outside this chunk — confirm there.  */
};
104
105
106 /* Iterate over the nesting tree, starting with ROOT, depth first. */
107
108 static inline struct nesting_info *
109 iter_nestinfo_start (struct nesting_info *root)
110 {
111 while (root->inner)
112 root = root->inner;
113 return root;
114 }
115
116 static inline struct nesting_info *
117 iter_nestinfo_next (struct nesting_info *node)
118 {
119 if (node->next)
120 return iter_nestinfo_start (node->next);
121 return node->outer;
122 }
123
/* Depth-first walk over every nesting_info reachable from ROOT, using the
   iterator helpers above.  I is the loop variable.  */
#define FOR_EACH_NEST_INFO(I, ROOT) \
  for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))

/* Obstack used for the bitmaps in the struct above.  */
static struct bitmap_obstack nesting_info_bitmap_obstack;


/* We're working in so many different function contexts simultaneously,
   that create_tmp_var is dangerous.  Prevent mishap.  Callers must use
   create_tmp_var_for below, which records the nesting level.  */
#define create_tmp_var cant_use_create_tmp_var_here_dummy
134
135 /* Like create_tmp_var, except record the variable for registration at
136 the given nesting level. */
137
138 static tree
139 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
140 {
141 tree tmp_var;
142
143 /* If the type is of variable size or a type which must be created by the
144 frontend, something is wrong. Note that we explicitly allow
145 incomplete types here, since we create them ourselves here. */
146 gcc_assert (!TREE_ADDRESSABLE (type));
147 gcc_assert (!TYPE_SIZE_UNIT (type)
148 || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
149
150 tmp_var = create_tmp_var_raw (type, prefix);
151 DECL_CONTEXT (tmp_var) = info->context;
152 TREE_CHAIN (tmp_var) = info->new_local_var_chain;
153 DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
154 if (TREE_CODE (type) == COMPLEX_TYPE
155 || TREE_CODE (type) == VECTOR_TYPE)
156 DECL_GIMPLE_REG_P (tmp_var) = 1;
157
158 info->new_local_var_chain = tmp_var;
159
160 return tmp_var;
161 }
162
163 /* Take the address of EXP to be used within function CONTEXT.
164 Mark it for addressability as necessary. */
165
166 tree
167 build_addr (tree exp, tree context)
168 {
169 tree base = exp;
170 tree save_context;
171 tree retval;
172
173 while (handled_component_p (base))
174 base = TREE_OPERAND (base, 0);
175
176 if (DECL_P (base))
177 TREE_ADDRESSABLE (base) = 1;
178
179 /* Building the ADDR_EXPR will compute a set of properties for
180 that ADDR_EXPR. Those properties are unfortunately context
181 specific, i.e., they are dependent on CURRENT_FUNCTION_DECL.
182
183 Temporarily set CURRENT_FUNCTION_DECL to the desired context,
184 build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL. That
185 way the properties are for the ADDR_EXPR are computed properly. */
186 save_context = current_function_decl;
187 current_function_decl = context;
188 retval = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
189 current_function_decl = save_context;
190 return retval;
191 }
192
193 /* Insert FIELD into TYPE, sorted by alignment requirements. */
194
195 void
196 insert_field_into_struct (tree type, tree field)
197 {
198 tree *p;
199
200 DECL_CONTEXT (field) = type;
201
202 for (p = &TYPE_FIELDS (type); *p ; p = &TREE_CHAIN (*p))
203 if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
204 break;
205
206 TREE_CHAIN (field) = *p;
207 *p = field;
208
209 /* Set correct alignment for frame struct type. */
210 if (TYPE_ALIGN (type) < DECL_ALIGN (field))
211 TYPE_ALIGN (type) = DECL_ALIGN (field);
212 }
213
214 /* Build or return the RECORD_TYPE that describes the frame state that is
215 shared between INFO->CONTEXT and its nested functions. This record will
216 not be complete until finalize_nesting_tree; up until that point we'll
217 be adding fields as necessary.
218
219 We also build the DECL that represents this frame in the function. */
220
221 static tree
222 get_frame_type (struct nesting_info *info)
223 {
224 tree type = info->frame_type;
225 if (!type)
226 {
227 char *name;
228
229 type = make_node (RECORD_TYPE);
230
231 name = concat ("FRAME.",
232 IDENTIFIER_POINTER (DECL_NAME (info->context)),
233 NULL);
234 TYPE_NAME (type) = get_identifier (name);
235 free (name);
236
237 info->frame_type = type;
238 info->frame_decl = create_tmp_var_for (info, type, "FRAME");
239
240 /* ??? Always make it addressable for now, since it is meant to
241 be pointed to by the static chain pointer. This pessimizes
242 when it turns out that no static chains are needed because
243 the nested functions referencing non-local variables are not
244 reachable, but the true pessimization is to create the non-
245 local frame structure in the first place. */
246 TREE_ADDRESSABLE (info->frame_decl) = 1;
247 }
248 return type;
249 }
250
251 /* Return true if DECL should be referenced by pointer in the non-local
252 frame structure. */
253
254 static bool
255 use_pointer_in_frame (tree decl)
256 {
257 if (TREE_CODE (decl) == PARM_DECL)
258 {
259 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
260 sized decls, and inefficient to copy large aggregates. Don't bother
261 moving anything but scalar variables. */
262 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
263 }
264 else
265 {
266 /* Variable sized types make things "interesting" in the frame. */
267 return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
268 }
269 }
270
271 /* Given DECL, a non-locally accessed variable, find or create a field
272 in the non-local frame structure for the given nesting context. */
273
274 static tree
275 lookup_field_for_decl (struct nesting_info *info, tree decl,
276 enum insert_option insert)
277 {
278 void **slot;
279
280 if (insert == NO_INSERT)
281 {
282 slot = pointer_map_contains (info->field_map, decl);
283 return slot ? (tree) *slot : NULL_TREE;
284 }
285
286 slot = pointer_map_insert (info->field_map, decl);
287 if (!*slot)
288 {
289 tree field = make_node (FIELD_DECL);
290 DECL_NAME (field) = DECL_NAME (decl);
291
292 if (use_pointer_in_frame (decl))
293 {
294 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
295 DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
296 DECL_NONADDRESSABLE_P (field) = 1;
297 }
298 else
299 {
300 TREE_TYPE (field) = TREE_TYPE (decl);
301 DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
302 DECL_ALIGN (field) = DECL_ALIGN (decl);
303 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
304 TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
305 DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
306 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
307 }
308
309 insert_field_into_struct (get_frame_type (info), field);
310 *slot = field;
311
312 if (TREE_CODE (decl) == PARM_DECL)
313 info->any_parm_remapped = true;
314 }
315
316 return (tree) *slot;
317 }
318
319 /* Build or return the variable that holds the static chain within
320 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
321
322 static tree
323 get_chain_decl (struct nesting_info *info)
324 {
325 tree decl = info->chain_decl;
326
327 if (!decl)
328 {
329 tree type;
330
331 type = get_frame_type (info->outer);
332 type = build_pointer_type (type);
333
334 /* Note that this variable is *not* entered into any BIND_EXPR;
335 the construction of this variable is handled specially in
336 expand_function_start and initialize_inlined_parameters.
337 Note also that it's represented as a parameter. This is more
338 close to the truth, since the initial value does come from
339 the caller. */
340 decl = build_decl (DECL_SOURCE_LOCATION (info->context),
341 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
342 DECL_ARTIFICIAL (decl) = 1;
343 DECL_IGNORED_P (decl) = 1;
344 TREE_USED (decl) = 1;
345 DECL_CONTEXT (decl) = info->context;
346 DECL_ARG_TYPE (decl) = type;
347
348 /* Tell tree-inline.c that we never write to this variable, so
349 it can copy-prop the replacement value immediately. */
350 TREE_READONLY (decl) = 1;
351
352 info->chain_decl = decl;
353
354 if (dump_file
355 && (dump_flags & TDF_DETAILS)
356 && !DECL_STATIC_CHAIN (info->context))
357 fprintf (dump_file, "Setting static-chain for %s\n",
358 lang_hooks.decl_printable_name (info->context, 2));
359
360 DECL_STATIC_CHAIN (info->context) = 1;
361 }
362 return decl;
363 }
364
365 /* Build or return the field within the non-local frame state that holds
366 the static chain for INFO->CONTEXT. This is the way to walk back up
367 multiple nesting levels. */
368
369 static tree
370 get_chain_field (struct nesting_info *info)
371 {
372 tree field = info->chain_field;
373
374 if (!field)
375 {
376 tree type = build_pointer_type (get_frame_type (info->outer));
377
378 field = make_node (FIELD_DECL);
379 DECL_NAME (field) = get_identifier ("__chain");
380 TREE_TYPE (field) = type;
381 DECL_ALIGN (field) = TYPE_ALIGN (type);
382 DECL_NONADDRESSABLE_P (field) = 1;
383
384 insert_field_into_struct (get_frame_type (info), field);
385
386 info->chain_field = field;
387
388 if (dump_file
389 && (dump_flags & TDF_DETAILS)
390 && !DECL_STATIC_CHAIN (info->context))
391 fprintf (dump_file, "Setting static-chain for %s\n",
392 lang_hooks.decl_printable_name (info->context, 2));
393
394 DECL_STATIC_CHAIN (info->context) = 1;
395 }
396 return field;
397 }
398
399 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
400
401 static tree
402 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
403 gimple call)
404 {
405 tree t;
406
407 t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
408 gimple_call_set_lhs (call, t);
409 if (! gsi_end_p (*gsi))
410 gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
411 gsi_insert_before (gsi, call, GSI_SAME_STMT);
412
413 return t;
414 }
415
416
417 /* Copy EXP into a temporary. Allocate the temporary in the context of
418 INFO and insert the initialization statement before GSI. */
419
420 static tree
421 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
422 {
423 tree t;
424 gimple stmt;
425
426 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
427 stmt = gimple_build_assign (t, exp);
428 if (! gsi_end_p (*gsi))
429 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
430 gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
431
432 return t;
433 }
434
435
436 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
437
438 static tree
439 gsi_gimplify_val (struct nesting_info *info, tree exp,
440 gimple_stmt_iterator *gsi)
441 {
442 if (is_gimple_val (exp))
443 return exp;
444 else
445 return init_tmp_var (info, exp, gsi);
446 }
447
448 /* Similarly, but copy from the temporary and insert the statement
449 after the iterator. */
450
451 static tree
452 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
453 {
454 tree t;
455 gimple stmt;
456
457 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
458 stmt = gimple_build_assign (exp, t);
459 if (! gsi_end_p (*gsi))
460 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
461 gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
462
463 return t;
464 }
465
466 /* Build or return the type used to represent a nested function trampoline. */
467
468 static GTY(()) tree trampoline_type;
469
470 static tree
471 get_trampoline_type (struct nesting_info *info)
472 {
473 unsigned align, size;
474 tree t;
475
476 if (trampoline_type)
477 return trampoline_type;
478
479 align = TRAMPOLINE_ALIGNMENT;
480 size = TRAMPOLINE_SIZE;
481
482 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
483 then allocate extra space so that we can do dynamic alignment. */
484 if (align > STACK_BOUNDARY)
485 {
486 size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
487 align = STACK_BOUNDARY;
488 }
489
490 t = build_index_type (build_int_cst (NULL_TREE, size - 1));
491 t = build_array_type (char_type_node, t);
492 t = build_decl (DECL_SOURCE_LOCATION (info->context),
493 FIELD_DECL, get_identifier ("__data"), t);
494 DECL_ALIGN (t) = align;
495 DECL_USER_ALIGN (t) = 1;
496
497 trampoline_type = make_node (RECORD_TYPE);
498 TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
499 TYPE_FIELDS (trampoline_type) = t;
500 layout_type (trampoline_type);
501 DECL_CONTEXT (t) = trampoline_type;
502
503 return trampoline_type;
504 }
505
506 /* Given DECL, a nested function, find or create a field in the non-local
507 frame structure for a trampoline for this function. */
508
509 static tree
510 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
511 enum insert_option insert)
512 {
513 void **slot;
514
515 if (insert == NO_INSERT)
516 {
517 slot = pointer_map_contains (info->var_map, decl);
518 return slot ? (tree) *slot : NULL_TREE;
519 }
520
521 slot = pointer_map_insert (info->var_map, decl);
522 if (!*slot)
523 {
524 tree field = make_node (FIELD_DECL);
525 DECL_NAME (field) = DECL_NAME (decl);
526 TREE_TYPE (field) = get_trampoline_type (info);
527 TREE_ADDRESSABLE (field) = 1;
528
529 insert_field_into_struct (get_frame_type (info), field);
530 *slot = field;
531
532 info->any_tramp_created = true;
533 }
534
535 return (tree) *slot;
536 }
537
538 /* Build or return the field within the non-local frame state that holds
539 the non-local goto "jmp_buf". The buffer itself is maintained by the
540 rtl middle-end as dynamic stack space is allocated. */
541
542 static tree
543 get_nl_goto_field (struct nesting_info *info)
544 {
545 tree field = info->nl_goto_field;
546 if (!field)
547 {
548 unsigned size;
549 tree type;
550
551 /* For __builtin_nonlocal_goto, we need N words. The first is the
552 frame pointer, the rest is for the target's stack pointer save
553 area. The number of words is controlled by STACK_SAVEAREA_MODE;
554 not the best interface, but it'll do for now. */
555 if (Pmode == ptr_mode)
556 type = ptr_type_node;
557 else
558 type = lang_hooks.types.type_for_mode (Pmode, 1);
559
560 size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
561 size = size / GET_MODE_SIZE (Pmode);
562 size = size + 1;
563
564 type = build_array_type
565 (type, build_index_type (build_int_cst (NULL_TREE, size)));
566
567 field = make_node (FIELD_DECL);
568 DECL_NAME (field) = get_identifier ("__nl_goto_buf");
569 TREE_TYPE (field) = type;
570 DECL_ALIGN (field) = TYPE_ALIGN (type);
571 TREE_ADDRESSABLE (field) = 1;
572
573 insert_field_into_struct (get_frame_type (info), field);
574
575 info->nl_goto_field = field;
576 }
577
578 return field;
579 }
580
581 /* Invoke CALLBACK on all statements of GIMPLE sequence SEQ. */
582
583 static void
584 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
585 struct nesting_info *info, gimple_seq seq)
586 {
587 struct walk_stmt_info wi;
588
589 memset (&wi, 0, sizeof (wi));
590 wi.info = info;
591 wi.val_only = true;
592 walk_gimple_seq (seq, callback_stmt, callback_op, &wi);
593 }
594
595
596 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
597
598 static inline void
599 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
600 struct nesting_info *info)
601 {
602 walk_body (callback_stmt, callback_op, info, gimple_body (info->context));
603 }
604
/* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body.  */

static void
walk_gimple_omp_for (gimple for_stmt,
		     walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
		     struct nesting_info *info)
{
  struct walk_stmt_info wi;
  gimple_seq seq;
  tree t;
  size_t i;

  /* The pre-body is an ordinary statement sequence; walk it as such.  */
  walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body (for_stmt));

  /* Statements that CALLBACK_OP inserts while processing the loop-header
     expressions accumulate in SEQ via WI.GSI; they are appended to the
     pre-body below.  */
  seq = gimple_seq_alloc ();
  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  wi.gsi = gsi_last (seq);

  /* Process each collapsed loop dimension in turn.  */
  for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
    {
      /* The index is written, so don't let the walker force it into a
	 temporary (val_only = false).  */
      wi.val_only = false;
      walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
		 &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      /* The increment must be a binary expression; walk only its two
	 operands, operand 0 (the index) again as an lvalue.  */
      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (BINARY_CLASS_P (t));
      wi.val_only = false;
      walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
    }

  /* Attach any generated statements to the end of the pre-body, or
     release the unused sequence.  */
  if (gimple_seq_empty_p (seq))
    gimple_seq_free (seq);
  else
    {
      gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
      annotate_all_with_location (seq, gimple_location (for_stmt));
      gimple_seq_add_seq (&pre_body, seq);
      gimple_omp_for_set_pre_body (for_stmt, pre_body);
    }
}
658
659 /* Similarly for ROOT and all functions nested underneath, depth first. */
660
661 static void
662 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
663 struct nesting_info *root)
664 {
665 struct nesting_info *n;
666 FOR_EACH_NEST_INFO (n, root)
667 walk_function (callback_stmt, callback_op, n);
668 }
669
670
671 /* We have to check for a fairly pathological case. The operands of function
672 nested function are to be interpreted in the context of the enclosing
673 function. So if any are variably-sized, they will get remapped when the
674 enclosing function is inlined. But that remapping would also have to be
675 done in the types of the PARM_DECLs of the nested function, meaning the
676 argument types of that function will disagree with the arguments in the
677 calls to that function. So we'd either have to make a copy of the nested
678 function corresponding to each time the enclosing function was inlined or
679 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
680 function. The former is not practical. The latter would still require
681 detecting this case to know when to add the conversions. So, for now at
682 least, we don't inline such an enclosing function.
683
684 We have to do that check recursively, so here return indicating whether
685 FNDECL has such a nested function. ORIG_FN is the function we were
686 trying to inline to use for checking whether any argument is variably
687 modified by anything in it.
688
689 It would be better to do this in tree-inline.c so that we could give
690 the appropriate warning for why a function can't be inlined, but that's
691 too late since the nesting structure has already been flattened and
692 adding a flag just to record this fact seems a waste of a flag. */
693
694 static bool
695 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
696 {
697 struct cgraph_node *cgn = cgraph_node (fndecl);
698 tree arg;
699
700 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
701 {
702 for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = TREE_CHAIN (arg))
703 if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
704 return true;
705
706 if (check_for_nested_with_variably_modified (cgn->decl, orig_fndecl))
707 return true;
708 }
709
710 return false;
711 }
712
713 /* Construct our local datastructure describing the function nesting
714 tree rooted by CGN. */
715
716 static struct nesting_info *
717 create_nesting_tree (struct cgraph_node *cgn)
718 {
719 struct nesting_info *info = XCNEW (struct nesting_info);
720 info->field_map = pointer_map_create ();
721 info->var_map = pointer_map_create ();
722 info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
723 info->context = cgn->decl;
724
725 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
726 {
727 struct nesting_info *sub = create_nesting_tree (cgn);
728 sub->outer = info;
729 sub->next = info->inner;
730 info->inner = sub;
731 }
732
733 /* See discussion at check_for_nested_with_variably_modified for a
734 discussion of why this has to be here. */
735 if (check_for_nested_with_variably_modified (info->context, info->context))
736 DECL_UNINLINABLE (info->context) = true;
737
738 return info;
739 }
740
741 /* Return an expression computing the static chain for TARGET_CONTEXT
742 from INFO->CONTEXT. Insert any necessary computations before TSI. */
743
744 static tree
745 get_static_chain (struct nesting_info *info, tree target_context,
746 gimple_stmt_iterator *gsi)
747 {
748 struct nesting_info *i;
749 tree x;
750
751 if (info->context == target_context)
752 {
753 x = build_addr (info->frame_decl, target_context);
754 }
755 else
756 {
757 x = get_chain_decl (info);
758
759 for (i = info->outer; i->context != target_context; i = i->outer)
760 {
761 tree field = get_chain_field (i);
762
763 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
764 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
765 x = init_tmp_var (info, x, gsi);
766 }
767 }
768
769 return x;
770 }
771
772
773 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
774 frame as seen from INFO->CONTEXT. Insert any necessary computations
775 before GSI. */
776
777 static tree
778 get_frame_field (struct nesting_info *info, tree target_context,
779 tree field, gimple_stmt_iterator *gsi)
780 {
781 struct nesting_info *i;
782 tree x;
783
784 if (info->context == target_context)
785 {
786 /* Make sure frame_decl gets created. */
787 (void) get_frame_type (info);
788 x = info->frame_decl;
789 }
790 else
791 {
792 x = get_chain_decl (info);
793
794 for (i = info->outer; i->context != target_context; i = i->outer)
795 {
796 tree field = get_chain_field (i);
797
798 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
799 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
800 x = init_tmp_var (info, x, gsi);
801 }
802
803 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
804 }
805
806 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
807 return x;
808 }
809
810 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
811
/* A subroutine of convert_nonlocal_reference_op.  Create a local variable
   in the nested function with DECL_VALUE_EXPR set to reference the true
   variable in the parent function.  This is used both for debug info
   and in OpenMP lowering.  */

static tree
get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
{
  tree target_context;
  struct nesting_info *i;
  tree x, field, new_decl;
  void **slot;

  /* Reuse a previously built replacement for DECL, if any.  */
  slot = pointer_map_insert (info->var_map, decl);

  if (*slot)
    return (tree) *slot;

  target_context = decl_function_context (decl);

  /* A copy of the code in get_frame_field, but without the temporaries
     (the value-expr must be a single expression tree).  */
  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      i = info;
    }
  else
    {
      /* Chase one "__chain" link per nesting level up to DECL's frame.  */
      x = get_chain_decl (info);
      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  field = get_chain_field (i);
	  x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	}
      x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
    }

  /* Select DECL's field in the target frame; by-pointer members need an
     extra dereference.  */
  field = lookup_field_for_decl (i, decl, INSERT);
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  if (use_pointer_in_frame (decl))
    x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);

  /* ??? We should be remapping types as well, surely.  */
  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
  if ((TREE_CODE (decl) == PARM_DECL
       || TREE_CODE (decl) == RESULT_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && DECL_BY_REFERENCE (decl))
    DECL_BY_REFERENCE (new_decl) = 1;

  /* The replacement stands for the frame access built above.  */
  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;

  /* Cache the replacement and chain it for debug-info registration.  */
  *slot = new_decl;
  TREE_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  /* At -O0, also record non-local VLA types so their bounds stay
     available for debug info.  */
  if (!optimize
      && info->context != target_context
      && variably_modified_type_p (TREE_TYPE (decl), NULL))
    note_nonlocal_vla_type (info, TREE_TYPE (decl));

  return new_decl;
}
888
889
/* Callback for walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.

   The rewrite will involve some number of structure accesses back up
   the static chain.  E.g. for a variable FOO up one nesting level it'll
   be CHAIN->FOO.  For two levels it'll be CHAIN->__chain->FOO.  Further
   indirections apply to decls for which use_pointer_in_frame is true.  */

static tree
convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp;

  /* Default to not descending; cases that need subtrees walked either
     recurse explicitly or set *walk_subtrees back to 1.  */
  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) != info->context)
	{
	  tree x;
	  wi->changed = true;

	  /* Build (or reuse) the debug replacement decl; unless this
	     decl's expansion is suppressed, replace it outright with
	     the frame access expression.  */
	  x = get_nonlocal_debug_decl (info, t);
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    {
	      tree target_context = decl_function_context (t);
	      struct nesting_info *i;
	      /* Find the nesting_info for the function that owns T.  */
	      for (i = info->outer; i->context != target_context; i = i->outer)
		continue;
	      x = lookup_field_for_decl (i, t, INSERT);
	      x = get_frame_field (info, target_context, x, &wi->gsi);
	      /* By-pointer frame members need one more dereference.  */
	      if (use_pointer_in_frame (t))
		{
		  x = init_tmp_var (info, x, &wi->gsi);
		  x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
		}
	    }

	  /* In contexts that need a simple value, spill through a
	     temporary: copy out for a read, copy back in for a write.  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case LABEL_DECL:
      /* We're taking the address of a label from a parent function, but
	 this is not itself a non-local goto.  Mark the label such that it
	 will not be deleted, much as we would with a label address in
	 static storage.  */
      if (decl_function_context (t) != info->context)
	FORCED_LABEL (t) = 1;
      break;

    case ADDR_EXPR:
      {
	bool save_val_only = wi->val_only;

	/* The operand of an ADDR_EXPR is an lvalue, not a value.  */
	wi->val_only = false;
	wi->is_lhs = false;
	wi->changed = false;
	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
	wi->val_only = true;

	if (wi->changed)
	  {
	    tree save_context;

	    /* If we changed anything, we might no longer be directly
	       referencing a decl.  */
	    save_context = current_function_decl;
	    current_function_decl = info->context;
	    recompute_tree_invariant_for_addr_expr (t);
	    current_function_decl = save_context;

	    /* If the callback converted the address argument in a context
	       where we only accept variables (and min_invariant, presumably),
	       then compute the address into a temporary.  */
	    if (save_val_only)
	      *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				      t, &wi->gsi);
	  }
      }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      /* Walk the index, lower bound and element size operands.  */
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
			 wi, NULL);
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
			 wi, NULL);
	    }
	}
      /* Finally walk the innermost base object as an lvalue.  */
      wi->val_only = false;
      walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  /* Any other expression: descend, expecting simple rvalues.  */
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1045
1046 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1047 struct walk_stmt_info *);
1048
/* Helper for convert_nonlocal_reference_stmt.  Rewrite, in the chain of
   OMP clauses at *PCLAUSES, all references to VAR and PARM_DECLs that
   belong to outer functions; return true if the static chain is needed.  */
1051
static bool
convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_chain = false, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  /* Work on a copy of the suppression bitmap; install it only after the
     first pass over the clauses is complete.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  /* A reduction placeholder carries GIMPLE init/merge sequences
	     that need the second walk below.  */
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	do_decl_clause:
	  decl = OMP_CLAUSE_DECL (clause);
	  /* Static and external variables are never rewritten.  */
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      /* The decl belongs to an outer function: point the clause
		 at its nonlocal debug decl and suppress the usual frame
		 expansion for it during the body walk.  */
	      bitmap_set_bit (new_suppress, DECL_UID (decl));
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	  /* These clauses carry a plain rvalue operand; rewrite it.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
	                                 &dummy, wi);
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	  /* No operands to rewrite.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  /* Second pass: walk the GIMPLE sequences attached to reduction and
     lastprivate clauses, temporarily giving the reduction placeholder
     this function as its DECL_CONTEXT.  */
  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
	  break;

	default:
	  break;
	}

  return need_chain;
}
1156
1157 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1158
1159 static void
1160 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1161 {
1162 while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1163 type = TREE_TYPE (type);
1164
1165 if (TYPE_NAME (type)
1166 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1167 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1168 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1169
1170 while (POINTER_TYPE_P (type)
1171 || TREE_CODE (type) == VECTOR_TYPE
1172 || TREE_CODE (type) == FUNCTION_TYPE
1173 || TREE_CODE (type) == METHOD_TYPE)
1174 type = TREE_TYPE (type);
1175
1176 if (TREE_CODE (type) == ARRAY_TYPE)
1177 {
1178 tree domain, t;
1179
1180 note_nonlocal_vla_type (info, TREE_TYPE (type));
1181 domain = TYPE_DOMAIN (type);
1182 if (domain)
1183 {
1184 t = TYPE_MIN_VALUE (domain);
1185 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1186 && decl_function_context (t) != info->context)
1187 get_nonlocal_debug_decl (info, t);
1188 t = TYPE_MAX_VALUE (domain);
1189 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1190 && decl_function_context (t) != info->context)
1191 get_nonlocal_debug_decl (info, t);
1192 }
1193 }
1194 }
1195
1196 /* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
1197 in BLOCK. */
1198
1199 static void
1200 note_nonlocal_block_vlas (struct nesting_info *info, tree block)
1201 {
1202 tree var;
1203
1204 for (var = BLOCK_VARS (block); var; var = TREE_CHAIN (var))
1205 if (TREE_CODE (var) == VAR_DECL
1206 && variably_modified_type_p (TREE_TYPE (var), NULL)
1207 && DECL_HAS_VALUE_EXPR_P (var)
1208 && decl_function_context (var) != info->context)
1209 note_nonlocal_vla_type (info, TREE_TYPE (var));
1210 }
1211
1212 /* Callback for walk_gimple_stmt. Rewrite all references to VAR and
1213 PARM_DECLs that belong to outer functions. This handles statements
1214 that are not handled via the standard recursion done in
1215 walk_gimple_stmt. STMT is the statement to examine, DATA is as in
1216 convert_nonlocal_reference_op. Set *HANDLED_OPS_P to true if all the
1217 operands of STMT have been handled by this function. */
1218
static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
				 struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      /* Don't walk non-local gotos for now.  */
      if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	  *handled_ops_p = true;
	  return NULL_TREE;
	}
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
	                                wi))
	{
	  /* A clause referenced an outer variable: the region body needs
	     the static chain, so pass it in via a firstprivate clause.  */
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt),
				OMP_CLAUSE_FIRSTPRIVATE);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}

      /* Collect temporaries created while walking the region body so
	 they can be declared inside that body instead of the enclosing
	 function.  */
      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      /* The FOR statement's index/bound/increment operands need their
	 own walk in addition to the body.  */
      walk_gimple_omp_for (stmt, convert_nonlocal_reference_stmt,
	                   convert_nonlocal_reference_op, info);
      walk_body (convert_nonlocal_reference_stmt,
	         convert_nonlocal_reference_op, info, gimple_omp_body (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
      /* These carry no clauses; just walk the body.  */
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body (stmt));
      break;

    case GIMPLE_BIND:
      /* When not optimizing, create nonlocal debug decls for nonlocal
	 VLA bounds declared in this bind's block.  */
      if (!optimize && gimple_bind_block (stmt))
	note_nonlocal_block_vlas (info, gimple_bind_block (stmt));

      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* We have handled all of STMT operands, no need to traverse the operands.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
1321
1322
1323 /* A subroutine of convert_local_reference. Create a local variable
1324 in the parent function with DECL_VALUE_EXPR set to reference the
1325 field in FRAME. This is used both for debug info and in OpenMP
1326 lowering. */
1327
static tree
get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
{
  tree x, new_decl;
  void **slot;

  /* Reuse an existing mapping for DECL if one was already created.  */
  slot = pointer_map_insert (info->var_map, decl);
  if (*slot)
    return (tree) *slot;

  /* Make sure frame_decl gets created.  */
  (void) get_frame_type (info);
  x = info->frame_decl;
  /* The value expression is FRAME.FIELD.  */
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);

  /* Build a replacement VAR_DECL in this function, mirroring the
     relevant attributes of the original DECL.  */
  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
  if ((TREE_CODE (decl) == PARM_DECL
       || TREE_CODE (decl) == RESULT_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && DECL_BY_REFERENCE (decl))
    DECL_BY_REFERENCE (new_decl) = 1;

  /* The new decl stands for FRAME.FIELD wherever it is used.  */
  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
  *slot = new_decl;

  /* Chain the new decl onto the list of debug variables.  */
  TREE_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  /* Do not emit debug info twice.  */
  DECL_IGNORED_P (decl) = 1;

  return new_decl;
}
1371
1372
1373 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1374 and PARM_DECLs that were referenced by inner nested functions.
1375 The rewrite will be a structure reference to the local frame variable. */
1376
1377 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1378
static tree
convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp, field, x;
  bool save_val_only;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) == info->context)
	{
	  /* If we copied a pointer to the frame, then the original decl
	     is used unchanged in the parent function.  */
	  if (use_pointer_in_frame (t))
	    break;

	  /* No need to transform anything if no child references the
	     variable.  */
	  field = lookup_field_for_decl (info, t, NO_INSERT);
	  if (!field)
	    break;
	  wi->changed = true;

	  /* Use the debug decl by default; replace it with an explicit
	     frame reference unless expansion is suppressed for this decl
	     (set by convert_local_omp_clauses).  */
	  x = get_local_debug_decl (info, t, field);
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    x = get_frame_field (info, info->context, field, &wi->gsi);

	  /* In a value context, go through a temporary: a load for a
	     use, a deferred store for a definition.  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case ADDR_EXPR:
      /* Walk the operand in address context, then restore the saved
	 value-context flag.  */
      save_val_only = wi->val_only;
      wi->val_only = false;
      wi->is_lhs = false;
      wi->changed = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;

      /* If we converted anything ... */
      if (wi->changed)
	{
	  tree save_context;

	  /* Then the frame decl is now addressable.  */
	  TREE_ADDRESSABLE (info->frame_decl) = 1;

	  save_context = current_function_decl;
	  current_function_decl = info->context;
	  recompute_tree_invariant_for_addr_expr (t);
	  current_function_decl = save_context;

	  /* If we are in a context where we only accept values, then
	     compute the address into a temporary.  */
	  if (save_val_only)
	    *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				    t, &wi->gsi);
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    /* Operand 2 is the optional field offset.  */
	    walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      /* Operands 1-3: index plus the optional bound and
		 element-size operands.  */
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
			 NULL);
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
			 NULL);
	    }
	}
      /* Finally rewrite the innermost base object in place.  */
      wi->val_only = false;
      walk_tree (tp, convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  /* Any other expression: recurse into the operands in rvalue
	     context.  */
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1515
1516 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
1517 struct walk_stmt_info *);
1518
1519 /* Helper for convert_local_reference. Convert all the references in
1520 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
1521
static bool
convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_frame = false, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  /* Work on a copy of the suppression bitmap; install it only after the
     first pass over the clauses is complete.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  /* A reduction placeholder carries GIMPLE init/merge sequences
	     that need the second walk below.  */
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	do_decl_clause:
	  decl = OMP_CLAUSE_DECL (clause);
	  /* Static and external variables are never rewritten.  */
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) == info->context
	      && !use_pointer_in_frame (decl))
	    {
	      /* One of our own variables that lives in the frame: point
		 the clause at its local debug decl and suppress the
		 usual frame expansion for it during the body walk.  */
	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
	      if (field)
		{
		  bitmap_set_bit (new_suppress, DECL_UID (decl));
		  OMP_CLAUSE_DECL (clause)
		    = get_local_debug_decl (info, decl, field);
		  need_frame = true;
		}
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	  /* These clauses carry a plain rvalue operand; rewrite it.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
				      wi);
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	  /* No operands to rewrite.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  /* Second pass: walk the GIMPLE sequences attached to reduction and
     lastprivate clauses, temporarily giving the reduction placeholder
     this function as its DECL_CONTEXT.  */
  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      walk_body (convert_local_reference_stmt,
			 convert_local_reference_op, info,
			 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
	      walk_body (convert_local_reference_stmt,
			 convert_local_reference_op, info,
			 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  walk_body (convert_local_reference_stmt,
		     convert_local_reference_op, info,
		     OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
	  break;

	default:
	  break;
	}

  return need_frame;
}
1632
1633
1634 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1635 and PARM_DECLs that were referenced by inner nested functions.
1636 The rewrite will be a structure reference to the local frame variable. */
1637
static tree
convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
	                             wi))
	{
	  /* A clause referenced a frame variable: share the whole frame
	     object with the parallel/task region.  */
	  tree c;
	  (void) get_frame_type (info);
	  c = build_omp_clause (gimple_location (stmt),
				OMP_CLAUSE_SHARED);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}

      /* Collect temporaries created while walking the region body so
	 they can be declared inside that body instead of the enclosing
	 function.  */
      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
	         gimple_omp_body (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      /* The FOR statement's index/bound/increment operands need their
	 own walk in addition to the body.  */
      walk_gimple_omp_for (stmt, convert_local_reference_stmt,
			   convert_local_reference_op, info);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
      /* These carry no clauses; just walk the body.  */
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body (stmt));
      break;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Indicate that we have handled all the operands ourselves.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
1721
1722
1723 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
1724 that reference labels from outer functions. The rewrite will be a
1725 call to __builtin_nonlocal_goto. */
1726
static tree
convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			   struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree label, new_label, target_context, x, field;
  void **slot;
  gimple call;
  gimple stmt = gsi_stmt (*gsi);

  if (gimple_code (stmt) != GIMPLE_GOTO)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Computed gotos are left alone.  */
  label = gimple_goto_dest (stmt);
  if (TREE_CODE (label) != LABEL_DECL)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* A goto to a label in the current function is an ordinary goto.  */
  target_context = decl_function_context (label);
  if (target_context == info->context)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Find the nesting_info of the function that declares the label.  */
  for (i = info->outer; target_context != i->context; i = i->outer)
    continue;

  /* The original user label may also be used for a normal goto, therefore
     we must create a new label that will actually receive the abnormal
     control transfer.  This new label will be marked DECL_NONLOCAL; this
     mark will trigger proper behavior in the cfg, as well as cause the
     (hairy target-specific) non-local goto receiver code to be generated
     when we expand rtl.  Enter this association into var_map so that we
     can insert the new label into the IL during a second pass.  */
  slot = pointer_map_insert (i->var_map, label);
  if (*slot == NULL)
    {
      new_label = create_artificial_label (UNKNOWN_LOCATION);
      DECL_NONLOCAL (new_label) = 1;
      *slot = new_label;
    }
  else
    new_label = (tree) *slot;

  /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field).  */
  field = get_nl_goto_field (i);
  x = get_frame_field (info, target_context, field, &wi->gsi);
  x = build_addr (x, target_context);
  x = gsi_gimplify_val (info, x, &wi->gsi);
  call = gimple_build_call (implicit_built_in_decls[BUILT_IN_NONLOCAL_GOTO], 2,
			    build_addr (new_label, target_context), x);
  gsi_replace (&wi->gsi, call, false);

  /* We have handled all of STMT's operands, no need to keep going.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
1790
1791
1792 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
1793 are referenced via nonlocal goto from a nested function. The rewrite
1794 will involve installing a newly generated DECL_NONLOCAL label, and
1795 (potentially) a branch around the rtl gunk that is assumed to be
1796 attached to such a label. */
1797
1798 static tree
1799 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1800 struct walk_stmt_info *wi)
1801 {
1802 struct nesting_info *const info = (struct nesting_info *) wi->info;
1803 tree label, new_label;
1804 gimple_stmt_iterator tmp_gsi;
1805 void **slot;
1806 gimple stmt = gsi_stmt (*gsi);
1807
1808 if (gimple_code (stmt) != GIMPLE_LABEL)
1809 {
1810 *handled_ops_p = false;
1811 return NULL_TREE;
1812 }
1813
1814 label = gimple_label_label (stmt);
1815
1816 slot = pointer_map_contains (info->var_map, label);
1817 if (!slot)
1818 {
1819 *handled_ops_p = false;
1820 return NULL_TREE;
1821 }
1822
1823 /* If there's any possibility that the previous statement falls through,
1824 then we must branch around the new non-local label. */
1825 tmp_gsi = wi->gsi;
1826 gsi_prev (&tmp_gsi);
1827 if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
1828 {
1829 gimple stmt = gimple_build_goto (label);
1830 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1831 }
1832
1833 new_label = (tree) *slot;
1834 stmt = gimple_build_label (new_label);
1835 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1836
1837 *handled_ops_p = true;
1838 return NULL_TREE;
1839 }
1840
1841
/* Called via walk_function+walk_stmt, rewrite all references to addresses
   of nested functions that require the use of trampolines.  The rewrite
   will involve a reference to a trampoline generated for the occasion.  */
1845
static tree
convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree t = *tp, decl, target_context, x, builtin;
  gimple call;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case ADDR_EXPR:
      /* Build
	   T.1 = &CHAIN->tramp;
	   T.2 = __builtin_adjust_trampoline (T.1);
	   T.3 = (func_type)T.2;
      */

      /* Only addresses of FUNCTION_DECLs are interesting.  */
      decl = TREE_OPERAND (t, 0);
      if (TREE_CODE (decl) != FUNCTION_DECL)
	break;

      /* Only need to process nested functions.  */
      target_context = decl_function_context (decl);
      if (!target_context)
	break;

      /* If the nested function doesn't use a static chain, then
	 it doesn't need a trampoline.  */
      if (!DECL_STATIC_CHAIN (decl))
	break;

      /* If we don't want a trampoline, then don't build one.  */
      if (TREE_NO_TRAMPOLINE (t))
	break;

      /* Lookup the immediate parent of the callee, as that's where
	 we need to insert the trampoline.  */
      for (i = info; i->context != target_context; i = i->outer)
	continue;
      x = lookup_tramp_for_decl (i, decl, INSERT);

      /* Compute the address of the field holding the trampoline.  */
      x = get_frame_field (info, target_context, x, &wi->gsi);
      x = build_addr (x, target_context);
      x = gsi_gimplify_val (info, x, &wi->gsi);

      /* Do machine-specific ugliness.  Normally this will involve
	 computing extra alignment, but it can really be anything.  */
      builtin = implicit_built_in_decls[BUILT_IN_ADJUST_TRAMPOLINE];
      call = gimple_build_call (builtin, 1, x);
      x = init_tmp_var_with_call (info, &wi->gsi, call);

      /* Cast back to the proper function type.  */
      x = build1 (NOP_EXPR, TREE_TYPE (t), x);
      x = init_tmp_var (info, x, &wi->gsi);

      *tp = x;
      break;

    default:
      /* Recurse into every other expression; decls and types carry no
	 function addresses to rewrite.  */
      if (!IS_TYPE_OR_DECL_P (t))
	*walk_subtrees = 1;
      break;
    }

  return NULL_TREE;
}
1914
1915
/* Called via walk_function+walk_gimple_stmt, rewrite all references
   to addresses of nested functions that require the use of
   trampolines.  The rewrite will involve a reference to a trampoline
   generated for the occasion.  */
1920
1921 static tree
1922 convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1923 struct walk_stmt_info *wi)
1924 {
1925 gimple stmt = gsi_stmt (*gsi);
1926
1927 switch (gimple_code (stmt))
1928 {
1929 case GIMPLE_CALL:
1930 {
1931 /* Only walk call arguments, lest we generate trampolines for
1932 direct calls. */
1933 unsigned long i, nargs = gimple_call_num_args (stmt);
1934 for (i = 0; i < nargs; i++)
1935 walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
1936 wi, NULL);
1937
1938 *handled_ops_p = true;
1939 return NULL_TREE;
1940 }
1941
1942 default:
1943 break;
1944 }
1945
1946 *handled_ops_p = false;
1947 return NULL_TREE;
1948 }
1949
1950
1951
1952 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
1953 that reference nested functions to make sure that the static chain
1954 is set up properly for the call. */
1955
static tree
convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		     struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree decl, target_context;
  char save_static_chain_added;
  int i;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* A static chain is already supplied; nothing to do.  */
      if (gimple_call_chain (stmt))
	break;
      /* No fndecl means an indirect call; leave it alone.  */
      decl = gimple_call_fndecl (stmt);
      if (!decl)
	break;
      target_context = decl_function_context (decl);
      if (target_context && DECL_STATIC_CHAIN (decl))
	{
	  gimple_call_set_chain (stmt, get_static_chain (info, target_context,
							 &wi->gsi));
	  /* Record what was used: bit 0 when the callee is nested
	     directly in this function (chain is our frame), bit 1 when
	     the chain comes from an outer function.  */
	  info->static_chain_added |= (1 << (info->context != target_context));
	}
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      /* Walk the region body, then make sure any frame or chain decl
	 it used is passed into the region via a clause.  */
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  /* Bit 1 maps to the chain decl, bit 0 to the frame decl.  */
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_taskreg_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      /* The chain pointer is copied into the region by value;
		 the frame object is shared with it.  */
	      c = build_omp_clause (gimple_location (stmt),
				    i ? OMP_CLAUSE_FIRSTPRIVATE
				    : OMP_CLAUSE_SHARED);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	      gimple_omp_taskreg_set_clauses (stmt, c);
	    }
	}
      /* Propagate the uses to the enclosing context as well.  */
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      walk_body (convert_gimple_call, NULL, info,
	         gimple_omp_for_pre_body (stmt));
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body (stmt));
      break;

    default:
      /* Keep looking for other operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
2037
/* Walk the nesting tree starting with ROOT.  Convert all trampolines and
   call expressions.  At the same time, determine if a nested function
   actually uses its static chain; if not, remember that.  */

static void
convert_all_function_calls (struct nesting_info *root)
{
  struct nesting_info *n;
  int iter_count;
  bool any_changed;

  /* First, optimistically clear static_chain for all decls that haven't
     used the static chain already for variable access.  A function with
     neither a chain_decl nor a chain_field made no such accesses.  */
  FOR_EACH_NEST_INFO (n, root)
    {
      tree decl = n->context;
      if (!n->outer || (!n->chain_decl && !n->chain_field))
	{
	  DECL_STATIC_CHAIN (decl) = 0;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Guessing no static-chain for %s\n",
		     lang_hooks.decl_printable_name (decl, 2));
	}
      else
	DECL_STATIC_CHAIN (decl) = 1;
    }

  /* Walk the functions and perform transformations.  Note that these
     transformations can induce new uses of the static chain, which in turn
     require re-examining all users of the decl.  Iterate until no
     function newly acquires a static chain (a fixed point).  */
  /* ??? It would make sense to try to use the call graph to speed this up,
     but the call graph hasn't really been built yet.  Even if it did, we
     would still need to iterate in this loop since address-of references
     wouldn't show up in the callgraph anyway.  */
  iter_count = 0;
  do
    {
      any_changed = false;
      iter_count++;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fputc ('\n', dump_file);

      FOR_EACH_NEST_INFO (n, root)
	{
	  tree decl = n->context;
	  bool old_static_chain = DECL_STATIC_CHAIN (decl);

	  /* Rewrite address-of-nested-function references into
	     trampoline references, then fix up direct calls.  */
	  walk_function (convert_tramp_reference_stmt,
			 convert_tramp_reference_op, n);
	  walk_function (convert_gimple_call, NULL, n);

	  /* If a call to another function created the use of a chain
	     within this function, we'll have to continue iteration.  */
	  if (!old_static_chain && DECL_STATIC_CHAIN (decl))
	    any_changed = true;
	}
    }
  while (any_changed);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "convert_all_function_calls iterations: %d\n\n",
	     iter_count);
}
2102
/* Context used when remapping types/decls through the tree inliner's
   copy_body machinery: the generic copy_body_data plus the nesting_info
   whose var_map drives the remapping (see nesting_copy_decl).  */

struct nesting_copy_body_data
{
  copy_body_data cb;             /* Generic inliner callback state.  */
  struct nesting_info *root;     /* Nest whose var_map is consulted.  */
};
2108
2109 /* A helper subroutine for debug_var_chain type remapping. */
2110
2111 static tree
2112 nesting_copy_decl (tree decl, copy_body_data *id)
2113 {
2114 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2115 void **slot = pointer_map_contains (nid->root->var_map, decl);
2116
2117 if (slot)
2118 return (tree) *slot;
2119
2120 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2121 {
2122 tree new_decl = copy_decl_no_change (decl, id);
2123 DECL_ORIGINAL_TYPE (new_decl)
2124 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
2125 return new_decl;
2126 }
2127
2128 if (TREE_CODE (decl) == VAR_DECL
2129 || TREE_CODE (decl) == PARM_DECL
2130 || TREE_CODE (decl) == RESULT_DECL)
2131 return decl;
2132
2133 return copy_decl_no_change (decl, id);
2134 }
2135
2136 /* A helper function for remap_vla_decls. See if *TP contains
2137 some remapped variables. */
2138
2139 static tree
2140 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2141 {
2142 struct nesting_info *root = (struct nesting_info *) data;
2143 tree t = *tp;
2144 void **slot;
2145
2146 if (DECL_P (t))
2147 {
2148 *walk_subtrees = 0;
2149 slot = pointer_map_contains (root->var_map, t);
2150
2151 if (slot)
2152 return (tree) *slot;
2153 }
2154 return NULL;
2155 }
2156
/* Remap VLA decls in BLOCK and subblocks if remapped variables are
   involved.  Recurses through the lexical block tree; for each
   variably-modified VAR_DECL with a value expression whose type or
   value refers to a remapped variable, remaps both via the inliner's
   copy_body machinery.  */

static void
remap_vla_decls (tree block, struct nesting_info *root)
{
  tree var, subblock, val, type;
  struct nesting_copy_body_data id;

  /* Handle nested lexical blocks first.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = BLOCK_CHAIN (subblock))
    remap_vla_decls (subblock, root);

  /* Cheap pre-scan: find the first affected variable, if any, so we
     can avoid setting up the remapping machinery for nothing.  */
  for (var = BLOCK_VARS (block); var; var = TREE_CHAIN (var))
    {
      if (TREE_CODE (var) == VAR_DECL
	  && variably_modified_type_p (TREE_TYPE (var), NULL)
	  && DECL_HAS_VALUE_EXPR_P (var))
	{
	  type = TREE_TYPE (var);
	  val = DECL_VALUE_EXPR (var);
	  if (walk_tree (&type, contains_remapped_vars, root, NULL) != NULL
	      || walk_tree (&val, contains_remapped_vars, root, NULL) != NULL)
	    break;
	}
    }
  if (var == NULL_TREE)
    return;

  /* Set up a copy_body context that remaps decls through ROOT's
     var_map (see nesting_copy_decl).  */
  memset (&id, 0, sizeof (id));
  id.cb.copy_decl = nesting_copy_decl;
  id.cb.decl_map = pointer_map_create ();
  id.root = root;

  /* Resume from the first affected variable found above.  */
  for (; var; var = TREE_CHAIN (var))
    if (TREE_CODE (var) == VAR_DECL
	&& variably_modified_type_p (TREE_TYPE (var), NULL)
	&& DECL_HAS_VALUE_EXPR_P (var))
      {
	struct nesting_info *i;
	tree newt, t, context;

	t = type = TREE_TYPE (var);
	val = DECL_VALUE_EXPR (var);
	if (walk_tree (&type, contains_remapped_vars, root, NULL) == NULL
	    && walk_tree (&val, contains_remapped_vars, root, NULL) == NULL)
	  continue;

	/* Find the nesting_info for the function VAR belongs to; skip
	   the variable if it is outside this nest.  */
	context = decl_function_context (var);
	for (i = root; i; i = i->outer)
	  if (i->context == context)
	    break;

	if (i == NULL)
	  continue;

	id.cb.src_fn = i->context;
	id.cb.dst_fn = i->context;
	id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

	TREE_TYPE (var) = newt = remap_type (type, &id.cb);
	/* Strip unnamed pointer layers from both the new and the old
	   type in lockstep, so the TYPE_NAME comparison below looks at
	   corresponding levels.  */
	while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
	  {
	    newt = TREE_TYPE (newt);
	    t = TREE_TYPE (t);
	  }
	/* If the remapped type still carries the old type's TYPE_DECL
	   name, remap that decl as well.  */
	if (TYPE_NAME (newt)
	    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
	    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
	    && newt != t
	    && TYPE_NAME (newt) == TYPE_NAME (t))
	  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);

	/* Rewrite the value expression; only install it if it actually
	   changed.  */
	walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
	if (val != DECL_VALUE_EXPR (var))
	  SET_DECL_VALUE_EXPR (var, val);
      }

  pointer_map_destroy (id.cb.decl_map);
}
2238
/* Do "everything else" to clean up or complete state collected by the
   various walking passes -- lay out the types and decls, generate code
   to initialize the frame decl, store critical expressions in the
   struct function for rtl to find.  */

static void
finalize_nesting_tree_1 (struct nesting_info *root)
{
  gimple_seq stmt_list;
  gimple stmt;
  tree context = root->context;
  struct function *sf;

  /* Initialization statements built below are accumulated here and
     spliced into the function body at the end.  */
  stmt_list = NULL;

  /* If we created a non-local frame type or decl, we need to lay them
     out at this time.  */
  if (root->frame_type)
    {
      /* In some cases the frame type will trigger the -Wpadded warning.
	 This is not helpful; suppress it. */
      int save_warn_padded = warn_padded;
      tree *adjust;

      warn_padded = 0;
      layout_type (root->frame_type);
      warn_padded = save_warn_padded;
      layout_decl (root->frame_decl, 0);

      /* Remove root->frame_decl from root->new_local_var_chain, so
	 that we can declare it also in the lexical blocks, which
	 helps ensure virtual regs that end up appearing in its RTL
	 expression get substituted in instantiate_virtual_regs().  */
      for (adjust = &root->new_local_var_chain;
	   *adjust != root->frame_decl;
	   adjust = &TREE_CHAIN (*adjust))
	gcc_assert (TREE_CHAIN (*adjust));
      *adjust = TREE_CHAIN (*adjust);

      TREE_CHAIN (root->frame_decl) = NULL_TREE;
      declare_vars (root->frame_decl,
		    gimple_seq_first_stmt (gimple_body (context)), true);
    }

  /* If any parameters were referenced non-locally, then we need to
     insert a copy.  Likewise, if any variables were referenced by
     pointer, we need to initialize the address.  */
  if (root->any_parm_remapped)
    {
      tree p;
      for (p = DECL_ARGUMENTS (context); p ; p = TREE_CHAIN (p))
	{
	  tree field, x, y;

	  /* Only parameters that got a frame field need copying.  */
	  field = lookup_field_for_decl (root, p, NO_INSERT);
	  if (!field)
	    continue;

	  if (use_pointer_in_frame (p))
	    x = build_addr (p, context);
	  else
	    x = p;

	  /* FRAME.field = x  */
	  y = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  stmt = gimple_build_assign (y, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	  /* If the assignment is from a non-register the stmt is
	     not valid gimple.  Make it so by using a temporary instead.  */
	  if (!is_gimple_reg (x)
	      && is_gimple_reg_type (TREE_TYPE (x)))
	    {
	      gimple_stmt_iterator gsi = gsi_last (stmt_list);
	      x = init_tmp_var (root, x, &gsi);
	      gimple_assign_set_rhs1 (stmt, x);
	    }
	}
    }

  /* If a chain_field was created, then it needs to be initialized
     from chain_decl.  */
  if (root->chain_field)
    {
      tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
		       root->frame_decl, root->chain_field, NULL_TREE);
      stmt = gimple_build_assign (x, get_chain_decl (root));
      gimple_seq_add_stmt (&stmt_list, stmt);
    }

  /* If trampolines were created, then we need to initialize them.  */
  if (root->any_tramp_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree arg1, arg2, arg3, x, field;

	  field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  /* A trampoline only makes sense if the callee needs a chain.  */
	  gcc_assert (DECL_STATIC_CHAIN (i->context));
	  arg3 = build_addr (root->frame_decl, context);

	  arg2 = build_addr (i->context, context);

	  x = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  arg1 = build_addr (x, context);

	  /* __builtin_init_trampoline (&FRAME.tramp, &callee, &FRAME)  */
	  x = implicit_built_in_decls[BUILT_IN_INIT_TRAMPOLINE];
	  stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If we created initialization statements, insert them.  */
  if (stmt_list)
    {
      gimple bind;
      annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
      /* Prepend the initializations to the outermost bind's body.  */
      bind = gimple_seq_first_stmt (gimple_body (context));
      gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
      gimple_bind_set_body (bind, stmt_list);
    }

  /* If a chain_decl was created, then it needs to be registered with
     struct function so that it gets initialized from the static chain
     register at the beginning of the function.  */
  sf = DECL_STRUCT_FUNCTION (root->context);
  sf->static_chain_decl = root->chain_decl;

  /* Similarly for the non-local goto save area.  */
  if (root->nl_goto_field)
    {
      sf->nonlocal_goto_save_area
	= get_frame_field (root, context, root->nl_goto_field, NULL);
      sf->has_nonlocal_label = 1;
    }

  /* Make sure all new local variables get inserted into the
     proper BIND_EXPR.  */
  if (root->new_local_var_chain)
    declare_vars (root->new_local_var_chain,
		  gimple_seq_first_stmt (gimple_body (root->context)),
		  false);

  if (root->debug_var_chain)
    {
      tree debug_var;
      gimple scope;

      remap_vla_decls (DECL_INITIAL (root->context), root);

      /* Look for any debug decl with a variable-length type.  */
      for (debug_var = root->debug_var_chain; debug_var;
	   debug_var = TREE_CHAIN (debug_var))
	if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	  break;

      /* If there are any debug decls with variable length types,
	 remap those types using other debug_var_chain variables.  */
      if (debug_var)
	{
	  struct nesting_copy_body_data id;

	  memset (&id, 0, sizeof (id));
	  id.cb.copy_decl = nesting_copy_decl;
	  id.cb.decl_map = pointer_map_create ();
	  id.root = root;

	  for (; debug_var; debug_var = TREE_CHAIN (debug_var))
	    if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	      {
		tree type = TREE_TYPE (debug_var);
		tree newt, t = type;
		struct nesting_info *i;

		/* Find the innermost enclosing context the type
		   actually varies with; skip if there is none.  */
		for (i = root; i; i = i->outer)
		  if (variably_modified_type_p (type, i->context))
		    break;

		if (i == NULL)
		  continue;

		id.cb.src_fn = i->context;
		id.cb.dst_fn = i->context;
		id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

		TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
		/* Strip unnamed pointer layers from new and old type in
		   lockstep, as in remap_vla_decls.  */
		while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
		  {
		    newt = TREE_TYPE (newt);
		    t = TREE_TYPE (t);
		  }
		/* Remap a TYPE_DECL name that was carried over from the
		   old type.  */
		if (TYPE_NAME (newt)
		    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
		    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
		    && newt != t
		    && TYPE_NAME (newt) == TYPE_NAME (t))
		  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
	      }

	  pointer_map_destroy (id.cb.decl_map);
	}

      /* Declare the debug vars in the outermost bind if it has a block,
	 otherwise chain them onto the function-level block's vars.  */
      scope = gimple_seq_first_stmt (gimple_body (root->context));
      if (gimple_bind_block (scope))
	declare_vars (root->debug_var_chain, scope, true);
      else
	BLOCK_VARS (DECL_INITIAL (root->context))
	  = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
		     root->debug_var_chain);
    }

  /* Dump the translated tree function.  */
  if (dump_file)
    {
      fputs ("\n\n", dump_file);
      dump_function_to_file (root->context, dump_file, dump_flags);
    }
}
2460
2461 static void
2462 finalize_nesting_tree (struct nesting_info *root)
2463 {
2464 struct nesting_info *n;
2465 FOR_EACH_NEST_INFO (n, root)
2466 finalize_nesting_tree_1 (n);
2467 }
2468
2469 /* Unnest the nodes and pass them to cgraph. */
2470
2471 static void
2472 unnest_nesting_tree_1 (struct nesting_info *root)
2473 {
2474 struct cgraph_node *node = cgraph_node (root->context);
2475
2476 /* For nested functions update the cgraph to reflect unnesting.
2477 We also delay finalizing of these functions up to this point. */
2478 if (node->origin)
2479 {
2480 cgraph_unnest_node (cgraph_node (root->context));
2481 cgraph_finalize_function (root->context, true);
2482 }
2483 }
2484
2485 static void
2486 unnest_nesting_tree (struct nesting_info *root)
2487 {
2488 struct nesting_info *n;
2489 FOR_EACH_NEST_INFO (n, root)
2490 unnest_nesting_tree_1 (n);
2491 }
2492
2493 /* Free the data structures allocated during this pass. */
2494
2495 static void
2496 free_nesting_tree (struct nesting_info *root)
2497 {
2498 struct nesting_info *node, *next;
2499
2500 node = iter_nestinfo_start (root);
2501 do
2502 {
2503 next = iter_nestinfo_next (node);
2504 pointer_map_destroy (node->var_map);
2505 pointer_map_destroy (node->field_map);
2506 free (node);
2507 node = next;
2508 }
2509 while (node);
2510 }
2511
2512 /* Gimplify a function and all its nested functions. */
2513 static void
2514 gimplify_all_functions (struct cgraph_node *root)
2515 {
2516 struct cgraph_node *iter;
2517 if (!gimple_body (root->decl))
2518 gimplify_function_tree (root->decl);
2519 for (iter = root->nested; iter; iter = iter->next_nested)
2520 gimplify_all_functions (iter);
2521 }
2522
/* Main entry point for this pass.  Process FNDECL and all of its nested
   subroutines and turn them into something less tightly bound.  */

void
lower_nested_functions (tree fndecl)
{
  struct cgraph_node *cgn;
  struct nesting_info *root;

  /* If there are no nested functions, there's nothing to do. */
  cgn = cgraph_node (fndecl);
  if (!cgn->nested)
    return;

  /* Make sure every function in the nest has a GIMPLE body.  */
  gimplify_all_functions (cgn);

  dump_file = dump_begin (TDI_nested, &dump_flags);
  if (dump_file)
    fprintf (dump_file, "\n;; Function %s\n\n",
             lang_hooks.decl_printable_name (fndecl, 2));

  bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
  root = create_nesting_tree (cgn);

  /* Rewrite references to variables of enclosing functions, then
     references from enclosing functions to locals hoisted into the
     frame, then non-local gotos and their receivers.  */
  walk_all_functions (convert_nonlocal_reference_stmt,
                      convert_nonlocal_reference_op,
                      root);
  walk_all_functions (convert_local_reference_stmt,
                      convert_local_reference_op,
                      root);
  walk_all_functions (convert_nl_goto_reference, NULL, root);
  walk_all_functions (convert_nl_goto_receiver, NULL, root);

  /* Fix up calls/trampolines, emit frame initialization, and hand the
     now-independent functions to the cgraph.  */
  convert_all_function_calls (root);
  finalize_nesting_tree (root);
  unnest_nesting_tree (root);

  free_nesting_tree (root);
  bitmap_obstack_release (&nesting_info_bitmap_obstack);

  if (dump_file)
    {
      dump_end (TDI_nested, dump_file);
      dump_file = NULL;
    }
}
2569
2570 #include "gt-tree-nested.h"
This page took 0.139685 seconds and 6 git commands to generate.