/* Nested function decomposition for GIMPLE.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "function.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-flow.h"
#include "cgraph.h"
#include "expr.h"
#include "langhooks.h"
#include "pointer-set.h"
#include "ggc.h"
/* The object of this pass is to lower the representation of a set of nested
   functions in order to expose all of the gory details of the various
   nonlocal references.  We want to do this sooner rather than later, in
   order to give us more freedom in emitting all of the functions in question.

   Back in olden times, when gcc was young, we developed an insanely
   complicated scheme whereby variables which were referenced nonlocally
   were forced to live in the stack of the declaring function, and then
   the nested functions magically discovered where these variables were
   placed.  In order for this scheme to function properly, it required
   that the outer function be partially expanded, then we switch to
   compiling the inner function, and once done with those we switch back
   to compiling the outer function.  Such delicate ordering requirements
   makes it difficult to do whole translation unit optimizations
   involving such functions.

   The implementation here is much more direct.  Everything that can be
   referenced by an inner function is a member of an explicitly created
   structure herein called the "nonlocal frame struct".  The incoming
   static chain for a nested function is a pointer to this struct in
   the parent.  In this way, we settle on known offsets from a known
   base, and so are decoupled from the logic that places objects in the
   function's stack frame.  More importantly, we don't have to wait for
   that to happen -- since the compilation of the inner function is no
   longer tied to a real stack frame, the nonlocal frame struct can be
   allocated anywhere.  Which means that the outer function is now
   inlinable.

   Theory of operation here is very simple.  Iterate over all the
   statements in all the functions (depth first) several times,
   allocating structures and fields on demand.  In general we want to
   examine inner functions first, so that we can avoid making changes
   to outer functions which are unnecessary.

   The order of the passes matters a bit, in that later passes will be
   skipped if it is discovered that the functions don't actually interact
   at all.  That is, they're nested in the lexical sense but could have
   been written as independent functions without change.  */
83 struct nesting_info
*outer
;
84 struct nesting_info
*inner
;
85 struct nesting_info
*next
;
87 struct pointer_map_t
*field_map
;
88 struct pointer_map_t
*var_map
;
89 bitmap suppress_expansion
;
92 tree new_local_var_chain
;
100 bool any_parm_remapped
;
101 bool any_tramp_created
;
102 char static_chain_added
;
106 /* Iterate over the nesting tree, starting with ROOT, depth first. */
108 static inline struct nesting_info
*
109 iter_nestinfo_start (struct nesting_info
*root
)
116 static inline struct nesting_info
*
117 iter_nestinfo_next (struct nesting_info
*node
)
120 return iter_nestinfo_start (node
->next
);
124 #define FOR_EACH_NEST_INFO(I, ROOT) \
125 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
127 /* Obstack used for the bitmaps in the struct above. */
128 static struct bitmap_obstack nesting_info_bitmap_obstack
;
131 /* We're working in so many different function contexts simultaneously,
132 that create_tmp_var is dangerous. Prevent mishap. */
133 #define create_tmp_var cant_use_create_tmp_var_here_dummy
135 /* Like create_tmp_var, except record the variable for registration at
136 the given nesting level. */
139 create_tmp_var_for (struct nesting_info
*info
, tree type
, const char *prefix
)
143 /* If the type is of variable size or a type which must be created by the
144 frontend, something is wrong. Note that we explicitly allow
145 incomplete types here, since we create them ourselves here. */
146 gcc_assert (!TREE_ADDRESSABLE (type
));
147 gcc_assert (!TYPE_SIZE_UNIT (type
)
148 || TREE_CODE (TYPE_SIZE_UNIT (type
)) == INTEGER_CST
);
150 tmp_var
= create_tmp_var_raw (type
, prefix
);
151 DECL_CONTEXT (tmp_var
) = info
->context
;
152 TREE_CHAIN (tmp_var
) = info
->new_local_var_chain
;
153 DECL_SEEN_IN_BIND_EXPR_P (tmp_var
) = 1;
154 if (TREE_CODE (type
) == COMPLEX_TYPE
155 || TREE_CODE (type
) == VECTOR_TYPE
)
156 DECL_GIMPLE_REG_P (tmp_var
) = 1;
158 info
->new_local_var_chain
= tmp_var
;
163 /* Take the address of EXP to be used within function CONTEXT.
164 Mark it for addressability as necessary. */
167 build_addr (tree exp
, tree context
)
173 while (handled_component_p (base
))
174 base
= TREE_OPERAND (base
, 0);
177 TREE_ADDRESSABLE (base
) = 1;
179 /* Building the ADDR_EXPR will compute a set of properties for
180 that ADDR_EXPR. Those properties are unfortunately context
181 specific, i.e., they are dependent on CURRENT_FUNCTION_DECL.
183 Temporarily set CURRENT_FUNCTION_DECL to the desired context,
184 build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL. That
185 way the properties are for the ADDR_EXPR are computed properly. */
186 save_context
= current_function_decl
;
187 current_function_decl
= context
;
188 retval
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (exp
)), exp
);
189 current_function_decl
= save_context
;
193 /* Insert FIELD into TYPE, sorted by alignment requirements. */
196 insert_field_into_struct (tree type
, tree field
)
200 DECL_CONTEXT (field
) = type
;
202 for (p
= &TYPE_FIELDS (type
); *p
; p
= &TREE_CHAIN (*p
))
203 if (DECL_ALIGN (field
) >= DECL_ALIGN (*p
))
206 TREE_CHAIN (field
) = *p
;
209 /* Set correct alignment for frame struct type. */
210 if (TYPE_ALIGN (type
) < DECL_ALIGN (field
))
211 TYPE_ALIGN (type
) = DECL_ALIGN (field
);
214 /* Build or return the RECORD_TYPE that describes the frame state that is
215 shared between INFO->CONTEXT and its nested functions. This record will
216 not be complete until finalize_nesting_tree; up until that point we'll
217 be adding fields as necessary.
219 We also build the DECL that represents this frame in the function. */
222 get_frame_type (struct nesting_info
*info
)
224 tree type
= info
->frame_type
;
229 type
= make_node (RECORD_TYPE
);
231 name
= concat ("FRAME.",
232 IDENTIFIER_POINTER (DECL_NAME (info
->context
)),
234 TYPE_NAME (type
) = get_identifier (name
);
237 info
->frame_type
= type
;
238 info
->frame_decl
= create_tmp_var_for (info
, type
, "FRAME");
240 /* ??? Always make it addressable for now, since it is meant to
241 be pointed to by the static chain pointer. This pessimizes
242 when it turns out that no static chains are needed because
243 the nested functions referencing non-local variables are not
244 reachable, but the true pessimization is to create the non-
245 local frame structure in the first place. */
246 TREE_ADDRESSABLE (info
->frame_decl
) = 1;
251 /* Return true if DECL should be referenced by pointer in the non-local
255 use_pointer_in_frame (tree decl
)
257 if (TREE_CODE (decl
) == PARM_DECL
)
259 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
260 sized decls, and inefficient to copy large aggregates. Don't bother
261 moving anything but scalar variables. */
262 return AGGREGATE_TYPE_P (TREE_TYPE (decl
));
266 /* Variable sized types make things "interesting" in the frame. */
267 return DECL_SIZE (decl
) == NULL
|| !TREE_CONSTANT (DECL_SIZE (decl
));
271 /* Given DECL, a non-locally accessed variable, find or create a field
272 in the non-local frame structure for the given nesting context. */
275 lookup_field_for_decl (struct nesting_info
*info
, tree decl
,
276 enum insert_option insert
)
280 if (insert
== NO_INSERT
)
282 slot
= pointer_map_contains (info
->field_map
, decl
);
283 return slot
? (tree
) *slot
: NULL_TREE
;
286 slot
= pointer_map_insert (info
->field_map
, decl
);
289 tree field
= make_node (FIELD_DECL
);
290 DECL_NAME (field
) = DECL_NAME (decl
);
292 if (use_pointer_in_frame (decl
))
294 TREE_TYPE (field
) = build_pointer_type (TREE_TYPE (decl
));
295 DECL_ALIGN (field
) = TYPE_ALIGN (TREE_TYPE (field
));
296 DECL_NONADDRESSABLE_P (field
) = 1;
300 TREE_TYPE (field
) = TREE_TYPE (decl
);
301 DECL_SOURCE_LOCATION (field
) = DECL_SOURCE_LOCATION (decl
);
302 DECL_ALIGN (field
) = DECL_ALIGN (decl
);
303 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (decl
);
304 TREE_ADDRESSABLE (field
) = TREE_ADDRESSABLE (decl
);
305 DECL_NONADDRESSABLE_P (field
) = !TREE_ADDRESSABLE (decl
);
306 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (decl
);
309 insert_field_into_struct (get_frame_type (info
), field
);
312 if (TREE_CODE (decl
) == PARM_DECL
)
313 info
->any_parm_remapped
= true;
319 /* Build or return the variable that holds the static chain within
320 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
323 get_chain_decl (struct nesting_info
*info
)
325 tree decl
= info
->chain_decl
;
331 type
= get_frame_type (info
->outer
);
332 type
= build_pointer_type (type
);
334 /* Note that this variable is *not* entered into any BIND_EXPR;
335 the construction of this variable is handled specially in
336 expand_function_start and initialize_inlined_parameters.
337 Note also that it's represented as a parameter. This is more
338 close to the truth, since the initial value does come from
340 decl
= build_decl (DECL_SOURCE_LOCATION (info
->context
),
341 PARM_DECL
, create_tmp_var_name ("CHAIN"), type
);
342 DECL_ARTIFICIAL (decl
) = 1;
343 DECL_IGNORED_P (decl
) = 1;
344 TREE_USED (decl
) = 1;
345 DECL_CONTEXT (decl
) = info
->context
;
346 DECL_ARG_TYPE (decl
) = type
;
348 /* Tell tree-inline.c that we never write to this variable, so
349 it can copy-prop the replacement value immediately. */
350 TREE_READONLY (decl
) = 1;
352 info
->chain_decl
= decl
;
355 && (dump_flags
& TDF_DETAILS
)
356 && !DECL_STATIC_CHAIN (info
->context
))
357 fprintf (dump_file
, "Setting static-chain for %s\n",
358 lang_hooks
.decl_printable_name (info
->context
, 2));
360 DECL_STATIC_CHAIN (info
->context
) = 1;
365 /* Build or return the field within the non-local frame state that holds
366 the static chain for INFO->CONTEXT. This is the way to walk back up
367 multiple nesting levels. */
370 get_chain_field (struct nesting_info
*info
)
372 tree field
= info
->chain_field
;
376 tree type
= build_pointer_type (get_frame_type (info
->outer
));
378 field
= make_node (FIELD_DECL
);
379 DECL_NAME (field
) = get_identifier ("__chain");
380 TREE_TYPE (field
) = type
;
381 DECL_ALIGN (field
) = TYPE_ALIGN (type
);
382 DECL_NONADDRESSABLE_P (field
) = 1;
384 insert_field_into_struct (get_frame_type (info
), field
);
386 info
->chain_field
= field
;
389 && (dump_flags
& TDF_DETAILS
)
390 && !DECL_STATIC_CHAIN (info
->context
))
391 fprintf (dump_file
, "Setting static-chain for %s\n",
392 lang_hooks
.decl_printable_name (info
->context
, 2));
394 DECL_STATIC_CHAIN (info
->context
) = 1;
399 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
402 init_tmp_var_with_call (struct nesting_info
*info
, gimple_stmt_iterator
*gsi
,
407 t
= create_tmp_var_for (info
, gimple_call_return_type (call
), NULL
);
408 gimple_call_set_lhs (call
, t
);
409 if (! gsi_end_p (*gsi
))
410 gimple_set_location (call
, gimple_location (gsi_stmt (*gsi
)));
411 gsi_insert_before (gsi
, call
, GSI_SAME_STMT
);
417 /* Copy EXP into a temporary. Allocate the temporary in the context of
418 INFO and insert the initialization statement before GSI. */
421 init_tmp_var (struct nesting_info
*info
, tree exp
, gimple_stmt_iterator
*gsi
)
426 t
= create_tmp_var_for (info
, TREE_TYPE (exp
), NULL
);
427 stmt
= gimple_build_assign (t
, exp
);
428 if (! gsi_end_p (*gsi
))
429 gimple_set_location (stmt
, gimple_location (gsi_stmt (*gsi
)));
430 gsi_insert_before_without_update (gsi
, stmt
, GSI_SAME_STMT
);
436 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
439 gsi_gimplify_val (struct nesting_info
*info
, tree exp
,
440 gimple_stmt_iterator
*gsi
)
442 if (is_gimple_val (exp
))
445 return init_tmp_var (info
, exp
, gsi
);
448 /* Similarly, but copy from the temporary and insert the statement
449 after the iterator. */
452 save_tmp_var (struct nesting_info
*info
, tree exp
, gimple_stmt_iterator
*gsi
)
457 t
= create_tmp_var_for (info
, TREE_TYPE (exp
), NULL
);
458 stmt
= gimple_build_assign (exp
, t
);
459 if (! gsi_end_p (*gsi
))
460 gimple_set_location (stmt
, gimple_location (gsi_stmt (*gsi
)));
461 gsi_insert_after_without_update (gsi
, stmt
, GSI_SAME_STMT
);
466 /* Build or return the type used to represent a nested function trampoline. */
468 static GTY(()) tree trampoline_type
;
471 get_trampoline_type (struct nesting_info
*info
)
473 unsigned align
, size
;
477 return trampoline_type
;
479 align
= TRAMPOLINE_ALIGNMENT
;
480 size
= TRAMPOLINE_SIZE
;
482 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
483 then allocate extra space so that we can do dynamic alignment. */
484 if (align
> STACK_BOUNDARY
)
486 size
+= ((align
/BITS_PER_UNIT
) - 1) & -(STACK_BOUNDARY
/BITS_PER_UNIT
);
487 align
= STACK_BOUNDARY
;
490 t
= build_index_type (build_int_cst (NULL_TREE
, size
- 1));
491 t
= build_array_type (char_type_node
, t
);
492 t
= build_decl (DECL_SOURCE_LOCATION (info
->context
),
493 FIELD_DECL
, get_identifier ("__data"), t
);
494 DECL_ALIGN (t
) = align
;
495 DECL_USER_ALIGN (t
) = 1;
497 trampoline_type
= make_node (RECORD_TYPE
);
498 TYPE_NAME (trampoline_type
) = get_identifier ("__builtin_trampoline");
499 TYPE_FIELDS (trampoline_type
) = t
;
500 layout_type (trampoline_type
);
501 DECL_CONTEXT (t
) = trampoline_type
;
503 return trampoline_type
;
506 /* Given DECL, a nested function, find or create a field in the non-local
507 frame structure for a trampoline for this function. */
510 lookup_tramp_for_decl (struct nesting_info
*info
, tree decl
,
511 enum insert_option insert
)
515 if (insert
== NO_INSERT
)
517 slot
= pointer_map_contains (info
->var_map
, decl
);
518 return slot
? (tree
) *slot
: NULL_TREE
;
521 slot
= pointer_map_insert (info
->var_map
, decl
);
524 tree field
= make_node (FIELD_DECL
);
525 DECL_NAME (field
) = DECL_NAME (decl
);
526 TREE_TYPE (field
) = get_trampoline_type (info
);
527 TREE_ADDRESSABLE (field
) = 1;
529 insert_field_into_struct (get_frame_type (info
), field
);
532 info
->any_tramp_created
= true;
538 /* Build or return the field within the non-local frame state that holds
539 the non-local goto "jmp_buf". The buffer itself is maintained by the
540 rtl middle-end as dynamic stack space is allocated. */
543 get_nl_goto_field (struct nesting_info
*info
)
545 tree field
= info
->nl_goto_field
;
551 /* For __builtin_nonlocal_goto, we need N words. The first is the
552 frame pointer, the rest is for the target's stack pointer save
553 area. The number of words is controlled by STACK_SAVEAREA_MODE;
554 not the best interface, but it'll do for now. */
555 if (Pmode
== ptr_mode
)
556 type
= ptr_type_node
;
558 type
= lang_hooks
.types
.type_for_mode (Pmode
, 1);
560 size
= GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL
));
561 size
= size
/ GET_MODE_SIZE (Pmode
);
564 type
= build_array_type
565 (type
, build_index_type (build_int_cst (NULL_TREE
, size
)));
567 field
= make_node (FIELD_DECL
);
568 DECL_NAME (field
) = get_identifier ("__nl_goto_buf");
569 TREE_TYPE (field
) = type
;
570 DECL_ALIGN (field
) = TYPE_ALIGN (type
);
571 TREE_ADDRESSABLE (field
) = 1;
573 insert_field_into_struct (get_frame_type (info
), field
);
575 info
->nl_goto_field
= field
;
581 /* Invoke CALLBACK on all statements of GIMPLE sequence SEQ. */
584 walk_body (walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
585 struct nesting_info
*info
, gimple_seq seq
)
587 struct walk_stmt_info wi
;
589 memset (&wi
, 0, sizeof (wi
));
592 walk_gimple_seq (seq
, callback_stmt
, callback_op
, &wi
);
596 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
599 walk_function (walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
600 struct nesting_info
*info
)
602 walk_body (callback_stmt
, callback_op
, info
, gimple_body (info
->context
));
605 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
608 walk_gimple_omp_for (gimple for_stmt
,
609 walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
610 struct nesting_info
*info
)
612 struct walk_stmt_info wi
;
617 walk_body (callback_stmt
, callback_op
, info
, gimple_omp_for_pre_body (for_stmt
));
619 seq
= gimple_seq_alloc ();
620 memset (&wi
, 0, sizeof (wi
));
622 wi
.gsi
= gsi_last (seq
);
624 for (i
= 0; i
< gimple_omp_for_collapse (for_stmt
); i
++)
627 walk_tree (gimple_omp_for_index_ptr (for_stmt
, i
), callback_op
,
631 walk_tree (gimple_omp_for_initial_ptr (for_stmt
, i
), callback_op
,
636 walk_tree (gimple_omp_for_final_ptr (for_stmt
, i
), callback_op
,
639 t
= gimple_omp_for_incr (for_stmt
, i
);
640 gcc_assert (BINARY_CLASS_P (t
));
642 walk_tree (&TREE_OPERAND (t
, 0), callback_op
, &wi
, NULL
);
645 walk_tree (&TREE_OPERAND (t
, 1), callback_op
, &wi
, NULL
);
648 if (gimple_seq_empty_p (seq
))
649 gimple_seq_free (seq
);
652 gimple_seq pre_body
= gimple_omp_for_pre_body (for_stmt
);
653 annotate_all_with_location (seq
, gimple_location (for_stmt
));
654 gimple_seq_add_seq (&pre_body
, seq
);
655 gimple_omp_for_set_pre_body (for_stmt
, pre_body
);
659 /* Similarly for ROOT and all functions nested underneath, depth first. */
662 walk_all_functions (walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
663 struct nesting_info
*root
)
665 struct nesting_info
*n
;
666 FOR_EACH_NEST_INFO (n
, root
)
667 walk_function (callback_stmt
, callback_op
, n
);
671 /* We have to check for a fairly pathological case. The operands of function
672 nested function are to be interpreted in the context of the enclosing
673 function. So if any are variably-sized, they will get remapped when the
674 enclosing function is inlined. But that remapping would also have to be
675 done in the types of the PARM_DECLs of the nested function, meaning the
676 argument types of that function will disagree with the arguments in the
677 calls to that function. So we'd either have to make a copy of the nested
678 function corresponding to each time the enclosing function was inlined or
679 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
680 function. The former is not practical. The latter would still require
681 detecting this case to know when to add the conversions. So, for now at
682 least, we don't inline such an enclosing function.
684 We have to do that check recursively, so here return indicating whether
685 FNDECL has such a nested function. ORIG_FN is the function we were
686 trying to inline to use for checking whether any argument is variably
687 modified by anything in it.
689 It would be better to do this in tree-inline.c so that we could give
690 the appropriate warning for why a function can't be inlined, but that's
691 too late since the nesting structure has already been flattened and
692 adding a flag just to record this fact seems a waste of a flag. */
695 check_for_nested_with_variably_modified (tree fndecl
, tree orig_fndecl
)
697 struct cgraph_node
*cgn
= cgraph_node (fndecl
);
700 for (cgn
= cgn
->nested
; cgn
; cgn
= cgn
->next_nested
)
702 for (arg
= DECL_ARGUMENTS (cgn
->decl
); arg
; arg
= TREE_CHAIN (arg
))
703 if (variably_modified_type_p (TREE_TYPE (arg
), orig_fndecl
))
706 if (check_for_nested_with_variably_modified (cgn
->decl
, orig_fndecl
))
713 /* Construct our local datastructure describing the function nesting
714 tree rooted by CGN. */
716 static struct nesting_info
*
717 create_nesting_tree (struct cgraph_node
*cgn
)
719 struct nesting_info
*info
= XCNEW (struct nesting_info
);
720 info
->field_map
= pointer_map_create ();
721 info
->var_map
= pointer_map_create ();
722 info
->suppress_expansion
= BITMAP_ALLOC (&nesting_info_bitmap_obstack
);
723 info
->context
= cgn
->decl
;
725 for (cgn
= cgn
->nested
; cgn
; cgn
= cgn
->next_nested
)
727 struct nesting_info
*sub
= create_nesting_tree (cgn
);
729 sub
->next
= info
->inner
;
733 /* See discussion at check_for_nested_with_variably_modified for a
734 discussion of why this has to be here. */
735 if (check_for_nested_with_variably_modified (info
->context
, info
->context
))
736 DECL_UNINLINABLE (info
->context
) = true;
741 /* Return an expression computing the static chain for TARGET_CONTEXT
742 from INFO->CONTEXT. Insert any necessary computations before TSI. */
745 get_static_chain (struct nesting_info
*info
, tree target_context
,
746 gimple_stmt_iterator
*gsi
)
748 struct nesting_info
*i
;
751 if (info
->context
== target_context
)
753 x
= build_addr (info
->frame_decl
, target_context
);
757 x
= get_chain_decl (info
);
759 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
761 tree field
= get_chain_field (i
);
763 x
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (x
)), x
);
764 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
765 x
= init_tmp_var (info
, x
, gsi
);
773 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
774 frame as seen from INFO->CONTEXT. Insert any necessary computations
778 get_frame_field (struct nesting_info
*info
, tree target_context
,
779 tree field
, gimple_stmt_iterator
*gsi
)
781 struct nesting_info
*i
;
784 if (info
->context
== target_context
)
786 /* Make sure frame_decl gets created. */
787 (void) get_frame_type (info
);
788 x
= info
->frame_decl
;
792 x
= get_chain_decl (info
);
794 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
796 tree field
= get_chain_field (i
);
798 x
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (x
)), x
);
799 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
800 x
= init_tmp_var (info
, x
, gsi
);
803 x
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (x
)), x
);
806 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
810 static void note_nonlocal_vla_type (struct nesting_info
*info
, tree type
);
812 /* A subroutine of convert_nonlocal_reference_op. Create a local variable
813 in the nested function with DECL_VALUE_EXPR set to reference the true
814 variable in the parent function. This is used both for debug info
815 and in OpenMP lowering. */
818 get_nonlocal_debug_decl (struct nesting_info
*info
, tree decl
)
821 struct nesting_info
*i
;
822 tree x
, field
, new_decl
;
825 slot
= pointer_map_insert (info
->var_map
, decl
);
830 target_context
= decl_function_context (decl
);
832 /* A copy of the code in get_frame_field, but without the temporaries. */
833 if (info
->context
== target_context
)
835 /* Make sure frame_decl gets created. */
836 (void) get_frame_type (info
);
837 x
= info
->frame_decl
;
842 x
= get_chain_decl (info
);
843 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
845 field
= get_chain_field (i
);
846 x
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (x
)), x
);
847 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
849 x
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (x
)), x
);
852 field
= lookup_field_for_decl (i
, decl
, INSERT
);
853 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
854 if (use_pointer_in_frame (decl
))
855 x
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (x
)), x
);
857 /* ??? We should be remapping types as well, surely. */
858 new_decl
= build_decl (DECL_SOURCE_LOCATION (decl
),
859 VAR_DECL
, DECL_NAME (decl
), TREE_TYPE (decl
));
860 DECL_CONTEXT (new_decl
) = info
->context
;
861 DECL_ARTIFICIAL (new_decl
) = DECL_ARTIFICIAL (decl
);
862 DECL_IGNORED_P (new_decl
) = DECL_IGNORED_P (decl
);
863 TREE_THIS_VOLATILE (new_decl
) = TREE_THIS_VOLATILE (decl
);
864 TREE_SIDE_EFFECTS (new_decl
) = TREE_SIDE_EFFECTS (decl
);
865 TREE_READONLY (new_decl
) = TREE_READONLY (decl
);
866 TREE_ADDRESSABLE (new_decl
) = TREE_ADDRESSABLE (decl
);
867 DECL_SEEN_IN_BIND_EXPR_P (new_decl
) = 1;
868 if ((TREE_CODE (decl
) == PARM_DECL
869 || TREE_CODE (decl
) == RESULT_DECL
870 || TREE_CODE (decl
) == VAR_DECL
)
871 && DECL_BY_REFERENCE (decl
))
872 DECL_BY_REFERENCE (new_decl
) = 1;
874 SET_DECL_VALUE_EXPR (new_decl
, x
);
875 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
878 TREE_CHAIN (new_decl
) = info
->debug_var_chain
;
879 info
->debug_var_chain
= new_decl
;
882 && info
->context
!= target_context
883 && variably_modified_type_p (TREE_TYPE (decl
), NULL
))
884 note_nonlocal_vla_type (info
, TREE_TYPE (decl
));
890 /* Callback for walk_gimple_stmt, rewrite all references to VAR
891 and PARM_DECLs that belong to outer functions.
893 The rewrite will involve some number of structure accesses back up
894 the static chain. E.g. for a variable FOO up one nesting level it'll
895 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
896 indirections apply to decls for which use_pointer_in_frame is true. */
899 convert_nonlocal_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
901 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
902 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
906 switch (TREE_CODE (t
))
909 /* Non-automatic variables are never processed. */
910 if (TREE_STATIC (t
) || DECL_EXTERNAL (t
))
915 if (decl_function_context (t
) != info
->context
)
920 x
= get_nonlocal_debug_decl (info
, t
);
921 if (!bitmap_bit_p (info
->suppress_expansion
, DECL_UID (t
)))
923 tree target_context
= decl_function_context (t
);
924 struct nesting_info
*i
;
925 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
927 x
= lookup_field_for_decl (i
, t
, INSERT
);
928 x
= get_frame_field (info
, target_context
, x
, &wi
->gsi
);
929 if (use_pointer_in_frame (t
))
931 x
= init_tmp_var (info
, x
, &wi
->gsi
);
932 x
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (x
)), x
);
939 x
= save_tmp_var (info
, x
, &wi
->gsi
);
941 x
= init_tmp_var (info
, x
, &wi
->gsi
);
949 /* We're taking the address of a label from a parent function, but
950 this is not itself a non-local goto. Mark the label such that it
951 will not be deleted, much as we would with a label address in
953 if (decl_function_context (t
) != info
->context
)
954 FORCED_LABEL (t
) = 1;
959 bool save_val_only
= wi
->val_only
;
961 wi
->val_only
= false;
964 walk_tree (&TREE_OPERAND (t
, 0), convert_nonlocal_reference_op
, wi
, 0);
971 /* If we changed anything, we might no longer be directly
972 referencing a decl. */
973 save_context
= current_function_decl
;
974 current_function_decl
= info
->context
;
975 recompute_tree_invariant_for_addr_expr (t
);
976 current_function_decl
= save_context
;
978 /* If the callback converted the address argument in a context
979 where we only accept variables (and min_invariant, presumably),
980 then compute the address into a temporary. */
982 *tp
= gsi_gimplify_val ((struct nesting_info
*) wi
->info
,
992 case ARRAY_RANGE_REF
:
994 /* Go down this entire nest and just look at the final prefix and
995 anything that describes the references. Otherwise, we lose track
996 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
999 for (; handled_component_p (t
); tp
= &TREE_OPERAND (t
, 0), t
= *tp
)
1001 if (TREE_CODE (t
) == COMPONENT_REF
)
1002 walk_tree (&TREE_OPERAND (t
, 2), convert_nonlocal_reference_op
, wi
,
1004 else if (TREE_CODE (t
) == ARRAY_REF
1005 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
1007 walk_tree (&TREE_OPERAND (t
, 1), convert_nonlocal_reference_op
,
1009 walk_tree (&TREE_OPERAND (t
, 2), convert_nonlocal_reference_op
,
1011 walk_tree (&TREE_OPERAND (t
, 3), convert_nonlocal_reference_op
,
1014 else if (TREE_CODE (t
) == BIT_FIELD_REF
)
1016 walk_tree (&TREE_OPERAND (t
, 1), convert_nonlocal_reference_op
,
1018 walk_tree (&TREE_OPERAND (t
, 2), convert_nonlocal_reference_op
,
1022 wi
->val_only
= false;
1023 walk_tree (tp
, convert_nonlocal_reference_op
, wi
, NULL
);
1026 case VIEW_CONVERT_EXPR
:
1027 /* Just request to look at the subtrees, leaving val_only and lhs
1028 untouched. This might actually be for !val_only + lhs, in which
1029 case we don't want to force a replacement by a temporary. */
1034 if (!IS_TYPE_OR_DECL_P (t
))
1037 wi
->val_only
= true;
1046 static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator
*, bool *,
1047 struct walk_stmt_info
*);
1049 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1050 and PARM_DECLs that belong to outer functions. */
1053 convert_nonlocal_omp_clauses (tree
*pclauses
, struct walk_stmt_info
*wi
)
1055 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1056 bool need_chain
= false, need_stmts
= false;
1059 bitmap new_suppress
;
1061 new_suppress
= BITMAP_GGC_ALLOC ();
1062 bitmap_copy (new_suppress
, info
->suppress_expansion
);
1064 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1066 switch (OMP_CLAUSE_CODE (clause
))
1068 case OMP_CLAUSE_REDUCTION
:
1069 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1071 goto do_decl_clause
;
1073 case OMP_CLAUSE_LASTPRIVATE
:
1074 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
))
1076 goto do_decl_clause
;
1078 case OMP_CLAUSE_PRIVATE
:
1079 case OMP_CLAUSE_FIRSTPRIVATE
:
1080 case OMP_CLAUSE_COPYPRIVATE
:
1081 case OMP_CLAUSE_SHARED
:
1083 decl
= OMP_CLAUSE_DECL (clause
);
1084 if (TREE_CODE (decl
) == VAR_DECL
1085 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1087 if (decl_function_context (decl
) != info
->context
)
1089 bitmap_set_bit (new_suppress
, DECL_UID (decl
));
1090 OMP_CLAUSE_DECL (clause
) = get_nonlocal_debug_decl (info
, decl
);
1095 case OMP_CLAUSE_SCHEDULE
:
1096 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause
) == NULL
)
1100 case OMP_CLAUSE_NUM_THREADS
:
1101 wi
->val_only
= true;
1103 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
1107 case OMP_CLAUSE_NOWAIT
:
1108 case OMP_CLAUSE_ORDERED
:
1109 case OMP_CLAUSE_DEFAULT
:
1110 case OMP_CLAUSE_COPYIN
:
1111 case OMP_CLAUSE_COLLAPSE
:
1112 case OMP_CLAUSE_UNTIED
:
1120 info
->suppress_expansion
= new_suppress
;
1123 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1124 switch (OMP_CLAUSE_CODE (clause
))
1126 case OMP_CLAUSE_REDUCTION
:
1127 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1130 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
));
1131 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1133 walk_body (convert_nonlocal_reference_stmt
,
1134 convert_nonlocal_reference_op
, info
,
1135 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause
));
1136 walk_body (convert_nonlocal_reference_stmt
,
1137 convert_nonlocal_reference_op
, info
,
1138 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause
));
1139 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1144 case OMP_CLAUSE_LASTPRIVATE
:
1145 walk_body (convert_nonlocal_reference_stmt
,
1146 convert_nonlocal_reference_op
, info
,
1147 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
));
1157 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1160 note_nonlocal_vla_type (struct nesting_info
*info
, tree type
)
1162 while (POINTER_TYPE_P (type
) && !TYPE_NAME (type
))
1163 type
= TREE_TYPE (type
);
1165 if (TYPE_NAME (type
)
1166 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
1167 && DECL_ORIGINAL_TYPE (TYPE_NAME (type
)))
1168 type
= DECL_ORIGINAL_TYPE (TYPE_NAME (type
));
1170 while (POINTER_TYPE_P (type
)
1171 || TREE_CODE (type
) == VECTOR_TYPE
1172 || TREE_CODE (type
) == FUNCTION_TYPE
1173 || TREE_CODE (type
) == METHOD_TYPE
)
1174 type
= TREE_TYPE (type
);
1176 if (TREE_CODE (type
) == ARRAY_TYPE
)
1180 note_nonlocal_vla_type (info
, TREE_TYPE (type
));
1181 domain
= TYPE_DOMAIN (type
);
1184 t
= TYPE_MIN_VALUE (domain
);
1185 if (t
&& (TREE_CODE (t
) == VAR_DECL
|| TREE_CODE (t
) == PARM_DECL
)
1186 && decl_function_context (t
) != info
->context
)
1187 get_nonlocal_debug_decl (info
, t
);
1188 t
= TYPE_MAX_VALUE (domain
);
1189 if (t
&& (TREE_CODE (t
) == VAR_DECL
|| TREE_CODE (t
) == PARM_DECL
)
1190 && decl_function_context (t
) != info
->context
)
1191 get_nonlocal_debug_decl (info
, t
);
1196 /* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
1200 note_nonlocal_block_vlas (struct nesting_info
*info
, tree block
)
1204 for (var
= BLOCK_VARS (block
); var
; var
= TREE_CHAIN (var
))
1205 if (TREE_CODE (var
) == VAR_DECL
1206 && variably_modified_type_p (TREE_TYPE (var
), NULL
)
1207 && DECL_HAS_VALUE_EXPR_P (var
)
1208 && decl_function_context (var
) != info
->context
)
1209 note_nonlocal_vla_type (info
, TREE_TYPE (var
));
1212 /* Callback for walk_gimple_stmt. Rewrite all references to VAR and
1213 PARM_DECLs that belong to outer functions. This handles statements
1214 that are not handled via the standard recursion done in
1215 walk_gimple_stmt. STMT is the statement to examine, DATA is as in
1216 convert_nonlocal_reference_op. Set *HANDLED_OPS_P to true if all the
1217 operands of STMT have been handled by this function. */
1220 convert_nonlocal_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
1221 struct walk_stmt_info
*wi
)
1223 struct nesting_info
*info
= (struct nesting_info
*) wi
->info
;
1224 tree save_local_var_chain
;
1225 bitmap save_suppress
;
1226 gimple stmt
= gsi_stmt (*gsi
);
1228 switch (gimple_code (stmt
))
1231 /* Don't walk non-local gotos for now. */
1232 if (TREE_CODE (gimple_goto_dest (stmt
)) != LABEL_DECL
)
1234 wi
->val_only
= true;
1236 *handled_ops_p
= true;
1241 case GIMPLE_OMP_PARALLEL
:
1242 case GIMPLE_OMP_TASK
:
1243 save_suppress
= info
->suppress_expansion
;
1244 if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt
),
1248 decl
= get_chain_decl (info
);
1249 c
= build_omp_clause (gimple_location (stmt
),
1250 OMP_CLAUSE_FIRSTPRIVATE
);
1251 OMP_CLAUSE_DECL (c
) = decl
;
1252 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
1253 gimple_omp_taskreg_set_clauses (stmt
, c
);
1256 save_local_var_chain
= info
->new_local_var_chain
;
1257 info
->new_local_var_chain
= NULL
;
1259 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1260 info
, gimple_omp_body (stmt
));
1262 if (info
->new_local_var_chain
)
1263 declare_vars (info
->new_local_var_chain
,
1264 gimple_seq_first_stmt (gimple_omp_body (stmt
)),
1266 info
->new_local_var_chain
= save_local_var_chain
;
1267 info
->suppress_expansion
= save_suppress
;
1270 case GIMPLE_OMP_FOR
:
1271 save_suppress
= info
->suppress_expansion
;
1272 convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt
), wi
);
1273 walk_gimple_omp_for (stmt
, convert_nonlocal_reference_stmt
,
1274 convert_nonlocal_reference_op
, info
);
1275 walk_body (convert_nonlocal_reference_stmt
,
1276 convert_nonlocal_reference_op
, info
, gimple_omp_body (stmt
));
1277 info
->suppress_expansion
= save_suppress
;
1280 case GIMPLE_OMP_SECTIONS
:
1281 save_suppress
= info
->suppress_expansion
;
1282 convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt
), wi
);
1283 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1284 info
, gimple_omp_body (stmt
));
1285 info
->suppress_expansion
= save_suppress
;
1288 case GIMPLE_OMP_SINGLE
:
1289 save_suppress
= info
->suppress_expansion
;
1290 convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt
), wi
);
1291 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1292 info
, gimple_omp_body (stmt
));
1293 info
->suppress_expansion
= save_suppress
;
1296 case GIMPLE_OMP_SECTION
:
1297 case GIMPLE_OMP_MASTER
:
1298 case GIMPLE_OMP_ORDERED
:
1299 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1300 info
, gimple_omp_body (stmt
));
1304 if (!optimize
&& gimple_bind_block (stmt
))
1305 note_nonlocal_block_vlas (info
, gimple_bind_block (stmt
));
1307 *handled_ops_p
= false;
1311 /* For every other statement that we are not interested in
1312 handling here, let the walker traverse the operands. */
1313 *handled_ops_p
= false;
1317 /* We have handled all of STMT operands, no need to traverse the operands. */
1318 *handled_ops_p
= true;
1323 /* A subroutine of convert_local_reference. Create a local variable
1324 in the parent function with DECL_VALUE_EXPR set to reference the
1325 field in FRAME. This is used both for debug info and in OpenMP
1329 get_local_debug_decl (struct nesting_info
*info
, tree decl
, tree field
)
1334 slot
= pointer_map_insert (info
->var_map
, decl
);
1336 return (tree
) *slot
;
1338 /* Make sure frame_decl gets created. */
1339 (void) get_frame_type (info
);
1340 x
= info
->frame_decl
;
1341 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
1343 new_decl
= build_decl (DECL_SOURCE_LOCATION (decl
),
1344 VAR_DECL
, DECL_NAME (decl
), TREE_TYPE (decl
));
1345 DECL_CONTEXT (new_decl
) = info
->context
;
1346 DECL_ARTIFICIAL (new_decl
) = DECL_ARTIFICIAL (decl
);
1347 DECL_IGNORED_P (new_decl
) = DECL_IGNORED_P (decl
);
1348 TREE_THIS_VOLATILE (new_decl
) = TREE_THIS_VOLATILE (decl
);
1349 TREE_SIDE_EFFECTS (new_decl
) = TREE_SIDE_EFFECTS (decl
);
1350 TREE_READONLY (new_decl
) = TREE_READONLY (decl
);
1351 TREE_ADDRESSABLE (new_decl
) = TREE_ADDRESSABLE (decl
);
1352 DECL_SEEN_IN_BIND_EXPR_P (new_decl
) = 1;
1353 if ((TREE_CODE (decl
) == PARM_DECL
1354 || TREE_CODE (decl
) == RESULT_DECL
1355 || TREE_CODE (decl
) == VAR_DECL
)
1356 && DECL_BY_REFERENCE (decl
))
1357 DECL_BY_REFERENCE (new_decl
) = 1;
1359 SET_DECL_VALUE_EXPR (new_decl
, x
);
1360 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
1363 TREE_CHAIN (new_decl
) = info
->debug_var_chain
;
1364 info
->debug_var_chain
= new_decl
;
1366 /* Do not emit debug info twice. */
1367 DECL_IGNORED_P (decl
) = 1;
1373 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1374 and PARM_DECLs that were referenced by inner nested functions.
1375 The rewrite will be a structure reference to the local frame variable. */
1377 static bool convert_local_omp_clauses (tree
*, struct walk_stmt_info
*);
1380 convert_local_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
1382 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
1383 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1384 tree t
= *tp
, field
, x
;
1388 switch (TREE_CODE (t
))
1391 /* Non-automatic variables are never processed. */
1392 if (TREE_STATIC (t
) || DECL_EXTERNAL (t
))
1397 if (decl_function_context (t
) == info
->context
)
1399 /* If we copied a pointer to the frame, then the original decl
1400 is used unchanged in the parent function. */
1401 if (use_pointer_in_frame (t
))
1404 /* No need to transform anything if no child references the
1406 field
= lookup_field_for_decl (info
, t
, NO_INSERT
);
1411 x
= get_local_debug_decl (info
, t
, field
);
1412 if (!bitmap_bit_p (info
->suppress_expansion
, DECL_UID (t
)))
1413 x
= get_frame_field (info
, info
->context
, field
, &wi
->gsi
);
1418 x
= save_tmp_var (info
, x
, &wi
->gsi
);
1420 x
= init_tmp_var (info
, x
, &wi
->gsi
);
1428 save_val_only
= wi
->val_only
;
1429 wi
->val_only
= false;
1431 wi
->changed
= false;
1432 walk_tree (&TREE_OPERAND (t
, 0), convert_local_reference_op
, wi
, NULL
);
1433 wi
->val_only
= save_val_only
;
1435 /* If we converted anything ... */
1440 /* Then the frame decl is now addressable. */
1441 TREE_ADDRESSABLE (info
->frame_decl
) = 1;
1443 save_context
= current_function_decl
;
1444 current_function_decl
= info
->context
;
1445 recompute_tree_invariant_for_addr_expr (t
);
1446 current_function_decl
= save_context
;
1448 /* If we are in a context where we only accept values, then
1449 compute the address into a temporary. */
1451 *tp
= gsi_gimplify_val ((struct nesting_info
*) wi
->info
,
1460 case ARRAY_RANGE_REF
:
1462 /* Go down this entire nest and just look at the final prefix and
1463 anything that describes the references. Otherwise, we lose track
1464 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1465 save_val_only
= wi
->val_only
;
1466 wi
->val_only
= true;
1468 for (; handled_component_p (t
); tp
= &TREE_OPERAND (t
, 0), t
= *tp
)
1470 if (TREE_CODE (t
) == COMPONENT_REF
)
1471 walk_tree (&TREE_OPERAND (t
, 2), convert_local_reference_op
, wi
,
1473 else if (TREE_CODE (t
) == ARRAY_REF
1474 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
1476 walk_tree (&TREE_OPERAND (t
, 1), convert_local_reference_op
, wi
,
1478 walk_tree (&TREE_OPERAND (t
, 2), convert_local_reference_op
, wi
,
1480 walk_tree (&TREE_OPERAND (t
, 3), convert_local_reference_op
, wi
,
1483 else if (TREE_CODE (t
) == BIT_FIELD_REF
)
1485 walk_tree (&TREE_OPERAND (t
, 1), convert_local_reference_op
, wi
,
1487 walk_tree (&TREE_OPERAND (t
, 2), convert_local_reference_op
, wi
,
1491 wi
->val_only
= false;
1492 walk_tree (tp
, convert_local_reference_op
, wi
, NULL
);
1493 wi
->val_only
= save_val_only
;
1496 case VIEW_CONVERT_EXPR
:
1497 /* Just request to look at the subtrees, leaving val_only and lhs
1498 untouched. This might actually be for !val_only + lhs, in which
1499 case we don't want to force a replacement by a temporary. */
1504 if (!IS_TYPE_OR_DECL_P (t
))
1507 wi
->val_only
= true;
1516 static tree
convert_local_reference_stmt (gimple_stmt_iterator
*, bool *,
1517 struct walk_stmt_info
*);
1519 /* Helper for convert_local_reference. Convert all the references in
1520 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
1523 convert_local_omp_clauses (tree
*pclauses
, struct walk_stmt_info
*wi
)
1525 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1526 bool need_frame
= false, need_stmts
= false;
1529 bitmap new_suppress
;
1531 new_suppress
= BITMAP_GGC_ALLOC ();
1532 bitmap_copy (new_suppress
, info
->suppress_expansion
);
1534 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1536 switch (OMP_CLAUSE_CODE (clause
))
1538 case OMP_CLAUSE_REDUCTION
:
1539 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1541 goto do_decl_clause
;
1543 case OMP_CLAUSE_LASTPRIVATE
:
1544 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
))
1546 goto do_decl_clause
;
1548 case OMP_CLAUSE_PRIVATE
:
1549 case OMP_CLAUSE_FIRSTPRIVATE
:
1550 case OMP_CLAUSE_COPYPRIVATE
:
1551 case OMP_CLAUSE_SHARED
:
1553 decl
= OMP_CLAUSE_DECL (clause
);
1554 if (TREE_CODE (decl
) == VAR_DECL
1555 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1557 if (decl_function_context (decl
) == info
->context
1558 && !use_pointer_in_frame (decl
))
1560 tree field
= lookup_field_for_decl (info
, decl
, NO_INSERT
);
1563 bitmap_set_bit (new_suppress
, DECL_UID (decl
));
1564 OMP_CLAUSE_DECL (clause
)
1565 = get_local_debug_decl (info
, decl
, field
);
1571 case OMP_CLAUSE_SCHEDULE
:
1572 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause
) == NULL
)
1576 case OMP_CLAUSE_NUM_THREADS
:
1577 wi
->val_only
= true;
1579 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0), &dummy
,
1583 case OMP_CLAUSE_NOWAIT
:
1584 case OMP_CLAUSE_ORDERED
:
1585 case OMP_CLAUSE_DEFAULT
:
1586 case OMP_CLAUSE_COPYIN
:
1587 case OMP_CLAUSE_COLLAPSE
:
1588 case OMP_CLAUSE_UNTIED
:
1596 info
->suppress_expansion
= new_suppress
;
1599 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1600 switch (OMP_CLAUSE_CODE (clause
))
1602 case OMP_CLAUSE_REDUCTION
:
1603 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1606 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
));
1607 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1609 walk_body (convert_local_reference_stmt
,
1610 convert_local_reference_op
, info
,
1611 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause
));
1612 walk_body (convert_local_reference_stmt
,
1613 convert_local_reference_op
, info
,
1614 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause
));
1615 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1620 case OMP_CLAUSE_LASTPRIVATE
:
1621 walk_body (convert_local_reference_stmt
,
1622 convert_local_reference_op
, info
,
1623 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
));
1634 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1635 and PARM_DECLs that were referenced by inner nested functions.
1636 The rewrite will be a structure reference to the local frame variable. */
1639 convert_local_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
1640 struct walk_stmt_info
*wi
)
1642 struct nesting_info
*info
= (struct nesting_info
*) wi
->info
;
1643 tree save_local_var_chain
;
1644 bitmap save_suppress
;
1645 gimple stmt
= gsi_stmt (*gsi
);
1647 switch (gimple_code (stmt
))
1649 case GIMPLE_OMP_PARALLEL
:
1650 case GIMPLE_OMP_TASK
:
1651 save_suppress
= info
->suppress_expansion
;
1652 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt
),
1656 (void) get_frame_type (info
);
1657 c
= build_omp_clause (gimple_location (stmt
),
1659 OMP_CLAUSE_DECL (c
) = info
->frame_decl
;
1660 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
1661 gimple_omp_taskreg_set_clauses (stmt
, c
);
1664 save_local_var_chain
= info
->new_local_var_chain
;
1665 info
->new_local_var_chain
= NULL
;
1667 walk_body (convert_local_reference_stmt
, convert_local_reference_op
, info
,
1668 gimple_omp_body (stmt
));
1670 if (info
->new_local_var_chain
)
1671 declare_vars (info
->new_local_var_chain
,
1672 gimple_seq_first_stmt (gimple_omp_body (stmt
)), false);
1673 info
->new_local_var_chain
= save_local_var_chain
;
1674 info
->suppress_expansion
= save_suppress
;
1677 case GIMPLE_OMP_FOR
:
1678 save_suppress
= info
->suppress_expansion
;
1679 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt
), wi
);
1680 walk_gimple_omp_for (stmt
, convert_local_reference_stmt
,
1681 convert_local_reference_op
, info
);
1682 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
1683 info
, gimple_omp_body (stmt
));
1684 info
->suppress_expansion
= save_suppress
;
1687 case GIMPLE_OMP_SECTIONS
:
1688 save_suppress
= info
->suppress_expansion
;
1689 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt
), wi
);
1690 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
1691 info
, gimple_omp_body (stmt
));
1692 info
->suppress_expansion
= save_suppress
;
1695 case GIMPLE_OMP_SINGLE
:
1696 save_suppress
= info
->suppress_expansion
;
1697 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt
), wi
);
1698 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
1699 info
, gimple_omp_body (stmt
));
1700 info
->suppress_expansion
= save_suppress
;
1703 case GIMPLE_OMP_SECTION
:
1704 case GIMPLE_OMP_MASTER
:
1705 case GIMPLE_OMP_ORDERED
:
1706 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
1707 info
, gimple_omp_body (stmt
));
1711 /* For every other statement that we are not interested in
1712 handling here, let the walker traverse the operands. */
1713 *handled_ops_p
= false;
1717 /* Indicate that we have handled all the operands ourselves. */
1718 *handled_ops_p
= true;
1723 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
1724 that reference labels from outer functions. The rewrite will be a
1725 call to __builtin_nonlocal_goto. */
1728 convert_nl_goto_reference (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
1729 struct walk_stmt_info
*wi
)
1731 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
, *i
;
1732 tree label
, new_label
, target_context
, x
, field
;
1735 gimple stmt
= gsi_stmt (*gsi
);
1737 if (gimple_code (stmt
) != GIMPLE_GOTO
)
1739 *handled_ops_p
= false;
1743 label
= gimple_goto_dest (stmt
);
1744 if (TREE_CODE (label
) != LABEL_DECL
)
1746 *handled_ops_p
= false;
1750 target_context
= decl_function_context (label
);
1751 if (target_context
== info
->context
)
1753 *handled_ops_p
= false;
1757 for (i
= info
->outer
; target_context
!= i
->context
; i
= i
->outer
)
1760 /* The original user label may also be use for a normal goto, therefore
1761 we must create a new label that will actually receive the abnormal
1762 control transfer. This new label will be marked LABEL_NONLOCAL; this
1763 mark will trigger proper behavior in the cfg, as well as cause the
1764 (hairy target-specific) non-local goto receiver code to be generated
1765 when we expand rtl. Enter this association into var_map so that we
1766 can insert the new label into the IL during a second pass. */
1767 slot
= pointer_map_insert (i
->var_map
, label
);
1770 new_label
= create_artificial_label (UNKNOWN_LOCATION
);
1771 DECL_NONLOCAL (new_label
) = 1;
1775 new_label
= (tree
) *slot
;
1777 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
1778 field
= get_nl_goto_field (i
);
1779 x
= get_frame_field (info
, target_context
, field
, &wi
->gsi
);
1780 x
= build_addr (x
, target_context
);
1781 x
= gsi_gimplify_val (info
, x
, &wi
->gsi
);
1782 call
= gimple_build_call (implicit_built_in_decls
[BUILT_IN_NONLOCAL_GOTO
], 2,
1783 build_addr (new_label
, target_context
), x
);
1784 gsi_replace (&wi
->gsi
, call
, false);
1786 /* We have handled all of STMT's operands, no need to keep going. */
1787 *handled_ops_p
= true;
1792 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
1793 are referenced via nonlocal goto from a nested function. The rewrite
1794 will involve installing a newly generated DECL_NONLOCAL label, and
1795 (potentially) a branch around the rtl gunk that is assumed to be
1796 attached to such a label. */
1799 convert_nl_goto_receiver (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
1800 struct walk_stmt_info
*wi
)
1802 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1803 tree label
, new_label
;
1804 gimple_stmt_iterator tmp_gsi
;
1806 gimple stmt
= gsi_stmt (*gsi
);
1808 if (gimple_code (stmt
) != GIMPLE_LABEL
)
1810 *handled_ops_p
= false;
1814 label
= gimple_label_label (stmt
);
1816 slot
= pointer_map_contains (info
->var_map
, label
);
1819 *handled_ops_p
= false;
1823 /* If there's any possibility that the previous statement falls through,
1824 then we must branch around the new non-local label. */
1826 gsi_prev (&tmp_gsi
);
1827 if (gsi_end_p (tmp_gsi
) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi
)))
1829 gimple stmt
= gimple_build_goto (label
);
1830 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
1833 new_label
= (tree
) *slot
;
1834 stmt
= gimple_build_label (new_label
);
1835 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
1837 *handled_ops_p
= true;
1842 /* Called via walk_function+walk_stmt, rewrite all references to addresses
1843 of nested functions that require the use of trampolines. The rewrite
1844 will involve a reference a trampoline generated for the occasion. */
1847 convert_tramp_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
1849 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
1850 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
, *i
;
1851 tree t
= *tp
, decl
, target_context
, x
, builtin
;
1855 switch (TREE_CODE (t
))
1859 T.1 = &CHAIN->tramp;
1860 T.2 = __builtin_adjust_trampoline (T.1);
1861 T.3 = (func_type)T.2;
1864 decl
= TREE_OPERAND (t
, 0);
1865 if (TREE_CODE (decl
) != FUNCTION_DECL
)
1868 /* Only need to process nested functions. */
1869 target_context
= decl_function_context (decl
);
1870 if (!target_context
)
1873 /* If the nested function doesn't use a static chain, then
1874 it doesn't need a trampoline. */
1875 if (!DECL_STATIC_CHAIN (decl
))
1878 /* If we don't want a trampoline, then don't build one. */
1879 if (TREE_NO_TRAMPOLINE (t
))
1882 /* Lookup the immediate parent of the callee, as that's where
1883 we need to insert the trampoline. */
1884 for (i
= info
; i
->context
!= target_context
; i
= i
->outer
)
1886 x
= lookup_tramp_for_decl (i
, decl
, INSERT
);
1888 /* Compute the address of the field holding the trampoline. */
1889 x
= get_frame_field (info
, target_context
, x
, &wi
->gsi
);
1890 x
= build_addr (x
, target_context
);
1891 x
= gsi_gimplify_val (info
, x
, &wi
->gsi
);
1893 /* Do machine-specific ugliness. Normally this will involve
1894 computing extra alignment, but it can really be anything. */
1895 builtin
= implicit_built_in_decls
[BUILT_IN_ADJUST_TRAMPOLINE
];
1896 call
= gimple_build_call (builtin
, 1, x
);
1897 x
= init_tmp_var_with_call (info
, &wi
->gsi
, call
);
1899 /* Cast back to the proper function type. */
1900 x
= build1 (NOP_EXPR
, TREE_TYPE (t
), x
);
1901 x
= init_tmp_var (info
, x
, &wi
->gsi
);
1907 if (!IS_TYPE_OR_DECL_P (t
))
1916 /* Called via walk_function+walk_gimple_stmt, rewrite all references
1917 to addresses of nested functions that require the use of
1918 trampolines. The rewrite will involve a reference a trampoline
1919 generated for the occasion. */
1922 convert_tramp_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
1923 struct walk_stmt_info
*wi
)
1925 gimple stmt
= gsi_stmt (*gsi
);
1927 switch (gimple_code (stmt
))
1931 /* Only walk call arguments, lest we generate trampolines for
1933 unsigned long i
, nargs
= gimple_call_num_args (stmt
);
1934 for (i
= 0; i
< nargs
; i
++)
1935 walk_tree (gimple_call_arg_ptr (stmt
, i
), convert_tramp_reference_op
,
1938 *handled_ops_p
= true;
1946 *handled_ops_p
= false;
1952 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
1953 that reference nested functions to make sure that the static chain
1954 is set up properly for the call. */
1957 convert_gimple_call (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
1958 struct walk_stmt_info
*wi
)
1960 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1961 tree decl
, target_context
;
1962 char save_static_chain_added
;
1964 gimple stmt
= gsi_stmt (*gsi
);
1966 switch (gimple_code (stmt
))
1969 if (gimple_call_chain (stmt
))
1971 decl
= gimple_call_fndecl (stmt
);
1974 target_context
= decl_function_context (decl
);
1975 if (target_context
&& DECL_STATIC_CHAIN (decl
))
1977 gimple_call_set_chain (stmt
, get_static_chain (info
, target_context
,
1979 info
->static_chain_added
|= (1 << (info
->context
!= target_context
));
1983 case GIMPLE_OMP_PARALLEL
:
1984 case GIMPLE_OMP_TASK
:
1985 save_static_chain_added
= info
->static_chain_added
;
1986 info
->static_chain_added
= 0;
1987 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body (stmt
));
1988 for (i
= 0; i
< 2; i
++)
1991 if ((info
->static_chain_added
& (1 << i
)) == 0)
1993 decl
= i
? get_chain_decl (info
) : info
->frame_decl
;
1994 /* Don't add CHAIN.* or FRAME.* twice. */
1995 for (c
= gimple_omp_taskreg_clauses (stmt
);
1997 c
= OMP_CLAUSE_CHAIN (c
))
1998 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
1999 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
2000 && OMP_CLAUSE_DECL (c
) == decl
)
2004 c
= build_omp_clause (gimple_location (stmt
),
2005 i
? OMP_CLAUSE_FIRSTPRIVATE
2006 : OMP_CLAUSE_SHARED
);
2007 OMP_CLAUSE_DECL (c
) = decl
;
2008 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2009 gimple_omp_taskreg_set_clauses (stmt
, c
);
2012 info
->static_chain_added
|= save_static_chain_added
;
2015 case GIMPLE_OMP_FOR
:
2016 walk_body (convert_gimple_call
, NULL
, info
,
2017 gimple_omp_for_pre_body (stmt
));
2019 case GIMPLE_OMP_SECTIONS
:
2020 case GIMPLE_OMP_SECTION
:
2021 case GIMPLE_OMP_SINGLE
:
2022 case GIMPLE_OMP_MASTER
:
2023 case GIMPLE_OMP_ORDERED
:
2024 case GIMPLE_OMP_CRITICAL
:
2025 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body (stmt
));
2029 /* Keep looking for other operands. */
2030 *handled_ops_p
= false;
2034 *handled_ops_p
= true;
2038 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
2039 call expressions. At the same time, determine if a nested function
2040 actually uses its static chain; if not, remember that. */
2043 convert_all_function_calls (struct nesting_info
*root
)
2045 struct nesting_info
*n
;
2049 /* First, optimistically clear static_chain for all decls that haven't
2050 used the static chain already for variable access. */
2051 FOR_EACH_NEST_INFO (n
, root
)
2053 tree decl
= n
->context
;
2054 if (!n
->outer
|| (!n
->chain_decl
&& !n
->chain_field
))
2056 DECL_STATIC_CHAIN (decl
) = 0;
2057 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2058 fprintf (dump_file
, "Guessing no static-chain for %s\n",
2059 lang_hooks
.decl_printable_name (decl
, 2));
2062 DECL_STATIC_CHAIN (decl
) = 1;
2065 /* Walk the functions and perform transformations. Note that these
2066 transformations can induce new uses of the static chain, which in turn
2067 require re-examining all users of the decl. */
2068 /* ??? It would make sense to try to use the call graph to speed this up,
2069 but the call graph hasn't really been built yet. Even if it did, we
2070 would still need to iterate in this loop since address-of references
2071 wouldn't show up in the callgraph anyway. */
2075 any_changed
= false;
2078 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2079 fputc ('\n', dump_file
);
2081 FOR_EACH_NEST_INFO (n
, root
)
2083 tree decl
= n
->context
;
2084 bool old_static_chain
= DECL_STATIC_CHAIN (decl
);
2086 walk_function (convert_tramp_reference_stmt
,
2087 convert_tramp_reference_op
, n
);
2088 walk_function (convert_gimple_call
, NULL
, n
);
2090 /* If a call to another function created the use of a chain
2091 within this function, we'll have to continue iteration. */
2092 if (!old_static_chain
&& DECL_STATIC_CHAIN (decl
))
2096 while (any_changed
);
2098 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2099 fprintf (dump_file
, "convert_all_function_calls iterations: %d\n\n",
2103 struct nesting_copy_body_data
2106 struct nesting_info
*root
;
2109 /* A helper subroutine for debug_var_chain type remapping. */
2112 nesting_copy_decl (tree decl
, copy_body_data
*id
)
2114 struct nesting_copy_body_data
*nid
= (struct nesting_copy_body_data
*) id
;
2115 void **slot
= pointer_map_contains (nid
->root
->var_map
, decl
);
2118 return (tree
) *slot
;
2120 if (TREE_CODE (decl
) == TYPE_DECL
&& DECL_ORIGINAL_TYPE (decl
))
2122 tree new_decl
= copy_decl_no_change (decl
, id
);
2123 DECL_ORIGINAL_TYPE (new_decl
)
2124 = remap_type (DECL_ORIGINAL_TYPE (decl
), id
);
2128 if (TREE_CODE (decl
) == VAR_DECL
2129 || TREE_CODE (decl
) == PARM_DECL
2130 || TREE_CODE (decl
) == RESULT_DECL
)
2133 return copy_decl_no_change (decl
, id
);
2136 /* A helper function for remap_vla_decls. See if *TP contains
2137 some remapped variables. */
2140 contains_remapped_vars (tree
*tp
, int *walk_subtrees
, void *data
)
2142 struct nesting_info
*root
= (struct nesting_info
*) data
;
2149 slot
= pointer_map_contains (root
->var_map
, t
);
2152 return (tree
) *slot
;
2157 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
2161 remap_vla_decls (tree block
, struct nesting_info
*root
)
2163 tree var
, subblock
, val
, type
;
2164 struct nesting_copy_body_data id
;
2166 for (subblock
= BLOCK_SUBBLOCKS (block
);
2168 subblock
= BLOCK_CHAIN (subblock
))
2169 remap_vla_decls (subblock
, root
);
2171 for (var
= BLOCK_VARS (block
); var
; var
= TREE_CHAIN (var
))
2173 if (TREE_CODE (var
) == VAR_DECL
2174 && variably_modified_type_p (TREE_TYPE (var
), NULL
)
2175 && DECL_HAS_VALUE_EXPR_P (var
))
2177 type
= TREE_TYPE (var
);
2178 val
= DECL_VALUE_EXPR (var
);
2179 if (walk_tree (&type
, contains_remapped_vars
, root
, NULL
) != NULL
2180 || walk_tree (&val
, contains_remapped_vars
, root
, NULL
) != NULL
)
2184 if (var
== NULL_TREE
)
2187 memset (&id
, 0, sizeof (id
));
2188 id
.cb
.copy_decl
= nesting_copy_decl
;
2189 id
.cb
.decl_map
= pointer_map_create ();
2192 for (; var
; var
= TREE_CHAIN (var
))
2193 if (TREE_CODE (var
) == VAR_DECL
2194 && variably_modified_type_p (TREE_TYPE (var
), NULL
)
2195 && DECL_HAS_VALUE_EXPR_P (var
))
2197 struct nesting_info
*i
;
2198 tree newt
, t
, context
;
2200 t
= type
= TREE_TYPE (var
);
2201 val
= DECL_VALUE_EXPR (var
);
2202 if (walk_tree (&type
, contains_remapped_vars
, root
, NULL
) == NULL
2203 && walk_tree (&val
, contains_remapped_vars
, root
, NULL
) == NULL
)
2206 context
= decl_function_context (var
);
2207 for (i
= root
; i
; i
= i
->outer
)
2208 if (i
->context
== context
)
2214 id
.cb
.src_fn
= i
->context
;
2215 id
.cb
.dst_fn
= i
->context
;
2216 id
.cb
.src_cfun
= DECL_STRUCT_FUNCTION (root
->context
);
2218 TREE_TYPE (var
) = newt
= remap_type (type
, &id
.cb
);
2219 while (POINTER_TYPE_P (newt
) && !TYPE_NAME (newt
))
2221 newt
= TREE_TYPE (newt
);
2224 if (TYPE_NAME (newt
)
2225 && TREE_CODE (TYPE_NAME (newt
)) == TYPE_DECL
2226 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt
))
2228 && TYPE_NAME (newt
) == TYPE_NAME (t
))
2229 TYPE_NAME (newt
) = remap_decl (TYPE_NAME (newt
), &id
.cb
);
2231 walk_tree (&val
, copy_tree_body_r
, &id
.cb
, NULL
);
2232 if (val
!= DECL_VALUE_EXPR (var
))
2233 SET_DECL_VALUE_EXPR (var
, val
);
2236 pointer_map_destroy (id
.cb
.decl_map
);
2239 /* Do "everything else" to clean up or complete state collected by the
2240 various walking passes -- lay out the types and decls, generate code
2241 to initialize the frame decl, store critical expressions in the
2242 struct function for rtl to find. */
2245 finalize_nesting_tree_1 (struct nesting_info
*root
)
2247 gimple_seq stmt_list
;
2249 tree context
= root
->context
;
2250 struct function
*sf
;
2254 /* If we created a non-local frame type or decl, we need to lay them
2255 out at this time. */
2256 if (root
->frame_type
)
2258 /* In some cases the frame type will trigger the -Wpadded warning.
2259 This is not helpful; suppress it. */
2260 int save_warn_padded
= warn_padded
;
2264 layout_type (root
->frame_type
);
2265 warn_padded
= save_warn_padded
;
2266 layout_decl (root
->frame_decl
, 0);
2268 /* Remove root->frame_decl from root->new_local_var_chain, so
2269 that we can declare it also in the lexical blocks, which
2270 helps ensure virtual regs that end up appearing in its RTL
2271 expression get substituted in instantiate_virtual_regs(). */
2272 for (adjust
= &root
->new_local_var_chain
;
2273 *adjust
!= root
->frame_decl
;
2274 adjust
= &TREE_CHAIN (*adjust
))
2275 gcc_assert (TREE_CHAIN (*adjust
));
2276 *adjust
= TREE_CHAIN (*adjust
);
2278 TREE_CHAIN (root
->frame_decl
) = NULL_TREE
;
2279 declare_vars (root
->frame_decl
,
2280 gimple_seq_first_stmt (gimple_body (context
)), true);
2283 /* If any parameters were referenced non-locally, then we need to
2284 insert a copy. Likewise, if any variables were referenced by
2285 pointer, we need to initialize the address. */
2286 if (root
->any_parm_remapped
)
2289 for (p
= DECL_ARGUMENTS (context
); p
; p
= TREE_CHAIN (p
))
2293 field
= lookup_field_for_decl (root
, p
, NO_INSERT
);
2297 if (use_pointer_in_frame (p
))
2298 x
= build_addr (p
, context
);
2302 y
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
2303 root
->frame_decl
, field
, NULL_TREE
);
2304 stmt
= gimple_build_assign (y
, x
);
2305 gimple_seq_add_stmt (&stmt_list
, stmt
);
2306 /* If the assignment is from a non-register the stmt is
2307 not valid gimple. Make it so by using a temporary instead. */
2308 if (!is_gimple_reg (x
)
2309 && is_gimple_reg_type (TREE_TYPE (x
)))
2311 gimple_stmt_iterator gsi
= gsi_last (stmt_list
);
2312 x
= init_tmp_var (root
, x
, &gsi
);
2313 gimple_assign_set_rhs1 (stmt
, x
);
2318 /* If a chain_field was created, then it needs to be initialized
2320 if (root
->chain_field
)
2322 tree x
= build3 (COMPONENT_REF
, TREE_TYPE (root
->chain_field
),
2323 root
->frame_decl
, root
->chain_field
, NULL_TREE
);
2324 stmt
= gimple_build_assign (x
, get_chain_decl (root
));
2325 gimple_seq_add_stmt (&stmt_list
, stmt
);
2328 /* If trampolines were created, then we need to initialize them. */
2329 if (root
->any_tramp_created
)
2331 struct nesting_info
*i
;
2332 for (i
= root
->inner
; i
; i
= i
->next
)
2334 tree arg1
, arg2
, arg3
, x
, field
;
2336 field
= lookup_tramp_for_decl (root
, i
->context
, NO_INSERT
);
2340 gcc_assert (DECL_STATIC_CHAIN (i
->context
));
2341 arg3
= build_addr (root
->frame_decl
, context
);
2343 arg2
= build_addr (i
->context
, context
);
2345 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
2346 root
->frame_decl
, field
, NULL_TREE
);
2347 arg1
= build_addr (x
, context
);
2349 x
= implicit_built_in_decls
[BUILT_IN_INIT_TRAMPOLINE
];
2350 stmt
= gimple_build_call (x
, 3, arg1
, arg2
, arg3
);
2351 gimple_seq_add_stmt (&stmt_list
, stmt
);
2355 /* If we created initialization statements, insert them. */
2359 annotate_all_with_location (stmt_list
, DECL_SOURCE_LOCATION (context
));
2360 bind
= gimple_seq_first_stmt (gimple_body (context
));
2361 gimple_seq_add_seq (&stmt_list
, gimple_bind_body (bind
));
2362 gimple_bind_set_body (bind
, stmt_list
);
2365 /* If a chain_decl was created, then it needs to be registered with
2366 struct function so that it gets initialized from the static chain
2367 register at the beginning of the function. */
2368 sf
= DECL_STRUCT_FUNCTION (root
->context
);
2369 sf
->static_chain_decl
= root
->chain_decl
;
2371 /* Similarly for the non-local goto save area. */
2372 if (root
->nl_goto_field
)
2374 sf
->nonlocal_goto_save_area
2375 = get_frame_field (root
, context
, root
->nl_goto_field
, NULL
);
2376 sf
->has_nonlocal_label
= 1;
2379 /* Make sure all new local variables get inserted into the
2380 proper BIND_EXPR. */
2381 if (root
->new_local_var_chain
)
2382 declare_vars (root
->new_local_var_chain
,
2383 gimple_seq_first_stmt (gimple_body (root
->context
)),
2386 if (root
->debug_var_chain
)
2391 remap_vla_decls (DECL_INITIAL (root
->context
), root
);
2393 for (debug_var
= root
->debug_var_chain
; debug_var
;
2394 debug_var
= TREE_CHAIN (debug_var
))
2395 if (variably_modified_type_p (TREE_TYPE (debug_var
), NULL
))
2398 /* If there are any debug decls with variable length types,
2399 remap those types using other debug_var_chain variables. */
2402 struct nesting_copy_body_data id
;
2404 memset (&id
, 0, sizeof (id
));
2405 id
.cb
.copy_decl
= nesting_copy_decl
;
2406 id
.cb
.decl_map
= pointer_map_create ();
2409 for (; debug_var
; debug_var
= TREE_CHAIN (debug_var
))
2410 if (variably_modified_type_p (TREE_TYPE (debug_var
), NULL
))
2412 tree type
= TREE_TYPE (debug_var
);
2413 tree newt
, t
= type
;
2414 struct nesting_info
*i
;
2416 for (i
= root
; i
; i
= i
->outer
)
2417 if (variably_modified_type_p (type
, i
->context
))
2423 id
.cb
.src_fn
= i
->context
;
2424 id
.cb
.dst_fn
= i
->context
;
2425 id
.cb
.src_cfun
= DECL_STRUCT_FUNCTION (root
->context
);
2427 TREE_TYPE (debug_var
) = newt
= remap_type (type
, &id
.cb
);
2428 while (POINTER_TYPE_P (newt
) && !TYPE_NAME (newt
))
2430 newt
= TREE_TYPE (newt
);
2433 if (TYPE_NAME (newt
)
2434 && TREE_CODE (TYPE_NAME (newt
)) == TYPE_DECL
2435 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt
))
2437 && TYPE_NAME (newt
) == TYPE_NAME (t
))
2438 TYPE_NAME (newt
) = remap_decl (TYPE_NAME (newt
), &id
.cb
);
2441 pointer_map_destroy (id
.cb
.decl_map
);
2444 scope
= gimple_seq_first_stmt (gimple_body (root
->context
));
2445 if (gimple_bind_block (scope
))
2446 declare_vars (root
->debug_var_chain
, scope
, true);
2448 BLOCK_VARS (DECL_INITIAL (root
->context
))
2449 = chainon (BLOCK_VARS (DECL_INITIAL (root
->context
)),
2450 root
->debug_var_chain
);
2453 /* Dump the translated tree function. */
2456 fputs ("\n\n", dump_file
);
2457 dump_function_to_file (root
->context
, dump_file
, dump_flags
);
2462 finalize_nesting_tree (struct nesting_info
*root
)
2464 struct nesting_info
*n
;
2465 FOR_EACH_NEST_INFO (n
, root
)
2466 finalize_nesting_tree_1 (n
);
2469 /* Unnest the nodes and pass them to cgraph. */
2472 unnest_nesting_tree_1 (struct nesting_info
*root
)
2474 struct cgraph_node
*node
= cgraph_node (root
->context
);
2476 /* For nested functions update the cgraph to reflect unnesting.
2477 We also delay finalizing of these functions up to this point. */
2480 cgraph_unnest_node (cgraph_node (root
->context
));
2481 cgraph_finalize_function (root
->context
, true);
2486 unnest_nesting_tree (struct nesting_info
*root
)
2488 struct nesting_info
*n
;
2489 FOR_EACH_NEST_INFO (n
, root
)
2490 unnest_nesting_tree_1 (n
);
2493 /* Free the data structures allocated during this pass. */
2496 free_nesting_tree (struct nesting_info
*root
)
2498 struct nesting_info
*node
, *next
;
2500 node
= iter_nestinfo_start (root
);
2503 next
= iter_nestinfo_next (node
);
2504 pointer_map_destroy (node
->var_map
);
2505 pointer_map_destroy (node
->field_map
);
2512 /* Gimplify a function and all its nested functions. */
2514 gimplify_all_functions (struct cgraph_node
*root
)
2516 struct cgraph_node
*iter
;
2517 if (!gimple_body (root
->decl
))
2518 gimplify_function_tree (root
->decl
);
2519 for (iter
= root
->nested
; iter
; iter
= iter
->next_nested
)
2520 gimplify_all_functions (iter
);
2523 /* Main entry point for this pass. Process FNDECL and all of its nested
2524 subroutines and turn them into something less tightly bound. */
2527 lower_nested_functions (tree fndecl
)
2529 struct cgraph_node
*cgn
;
2530 struct nesting_info
*root
;
2532 /* If there are no nested functions, there's nothing to do. */
2533 cgn
= cgraph_node (fndecl
);
2537 gimplify_all_functions (cgn
);
2539 dump_file
= dump_begin (TDI_nested
, &dump_flags
);
2541 fprintf (dump_file
, "\n;; Function %s\n\n",
2542 lang_hooks
.decl_printable_name (fndecl
, 2));
2544 bitmap_obstack_initialize (&nesting_info_bitmap_obstack
);
2545 root
= create_nesting_tree (cgn
);
2547 walk_all_functions (convert_nonlocal_reference_stmt
,
2548 convert_nonlocal_reference_op
,
2550 walk_all_functions (convert_local_reference_stmt
,
2551 convert_local_reference_op
,
2553 walk_all_functions (convert_nl_goto_reference
, NULL
, root
);
2554 walk_all_functions (convert_nl_goto_receiver
, NULL
, root
);
2556 convert_all_function_calls (root
);
2557 finalize_nesting_tree (root
);
2558 unnest_nesting_tree (root
);
2560 free_nesting_tree (root
);
2561 bitmap_obstack_release (&nesting_info_bitmap_obstack
);
2565 dump_end (TDI_nested
, dump_file
);
2570 #include "gt-tree-nested.h"