1 /* Control flow functions for trees.
2 Copyright (C) 2001-2014 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
24 #include "hash-table.h"
27 #include "trans-mem.h"
28 #include "stor-layout.h"
29 #include "print-tree.h"
31 #include "basic-block.h"
34 #include "gimple-pretty-print.h"
35 #include "pointer-set.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-fold.h"
40 #include "gimple-expr.h"
43 #include "gimple-iterator.h"
44 #include "gimplify-me.h"
45 #include "gimple-walk.h"
46 #include "gimple-ssa.h"
49 #include "tree-phinodes.h"
50 #include "ssa-iterators.h"
51 #include "stringpool.h"
52 #include "tree-ssanames.h"
53 #include "tree-ssa-loop-manip.h"
54 #include "tree-ssa-loop-niter.h"
55 #include "tree-into-ssa.h"
59 #include "tree-dump.h"
60 #include "tree-pass.h"
61 #include "diagnostic-core.h"
64 #include "tree-ssa-propagate.h"
65 #include "value-prof.h"
66 #include "tree-inline.h"
68 #include "tree-ssa-live.h"
70 #include "tree-cfgcleanup.h"
72 #include "wide-int-print.h"
74 /* This file contains functions for building the Control Flow Graph (CFG)
75 for a function tree. */
77 /* Local declarations. */
/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;
/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static struct pointer_map_t *edge_to_cases;
97 /* If we record edge_to_cases, this bitmap will hold indexes
98 of basic blocks that end in a GIMPLE_SWITCH which we touched
99 due to edge manipulations. */
101 static bitmap touched_switch_bbs
;
/* CFG statistics.  */
struct cfg_stats_d
{
  /* Number of labels merged away by cleanup_dead_labels.  */
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;
111 /* Hash table to store last discriminator assigned for each locus. */
112 struct locus_discrim_map
118 /* Hashtable helpers. */
120 struct locus_discrim_hasher
: typed_free_remove
<locus_discrim_map
>
122 typedef locus_discrim_map value_type
;
123 typedef locus_discrim_map compare_type
;
124 static inline hashval_t
hash (const value_type
*);
125 static inline bool equal (const value_type
*, const compare_type
*);
128 /* Trivial hash function for a location_t. ITEM is a pointer to
129 a hash table entry that maps a location_t to a discriminator. */
132 locus_discrim_hasher::hash (const value_type
*item
)
134 return LOCATION_LINE (item
->locus
);
137 /* Equality function for the locus-to-discriminator map. A and B
138 point to the two hash table entries to compare. */
141 locus_discrim_hasher::equal (const value_type
*a
, const compare_type
*b
)
143 return LOCATION_LINE (a
->locus
) == LOCATION_LINE (b
->locus
);
146 static hash_table
<locus_discrim_hasher
> *discriminator_per_locus
;
148 /* Basic blocks and flowgraphs. */
149 static void make_blocks (gimple_seq
);
152 static void make_edges (void);
153 static void assign_discriminators (void);
154 static void make_cond_expr_edges (basic_block
);
155 static void make_gimple_switch_edges (basic_block
);
156 static bool make_goto_expr_edges (basic_block
);
157 static void make_gimple_asm_edges (basic_block
);
158 static edge
gimple_redirect_edge_and_branch (edge
, basic_block
);
159 static edge
gimple_try_redirect_by_replacing_jump (edge
, basic_block
);
161 /* Various helpers. */
162 static inline bool stmt_starts_bb_p (gimple
, gimple
);
163 static int gimple_verify_flow_info (void);
164 static void gimple_make_forwarder_block (edge
);
165 static gimple
first_non_label_stmt (basic_block
);
166 static bool verify_gimple_transaction (gimple
);
168 /* Flowgraph optimization and cleanup. */
169 static void gimple_merge_blocks (basic_block
, basic_block
);
170 static bool gimple_can_merge_blocks_p (basic_block
, basic_block
);
171 static void remove_bb (basic_block
);
172 static edge
find_taken_edge_computed_goto (basic_block
, tree
);
173 static edge
find_taken_edge_cond_expr (basic_block
, tree
);
174 static edge
find_taken_edge_switch_expr (basic_block
, tree
);
175 static tree
find_case_label_for_value (gimple
, tree
);
178 init_empty_tree_cfg_for_function (struct function
*fn
)
180 /* Initialize the basic block array. */
182 profile_status_for_fn (fn
) = PROFILE_ABSENT
;
183 n_basic_blocks_for_fn (fn
) = NUM_FIXED_BLOCKS
;
184 last_basic_block_for_fn (fn
) = NUM_FIXED_BLOCKS
;
185 vec_alloc (basic_block_info_for_fn (fn
), initial_cfg_capacity
);
186 vec_safe_grow_cleared (basic_block_info_for_fn (fn
),
187 initial_cfg_capacity
);
189 /* Build a mapping of labels to their associated blocks. */
190 vec_alloc (label_to_block_map_for_fn (fn
), initial_cfg_capacity
);
191 vec_safe_grow_cleared (label_to_block_map_for_fn (fn
),
192 initial_cfg_capacity
);
194 SET_BASIC_BLOCK_FOR_FN (fn
, ENTRY_BLOCK
, ENTRY_BLOCK_PTR_FOR_FN (fn
));
195 SET_BASIC_BLOCK_FOR_FN (fn
, EXIT_BLOCK
, EXIT_BLOCK_PTR_FOR_FN (fn
));
197 ENTRY_BLOCK_PTR_FOR_FN (fn
)->next_bb
198 = EXIT_BLOCK_PTR_FOR_FN (fn
);
199 EXIT_BLOCK_PTR_FOR_FN (fn
)->prev_bb
200 = ENTRY_BLOCK_PTR_FOR_FN (fn
);
204 init_empty_tree_cfg (void)
206 init_empty_tree_cfg_for_function (cfun
);
209 /*---------------------------------------------------------------------------
211 ---------------------------------------------------------------------------*/
213 /* Entry point to the CFG builder for trees. SEQ is the sequence of
214 statements to be added to the flowgraph. */
217 build_gimple_cfg (gimple_seq seq
)
219 /* Register specific gimple functions. */
220 gimple_register_cfg_hooks ();
222 memset ((void *) &cfg_stats
, 0, sizeof (cfg_stats
));
224 init_empty_tree_cfg ();
228 /* Make sure there is always at least one block, even if it's empty. */
229 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
230 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
232 /* Adjust the size of the array. */
233 if (basic_block_info_for_fn (cfun
)->length ()
234 < (size_t) n_basic_blocks_for_fn (cfun
))
235 vec_safe_grow_cleared (basic_block_info_for_fn (cfun
),
236 n_basic_blocks_for_fn (cfun
));
238 /* To speed up statement iterator walks, we first purge dead labels. */
239 cleanup_dead_labels ();
241 /* Group case nodes to reduce the number of edges.
242 We do this after cleaning up dead labels because otherwise we miss
243 a lot of obvious case merging opportunities. */
244 group_case_labels ();
246 /* Create the edges of the flowgraph. */
247 discriminator_per_locus
= new hash_table
<locus_discrim_hasher
> (13);
249 assign_discriminators ();
250 cleanup_dead_labels ();
251 delete discriminator_per_locus
;
252 discriminator_per_locus
= NULL
;
256 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
257 them and propagate the information to the loop. We assume that the
258 annotations come immediately before the condition of the loop. */
261 replace_loop_annotate ()
265 gimple_stmt_iterator gsi
;
268 FOR_EACH_LOOP (loop
, 0)
270 gsi
= gsi_last_bb (loop
->header
);
271 stmt
= gsi_stmt (gsi
);
272 if (!(stmt
&& gimple_code (stmt
) == GIMPLE_COND
))
274 for (gsi_prev_nondebug (&gsi
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
276 stmt
= gsi_stmt (gsi
);
277 if (gimple_code (stmt
) != GIMPLE_CALL
)
279 if (!gimple_call_internal_p (stmt
)
280 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
282 switch ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1)))
284 case annot_expr_ivdep_kind
:
285 loop
->safelen
= INT_MAX
;
287 case annot_expr_no_vector_kind
:
288 loop
->dont_vectorize
= true;
290 case annot_expr_vector_kind
:
291 loop
->force_vectorize
= true;
292 cfun
->has_force_vectorize_loops
= true;
297 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
298 gimple_call_arg (stmt
, 0));
299 gsi_replace (&gsi
, stmt
, true);
303 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
304 FOR_EACH_BB_FN (bb
, cfun
)
306 for (gsi
= gsi_last_bb (bb
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
308 stmt
= gsi_stmt (gsi
);
309 if (gimple_code (stmt
) != GIMPLE_CALL
)
311 if (!gimple_call_internal_p (stmt
)
312 || gimple_call_internal_fn (stmt
) != IFN_ANNOTATE
)
314 switch ((annot_expr_kind
) tree_to_shwi (gimple_call_arg (stmt
, 1)))
316 case annot_expr_ivdep_kind
:
317 case annot_expr_no_vector_kind
:
318 case annot_expr_vector_kind
:
323 warning_at (gimple_location (stmt
), 0, "ignoring loop annotation");
324 stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
325 gimple_call_arg (stmt
, 0));
326 gsi_replace (&gsi
, stmt
, true);
333 execute_build_cfg (void)
335 gimple_seq body
= gimple_body (current_function_decl
);
337 build_gimple_cfg (body
);
338 gimple_set_body (current_function_decl
, NULL
);
339 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
341 fprintf (dump_file
, "Scope blocks:\n");
342 dump_scope_blocks (dump_file
, dump_flags
);
345 loop_optimizer_init (AVOID_CFG_MODIFICATIONS
);
346 replace_loop_annotate ();
352 const pass_data pass_data_build_cfg
=
354 GIMPLE_PASS
, /* type */
356 OPTGROUP_NONE
, /* optinfo_flags */
357 true, /* has_execute */
358 TV_TREE_CFG
, /* tv_id */
359 PROP_gimple_leh
, /* properties_required */
360 ( PROP_cfg
| PROP_loops
), /* properties_provided */
361 0, /* properties_destroyed */
362 0, /* todo_flags_start */
363 0, /* todo_flags_finish */
366 class pass_build_cfg
: public gimple_opt_pass
369 pass_build_cfg (gcc::context
*ctxt
)
370 : gimple_opt_pass (pass_data_build_cfg
, ctxt
)
373 /* opt_pass methods: */
374 virtual unsigned int execute (function
*) { return execute_build_cfg (); }
376 }; // class pass_build_cfg
381 make_pass_build_cfg (gcc::context
*ctxt
)
383 return new pass_build_cfg (ctxt
);
387 /* Return true if T is a computed goto. */
390 computed_goto_p (gimple t
)
392 return (gimple_code (t
) == GIMPLE_GOTO
393 && TREE_CODE (gimple_goto_dest (t
)) != LABEL_DECL
);
396 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
397 the other edge points to a bb with just __builtin_unreachable ().
398 I.e. return true for C->M edge in:
406 __builtin_unreachable ();
410 assert_unreachable_fallthru_edge_p (edge e
)
412 basic_block pred_bb
= e
->src
;
413 gimple last
= last_stmt (pred_bb
);
414 if (last
&& gimple_code (last
) == GIMPLE_COND
)
416 basic_block other_bb
= EDGE_SUCC (pred_bb
, 0)->dest
;
417 if (other_bb
== e
->dest
)
418 other_bb
= EDGE_SUCC (pred_bb
, 1)->dest
;
419 if (EDGE_COUNT (other_bb
->succs
) == 0)
421 gimple_stmt_iterator gsi
= gsi_after_labels (other_bb
);
426 stmt
= gsi_stmt (gsi
);
427 while (is_gimple_debug (stmt
) || gimple_clobber_p (stmt
))
432 stmt
= gsi_stmt (gsi
);
434 return gimple_call_builtin_p (stmt
, BUILT_IN_UNREACHABLE
);
441 /* Build a flowgraph for the sequence of stmts SEQ. */
444 make_blocks (gimple_seq seq
)
446 gimple_stmt_iterator i
= gsi_start (seq
);
448 bool start_new_block
= true;
449 bool first_stmt_of_seq
= true;
450 basic_block bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
452 while (!gsi_end_p (i
))
459 /* If the statement starts a new basic block or if we have determined
460 in a previous pass that we need to create a new block for STMT, do
462 if (start_new_block
|| stmt_starts_bb_p (stmt
, prev_stmt
))
464 if (!first_stmt_of_seq
)
465 gsi_split_seq_before (&i
, &seq
);
466 bb
= create_basic_block (seq
, NULL
, bb
);
467 start_new_block
= false;
470 /* Now add STMT to BB and create the subgraphs for special statement
472 gimple_set_bb (stmt
, bb
);
474 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
476 if (stmt_ends_bb_p (stmt
))
478 /* If the stmt can make abnormal goto use a new temporary
479 for the assignment to the LHS. This makes sure the old value
480 of the LHS is available on the abnormal edge. Otherwise
481 we will end up with overlapping life-ranges for abnormal
483 if (gimple_has_lhs (stmt
)
484 && stmt_can_make_abnormal_goto (stmt
)
485 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt
))))
487 tree lhs
= gimple_get_lhs (stmt
);
488 tree tmp
= create_tmp_var (TREE_TYPE (lhs
), NULL
);
489 gimple s
= gimple_build_assign (lhs
, tmp
);
490 gimple_set_location (s
, gimple_location (stmt
));
491 gimple_set_block (s
, gimple_block (stmt
));
492 gimple_set_lhs (stmt
, tmp
);
493 if (TREE_CODE (TREE_TYPE (tmp
)) == COMPLEX_TYPE
494 || TREE_CODE (TREE_TYPE (tmp
)) == VECTOR_TYPE
)
495 DECL_GIMPLE_REG_P (tmp
) = 1;
496 gsi_insert_after (&i
, s
, GSI_SAME_STMT
);
498 start_new_block
= true;
502 first_stmt_of_seq
= false;
507 /* Create and return a new empty basic block after bb AFTER. */
510 create_bb (void *h
, void *e
, basic_block after
)
516 /* Create and initialize a new basic block. Since alloc_block uses
517 GC allocation that clears memory to allocate a basic block, we do
518 not have to clear the newly allocated basic block here. */
521 bb
->index
= last_basic_block_for_fn (cfun
);
523 set_bb_seq (bb
, h
? (gimple_seq
) h
: NULL
);
525 /* Add the new block to the linked list of blocks. */
526 link_block (bb
, after
);
528 /* Grow the basic block array if needed. */
529 if ((size_t) last_basic_block_for_fn (cfun
)
530 == basic_block_info_for_fn (cfun
)->length ())
533 (last_basic_block_for_fn (cfun
)
534 + (last_basic_block_for_fn (cfun
) + 3) / 4);
535 vec_safe_grow_cleared (basic_block_info_for_fn (cfun
), new_size
);
538 /* Add the newly created block to the array. */
539 SET_BASIC_BLOCK_FOR_FN (cfun
, last_basic_block_for_fn (cfun
), bb
);
541 n_basic_blocks_for_fn (cfun
)++;
542 last_basic_block_for_fn (cfun
)++;
548 /*---------------------------------------------------------------------------
550 ---------------------------------------------------------------------------*/
552 /* Fold COND_EXPR_COND of each COND_EXPR. */
555 fold_cond_expr_cond (void)
559 FOR_EACH_BB_FN (bb
, cfun
)
561 gimple stmt
= last_stmt (bb
);
563 if (stmt
&& gimple_code (stmt
) == GIMPLE_COND
)
565 location_t loc
= gimple_location (stmt
);
569 fold_defer_overflow_warnings ();
570 cond
= fold_binary_loc (loc
, gimple_cond_code (stmt
), boolean_type_node
,
571 gimple_cond_lhs (stmt
), gimple_cond_rhs (stmt
));
574 zerop
= integer_zerop (cond
);
575 onep
= integer_onep (cond
);
578 zerop
= onep
= false;
580 fold_undefer_overflow_warnings (zerop
|| onep
,
582 WARN_STRICT_OVERFLOW_CONDITIONAL
);
584 gimple_cond_make_false (stmt
);
586 gimple_cond_make_true (stmt
);
591 /* If basic block BB has an abnormal edge to a basic block
592 containing IFN_ABNORMAL_DISPATCHER internal call, return
593 that the dispatcher's basic block, otherwise return NULL. */
596 get_abnormal_succ_dispatcher (basic_block bb
)
601 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
602 if ((e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
)) == EDGE_ABNORMAL
)
604 gimple_stmt_iterator gsi
605 = gsi_start_nondebug_after_labels_bb (e
->dest
);
606 gimple g
= gsi_stmt (gsi
);
608 && is_gimple_call (g
)
609 && gimple_call_internal_p (g
)
610 && gimple_call_internal_fn (g
) == IFN_ABNORMAL_DISPATCHER
)
616 /* Helper function for make_edges. Create a basic block with
617 with ABNORMAL_DISPATCHER internal call in it if needed, and
618 create abnormal edges from BBS to it and from it to FOR_BB
619 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
622 handle_abnormal_edges (basic_block
*dispatcher_bbs
,
623 basic_block for_bb
, int *bb_to_omp_idx
,
624 auto_vec
<basic_block
> *bbs
, bool computed_goto
)
626 basic_block
*dispatcher
= dispatcher_bbs
+ (computed_goto
? 1 : 0);
627 unsigned int idx
= 0;
633 dispatcher
= dispatcher_bbs
+ 2 * bb_to_omp_idx
[for_bb
->index
];
634 if (bb_to_omp_idx
[for_bb
->index
] != 0)
638 /* If the dispatcher has been created already, then there are basic
639 blocks with abnormal edges to it, so just make a new edge to
641 if (*dispatcher
== NULL
)
643 /* Check if there are any basic blocks that need to have
644 abnormal edges to this dispatcher. If there are none, return
646 if (bb_to_omp_idx
== NULL
)
648 if (bbs
->is_empty ())
653 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
654 if (bb_to_omp_idx
[bb
->index
] == bb_to_omp_idx
[for_bb
->index
])
660 /* Create the dispatcher bb. */
661 *dispatcher
= create_basic_block (NULL
, NULL
, for_bb
);
664 /* Factor computed gotos into a common computed goto site. Also
665 record the location of that site so that we can un-factor the
666 gotos after we have converted back to normal form. */
667 gimple_stmt_iterator gsi
= gsi_start_bb (*dispatcher
);
669 /* Create the destination of the factored goto. Each original
670 computed goto will put its desired destination into this
671 variable and jump to the label we create immediately below. */
672 tree var
= create_tmp_var (ptr_type_node
, "gotovar");
674 /* Build a label for the new block which will contain the
675 factored computed goto. */
676 tree factored_label_decl
677 = create_artificial_label (UNKNOWN_LOCATION
);
678 gimple factored_computed_goto_label
679 = gimple_build_label (factored_label_decl
);
680 gsi_insert_after (&gsi
, factored_computed_goto_label
, GSI_NEW_STMT
);
682 /* Build our new computed goto. */
683 gimple factored_computed_goto
= gimple_build_goto (var
);
684 gsi_insert_after (&gsi
, factored_computed_goto
, GSI_NEW_STMT
);
686 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
689 && bb_to_omp_idx
[bb
->index
] != bb_to_omp_idx
[for_bb
->index
])
692 gsi
= gsi_last_bb (bb
);
693 gimple last
= gsi_stmt (gsi
);
695 gcc_assert (computed_goto_p (last
));
697 /* Copy the original computed goto's destination into VAR. */
699 = gimple_build_assign (var
, gimple_goto_dest (last
));
700 gsi_insert_before (&gsi
, assignment
, GSI_SAME_STMT
);
702 edge e
= make_edge (bb
, *dispatcher
, EDGE_FALLTHRU
);
703 e
->goto_locus
= gimple_location (last
);
704 gsi_remove (&gsi
, true);
709 tree arg
= inner
? boolean_true_node
: boolean_false_node
;
710 gimple g
= gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER
,
712 gimple_stmt_iterator gsi
= gsi_after_labels (*dispatcher
);
713 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
715 /* Create predecessor edges of the dispatcher. */
716 FOR_EACH_VEC_ELT (*bbs
, idx
, bb
)
719 && bb_to_omp_idx
[bb
->index
] != bb_to_omp_idx
[for_bb
->index
])
721 make_edge (bb
, *dispatcher
, EDGE_ABNORMAL
);
726 make_edge (*dispatcher
, for_bb
, EDGE_ABNORMAL
);
729 /* Join all the blocks in the flowgraph. */
735 struct omp_region
*cur_region
= NULL
;
736 auto_vec
<basic_block
> ab_edge_goto
;
737 auto_vec
<basic_block
> ab_edge_call
;
738 int *bb_to_omp_idx
= NULL
;
739 int cur_omp_region_idx
= 0;
741 /* Create an edge from entry to the first block with executable
743 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
),
744 BASIC_BLOCK_FOR_FN (cfun
, NUM_FIXED_BLOCKS
),
747 /* Traverse the basic block array placing edges. */
748 FOR_EACH_BB_FN (bb
, cfun
)
750 gimple last
= last_stmt (bb
);
754 bb_to_omp_idx
[bb
->index
] = cur_omp_region_idx
;
758 enum gimple_code code
= gimple_code (last
);
762 if (make_goto_expr_edges (bb
))
763 ab_edge_goto
.safe_push (bb
);
768 edge e
= make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
769 e
->goto_locus
= gimple_location (last
);
774 make_cond_expr_edges (bb
);
778 make_gimple_switch_edges (bb
);
782 make_eh_edges (last
);
785 case GIMPLE_EH_DISPATCH
:
786 fallthru
= make_eh_dispatch_edges (last
);
790 /* If this function receives a nonlocal goto, then we need to
791 make edges from this call site to all the nonlocal goto
793 if (stmt_can_make_abnormal_goto (last
))
794 ab_edge_call
.safe_push (bb
);
796 /* If this statement has reachable exception handlers, then
797 create abnormal edges to them. */
798 make_eh_edges (last
);
800 /* BUILTIN_RETURN is really a return statement. */
801 if (gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
803 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
806 /* Some calls are known not to return. */
808 fallthru
= !(gimple_call_flags (last
) & ECF_NORETURN
);
812 /* A GIMPLE_ASSIGN may throw internally and thus be considered
814 if (is_ctrl_altering_stmt (last
))
815 make_eh_edges (last
);
820 make_gimple_asm_edges (bb
);
825 fallthru
= make_gimple_omp_edges (bb
, &cur_region
,
826 &cur_omp_region_idx
);
827 if (cur_region
&& bb_to_omp_idx
== NULL
)
828 bb_to_omp_idx
= XCNEWVEC (int, n_basic_blocks_for_fn (cfun
));
831 case GIMPLE_TRANSACTION
:
833 tree abort_label
= gimple_transaction_label (last
);
835 make_edge (bb
, label_to_block (abort_label
), EDGE_TM_ABORT
);
841 gcc_assert (!stmt_ends_bb_p (last
));
849 make_edge (bb
, bb
->next_bb
, EDGE_FALLTHRU
);
852 /* Computed gotos are hell to deal with, especially if there are
853 lots of them with a large number of destinations. So we factor
854 them to a common computed goto location before we build the
855 edge list. After we convert back to normal form, we will un-factor
856 the computed gotos since factoring introduces an unwanted jump.
857 For non-local gotos and abnormal edges from calls to calls that return
858 twice or forced labels, factor the abnormal edges too, by having all
859 abnormal edges from the calls go to a common artificial basic block
860 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
861 basic block to all forced labels and calls returning twice.
862 We do this per-OpenMP structured block, because those regions
863 are guaranteed to be single entry single exit by the standard,
864 so it is not allowed to enter or exit such regions abnormally this way,
865 thus all computed gotos, non-local gotos and setjmp/longjmp calls
866 must not transfer control across SESE region boundaries. */
867 if (!ab_edge_goto
.is_empty () || !ab_edge_call
.is_empty ())
869 gimple_stmt_iterator gsi
;
870 basic_block dispatcher_bb_array
[2] = { NULL
, NULL
};
871 basic_block
*dispatcher_bbs
= dispatcher_bb_array
;
872 int count
= n_basic_blocks_for_fn (cfun
);
875 dispatcher_bbs
= XCNEWVEC (basic_block
, 2 * count
);
877 FOR_EACH_BB_FN (bb
, cfun
)
879 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
881 gimple label_stmt
= gsi_stmt (gsi
);
884 if (gimple_code (label_stmt
) != GIMPLE_LABEL
)
887 target
= gimple_label_label (label_stmt
);
889 /* Make an edge to every label block that has been marked as a
890 potential target for a computed goto or a non-local goto. */
891 if (FORCED_LABEL (target
))
892 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
893 &ab_edge_goto
, true);
894 if (DECL_NONLOCAL (target
))
896 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
897 &ab_edge_call
, false);
902 if (!gsi_end_p (gsi
) && is_gimple_debug (gsi_stmt (gsi
)))
903 gsi_next_nondebug (&gsi
);
904 if (!gsi_end_p (gsi
))
906 /* Make an edge to every setjmp-like call. */
907 gimple call_stmt
= gsi_stmt (gsi
);
908 if (is_gimple_call (call_stmt
)
909 && ((gimple_call_flags (call_stmt
) & ECF_RETURNS_TWICE
)
910 || gimple_call_builtin_p (call_stmt
,
911 BUILT_IN_SETJMP_RECEIVER
)))
912 handle_abnormal_edges (dispatcher_bbs
, bb
, bb_to_omp_idx
,
913 &ab_edge_call
, false);
918 XDELETE (dispatcher_bbs
);
921 XDELETE (bb_to_omp_idx
);
925 /* Fold COND_EXPR_COND of each COND_EXPR. */
926 fold_cond_expr_cond ();
929 /* Find the next available discriminator value for LOCUS. The
930 discriminator distinguishes among several basic blocks that
931 share a common locus, allowing for more accurate sample-based
935 next_discriminator_for_locus (location_t locus
)
937 struct locus_discrim_map item
;
938 struct locus_discrim_map
**slot
;
941 item
.discriminator
= 0;
942 slot
= discriminator_per_locus
->find_slot_with_hash (
943 &item
, LOCATION_LINE (locus
), INSERT
);
945 if (*slot
== HTAB_EMPTY_ENTRY
)
947 *slot
= XNEW (struct locus_discrim_map
);
949 (*slot
)->locus
= locus
;
950 (*slot
)->discriminator
= 0;
952 (*slot
)->discriminator
++;
953 return (*slot
)->discriminator
;
956 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
959 same_line_p (location_t locus1
, location_t locus2
)
961 expanded_location from
, to
;
963 if (locus1
== locus2
)
966 from
= expand_location (locus1
);
967 to
= expand_location (locus2
);
969 if (from
.line
!= to
.line
)
971 if (from
.file
== to
.file
)
973 return (from
.file
!= NULL
975 && filename_cmp (from
.file
, to
.file
) == 0);
978 /* Assign discriminators to each basic block. */
981 assign_discriminators (void)
985 FOR_EACH_BB_FN (bb
, cfun
)
989 gimple last
= last_stmt (bb
);
990 location_t locus
= last
? gimple_location (last
) : UNKNOWN_LOCATION
;
992 if (locus
== UNKNOWN_LOCATION
)
995 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
997 gimple first
= first_non_label_stmt (e
->dest
);
998 gimple last
= last_stmt (e
->dest
);
999 if ((first
&& same_line_p (locus
, gimple_location (first
)))
1000 || (last
&& same_line_p (locus
, gimple_location (last
))))
1002 if (e
->dest
->discriminator
!= 0 && bb
->discriminator
== 0)
1003 bb
->discriminator
= next_discriminator_for_locus (locus
);
1005 e
->dest
->discriminator
= next_discriminator_for_locus (locus
);
1011 /* Create the edges for a GIMPLE_COND starting at block BB. */
1014 make_cond_expr_edges (basic_block bb
)
1016 gimple entry
= last_stmt (bb
);
1017 gimple then_stmt
, else_stmt
;
1018 basic_block then_bb
, else_bb
;
1019 tree then_label
, else_label
;
1023 gcc_assert (gimple_code (entry
) == GIMPLE_COND
);
1025 /* Entry basic blocks for each component. */
1026 then_label
= gimple_cond_true_label (entry
);
1027 else_label
= gimple_cond_false_label (entry
);
1028 then_bb
= label_to_block (then_label
);
1029 else_bb
= label_to_block (else_label
);
1030 then_stmt
= first_stmt (then_bb
);
1031 else_stmt
= first_stmt (else_bb
);
1033 e
= make_edge (bb
, then_bb
, EDGE_TRUE_VALUE
);
1034 e
->goto_locus
= gimple_location (then_stmt
);
1035 e
= make_edge (bb
, else_bb
, EDGE_FALSE_VALUE
);
1037 e
->goto_locus
= gimple_location (else_stmt
);
1039 /* We do not need the labels anymore. */
1040 gimple_cond_set_true_label (entry
, NULL_TREE
);
1041 gimple_cond_set_false_label (entry
, NULL_TREE
);
1045 /* Called for each element in the hash table (P) as we delete the
1046 edge to cases hash table.
1048 Clear all the TREE_CHAINs to prevent problems with copying of
1049 SWITCH_EXPRs and structure sharing rules, then free the hash table
1053 edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED
, void **value
,
1054 void *data ATTRIBUTE_UNUSED
)
1058 for (t
= (tree
) *value
; t
; t
= next
)
1060 next
= CASE_CHAIN (t
);
1061 CASE_CHAIN (t
) = NULL
;
1068 /* Start recording information mapping edges to case labels. */
1071 start_recording_case_labels (void)
1073 gcc_assert (edge_to_cases
== NULL
);
1074 edge_to_cases
= pointer_map_create ();
1075 touched_switch_bbs
= BITMAP_ALLOC (NULL
);
1078 /* Return nonzero if we are recording information for case labels. */
1081 recording_case_labels_p (void)
1083 return (edge_to_cases
!= NULL
);
1086 /* Stop recording information mapping edges to case labels and
1087 remove any information we have recorded. */
1089 end_recording_case_labels (void)
1093 pointer_map_traverse (edge_to_cases
, edge_to_cases_cleanup
, NULL
);
1094 pointer_map_destroy (edge_to_cases
);
1095 edge_to_cases
= NULL
;
1096 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs
, 0, i
, bi
)
1098 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
1101 gimple stmt
= last_stmt (bb
);
1102 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
1103 group_case_labels_stmt (stmt
);
1106 BITMAP_FREE (touched_switch_bbs
);
1109 /* If we are inside a {start,end}_recording_cases block, then return
1110 a chain of CASE_LABEL_EXPRs from T which reference E.
1112 Otherwise return NULL. */
1115 get_cases_for_edge (edge e
, gimple t
)
1120 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1121 chains available. Return NULL so the caller can detect this case. */
1122 if (!recording_case_labels_p ())
1125 slot
= pointer_map_contains (edge_to_cases
, e
);
1127 return (tree
) *slot
;
1129 /* If we did not find E in the hash table, then this must be the first
1130 time we have been queried for information about E & T. Add all the
1131 elements from T to the hash table then perform the query again. */
1133 n
= gimple_switch_num_labels (t
);
1134 for (i
= 0; i
< n
; i
++)
1136 tree elt
= gimple_switch_label (t
, i
);
1137 tree lab
= CASE_LABEL (elt
);
1138 basic_block label_bb
= label_to_block (lab
);
1139 edge this_edge
= find_edge (e
->src
, label_bb
);
1141 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1143 slot
= pointer_map_insert (edge_to_cases
, this_edge
);
1144 CASE_CHAIN (elt
) = (tree
) *slot
;
1148 return (tree
) *pointer_map_contains (edge_to_cases
, e
);
1151 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1154 make_gimple_switch_edges (basic_block bb
)
1156 gimple entry
= last_stmt (bb
);
1159 n
= gimple_switch_num_labels (entry
);
1161 for (i
= 0; i
< n
; ++i
)
1163 tree lab
= CASE_LABEL (gimple_switch_label (entry
, i
));
1164 basic_block label_bb
= label_to_block (lab
);
1165 make_edge (bb
, label_bb
, 0);
1170 /* Return the basic block holding label DEST. */
1173 label_to_block_fn (struct function
*ifun
, tree dest
)
1175 int uid
= LABEL_DECL_UID (dest
);
1177 /* We would die hard when faced by an undefined label. Emit a label to
1178 the very first basic block. This will hopefully make even the dataflow
1179 and undefined variable warnings quite right. */
1180 if (seen_error () && uid
< 0)
1182 gimple_stmt_iterator gsi
=
1183 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun
, NUM_FIXED_BLOCKS
));
1186 stmt
= gimple_build_label (dest
);
1187 gsi_insert_before (&gsi
, stmt
, GSI_NEW_STMT
);
1188 uid
= LABEL_DECL_UID (dest
);
1190 if (vec_safe_length (ifun
->cfg
->x_label_to_block_map
) <= (unsigned int) uid
)
1192 return (*ifun
->cfg
->x_label_to_block_map
)[uid
];
1195 /* Create edges for a goto statement at block BB. Returns true
1196 if abnormal edges should be created. */
1199 make_goto_expr_edges (basic_block bb
)
1201 gimple_stmt_iterator last
= gsi_last_bb (bb
);
1202 gimple goto_t
= gsi_stmt (last
);
1204 /* A simple GOTO creates normal edges. */
1205 if (simple_goto_p (goto_t
))
1207 tree dest
= gimple_goto_dest (goto_t
);
1208 basic_block label_bb
= label_to_block (dest
);
1209 edge e
= make_edge (bb
, label_bb
, EDGE_FALLTHRU
);
1210 e
->goto_locus
= gimple_location (goto_t
);
1211 gsi_remove (&last
, true);
1215 /* A computed GOTO creates abnormal edges. */
1219 /* Create edges for an asm statement with labels at block BB. */
1222 make_gimple_asm_edges (basic_block bb
)
1224 gimple stmt
= last_stmt (bb
);
1225 int i
, n
= gimple_asm_nlabels (stmt
);
1227 for (i
= 0; i
< n
; ++i
)
1229 tree label
= TREE_VALUE (gimple_asm_label_op (stmt
, i
));
1230 basic_block label_bb
= label_to_block (label
);
1231 make_edge (bb
, label_bb
, 0);
1235 /*---------------------------------------------------------------------------
1237 ---------------------------------------------------------------------------*/
1239 /* Cleanup useless labels in basic blocks. This is something we wish
1240 to do early because it allows us to group case labels before creating
1241 the edges for the CFG, and it speeds up block statement iterators in
1242 all passes later on.
1243 We rerun this pass after CFG is created, to get rid of the labels that
1244 are no longer referenced. After then we do not run it any more, since
1245 (almost) no new labels should be created. */
1247 /* A map from basic block index to the leading label of that block. */
1248 static struct label_record
1253 /* True if the label is referenced from somewhere. */
1257 /* Given LABEL return the first label in the same basic block. */
1260 main_block_label (tree label
)
1262 basic_block bb
= label_to_block (label
);
1263 tree main_label
= label_for_bb
[bb
->index
].label
;
1265 /* label_to_block possibly inserted undefined label into the chain. */
1268 label_for_bb
[bb
->index
].label
= label
;
1272 label_for_bb
[bb
->index
].used
= true;
1276 /* Clean up redundant labels within the exception tree. */
1279 cleanup_dead_labels_eh (void)
1286 if (cfun
->eh
== NULL
)
1289 for (i
= 1; vec_safe_iterate (cfun
->eh
->lp_array
, i
, &lp
); ++i
)
1290 if (lp
&& lp
->post_landing_pad
)
1292 lab
= main_block_label (lp
->post_landing_pad
);
1293 if (lab
!= lp
->post_landing_pad
)
1295 EH_LANDING_PAD_NR (lp
->post_landing_pad
) = 0;
1296 EH_LANDING_PAD_NR (lab
) = lp
->index
;
1300 FOR_ALL_EH_REGION (r
)
1304 case ERT_MUST_NOT_THROW
:
1310 for (c
= r
->u
.eh_try
.first_catch
; c
; c
= c
->next_catch
)
1314 c
->label
= main_block_label (lab
);
1319 case ERT_ALLOWED_EXCEPTIONS
:
1320 lab
= r
->u
.allowed
.label
;
1322 r
->u
.allowed
.label
= main_block_label (lab
);
1328 /* Cleanup redundant labels. This is a three-step process:
1329 1) Find the leading label for each block.
1330 2) Redirect all references to labels to the leading labels.
1331 3) Cleanup all useless labels. */
1334 cleanup_dead_labels (void)
1337 label_for_bb
= XCNEWVEC (struct label_record
, last_basic_block_for_fn (cfun
));
1339 /* Find a suitable label for each block. We use the first user-defined
1340 label if there is one, or otherwise just the first label we see. */
1341 FOR_EACH_BB_FN (bb
, cfun
)
1343 gimple_stmt_iterator i
;
1345 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
1348 gimple stmt
= gsi_stmt (i
);
1350 if (gimple_code (stmt
) != GIMPLE_LABEL
)
1353 label
= gimple_label_label (stmt
);
1355 /* If we have not yet seen a label for the current block,
1356 remember this one and see if there are more labels. */
1357 if (!label_for_bb
[bb
->index
].label
)
1359 label_for_bb
[bb
->index
].label
= label
;
1363 /* If we did see a label for the current block already, but it
1364 is an artificially created label, replace it if the current
1365 label is a user defined label. */
1366 if (!DECL_ARTIFICIAL (label
)
1367 && DECL_ARTIFICIAL (label_for_bb
[bb
->index
].label
))
1369 label_for_bb
[bb
->index
].label
= label
;
1375 /* Now redirect all jumps/branches to the selected label.
1376 First do so for each block ending in a control statement. */
1377 FOR_EACH_BB_FN (bb
, cfun
)
1379 gimple stmt
= last_stmt (bb
);
1380 tree label
, new_label
;
1385 switch (gimple_code (stmt
))
1388 label
= gimple_cond_true_label (stmt
);
1391 new_label
= main_block_label (label
);
1392 if (new_label
!= label
)
1393 gimple_cond_set_true_label (stmt
, new_label
);
1396 label
= gimple_cond_false_label (stmt
);
1399 new_label
= main_block_label (label
);
1400 if (new_label
!= label
)
1401 gimple_cond_set_false_label (stmt
, new_label
);
1407 size_t i
, n
= gimple_switch_num_labels (stmt
);
1409 /* Replace all destination labels. */
1410 for (i
= 0; i
< n
; ++i
)
1412 tree case_label
= gimple_switch_label (stmt
, i
);
1413 label
= CASE_LABEL (case_label
);
1414 new_label
= main_block_label (label
);
1415 if (new_label
!= label
)
1416 CASE_LABEL (case_label
) = new_label
;
1423 int i
, n
= gimple_asm_nlabels (stmt
);
1425 for (i
= 0; i
< n
; ++i
)
1427 tree cons
= gimple_asm_label_op (stmt
, i
);
1428 tree label
= main_block_label (TREE_VALUE (cons
));
1429 TREE_VALUE (cons
) = label
;
1434 /* We have to handle gotos until they're removed, and we don't
1435 remove them until after we've created the CFG edges. */
1437 if (!computed_goto_p (stmt
))
1439 label
= gimple_goto_dest (stmt
);
1440 new_label
= main_block_label (label
);
1441 if (new_label
!= label
)
1442 gimple_goto_set_dest (stmt
, new_label
);
1446 case GIMPLE_TRANSACTION
:
1448 tree label
= gimple_transaction_label (stmt
);
1451 tree new_label
= main_block_label (label
);
1452 if (new_label
!= label
)
1453 gimple_transaction_set_label (stmt
, new_label
);
1463 /* Do the same for the exception region tree labels. */
1464 cleanup_dead_labels_eh ();
1466 /* Finally, purge dead labels. All user-defined labels and labels that
1467 can be the target of non-local gotos and labels which have their
1468 address taken are preserved. */
1469 FOR_EACH_BB_FN (bb
, cfun
)
1471 gimple_stmt_iterator i
;
1472 tree label_for_this_bb
= label_for_bb
[bb
->index
].label
;
1474 if (!label_for_this_bb
)
1477 /* If the main label of the block is unused, we may still remove it. */
1478 if (!label_for_bb
[bb
->index
].used
)
1479 label_for_this_bb
= NULL
;
1481 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); )
1484 gimple stmt
= gsi_stmt (i
);
1486 if (gimple_code (stmt
) != GIMPLE_LABEL
)
1489 label
= gimple_label_label (stmt
);
1491 if (label
== label_for_this_bb
1492 || !DECL_ARTIFICIAL (label
)
1493 || DECL_NONLOCAL (label
)
1494 || FORCED_LABEL (label
))
1497 gsi_remove (&i
, true);
1501 free (label_for_bb
);
1504 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1505 the ones jumping to the same label.
1506 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1509 group_case_labels_stmt (gimple stmt
)
1511 int old_size
= gimple_switch_num_labels (stmt
);
1512 int i
, j
, new_size
= old_size
;
1513 basic_block default_bb
= NULL
;
1515 default_bb
= label_to_block (CASE_LABEL (gimple_switch_default_label (stmt
)));
1517 /* Look for possible opportunities to merge cases. */
1519 while (i
< old_size
)
1521 tree base_case
, base_high
;
1522 basic_block base_bb
;
1524 base_case
= gimple_switch_label (stmt
, i
);
1526 gcc_assert (base_case
);
1527 base_bb
= label_to_block (CASE_LABEL (base_case
));
1529 /* Discard cases that have the same destination as the
1531 if (base_bb
== default_bb
)
1533 gimple_switch_set_label (stmt
, i
, NULL_TREE
);
1539 base_high
= CASE_HIGH (base_case
)
1540 ? CASE_HIGH (base_case
)
1541 : CASE_LOW (base_case
);
1544 /* Try to merge case labels. Break out when we reach the end
1545 of the label vector or when we cannot merge the next case
1546 label with the current one. */
1547 while (i
< old_size
)
1549 tree merge_case
= gimple_switch_label (stmt
, i
);
1550 basic_block merge_bb
= label_to_block (CASE_LABEL (merge_case
));
1551 wide_int bhp1
= wi::add (base_high
, 1);
1553 /* Merge the cases if they jump to the same place,
1554 and their ranges are consecutive. */
1555 if (merge_bb
== base_bb
1556 && wi::eq_p (CASE_LOW (merge_case
), bhp1
))
1558 base_high
= CASE_HIGH (merge_case
) ?
1559 CASE_HIGH (merge_case
) : CASE_LOW (merge_case
);
1560 CASE_HIGH (base_case
) = base_high
;
1561 gimple_switch_set_label (stmt
, i
, NULL_TREE
);
1570 /* Compress the case labels in the label vector, and adjust the
1571 length of the vector. */
1572 for (i
= 0, j
= 0; i
< new_size
; i
++)
1574 while (! gimple_switch_label (stmt
, j
))
1576 gimple_switch_set_label (stmt
, i
,
1577 gimple_switch_label (stmt
, j
++));
1580 gcc_assert (new_size
<= old_size
);
1581 gimple_switch_set_num_labels (stmt
, new_size
);
1584 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1585 and scan the sorted vector of cases. Combine the ones jumping to the
1589 group_case_labels (void)
1593 FOR_EACH_BB_FN (bb
, cfun
)
1595 gimple stmt
= last_stmt (bb
);
1596 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
1597 group_case_labels_stmt (stmt
);
1601 /* Checks whether we can merge block B into block A. */
1604 gimple_can_merge_blocks_p (basic_block a
, basic_block b
)
1607 gimple_stmt_iterator gsi
;
1609 if (!single_succ_p (a
))
1612 if (single_succ_edge (a
)->flags
& EDGE_COMPLEX
)
1615 if (single_succ (a
) != b
)
1618 if (!single_pred_p (b
))
1621 if (b
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
1624 /* If A ends by a statement causing exceptions or something similar, we
1625 cannot merge the blocks. */
1626 stmt
= last_stmt (a
);
1627 if (stmt
&& stmt_ends_bb_p (stmt
))
1630 /* Do not allow a block with only a non-local label to be merged. */
1632 && gimple_code (stmt
) == GIMPLE_LABEL
1633 && DECL_NONLOCAL (gimple_label_label (stmt
)))
1636 /* Examine the labels at the beginning of B. */
1637 for (gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1640 stmt
= gsi_stmt (gsi
);
1641 if (gimple_code (stmt
) != GIMPLE_LABEL
)
1643 lab
= gimple_label_label (stmt
);
1645 /* Do not remove user forced labels or for -O0 any user labels. */
1646 if (!DECL_ARTIFICIAL (lab
) && (!optimize
|| FORCED_LABEL (lab
)))
1650 /* Protect the loop latches. */
1651 if (current_loops
&& b
->loop_father
->latch
== b
)
1654 /* It must be possible to eliminate all phi nodes in B. If ssa form
1655 is not up-to-date and a name-mapping is registered, we cannot eliminate
1656 any phis. Symbols marked for renaming are never a problem though. */
1657 for (gsi
= gsi_start_phis (b
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1659 gimple phi
= gsi_stmt (gsi
);
1660 /* Technically only new names matter. */
1661 if (name_registered_for_update_p (PHI_RESULT (phi
)))
1665 /* When not optimizing, don't merge if we'd lose goto_locus. */
1667 && single_succ_edge (a
)->goto_locus
!= UNKNOWN_LOCATION
)
1669 location_t goto_locus
= single_succ_edge (a
)->goto_locus
;
1670 gimple_stmt_iterator prev
, next
;
1671 prev
= gsi_last_nondebug_bb (a
);
1672 next
= gsi_after_labels (b
);
1673 if (!gsi_end_p (next
) && is_gimple_debug (gsi_stmt (next
)))
1674 gsi_next_nondebug (&next
);
1675 if ((gsi_end_p (prev
)
1676 || gimple_location (gsi_stmt (prev
)) != goto_locus
)
1677 && (gsi_end_p (next
)
1678 || gimple_location (gsi_stmt (next
)) != goto_locus
))
1685 /* Replaces all uses of NAME by VAL. */
1688 replace_uses_by (tree name
, tree val
)
1690 imm_use_iterator imm_iter
;
1695 FOR_EACH_IMM_USE_STMT (stmt
, imm_iter
, name
)
1697 /* Mark the block if we change the last stmt in it. */
1698 if (cfgcleanup_altered_bbs
1699 && stmt_ends_bb_p (stmt
))
1700 bitmap_set_bit (cfgcleanup_altered_bbs
, gimple_bb (stmt
)->index
);
1702 FOR_EACH_IMM_USE_ON_STMT (use
, imm_iter
)
1704 replace_exp (use
, val
);
1706 if (gimple_code (stmt
) == GIMPLE_PHI
)
1708 e
= gimple_phi_arg_edge (stmt
, PHI_ARG_INDEX_FROM_USE (use
));
1709 if (e
->flags
& EDGE_ABNORMAL
)
1711 /* This can only occur for virtual operands, since
1712 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1713 would prevent replacement. */
1714 gcc_checking_assert (virtual_operand_p (name
));
1715 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
) = 1;
1720 if (gimple_code (stmt
) != GIMPLE_PHI
)
1722 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
1723 gimple orig_stmt
= stmt
;
1726 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1727 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1728 only change sth from non-invariant to invariant, and only
1729 when propagating constants. */
1730 if (is_gimple_min_invariant (val
))
1731 for (i
= 0; i
< gimple_num_ops (stmt
); i
++)
1733 tree op
= gimple_op (stmt
, i
);
1734 /* Operands may be empty here. For example, the labels
1735 of a GIMPLE_COND are nulled out following the creation
1736 of the corresponding CFG edges. */
1737 if (op
&& TREE_CODE (op
) == ADDR_EXPR
)
1738 recompute_tree_invariant_for_addr_expr (op
);
1741 if (fold_stmt (&gsi
))
1742 stmt
= gsi_stmt (gsi
);
1744 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
1745 gimple_purge_dead_eh_edges (gimple_bb (stmt
));
1751 gcc_checking_assert (has_zero_uses (name
));
1753 /* Also update the trees stored in loop structures. */
1758 FOR_EACH_LOOP (loop
, 0)
1760 substitute_in_loop_info (loop
, name
, val
);
1765 /* Merge block B into block A. */
1768 gimple_merge_blocks (basic_block a
, basic_block b
)
1770 gimple_stmt_iterator last
, gsi
, psi
;
1773 fprintf (dump_file
, "Merging blocks %d and %d\n", a
->index
, b
->index
);
1775 /* Remove all single-valued PHI nodes from block B of the form
1776 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1777 gsi
= gsi_last_bb (a
);
1778 for (psi
= gsi_start_phis (b
); !gsi_end_p (psi
); )
1780 gimple phi
= gsi_stmt (psi
);
1781 tree def
= gimple_phi_result (phi
), use
= gimple_phi_arg_def (phi
, 0);
1783 bool may_replace_uses
= (virtual_operand_p (def
)
1784 || may_propagate_copy (def
, use
));
1786 /* In case we maintain loop closed ssa form, do not propagate arguments
1787 of loop exit phi nodes. */
1789 && loops_state_satisfies_p (LOOP_CLOSED_SSA
)
1790 && !virtual_operand_p (def
)
1791 && TREE_CODE (use
) == SSA_NAME
1792 && a
->loop_father
!= b
->loop_father
)
1793 may_replace_uses
= false;
1795 if (!may_replace_uses
)
1797 gcc_assert (!virtual_operand_p (def
));
1799 /* Note that just emitting the copies is fine -- there is no problem
1800 with ordering of phi nodes. This is because A is the single
1801 predecessor of B, therefore results of the phi nodes cannot
1802 appear as arguments of the phi nodes. */
1803 copy
= gimple_build_assign (def
, use
);
1804 gsi_insert_after (&gsi
, copy
, GSI_NEW_STMT
);
1805 remove_phi_node (&psi
, false);
1809 /* If we deal with a PHI for virtual operands, we can simply
1810 propagate these without fussing with folding or updating
1812 if (virtual_operand_p (def
))
1814 imm_use_iterator iter
;
1815 use_operand_p use_p
;
1818 FOR_EACH_IMM_USE_STMT (stmt
, iter
, def
)
1819 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
1820 SET_USE (use_p
, use
);
1822 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def
))
1823 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use
) = 1;
1826 replace_uses_by (def
, use
);
1828 remove_phi_node (&psi
, true);
1832 /* Ensure that B follows A. */
1833 move_block_after (b
, a
);
1835 gcc_assert (single_succ_edge (a
)->flags
& EDGE_FALLTHRU
);
1836 gcc_assert (!last_stmt (a
) || !stmt_ends_bb_p (last_stmt (a
)));
1838 /* Remove labels from B and set gimple_bb to A for other statements. */
1839 for (gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
);)
1841 gimple stmt
= gsi_stmt (gsi
);
1842 if (gimple_code (stmt
) == GIMPLE_LABEL
)
1844 tree label
= gimple_label_label (stmt
);
1847 gsi_remove (&gsi
, false);
1849 /* Now that we can thread computed gotos, we might have
1850 a situation where we have a forced label in block B
1851 However, the label at the start of block B might still be
1852 used in other ways (think about the runtime checking for
1853 Fortran assigned gotos). So we can not just delete the
1854 label. Instead we move the label to the start of block A. */
1855 if (FORCED_LABEL (label
))
1857 gimple_stmt_iterator dest_gsi
= gsi_start_bb (a
);
1858 gsi_insert_before (&dest_gsi
, stmt
, GSI_NEW_STMT
);
1860 /* Other user labels keep around in a form of a debug stmt. */
1861 else if (!DECL_ARTIFICIAL (label
) && MAY_HAVE_DEBUG_STMTS
)
1863 gimple dbg
= gimple_build_debug_bind (label
,
1866 gimple_debug_bind_reset_value (dbg
);
1867 gsi_insert_before (&gsi
, dbg
, GSI_SAME_STMT
);
1870 lp_nr
= EH_LANDING_PAD_NR (label
);
1873 eh_landing_pad lp
= get_eh_landing_pad_from_number (lp_nr
);
1874 lp
->post_landing_pad
= NULL
;
1879 gimple_set_bb (stmt
, a
);
1884 /* When merging two BBs, if their counts are different, the larger count
1885 is selected as the new bb count. This is to handle inconsistent
1887 if (a
->loop_father
== b
->loop_father
)
1889 a
->count
= MAX (a
->count
, b
->count
);
1890 a
->frequency
= MAX (a
->frequency
, b
->frequency
);
1893 /* Merge the sequences. */
1894 last
= gsi_last_bb (a
);
1895 gsi_insert_seq_after (&last
, bb_seq (b
), GSI_NEW_STMT
);
1896 set_bb_seq (b
, NULL
);
1898 if (cfgcleanup_altered_bbs
)
1899 bitmap_set_bit (cfgcleanup_altered_bbs
, a
->index
);
1903 /* Return the one of two successors of BB that is not reachable by a
1904 complex edge, if there is one. Else, return BB. We use
1905 this in optimizations that use post-dominators for their heuristics,
1906 to catch the cases in C++ where function calls are involved. */
1909 single_noncomplex_succ (basic_block bb
)
1912 if (EDGE_COUNT (bb
->succs
) != 2)
1915 e0
= EDGE_SUCC (bb
, 0);
1916 e1
= EDGE_SUCC (bb
, 1);
1917 if (e0
->flags
& EDGE_COMPLEX
)
1919 if (e1
->flags
& EDGE_COMPLEX
)
1925 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1928 notice_special_calls (gimple call
)
1930 int flags
= gimple_call_flags (call
);
1932 if (flags
& ECF_MAY_BE_ALLOCA
)
1933 cfun
->calls_alloca
= true;
1934 if (flags
& ECF_RETURNS_TWICE
)
1935 cfun
->calls_setjmp
= true;
1939 /* Clear flags set by notice_special_calls. Used by dead code removal
1940 to update the flags. */
1943 clear_special_calls (void)
1945 cfun
->calls_alloca
= false;
1946 cfun
->calls_setjmp
= false;
1949 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1952 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb
)
1954 /* Since this block is no longer reachable, we can just delete all
1955 of its PHI nodes. */
1956 remove_phi_nodes (bb
);
1958 /* Remove edges to BB's successors. */
1959 while (EDGE_COUNT (bb
->succs
) > 0)
1960 remove_edge (EDGE_SUCC (bb
, 0));
1964 /* Remove statements of basic block BB. */
1967 remove_bb (basic_block bb
)
1969 gimple_stmt_iterator i
;
1973 fprintf (dump_file
, "Removing basic block %d\n", bb
->index
);
1974 if (dump_flags
& TDF_DETAILS
)
1976 dump_bb (dump_file
, bb
, 0, TDF_BLOCKS
);
1977 fprintf (dump_file
, "\n");
1983 struct loop
*loop
= bb
->loop_father
;
1985 /* If a loop gets removed, clean up the information associated
1987 if (loop
->latch
== bb
1988 || loop
->header
== bb
)
1989 free_numbers_of_iterations_estimates_loop (loop
);
1992 /* Remove all the instructions in the block. */
1993 if (bb_seq (bb
) != NULL
)
1995 /* Walk backwards so as to get a chance to substitute all
1996 released DEFs into debug stmts. See
1997 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
1999 for (i
= gsi_last_bb (bb
); !gsi_end_p (i
);)
2001 gimple stmt
= gsi_stmt (i
);
2002 if (gimple_code (stmt
) == GIMPLE_LABEL
2003 && (FORCED_LABEL (gimple_label_label (stmt
))
2004 || DECL_NONLOCAL (gimple_label_label (stmt
))))
2007 gimple_stmt_iterator new_gsi
;
2009 /* A non-reachable non-local label may still be referenced.
2010 But it no longer needs to carry the extra semantics of
2012 if (DECL_NONLOCAL (gimple_label_label (stmt
)))
2014 DECL_NONLOCAL (gimple_label_label (stmt
)) = 0;
2015 FORCED_LABEL (gimple_label_label (stmt
)) = 1;
2018 new_bb
= bb
->prev_bb
;
2019 new_gsi
= gsi_start_bb (new_bb
);
2020 gsi_remove (&i
, false);
2021 gsi_insert_before (&new_gsi
, stmt
, GSI_NEW_STMT
);
2025 /* Release SSA definitions if we are in SSA. Note that we
2026 may be called when not in SSA. For example,
2027 final_cleanup calls this function via
2028 cleanup_tree_cfg. */
2029 if (gimple_in_ssa_p (cfun
))
2030 release_defs (stmt
);
2032 gsi_remove (&i
, true);
2036 i
= gsi_last_bb (bb
);
2042 remove_phi_nodes_and_edges_for_unreachable_block (bb
);
2043 bb
->il
.gimple
.seq
= NULL
;
2044 bb
->il
.gimple
.phi_nodes
= NULL
;
2048 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2049 predicate VAL, return the edge that will be taken out of the block.
2050 If VAL does not match a unique edge, NULL is returned. */
2053 find_taken_edge (basic_block bb
, tree val
)
2057 stmt
= last_stmt (bb
);
2060 gcc_assert (is_ctrl_stmt (stmt
));
2065 if (!is_gimple_min_invariant (val
))
2068 if (gimple_code (stmt
) == GIMPLE_COND
)
2069 return find_taken_edge_cond_expr (bb
, val
);
2071 if (gimple_code (stmt
) == GIMPLE_SWITCH
)
2072 return find_taken_edge_switch_expr (bb
, val
);
2074 if (computed_goto_p (stmt
))
2076 /* Only optimize if the argument is a label, if the argument is
2077 not a label then we can not construct a proper CFG.
2079 It may be the case that we only need to allow the LABEL_REF to
2080 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2081 appear inside a LABEL_EXPR just to be safe. */
2082 if ((TREE_CODE (val
) == ADDR_EXPR
|| TREE_CODE (val
) == LABEL_EXPR
)
2083 && TREE_CODE (TREE_OPERAND (val
, 0)) == LABEL_DECL
)
2084 return find_taken_edge_computed_goto (bb
, TREE_OPERAND (val
, 0));
2091 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2092 statement, determine which of the outgoing edges will be taken out of the
2093 block. Return NULL if either edge may be taken. */
2096 find_taken_edge_computed_goto (basic_block bb
, tree val
)
2101 dest
= label_to_block (val
);
2104 e
= find_edge (bb
, dest
);
2105 gcc_assert (e
!= NULL
);
2111 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2112 statement, determine which of the two edges will be taken out of the
2113 block. Return NULL if either edge may be taken. */
2116 find_taken_edge_cond_expr (basic_block bb
, tree val
)
2118 edge true_edge
, false_edge
;
2120 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
2122 gcc_assert (TREE_CODE (val
) == INTEGER_CST
);
2123 return (integer_zerop (val
) ? false_edge
: true_edge
);
2126 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2127 statement, determine which edge will be taken out of the block. Return
2128 NULL if any edge may be taken. */
2131 find_taken_edge_switch_expr (basic_block bb
, tree val
)
2133 basic_block dest_bb
;
2138 switch_stmt
= last_stmt (bb
);
2139 taken_case
= find_case_label_for_value (switch_stmt
, val
);
2140 dest_bb
= label_to_block (CASE_LABEL (taken_case
));
2142 e
= find_edge (bb
, dest_bb
);
2148 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2149 We can make optimal use here of the fact that the case labels are
2150 sorted: We can do a binary search for a case matching VAL. */
2153 find_case_label_for_value (gimple switch_stmt
, tree val
)
2155 size_t low
, high
, n
= gimple_switch_num_labels (switch_stmt
);
2156 tree default_case
= gimple_switch_default_label (switch_stmt
);
2158 for (low
= 0, high
= n
; high
- low
> 1; )
2160 size_t i
= (high
+ low
) / 2;
2161 tree t
= gimple_switch_label (switch_stmt
, i
);
2164 /* Cache the result of comparing CASE_LOW and val. */
2165 cmp
= tree_int_cst_compare (CASE_LOW (t
), val
);
2172 if (CASE_HIGH (t
) == NULL
)
2174 /* A singe-valued case label. */
2180 /* A case range. We can only handle integer ranges. */
2181 if (cmp
<= 0 && tree_int_cst_compare (CASE_HIGH (t
), val
) >= 0)
2186 return default_case
;
2190 /* Dump a basic block on stderr. */
2193 gimple_debug_bb (basic_block bb
)
2195 dump_bb (stderr
, bb
, 0, TDF_VOPS
|TDF_MEMSYMS
|TDF_BLOCKS
);
2199 /* Dump basic block with index N on stderr. */
2202 gimple_debug_bb_n (int n
)
2204 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun
, n
));
2205 return BASIC_BLOCK_FOR_FN (cfun
, n
);
2209 /* Dump the CFG on stderr.
2211 FLAGS are the same used by the tree dumping functions
2212 (see TDF_* in dumpfile.h). */
2215 gimple_debug_cfg (int flags
)
2217 gimple_dump_cfg (stderr
, flags
);
2221 /* Dump the program showing basic block boundaries on the given FILE.
2223 FLAGS are the same used by the tree dumping functions (see TDF_* in
2227 gimple_dump_cfg (FILE *file
, int flags
)
2229 if (flags
& TDF_DETAILS
)
2231 dump_function_header (file
, current_function_decl
, flags
);
2232 fprintf (file
, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2233 n_basic_blocks_for_fn (cfun
), n_edges_for_fn (cfun
),
2234 last_basic_block_for_fn (cfun
));
2236 brief_dump_cfg (file
, flags
| TDF_COMMENT
);
2237 fprintf (file
, "\n");
2240 if (flags
& TDF_STATS
)
2241 dump_cfg_stats (file
);
2243 dump_function_to_file (current_function_decl
, file
, flags
| TDF_BLOCKS
);
2247 /* Dump CFG statistics on FILE. */
2250 dump_cfg_stats (FILE *file
)
2252 static long max_num_merged_labels
= 0;
2253 unsigned long size
, total
= 0;
2256 const char * const fmt_str
= "%-30s%-13s%12s\n";
2257 const char * const fmt_str_1
= "%-30s%13d%11lu%c\n";
2258 const char * const fmt_str_2
= "%-30s%13ld%11lu%c\n";
2259 const char * const fmt_str_3
= "%-43s%11lu%c\n";
2260 const char *funcname
= current_function_name ();
2262 fprintf (file
, "\nCFG Statistics for %s\n\n", funcname
);
2264 fprintf (file
, "---------------------------------------------------------\n");
2265 fprintf (file
, fmt_str
, "", " Number of ", "Memory");
2266 fprintf (file
, fmt_str
, "", " instances ", "used ");
2267 fprintf (file
, "---------------------------------------------------------\n");
2269 size
= n_basic_blocks_for_fn (cfun
) * sizeof (struct basic_block_def
);
2271 fprintf (file
, fmt_str_1
, "Basic blocks", n_basic_blocks_for_fn (cfun
),
2272 SCALE (size
), LABEL (size
));
2275 FOR_EACH_BB_FN (bb
, cfun
)
2276 num_edges
+= EDGE_COUNT (bb
->succs
);
2277 size
= num_edges
* sizeof (struct edge_def
);
2279 fprintf (file
, fmt_str_2
, "Edges", num_edges
, SCALE (size
), LABEL (size
));
2281 fprintf (file
, "---------------------------------------------------------\n");
2282 fprintf (file
, fmt_str_3
, "Total memory used by CFG data", SCALE (total
),
2284 fprintf (file
, "---------------------------------------------------------\n");
2285 fprintf (file
, "\n");
2287 if (cfg_stats
.num_merged_labels
> max_num_merged_labels
)
2288 max_num_merged_labels
= cfg_stats
.num_merged_labels
;
2290 fprintf (file
, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2291 cfg_stats
.num_merged_labels
, max_num_merged_labels
);
2293 fprintf (file
, "\n");
2297 /* Dump CFG statistics on stderr. Keep extern so that it's always
2298 linked in the final executable. */
2301 debug_cfg_stats (void)
2303 dump_cfg_stats (stderr
);
2306 /*---------------------------------------------------------------------------
2307 Miscellaneous helpers
2308 ---------------------------------------------------------------------------*/
2310 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2311 flow. Transfers of control flow associated with EH are excluded. */
2314 call_can_make_abnormal_goto (gimple t
)
2316 /* If the function has no non-local labels, then a call cannot make an
2317 abnormal transfer of control. */
2318 if (!cfun
->has_nonlocal_label
2319 && !cfun
->calls_setjmp
)
2322 /* Likewise if the call has no side effects. */
2323 if (!gimple_has_side_effects (t
))
2326 /* Likewise if the called function is leaf. */
2327 if (gimple_call_flags (t
) & ECF_LEAF
)
2334 /* Return true if T can make an abnormal transfer of control flow.
2335 Transfers of control flow associated with EH are excluded. */
2338 stmt_can_make_abnormal_goto (gimple t
)
2340 if (computed_goto_p (t
))
2342 if (is_gimple_call (t
))
2343 return call_can_make_abnormal_goto (t
);
2348 /* Return true if T represents a stmt that always transfers control. */
2351 is_ctrl_stmt (gimple t
)
2353 switch (gimple_code (t
))
2367 /* Return true if T is a statement that may alter the flow of control
2368 (e.g., a call to a non-returning function). */
2371 is_ctrl_altering_stmt (gimple t
)
2375 switch (gimple_code (t
))
2379 int flags
= gimple_call_flags (t
);
2381 /* A call alters control flow if it can make an abnormal goto. */
2382 if (call_can_make_abnormal_goto (t
))
2385 /* A call also alters control flow if it does not return. */
2386 if (flags
& ECF_NORETURN
)
2389 /* TM ending statements have backedges out of the transaction.
2390 Return true so we split the basic block containing them.
2391 Note that the TM_BUILTIN test is merely an optimization. */
2392 if ((flags
& ECF_TM_BUILTIN
)
2393 && is_tm_ending_fndecl (gimple_call_fndecl (t
)))
2396 /* BUILT_IN_RETURN call is same as return statement. */
2397 if (gimple_call_builtin_p (t
, BUILT_IN_RETURN
))
2402 case GIMPLE_EH_DISPATCH
:
2403 /* EH_DISPATCH branches to the individual catch handlers at
2404 this level of a try or allowed-exceptions region. It can
2405 fallthru to the next statement as well. */
2409 if (gimple_asm_nlabels (t
) > 0)
2414 /* OpenMP directives alter control flow. */
2417 case GIMPLE_TRANSACTION
:
2418 /* A transaction start alters control flow. */
2425 /* If a statement can throw, it alters control flow. */
2426 return stmt_can_throw_internal (t
);
2430 /* Return true if T is a simple local goto. */
2433 simple_goto_p (gimple t
)
2435 return (gimple_code (t
) == GIMPLE_GOTO
2436 && TREE_CODE (gimple_goto_dest (t
)) == LABEL_DECL
);
2440 /* Return true if STMT should start a new basic block. PREV_STMT is
2441 the statement preceding STMT. It is used when STMT is a label or a
2442 case label. Labels should only start a new basic block if their
2443 previous statement wasn't a label. Otherwise, sequence of labels
2444 would generate unnecessary basic blocks that only contain a single
2448 stmt_starts_bb_p (gimple stmt
, gimple prev_stmt
)
2453 /* Labels start a new basic block only if the preceding statement
2454 wasn't a label of the same type. This prevents the creation of
2455 consecutive blocks that have nothing but a single label. */
2456 if (gimple_code (stmt
) == GIMPLE_LABEL
)
2458 /* Nonlocal and computed GOTO targets always start a new block. */
2459 if (DECL_NONLOCAL (gimple_label_label (stmt
))
2460 || FORCED_LABEL (gimple_label_label (stmt
)))
2463 if (prev_stmt
&& gimple_code (prev_stmt
) == GIMPLE_LABEL
)
2465 if (DECL_NONLOCAL (gimple_label_label (prev_stmt
)))
2468 cfg_stats
.num_merged_labels
++;
2474 else if (gimple_code (stmt
) == GIMPLE_CALL
2475 && gimple_call_flags (stmt
) & ECF_RETURNS_TWICE
)
2476 /* setjmp acts similar to a nonlocal GOTO target and thus should
2477 start a new block. */
2484 /* Return true if T should end a basic block. */
2487 stmt_ends_bb_p (gimple t
)
2489 return is_ctrl_stmt (t
) || is_ctrl_altering_stmt (t
);
2492 /* Remove block annotations and other data structures. */
2495 delete_tree_cfg_annotations (void)
2497 vec_free (label_to_block_map_for_fn (cfun
));
2501 /* Return the first statement in basic block BB. */
2504 first_stmt (basic_block bb
)
2506 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2509 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2517 /* Return the first non-label statement in basic block BB. */
2520 first_non_label_stmt (basic_block bb
)
2522 gimple_stmt_iterator i
= gsi_start_bb (bb
);
2523 while (!gsi_end_p (i
) && gimple_code (gsi_stmt (i
)) == GIMPLE_LABEL
)
2525 return !gsi_end_p (i
) ? gsi_stmt (i
) : NULL
;
2528 /* Return the last statement in basic block BB. */
2531 last_stmt (basic_block bb
)
2533 gimple_stmt_iterator i
= gsi_last_bb (bb
);
2536 while (!gsi_end_p (i
) && is_gimple_debug ((stmt
= gsi_stmt (i
))))
2544 /* Return the last statement of an otherwise empty block. Return NULL
2545 if the block is totally empty, or if it contains more than one
2549 last_and_only_stmt (basic_block bb
)
2551 gimple_stmt_iterator i
= gsi_last_nondebug_bb (bb
);
2557 last
= gsi_stmt (i
);
2558 gsi_prev_nondebug (&i
);
2562 /* Empty statements should no longer appear in the instruction stream.
2563 Everything that might have appeared before should be deleted by
2564 remove_useless_stmts, and the optimizers should just gsi_remove
2565 instead of smashing with build_empty_stmt.
2567 Thus the only thing that should appear here in a block containing
2568 one executable statement is a label. */
2569 prev
= gsi_stmt (i
);
2570 if (gimple_code (prev
) == GIMPLE_LABEL
)
2576 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
/* NOTE(review): fragmentary — declarations of `i' and `vm' and the
   early-exit when the var map is empty were lost in extraction.  */
2579 reinstall_phi_args (edge new_edge
, edge old_edge
)
2581 edge_var_map_vector
*v
;
2584 gimple_stmt_iterator phis
;
/* Fetch the queued (def, result, location) triples recorded when
   OLD_EDGE was redirected.  */
2586 v
= redirect_edge_var_map_vector (old_edge
);
/* Walk the var map and the PHIs of NEW_EDGE->dest in lockstep.  */
2590 for (i
= 0, phis
= gsi_start_phis (new_edge
->dest
);
2591 v
->iterate (i
, &vm
) && !gsi_end_p (phis
);
2592 i
++, gsi_next (&phis
))
2594 gimple phi
= gsi_stmt (phis
);
2595 tree result
= redirect_edge_var_map_result (vm
);
2596 tree arg
= redirect_edge_var_map_def (vm
);
/* The queued entries must line up one-to-one with the PHI nodes.  */
2598 gcc_assert (result
== gimple_phi_result (phi
));
2600 add_phi_arg (phi
, arg
, new_edge
, redirect_edge_var_map_location (vm
));
/* Drop the queue now that the arguments have been re-attached.  */
2603 redirect_edge_var_map_clear (old_edge
);
2606 /* Returns the basic block after which the new basic block created
2607 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2608 near its "logical" location. This is of most help to humans looking
2609 at debugging dumps. */
/* NOTE(review): the guard around `dest_prev' (original lines 2616-2618)
   and the final fallback return were lost in extraction.  */
2612 split_edge_bb_loc (edge edge_in
)
2614 basic_block dest
= edge_in
->dest
;
2615 basic_block dest_prev
= dest
->prev_bb
;
/* If the block before DEST reaches DEST by a simple edge, placing the
   new block right before DEST would separate them — prefer the source.  */
2619 edge e
= find_edge (dest_prev
, dest
);
2620 if (e
&& !(e
->flags
& EDGE_COMPLEX
))
2621 return edge_in
->src
;
2626 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2627 Abort on abnormal edges. */
/* NOTE(review): declarations of `new_edge'/`e', loop-structure fixups and
   the `return new_bb;' tail were lost in extraction (line numbers jump
   2651 -> 2657).  */
2630 gimple_split_edge (edge edge_in
)
2632 basic_block new_bb
, after_bb
, dest
;
2635 /* Abnormal edges cannot be split. */
2636 gcc_assert (!(edge_in
->flags
& EDGE_ABNORMAL
));
2638 dest
= edge_in
->dest
;
2640 after_bb
= split_edge_bb_loc (edge_in
);
/* Build the new block and give it EDGE_IN's execution profile.  */
2642 new_bb
= create_empty_bb (after_bb
);
2643 new_bb
->frequency
= EDGE_FREQUENCY (edge_in
);
2644 new_bb
->count
= edge_in
->count
;
/* The new block falls through unconditionally into the old destination.  */
2645 new_edge
= make_edge (new_bb
, dest
, EDGE_FALLTHRU
);
2646 new_edge
->probability
= REG_BR_PROB_BASE
;
2647 new_edge
->count
= edge_in
->count
;
/* Retarget EDGE_IN at the new block, then move the queued PHI
   arguments onto the fallthru edge.  */
2649 e
= redirect_edge_and_branch (edge_in
, new_bb
);
2650 gcc_assert (e
== edge_in
);
2651 reinstall_phi_args (new_edge
, e
);
2657 /* Verify properties of the address expression T with base object BASE. */
/* NOTE(review): declarations of `old_constant'/`new_constant', the
   error-path returns and the final return were lost in extraction.  */
2660 verify_address (tree t
, tree base
)
2663 bool old_side_effects
;
2665 bool new_side_effects
;
/* Snapshot the cached invariant/side-effect bits, recompute them, and
   complain if the cached values were stale.  */
2667 old_constant
= TREE_CONSTANT (t
);
2668 old_side_effects
= TREE_SIDE_EFFECTS (t
);
2670 recompute_tree_invariant_for_addr_expr (t
);
2671 new_side_effects
= TREE_SIDE_EFFECTS (t
);
2672 new_constant
= TREE_CONSTANT (t
);
2674 if (old_constant
!= new_constant
)
2676 error ("constant not recomputed when ADDR_EXPR changed");
2679 if (old_side_effects
!= new_side_effects
)
2681 error ("side effects not recomputed when ADDR_EXPR changed");
/* Only plain decls can carry DECL_GIMPLE_REG_P; anything else is exempt
   from the check below.  */
2685 if (!(TREE_CODE (base
) == VAR_DECL
2686 || TREE_CODE (base
) == PARM_DECL
2687 || TREE_CODE (base
) == RESULT_DECL
))
/* Taking the address of a decl is incompatible with treating it as a
   gimple register.  */
2690 if (DECL_GIMPLE_REG_P (base
))
2692 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2699 /* Callback for walk_tree, check that all elements with address taken are
2700 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2701 inside a PHI node. */
/* NOTE(review): heavily fragmentary — most `case' labels of the switch,
   the `tree t = *tp' setup, returns and closing braces were lost in
   extraction; only the checks themselves survive.  Kept verbatim.  */
2704 verify_expr (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
2711 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2712 #define CHECK_OP(N, MSG) \
2713 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2714 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2716 switch (TREE_CODE (t
))
/* SSA_NAME: a name on the free list must not still be referenced.  */
2719 if (SSA_NAME_IN_FREE_LIST (t
))
2721 error ("SSA name in freelist but still referenced");
2727 error ("INDIRECT_REF in gimple IL");
/* MEM_REF: operand 0 must be a valid gimple address of pointer type,
   operand 1 a pointer-typed INTEGER_CST offset.  */
2731 x
= TREE_OPERAND (t
, 0);
2732 if (!POINTER_TYPE_P (TREE_TYPE (x
))
2733 || !is_gimple_mem_ref_addr (x
))
2735 error ("invalid first operand of MEM_REF");
2738 if (TREE_CODE (TREE_OPERAND (t
, 1)) != INTEGER_CST
2739 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 1))))
2741 error ("invalid offset operand of MEM_REF");
2742 return TREE_OPERAND (t
, 1);
2744 if (TREE_CODE (x
) == ADDR_EXPR
2745 && (x
= verify_address (x
, TREE_OPERAND (x
, 0))))
/* ASSERT_EXPR: the asserted condition must not fold to false.  */
2751 x
= fold (ASSERT_EXPR_COND (t
));
2752 if (x
== boolean_false_node
)
2754 error ("ASSERT_EXPR with an always-false condition");
2760 error ("MODIFY_EXPR not expected while having tuples");
/* ADDR_EXPR: must be a gimple address; walk down the handled components
   to the base decl.  */
2767 gcc_assert (is_gimple_address (t
));
2769 /* Skip any references (they will be checked when we recurse down the
2770 tree) and ensure that any variable used as a prefix is marked
2772 for (x
= TREE_OPERAND (t
, 0);
2773 handled_component_p (x
);
2774 x
= TREE_OPERAND (x
, 0))
2777 if ((tem
= verify_address (t
, x
)))
2780 if (!(TREE_CODE (x
) == VAR_DECL
2781 || TREE_CODE (x
) == PARM_DECL
2782 || TREE_CODE (x
) == RESULT_DECL
))
/* A decl whose address is taken must have TREE_ADDRESSABLE set.  */
2785 if (!TREE_ADDRESSABLE (x
))
2787 error ("address taken, but ADDRESSABLE bit not set");
/* COND_EXPR: condition must be integral and a valid gimple condexpr.  */
2795 x
= COND_EXPR_COND (t
);
2796 if (!INTEGRAL_TYPE_P (TREE_TYPE (x
)))
2798 error ("non-integral used in condition");
2801 if (!is_gimple_condexpr (x
))
2803 error ("invalid conditional operand");
2808 case NON_LVALUE_EXPR
:
2809 case TRUTH_NOT_EXPR
:
2813 case FIX_TRUNC_EXPR
:
2818 CHECK_OP (0, "invalid operand to unary operator");
/* BIT_FIELD_REF / REALPART_EXPR / IMAGPART_EXPR checks.  */
2824 if (!is_gimple_reg_type (TREE_TYPE (t
)))
2826 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2830 if (TREE_CODE (t
) == BIT_FIELD_REF
)
2832 tree t0
= TREE_OPERAND (t
, 0);
2833 tree t1
= TREE_OPERAND (t
, 1);
2834 tree t2
= TREE_OPERAND (t
, 2);
/* Size (t1) and position (t2) must fit an unsigned HOST_WIDE_INT.  */
2835 if (!tree_fits_uhwi_p (t1
)
2836 || !tree_fits_uhwi_p (t2
))
2838 error ("invalid position or size operand to BIT_FIELD_REF");
2841 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
2842 && (TYPE_PRECISION (TREE_TYPE (t
))
2843 != tree_to_uhwi (t1
)))
2845 error ("integral result type precision does not match "
2846 "field size of BIT_FIELD_REF");
2849 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t
))
2850 && TYPE_MODE (TREE_TYPE (t
)) != BLKmode
2851 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t
)))
2852 != tree_to_uhwi (t1
)))
2854 error ("mode precision of non-integral result does not "
2855 "match field size of BIT_FIELD_REF");
2858 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0
))
2859 && (tree_to_uhwi (t1
) + tree_to_uhwi (t2
)
2860 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0
)))))
2862 error ("position plus size exceeds size of referenced object in "
2867 t
= TREE_OPERAND (t
, 0);
2872 case ARRAY_RANGE_REF
:
2873 case VIEW_CONVERT_EXPR
:
2874 /* We have a nest of references. Verify that each of the operands
2875 that determine where to reference is either a constant or a variable,
2876 verify that the base is valid, and then show we've already checked
2878 while (handled_component_p (t
))
2880 if (TREE_CODE (t
) == COMPONENT_REF
&& TREE_OPERAND (t
, 2))
2881 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2882 else if (TREE_CODE (t
) == ARRAY_REF
2883 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
2885 CHECK_OP (1, "invalid array index");
2886 if (TREE_OPERAND (t
, 2))
2887 CHECK_OP (2, "invalid array lower bound");
2888 if (TREE_OPERAND (t
, 3))
2889 CHECK_OP (3, "invalid array stride");
2891 else if (TREE_CODE (t
) == BIT_FIELD_REF
2892 || TREE_CODE (t
) == REALPART_EXPR
2893 || TREE_CODE (t
) == IMAGPART_EXPR
)
2895 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2900 t
= TREE_OPERAND (t
, 0);
/* After stripping components, the base must be an invariant or lvalue.  */
2903 if (!is_gimple_min_invariant (t
) && !is_gimple_lvalue (t
))
2905 error ("invalid reference prefix");
2912 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
2913 POINTER_PLUS_EXPR. */
2914 if (POINTER_TYPE_P (TREE_TYPE (t
)))
2916 error ("invalid operand to plus/minus, type is a pointer");
2919 CHECK_OP (0, "invalid operand to binary operator");
2920 CHECK_OP (1, "invalid operand to binary operator");
2923 case POINTER_PLUS_EXPR
:
2924 /* Check to make sure the first operand is a pointer or reference type. */
2925 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t
, 0))))
2927 error ("invalid operand to pointer plus, first operand is not a pointer");
2930 /* Check to make sure the second operand is a ptrofftype. */
2931 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t
, 1))))
2933 error ("invalid operand to pointer plus, second operand is not an "
2934 "integer type of appropriate width");
2944 case UNORDERED_EXPR
:
2953 case TRUNC_DIV_EXPR
:
2955 case FLOOR_DIV_EXPR
:
2956 case ROUND_DIV_EXPR
:
2957 case TRUNC_MOD_EXPR
:
2959 case FLOOR_MOD_EXPR
:
2960 case ROUND_MOD_EXPR
:
2962 case EXACT_DIV_EXPR
:
2972 CHECK_OP (0, "invalid operand to binary operator");
2973 CHECK_OP (1, "invalid operand to binary operator");
/* Constant vector constructors are handled specially (details lost to
   extraction around original line 2977).  */
2977 if (TREE_CONSTANT (t
) && TREE_CODE (TREE_TYPE (t
)) == VECTOR_TYPE
)
2981 case CASE_LABEL_EXPR
:
2984 error ("invalid CASE_CHAIN");
2998 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2999 Returns true if there is an error, otherwise false. */
/* NOTE(review): returns, braces and the TARGET_MEM_REF operand handling
   (original lines around 3018) were lost in extraction.  */
3002 verify_types_in_gimple_min_lval (tree expr
)
/* Plain identifiers (decls, SSA names) are always fine.  */
3006 if (is_gimple_id (expr
))
/* Otherwise only MEM_REF or TARGET_MEM_REF qualify as a min lvalue.  */
3009 if (TREE_CODE (expr
) != TARGET_MEM_REF
3010 && TREE_CODE (expr
) != MEM_REF
)
3012 error ("invalid expression for min lvalue");
3016 /* TARGET_MEM_REFs are strange beasts. */
3017 if (TREE_CODE (expr
) == TARGET_MEM_REF
)
/* The base operand of the indirect reference must be a gimple value.  */
3020 op
= TREE_OPERAND (expr
, 0);
3021 if (!is_gimple_val (op
))
3023 error ("invalid operand in indirect reference");
3024 debug_generic_stmt (op
);
3027 /* Memory references now generally can involve a value conversion. */
3032 /* Verify if EXPR is a valid GIMPLE reference expression. If
3033 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3034 if there is an error, otherwise false. */
/* NOTE(review): fragmentary — `return true;' error exits, the
   `expr = op' loop step and closing braces were lost in extraction.  */
3037 verify_types_in_gimple_reference (tree expr
, bool require_lvalue
)
/* Peel the chain of handled components, checking each level.  */
3039 while (handled_component_p (expr
))
3041 tree op
= TREE_OPERAND (expr
, 0);
/* Array indices, lower bound and stride must all be gimple values.  */
3043 if (TREE_CODE (expr
) == ARRAY_REF
3044 || TREE_CODE (expr
) == ARRAY_RANGE_REF
)
3046 if (!is_gimple_val (TREE_OPERAND (expr
, 1))
3047 || (TREE_OPERAND (expr
, 2)
3048 && !is_gimple_val (TREE_OPERAND (expr
, 2)))
3049 || (TREE_OPERAND (expr
, 3)
3050 && !is_gimple_val (TREE_OPERAND (expr
, 3))))
3052 error ("invalid operands to array reference");
3053 debug_generic_stmt (expr
);
3058 /* Verify if the reference array element types are compatible. */
3059 if (TREE_CODE (expr
) == ARRAY_REF
3060 && !useless_type_conversion_p (TREE_TYPE (expr
),
3061 TREE_TYPE (TREE_TYPE (op
))))
3063 error ("type mismatch in array reference");
3064 debug_generic_stmt (TREE_TYPE (expr
));
3065 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
3068 if (TREE_CODE (expr
) == ARRAY_RANGE_REF
3069 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr
)),
3070 TREE_TYPE (TREE_TYPE (op
))))
3072 error ("type mismatch in array range reference");
3073 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr
)));
3074 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
/* REALPART/IMAGPART must yield the complex type's element type.  */
3078 if ((TREE_CODE (expr
) == REALPART_EXPR
3079 || TREE_CODE (expr
) == IMAGPART_EXPR
)
3080 && !useless_type_conversion_p (TREE_TYPE (expr
),
3081 TREE_TYPE (TREE_TYPE (op
))))
3083 error ("type mismatch in real/imagpart reference");
3084 debug_generic_stmt (TREE_TYPE (expr
));
3085 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op
)));
/* COMPONENT_REF must yield the type of the referenced FIELD_DECL.  */
3089 if (TREE_CODE (expr
) == COMPONENT_REF
3090 && !useless_type_conversion_p (TREE_TYPE (expr
),
3091 TREE_TYPE (TREE_OPERAND (expr
, 1))))
3093 error ("type mismatch in component reference");
3094 debug_generic_stmt (TREE_TYPE (expr
));
3095 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr
, 1)));
3099 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
)
3101 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3102 that their operand is not an SSA name or an invariant when
3103 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3104 bug). Otherwise there is nothing to verify, gross mismatches at
3105 most invoke undefined behavior. */
3107 && (TREE_CODE (op
) == SSA_NAME
3108 || is_gimple_min_invariant (op
)))
3110 error ("conversion of an SSA_NAME on the left hand side");
3111 debug_generic_stmt (expr
);
3114 else if (TREE_CODE (op
) == SSA_NAME
3115 && TYPE_SIZE (TREE_TYPE (expr
)) != TYPE_SIZE (TREE_TYPE (op
)))
3117 error ("conversion of register to a different size");
3118 debug_generic_stmt (expr
);
3121 else if (!handled_component_p (op
))
/* After peeling, validate the innermost MEM_REF/TARGET_MEM_REF.  */
3128 if (TREE_CODE (expr
) == MEM_REF
)
3130 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr
, 0)))
3132 error ("invalid address operand in MEM_REF");
3133 debug_generic_stmt (expr
);
3136 if (TREE_CODE (TREE_OPERAND (expr
, 1)) != INTEGER_CST
3137 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr
, 1))))
3139 error ("invalid offset operand in MEM_REF");
3140 debug_generic_stmt (expr
);
3144 else if (TREE_CODE (expr
) == TARGET_MEM_REF
)
3146 if (!TMR_BASE (expr
)
3147 || !is_gimple_mem_ref_addr (TMR_BASE (expr
)))
3149 error ("invalid address operand in TARGET_MEM_REF");
3152 if (!TMR_OFFSET (expr
)
3153 || TREE_CODE (TMR_OFFSET (expr
)) != INTEGER_CST
3154 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr
))))
3156 error ("invalid offset operand in TARGET_MEM_REF");
3157 debug_generic_stmt (expr
);
/* Finally defer to the min-lval check unless a bare invariant is OK.  */
3162 return ((require_lvalue
|| !is_gimple_min_invariant (expr
))
3163 && verify_types_in_gimple_min_lval (expr
));
3166 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3167 list of pointer-to types that is trivially convertible to DEST. */
/* NOTE(review): the declaration of `src' and the `return' statements
   were lost in extraction; only the tests survive.  */
3170 one_pointer_to_useless_type_conversion_p (tree dest
, tree src_obj
)
/* No pointer-to types recorded at all.  */
3174 if (!TYPE_POINTER_TO (src_obj
))
/* Scan the chain of pointer-to variants of SRC_OBJ.  */
3177 for (src
= TYPE_POINTER_TO (src_obj
); src
; src
= TYPE_NEXT_PTR_TO (src
))
3178 if (useless_type_conversion_p (dest
, src
))
3184 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3185 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
/* NOTE(review): only the return-type line and braces were lost here;
   the predicate logic below is complete.  */
3188 valid_fixed_convert_types_p (tree type1
, tree type2
)
/* TYPE1 must be fixed-point; TYPE2 may be integral, float or fixed.  */
3190 return (FIXED_POINT_TYPE_P (type1
)
3191 && (INTEGRAL_TYPE_P (type2
)
3192 || SCALAR_FLOAT_TYPE_P (type2
)
3193 || FIXED_POINT_TYPE_P (type2
)));
3196 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3197 is a problem, otherwise false. */
/* NOTE(review): fragmentary — `return true;' exits, several `if'
   conditions' first lines (e.g. the internal-call/fn checks around
   original 3207-3218) and closing braces were lost in extraction.  */
3200 verify_gimple_call (gimple stmt
)
3202 tree fn
= gimple_call_fn (stmt
);
3203 tree fntype
, fndecl
;
/* Internal calls have no fn tree; a regular call must have exactly one
   target.  */
3206 if (gimple_call_internal_p (stmt
))
3210 error ("gimple call has two targets");
3211 debug_generic_stmt (fn
);
3219 error ("gimple call has no target");
3224 if (fn
&& !is_gimple_call_addr (fn
))
3226 error ("invalid function in gimple call");
3227 debug_generic_stmt (fn
);
/* The callee must be a pointer to FUNCTION_TYPE or METHOD_TYPE.  */
3232 && (!POINTER_TYPE_P (TREE_TYPE (fn
))
3233 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != FUNCTION_TYPE
3234 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) != METHOD_TYPE
)))
3236 error ("non-function in gimple call");
3240 fndecl
= gimple_call_fndecl (stmt
);
/* Looping-const-or-pure implies pure or readonly.  */
3242 && TREE_CODE (fndecl
) == FUNCTION_DECL
3243 && DECL_LOOPING_CONST_OR_PURE_P (fndecl
)
3244 && !DECL_PURE_P (fndecl
)
3245 && !TREE_READONLY (fndecl
))
3247 error ("invalid pure const state for function");
3251 if (gimple_call_lhs (stmt
)
3252 && (!is_gimple_lvalue (gimple_call_lhs (stmt
))
3253 || verify_types_in_gimple_reference (gimple_call_lhs (stmt
), true)))
3255 error ("invalid LHS in gimple call");
3259 if (gimple_call_lhs (stmt
) && gimple_call_noreturn_p (stmt
))
3261 error ("LHS in noreturn call");
3265 fntype
= gimple_call_fntype (stmt
);
3267 && gimple_call_lhs (stmt
)
3268 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt
)),
3270 /* ??? At least C++ misses conversions at assignments from
3271 void * call results.
3272 ??? Java is completely off. Especially with functions
3273 returning java.lang.Object.
3274 For now simply allow arbitrary pointer type conversions. */
3275 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt
)))
3276 && POINTER_TYPE_P (TREE_TYPE (fntype
))))
3278 error ("invalid conversion in gimple call");
3279 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt
)));
3280 debug_generic_stmt (TREE_TYPE (fntype
));
3284 if (gimple_call_chain (stmt
)
3285 && !is_gimple_val (gimple_call_chain (stmt
)))
3287 error ("invalid static chain in gimple call");
3288 debug_generic_stmt (gimple_call_chain (stmt
));
3292 /* If there is a static chain argument, this should not be an indirect
3293 call, and the decl should have DECL_STATIC_CHAIN set. */
3294 if (gimple_call_chain (stmt
))
3296 if (!gimple_call_fndecl (stmt
))
3298 error ("static chain in indirect gimple call");
3301 fn
= TREE_OPERAND (fn
, 0);
3303 if (!DECL_STATIC_CHAIN (fn
))
3305 error ("static chain with function that doesn%'t use one");
3310 /* ??? The C frontend passes unpromoted arguments in case it
3311 didn't see a function declaration before the call. So for now
3312 leave the call arguments mostly unverified. Once we gimplify
3313 unit-at-a-time we have a chance to fix this. */
/* Each argument must be a gimple value (register types) or lvalue
   (aggregate types).  */
3315 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3317 tree arg
= gimple_call_arg (stmt
, i
);
3318 if ((is_gimple_reg_type (TREE_TYPE (arg
))
3319 && !is_gimple_val (arg
))
3320 || (!is_gimple_reg_type (TREE_TYPE (arg
))
3321 && !is_gimple_lvalue (arg
)))
3323 error ("invalid argument to gimple call");
3324 debug_generic_expr (arg
);
3332 /* Verifies the gimple comparison with the result type TYPE and
3333 the operands OP0 and OP1. */
/* NOTE(review): `return true/false;' statements and closing braces were
   lost in extraction; the checks themselves are intact.  */
3336 verify_gimple_comparison (tree type
, tree op0
, tree op1
)
3338 tree op0_type
= TREE_TYPE (op0
);
3339 tree op1_type
= TREE_TYPE (op1
);
3341 if (!is_gimple_val (op0
) || !is_gimple_val (op1
))
3343 error ("invalid operands in gimple comparison");
3347 /* For comparisons we do not have the operations type as the
3348 effective type the comparison is carried out in. Instead
3349 we require that either the first operand is trivially
3350 convertible into the second, or the other way around.
3351 Because we special-case pointers to void we allow
3352 comparisons of pointers with the same mode as well. */
3353 if (!useless_type_conversion_p (op0_type
, op1_type
)
3354 && !useless_type_conversion_p (op1_type
, op0_type
)
3355 && (!POINTER_TYPE_P (op0_type
)
3356 || !POINTER_TYPE_P (op1_type
)
3357 || TYPE_MODE (op0_type
) != TYPE_MODE (op1_type
)))
3359 error ("mismatching comparison operand types");
3360 debug_generic_expr (op0_type
);
3361 debug_generic_expr (op1_type
);
3365 /* The resulting type of a comparison may be an effective boolean type. */
3366 if (INTEGRAL_TYPE_P (type
)
3367 && (TREE_CODE (type
) == BOOLEAN_TYPE
3368 || TYPE_PRECISION (type
) == 1))
/* Boolean results are invalid for vector operand comparisons.  */
3370 if (TREE_CODE (op0_type
) == VECTOR_TYPE
3371 || TREE_CODE (op1_type
) == VECTOR_TYPE
)
3373 error ("vector comparison returning a boolean")
3374 debug_generic_expr (op0_type
);
3375 debug_generic_expr (op1_type
);
3379 /* Or an integer vector type with the same size and element count
3380 as the comparison operand types. */
3381 else if (TREE_CODE (type
) == VECTOR_TYPE
3382 && TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
)
3384 if (TREE_CODE (op0_type
) != VECTOR_TYPE
3385 || TREE_CODE (op1_type
) != VECTOR_TYPE
)
3387 error ("non-vector operands in vector comparison");
3388 debug_generic_expr (op0_type
);
3389 debug_generic_expr (op1_type
);
/* Result vector must match operand vectors in element count, element
   size, and be signed.  */
3393 if (TYPE_VECTOR_SUBPARTS (type
) != TYPE_VECTOR_SUBPARTS (op0_type
)
3394 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type
)))
3395 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type
))))
3396 /* The result of a vector comparison is of signed
3398 || TYPE_UNSIGNED (TREE_TYPE (type
)))
3400 error ("invalid vector comparison resulting type");
3401 debug_generic_expr (type
);
3407 error ("bogus comparison result type");
3408 debug_generic_expr (type
);
3415 /* Verify a gimple assignment statement STMT with an unary rhs.
3416 Returns true if anything is wrong. */
/* NOTE(review): fragmentary — the switch head, many `case' labels
   (e.g. CASE_CONVERT around original 3441), `return' statements and
   braces were lost in extraction.  */
3419 verify_gimple_assign_unary (gimple stmt
)
3421 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
)
3422 tree lhs
= gimple_assign_lhs (stmt
);
3423 tree lhs_type
= TREE_TYPE (lhs
);
3424 tree rhs1
= gimple_assign_rhs1 (stmt
);
3425 tree rhs1_type
= TREE_TYPE (rhs1
);
3427 if (!is_gimple_reg (lhs
))
3429 error ("non-register as LHS of unary operation");
3433 if (!is_gimple_val (rhs1
))
3435 error ("invalid operand in unary operation");
3439 /* First handle conversions. */
3444 /* Allow conversions from pointer type to integral type only if
3445 there is no sign or zero extension involved.
3446 For targets were the precision of ptrofftype doesn't match that
3447 of pointers we need to allow arbitrary conversions to ptrofftype. */
3448 if ((POINTER_TYPE_P (lhs_type
)
3449 && INTEGRAL_TYPE_P (rhs1_type
))
3450 || (POINTER_TYPE_P (rhs1_type
)
3451 && INTEGRAL_TYPE_P (lhs_type
)
3452 && (TYPE_PRECISION (rhs1_type
) >= TYPE_PRECISION (lhs_type
)
3453 || ptrofftype_p (sizetype
))))
3456 /* Allow conversion from integral to offset type and vice versa. */
3457 if ((TREE_CODE (lhs_type
) == OFFSET_TYPE
3458 && INTEGRAL_TYPE_P (rhs1_type
))
3459 || (INTEGRAL_TYPE_P (lhs_type
)
3460 && TREE_CODE (rhs1_type
) == OFFSET_TYPE
))
3463 /* Otherwise assert we are converting between types of the
3465 if (INTEGRAL_TYPE_P (lhs_type
) != INTEGRAL_TYPE_P (rhs1_type
))
3467 error ("invalid types in nop conversion");
3468 debug_generic_expr (lhs_type
);
3469 debug_generic_expr (rhs1_type
);
3476 case ADDR_SPACE_CONVERT_EXPR
:
/* Both sides must be pointers into distinct address spaces.  */
3478 if (!POINTER_TYPE_P (rhs1_type
) || !POINTER_TYPE_P (lhs_type
)
3479 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type
))
3480 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type
))))
3482 error ("invalid types in address space conversion");
3483 debug_generic_expr (lhs_type
);
3484 debug_generic_expr (rhs1_type
);
3491 case FIXED_CONVERT_EXPR
:
3493 if (!valid_fixed_convert_types_p (lhs_type
, rhs1_type
)
3494 && !valid_fixed_convert_types_p (rhs1_type
, lhs_type
))
3496 error ("invalid types in fixed-point conversion");
3497 debug_generic_expr (lhs_type
);
3498 debug_generic_expr (rhs1_type
);
/* FLOAT_EXPR: integral (or integer vector) -> float (or float vector).  */
3507 if ((!INTEGRAL_TYPE_P (rhs1_type
) || !SCALAR_FLOAT_TYPE_P (lhs_type
))
3508 && (!VECTOR_INTEGER_TYPE_P (rhs1_type
)
3509 || !VECTOR_FLOAT_TYPE_P (lhs_type
)))
3511 error ("invalid types in conversion to floating point");
3512 debug_generic_expr (lhs_type
);
3513 debug_generic_expr (rhs1_type
);
3520 case FIX_TRUNC_EXPR
:
/* FIX_TRUNC: float (or float vector) -> integral (or integer vector).  */
3522 if ((!INTEGRAL_TYPE_P (lhs_type
) || !SCALAR_FLOAT_TYPE_P (rhs1_type
))
3523 && (!VECTOR_INTEGER_TYPE_P (lhs_type
)
3524 || !VECTOR_FLOAT_TYPE_P (rhs1_type
)))
3526 error ("invalid types in conversion to integer");
3527 debug_generic_expr (lhs_type
);
3528 debug_generic_expr (rhs1_type
);
3535 case VEC_UNPACK_HI_EXPR
:
3536 case VEC_UNPACK_LO_EXPR
:
3537 case REDUC_MAX_EXPR
:
3538 case REDUC_MIN_EXPR
:
3539 case REDUC_PLUS_EXPR
:
3540 case VEC_UNPACK_FLOAT_HI_EXPR
:
3541 case VEC_UNPACK_FLOAT_LO_EXPR
:
3549 case NON_LVALUE_EXPR
:
3557 /* For the remaining codes assert there is no conversion involved. */
3558 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
3560 error ("non-trivial conversion in unary operation");
3561 debug_generic_expr (lhs_type
);
3562 debug_generic_expr (rhs1_type
);
3569 /* Verify a gimple assignment statement STMT with a binary rhs.
3570 Returns true if anything is wrong. */
/* NOTE(review): fragmentary — the switch head, several case labels,
   `return' statements and braces were lost in extraction; individual
   per-opcode checks survive and are kept verbatim.  */
3573 verify_gimple_assign_binary (gimple stmt
)
3575 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3576 tree lhs
= gimple_assign_lhs (stmt
);
3577 tree lhs_type
= TREE_TYPE (lhs
);
3578 tree rhs1
= gimple_assign_rhs1 (stmt
);
3579 tree rhs1_type
= TREE_TYPE (rhs1
);
3580 tree rhs2
= gimple_assign_rhs2 (stmt
);
3581 tree rhs2_type
= TREE_TYPE (rhs2
);
3583 if (!is_gimple_reg (lhs
))
3585 error ("non-register as LHS of binary operation");
3589 if (!is_gimple_val (rhs1
)
3590 || !is_gimple_val (rhs2
))
3592 error ("invalid operands in binary operation");
3596 /* First handle operations that involve different types. */
/* COMPLEX_EXPR: scalar real/imag parts building a COMPLEX_TYPE.  */
3601 if (TREE_CODE (lhs_type
) != COMPLEX_TYPE
3602 || !(INTEGRAL_TYPE_P (rhs1_type
)
3603 || SCALAR_FLOAT_TYPE_P (rhs1_type
))
3604 || !(INTEGRAL_TYPE_P (rhs2_type
)
3605 || SCALAR_FLOAT_TYPE_P (rhs2_type
)))
3607 error ("type mismatch in complex expression");
3608 debug_generic_expr (lhs_type
);
3609 debug_generic_expr (rhs1_type
);
3610 debug_generic_expr (rhs2_type
);
3622 /* Shifts and rotates are ok on integral types, fixed point
3623 types and integer vector types. */
3624 if ((!INTEGRAL_TYPE_P (rhs1_type
)
3625 && !FIXED_POINT_TYPE_P (rhs1_type
)
3626 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3627 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))))
3628 || (!INTEGRAL_TYPE_P (rhs2_type
)
3629 /* Vector shifts of vectors are also ok. */
3630 && !(TREE_CODE (rhs1_type
) == VECTOR_TYPE
3631 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3632 && TREE_CODE (rhs2_type
) == VECTOR_TYPE
3633 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type
))))
3634 || !useless_type_conversion_p (lhs_type
, rhs1_type
))
3636 error ("type mismatch in shift expression");
3637 debug_generic_expr (lhs_type
);
3638 debug_generic_expr (rhs1_type
);
3639 debug_generic_expr (rhs2_type
);
3646 case VEC_LSHIFT_EXPR
:
3647 case VEC_RSHIFT_EXPR
:
/* Whole-vector shifts: operand 1 a vector of shiftable elements,
   operand 2 integral or an integer vector.  */
3649 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3650 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3651 || POINTER_TYPE_P (TREE_TYPE (rhs1_type
))
3652 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type
))
3653 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type
)))
3654 || (!INTEGRAL_TYPE_P (rhs2_type
)
3655 && (TREE_CODE (rhs2_type
) != VECTOR_TYPE
3656 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type
))))
3657 || !useless_type_conversion_p (lhs_type
, rhs1_type
))
3659 error ("type mismatch in vector shift expression");
3660 debug_generic_expr (lhs_type
);
3661 debug_generic_expr (rhs1_type
);
3662 debug_generic_expr (rhs2_type
);
3665 /* For shifting a vector of non-integral components we
3666 only allow shifting by a constant multiple of the element size. */
3667 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3668 && (TREE_CODE (rhs2
) != INTEGER_CST
3669 || !div_if_zero_remainder (rhs2
,
3670 TYPE_SIZE (TREE_TYPE (rhs1_type
)))))
3672 error ("non-element sized vector shift of floating point vector");
3679 case WIDEN_LSHIFT_EXPR
:
/* Widening shift: result must be at least twice as wide as operand 1.  */
3681 if (!INTEGRAL_TYPE_P (lhs_type
)
3682 || !INTEGRAL_TYPE_P (rhs1_type
)
3683 || TREE_CODE (rhs2
) != INTEGER_CST
3684 || (2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)))
3686 error ("type mismatch in widening vector shift expression");
3687 debug_generic_expr (lhs_type
);
3688 debug_generic_expr (rhs1_type
);
3689 debug_generic_expr (rhs2_type
);
3696 case VEC_WIDEN_LSHIFT_HI_EXPR
:
3697 case VEC_WIDEN_LSHIFT_LO_EXPR
:
3699 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3700 || TREE_CODE (lhs_type
) != VECTOR_TYPE
3701 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type
))
3702 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type
))
3703 || TREE_CODE (rhs2
) != INTEGER_CST
3704 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type
))
3705 > TYPE_PRECISION (TREE_TYPE (lhs_type
))))
3707 error ("type mismatch in widening vector shift expression");
3708 debug_generic_expr (lhs_type
);
3709 debug_generic_expr (rhs1_type
);
3710 debug_generic_expr (rhs2_type
);
/* PLUS/MINUS: compare element types when operating on vectors.  */
3720 tree lhs_etype
= lhs_type
;
3721 tree rhs1_etype
= rhs1_type
;
3722 tree rhs2_etype
= rhs2_type
;
3723 if (TREE_CODE (lhs_type
) == VECTOR_TYPE
)
3725 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3726 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
)
3728 error ("invalid non-vector operands to vector valued plus");
3731 lhs_etype
= TREE_TYPE (lhs_type
);
3732 rhs1_etype
= TREE_TYPE (rhs1_type
);
3733 rhs2_etype
= TREE_TYPE (rhs2_type
);
/* Pointer arithmetic must use POINTER_PLUS_EXPR instead.  */
3735 if (POINTER_TYPE_P (lhs_etype
)
3736 || POINTER_TYPE_P (rhs1_etype
)
3737 || POINTER_TYPE_P (rhs2_etype
))
3739 error ("invalid (pointer) operands to plus/minus");
3743 /* Continue with generic binary expression handling. */
3747 case POINTER_PLUS_EXPR
:
3749 if (!POINTER_TYPE_P (rhs1_type
)
3750 || !useless_type_conversion_p (lhs_type
, rhs1_type
)
3751 || !ptrofftype_p (rhs2_type
))
3753 error ("type mismatch in pointer plus expression");
3754 debug_generic_stmt (lhs_type
);
3755 debug_generic_stmt (rhs1_type
);
3756 debug_generic_stmt (rhs2_type
);
3763 case TRUTH_ANDIF_EXPR
:
3764 case TRUTH_ORIF_EXPR
:
3765 case TRUTH_AND_EXPR
:
3767 case TRUTH_XOR_EXPR
:
3777 case UNORDERED_EXPR
:
3785 /* Comparisons are also binary, but the result type is not
3786 connected to the operand types. */
3787 return verify_gimple_comparison (lhs_type
, rhs1
, rhs2
);
3789 case WIDEN_MULT_EXPR
:
3790 if (TREE_CODE (lhs_type
) != INTEGER_TYPE
)
3792 return ((2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
))
3793 || (TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
)));
3795 case WIDEN_SUM_EXPR
:
3796 case VEC_WIDEN_MULT_HI_EXPR
:
3797 case VEC_WIDEN_MULT_LO_EXPR
:
3798 case VEC_WIDEN_MULT_EVEN_EXPR
:
3799 case VEC_WIDEN_MULT_ODD_EXPR
:
3800 case VEC_PACK_TRUNC_EXPR
:
3801 case VEC_PACK_SAT_EXPR
:
3802 case VEC_PACK_FIX_TRUNC_EXPR
:
3807 case MULT_HIGHPART_EXPR
:
3808 case TRUNC_DIV_EXPR
:
3810 case FLOOR_DIV_EXPR
:
3811 case ROUND_DIV_EXPR
:
3812 case TRUNC_MOD_EXPR
:
3814 case FLOOR_MOD_EXPR
:
3815 case ROUND_MOD_EXPR
:
3817 case EXACT_DIV_EXPR
:
3823 /* Continue with generic binary expression handling. */
/* Default: both operands must trivially convert to the result type.  */
3830 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
3831 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
3833 error ("type mismatch in binary expression");
3834 debug_generic_stmt (lhs_type
);
3835 debug_generic_stmt (rhs1_type
);
3836 debug_generic_stmt (rhs2_type
);
3843 /* Verify a gimple assignment statement STMT with a ternary rhs.
3844 Returns true if anything is wrong. */
/* NOTE(review): fragmentary — the switch head, some case labels
   (FMA_EXPR, COND_EXPR, VEC_PERM_EXPR around the numbering gaps),
   `return' statements and braces were lost in extraction.  */
3847 verify_gimple_assign_ternary (gimple stmt
)
3849 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3850 tree lhs
= gimple_assign_lhs (stmt
);
3851 tree lhs_type
= TREE_TYPE (lhs
);
3852 tree rhs1
= gimple_assign_rhs1 (stmt
);
3853 tree rhs1_type
= TREE_TYPE (rhs1
);
3854 tree rhs2
= gimple_assign_rhs2 (stmt
);
3855 tree rhs2_type
= TREE_TYPE (rhs2
);
3856 tree rhs3
= gimple_assign_rhs3 (stmt
);
3857 tree rhs3_type
= TREE_TYPE (rhs3
);
3859 if (!is_gimple_reg (lhs
))
3861 error ("non-register as LHS of ternary operation");
/* rhs1 of a (VEC_)COND_EXPR may be a condition; everything else must be
   a plain gimple value.  */
3865 if (((rhs_code
== VEC_COND_EXPR
|| rhs_code
== COND_EXPR
)
3866 ? !is_gimple_condexpr (rhs1
) : !is_gimple_val (rhs1
))
3867 || !is_gimple_val (rhs2
)
3868 || !is_gimple_val (rhs3
))
3870 error ("invalid operands in ternary operation");
3874 /* First handle operations that involve different types. */
3877 case WIDEN_MULT_PLUS_EXPR
:
3878 case WIDEN_MULT_MINUS_EXPR
:
3879 if ((!INTEGRAL_TYPE_P (rhs1_type
)
3880 && !FIXED_POINT_TYPE_P (rhs1_type
))
3881 || !useless_type_conversion_p (rhs1_type
, rhs2_type
)
3882 || !useless_type_conversion_p (lhs_type
, rhs3_type
)
3883 || 2 * TYPE_PRECISION (rhs1_type
) > TYPE_PRECISION (lhs_type
)
3884 || TYPE_PRECISION (rhs1_type
) != TYPE_PRECISION (rhs2_type
))
3886 error ("type mismatch in widening multiply-accumulate expression");
3887 debug_generic_expr (lhs_type
);
3888 debug_generic_expr (rhs1_type
);
3889 debug_generic_expr (rhs2_type
);
3890 debug_generic_expr (rhs3_type
);
/* Fused multiply-add: all three operands match the result type.  */
3896 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
3897 || !useless_type_conversion_p (lhs_type
, rhs2_type
)
3898 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
3900 error ("type mismatch in fused multiply-add expression");
3901 debug_generic_expr (lhs_type
);
3902 debug_generic_expr (rhs1_type
);
3903 debug_generic_expr (rhs2_type
);
3904 debug_generic_expr (rhs3_type
);
/* COND_EXPR-style: the two selected values match the result type.  */
3911 if (!useless_type_conversion_p (lhs_type
, rhs2_type
)
3912 || !useless_type_conversion_p (lhs_type
, rhs3_type
))
3914 error ("type mismatch in conditional expression");
3915 debug_generic_expr (lhs_type
);
3916 debug_generic_expr (rhs2_type
);
3917 debug_generic_expr (rhs3_type
);
/* VEC_PERM_EXPR: data operands match result; all operands vectors with
   equal element counts; mask an integer vector of matching element size.  */
3923 if (!useless_type_conversion_p (lhs_type
, rhs1_type
)
3924 || !useless_type_conversion_p (lhs_type
, rhs2_type
))
3926 error ("type mismatch in vector permute expression");
3927 debug_generic_expr (lhs_type
);
3928 debug_generic_expr (rhs1_type
);
3929 debug_generic_expr (rhs2_type
);
3930 debug_generic_expr (rhs3_type
);
3934 if (TREE_CODE (rhs1_type
) != VECTOR_TYPE
3935 || TREE_CODE (rhs2_type
) != VECTOR_TYPE
3936 || TREE_CODE (rhs3_type
) != VECTOR_TYPE
)
3938 error ("vector types expected in vector permute expression");
3939 debug_generic_expr (lhs_type
);
3940 debug_generic_expr (rhs1_type
);
3941 debug_generic_expr (rhs2_type
);
3942 debug_generic_expr (rhs3_type
);
3946 if (TYPE_VECTOR_SUBPARTS (rhs1_type
) != TYPE_VECTOR_SUBPARTS (rhs2_type
)
3947 || TYPE_VECTOR_SUBPARTS (rhs2_type
)
3948 != TYPE_VECTOR_SUBPARTS (rhs3_type
)
3949 || TYPE_VECTOR_SUBPARTS (rhs3_type
)
3950 != TYPE_VECTOR_SUBPARTS (lhs_type
))
3952 error ("vectors with different element number found "
3953 "in vector permute expression");
3954 debug_generic_expr (lhs_type
);
3955 debug_generic_expr (rhs1_type
);
3956 debug_generic_expr (rhs2_type
);
3957 debug_generic_expr (rhs3_type
);
3961 if (TREE_CODE (TREE_TYPE (rhs3_type
)) != INTEGER_TYPE
3962 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type
)))
3963 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type
))))
3965 error ("invalid mask type in vector permute expression");
3966 debug_generic_expr (lhs_type
);
3967 debug_generic_expr (rhs1_type
);
3968 debug_generic_expr (rhs2_type
);
3969 debug_generic_expr (rhs3_type
);
3976 case REALIGN_LOAD_EXPR
:
3986 /* Verify a gimple assignment statement STMT with a single rhs.
3987 Returns true if anything is wrong. */
3990 verify_gimple_assign_single (gimple stmt
)
3992 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3993 tree lhs
= gimple_assign_lhs (stmt
);
3994 tree lhs_type
= TREE_TYPE (lhs
);
3995 tree rhs1
= gimple_assign_rhs1 (stmt
);
3996 tree rhs1_type
= TREE_TYPE (rhs1
);
3999 if (!useless_type_conversion_p (lhs_type
, rhs1_type
))
4001 error ("non-trivial conversion at assignment");
4002 debug_generic_expr (lhs_type
);
4003 debug_generic_expr (rhs1_type
);
4007 if (gimple_clobber_p (stmt
)
4008 && !(DECL_P (lhs
) || TREE_CODE (lhs
) == MEM_REF
))
4010 error ("non-decl/MEM_REF LHS in clobber statement");
4011 debug_generic_expr (lhs
);
4015 if (handled_component_p (lhs
)
4016 || TREE_CODE (lhs
) == MEM_REF
4017 || TREE_CODE (lhs
) == TARGET_MEM_REF
)
4018 res
|= verify_types_in_gimple_reference (lhs
, true);
4020 /* Special codes we cannot handle via their class. */
4025 tree op
= TREE_OPERAND (rhs1
, 0);
4026 if (!is_gimple_addressable (op
))
4028 error ("invalid operand in unary expression");
4032 /* Technically there is no longer a need for matching types, but
4033 gimple hygiene asks for this check. In LTO we can end up
4034 combining incompatible units and thus end up with addresses
4035 of globals that change their type to a common one. */
4037 && !types_compatible_p (TREE_TYPE (op
),
4038 TREE_TYPE (TREE_TYPE (rhs1
)))
4039 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1
),
4042 error ("type mismatch in address expression");
4043 debug_generic_stmt (TREE_TYPE (rhs1
));
4044 debug_generic_stmt (TREE_TYPE (op
));
4048 return verify_types_in_gimple_reference (op
, true);
4053 error ("INDIRECT_REF in gimple IL");
4059 case ARRAY_RANGE_REF
:
4060 case VIEW_CONVERT_EXPR
:
4063 case TARGET_MEM_REF
:
4065 if (!is_gimple_reg (lhs
)
4066 && is_gimple_reg_type (TREE_TYPE (lhs
)))
4068 error ("invalid rhs for gimple memory store");
4069 debug_generic_stmt (lhs
);
4070 debug_generic_stmt (rhs1
);
4073 return res
|| verify_types_in_gimple_reference (rhs1
, false);
4085 /* tcc_declaration */
4090 if (!is_gimple_reg (lhs
)
4091 && !is_gimple_reg (rhs1
)
4092 && is_gimple_reg_type (TREE_TYPE (lhs
)))
4094 error ("invalid rhs for gimple memory store");
4095 debug_generic_stmt (lhs
);
4096 debug_generic_stmt (rhs1
);
4102 if (TREE_CODE (rhs1_type
) == VECTOR_TYPE
)
4105 tree elt_i
, elt_v
, elt_t
= NULL_TREE
;
4107 if (CONSTRUCTOR_NELTS (rhs1
) == 0)
4109 /* For vector CONSTRUCTORs we require that either it is empty
4110 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4111 (then the element count must be correct to cover the whole
4112 outer vector and index must be NULL on all elements, or it is
4113 a CONSTRUCTOR of scalar elements, where we as an exception allow
4114 smaller number of elements (assuming zero filling) and
4115 consecutive indexes as compared to NULL indexes (such
4116 CONSTRUCTORs can appear in the IL from FEs). */
4117 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1
), i
, elt_i
, elt_v
)
4119 if (elt_t
== NULL_TREE
)
4121 elt_t
= TREE_TYPE (elt_v
);
4122 if (TREE_CODE (elt_t
) == VECTOR_TYPE
)
4124 tree elt_t
= TREE_TYPE (elt_v
);
4125 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4128 error ("incorrect type of vector CONSTRUCTOR"
4130 debug_generic_stmt (rhs1
);
4133 else if (CONSTRUCTOR_NELTS (rhs1
)
4134 * TYPE_VECTOR_SUBPARTS (elt_t
)
4135 != TYPE_VECTOR_SUBPARTS (rhs1_type
))
4137 error ("incorrect number of vector CONSTRUCTOR"
4139 debug_generic_stmt (rhs1
);
4143 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type
),
4146 error ("incorrect type of vector CONSTRUCTOR elements");
4147 debug_generic_stmt (rhs1
);
4150 else if (CONSTRUCTOR_NELTS (rhs1
)
4151 > TYPE_VECTOR_SUBPARTS (rhs1_type
))
4153 error ("incorrect number of vector CONSTRUCTOR elements");
4154 debug_generic_stmt (rhs1
);
4158 else if (!useless_type_conversion_p (elt_t
, TREE_TYPE (elt_v
)))
4160 error ("incorrect type of vector CONSTRUCTOR elements");
4161 debug_generic_stmt (rhs1
);
4164 if (elt_i
!= NULL_TREE
4165 && (TREE_CODE (elt_t
) == VECTOR_TYPE
4166 || TREE_CODE (elt_i
) != INTEGER_CST
4167 || compare_tree_int (elt_i
, i
) != 0))
4169 error ("vector CONSTRUCTOR with non-NULL element index");
4170 debug_generic_stmt (rhs1
);
4178 case WITH_SIZE_EXPR
:
4188 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4189 is a problem, otherwise false. */
4192 verify_gimple_assign (gimple stmt
)
4194 switch (gimple_assign_rhs_class (stmt
))
4196 case GIMPLE_SINGLE_RHS
:
4197 return verify_gimple_assign_single (stmt
);
4199 case GIMPLE_UNARY_RHS
:
4200 return verify_gimple_assign_unary (stmt
);
4202 case GIMPLE_BINARY_RHS
:
4203 return verify_gimple_assign_binary (stmt
);
4205 case GIMPLE_TERNARY_RHS
:
4206 return verify_gimple_assign_ternary (stmt
);
4213 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4214 is a problem, otherwise false. */
4217 verify_gimple_return (gimple stmt
)
4219 tree op
= gimple_return_retval (stmt
);
4220 tree restype
= TREE_TYPE (TREE_TYPE (cfun
->decl
));
4222 /* We cannot test for present return values as we do not fix up missing
4223 return values from the original source. */
4227 if (!is_gimple_val (op
)
4228 && TREE_CODE (op
) != RESULT_DECL
)
4230 error ("invalid operand in return statement");
4231 debug_generic_stmt (op
);
4235 if ((TREE_CODE (op
) == RESULT_DECL
4236 && DECL_BY_REFERENCE (op
))
4237 || (TREE_CODE (op
) == SSA_NAME
4238 && SSA_NAME_VAR (op
)
4239 && TREE_CODE (SSA_NAME_VAR (op
)) == RESULT_DECL
4240 && DECL_BY_REFERENCE (SSA_NAME_VAR (op
))))
4241 op
= TREE_TYPE (op
);
4243 if (!useless_type_conversion_p (restype
, TREE_TYPE (op
)))
4245 error ("invalid conversion in return statement");
4246 debug_generic_stmt (restype
);
4247 debug_generic_stmt (TREE_TYPE (op
));
4255 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4256 is a problem, otherwise false. */
4259 verify_gimple_goto (gimple stmt
)
4261 tree dest
= gimple_goto_dest (stmt
);
4263 /* ??? We have two canonical forms of direct goto destinations, a
4264 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4265 if (TREE_CODE (dest
) != LABEL_DECL
4266 && (!is_gimple_val (dest
)
4267 || !POINTER_TYPE_P (TREE_TYPE (dest
))))
4269 error ("goto destination is neither a label nor a pointer");
4276 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4277 is a problem, otherwise false. */
4280 verify_gimple_switch (gimple stmt
)
4283 tree elt
, prev_upper_bound
= NULL_TREE
;
4284 tree index_type
, elt_type
= NULL_TREE
;
4286 if (!is_gimple_val (gimple_switch_index (stmt
)))
4288 error ("invalid operand to switch statement");
4289 debug_generic_stmt (gimple_switch_index (stmt
));
4293 index_type
= TREE_TYPE (gimple_switch_index (stmt
));
4294 if (! INTEGRAL_TYPE_P (index_type
))
4296 error ("non-integral type switch statement");
4297 debug_generic_expr (index_type
);
4301 elt
= gimple_switch_label (stmt
, 0);
4302 if (CASE_LOW (elt
) != NULL_TREE
|| CASE_HIGH (elt
) != NULL_TREE
)
4304 error ("invalid default case label in switch statement");
4305 debug_generic_expr (elt
);
4309 n
= gimple_switch_num_labels (stmt
);
4310 for (i
= 1; i
< n
; i
++)
4312 elt
= gimple_switch_label (stmt
, i
);
4314 if (! CASE_LOW (elt
))
4316 error ("invalid case label in switch statement");
4317 debug_generic_expr (elt
);
4321 && ! tree_int_cst_lt (CASE_LOW (elt
), CASE_HIGH (elt
)))
4323 error ("invalid case range in switch statement");
4324 debug_generic_expr (elt
);
4330 if (TREE_TYPE (CASE_LOW (elt
)) != elt_type
4331 || (CASE_HIGH (elt
) && TREE_TYPE (CASE_HIGH (elt
)) != elt_type
))
4333 error ("type mismatch for case label in switch statement");
4334 debug_generic_expr (elt
);
4340 elt_type
= TREE_TYPE (CASE_LOW (elt
));
4341 if (TYPE_PRECISION (index_type
) < TYPE_PRECISION (elt_type
))
4343 error ("type precision mismatch in switch statement");
4348 if (prev_upper_bound
)
4350 if (! tree_int_cst_lt (prev_upper_bound
, CASE_LOW (elt
)))
4352 error ("case labels not sorted in switch statement");
4357 prev_upper_bound
= CASE_HIGH (elt
);
4358 if (! prev_upper_bound
)
4359 prev_upper_bound
= CASE_LOW (elt
);
4365 /* Verify a gimple debug statement STMT.
4366 Returns true if anything is wrong. */
4369 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED
)
4371 /* There isn't much that could be wrong in a gimple debug stmt. A
4372 gimple debug bind stmt, for example, maps a tree, that's usually
4373 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4374 component or member of an aggregate type, to another tree, that
4375 can be an arbitrary expression. These stmts expand into debug
4376 insns, and are converted to debug notes by var-tracking.c. */
4380 /* Verify a gimple label statement STMT.
4381 Returns true if anything is wrong. */
4384 verify_gimple_label (gimple stmt
)
4386 tree decl
= gimple_label_label (stmt
);
4390 if (TREE_CODE (decl
) != LABEL_DECL
)
4392 if (!DECL_NONLOCAL (decl
) && !FORCED_LABEL (decl
)
4393 && DECL_CONTEXT (decl
) != current_function_decl
)
4395 error ("label's context is not the current function decl");
4399 uid
= LABEL_DECL_UID (decl
);
4402 || (*label_to_block_map_for_fn (cfun
))[uid
] != gimple_bb (stmt
)))
4404 error ("incorrect entry in label_to_block_map");
4408 uid
= EH_LANDING_PAD_NR (decl
);
4411 eh_landing_pad lp
= get_eh_landing_pad_from_number (uid
);
4412 if (decl
!= lp
->post_landing_pad
)
4414 error ("incorrect setting of landing pad number");
4422 /* Verify the GIMPLE statement STMT. Returns true if there is an
4423 error, otherwise false. */
4426 verify_gimple_stmt (gimple stmt
)
4428 switch (gimple_code (stmt
))
4431 return verify_gimple_assign (stmt
);
4434 return verify_gimple_label (stmt
);
4437 return verify_gimple_call (stmt
);
4440 if (TREE_CODE_CLASS (gimple_cond_code (stmt
)) != tcc_comparison
)
4442 error ("invalid comparison code in gimple cond");
4445 if (!(!gimple_cond_true_label (stmt
)
4446 || TREE_CODE (gimple_cond_true_label (stmt
)) == LABEL_DECL
)
4447 || !(!gimple_cond_false_label (stmt
)
4448 || TREE_CODE (gimple_cond_false_label (stmt
)) == LABEL_DECL
))
4450 error ("invalid labels in gimple cond");
4454 return verify_gimple_comparison (boolean_type_node
,
4455 gimple_cond_lhs (stmt
),
4456 gimple_cond_rhs (stmt
));
4459 return verify_gimple_goto (stmt
);
4462 return verify_gimple_switch (stmt
);
4465 return verify_gimple_return (stmt
);
4470 case GIMPLE_TRANSACTION
:
4471 return verify_gimple_transaction (stmt
);
4473 /* Tuples that do not have tree operands. */
4475 case GIMPLE_PREDICT
:
4477 case GIMPLE_EH_DISPATCH
:
4478 case GIMPLE_EH_MUST_NOT_THROW
:
4482 /* OpenMP directives are validated by the FE and never operated
4483 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4484 non-gimple expressions when the main index variable has had
4485 its address taken. This does not affect the loop itself
4486 because the header of an GIMPLE_OMP_FOR is merely used to determine
4487 how to setup the parallel iteration. */
4491 return verify_gimple_debug (stmt
);
4498 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4499 and false otherwise. */
4502 verify_gimple_phi (gimple phi
)
4506 tree phi_result
= gimple_phi_result (phi
);
4511 error ("invalid PHI result");
4515 virtual_p
= virtual_operand_p (phi_result
);
4516 if (TREE_CODE (phi_result
) != SSA_NAME
4518 && SSA_NAME_VAR (phi_result
) != gimple_vop (cfun
)))
4520 error ("invalid PHI result");
4524 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
4526 tree t
= gimple_phi_arg_def (phi
, i
);
4530 error ("missing PHI def");
4534 /* Addressable variables do have SSA_NAMEs but they
4535 are not considered gimple values. */
4536 else if ((TREE_CODE (t
) == SSA_NAME
4537 && virtual_p
!= virtual_operand_p (t
))
4539 && (TREE_CODE (t
) != SSA_NAME
4540 || SSA_NAME_VAR (t
) != gimple_vop (cfun
)))
4542 && !is_gimple_val (t
)))
4544 error ("invalid PHI argument");
4545 debug_generic_expr (t
);
4548 #ifdef ENABLE_TYPES_CHECKING
4549 if (!useless_type_conversion_p (TREE_TYPE (phi_result
), TREE_TYPE (t
)))
4551 error ("incompatible types in PHI argument %u", i
);
4552 debug_generic_stmt (TREE_TYPE (phi_result
));
4553 debug_generic_stmt (TREE_TYPE (t
));
4562 /* Verify the GIMPLE statements inside the sequence STMTS. */
4565 verify_gimple_in_seq_2 (gimple_seq stmts
)
4567 gimple_stmt_iterator ittr
;
4570 for (ittr
= gsi_start (stmts
); !gsi_end_p (ittr
); gsi_next (&ittr
))
4572 gimple stmt
= gsi_stmt (ittr
);
4574 switch (gimple_code (stmt
))
4577 err
|= verify_gimple_in_seq_2 (gimple_bind_body (stmt
));
4581 err
|= verify_gimple_in_seq_2 (gimple_try_eval (stmt
));
4582 err
|= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt
));
4585 case GIMPLE_EH_FILTER
:
4586 err
|= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt
));
4589 case GIMPLE_EH_ELSE
:
4590 err
|= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt
));
4591 err
|= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt
));
4595 err
|= verify_gimple_in_seq_2 (gimple_catch_handler (stmt
));
4598 case GIMPLE_TRANSACTION
:
4599 err
|= verify_gimple_transaction (stmt
);
4604 bool err2
= verify_gimple_stmt (stmt
);
4606 debug_gimple_stmt (stmt
);
4615 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4616 is a problem, otherwise false. */
4619 verify_gimple_transaction (gimple stmt
)
4621 tree lab
= gimple_transaction_label (stmt
);
4622 if (lab
!= NULL
&& TREE_CODE (lab
) != LABEL_DECL
)
4624 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt
));
4628 /* Verify the GIMPLE statements inside the statement list STMTS. */
4631 verify_gimple_in_seq (gimple_seq stmts
)
4633 timevar_push (TV_TREE_STMT_VERIFY
);
4634 if (verify_gimple_in_seq_2 (stmts
))
4635 internal_error ("verify_gimple failed");
4636 timevar_pop (TV_TREE_STMT_VERIFY
);
4639 /* Return true when the T can be shared. */
4642 tree_node_can_be_shared (tree t
)
4644 if (IS_TYPE_OR_DECL_P (t
)
4645 || is_gimple_min_invariant (t
)
4646 || TREE_CODE (t
) == SSA_NAME
4647 || t
== error_mark_node
4648 || TREE_CODE (t
) == IDENTIFIER_NODE
)
4651 if (TREE_CODE (t
) == CASE_LABEL_EXPR
)
4660 /* Called via walk_tree. Verify tree sharing. */
4663 verify_node_sharing_1 (tree
*tp
, int *walk_subtrees
, void *data
)
4665 struct pointer_set_t
*visited
= (struct pointer_set_t
*) data
;
4667 if (tree_node_can_be_shared (*tp
))
4669 *walk_subtrees
= false;
4673 if (pointer_set_insert (visited
, *tp
))
4679 /* Called via walk_gimple_stmt. Verify tree sharing. */
4682 verify_node_sharing (tree
*tp
, int *walk_subtrees
, void *data
)
4684 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
4685 return verify_node_sharing_1 (tp
, walk_subtrees
, wi
->info
);
4688 static bool eh_error_found
;
4690 verify_eh_throw_stmt_node (void **slot
, void *data
)
4692 struct throw_stmt_node
*node
= (struct throw_stmt_node
*)*slot
;
4693 struct pointer_set_t
*visited
= (struct pointer_set_t
*) data
;
4695 if (!pointer_set_contains (visited
, node
->stmt
))
4697 error ("dead STMT in EH table");
4698 debug_gimple_stmt (node
->stmt
);
4699 eh_error_found
= true;
4704 /* Verify if the location LOCs block is in BLOCKS. */
4707 verify_location (pointer_set_t
*blocks
, location_t loc
)
4709 tree block
= LOCATION_BLOCK (loc
);
4710 if (block
!= NULL_TREE
4711 && !pointer_set_contains (blocks
, block
))
4713 error ("location references block not in block tree");
4716 if (block
!= NULL_TREE
)
4717 return verify_location (blocks
, BLOCK_SOURCE_LOCATION (block
));
4721 /* Called via walk_tree. Verify that expressions have no blocks. */
4724 verify_expr_no_block (tree
*tp
, int *walk_subtrees
, void *)
4728 *walk_subtrees
= false;
4732 location_t loc
= EXPR_LOCATION (*tp
);
4733 if (LOCATION_BLOCK (loc
) != NULL
)
4739 /* Called via walk_tree. Verify locations of expressions. */
4742 verify_expr_location_1 (tree
*tp
, int *walk_subtrees
, void *data
)
4744 struct pointer_set_t
*blocks
= (struct pointer_set_t
*) data
;
4746 if (TREE_CODE (*tp
) == VAR_DECL
4747 && DECL_HAS_DEBUG_EXPR_P (*tp
))
4749 tree t
= DECL_DEBUG_EXPR (*tp
);
4750 tree addr
= walk_tree (&t
, verify_expr_no_block
, NULL
, NULL
);
4754 if ((TREE_CODE (*tp
) == VAR_DECL
4755 || TREE_CODE (*tp
) == PARM_DECL
4756 || TREE_CODE (*tp
) == RESULT_DECL
)
4757 && DECL_HAS_VALUE_EXPR_P (*tp
))
4759 tree t
= DECL_VALUE_EXPR (*tp
);
4760 tree addr
= walk_tree (&t
, verify_expr_no_block
, NULL
, NULL
);
4767 *walk_subtrees
= false;
4771 location_t loc
= EXPR_LOCATION (*tp
);
4772 if (verify_location (blocks
, loc
))
4778 /* Called via walk_gimple_op. Verify locations of expressions. */
4781 verify_expr_location (tree
*tp
, int *walk_subtrees
, void *data
)
4783 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
4784 return verify_expr_location_1 (tp
, walk_subtrees
, wi
->info
);
4787 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4790 collect_subblocks (pointer_set_t
*blocks
, tree block
)
4793 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
4795 pointer_set_insert (blocks
, t
);
4796 collect_subblocks (blocks
, t
);
4800 /* Verify the GIMPLE statements in the CFG of FN. */
4803 verify_gimple_in_cfg (struct function
*fn
, bool verify_nothrow
)
4807 struct pointer_set_t
*visited
, *visited_stmts
, *blocks
;
4809 timevar_push (TV_TREE_STMT_VERIFY
);
4810 visited
= pointer_set_create ();
4811 visited_stmts
= pointer_set_create ();
4813 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4814 blocks
= pointer_set_create ();
4815 if (DECL_INITIAL (fn
->decl
))
4817 pointer_set_insert (blocks
, DECL_INITIAL (fn
->decl
));
4818 collect_subblocks (blocks
, DECL_INITIAL (fn
->decl
));
4821 FOR_EACH_BB_FN (bb
, fn
)
4823 gimple_stmt_iterator gsi
;
4825 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4827 gimple phi
= gsi_stmt (gsi
);
4831 pointer_set_insert (visited_stmts
, phi
);
4833 if (gimple_bb (phi
) != bb
)
4835 error ("gimple_bb (phi) is set to a wrong basic block");
4839 err2
|= verify_gimple_phi (phi
);
4841 /* Only PHI arguments have locations. */
4842 if (gimple_location (phi
) != UNKNOWN_LOCATION
)
4844 error ("PHI node with location");
4848 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
4850 tree arg
= gimple_phi_arg_def (phi
, i
);
4851 tree addr
= walk_tree (&arg
, verify_node_sharing_1
,
4855 error ("incorrect sharing of tree nodes");
4856 debug_generic_expr (addr
);
4859 location_t loc
= gimple_phi_arg_location (phi
, i
);
4860 if (virtual_operand_p (gimple_phi_result (phi
))
4861 && loc
!= UNKNOWN_LOCATION
)
4863 error ("virtual PHI with argument locations");
4866 addr
= walk_tree (&arg
, verify_expr_location_1
, blocks
, NULL
);
4869 debug_generic_expr (addr
);
4872 err2
|= verify_location (blocks
, loc
);
4876 debug_gimple_stmt (phi
);
4880 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4882 gimple stmt
= gsi_stmt (gsi
);
4884 struct walk_stmt_info wi
;
4888 pointer_set_insert (visited_stmts
, stmt
);
4890 if (gimple_bb (stmt
) != bb
)
4892 error ("gimple_bb (stmt) is set to a wrong basic block");
4896 err2
|= verify_gimple_stmt (stmt
);
4897 err2
|= verify_location (blocks
, gimple_location (stmt
));
4899 memset (&wi
, 0, sizeof (wi
));
4900 wi
.info
= (void *) visited
;
4901 addr
= walk_gimple_op (stmt
, verify_node_sharing
, &wi
);
4904 error ("incorrect sharing of tree nodes");
4905 debug_generic_expr (addr
);
4909 memset (&wi
, 0, sizeof (wi
));
4910 wi
.info
= (void *) blocks
;
4911 addr
= walk_gimple_op (stmt
, verify_expr_location
, &wi
);
4914 debug_generic_expr (addr
);
4918 /* ??? Instead of not checking these stmts at all the walker
4919 should know its context via wi. */
4920 if (!is_gimple_debug (stmt
)
4921 && !is_gimple_omp (stmt
))
4923 memset (&wi
, 0, sizeof (wi
));
4924 addr
= walk_gimple_op (stmt
, verify_expr
, &wi
);
4927 debug_generic_expr (addr
);
4928 inform (gimple_location (stmt
), "in statement");
4933 /* If the statement is marked as part of an EH region, then it is
4934 expected that the statement could throw. Verify that when we
4935 have optimizations that simplify statements such that we prove
4936 that they cannot throw, that we update other data structures
4938 lp_nr
= lookup_stmt_eh_lp (stmt
);
4941 if (!stmt_could_throw_p (stmt
))
4945 error ("statement marked for throw, but doesn%'t");
4949 else if (!gsi_one_before_end_p (gsi
))
4951 error ("statement marked for throw in middle of block");
4957 debug_gimple_stmt (stmt
);
4962 eh_error_found
= false;
4963 if (get_eh_throw_stmt_table (cfun
))
4964 htab_traverse (get_eh_throw_stmt_table (cfun
),
4965 verify_eh_throw_stmt_node
,
4968 if (err
|| eh_error_found
)
4969 internal_error ("verify_gimple failed");
4971 pointer_set_destroy (visited
);
4972 pointer_set_destroy (visited_stmts
);
4973 pointer_set_destroy (blocks
);
4974 verify_histograms ();
4975 timevar_pop (TV_TREE_STMT_VERIFY
);
4979 /* Verifies that the flow information is OK. */
4982 gimple_verify_flow_info (void)
4986 gimple_stmt_iterator gsi
;
4991 if (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
4992 || ENTRY_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
4994 error ("ENTRY_BLOCK has IL associated with it");
4998 if (EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.seq
4999 || EXIT_BLOCK_PTR_FOR_FN (cfun
)->il
.gimple
.phi_nodes
)
5001 error ("EXIT_BLOCK has IL associated with it");
5005 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
5006 if (e
->flags
& EDGE_FALLTHRU
)
5008 error ("fallthru to exit from bb %d", e
->src
->index
);
5012 FOR_EACH_BB_FN (bb
, cfun
)
5014 bool found_ctrl_stmt
= false;
5018 /* Skip labels on the start of basic block. */
5019 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5022 gimple prev_stmt
= stmt
;
5024 stmt
= gsi_stmt (gsi
);
5026 if (gimple_code (stmt
) != GIMPLE_LABEL
)
5029 label
= gimple_label_label (stmt
);
5030 if (prev_stmt
&& DECL_NONLOCAL (label
))
5032 error ("nonlocal label ");
5033 print_generic_expr (stderr
, label
, 0);
5034 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
5039 if (prev_stmt
&& EH_LANDING_PAD_NR (label
) != 0)
5041 error ("EH landing pad label ");
5042 print_generic_expr (stderr
, label
, 0);
5043 fprintf (stderr
, " is not first in a sequence of labels in bb %d",
5048 if (label_to_block (label
) != bb
)
5051 print_generic_expr (stderr
, label
, 0);
5052 fprintf (stderr
, " to block does not match in bb %d",
5057 if (decl_function_context (label
) != current_function_decl
)
5060 print_generic_expr (stderr
, label
, 0);
5061 fprintf (stderr
, " has incorrect context in bb %d",
5067 /* Verify that body of basic block BB is free of control flow. */
5068 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
5070 gimple stmt
= gsi_stmt (gsi
);
5072 if (found_ctrl_stmt
)
5074 error ("control flow in the middle of basic block %d",
5079 if (stmt_ends_bb_p (stmt
))
5080 found_ctrl_stmt
= true;
5082 if (gimple_code (stmt
) == GIMPLE_LABEL
)
5085 print_generic_expr (stderr
, gimple_label_label (stmt
), 0);
5086 fprintf (stderr
, " in the middle of basic block %d", bb
->index
);
5091 gsi
= gsi_last_bb (bb
);
5092 if (gsi_end_p (gsi
))
5095 stmt
= gsi_stmt (gsi
);
5097 if (gimple_code (stmt
) == GIMPLE_LABEL
)
5100 err
|= verify_eh_edges (stmt
);
5102 if (is_ctrl_stmt (stmt
))
5104 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5105 if (e
->flags
& EDGE_FALLTHRU
)
5107 error ("fallthru edge after a control statement in bb %d",
5113 if (gimple_code (stmt
) != GIMPLE_COND
)
5115 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5116 after anything else but if statement. */
5117 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5118 if (e
->flags
& (EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
))
5120 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5126 switch (gimple_code (stmt
))
5133 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
5137 || !(true_edge
->flags
& EDGE_TRUE_VALUE
)
5138 || !(false_edge
->flags
& EDGE_FALSE_VALUE
)
5139 || (true_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5140 || (false_edge
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
))
5141 || EDGE_COUNT (bb
->succs
) >= 3)
5143 error ("wrong outgoing edge flags at end of bb %d",
5151 if (simple_goto_p (stmt
))
5153 error ("explicit goto at end of bb %d", bb
->index
);
5158 /* FIXME. We should double check that the labels in the
5159 destination blocks have their address taken. */
5160 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5161 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_TRUE_VALUE
5162 | EDGE_FALSE_VALUE
))
5163 || !(e
->flags
& EDGE_ABNORMAL
))
5165 error ("wrong outgoing edge flags at end of bb %d",
5173 if (!gimple_call_builtin_p (stmt
, BUILT_IN_RETURN
))
5175 /* ... fallthru ... */
5177 if (!single_succ_p (bb
)
5178 || (single_succ_edge (bb
)->flags
5179 & (EDGE_FALLTHRU
| EDGE_ABNORMAL
5180 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5182 error ("wrong outgoing edge flags at end of bb %d", bb
->index
);
5185 if (single_succ (bb
) != EXIT_BLOCK_PTR_FOR_FN (cfun
))
5187 error ("return edge does not point to exit in bb %d",
5199 n
= gimple_switch_num_labels (stmt
);
5201 /* Mark all the destination basic blocks. */
5202 for (i
= 0; i
< n
; ++i
)
5204 tree lab
= CASE_LABEL (gimple_switch_label (stmt
, i
));
5205 basic_block label_bb
= label_to_block (lab
);
5206 gcc_assert (!label_bb
->aux
|| label_bb
->aux
== (void *)1);
5207 label_bb
->aux
= (void *)1;
5210 /* Verify that the case labels are sorted. */
5211 prev
= gimple_switch_label (stmt
, 0);
5212 for (i
= 1; i
< n
; ++i
)
5214 tree c
= gimple_switch_label (stmt
, i
);
5217 error ("found default case not at the start of "
5223 && !tree_int_cst_lt (CASE_LOW (prev
), CASE_LOW (c
)))
5225 error ("case labels not sorted: ");
5226 print_generic_expr (stderr
, prev
, 0);
5227 fprintf (stderr
," is greater than ");
5228 print_generic_expr (stderr
, c
, 0);
5229 fprintf (stderr
," but comes before it.\n");
5234 /* VRP will remove the default case if it can prove it will
5235 never be executed. So do not verify there always exists
5236 a default case here. */
5238 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5242 error ("extra outgoing edge %d->%d",
5243 bb
->index
, e
->dest
->index
);
5247 e
->dest
->aux
= (void *)2;
5248 if ((e
->flags
& (EDGE_FALLTHRU
| EDGE_ABNORMAL
5249 | EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
)))
5251 error ("wrong outgoing edge flags at end of bb %d",
5257 /* Check that we have all of them. */
5258 for (i
= 0; i
< n
; ++i
)
5260 tree lab
= CASE_LABEL (gimple_switch_label (stmt
, i
));
5261 basic_block label_bb
= label_to_block (lab
);
5263 if (label_bb
->aux
!= (void *)2)
5265 error ("missing edge %i->%i", bb
->index
, label_bb
->index
);
5270 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5271 e
->dest
->aux
= (void *)0;
5275 case GIMPLE_EH_DISPATCH
:
5276 err
|= verify_eh_dispatch_edge (stmt
);
5284 if (dom_info_state (CDI_DOMINATORS
) >= DOM_NO_FAST_QUERY
)
5285 verify_dominators (CDI_DOMINATORS
);
5291 /* Updates phi nodes after creating a forwarder block joined
5292 by edge FALLTHRU. */
5295 gimple_make_forwarder_block (edge fallthru
)
5299 basic_block dummy
, bb
;
5301 gimple_stmt_iterator gsi
;
5303 dummy
= fallthru
->src
;
5304 bb
= fallthru
->dest
;
5306 if (single_pred_p (bb
))
5309 /* If we redirected a branch we must create new PHI nodes at the
5311 for (gsi
= gsi_start_phis (dummy
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5313 gimple phi
, new_phi
;
5315 phi
= gsi_stmt (gsi
);
5316 var
= gimple_phi_result (phi
);
5317 new_phi
= create_phi_node (var
, bb
);
5318 gimple_phi_set_result (phi
, copy_ssa_name (var
, phi
));
5319 add_phi_arg (new_phi
, gimple_phi_result (phi
), fallthru
,
5323 /* Add the arguments we have stored on edges. */
5324 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
5329 flush_pending_stmts (e
);
5334 /* Return a non-special label in the head of basic block BLOCK.
5335 Create one if it doesn't exist. */
5338 gimple_block_label (basic_block bb
)
5340 gimple_stmt_iterator i
, s
= gsi_start_bb (bb
);
5345 for (i
= s
; !gsi_end_p (i
); first
= false, gsi_next (&i
))
5347 stmt
= gsi_stmt (i
);
5348 if (gimple_code (stmt
) != GIMPLE_LABEL
)
5350 label
= gimple_label_label (stmt
);
5351 if (!DECL_NONLOCAL (label
))
5354 gsi_move_before (&i
, &s
);
5359 label
= create_artificial_label (UNKNOWN_LOCATION
);
5360 stmt
= gimple_build_label (label
);
5361 gsi_insert_before (&s
, stmt
, GSI_NEW_STMT
);
5366 /* Attempt to perform edge redirection by replacing a possibly complex
5367 jump instruction by a goto or by removing the jump completely.
5368 This can apply only if all edges now point to the same block. The
5369 parameters and return values are equivalent to
5370 redirect_edge_and_branch. */
5373 gimple_try_redirect_by_replacing_jump (edge e
, basic_block target
)
5375 basic_block src
= e
->src
;
5376 gimple_stmt_iterator i
;
5379 /* We can replace or remove a complex jump only when we have exactly
5381 if (EDGE_COUNT (src
->succs
) != 2
5382 /* Verify that all targets will be TARGET. Specifically, the
5383 edge that is not E must also go to TARGET. */
5384 || EDGE_SUCC (src
, EDGE_SUCC (src
, 0) == e
)->dest
!= target
)
5387 i
= gsi_last_bb (src
);
5391 stmt
= gsi_stmt (i
);
5393 if (gimple_code (stmt
) == GIMPLE_COND
|| gimple_code (stmt
) == GIMPLE_SWITCH
)
5395 gsi_remove (&i
, true);
5396 e
= ssa_redirect_edge (e
, target
);
5397 e
->flags
= EDGE_FALLTHRU
;
5405 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5406 edge representing the redirected branch. */
5409 gimple_redirect_edge_and_branch (edge e
, basic_block dest
)
5411 basic_block bb
= e
->src
;
5412 gimple_stmt_iterator gsi
;
5416 if (e
->flags
& EDGE_ABNORMAL
)
5419 if (e
->dest
== dest
)
5422 if (e
->flags
& EDGE_EH
)
5423 return redirect_eh_edge (e
, dest
);
5425 if (e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
))
5427 ret
= gimple_try_redirect_by_replacing_jump (e
, dest
);
5432 gsi
= gsi_last_bb (bb
);
5433 stmt
= gsi_end_p (gsi
) ? NULL
: gsi_stmt (gsi
);
5435 switch (stmt
? gimple_code (stmt
) : GIMPLE_ERROR_MARK
)
5438 /* For COND_EXPR, we only need to redirect the edge. */
5442 /* No non-abnormal edges should lead from a non-simple goto, and
5443 simple ones should be represented implicitly. */
5448 tree label
= gimple_block_label (dest
);
5449 tree cases
= get_cases_for_edge (e
, stmt
);
5451 /* If we have a list of cases associated with E, then use it
5452 as it's a lot faster than walking the entire case vector. */
5455 edge e2
= find_edge (e
->src
, dest
);
5462 CASE_LABEL (cases
) = label
;
5463 cases
= CASE_CHAIN (cases
);
5466 /* If there was already an edge in the CFG, then we need
5467 to move all the cases associated with E to E2. */
5470 tree cases2
= get_cases_for_edge (e2
, stmt
);
5472 CASE_CHAIN (last
) = CASE_CHAIN (cases2
);
5473 CASE_CHAIN (cases2
) = first
;
5475 bitmap_set_bit (touched_switch_bbs
, gimple_bb (stmt
)->index
);
5479 size_t i
, n
= gimple_switch_num_labels (stmt
);
5481 for (i
= 0; i
< n
; i
++)
5483 tree elt
= gimple_switch_label (stmt
, i
);
5484 if (label_to_block (CASE_LABEL (elt
)) == e
->dest
)
5485 CASE_LABEL (elt
) = label
;
5493 int i
, n
= gimple_asm_nlabels (stmt
);
5496 for (i
= 0; i
< n
; ++i
)
5498 tree cons
= gimple_asm_label_op (stmt
, i
);
5499 if (label_to_block (TREE_VALUE (cons
)) == e
->dest
)
5502 label
= gimple_block_label (dest
);
5503 TREE_VALUE (cons
) = label
;
5507 /* If we didn't find any label matching the former edge in the
5508 asm labels, we must be redirecting the fallthrough
5510 gcc_assert (label
|| (e
->flags
& EDGE_FALLTHRU
));
5515 gsi_remove (&gsi
, true);
5516 e
->flags
|= EDGE_FALLTHRU
;
5519 case GIMPLE_OMP_RETURN
:
5520 case GIMPLE_OMP_CONTINUE
:
5521 case GIMPLE_OMP_SECTIONS_SWITCH
:
5522 case GIMPLE_OMP_FOR
:
5523 /* The edges from OMP constructs can be simply redirected. */
5526 case GIMPLE_EH_DISPATCH
:
5527 if (!(e
->flags
& EDGE_FALLTHRU
))
5528 redirect_eh_dispatch_edge (stmt
, e
, dest
);
5531 case GIMPLE_TRANSACTION
:
5532 /* The ABORT edge has a stored label associated with it, otherwise
5533 the edges are simply redirectable. */
5535 gimple_transaction_set_label (stmt
, gimple_block_label (dest
));
5539 /* Otherwise it must be a fallthru edge, and we don't need to
5540 do anything besides redirecting it. */
5541 gcc_assert (e
->flags
& EDGE_FALLTHRU
);
5545 /* Update/insert PHI nodes as necessary. */
5547 /* Now update the edges in the CFG. */
5548 e
= ssa_redirect_edge (e
, dest
);
5553 /* Returns true if it is possible to remove edge E by redirecting
5554 it to the destination of the other edge from E->src. */
5557 gimple_can_remove_branch_p (const_edge e
)
5559 if (e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
))
5565 /* Simple wrapper, as we can always redirect fallthru edges. */
5568 gimple_redirect_edge_and_branch_force (edge e
, basic_block dest
)
5570 e
= gimple_redirect_edge_and_branch (e
, dest
);
5577 /* Splits basic block BB after statement STMT (but at least after the
5578 labels). If STMT is NULL, BB is split just after the labels. */
5581 gimple_split_block (basic_block bb
, void *stmt
)
5583 gimple_stmt_iterator gsi
;
5584 gimple_stmt_iterator gsi_tgt
;
5591 new_bb
= create_empty_bb (bb
);
5593 /* Redirect the outgoing edges. */
5594 new_bb
->succs
= bb
->succs
;
5596 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
5599 if (stmt
&& gimple_code ((gimple
) stmt
) == GIMPLE_LABEL
)
5602 /* Move everything from GSI to the new basic block. */
5603 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5605 act
= gsi_stmt (gsi
);
5606 if (gimple_code (act
) == GIMPLE_LABEL
)
5619 if (gsi_end_p (gsi
))
5622 /* Split the statement list - avoid re-creating new containers as this
5623 brings ugly quadratic memory consumption in the inliner.
5624 (We are still quadratic since we need to update stmt BB pointers,
5626 gsi_split_seq_before (&gsi
, &list
);
5627 set_bb_seq (new_bb
, list
);
5628 for (gsi_tgt
= gsi_start (list
);
5629 !gsi_end_p (gsi_tgt
); gsi_next (&gsi_tgt
))
5630 gimple_set_bb (gsi_stmt (gsi_tgt
), new_bb
);
5636 /* Moves basic block BB after block AFTER. */
5639 gimple_move_block_after (basic_block bb
, basic_block after
)
5641 if (bb
->prev_bb
== after
)
5645 link_block (bb
, after
);
5651 /* Return TRUE if block BB has no executable statements, otherwise return
5655 gimple_empty_block_p (basic_block bb
)
5657 /* BB must have no executable statements. */
5658 gimple_stmt_iterator gsi
= gsi_after_labels (bb
);
5661 if (gsi_end_p (gsi
))
5663 if (is_gimple_debug (gsi_stmt (gsi
)))
5664 gsi_next_nondebug (&gsi
);
5665 return gsi_end_p (gsi
);
5669 /* Split a basic block if it ends with a conditional branch and if the
5670 other part of the block is not empty. */
5673 gimple_split_block_before_cond_jump (basic_block bb
)
5675 gimple last
, split_point
;
5676 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
5677 if (gsi_end_p (gsi
))
5679 last
= gsi_stmt (gsi
);
5680 if (gimple_code (last
) != GIMPLE_COND
5681 && gimple_code (last
) != GIMPLE_SWITCH
)
5683 gsi_prev_nondebug (&gsi
);
5684 split_point
= gsi_stmt (gsi
);
5685 return split_block (bb
, split_point
)->dest
;
5689 /* Return true if basic_block can be duplicated. */
5692 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED
)
5697 /* Create a duplicate of the basic block BB. NOTE: This does not
5698 preserve SSA form. */
5701 gimple_duplicate_bb (basic_block bb
)
5704 gimple_stmt_iterator gsi
, gsi_tgt
;
5705 gimple_seq phis
= phi_nodes (bb
);
5706 gimple phi
, stmt
, copy
;
5708 new_bb
= create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
);
5710 /* Copy the PHI nodes. We ignore PHI node arguments here because
5711 the incoming edges have not been setup yet. */
5712 for (gsi
= gsi_start (phis
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5714 phi
= gsi_stmt (gsi
);
5715 copy
= create_phi_node (NULL_TREE
, new_bb
);
5716 create_new_def_for (gimple_phi_result (phi
), copy
,
5717 gimple_phi_result_ptr (copy
));
5718 gimple_set_uid (copy
, gimple_uid (phi
));
5721 gsi_tgt
= gsi_start_bb (new_bb
);
5722 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5724 def_operand_p def_p
;
5725 ssa_op_iter op_iter
;
5728 stmt
= gsi_stmt (gsi
);
5729 if (gimple_code (stmt
) == GIMPLE_LABEL
)
5732 /* Don't duplicate label debug stmts. */
5733 if (gimple_debug_bind_p (stmt
)
5734 && TREE_CODE (gimple_debug_bind_get_var (stmt
))
5738 /* Create a new copy of STMT and duplicate STMT's virtual
5740 copy
= gimple_copy (stmt
);
5741 gsi_insert_after (&gsi_tgt
, copy
, GSI_NEW_STMT
);
5743 maybe_duplicate_eh_stmt (copy
, stmt
);
5744 gimple_duplicate_stmt_histograms (cfun
, copy
, cfun
, stmt
);
5746 /* When copying around a stmt writing into a local non-user
5747 aggregate, make sure it won't share stack slot with other
5749 lhs
= gimple_get_lhs (stmt
);
5750 if (lhs
&& TREE_CODE (lhs
) != SSA_NAME
)
5752 tree base
= get_base_address (lhs
);
5754 && (TREE_CODE (base
) == VAR_DECL
5755 || TREE_CODE (base
) == RESULT_DECL
)
5756 && DECL_IGNORED_P (base
)
5757 && !TREE_STATIC (base
)
5758 && !DECL_EXTERNAL (base
)
5759 && (TREE_CODE (base
) != VAR_DECL
5760 || !DECL_HAS_VALUE_EXPR_P (base
)))
5761 DECL_NONSHAREABLE (base
) = 1;
5764 /* Create new names for all the definitions created by COPY and
5765 add replacement mappings for each new name. */
5766 FOR_EACH_SSA_DEF_OPERAND (def_p
, copy
, op_iter
, SSA_OP_ALL_DEFS
)
5767 create_new_def_for (DEF_FROM_PTR (def_p
), copy
, def_p
);
5773 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5776 add_phi_args_after_copy_edge (edge e_copy
)
5778 basic_block bb
, bb_copy
= e_copy
->src
, dest
;
5781 gimple phi
, phi_copy
;
5783 gimple_stmt_iterator psi
, psi_copy
;
5785 if (gimple_seq_empty_p (phi_nodes (e_copy
->dest
)))
5788 bb
= bb_copy
->flags
& BB_DUPLICATED
? get_bb_original (bb_copy
) : bb_copy
;
5790 if (e_copy
->dest
->flags
& BB_DUPLICATED
)
5791 dest
= get_bb_original (e_copy
->dest
);
5793 dest
= e_copy
->dest
;
5795 e
= find_edge (bb
, dest
);
5798 /* During loop unrolling the target of the latch edge is copied.
5799 In this case we are not looking for edge to dest, but to
5800 duplicated block whose original was dest. */
5801 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5803 if ((e
->dest
->flags
& BB_DUPLICATED
)
5804 && get_bb_original (e
->dest
) == dest
)
5808 gcc_assert (e
!= NULL
);
5811 for (psi
= gsi_start_phis (e
->dest
),
5812 psi_copy
= gsi_start_phis (e_copy
->dest
);
5814 gsi_next (&psi
), gsi_next (&psi_copy
))
5816 phi
= gsi_stmt (psi
);
5817 phi_copy
= gsi_stmt (psi_copy
);
5818 def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
5819 add_phi_arg (phi_copy
, def
, e_copy
,
5820 gimple_phi_arg_location_from_edge (phi
, e
));
5825 /* Basic block BB_COPY was created by code duplication. Add phi node
5826 arguments for edges going out of BB_COPY. The blocks that were
5827 duplicated have BB_DUPLICATED set. */
5830 add_phi_args_after_copy_bb (basic_block bb_copy
)
5835 FOR_EACH_EDGE (e_copy
, ei
, bb_copy
->succs
)
5837 add_phi_args_after_copy_edge (e_copy
);
5841 /* Blocks in REGION_COPY array of length N_REGION were created by
5842 duplication of basic blocks. Add phi node arguments for edges
5843 going from these blocks. If E_COPY is not NULL, also add
5844 phi node arguments for its destination.*/
5847 add_phi_args_after_copy (basic_block
*region_copy
, unsigned n_region
,
5852 for (i
= 0; i
< n_region
; i
++)
5853 region_copy
[i
]->flags
|= BB_DUPLICATED
;
5855 for (i
= 0; i
< n_region
; i
++)
5856 add_phi_args_after_copy_bb (region_copy
[i
]);
5858 add_phi_args_after_copy_edge (e_copy
);
5860 for (i
= 0; i
< n_region
; i
++)
5861 region_copy
[i
]->flags
&= ~BB_DUPLICATED
;
5864 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5865 important exit edge EXIT. By important we mean that no SSA name defined
5866 inside region is live over the other exit edges of the region. All entry
5867 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5868 to the duplicate of the region. Dominance and loop information is
5869 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5870 UPDATE_DOMINANCE is false then we assume that the caller will update the
5871 dominance information after calling this function. The new basic
5872 blocks are stored to REGION_COPY in the same order as they had in REGION,
5873 provided that REGION_COPY is not NULL.
5874 The function returns false if it is unable to copy the region,
5878 gimple_duplicate_sese_region (edge entry
, edge exit
,
5879 basic_block
*region
, unsigned n_region
,
5880 basic_block
*region_copy
,
5881 bool update_dominance
)
5884 bool free_region_copy
= false, copying_header
= false;
5885 struct loop
*loop
= entry
->dest
->loop_father
;
5887 vec
<basic_block
> doms
;
5889 int total_freq
= 0, entry_freq
= 0;
5890 gcov_type total_count
= 0, entry_count
= 0;
5892 if (!can_copy_bbs_p (region
, n_region
))
5895 /* Some sanity checking. Note that we do not check for all possible
5896 missuses of the functions. I.e. if you ask to copy something weird,
5897 it will work, but the state of structures probably will not be
5899 for (i
= 0; i
< n_region
; i
++)
5901 /* We do not handle subloops, i.e. all the blocks must belong to the
5903 if (region
[i
]->loop_father
!= loop
)
5906 if (region
[i
] != entry
->dest
5907 && region
[i
] == loop
->header
)
5911 /* In case the function is used for loop header copying (which is the primary
5912 use), ensure that EXIT and its copy will be new latch and entry edges. */
5913 if (loop
->header
== entry
->dest
)
5915 copying_header
= true;
5917 if (!dominated_by_p (CDI_DOMINATORS
, loop
->latch
, exit
->src
))
5920 for (i
= 0; i
< n_region
; i
++)
5921 if (region
[i
] != exit
->src
5922 && dominated_by_p (CDI_DOMINATORS
, region
[i
], exit
->src
))
5926 initialize_original_copy_tables ();
5929 set_loop_copy (loop
, loop_outer (loop
));
5931 set_loop_copy (loop
, loop
);
5935 region_copy
= XNEWVEC (basic_block
, n_region
);
5936 free_region_copy
= true;
5939 /* Record blocks outside the region that are dominated by something
5941 if (update_dominance
)
5944 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
5947 if (entry
->dest
->count
)
5949 total_count
= entry
->dest
->count
;
5950 entry_count
= entry
->count
;
5951 /* Fix up corner cases, to avoid division by zero or creation of negative
5953 if (entry_count
> total_count
)
5954 entry_count
= total_count
;
5958 total_freq
= entry
->dest
->frequency
;
5959 entry_freq
= EDGE_FREQUENCY (entry
);
5960 /* Fix up corner cases, to avoid division by zero or creation of negative
5962 if (total_freq
== 0)
5964 else if (entry_freq
> total_freq
)
5965 entry_freq
= total_freq
;
5968 copy_bbs (region
, n_region
, region_copy
, &exit
, 1, &exit_copy
, loop
,
5969 split_edge_bb_loc (entry
), update_dominance
);
5972 scale_bbs_frequencies_gcov_type (region
, n_region
,
5973 total_count
- entry_count
,
5975 scale_bbs_frequencies_gcov_type (region_copy
, n_region
, entry_count
,
5980 scale_bbs_frequencies_int (region
, n_region
, total_freq
- entry_freq
,
5982 scale_bbs_frequencies_int (region_copy
, n_region
, entry_freq
, total_freq
);
5987 loop
->header
= exit
->dest
;
5988 loop
->latch
= exit
->src
;
5991 /* Redirect the entry and add the phi node arguments. */
5992 redirected
= redirect_edge_and_branch (entry
, get_bb_copy (entry
->dest
));
5993 gcc_assert (redirected
!= NULL
);
5994 flush_pending_stmts (entry
);
5996 /* Concerning updating of dominators: We must recount dominators
5997 for entry block and its copy. Anything that is outside of the
5998 region, but was dominated by something inside needs recounting as
6000 if (update_dominance
)
6002 set_immediate_dominator (CDI_DOMINATORS
, entry
->dest
, entry
->src
);
6003 doms
.safe_push (get_bb_original (entry
->dest
));
6004 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
6008 /* Add the other PHI node arguments. */
6009 add_phi_args_after_copy (region_copy
, n_region
, NULL
);
6011 if (free_region_copy
)
6014 free_original_copy_tables ();
6018 /* Checks if BB is part of the region defined by N_REGION BBS. */
6020 bb_part_of_region_p (basic_block bb
, basic_block
* bbs
, unsigned n_region
)
6024 for (n
= 0; n
< n_region
; n
++)
6032 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6033 are stored to REGION_COPY in the same order in that they appear
6034 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6035 the region, EXIT an exit from it. The condition guarding EXIT
6036 is moved to ENTRY. Returns true if duplication succeeds, false
6062 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED
, edge exit ATTRIBUTE_UNUSED
,
6063 basic_block
*region ATTRIBUTE_UNUSED
, unsigned n_region ATTRIBUTE_UNUSED
,
6064 basic_block
*region_copy ATTRIBUTE_UNUSED
)
6067 bool free_region_copy
= false;
6068 struct loop
*loop
= exit
->dest
->loop_father
;
6069 struct loop
*orig_loop
= entry
->dest
->loop_father
;
6070 basic_block switch_bb
, entry_bb
, nentry_bb
;
6071 vec
<basic_block
> doms
;
6072 int total_freq
= 0, exit_freq
= 0;
6073 gcov_type total_count
= 0, exit_count
= 0;
6074 edge exits
[2], nexits
[2], e
;
6075 gimple_stmt_iterator gsi
;
6078 basic_block exit_bb
;
6079 gimple_stmt_iterator psi
;
6082 struct loop
*target
, *aloop
, *cloop
;
6084 gcc_assert (EDGE_COUNT (exit
->src
->succs
) == 2);
6086 exits
[1] = EDGE_SUCC (exit
->src
, EDGE_SUCC (exit
->src
, 0) == exit
);
6088 if (!can_copy_bbs_p (region
, n_region
))
6091 initialize_original_copy_tables ();
6092 set_loop_copy (orig_loop
, loop
);
6095 for (aloop
= orig_loop
->inner
; aloop
; aloop
= aloop
->next
)
6097 if (bb_part_of_region_p (aloop
->header
, region
, n_region
))
6099 cloop
= duplicate_loop (aloop
, target
);
6100 duplicate_subloops (aloop
, cloop
);
6106 region_copy
= XNEWVEC (basic_block
, n_region
);
6107 free_region_copy
= true;
6110 gcc_assert (!need_ssa_update_p (cfun
));
6112 /* Record blocks outside the region that are dominated by something
6114 doms
= get_dominated_by_region (CDI_DOMINATORS
, region
, n_region
);
6116 if (exit
->src
->count
)
6118 total_count
= exit
->src
->count
;
6119 exit_count
= exit
->count
;
6120 /* Fix up corner cases, to avoid division by zero or creation of negative
6122 if (exit_count
> total_count
)
6123 exit_count
= total_count
;
6127 total_freq
= exit
->src
->frequency
;
6128 exit_freq
= EDGE_FREQUENCY (exit
);
6129 /* Fix up corner cases, to avoid division by zero or creation of negative
6131 if (total_freq
== 0)
6133 if (exit_freq
> total_freq
)
6134 exit_freq
= total_freq
;
6137 copy_bbs (region
, n_region
, region_copy
, exits
, 2, nexits
, orig_loop
,
6138 split_edge_bb_loc (exit
), true);
6141 scale_bbs_frequencies_gcov_type (region
, n_region
,
6142 total_count
- exit_count
,
6144 scale_bbs_frequencies_gcov_type (region_copy
, n_region
, exit_count
,
6149 scale_bbs_frequencies_int (region
, n_region
, total_freq
- exit_freq
,
6151 scale_bbs_frequencies_int (region_copy
, n_region
, exit_freq
, total_freq
);
6154 /* Create the switch block, and put the exit condition to it. */
6155 entry_bb
= entry
->dest
;
6156 nentry_bb
= get_bb_copy (entry_bb
);
6157 if (!last_stmt (entry
->src
)
6158 || !stmt_ends_bb_p (last_stmt (entry
->src
)))
6159 switch_bb
= entry
->src
;
6161 switch_bb
= split_edge (entry
);
6162 set_immediate_dominator (CDI_DOMINATORS
, nentry_bb
, switch_bb
);
6164 gsi
= gsi_last_bb (switch_bb
);
6165 cond_stmt
= last_stmt (exit
->src
);
6166 gcc_assert (gimple_code (cond_stmt
) == GIMPLE_COND
);
6167 cond_stmt
= gimple_copy (cond_stmt
);
6169 gsi_insert_after (&gsi
, cond_stmt
, GSI_NEW_STMT
);
6171 sorig
= single_succ_edge (switch_bb
);
6172 sorig
->flags
= exits
[1]->flags
;
6173 snew
= make_edge (switch_bb
, nentry_bb
, exits
[0]->flags
);
6175 /* Register the new edge from SWITCH_BB in loop exit lists. */
6176 rescan_loop_exit (snew
, true, false);
6178 /* Add the PHI node arguments. */
6179 add_phi_args_after_copy (region_copy
, n_region
, snew
);
6181 /* Get rid of now superfluous conditions and associated edges (and phi node
6183 exit_bb
= exit
->dest
;
6185 e
= redirect_edge_and_branch (exits
[0], exits
[1]->dest
);
6186 PENDING_STMT (e
) = NULL
;
6188 /* The latch of ORIG_LOOP was copied, and so was the backedge
6189 to the original header. We redirect this backedge to EXIT_BB. */
6190 for (i
= 0; i
< n_region
; i
++)
6191 if (get_bb_original (region_copy
[i
]) == orig_loop
->latch
)
6193 gcc_assert (single_succ_edge (region_copy
[i
]));
6194 e
= redirect_edge_and_branch (single_succ_edge (region_copy
[i
]), exit_bb
);
6195 PENDING_STMT (e
) = NULL
;
6196 for (psi
= gsi_start_phis (exit_bb
);
6200 phi
= gsi_stmt (psi
);
6201 def
= PHI_ARG_DEF (phi
, nexits
[0]->dest_idx
);
6202 add_phi_arg (phi
, def
, e
, gimple_phi_arg_location_from_edge (phi
, e
));
6205 e
= redirect_edge_and_branch (nexits
[1], nexits
[0]->dest
);
6206 PENDING_STMT (e
) = NULL
;
6208 /* Anything that is outside of the region, but was dominated by something
6209 inside needs to update dominance info. */
6210 iterate_fix_dominators (CDI_DOMINATORS
, doms
, false);
6212 /* Update the SSA web. */
6213 update_ssa (TODO_update_ssa
);
6215 if (free_region_copy
)
6218 free_original_copy_tables ();
6222 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6223 adding blocks when the dominator traversal reaches EXIT. This
6224 function silently assumes that ENTRY strictly dominates EXIT. */
6227 gather_blocks_in_sese_region (basic_block entry
, basic_block exit
,
6228 vec
<basic_block
> *bbs_p
)
6232 for (son
= first_dom_son (CDI_DOMINATORS
, entry
);
6234 son
= next_dom_son (CDI_DOMINATORS
, son
))
6236 bbs_p
->safe_push (son
);
6238 gather_blocks_in_sese_region (son
, exit
, bbs_p
);
6242 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6243 The duplicates are recorded in VARS_MAP. */
6246 replace_by_duplicate_decl (tree
*tp
, struct pointer_map_t
*vars_map
,
6249 tree t
= *tp
, new_t
;
6250 struct function
*f
= DECL_STRUCT_FUNCTION (to_context
);
6253 if (DECL_CONTEXT (t
) == to_context
)
6256 loc
= pointer_map_contains (vars_map
, t
);
6260 loc
= pointer_map_insert (vars_map
, t
);
6264 new_t
= copy_var_decl (t
, DECL_NAME (t
), TREE_TYPE (t
));
6265 add_local_decl (f
, new_t
);
6269 gcc_assert (TREE_CODE (t
) == CONST_DECL
);
6270 new_t
= copy_node (t
);
6272 DECL_CONTEXT (new_t
) = to_context
;
6277 new_t
= (tree
) *loc
;
6283 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6284 VARS_MAP maps old ssa names and var_decls to the new ones. */
6287 replace_ssa_name (tree name
, struct pointer_map_t
*vars_map
,
6293 gcc_assert (!virtual_operand_p (name
));
6295 loc
= pointer_map_contains (vars_map
, name
);
6299 tree decl
= SSA_NAME_VAR (name
);
6302 replace_by_duplicate_decl (&decl
, vars_map
, to_context
);
6303 new_name
= make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6304 decl
, SSA_NAME_DEF_STMT (name
));
6305 if (SSA_NAME_IS_DEFAULT_DEF (name
))
6306 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context
),
6310 new_name
= copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context
),
6311 name
, SSA_NAME_DEF_STMT (name
));
6313 loc
= pointer_map_insert (vars_map
, name
);
6317 new_name
= (tree
) *loc
;
6328 struct pointer_map_t
*vars_map
;
6329 htab_t new_label_map
;
6330 struct pointer_map_t
*eh_map
;
6334 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6335 contained in *TP if it has been ORIG_BLOCK previously and change the
6336 DECL_CONTEXT of every local variable referenced in *TP. */
6339 move_stmt_op (tree
*tp
, int *walk_subtrees
, void *data
)
6341 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
6342 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
6347 tree block
= TREE_BLOCK (t
);
6348 if (block
== p
->orig_block
6349 || (p
->orig_block
== NULL_TREE
6350 && block
!= NULL_TREE
))
6351 TREE_SET_BLOCK (t
, p
->new_block
);
6352 #ifdef ENABLE_CHECKING
6353 else if (block
!= NULL_TREE
)
6355 while (block
&& TREE_CODE (block
) == BLOCK
&& block
!= p
->orig_block
)
6356 block
= BLOCK_SUPERCONTEXT (block
);
6357 gcc_assert (block
== p
->orig_block
);
6361 else if (DECL_P (t
) || TREE_CODE (t
) == SSA_NAME
)
6363 if (TREE_CODE (t
) == SSA_NAME
)
6364 *tp
= replace_ssa_name (t
, p
->vars_map
, p
->to_context
);
6365 else if (TREE_CODE (t
) == LABEL_DECL
)
6367 if (p
->new_label_map
)
6369 struct tree_map in
, *out
;
6371 out
= (struct tree_map
*)
6372 htab_find_with_hash (p
->new_label_map
, &in
, DECL_UID (t
));
6377 DECL_CONTEXT (t
) = p
->to_context
;
6379 else if (p
->remap_decls_p
)
6381 /* Replace T with its duplicate. T should no longer appear in the
6382 parent function, so this looks wasteful; however, it may appear
6383 in referenced_vars, and more importantly, as virtual operands of
6384 statements, and in alias lists of other variables. It would be
6385 quite difficult to expunge it from all those places. ??? It might
6386 suffice to do this for addressable variables. */
6387 if ((TREE_CODE (t
) == VAR_DECL
6388 && !is_global_var (t
))
6389 || TREE_CODE (t
) == CONST_DECL
)
6390 replace_by_duplicate_decl (tp
, p
->vars_map
, p
->to_context
);
6394 else if (TYPE_P (t
))
6400 /* Helper for move_stmt_r. Given an EH region number for the source
6401 function, map that to the duplicate EH regio number in the dest. */
6404 move_stmt_eh_region_nr (int old_nr
, struct move_stmt_d
*p
)
6406 eh_region old_r
, new_r
;
6409 old_r
= get_eh_region_from_number (old_nr
);
6410 slot
= pointer_map_contains (p
->eh_map
, old_r
);
6411 new_r
= (eh_region
) *slot
;
6413 return new_r
->index
;
6416 /* Similar, but operate on INTEGER_CSTs. */
6419 move_stmt_eh_region_tree_nr (tree old_t_nr
, struct move_stmt_d
*p
)
6423 old_nr
= tree_to_shwi (old_t_nr
);
6424 new_nr
= move_stmt_eh_region_nr (old_nr
, p
);
6426 return build_int_cst (integer_type_node
, new_nr
);
6429 /* Like move_stmt_op, but for gimple statements.
6431 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6432 contained in the current statement in *GSI_P and change the
6433 DECL_CONTEXT of every local variable referenced in the current
6437 move_stmt_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
6438 struct walk_stmt_info
*wi
)
6440 struct move_stmt_d
*p
= (struct move_stmt_d
*) wi
->info
;
6441 gimple stmt
= gsi_stmt (*gsi_p
);
6442 tree block
= gimple_block (stmt
);
6444 if (block
== p
->orig_block
6445 || (p
->orig_block
== NULL_TREE
6446 && block
!= NULL_TREE
))
6447 gimple_set_block (stmt
, p
->new_block
);
6449 switch (gimple_code (stmt
))
6452 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6454 tree r
, fndecl
= gimple_call_fndecl (stmt
);
6455 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
6456 switch (DECL_FUNCTION_CODE (fndecl
))
6458 case BUILT_IN_EH_COPY_VALUES
:
6459 r
= gimple_call_arg (stmt
, 1);
6460 r
= move_stmt_eh_region_tree_nr (r
, p
);
6461 gimple_call_set_arg (stmt
, 1, r
);
6464 case BUILT_IN_EH_POINTER
:
6465 case BUILT_IN_EH_FILTER
:
6466 r
= gimple_call_arg (stmt
, 0);
6467 r
= move_stmt_eh_region_tree_nr (r
, p
);
6468 gimple_call_set_arg (stmt
, 0, r
);
6479 int r
= gimple_resx_region (stmt
);
6480 r
= move_stmt_eh_region_nr (r
, p
);
6481 gimple_resx_set_region (stmt
, r
);
6485 case GIMPLE_EH_DISPATCH
:
6487 int r
= gimple_eh_dispatch_region (stmt
);
6488 r
= move_stmt_eh_region_nr (r
, p
);
6489 gimple_eh_dispatch_set_region (stmt
, r
);
6493 case GIMPLE_OMP_RETURN
:
6494 case GIMPLE_OMP_CONTINUE
:
6497 if (is_gimple_omp (stmt
))
6499 /* Do not remap variables inside OMP directives. Variables
6500 referenced in clauses and directive header belong to the
6501 parent function and should not be moved into the child
6503 bool save_remap_decls_p
= p
->remap_decls_p
;
6504 p
->remap_decls_p
= false;
6505 *handled_ops_p
= true;
6507 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), move_stmt_r
,
6510 p
->remap_decls_p
= save_remap_decls_p
;
6518 /* Move basic block BB from function CFUN to function DEST_FN. The
6519 block is moved out of the original linked list and placed after
6520 block AFTER in the new list. Also, the block is removed from the
6521 original array of blocks and placed in DEST_FN's array of blocks.
6522 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
6523 updated to reflect the moved edges.
6525 The local variables are remapped to new instances, VARS_MAP is used
6526 to record the mapping. */
6529 move_block_to_fn (struct function
*dest_cfun
, basic_block bb
,
6530 basic_block after
, bool update_edge_count_p
,
6531 struct move_stmt_d
*d
)
6533 struct control_flow_graph
*cfg
;
6536 gimple_stmt_iterator si
;
6537 unsigned old_len
, new_len
;
6539 /* Remove BB from dominance structures. */
6540 delete_from_dominance_info (CDI_DOMINATORS
, bb
);
6542 /* Move BB from its current loop to the copy in the new function. */
6545 struct loop
*new_loop
= (struct loop
*)bb
->loop_father
->aux
;
6547 bb
->loop_father
= new_loop
;
6550 /* Link BB to the new linked list. */
6551 move_block_after (bb
, after
);
6553 /* Update the edge count in the corresponding flowgraphs. */
6554 if (update_edge_count_p
)
6555 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6557 cfun
->cfg
->x_n_edges
--;
6558 dest_cfun
->cfg
->x_n_edges
++;
6561 /* Remove BB from the original basic block array. */
6562 (*cfun
->cfg
->x_basic_block_info
)[bb
->index
] = NULL
;
6563 cfun
->cfg
->x_n_basic_blocks
--;
6565 /* Grow DEST_CFUN's basic block array if needed. */
6566 cfg
= dest_cfun
->cfg
;
6567 cfg
->x_n_basic_blocks
++;
6568 if (bb
->index
>= cfg
->x_last_basic_block
)
6569 cfg
->x_last_basic_block
= bb
->index
+ 1;
6571 old_len
= vec_safe_length (cfg
->x_basic_block_info
);
6572 if ((unsigned) cfg
->x_last_basic_block
>= old_len
)
6574 new_len
= cfg
->x_last_basic_block
+ (cfg
->x_last_basic_block
+ 3) / 4;
6575 vec_safe_grow_cleared (cfg
->x_basic_block_info
, new_len
);
6578 (*cfg
->x_basic_block_info
)[bb
->index
] = bb
;
6580 /* Remap the variables in phi nodes. */
6581 for (si
= gsi_start_phis (bb
); !gsi_end_p (si
); )
6583 gimple phi
= gsi_stmt (si
);
6585 tree op
= PHI_RESULT (phi
);
6589 if (virtual_operand_p (op
))
6591 /* Remove the phi nodes for virtual operands (alias analysis will be
6592 run for the new function, anyway). */
6593 remove_phi_node (&si
, true);
6597 SET_PHI_RESULT (phi
,
6598 replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
6599 FOR_EACH_PHI_ARG (use
, phi
, oi
, SSA_OP_USE
)
6601 op
= USE_FROM_PTR (use
);
6602 if (TREE_CODE (op
) == SSA_NAME
)
6603 SET_USE (use
, replace_ssa_name (op
, d
->vars_map
, dest_cfun
->decl
));
6606 for (i
= 0; i
< EDGE_COUNT (bb
->preds
); i
++)
6608 location_t locus
= gimple_phi_arg_location (phi
, i
);
6609 tree block
= LOCATION_BLOCK (locus
);
6611 if (locus
== UNKNOWN_LOCATION
)
6613 if (d
->orig_block
== NULL_TREE
|| block
== d
->orig_block
)
6615 if (d
->new_block
== NULL_TREE
)
6616 locus
= LOCATION_LOCUS (locus
);
6618 locus
= COMBINE_LOCATION_DATA (line_table
, locus
, d
->new_block
);
6619 gimple_phi_arg_set_location (phi
, i
, locus
);
6626 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
6628 gimple stmt
= gsi_stmt (si
);
6629 struct walk_stmt_info wi
;
6631 memset (&wi
, 0, sizeof (wi
));
6633 walk_gimple_stmt (&si
, move_stmt_r
, move_stmt_op
, &wi
);
6635 if (gimple_code (stmt
) == GIMPLE_LABEL
)
6637 tree label
= gimple_label_label (stmt
);
6638 int uid
= LABEL_DECL_UID (label
);
6640 gcc_assert (uid
> -1);
6642 old_len
= vec_safe_length (cfg
->x_label_to_block_map
);
6643 if (old_len
<= (unsigned) uid
)
6645 new_len
= 3 * uid
/ 2 + 1;
6646 vec_safe_grow_cleared (cfg
->x_label_to_block_map
, new_len
);
6649 (*cfg
->x_label_to_block_map
)[uid
] = bb
;
6650 (*cfun
->cfg
->x_label_to_block_map
)[uid
] = NULL
;
6652 gcc_assert (DECL_CONTEXT (label
) == dest_cfun
->decl
);
6654 if (uid
>= dest_cfun
->cfg
->last_label_uid
)
6655 dest_cfun
->cfg
->last_label_uid
= uid
+ 1;
6658 maybe_duplicate_eh_stmt_fn (dest_cfun
, stmt
, cfun
, stmt
, d
->eh_map
, 0);
6659 remove_stmt_from_eh_lp_fn (cfun
, stmt
);
6661 gimple_duplicate_stmt_histograms (dest_cfun
, stmt
, cfun
, stmt
);
6662 gimple_remove_stmt_histograms (cfun
, stmt
);
6664 /* We cannot leave any operands allocated from the operand caches of
6665 the current function. */
6666 free_stmt_operands (cfun
, stmt
);
6667 push_cfun (dest_cfun
);
6672 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6673 if (e
->goto_locus
!= UNKNOWN_LOCATION
)
6675 tree block
= LOCATION_BLOCK (e
->goto_locus
);
6676 if (d
->orig_block
== NULL_TREE
6677 || block
== d
->orig_block
)
6678 e
->goto_locus
= d
->new_block
?
6679 COMBINE_LOCATION_DATA (line_table
, e
->goto_locus
, d
->new_block
) :
6680 LOCATION_LOCUS (e
->goto_locus
);
6684 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6685 the outermost EH region. Use REGION as the incoming base EH region. */
6688 find_outermost_region_in_block (struct function
*src_cfun
,
6689 basic_block bb
, eh_region region
)
6691 gimple_stmt_iterator si
;
6693 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
6695 gimple stmt
= gsi_stmt (si
);
6696 eh_region stmt_region
;
6699 lp_nr
= lookup_stmt_eh_lp_fn (src_cfun
, stmt
);
6700 stmt_region
= get_eh_region_from_lp_number_fn (src_cfun
, lp_nr
);
6704 region
= stmt_region
;
6705 else if (stmt_region
!= region
)
6707 region
= eh_region_outermost (src_cfun
, stmt_region
, region
);
6708 gcc_assert (region
!= NULL
);
6717 new_label_mapper (tree decl
, void *data
)
6719 htab_t hash
= (htab_t
) data
;
6723 gcc_assert (TREE_CODE (decl
) == LABEL_DECL
);
6725 m
= XNEW (struct tree_map
);
6726 m
->hash
= DECL_UID (decl
);
6727 m
->base
.from
= decl
;
6728 m
->to
= create_artificial_label (UNKNOWN_LOCATION
);
6729 LABEL_DECL_UID (m
->to
) = LABEL_DECL_UID (decl
);
6730 if (LABEL_DECL_UID (m
->to
) >= cfun
->cfg
->last_label_uid
)
6731 cfun
->cfg
->last_label_uid
= LABEL_DECL_UID (m
->to
) + 1;
6733 slot
= htab_find_slot_with_hash (hash
, m
, m
->hash
, INSERT
);
6734 gcc_assert (*slot
== NULL
);
6741 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6745 replace_block_vars_by_duplicates (tree block
, struct pointer_map_t
*vars_map
,
6750 for (tp
= &BLOCK_VARS (block
); *tp
; tp
= &DECL_CHAIN (*tp
))
6753 if (TREE_CODE (t
) != VAR_DECL
&& TREE_CODE (t
) != CONST_DECL
)
6755 replace_by_duplicate_decl (&t
, vars_map
, to_context
);
6758 if (TREE_CODE (*tp
) == VAR_DECL
&& DECL_HAS_VALUE_EXPR_P (*tp
))
6760 SET_DECL_VALUE_EXPR (t
, DECL_VALUE_EXPR (*tp
));
6761 DECL_HAS_VALUE_EXPR_P (t
) = 1;
6763 DECL_CHAIN (t
) = DECL_CHAIN (*tp
);
6768 for (block
= BLOCK_SUBBLOCKS (block
); block
; block
= BLOCK_CHAIN (block
))
6769 replace_block_vars_by_duplicates (block
, vars_map
, to_context
);
6772 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6776 fixup_loop_arrays_after_move (struct function
*fn1
, struct function
*fn2
,
6779 /* Discard it from the old loop array. */
6780 (*get_loops (fn1
))[loop
->num
] = NULL
;
6782 /* Place it in the new loop array, assigning it a new number. */
6783 loop
->num
= number_of_loops (fn2
);
6784 vec_safe_push (loops_for_fn (fn2
)->larray
, loop
);
6786 /* Recurse to children. */
6787 for (loop
= loop
->inner
; loop
; loop
= loop
->next
)
6788 fixup_loop_arrays_after_move (fn1
, fn2
, loop
);
6791 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6792 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6793 single basic block in the original CFG and the new basic block is
6794 returned. DEST_CFUN must not have a CFG yet.
6796 Note that the region need not be a pure SESE region. Blocks inside
6797 the region may contain calls to abort/exit. The only restriction
6798 is that ENTRY_BB should be the only entry point and it must
6801 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6802 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
6803 to the new function.
6805 All local variables referenced in the region are assumed to be in
6806 the corresponding BLOCK_VARS and unexpanded variable lists
6807 associated with DEST_CFUN. */
/* NOTE(review): this chunk is a partial extraction -- many original lines
   (declarations of E/EI, loop bodies' braces, frees of the temporary
   arrays, the return statement, etc.) are missing.  Code left
   byte-identical; verify against upstream gcc/tree-cfg.c before editing.  */
6810 move_sese_region_to_fn (struct function
*dest_cfun
, basic_block entry_bb
,
6811 basic_block exit_bb
, tree orig_block
)
6813 vec
<basic_block
> bbs
, dom_bbs
;
6814 basic_block dom_entry
= get_immediate_dominator (CDI_DOMINATORS
, entry_bb
);
6815 basic_block after
, bb
, *entry_pred
, *exit_succ
, abb
;
6816 struct function
*saved_cfun
= cfun
;
6817 int *entry_flag
, *exit_flag
;
6818 unsigned *entry_prob
, *exit_prob
;
6819 unsigned i
, num_entry_edges
, num_exit_edges
, num_nodes
;
6822 htab_t new_label_map
;
6823 struct pointer_map_t
*vars_map
, *eh_map
;
6824 struct loop
*loop
= entry_bb
->loop_father
;
6825 struct loop
*loop0
= get_loop (saved_cfun
, 0);
6826 struct move_stmt_d d
;
6828 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6830 gcc_assert (entry_bb
!= exit_bb
6832 || dominated_by_p (CDI_DOMINATORS
, exit_bb
, entry_bb
)));
6834 /* Collect all the blocks in the region. Manually add ENTRY_BB
6835 because it won't be added by dfs_enumerate_from. */
6837 bbs
.safe_push (entry_bb
);
6838 gather_blocks_in_sese_region (entry_bb
, exit_bb
, &bbs
);
6840 /* The blocks that used to be dominated by something in BBS will now be
6841 dominated by the new block. */
6842 dom_bbs
= get_dominated_by_region (CDI_DOMINATORS
,
6846 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6847 the predecessor edges to ENTRY_BB and the successor edges to
6848 EXIT_BB so that we can re-attach them to the new basic block that
6849 will replace the region. */
6850 num_entry_edges
= EDGE_COUNT (entry_bb
->preds
);
6851 entry_pred
= XNEWVEC (basic_block
, num_entry_edges
);
6852 entry_flag
= XNEWVEC (int, num_entry_edges
);
6853 entry_prob
= XNEWVEC (unsigned, num_entry_edges
);
/* Save probability/flags/source of each incoming edge; the edges
   themselves are presumably removed by a missing line -- confirm.  */
6855 for (ei
= ei_start (entry_bb
->preds
); (e
= ei_safe_edge (ei
)) != NULL
;)
6857 entry_prob
[i
] = e
->probability
;
6858 entry_flag
[i
] = e
->flags
;
6859 entry_pred
[i
++] = e
->src
;
6865 num_exit_edges
= EDGE_COUNT (exit_bb
->succs
);
6866 exit_succ
= XNEWVEC (basic_block
, num_exit_edges
);
6867 exit_flag
= XNEWVEC (int, num_exit_edges
);
6868 exit_prob
= XNEWVEC (unsigned, num_exit_edges
);
/* Likewise for the outgoing edges of EXIT_BB.  */
6870 for (ei
= ei_start (exit_bb
->succs
); (e
= ei_safe_edge (ei
)) != NULL
;)
6872 exit_prob
[i
] = e
->probability
;
6873 exit_flag
[i
] = e
->flags
;
6874 exit_succ
[i
++] = e
->dest
;
6886 /* Switch context to the child function to initialize DEST_FN's CFG. */
6887 gcc_assert (dest_cfun
->cfg
== NULL
);
6888 push_cfun (dest_cfun
);
6890 init_empty_tree_cfg ();
6892 /* Initialize EH information for the new function. */
6894 new_label_map
= NULL
;
6897 eh_region region
= NULL
;
6899 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
6900 region
= find_outermost_region_in_block (saved_cfun
, bb
, region
);
6902 init_eh_for_function ();
6905 new_label_map
= htab_create (17, tree_map_hash
, tree_map_eq
, free
);
6906 eh_map
= duplicate_eh_regions (saved_cfun
, region
, 0,
6907 new_label_mapper
, new_label_map
);
6911 /* Initialize an empty loop tree. */
6912 struct loops
*loops
= ggc_cleared_alloc
<struct loops
> ();
6913 init_loops_structure (dest_cfun
, loops
, 1);
6914 loops
->state
= LOOPS_MAY_HAVE_MULTIPLE_LATCHES
;
6915 set_loops_for_fn (dest_cfun
, loops
);
6917 /* Move the outlined loop tree part. */
6918 num_nodes
= bbs
.length ();
6919 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
6921 if (bb
->loop_father
->header
== bb
)
6923 struct loop
*this_loop
= bb
->loop_father
;
6924 struct loop
*outer
= loop_outer (this_loop
);
6926 /* If the SESE region contains some bbs ending with
6927 a noreturn call, those are considered to belong
6928 to the outermost loop in saved_cfun, rather than
6929 the entry_bb's loop_father. */
6933 num_nodes
-= this_loop
->num_nodes
;
6934 flow_loop_tree_node_remove (bb
->loop_father
);
6935 flow_loop_tree_node_add (get_loop (dest_cfun
, 0), this_loop
);
6936 fixup_loop_arrays_after_move (saved_cfun
, cfun
, this_loop
);
6939 else if (bb
->loop_father
== loop0
&& loop0
!= loop
)
6942 /* Remove loop exits from the outlined region. */
6943 if (loops_for_fn (saved_cfun
)->exits
)
6944 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6946 void **slot
= htab_find_slot_with_hash
6947 (loops_for_fn (saved_cfun
)->exits
, e
,
6948 htab_hash_pointer (e
), NO_INSERT
);
6950 htab_clear_slot (loops_for_fn (saved_cfun
)->exits
, slot
);
6955 /* Adjust the number of blocks in the tree root of the outlined part. */
6956 get_loop (dest_cfun
, 0)->num_nodes
= bbs
.length () + 2;
6958 /* Setup a mapping to be used by move_block_to_fn. */
6959 loop
->aux
= current_loops
->tree_root
;
6960 loop0
->aux
= current_loops
->tree_root
;
6964 /* Move blocks from BBS into DEST_CFUN. */
6965 gcc_assert (bbs
.length () >= 2);
6966 after
= dest_cfun
->cfg
->x_entry_block_ptr
;
6967 vars_map
= pointer_map_create ();
6969 memset (&d
, 0, sizeof (d
));
6970 d
.orig_block
= orig_block
;
6971 d
.new_block
= DECL_INITIAL (dest_cfun
->decl
);
6972 d
.from_context
= cfun
->decl
;
6973 d
.to_context
= dest_cfun
->decl
;
6974 d
.vars_map
= vars_map
;
6975 d
.new_label_map
= new_label_map
;
6977 d
.remap_decls_p
= true;
6979 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
6981 /* No need to update edge counts on the last block. It has
6982 already been updated earlier when we detached the region from
6983 the original CFG. */
6984 move_block_to_fn (dest_cfun
, bb
, after
, bb
!= exit_bb
, &d
);
6990 /* Loop sizes are no longer correct, fix them up. */
6991 loop
->num_nodes
-= num_nodes
;
6992 for (struct loop
*outer
= loop_outer (loop
);
6993 outer
; outer
= loop_outer (outer
))
6994 outer
->num_nodes
-= num_nodes
;
6995 loop0
->num_nodes
-= bbs
.length () - num_nodes
;
6997 if (saved_cfun
->has_simduid_loops
|| saved_cfun
->has_force_vectorize_loops
)
7000 for (i
= 0; vec_safe_iterate (loops
->larray
, i
, &aloop
); i
++)
7005 replace_by_duplicate_decl (&aloop
->simduid
, d
.vars_map
,
7007 dest_cfun
->has_simduid_loops
= true;
7009 if (aloop
->force_vectorize
)
7010 dest_cfun
->has_force_vectorize_loops
= true;
7014 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7018 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
7020 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun
->decl
))
7021 = BLOCK_SUBBLOCKS (orig_block
);
7022 for (block
= BLOCK_SUBBLOCKS (orig_block
);
7023 block
; block
= BLOCK_CHAIN (block
))
7024 BLOCK_SUPERCONTEXT (block
) = DECL_INITIAL (dest_cfun
->decl
);
7025 BLOCK_SUBBLOCKS (orig_block
) = NULL_TREE
;
7028 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun
->decl
),
7029 vars_map
, dest_cfun
->decl
);
7032 htab_delete (new_label_map
);
7034 pointer_map_destroy (eh_map
);
7035 pointer_map_destroy (vars_map
);
7037 /* Rewire the entry and exit blocks. The successor to the entry
7038 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7039 the child function. Similarly, the predecessor of DEST_FN's
7040 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7041 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7042 various CFG manipulation function get to the right CFG.
7044 FIXME, this is silly. The CFG ought to become a parameter to
7046 push_cfun (dest_cfun
);
7047 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
), entry_bb
, EDGE_FALLTHRU
);
7049 make_edge (exit_bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), 0);
7052 /* Back in the original function, the SESE region has disappeared,
7053 create a new basic block in its place. */
7054 bb
= create_empty_bb (entry_pred
[0]);
7056 add_bb_to_loop (bb
, loop
);
/* Re-attach the saved incoming edges to the replacement block.  */
7057 for (i
= 0; i
< num_entry_edges
; i
++)
7059 e
= make_edge (entry_pred
[i
], bb
, entry_flag
[i
]);
7060 e
->probability
= entry_prob
[i
];
/* And the saved outgoing edges.  */
7063 for (i
= 0; i
< num_exit_edges
; i
++)
7065 e
= make_edge (bb
, exit_succ
[i
], exit_flag
[i
]);
7066 e
->probability
= exit_prob
[i
];
7069 set_immediate_dominator (CDI_DOMINATORS
, bb
, dom_entry
);
7070 FOR_EACH_VEC_ELT (dom_bbs
, i
, abb
)
7071 set_immediate_dominator (CDI_DOMINATORS
, abb
, bb
);
7089 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
/* NOTE(review): partial extraction -- declarations of BB/IX/CHAIN/INDENT
   and several closing braces are among the missing lines.  Code kept
   byte-identical; verify against upstream gcc/tree-cfg.c.  Dumps FNDECL's
   signature, locals, SSA names, and its body (CFG-based, plain GIMPLE
   sequence, or GENERIC tree, depending on lowering state).  */
7093 dump_function_to_file (tree fndecl
, FILE *file
, int flags
)
7095 tree arg
, var
, old_current_fndecl
= current_function_decl
;
7096 struct function
*dsf
;
7097 bool ignore_topmost_bind
= false, any_var
= false;
7100 bool tmclone
= (TREE_CODE (fndecl
) == FUNCTION_DECL
7101 && decl_is_tm_clone (fndecl
));
7102 struct function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
7104 current_function_decl
= fndecl
;
7105 fprintf (file
, "%s %s(", function_name (fun
), tmclone
? "[tm-clone] " : "");
/* Print the argument list.  */
7107 arg
= DECL_ARGUMENTS (fndecl
);
7110 print_generic_expr (file
, TREE_TYPE (arg
), dump_flags
);
7111 fprintf (file
, " ");
7112 print_generic_expr (file
, arg
, dump_flags
);
7113 if (flags
& TDF_VERBOSE
)
7114 print_node (file
, "", arg
, 4);
7115 if (DECL_CHAIN (arg
))
7116 fprintf (file
, ", ");
7117 arg
= DECL_CHAIN (arg
);
7119 fprintf (file
, ")\n");
7121 if (flags
& TDF_VERBOSE
)
7122 print_node (file
, "", fndecl
, 2);
7124 dsf
= DECL_STRUCT_FUNCTION (fndecl
);
7125 if (dsf
&& (flags
& TDF_EH
))
7126 dump_eh_tree (file
, dsf
);
/* Raw dump requested and no GIMPLE body: dump the tree node and bail.  */
7128 if (flags
& TDF_RAW
&& !gimple_has_body_p (fndecl
))
7130 dump_node (fndecl
, TDF_SLIM
| flags
, file
);
7131 current_function_decl
= old_current_fndecl
;
7135 /* When GIMPLE is lowered, the variables are no longer available in
7136 BIND_EXPRs, so display them separately. */
7137 if (fun
&& fun
->decl
== fndecl
&& (fun
->curr_properties
& PROP_gimple_lcf
))
7140 ignore_topmost_bind
= true;
7142 fprintf (file
, "{\n");
7143 if (!vec_safe_is_empty (fun
->local_decls
))
7144 FOR_EACH_LOCAL_DECL (fun
, ix
, var
)
7146 print_generic_decl (file
, var
, flags
);
7147 if (flags
& TDF_VERBOSE
)
7148 print_node (file
, "", var
, 4);
7149 fprintf (file
, "\n");
/* Print anonymous SSA names (those without an underlying VAR_DECL).  */
7153 if (gimple_in_ssa_p (cfun
))
7154 for (ix
= 1; ix
< num_ssa_names
; ++ix
)
7156 tree name
= ssa_name (ix
);
7157 if (name
&& !SSA_NAME_VAR (name
))
7159 fprintf (file
, " ");
7160 print_generic_expr (file
, TREE_TYPE (name
), flags
);
7161 fprintf (file
, " ");
7162 print_generic_expr (file
, name
, flags
);
7163 fprintf (file
, ";\n");
7170 if (fun
&& fun
->decl
== fndecl
7172 && basic_block_info_for_fn (fun
))
7174 /* If the CFG has been built, emit a CFG-based dump. */
7175 if (!ignore_topmost_bind
)
7176 fprintf (file
, "{\n");
7178 if (any_var
&& n_basic_blocks_for_fn (fun
))
7179 fprintf (file
, "\n");
7181 FOR_EACH_BB_FN (bb
, fun
)
7182 dump_bb (file
, bb
, 2, flags
| TDF_COMMENT
);
7184 fprintf (file
, "}\n");
7186 else if (DECL_SAVED_TREE (fndecl
) == NULL
)
7188 /* The function is now in GIMPLE form but the CFG has not been
7189 built yet. Emit the single sequence of GIMPLE statements
7190 that make up its body. */
7191 gimple_seq body
= gimple_body (fndecl
);
7193 if (gimple_seq_first_stmt (body
)
7194 && gimple_seq_first_stmt (body
) == gimple_seq_last_stmt (body
)
7195 && gimple_code (gimple_seq_first_stmt (body
)) == GIMPLE_BIND
)
7196 print_gimple_seq (file
, body
, 0, flags
);
7199 if (!ignore_topmost_bind
)
7200 fprintf (file
, "{\n");
7203 fprintf (file
, "\n");
7205 print_gimple_seq (file
, body
, 2, flags
);
7206 fprintf (file
, "}\n");
7213 /* Make a tree based dump. */
7214 chain
= DECL_SAVED_TREE (fndecl
);
7215 if (chain
&& TREE_CODE (chain
) == BIND_EXPR
)
7217 if (ignore_topmost_bind
)
7219 chain
= BIND_EXPR_BODY (chain
);
7227 if (!ignore_topmost_bind
)
7228 fprintf (file
, "{\n");
7233 fprintf (file
, "\n");
7235 print_generic_stmt_indented (file
, chain
, flags
, indent
);
7236 if (ignore_topmost_bind
)
7237 fprintf (file
, "}\n");
7240 if (flags
& TDF_ENUMERATE_LOCALS
)
7241 dump_enumerated_decls (file
, flags
);
7242 fprintf (file
, "\n\n");
/* Restore the caller's notion of the current function.  */
7244 current_function_decl
= old_current_fndecl
;
7247 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
/* NOTE(review): thin wrapper around dump_function_to_file; partial
   extraction, code kept byte-identical.  */
7250 debug_function (tree fn
, int flags
)
7252 dump_function_to_file (fn
, stderr
, flags
);
7256 /* Print on FILE the indexes for the predecessors of basic_block BB. */
/* NOTE(review): partial extraction; code kept byte-identical.  Emits
   "bb_N " for each predecessor edge source of BB.  */
7259 print_pred_bbs (FILE *file
, basic_block bb
)
7264 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
7265 fprintf (file
, "bb_%d ", e
->src
->index
);
7269 /* Print on FILE the indexes for the successors of basic_block BB. */
/* NOTE(review): partial extraction; code kept byte-identical.  Emits
   "bb_N " for each successor edge destination of BB.  */
7272 print_succ_bbs (FILE *file
, basic_block bb
)
7277 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7278 fprintf (file
, "bb_%d ", e
->dest
->index
);
7281 /* Print to FILE the basic block BB following the VERBOSITY level. */
/* NOTE(review): partial extraction -- the verbosity checks guarding the
   header/body prints are among the missing lines.  Code kept
   byte-identical; verify against upstream gcc/tree-cfg.c.  */
7284 print_loops_bb (FILE *file
, basic_block bb
, int indent
, int verbosity
)
7286 char *s_indent
= (char *) alloca ((size_t) indent
+ 1);
7287 memset ((void *) s_indent
, ' ', (size_t) indent
);
7288 s_indent
[indent
] = '\0';
7290 /* Print basic_block's header. */
7293 fprintf (file
, "%s bb_%d (preds = {", s_indent
, bb
->index
);
7294 print_pred_bbs (file
, bb
);
7295 fprintf (file
, "}, succs = {");
7296 print_succ_bbs (file
, bb
);
7297 fprintf (file
, "})\n");
7300 /* Print basic_block's body. */
7303 fprintf (file
, "%s {\n", s_indent
);
7304 dump_bb (file
, bb
, indent
+ 4, TDF_VOPS
|TDF_MEMSYMS
);
7305 fprintf (file
, "%s }\n", s_indent
);
7309 static void print_loop_and_siblings (FILE *, struct loop
*, int, int);
7311 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7312 VERBOSITY level this outputs the contents of the loop, or just its
/* NOTE(review): partial extraction -- declarations of S_INDENT/BB and
   several verbosity/NULL guards are among the missing lines.  Code kept
   byte-identical; verify against upstream gcc/tree-cfg.c.  */
7316 print_loop (FILE *file
, struct loop
*loop
, int indent
, int verbosity
)
7324 s_indent
= (char *) alloca ((size_t) indent
+ 1);
7325 memset ((void *) s_indent
, ' ', (size_t) indent
);
7326 s_indent
[indent
] = '\0';
7328 /* Print loop's header. */
7329 fprintf (file
, "%sloop_%d (", s_indent
, loop
->num
);
7331 fprintf (file
, "header = %d", loop
->header
->index
);
7334 fprintf (file
, "deleted)\n");
7338 fprintf (file
, ", latch = %d", loop
->latch
->index
);
7340 fprintf (file
, ", multiple latches");
7341 fprintf (file
, ", niter = ");
7342 print_generic_expr (file
, loop
->nb_iterations
, 0);
7344 if (loop
->any_upper_bound
)
7346 fprintf (file
, ", upper_bound = ");
7347 print_decu (loop
->nb_iterations_upper_bound
, file
);
7350 if (loop
->any_estimate
)
7352 fprintf (file
, ", estimate = ");
7353 print_decu (loop
->nb_iterations_estimate
, file
);
7355 fprintf (file
, ")\n");
7357 /* Print loop's body. */
7360 fprintf (file
, "%s{\n", s_indent
);
7361 FOR_EACH_BB_FN (bb
, cfun
)
7362 if (bb
->loop_father
== loop
)
7363 print_loops_bb (file
, bb
, indent
, verbosity
);
7365 print_loop_and_siblings (file
, loop
->inner
, indent
+ 2, verbosity
);
7366 fprintf (file
, "%s}\n", s_indent
);
7370 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7371 spaces. Following VERBOSITY level this outputs the contents of the
7372 loop, or just its structure. */
/* NOTE(review): partial extraction -- the NULL-loop early return is among
   the missing lines.  Code kept byte-identical.  Prints LOOP, then
   recurses on LOOP->next (siblings at the same nesting level).  */
7375 print_loop_and_siblings (FILE *file
, struct loop
*loop
, int indent
,
7381 print_loop (file
, loop
, indent
, verbosity
);
7382 print_loop_and_siblings (file
, loop
->next
, indent
, verbosity
);
7385 /* Follow a CFG edge from the entry point of the program, and on entry
7386 of a loop, pretty print the loop structure on FILE. */
/* NOTE(review): partial extraction; code kept byte-identical.  Starts at
   the current function's ENTRY block and dumps the whole loop tree.  */
7389 print_loops (FILE *file
, int verbosity
)
7393 bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
);
7394 if (bb
&& bb
->loop_father
)
7395 print_loop_and_siblings (file
, bb
->loop_father
, 0, verbosity
);
/* Dump loop REF to stderr with minimal verbosity (structure only).  */
7401 debug (struct loop
&ref
)
7403 print_loop (stderr
, &ref
, 0, /*verbosity*/0);
/* Pointer overload of debug (struct loop &); prints "<nil>" for a null
   pointer.  NOTE(review): the non-null branch is among the missing lines.  */
7407 debug (struct loop
*ptr
)
7412 fprintf (stderr
, "<nil>\n");
7415 /* Dump a loop verbosely. */
7418 debug_verbose (struct loop
&ref
)
7420 print_loop (stderr
, &ref
, 0, /*verbosity*/3);
/* Pointer overload of debug_verbose; prints "<nil>" for a null pointer.
   NOTE(review): the non-null branch is among the missing lines.  */
7424 debug_verbose (struct loop
*ptr
)
7429 fprintf (stderr
, "<nil>\n");
7433 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7436 debug_loops (int verbosity
)
7438 print_loops (stderr
, verbosity
);
7441 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7444 debug_loop (struct loop
*loop
, int verbosity
)
7446 print_loop (stderr
, loop
, 0, verbosity
);
7449 /* Print on stderr the code of loop number NUM, at some VERBOSITY
/* Looks the loop up by number in the current function and delegates to
   debug_loop.  */
7453 debug_loop_num (unsigned num
, int verbosity
)
7455 debug_loop (get_loop (cfun
, num
), verbosity
);
7458 /* Return true if BB ends with a call, possibly followed by some
7459 instructions that must stay with the call. Return false,
/* Last non-debug statement of BB is checked; debug stmts after the call
   are ignored by gsi_last_nondebug_bb.  */
7463 gimple_block_ends_with_call_p (basic_block bb
)
7465 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
7466 return !gsi_end_p (gsi
) && is_gimple_call (gsi_stmt (gsi
));
7470 /* Return true if BB ends with a conditional branch. Return false,
/* True iff the last statement of BB is a GIMPLE_COND.  */
7474 gimple_block_ends_with_condjump_p (const_basic_block bb
)
7476 gimple stmt
= last_stmt (CONST_CAST_BB (bb
));
7477 return (stmt
&& gimple_code (stmt
) == GIMPLE_COND
);
7481 /* Return true if we need to add fake edge to exit at statement T.
7482 Helper function for gimple_flow_call_edges_add. */
/* NOTE(review): partial extraction -- declarations of CALL_FLAGS/E/EI/BB,
   the return statements, and several branch bodies are among the missing
   lines, so the visible conditions are incomplete.  Code kept
   byte-identical; verify against upstream gcc/tree-cfg.c.  */
7485 need_fake_edge_p (gimple t
)
7487 tree fndecl
= NULL_TREE
;
7490 /* NORETURN and LONGJMP calls already have an edge to exit.
7491 CONST and PURE calls do not need one.
7492 We don't currently check for CONST and PURE here, although
7493 it would be a good idea, because those attributes are
7494 figured out from the RTL in mark_constant_function, and
7495 the counter incrementation code from -fprofile-arcs
7496 leads to different results from -fbranch-probabilities. */
7497 if (is_gimple_call (t
))
7499 fndecl
= gimple_call_fndecl (t
);
7500 call_flags
= gimple_call_flags (t
);
7503 if (is_gimple_call (t
)
7505 && DECL_BUILT_IN (fndecl
)
7506 && (call_flags
& ECF_NOTHROW
)
7507 && !(call_flags
& ECF_RETURNS_TWICE
)
7508 /* fork() doesn't really return twice, but the effect of
7509 wrapping it in __gcov_fork() which calls __gcov_flush()
7510 and clears the counters before forking has the same
7511 effect as returning twice. Force a fake edge. */
7512 && !(DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
7513 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FORK
))
7516 if (is_gimple_call (t
))
7522 if (!(call_flags
& ECF_NORETURN
))
7526 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
7527 if ((e
->flags
& EDGE_FAKE
) == 0)
/* Volatile asm or asm with inputs also needs a fake edge.  */
7531 if (gimple_code (t
) == GIMPLE_ASM
7532 && (gimple_asm_volatile_p (t
) || gimple_asm_input_p (t
)))
7539 /* Add fake edges to the function exit for any non constant and non
7540 noreturn calls (or noreturn calls with EH/abnormal edges),
7541 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7542 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7545 The goal is to expose cases in which entering a basic block does
7546 not imply that all subsequent instructions must be executed. */
/* NOTE(review): partial extraction -- declarations of I/T/E, the loop that
   steps GSI backwards, the blocks_split increments, and several braces are
   among the missing lines.  Code kept byte-identical; verify against
   upstream gcc/tree-cfg.c.  */
7549 gimple_flow_call_edges_add (sbitmap blocks
)
7552 int blocks_split
= 0;
7553 int last_bb
= last_basic_block_for_fn (cfun
);
7554 bool check_last_block
= false;
7556 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
7560 check_last_block
= true;
7562 check_last_block
= bitmap_bit_p (blocks
,
7563 EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
->index
);
7565 /* In the last basic block, before epilogue generation, there will be
7566 a fallthru edge to EXIT. Special care is required if the last insn
7567 of the last basic block is a call because make_edge folds duplicate
7568 edges, which would result in the fallthru edge also being marked
7569 fake, which would result in the fallthru edge being removed by
7570 remove_fake_edges, which would result in an invalid CFG.
7572 Moreover, we can't elide the outgoing fake edge, since the block
7573 profiler needs to take this into account in order to solve the minimal
7574 spanning tree in the case that the call doesn't return.
7576 Handle this by adding a dummy instruction in a new last basic block. */
7577 if (check_last_block
)
7579 basic_block bb
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
;
7580 gimple_stmt_iterator gsi
= gsi_last_nondebug_bb (bb
);
7583 if (!gsi_end_p (gsi
))
7586 if (t
&& need_fake_edge_p (t
))
7590 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
7593 gsi_insert_on_edge (e
, gimple_build_nop ());
7594 gsi_commit_edge_inserts ();
7599 /* Now add fake edges to the function exit for any non constant
7600 calls since there is no way that we can determine if they will
7602 for (i
= 0; i
< last_bb
; i
++)
7604 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
7605 gimple_stmt_iterator gsi
;
7606 gimple stmt
, last_stmt
;
7611 if (blocks
&& !bitmap_bit_p (blocks
, i
))
7614 gsi
= gsi_last_nondebug_bb (bb
);
7615 if (!gsi_end_p (gsi
))
7617 last_stmt
= gsi_stmt (gsi
);
7620 stmt
= gsi_stmt (gsi
);
7621 if (need_fake_edge_p (stmt
))
7625 /* The handling above of the final block before the
7626 epilogue should be enough to verify that there is
7627 no edge to the exit block in CFG already.
7628 Calling make_edge in such case would cause us to
7629 mark that edge as fake and remove it later. */
7630 #ifdef ENABLE_CHECKING
7631 if (stmt
== last_stmt
)
7633 e
= find_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
));
7634 gcc_assert (e
== NULL
);
7638 /* Note that the following may create a new basic block
7639 and renumber the existing basic blocks. */
7640 if (stmt
!= last_stmt
)
7642 e
= split_block (bb
, stmt
);
7646 make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), EDGE_FAKE
);
7650 while (!gsi_end_p (gsi
));
7655 verify_flow_info ();
7657 return blocks_split
;
7660 /* Removes edge E and all the blocks dominated by it, and updates dominance
7661 information. The IL in E->src needs to be updated separately.
7662 If dominance info is not available, only the edge E is removed.*/
/* NOTE(review): partial extraction -- declarations of F/EI/I/BI/DF/DF_IDOM,
   the remove_edge calls, and several braces/guards are among the missing
   lines.  Code kept byte-identical; verify against upstream
   gcc/tree-cfg.c.  */
7665 remove_edge_and_dominated_blocks (edge e
)
7667 vec
<basic_block
> bbs_to_remove
= vNULL
;
7668 vec
<basic_block
> bbs_to_fix_dom
= vNULL
;
7672 bool none_removed
= false;
7674 basic_block bb
, dbb
;
7677 if (!dom_info_available_p (CDI_DOMINATORS
))
7683 /* No updating is needed for edges to exit. */
7684 if (e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
7686 if (cfgcleanup_altered_bbs
)
7687 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
7692 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7693 that is not dominated by E->dest, then this set is empty. Otherwise,
7694 all the basic blocks dominated by E->dest are removed.
7696 Also, to DF_IDOM we store the immediate dominators of the blocks in
7697 the dominance frontier of E (i.e., of the successors of the
7698 removed blocks, if there are any, and of E->dest otherwise). */
7699 FOR_EACH_EDGE (f
, ei
, e
->dest
->preds
)
7704 if (!dominated_by_p (CDI_DOMINATORS
, f
->src
, e
->dest
))
7706 none_removed
= true;
7711 df
= BITMAP_ALLOC (NULL
);
7712 df_idom
= BITMAP_ALLOC (NULL
);
7715 bitmap_set_bit (df_idom
,
7716 get_immediate_dominator (CDI_DOMINATORS
, e
->dest
)->index
);
7719 bbs_to_remove
= get_all_dominated_blocks (CDI_DOMINATORS
, e
->dest
);
7720 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
7722 FOR_EACH_EDGE (f
, ei
, bb
->succs
)
7724 if (f
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
7725 bitmap_set_bit (df
, f
->dest
->index
);
7728 FOR_EACH_VEC_ELT (bbs_to_remove
, i
, bb
)
7729 bitmap_clear_bit (df
, bb
->index
);
7731 EXECUTE_IF_SET_IN_BITMAP (df
, 0, i
, bi
)
7733 bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
7734 bitmap_set_bit (df_idom
,
7735 get_immediate_dominator (CDI_DOMINATORS
, bb
)->index
);
7739 if (cfgcleanup_altered_bbs
)
7741 /* Record the set of the altered basic blocks. */
7742 bitmap_set_bit (cfgcleanup_altered_bbs
, e
->src
->index
);
7743 bitmap_ior_into (cfgcleanup_altered_bbs
, df
);
7746 /* Remove E and the cancelled blocks. */
7751 /* Walk backwards so as to get a chance to substitute all
7752 released DEFs into debug stmts. See
7753 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7755 for (i
= bbs_to_remove
.length (); i
-- > 0; )
7756 delete_basic_block (bbs_to_remove
[i
]);
7759 /* Update the dominance information. The immediate dominator may change only
7760 for blocks whose immediate dominator belongs to DF_IDOM:
7762 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7763 removal. Let Z the arbitrary block such that idom(Z) = Y and
7764 Z dominates X after the removal. Before removal, there exists a path P
7765 from Y to X that avoids Z. Let F be the last edge on P that is
7766 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7767 dominates W, and because of P, Z does not dominate W), and W belongs to
7768 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7769 EXECUTE_IF_SET_IN_BITMAP (df_idom
, 0, i
, bi
)
7771 bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
7772 for (dbb
= first_dom_son (CDI_DOMINATORS
, bb
);
7774 dbb
= next_dom_son (CDI_DOMINATORS
, dbb
))
7775 bbs_to_fix_dom
.safe_push (dbb
);
7778 iterate_fix_dominators (CDI_DOMINATORS
, bbs_to_fix_dom
, true);
7781 BITMAP_FREE (df_idom
);
7782 bbs_to_remove
.release ();
7783 bbs_to_fix_dom
.release ();
7786 /* Purge dead EH edges from basic block BB. */
/* NOTE(review): partial extraction -- declarations of E/EI, the early
   return, the ei_next advance, and the final return of CHANGED are among
   the missing lines.  Code kept byte-identical.  Removes EDGE_EH
   successors of BB when its last statement can no longer throw.  */
7789 gimple_purge_dead_eh_edges (basic_block bb
)
7791 bool changed
= false;
7794 gimple stmt
= last_stmt (bb
);
7796 if (stmt
&& stmt_can_throw_internal (stmt
))
7799 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
7801 if (e
->flags
& EDGE_EH
)
7803 remove_edge_and_dominated_blocks (e
);
7813 /* Purge dead EH edges from basic block listed in BLOCKS. */
/* NOTE(review): partial extraction -- the I/BI declarations and the final
   return are among the missing lines.  Code kept byte-identical.
   Applies gimple_purge_dead_eh_edges to each block indexed in BLOCKS.  */
7816 gimple_purge_all_dead_eh_edges (const_bitmap blocks
)
7818 bool changed
= false;
7822 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
7824 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
7826 /* Earlier gimple_purge_dead_eh_edges could have removed
7827 this basic block already. */
7828 gcc_assert (bb
|| changed
);
7830 changed
|= gimple_purge_dead_eh_edges (bb
);
7836 /* Purge dead abnormal call edges from basic block BB. */
/* NOTE(review): partial extraction -- declarations of E/EI, the early
   returns, the ei_next advance, and the final return of CHANGED are among
   the missing lines.  Code kept byte-identical.  Drops EDGE_ABNORMAL
   successors when the block's last statement can no longer make an
   abnormal goto; fallthru edges merely lose the ABNORMAL flag.  */
7839 gimple_purge_dead_abnormal_call_edges (basic_block bb
)
7841 bool changed
= false;
7844 gimple stmt
= last_stmt (bb
);
7846 if (!cfun
->has_nonlocal_label
7847 && !cfun
->calls_setjmp
)
7850 if (stmt
&& stmt_can_make_abnormal_goto (stmt
))
7853 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
7855 if (e
->flags
& EDGE_ABNORMAL
)
7857 if (e
->flags
& EDGE_FALLTHRU
)
7858 e
->flags
&= ~EDGE_ABNORMAL
;
7860 remove_edge_and_dominated_blocks (e
);
7870 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
/* NOTE(review): partial extraction -- the I/BI declarations and the final
   return are among the missing lines.  Code kept byte-identical.  */
7873 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks
)
7875 bool changed
= false;
7879 EXECUTE_IF_SET_IN_BITMAP (blocks
, 0, i
, bi
)
7881 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
7883 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7884 this basic block already. */
7885 gcc_assert (bb
|| changed
);
7887 changed
|= gimple_purge_dead_abnormal_call_edges (bb
);
7893 /* This function is called whenever a new edge is created or
/* CFG hook: when E->dest gains a predecessor, reserve room for a new
   argument on each of its PHI nodes.  */
7897 gimple_execute_on_growing_pred (edge e
)
7899 basic_block bb
= e
->dest
;
7901 if (!gimple_seq_empty_p (phi_nodes (bb
)))
7902 reserve_phi_args_for_new_edge (bb
);
7905 /* This function is called immediately before edge E is removed from
7906 the edge vector E->dest->preds. */
/* CFG hook: drop the PHI arguments associated with the vanishing edge.  */
7909 gimple_execute_on_shrinking_pred (edge e
)
7911 if (!gimple_seq_empty_p (phi_nodes (e
->dest
)))
7912 remove_phi_args (e
);
7915 /*---------------------------------------------------------------------------
7916 Helper functions for Loop versioning
7917 ---------------------------------------------------------------------------*/
7919 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7920 of 'first'. Both of them are dominated by 'new_head' basic block. When
7921 'new_head' was created by 'second's incoming edge it received phi arguments
7922 on the edge by split_edge(). Later, additional edge 'e' was created to
7923 connect 'new_head' and 'first'. Now this routine adds phi args on this
7924 additional edge 'e' that new_head to second edge received as part of edge
/* NOTE(review): partial extraction -- the PHI1/PHI2/DEF declarations are
   among the missing lines.  Code kept byte-identical.  */
7928 gimple_lv_adjust_loop_header_phi (basic_block first
, basic_block second
,
7929 basic_block new_head
, edge e
)
7932 gimple_stmt_iterator psi1
, psi2
;
7934 edge e2
= find_edge (new_head
, second
);
7936 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7937 edge, we should always have an edge from NEW_HEAD to SECOND. */
7938 gcc_assert (e2
!= NULL
);
7940 /* Browse all 'second' basic block phi nodes and add phi args to
7941 edge 'e' for 'first' head. PHI args are always in correct order. */
7943 for (psi2
= gsi_start_phis (second
),
7944 psi1
= gsi_start_phis (first
);
7945 !gsi_end_p (psi2
) && !gsi_end_p (psi1
);
7946 gsi_next (&psi2
), gsi_next (&psi1
))
7948 phi1
= gsi_stmt (psi1
);
7949 phi2
= gsi_stmt (psi2
);
7950 def
= PHI_ARG_DEF (phi2
, e2
->dest_idx
);
7951 add_phi_arg (phi1
, def
, e
, gimple_phi_arg_location_from_edge (phi2
, e2
));
7956 /* Adds a if else statement to COND_BB with condition COND_EXPR.
7957 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
7958 the destination of the ELSE part. */
/* NOTE(review): partial extraction -- the declaration of E0 and the final
   edge flag updates after line 7982 are among the missing lines.  Code
   kept byte-identical; verify against upstream gcc/tree-cfg.c.  */
7961 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED
,
7962 basic_block second_head ATTRIBUTE_UNUSED
,
7963 basic_block cond_bb
, void *cond_e
)
7965 gimple_stmt_iterator gsi
;
7966 gimple new_cond_expr
;
7967 tree cond_expr
= (tree
) cond_e
;
7970 /* Build new conditional expr */
7971 new_cond_expr
= gimple_build_cond_from_tree (cond_expr
,
7972 NULL_TREE
, NULL_TREE
);
7974 /* Add new cond in cond_bb. */
7975 gsi
= gsi_last_bb (cond_bb
);
7976 gsi_insert_after (&gsi
, new_cond_expr
, GSI_NEW_STMT
);
7978 /* Adjust edges appropriately to connect new head with first head
7979 as well as second head. */
7980 e0
= single_succ_edge (cond_bb
);
7981 e0
->flags
&= ~EDGE_FALLTHRU
;
7982 e0
->flags
|= EDGE_FALSE_VALUE
;
7986 /* Do book-keeping of basic block BB for the profile consistency checker.
7987 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
7988 then do post-pass accounting. Store the counting in RECORD. */
/* NOTE(review): partial extraction; code kept byte-identical.  Accumulates
   per-statement size estimates, and time estimates weighted by bb->count
   (read profile) or bb->frequency (guessed profile).  */
7990 gimple_account_profile_record (basic_block bb
, int after_pass
,
7991 struct profile_record
*record
)
7993 gimple_stmt_iterator i
;
7994 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
7996 record
->size
[after_pass
]
7997 += estimate_num_insns (gsi_stmt (i
), &eni_size_weights
);
7998 if (profile_status_for_fn (cfun
) == PROFILE_READ
)
7999 record
->time
[after_pass
]
8000 += estimate_num_insns (gsi_stmt (i
),
8001 &eni_time_weights
) * bb
->count
;
8002 else if (profile_status_for_fn (cfun
) == PROFILE_GUESSED
)
8003 record
->time
[after_pass
]
8004 += estimate_num_insns (gsi_stmt (i
),
8005 &eni_time_weights
) * bb
->frequency
;
/* Table of CFG hook implementations for GIMPLE; installed so the generic
   cfghooks machinery dispatches to the tree/GIMPLE routines above.
   NOTE(review): partial extraction -- the leading name string and the
   trailing entries/closing brace are among the missing lines.  Entries
   kept byte-identical; order must match struct cfg_hooks in cfghooks.h.  */
8009 struct cfg_hooks gimple_cfg_hooks
= {
8011 gimple_verify_flow_info
,
8012 gimple_dump_bb
, /* dump_bb */
8013 gimple_dump_bb_for_graph
, /* dump_bb_for_graph */
8014 create_bb
, /* create_basic_block */
8015 gimple_redirect_edge_and_branch
, /* redirect_edge_and_branch */
8016 gimple_redirect_edge_and_branch_force
, /* redirect_edge_and_branch_force */
8017 gimple_can_remove_branch_p
, /* can_remove_branch_p */
8018 remove_bb
, /* delete_basic_block */
8019 gimple_split_block
, /* split_block */
8020 gimple_move_block_after
, /* move_block_after */
8021 gimple_can_merge_blocks_p
, /* can_merge_blocks_p */
8022 gimple_merge_blocks
, /* merge_blocks */
8023 gimple_predict_edge
, /* predict_edge */
8024 gimple_predicted_by_p
, /* predicted_by_p */
8025 gimple_can_duplicate_bb_p
, /* can_duplicate_block_p */
8026 gimple_duplicate_bb
, /* duplicate_block */
8027 gimple_split_edge
, /* split_edge */
8028 gimple_make_forwarder_block
, /* make_forward_block */
8029 NULL
, /* tidy_fallthru_edge */
8030 NULL
, /* force_nonfallthru */
8031 gimple_block_ends_with_call_p
,/* block_ends_with_call_p */
8032 gimple_block_ends_with_condjump_p
, /* block_ends_with_condjump_p */
8033 gimple_flow_call_edges_add
, /* flow_call_edges_add */
8034 gimple_execute_on_growing_pred
, /* execute_on_growing_pred */
8035 gimple_execute_on_shrinking_pred
, /* execute_on_shrinking_pred */
8036 gimple_duplicate_loop_to_header_edge
, /* duplicate loop for trees */
8037 gimple_lv_add_condition_to_bb
, /* lv_add_condition_to_bb */
8038 gimple_lv_adjust_loop_header_phi
, /* lv_adjust_loop_header_phi*/
8039 extract_true_false_edges_from_block
, /* extract_cond_bb_edges */
8040 flush_pending_stmts
, /* flush_pending_stmts */
8041 gimple_empty_block_p
, /* block_empty_p */
8042 gimple_split_block_before_cond_jump
, /* split_block_before_cond_jump */
8043 gimple_account_profile_record
,
8047 /* Split all critical edges. */
8050 split_critical_edges (void)
8056 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8057 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8058 mappings around the calls to split_edge. */
8059 start_recording_case_labels ();
8060 FOR_ALL_BB_FN (bb
, cfun
)
8062 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
8064 if (EDGE_CRITICAL_P (e
) && !(e
->flags
& EDGE_ABNORMAL
))
8066 /* PRE inserts statements to edges and expects that
8067 since split_critical_edges was done beforehand, committing edge
8068 insertions will not split more edges. In addition to critical
8069 edges we must split edges that have multiple successors and
8070 end by control flow statements, such as RESX.
8071 Go ahead and split them too. This matches the logic in
8072 gimple_find_edge_insert_loc. */
8073 else if ((!single_pred_p (e
->dest
)
8074 || !gimple_seq_empty_p (phi_nodes (e
->dest
))
8075 || e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
8076 && e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
8077 && !(e
->flags
& EDGE_ABNORMAL
))
8079 gimple_stmt_iterator gsi
;
8081 gsi
= gsi_last_bb (e
->src
);
8082 if (!gsi_end_p (gsi
)
8083 && stmt_ends_bb_p (gsi_stmt (gsi
))
8084 && (gimple_code (gsi_stmt (gsi
)) != GIMPLE_RETURN
8085 && !gimple_call_builtin_p (gsi_stmt (gsi
),
8091 end_recording_case_labels ();
8097 const pass_data pass_data_split_crit_edges
=
8099 GIMPLE_PASS
, /* type */
8100 "crited", /* name */
8101 OPTGROUP_NONE
, /* optinfo_flags */
8102 true, /* has_execute */
8103 TV_TREE_SPLIT_EDGES
, /* tv_id */
8104 PROP_cfg
, /* properties_required */
8105 PROP_no_crit_edges
, /* properties_provided */
8106 0, /* properties_destroyed */
8107 0, /* todo_flags_start */
8108 0, /* todo_flags_finish */
8111 class pass_split_crit_edges
: public gimple_opt_pass
8114 pass_split_crit_edges (gcc::context
*ctxt
)
8115 : gimple_opt_pass (pass_data_split_crit_edges
, ctxt
)
8118 /* opt_pass methods: */
8119 virtual unsigned int execute (function
*) { return split_critical_edges (); }
8121 opt_pass
* clone () { return new pass_split_crit_edges (m_ctxt
); }
8122 }; // class pass_split_crit_edges
8127 make_pass_split_crit_edges (gcc::context
*ctxt
)
8129 return new pass_split_crit_edges (ctxt
);
8133 /* Build a ternary operation and gimplify it. Emit code before GSI.
8134 Return the gimple_val holding the result. */
8137 gimplify_build3 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
8138 tree type
, tree a
, tree b
, tree c
)
8141 location_t loc
= gimple_location (gsi_stmt (*gsi
));
8143 ret
= fold_build3_loc (loc
, code
, type
, a
, b
, c
);
8146 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8150 /* Build a binary operation and gimplify it. Emit code before GSI.
8151 Return the gimple_val holding the result. */
8154 gimplify_build2 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
8155 tree type
, tree a
, tree b
)
8159 ret
= fold_build2_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
, b
);
8162 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8166 /* Build a unary operation and gimplify it. Emit code before GSI.
8167 Return the gimple_val holding the result. */
8170 gimplify_build1 (gimple_stmt_iterator
*gsi
, enum tree_code code
, tree type
,
8175 ret
= fold_build1_loc (gimple_location (gsi_stmt (*gsi
)), code
, type
, a
);
8178 return force_gimple_operand_gsi (gsi
, ret
, true, NULL
, true,
8184 /* Given a basic block B which ends with a conditional and has
8185 precisely two successors, determine which of the edges is taken if
8186 the conditional is true and which is taken if the conditional is
8187 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8190 extract_true_false_edges_from_block (basic_block b
,
8194 edge e
= EDGE_SUCC (b
, 0);
8196 if (e
->flags
& EDGE_TRUE_VALUE
)
8199 *false_edge
= EDGE_SUCC (b
, 1);
8204 *true_edge
= EDGE_SUCC (b
, 1);
8208 /* Emit return warnings. */
8212 const pass_data pass_data_warn_function_return
=
8214 GIMPLE_PASS
, /* type */
8215 "*warn_function_return", /* name */
8216 OPTGROUP_NONE
, /* optinfo_flags */
8217 true, /* has_execute */
8218 TV_NONE
, /* tv_id */
8219 PROP_cfg
, /* properties_required */
8220 0, /* properties_provided */
8221 0, /* properties_destroyed */
8222 0, /* todo_flags_start */
8223 0, /* todo_flags_finish */
8226 class pass_warn_function_return
: public gimple_opt_pass
8229 pass_warn_function_return (gcc::context
*ctxt
)
8230 : gimple_opt_pass (pass_data_warn_function_return
, ctxt
)
8233 /* opt_pass methods: */
8234 virtual unsigned int execute (function
*);
8236 }; // class pass_warn_function_return
8239 pass_warn_function_return::execute (function
*fun
)
8241 source_location location
;
8246 if (!targetm
.warn_func_return (fun
->decl
))
8249 /* If we have a path to EXIT, then we do return. */
8250 if (TREE_THIS_VOLATILE (fun
->decl
)
8251 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
) > 0)
8253 location
= UNKNOWN_LOCATION
;
8254 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
)
8256 last
= last_stmt (e
->src
);
8257 if ((gimple_code (last
) == GIMPLE_RETURN
8258 || gimple_call_builtin_p (last
, BUILT_IN_RETURN
))
8259 && (location
= gimple_location (last
)) != UNKNOWN_LOCATION
)
8262 if (location
== UNKNOWN_LOCATION
)
8263 location
= cfun
->function_end_locus
;
8264 warning_at (location
, 0, "%<noreturn%> function does return");
8267 /* If we see "return;" in some basic block, then we do reach the end
8268 without returning a value. */
8269 else if (warn_return_type
8270 && !TREE_NO_WARNING (fun
->decl
)
8271 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
) > 0
8272 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun
->decl
))))
8274 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
)
8276 gimple last
= last_stmt (e
->src
);
8277 if (gimple_code (last
) == GIMPLE_RETURN
8278 && gimple_return_retval (last
) == NULL
8279 && !gimple_no_warning_p (last
))
8281 location
= gimple_location (last
);
8282 if (location
== UNKNOWN_LOCATION
)
8283 location
= fun
->function_end_locus
;
8284 warning_at (location
, OPT_Wreturn_type
, "control reaches end of non-void function");
8285 TREE_NO_WARNING (fun
->decl
) = 1;
8296 make_pass_warn_function_return (gcc::context
*ctxt
)
8298 return new pass_warn_function_return (ctxt
);
8301 /* Walk a gimplified function and warn for functions whose return value is
8302 ignored and attribute((warn_unused_result)) is set. This is done before
8303 inlining, so we don't have to worry about that. */
8306 do_warn_unused_result (gimple_seq seq
)
8309 gimple_stmt_iterator i
;
8311 for (i
= gsi_start (seq
); !gsi_end_p (i
); gsi_next (&i
))
8313 gimple g
= gsi_stmt (i
);
8315 switch (gimple_code (g
))
8318 do_warn_unused_result (gimple_bind_body (g
));
8321 do_warn_unused_result (gimple_try_eval (g
));
8322 do_warn_unused_result (gimple_try_cleanup (g
));
8325 do_warn_unused_result (gimple_catch_handler (g
));
8327 case GIMPLE_EH_FILTER
:
8328 do_warn_unused_result (gimple_eh_filter_failure (g
));
8332 if (gimple_call_lhs (g
))
8334 if (gimple_call_internal_p (g
))
8337 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8338 LHS. All calls whose value is ignored should be
8339 represented like this. Look for the attribute. */
8340 fdecl
= gimple_call_fndecl (g
);
8341 ftype
= gimple_call_fntype (g
);
8343 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype
)))
8345 location_t loc
= gimple_location (g
);
8348 warning_at (loc
, OPT_Wunused_result
,
8349 "ignoring return value of %qD, "
8350 "declared with attribute warn_unused_result",
8353 warning_at (loc
, OPT_Wunused_result
,
8354 "ignoring return value of function "
8355 "declared with attribute warn_unused_result");
8360 /* Not a container, not a call, or a call whose value is used. */
8368 const pass_data pass_data_warn_unused_result
=
8370 GIMPLE_PASS
, /* type */
8371 "*warn_unused_result", /* name */
8372 OPTGROUP_NONE
, /* optinfo_flags */
8373 true, /* has_execute */
8374 TV_NONE
, /* tv_id */
8375 PROP_gimple_any
, /* properties_required */
8376 0, /* properties_provided */
8377 0, /* properties_destroyed */
8378 0, /* todo_flags_start */
8379 0, /* todo_flags_finish */
8382 class pass_warn_unused_result
: public gimple_opt_pass
8385 pass_warn_unused_result (gcc::context
*ctxt
)
8386 : gimple_opt_pass (pass_data_warn_unused_result
, ctxt
)
8389 /* opt_pass methods: */
8390 virtual bool gate (function
*) { return flag_warn_unused_result
; }
8391 virtual unsigned int execute (function
*)
8393 do_warn_unused_result (gimple_body (current_function_decl
));
8397 }; // class pass_warn_unused_result
8402 make_pass_warn_unused_result (gcc::context
*ctxt
)
8404 return new pass_warn_unused_result (ctxt
);
8407 /* IPA passes, compilation of earlier functions or inlining
8408 might have changed some properties, such as marked functions nothrow,
8409 pure, const or noreturn.
8410 Remove redundant edges and basic blocks, and create new ones if necessary.
8412 This pass can't be executed as stand alone pass from pass manager, because
8413 in between inlining and this fixup the verify_flow_info would fail. */
8416 execute_fixup_cfg (void)
8419 gimple_stmt_iterator gsi
;
8421 gcov_type count_scale
;
8426 = GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl
)->count
,
8427 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
);
8429 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
=
8430 cgraph_get_node (current_function_decl
)->count
;
8431 EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
=
8432 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
,
8435 FOR_EACH_EDGE (e
, ei
, ENTRY_BLOCK_PTR_FOR_FN (cfun
)->succs
)
8436 e
->count
= apply_scale (e
->count
, count_scale
);
8438 FOR_EACH_BB_FN (bb
, cfun
)
8440 bb
->count
= apply_scale (bb
->count
, count_scale
);
8441 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);)
8443 gimple stmt
= gsi_stmt (gsi
);
8444 tree decl
= is_gimple_call (stmt
)
8445 ? gimple_call_fndecl (stmt
)
8449 int flags
= gimple_call_flags (stmt
);
8450 if (flags
& (ECF_CONST
| ECF_PURE
| ECF_LOOPING_CONST_OR_PURE
))
8452 if (gimple_purge_dead_abnormal_call_edges (bb
))
8453 todo
|= TODO_cleanup_cfg
;
8455 if (gimple_in_ssa_p (cfun
))
8457 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
8462 if (flags
& ECF_NORETURN
8463 && fixup_noreturn_call (stmt
))
8464 todo
|= TODO_cleanup_cfg
;
8467 /* Remove stores to variables we marked write-only.
8468 Keep access when store has side effect, i.e. in case when source
8470 if (gimple_store_p (stmt
)
8471 && !gimple_has_side_effects (stmt
))
8473 tree lhs
= get_base_address (gimple_get_lhs (stmt
));
8475 if (TREE_CODE (lhs
) == VAR_DECL
8476 && (TREE_STATIC (lhs
) || DECL_EXTERNAL (lhs
))
8477 && varpool_get_node (lhs
)->writeonly
)
8479 unlink_stmt_vdef (stmt
);
8480 gsi_remove (&gsi
, true);
8481 release_defs (stmt
);
8482 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
8486 /* For calls we can simply remove LHS when it is known
8487 to be write-only. */
8488 if (is_gimple_call (stmt
)
8489 && gimple_get_lhs (stmt
))
8491 tree lhs
= get_base_address (gimple_get_lhs (stmt
));
8493 if (TREE_CODE (lhs
) == VAR_DECL
8494 && (TREE_STATIC (lhs
) || DECL_EXTERNAL (lhs
))
8495 && varpool_get_node (lhs
)->writeonly
)
8497 gimple_call_set_lhs (stmt
, NULL
);
8499 todo
|= TODO_update_ssa
| TODO_cleanup_cfg
;
8503 if (maybe_clean_eh_stmt (stmt
)
8504 && gimple_purge_dead_eh_edges (bb
))
8505 todo
|= TODO_cleanup_cfg
;
8509 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
8510 e
->count
= apply_scale (e
->count
, count_scale
);
8512 /* If we have a basic block with no successors that does not
8513 end with a control statement or a noreturn call end it with
8514 a call to __builtin_unreachable. This situation can occur
8515 when inlining a noreturn call that does in fact return. */
8516 if (EDGE_COUNT (bb
->succs
) == 0)
8518 gimple stmt
= last_stmt (bb
);
8520 || (!is_ctrl_stmt (stmt
)
8521 && (!is_gimple_call (stmt
)
8522 || (gimple_call_flags (stmt
) & ECF_NORETURN
) == 0)))
8524 stmt
= gimple_build_call
8525 (builtin_decl_implicit (BUILT_IN_UNREACHABLE
), 0);
8526 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
8527 gsi_insert_after (&gsi
, stmt
, GSI_NEW_STMT
);
8531 if (count_scale
!= REG_BR_PROB_BASE
)
8532 compute_function_frequency ();
8534 /* We just processed all calls. */
8535 if (cfun
->gimple_df
)
8536 vec_free (MODIFIED_NORETURN_CALLS (cfun
));
8538 /* Dump a textual representation of the flowgraph. */
8540 gimple_dump_cfg (dump_file
, dump_flags
);
8543 && (todo
& TODO_cleanup_cfg
))
8544 loops_state_set (LOOPS_NEED_FIXUP
);
8551 const pass_data pass_data_fixup_cfg
=
8553 GIMPLE_PASS
, /* type */
8554 "*free_cfg_annotations", /* name */
8555 OPTGROUP_NONE
, /* optinfo_flags */
8556 true, /* has_execute */
8557 TV_NONE
, /* tv_id */
8558 PROP_cfg
, /* properties_required */
8559 0, /* properties_provided */
8560 0, /* properties_destroyed */
8561 0, /* todo_flags_start */
8562 0, /* todo_flags_finish */
8565 class pass_fixup_cfg
: public gimple_opt_pass
8568 pass_fixup_cfg (gcc::context
*ctxt
)
8569 : gimple_opt_pass (pass_data_fixup_cfg
, ctxt
)
8572 /* opt_pass methods: */
8573 opt_pass
* clone () { return new pass_fixup_cfg (m_ctxt
); }
8574 virtual unsigned int execute (function
*) { return execute_fixup_cfg (); }
8576 }; // class pass_fixup_cfg
8581 make_pass_fixup_cfg (gcc::context
*ctxt
)
8583 return new pass_fixup_cfg (ctxt
);
8586 /* Garbage collection support for edge_def. */
8588 extern void gt_ggc_mx (tree
&);
8589 extern void gt_ggc_mx (gimple
&);
8590 extern void gt_ggc_mx (rtx
&);
8591 extern void gt_ggc_mx (basic_block
&);
8594 gt_ggc_mx (edge_def
*e
)
8596 tree block
= LOCATION_BLOCK (e
->goto_locus
);
8598 gt_ggc_mx (e
->dest
);
8599 if (current_ir_type () == IR_GIMPLE
)
8600 gt_ggc_mx (e
->insns
.g
);
8602 gt_ggc_mx (e
->insns
.r
);
8606 /* PCH support for edge_def. */
8608 extern void gt_pch_nx (tree
&);
8609 extern void gt_pch_nx (gimple
&);
8610 extern void gt_pch_nx (rtx
&);
8611 extern void gt_pch_nx (basic_block
&);
8614 gt_pch_nx (edge_def
*e
)
8616 tree block
= LOCATION_BLOCK (e
->goto_locus
);
8618 gt_pch_nx (e
->dest
);
8619 if (current_ir_type () == IR_GIMPLE
)
8620 gt_pch_nx (e
->insns
.g
);
8622 gt_pch_nx (e
->insns
.r
);
8627 gt_pch_nx (edge_def
*e
, gt_pointer_operator op
, void *cookie
)
8629 tree block
= LOCATION_BLOCK (e
->goto_locus
);
8630 op (&(e
->src
), cookie
);
8631 op (&(e
->dest
), cookie
);
8632 if (current_ir_type () == IR_GIMPLE
)
8633 op (&(e
->insns
.g
), cookie
);
8635 op (&(e
->insns
.r
), cookie
);
8636 op (&(block
), cookie
);