6de9cd9a 1/* Control flow functions for trees.
135a171d 2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
56e84019 3 Free Software Foundation, Inc.
6de9cd9a
DN
4 Contributed by Diego Novillo <dnovillo@redhat.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify
9it under the terms of the GNU General Public License as published by
9dcd6f09 10the Free Software Foundation; either version 3, or (at your option)
6de9cd9a
DN
11any later version.
12
13GCC is distributed in the hope that it will be useful,
14but WITHOUT ANY WARRANTY; without even the implied warranty of
15MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16GNU General Public License for more details.
17
18You should have received a copy of the GNU General Public License
9dcd6f09
NC
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
6de9cd9a
DN
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "tree.h"
27#include "rtl.h"
28#include "tm_p.h"
29#include "hard-reg-set.h"
30#include "basic-block.h"
31#include "output.h"
6de9cd9a
DN
32#include "flags.h"
33#include "function.h"
34#include "expr.h"
35#include "ggc.h"
36#include "langhooks.h"
37#include "diagnostic.h"
38#include "tree-flow.h"
39#include "timevar.h"
40#include "tree-dump.h"
41#include "tree-pass.h"
42#include "toplev.h"
43#include "except.h"
44#include "cfgloop.h"
42759f1e 45#include "cfglayout.h"
9af0df6b 46#include "tree-ssa-propagate.h"
6946b3f7 47#include "value-prof.h"
4437b50d 48#include "pointer-set.h"
917948d3 49#include "tree-inline.h"
6de9cd9a
DN
50
51/* This file contains functions for building the Control Flow Graph (CFG)
52 for a function tree. */
53
54/* Local declarations. */
55
56/* Initial capacity for the basic block array. */
57static const int initial_cfg_capacity = 20;
58
d6be0d7f
JL
59/* This hash table allows us to efficiently look up all CASE_LABEL_EXPRs
60 which use a particular edge. The CASE_LABEL_EXPRs are chained together
61 via their TREE_CHAIN field, which we clear after we're done with the
62 hash table to prevent problems with duplication of SWITCH_EXPRs.
92b6dff3 63
d6be0d7f
JL
64 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
65 update the case vector in response to edge redirections.
92b6dff3 66
d6be0d7f
JL
67 Right now this table is set up and torn down at key points in the
68 compilation process. It would be nice if we could make the table
69 more persistent. The key is getting notification of changes to
70 the CFG (particularly edge removal, creation and redirection). */
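
/* An illustrative sketch (not part of the original comment): for

     switch (x) { case 1: case 2: goto L; default: goto M; }

   the one outgoing edge to the block holding L maps to the chain of both
   CASE_LABEL_EXPRs for 1 and 2, so redirecting that single edge lets us
   rewrite every affected case label at once.  */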
71
15814ba0 72static struct pointer_map_t *edge_to_cases;
92b6dff3 73
6de9cd9a
DN
74/* CFG statistics. */
75struct cfg_stats_d
76{
77 long num_merged_labels;
78};
79
80static struct cfg_stats_d cfg_stats;
81
82/* Nonzero if we found a computed goto while building basic blocks. */
83static bool found_computed_goto;
84
85/* Basic blocks and flowgraphs. */
86static basic_block create_bb (void *, void *, basic_block);
6de9cd9a
DN
87static void make_blocks (tree);
88static void factor_computed_gotos (void);
6de9cd9a
DN
89
90/* Edges. */
91static void make_edges (void);
6de9cd9a
DN
92static void make_cond_expr_edges (basic_block);
93static void make_switch_expr_edges (basic_block);
94static void make_goto_expr_edges (basic_block);
95static edge tree_redirect_edge_and_branch (edge, basic_block);
96static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
c2924966 97static unsigned int split_critical_edges (void);
6de9cd9a
DN
98
99/* Various helpers. */
6ea2b70d 100static inline bool stmt_starts_bb_p (const_tree, const_tree);
6de9cd9a
DN
101static int tree_verify_flow_info (void);
102static void tree_make_forwarder_block (edge);
6de9cd9a 103static void tree_cfg2vcg (FILE *);
0a4fe58f 104static inline void change_bb_for_stmt (tree t, basic_block bb);
c6c6b7aa 105static bool computed_goto_p (const_tree);
6de9cd9a
DN
106
107/* Flowgraph optimization and cleanup. */
108static void tree_merge_blocks (basic_block, basic_block);
b48d0358 109static bool tree_can_merge_blocks_p (basic_block, basic_block);
6de9cd9a 110static void remove_bb (basic_block);
be477406 111static edge find_taken_edge_computed_goto (basic_block, tree);
6de9cd9a
DN
112static edge find_taken_edge_cond_expr (basic_block, tree);
113static edge find_taken_edge_switch_expr (basic_block, tree);
114static tree find_case_label_for_value (tree, tree);
6de9cd9a 115
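/* Build an empty control flow graph for function FN: allocate and clear
   the basic block and label-to-block arrays and link ENTRY_BLOCK and
   EXIT_BLOCK to each other.  */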
a930a4ef 116void
9defb1fe 117init_empty_tree_cfg_for_function (struct function *fn)
a930a4ef
JH
118{
119 /* Initialize the basic block array. */
9defb1fe
DN
120 init_flow (fn);
121 profile_status_for_function (fn) = PROFILE_ABSENT;
122 n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
123 last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
124 basic_block_info_for_function (fn)
125 = VEC_alloc (basic_block, gc, initial_cfg_capacity);
126 VEC_safe_grow_cleared (basic_block, gc,
127 basic_block_info_for_function (fn),
a590ac65 128 initial_cfg_capacity);
a930a4ef
JH
129
130 /* Build a mapping of labels to their associated blocks. */
9defb1fe
DN
131 label_to_block_map_for_function (fn)
132 = VEC_alloc (basic_block, gc, initial_cfg_capacity);
133 VEC_safe_grow_cleared (basic_block, gc,
134 label_to_block_map_for_function (fn),
a590ac65 135 initial_cfg_capacity);
a930a4ef 136
9defb1fe
DN
137 SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
138 ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
139 SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
140 EXIT_BLOCK_PTR_FOR_FUNCTION (fn));
141
142 ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
143 = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
144 EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
145 = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
146}
147
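/* Likewise, but for the current function.  */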
148void
149init_empty_tree_cfg (void)
150{
151 init_empty_tree_cfg_for_function (cfun);
a930a4ef 152}
6de9cd9a
DN
153
154/*---------------------------------------------------------------------------
155 Create basic blocks
156---------------------------------------------------------------------------*/
157
158/* Entry point to the CFG builder for trees. TP points to the list of
159 statements to be added to the flowgraph. */
160
161static void
162build_tree_cfg (tree *tp)
163{
164 /* Register specific tree functions. */
165 tree_register_cfg_hooks ();
166
6de9cd9a
DN
167 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
168
a930a4ef 169 init_empty_tree_cfg ();
6de9cd9a
DN
170
171 found_computed_goto = 0;
172 make_blocks (*tp);
173
174 /* Computed gotos are hell to deal with, especially if there are
175 lots of them with a large number of destinations. So we factor
176 them to a common computed goto location before we build the
177 edge list. After we convert back to normal form, we will un-factor
178 the computed gotos since factoring introduces an unwanted jump. */
179 if (found_computed_goto)
180 factor_computed_gotos ();
181
f0b698c1 182 /* Make sure there is always at least one block, even if it's empty. */
24bd1a0b 183 if (n_basic_blocks == NUM_FIXED_BLOCKS)
6de9cd9a
DN
184 create_empty_bb (ENTRY_BLOCK_PTR);
185
6de9cd9a 186 /* Adjust the size of the array. */
68f9b844 187 if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
a590ac65 188 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);
6de9cd9a 189
f667741c
SB
190 /* To speed up statement iterator walks, we first purge dead labels. */
191 cleanup_dead_labels ();
192
193 /* Group case nodes to reduce the number of edges.
194 We do this after cleaning up dead labels because otherwise we miss
195 a lot of obvious case merging opportunities. */
196 group_case_labels ();
197
6de9cd9a
DN
198 /* Create the edges of the flowgraph. */
199 make_edges ();
8b11009b 200 cleanup_dead_labels ();
6de9cd9a
DN
201
202 /* Debugging dumps. */
203
204 /* Write the flowgraph to a VCG file. */
205 {
206 int local_dump_flags;
10d22567
ZD
207 FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
208 if (vcg_file)
6de9cd9a 209 {
10d22567
ZD
210 tree_cfg2vcg (vcg_file);
211 dump_end (TDI_vcg, vcg_file);
6de9cd9a
DN
212 }
213 }
214
81cfbbc2
JH
215#ifdef ENABLE_CHECKING
216 verify_stmts ();
217#endif
218
6de9cd9a
DN
219 /* Dump a textual representation of the flowgraph. */
220 if (dump_file)
221 dump_tree_cfg (dump_file, dump_flags);
222}
223
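/* Entry point of the CFG construction pass: build the flowgraph for the
   current function's body.  */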
c2924966 224static unsigned int
6de9cd9a
DN
225execute_build_cfg (void)
226{
227 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
c2924966 228 return 0;
6de9cd9a
DN
229}
230
8ddbbcae 231struct gimple_opt_pass pass_build_cfg =
6de9cd9a 232{
8ddbbcae
JH
233 {
234 GIMPLE_PASS,
6de9cd9a
DN
235 "cfg", /* name */
236 NULL, /* gate */
237 execute_build_cfg, /* execute */
238 NULL, /* sub */
239 NULL, /* next */
240 0, /* static_pass_number */
241 TV_TREE_CFG, /* tv_id */
242 PROP_gimple_leh, /* properties_required */
243 PROP_cfg, /* properties_provided */
244 0, /* properties_destroyed */
245 0, /* todo_flags_start */
8ddbbcae
JH
246 TODO_verify_stmts | TODO_cleanup_cfg /* todo_flags_finish */
247 }
6de9cd9a
DN
248};
249
6531d1be 250/* Search the CFG for any computed gotos. If found, factor them to a
6de9cd9a 251 common computed goto site. Also record the location of that site so
6531d1be 252 that we can un-factor the gotos after we have converted back to
6de9cd9a
DN
253 normal form. */
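
/* A hand-written illustration (not generated compiler output): two
   computed gotos

       goto *p;    ...    goto *q;

   are rewritten as

       gotovar = p; goto <factored>;   ...   gotovar = q; goto <factored>;

   with a single factored block

       <factored>: goto *gotovar;

   so that only one block carries the full fan-out of abnormal edges.  */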
254
255static void
256factor_computed_gotos (void)
257{
258 basic_block bb;
259 tree factored_label_decl = NULL;
260 tree var = NULL;
261 tree factored_computed_goto_label = NULL;
262 tree factored_computed_goto = NULL;
263
264 /* We know there are one or more computed gotos in this function.
265 Examine the last statement in each basic block to see if the block
266 ends with a computed goto. */
6531d1be 267
6de9cd9a
DN
268 FOR_EACH_BB (bb)
269 {
270 block_stmt_iterator bsi = bsi_last (bb);
271 tree last;
272
273 if (bsi_end_p (bsi))
274 continue;
275 last = bsi_stmt (bsi);
276
277 /* Ignore the computed goto we create when we factor the original
278 computed gotos. */
279 if (last == factored_computed_goto)
280 continue;
281
282 /* If the last statement is a computed goto, factor it. */
283 if (computed_goto_p (last))
284 {
285 tree assignment;
286
287 /* The first time we find a computed goto we need to create
288 the factored goto block and the variable each original
289 computed goto will use for their goto destination. */
290 if (! factored_computed_goto)
291 {
292 basic_block new_bb = create_empty_bb (bb);
293 block_stmt_iterator new_bsi = bsi_start (new_bb);
294
295 /* Create the destination of the factored goto. Each original
296 computed goto will put its desired destination into this
297 variable and jump to the label we create immediately
298 below. */
299 var = create_tmp_var (ptr_type_node, "gotovar");
300
301 /* Build a label for the new block which will contain the
302 factored computed goto. */
303 factored_label_decl = create_artificial_label ();
304 factored_computed_goto_label
305 = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
306 bsi_insert_after (&new_bsi, factored_computed_goto_label,
307 BSI_NEW_STMT);
308
309 /* Build our new computed goto. */
310 factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
311 bsi_insert_after (&new_bsi, factored_computed_goto,
312 BSI_NEW_STMT);
313 }
314
315 /* Copy the original computed goto's destination into VAR. */
939409af
RS
316 assignment = build_gimple_modify_stmt (var,
317 GOTO_DESTINATION (last));
6de9cd9a
DN
318 bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
319
320 /* And re-vector the computed goto to the new destination. */
321 GOTO_DESTINATION (last) = factored_label_decl;
322 }
323 }
324}
325
326
6de9cd9a
DN
327/* Build a flowgraph for the statement_list STMT_LIST. */
328
329static void
330make_blocks (tree stmt_list)
331{
332 tree_stmt_iterator i = tsi_start (stmt_list);
333 tree stmt = NULL;
334 bool start_new_block = true;
335 bool first_stmt_of_list = true;
336 basic_block bb = ENTRY_BLOCK_PTR;
337
338 while (!tsi_end_p (i))
339 {
340 tree prev_stmt;
341
342 prev_stmt = stmt;
343 stmt = tsi_stmt (i);
344
345 /* If the statement starts a new basic block or if we have determined
346 in a previous pass that we need to create a new block for STMT, do
347 so now. */
348 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
349 {
350 if (!first_stmt_of_list)
351 stmt_list = tsi_split_statement_list_before (&i);
352 bb = create_basic_block (stmt_list, NULL, bb);
353 start_new_block = false;
354 }
355
356 /* Now add STMT to BB and create the subgraphs for special statement
357 codes. */
358 set_bb_for_stmt (stmt, bb);
359
360 if (computed_goto_p (stmt))
361 found_computed_goto = true;
362
363 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
364 next iteration. */
365 if (stmt_ends_bb_p (stmt))
366 start_new_block = true;
367
368 tsi_next (&i);
369 first_stmt_of_list = false;
370 }
371}
372
373
374/* Create and return a new empty basic block after bb AFTER. */
375
376static basic_block
377create_bb (void *h, void *e, basic_block after)
378{
379 basic_block bb;
380
1e128c5f 381 gcc_assert (!e);
6de9cd9a 382
27fd69fa
KH
383 /* Create and initialize a new basic block. Since alloc_block uses
384 ggc_alloc_cleared to allocate a basic block, we do not have to
385 clear the newly allocated basic block here. */
6de9cd9a 386 bb = alloc_block ();
6de9cd9a
DN
387
388 bb->index = last_basic_block;
389 bb->flags = BB_NEW;
7506e1cb
ZD
390 bb->il.tree = GGC_CNEW (struct tree_bb_info);
391 set_bb_stmt_list (bb, h ? (tree) h : alloc_stmt_list ());
6de9cd9a
DN
392
393 /* Add the new block to the linked list of blocks. */
394 link_block (bb, after);
395
396 /* Grow the basic block array if needed. */
68f9b844 397 if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
6de9cd9a
DN
398 {
399 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
a590ac65 400 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
6de9cd9a
DN
401 }
402
403 /* Add the newly created block to the array. */
68f9b844 404 SET_BASIC_BLOCK (last_basic_block, bb);
6de9cd9a 405
6de9cd9a
DN
406 n_basic_blocks++;
407 last_basic_block++;
408
6de9cd9a
DN
409 return bb;
410}
411
412
413/*---------------------------------------------------------------------------
414 Edge creation
415---------------------------------------------------------------------------*/
416
fca01525
KH
417/* Fold COND_EXPR_COND of each COND_EXPR. */
418
e21aff8a 419void
fca01525
KH
420fold_cond_expr_cond (void)
421{
422 basic_block bb;
423
424 FOR_EACH_BB (bb)
425 {
426 tree stmt = last_stmt (bb);
427
428 if (stmt
429 && TREE_CODE (stmt) == COND_EXPR)
430 {
6ac01510
ILT
431 tree cond;
432 bool zerop, onep;
433
434 fold_defer_overflow_warnings ();
435 cond = fold (COND_EXPR_COND (stmt));
436 zerop = integer_zerop (cond);
437 onep = integer_onep (cond);
e233ac97 438 fold_undefer_overflow_warnings (zerop || onep,
4df28528 439 stmt,
6ac01510
ILT
440 WARN_STRICT_OVERFLOW_CONDITIONAL);
441 if (zerop)
4bafe847 442 COND_EXPR_COND (stmt) = boolean_false_node;
6ac01510 443 else if (onep)
4bafe847 444 COND_EXPR_COND (stmt) = boolean_true_node;
fca01525
KH
445 }
446 }
447}
448
6de9cd9a
DN
449/* Join all the blocks in the flowgraph. */
450
451static void
452make_edges (void)
453{
454 basic_block bb;
bed575d5 455 struct omp_region *cur_region = NULL;
6de9cd9a
DN
456
457 /* Create an edge from entry to the first block with executable
458 statements in it. */
24bd1a0b 459 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
6de9cd9a 460
adb35797 461 /* Traverse the basic block array placing edges. */
6de9cd9a
DN
462 FOR_EACH_BB (bb)
463 {
6de9cd9a 464 tree last = last_stmt (bb);
56e84019 465 bool fallthru;
6de9cd9a 466
56e84019 467 if (last)
6de9cd9a 468 {
bed575d5
RS
469 enum tree_code code = TREE_CODE (last);
470 switch (code)
56e84019
RH
471 {
472 case GOTO_EXPR:
473 make_goto_expr_edges (bb);
474 fallthru = false;
475 break;
476 case RETURN_EXPR:
477 make_edge (bb, EXIT_BLOCK_PTR, 0);
478 fallthru = false;
479 break;
480 case COND_EXPR:
481 make_cond_expr_edges (bb);
482 fallthru = false;
483 break;
484 case SWITCH_EXPR:
485 make_switch_expr_edges (bb);
486 fallthru = false;
487 break;
488 case RESX_EXPR:
489 make_eh_edges (last);
490 fallthru = false;
491 break;
492
493 case CALL_EXPR:
494 /* If this function receives a nonlocal goto, then we need to
495 make edges from this call site to all the nonlocal goto
496 handlers. */
4f6c2131
EB
497 if (tree_can_make_abnormal_goto (last))
498 make_abnormal_goto_edges (bb, true);
6de9cd9a 499
56e84019
RH
500 /* If this statement has reachable exception handlers, then
501 create abnormal edges to them. */
502 make_eh_edges (last);
503
504 /* Some calls are known not to return. */
505 fallthru = !(call_expr_flags (last) & ECF_NORETURN);
506 break;
507
508 case MODIFY_EXPR:
07beea0d
AH
509 gcc_unreachable ();
510
511 case GIMPLE_MODIFY_STMT:
56e84019
RH
512 if (is_ctrl_altering_stmt (last))
513 {
07beea0d
AH
514 /* A GIMPLE_MODIFY_STMT may have a CALL_EXPR on its RHS and
515 the CALL_EXPR may have an abnormal edge. Search the RHS
516 for this case and create any required edges. */
4f6c2131
EB
517 if (tree_can_make_abnormal_goto (last))
518 make_abnormal_goto_edges (bb, true);
56e84019
RH
519
520 make_eh_edges (last);
521 }
522 fallthru = true;
523 break;
524
525 case OMP_PARALLEL:
a68ab351 526 case OMP_TASK:
56e84019
RH
527 case OMP_FOR:
528 case OMP_SINGLE:
529 case OMP_MASTER:
530 case OMP_ORDERED:
531 case OMP_CRITICAL:
532 case OMP_SECTION:
bed575d5 533 cur_region = new_omp_region (bb, code, cur_region);
56e84019
RH
534 fallthru = true;
535 break;
536
7e2df4a1 537 case OMP_SECTIONS:
bed575d5 538 cur_region = new_omp_region (bb, code, cur_region);
e5c95afe
ZD
539 fallthru = true;
540 break;
541
542 case OMP_SECTIONS_SWITCH:
7e2df4a1 543 fallthru = false;
777f7f9a
RH
544 break;
545
a509ebb5
RL
546
547 case OMP_ATOMIC_LOAD:
548 case OMP_ATOMIC_STORE:
549 fallthru = true;
550 break;
551
552
bed575d5
RS
553 case OMP_RETURN:
554 /* In the case of an OMP_SECTION, the edge will go somewhere
555 other than the next block. This will be created later. */
556 cur_region->exit = bb;
557 fallthru = cur_region->type != OMP_SECTION;
558 cur_region = cur_region->outer;
559 break;
560
561 case OMP_CONTINUE:
562 cur_region->cont = bb;
563 switch (cur_region->type)
564 {
565 case OMP_FOR:
135a171d
JJ
566 /* Mark all OMP_FOR and OMP_CONTINUE succs edges as abnormal
567 to prevent splitting them. */
568 single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
e5c95afe 569 /* Make the loopback edge. */
135a171d
JJ
570 make_edge (bb, single_succ (cur_region->entry),
571 EDGE_ABNORMAL);
572
e5c95afe
ZD
573 /* Create an edge from OMP_FOR to exit, which corresponds to
574 the case that the body of the loop is not executed at
575 all. */
135a171d
JJ
576 make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
577 make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
578 fallthru = false;
bed575d5
RS
579 break;
580
581 case OMP_SECTIONS:
582 /* Wire up the edges into and out of the nested sections. */
bed575d5 583 {
e5c95afe
ZD
584 basic_block switch_bb = single_succ (cur_region->entry);
585
bed575d5
RS
586 struct omp_region *i;
587 for (i = cur_region->inner; i ; i = i->next)
588 {
589 gcc_assert (i->type == OMP_SECTION);
e5c95afe 590 make_edge (switch_bb, i->entry, 0);
bed575d5
RS
591 make_edge (i->exit, bb, EDGE_FALLTHRU);
592 }
e5c95afe
ZD
593
594 /* Make the loopback edge to the block with
595 OMP_SECTIONS_SWITCH. */
596 make_edge (bb, switch_bb, 0);
597
598 /* Make the edge from the switch to exit. */
599 make_edge (switch_bb, bb->next_bb, 0);
600 fallthru = false;
bed575d5
RS
601 }
602 break;
6531d1be 603
bed575d5
RS
604 default:
605 gcc_unreachable ();
606 }
bed575d5
RS
607 break;
608
56e84019
RH
609 default:
610 gcc_assert (!stmt_ends_bb_p (last));
611 fallthru = true;
612 }
6de9cd9a 613 }
56e84019
RH
614 else
615 fallthru = true;
6de9cd9a 616
56e84019 617 if (fallthru)
6de9cd9a
DN
618 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
619 }
620
bed575d5
RS
621 if (root_omp_region)
622 free_omp_regions ();
623
fca01525
KH
624 /* Fold COND_EXPR_COND of each COND_EXPR. */
625 fold_cond_expr_cond ();
6de9cd9a
DN
626}
627
628
6de9cd9a
DN
629/* Create the edges for a COND_EXPR starting at block BB.
630 At this point, both clauses must contain only simple gotos. */
631
632static void
633make_cond_expr_edges (basic_block bb)
634{
635 tree entry = last_stmt (bb);
636 basic_block then_bb, else_bb;
637 tree then_label, else_label;
d783b2a2 638 edge e;
6de9cd9a 639
1e128c5f
GB
640 gcc_assert (entry);
641 gcc_assert (TREE_CODE (entry) == COND_EXPR);
6de9cd9a
DN
642
643 /* Entry basic blocks for each component. */
644 then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
645 else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
646 then_bb = label_to_block (then_label);
647 else_bb = label_to_block (else_label);
648
d783b2a2 649 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
d783b2a2 650 e->goto_locus = EXPR_LOCATION (COND_EXPR_THEN (entry));
d783b2a2
JH
651 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
652 if (e)
2d593c86 653 e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));
a9b77cd1
ZD
654
655 /* We do not need the gotos anymore. */
656 COND_EXPR_THEN (entry) = NULL_TREE;
657 COND_EXPR_ELSE (entry) = NULL_TREE;
6de9cd9a
DN
658}
659
92b6dff3 660
d6be0d7f
JL
661/* Called for each element in the hash table (P) as we delete the
662 edge to cases hash table.
663
6531d1be 664 Clear all the TREE_CHAINs to prevent problems with copying of
d6be0d7f
JL
665 SWITCH_EXPRs and structure sharing rules, then free the hash table
666 element. */
667
15814ba0 668static bool
ac7d7749 669edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
15814ba0 670 void *data ATTRIBUTE_UNUSED)
d6be0d7f 671{
d6be0d7f
JL
672 tree t, next;
673
15814ba0 674 for (t = (tree) *value; t; t = next)
d6be0d7f
JL
675 {
676 next = TREE_CHAIN (t);
677 TREE_CHAIN (t) = NULL;
678 }
15814ba0
PB
679
680 *value = NULL;
681 return false;
d6be0d7f
JL
682}
683
684/* Start recording information mapping edges to case labels. */
685
c9784e6d 686void
d6be0d7f
JL
687start_recording_case_labels (void)
688{
689 gcc_assert (edge_to_cases == NULL);
15814ba0 690 edge_to_cases = pointer_map_create ();
d6be0d7f
JL
691}
692
693/* Return nonzero if we are recording information for case labels. */
694
695static bool
696recording_case_labels_p (void)
697{
698 return (edge_to_cases != NULL);
699}
700
701/* Stop recording information mapping edges to case labels and
702 remove any information we have recorded. */
c9784e6d 703void
d6be0d7f
JL
704end_recording_case_labels (void)
705{
15814ba0
PB
706 pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
707 pointer_map_destroy (edge_to_cases);
d6be0d7f
JL
708 edge_to_cases = NULL;
709}
710
d6be0d7f
JL
711/* If we are inside a {start,end}_recording_cases block, then return
712 a chain of CASE_LABEL_EXPRs from T which reference E.
713
714 Otherwise return NULL. */
92b6dff3
JL
715
716static tree
d6be0d7f 717get_cases_for_edge (edge e, tree t)
92b6dff3 718{
92b6dff3 719 void **slot;
d6be0d7f
JL
720 size_t i, n;
721 tree vec;
92b6dff3 722
d6be0d7f
JL
723 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
724 chains available. Return NULL so the caller can detect this case. */
725 if (!recording_case_labels_p ())
726 return NULL;
6531d1be 727
15814ba0 728 slot = pointer_map_contains (edge_to_cases, e);
92b6dff3 729 if (slot)
15814ba0 730 return (tree) *slot;
92b6dff3 731
d6be0d7f
JL
732 /* If we did not find E in the hash table, then this must be the first
733 time we have been queried for information about E & T. Add all the
734 elements from T to the hash table then perform the query again. */
92b6dff3 735
d6be0d7f 736 vec = SWITCH_LABELS (t);
92b6dff3 737 n = TREE_VEC_LENGTH (vec);
92b6dff3
JL
738 for (i = 0; i < n; i++)
739 {
15814ba0
PB
740 tree elt = TREE_VEC_ELT (vec, i);
741 tree lab = CASE_LABEL (elt);
d6be0d7f 742 basic_block label_bb = label_to_block (lab);
15814ba0
PB
743 edge this_edge = find_edge (e->src, label_bb);
744
745 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
746 a new chain. */
747 slot = pointer_map_insert (edge_to_cases, this_edge);
748 TREE_CHAIN (elt) = (tree) *slot;
749 *slot = elt;
92b6dff3 750 }
15814ba0
PB
751
752 return (tree) *pointer_map_contains (edge_to_cases, e);
92b6dff3 753}
6de9cd9a
DN
754
755/* Create the edges for a SWITCH_EXPR starting at block BB.
756 At this point, the switch body has been lowered and the
757 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
758
759static void
760make_switch_expr_edges (basic_block bb)
761{
762 tree entry = last_stmt (bb);
763 size_t i, n;
764 tree vec;
765
766 vec = SWITCH_LABELS (entry);
767 n = TREE_VEC_LENGTH (vec);
768
769 for (i = 0; i < n; ++i)
770 {
771 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
772 basic_block label_bb = label_to_block (lab);
d6be0d7f 773 make_edge (bb, label_bb, 0);
6de9cd9a
DN
774 }
775}
776
777
778/* Return the basic block holding label DEST. */
779
780basic_block
997de8ed 781label_to_block_fn (struct function *ifun, tree dest)
6de9cd9a 782{
242229bb
JH
783 int uid = LABEL_DECL_UID (dest);
784
f0b698c1
KH
785 /* We would die hard when faced with an undefined label. Emit a label to
786 the very first basic block. This will hopefully make even the dataflow
242229bb
JH
787 and undefined variable warnings quite right. */
788 if ((errorcount || sorrycount) && uid < 0)
789 {
6531d1be 790 block_stmt_iterator bsi =
24bd1a0b 791 bsi_start (BASIC_BLOCK (NUM_FIXED_BLOCKS));
242229bb
JH
792 tree stmt;
793
794 stmt = build1 (LABEL_EXPR, void_type_node, dest);
795 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
796 uid = LABEL_DECL_UID (dest);
797 }
e597f337
KH
798 if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
799 <= (unsigned int) uid)
98f464e0 800 return NULL;
e597f337 801 return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
6de9cd9a
DN
802}
803
4f6c2131
EB
804/* Create edges for an abnormal goto statement at block BB. If FOR_CALL
805 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
806
807void
808make_abnormal_goto_edges (basic_block bb, bool for_call)
809{
810 basic_block target_bb;
811 block_stmt_iterator bsi;
812
813 FOR_EACH_BB (target_bb)
814 for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
815 {
816 tree target = bsi_stmt (bsi);
817
818 if (TREE_CODE (target) != LABEL_EXPR)
819 break;
820
821 target = LABEL_EXPR_LABEL (target);
822
823 /* Make an edge to every label block that has been marked as a
824 potential target for a computed goto or a non-local goto. */
825 if ((FORCED_LABEL (target) && !for_call)
826 || (DECL_NONLOCAL (target) && for_call))
827 {
828 make_edge (bb, target_bb, EDGE_ABNORMAL);
829 break;
830 }
831 }
832}
833
6de9cd9a
DN
834/* Create edges for a goto statement at block BB. */
835
836static void
837make_goto_expr_edges (basic_block bb)
838{
6de9cd9a 839 block_stmt_iterator last = bsi_last (bb);
4f6c2131 840 tree goto_t = bsi_stmt (last);
6de9cd9a 841
4f6c2131
EB
842 /* A simple GOTO creates normal edges. */
843 if (simple_goto_p (goto_t))
6de9cd9a 844 {
7d3bf067 845 tree dest = GOTO_DESTINATION (goto_t);
4f6c2131 846 edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
4f6c2131 847 e->goto_locus = EXPR_LOCATION (goto_t);
4f6c2131
EB
848 bsi_remove (&last, true);
849 return;
6de9cd9a
DN
850 }
851
4f6c2131
EB
852 /* A computed GOTO creates abnormal edges. */
853 make_abnormal_goto_edges (bb, false);
6de9cd9a
DN
854}
855
856
857/*---------------------------------------------------------------------------
858 Flowgraph analysis
859---------------------------------------------------------------------------*/
860
f698d217
SB
861/* Cleanup useless labels in basic blocks. This is something we wish
862 to do early because it allows us to group case labels before creating
863 the edges for the CFG, and it speeds up block statement iterators in
864 all passes later on.
8b11009b
ZD
865 We rerun this pass after the CFG is created, to get rid of the labels that
866 are no longer referenced. After that we do not run it any more, since
867 (almost) no new labels should be created. */
f698d217
SB
868
869/* A map from basic block index to the leading label of that block. */
8b11009b
ZD
870static struct label_record
871{
872 /* The label. */
873 tree label;
874
875 /* True if the label is referenced from somewhere. */
876 bool used;
877} *label_for_bb;
f698d217
SB
878
879/* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
880static void
881update_eh_label (struct eh_region *region)
882{
883 tree old_label = get_eh_region_tree_label (region);
884 if (old_label)
885 {
165b54c3
SB
886 tree new_label;
887 basic_block bb = label_to_block (old_label);
888
889 /* ??? After optimizing, there may be EH regions with labels
890 that have already been removed from the function body, so
891 there is no basic block for them. */
892 if (! bb)
893 return;
894
8b11009b
ZD
895 new_label = label_for_bb[bb->index].label;
896 label_for_bb[bb->index].used = true;
f698d217
SB
897 set_eh_region_tree_label (region, new_label);
898 }
899}
900
242229bb
JH
901/* Given LABEL return the first label in the same basic block. */
902static tree
903main_block_label (tree label)
904{
905 basic_block bb = label_to_block (label);
8b11009b 906 tree main_label = label_for_bb[bb->index].label;
242229bb
JH
907
908 /* label_to_block possibly inserted undefined label into the chain. */
8b11009b
ZD
909 if (!main_label)
910 {
911 label_for_bb[bb->index].label = label;
912 main_label = label;
913 }
914
915 label_for_bb[bb->index].used = true;
916 return main_label;
242229bb
JH
917}
918
b986ebf3 919/* Cleanup redundant labels. This is a three-step process:
f698d217
SB
920 1) Find the leading label for each block.
921 2) Redirect all references to labels to the leading labels.
922 3) Cleanup all useless labels. */
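
/* For illustration (hypothetical input, not taken from a dump): if a block
   starts with

     L1:;  L2:;  L3:  x = 1;

   and L1 is user-written, L1 becomes the leading label, references to L2
   and L3 from gotos, switches and EH regions are redirected to L1, and any
   artificial label that ends up unreferenced is deleted.  */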
6de9cd9a 923
165b54c3 924void
6de9cd9a
DN
925cleanup_dead_labels (void)
926{
927 basic_block bb;
8b11009b 928 label_for_bb = XCNEWVEC (struct label_record, last_basic_block);
6de9cd9a
DN
929
930 /* Find a suitable label for each block. We use the first user-defined
f0b698c1 931 label if there is one, or otherwise just the first label we see. */
6de9cd9a
DN
932 FOR_EACH_BB (bb)
933 {
934 block_stmt_iterator i;
935
936 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
937 {
938 tree label, stmt = bsi_stmt (i);
939
940 if (TREE_CODE (stmt) != LABEL_EXPR)
941 break;
942
943 label = LABEL_EXPR_LABEL (stmt);
944
945 /* If we have not yet seen a label for the current block,
946 remember this one and see if there are more labels. */
8b11009b 947 if (!label_for_bb[bb->index].label)
6de9cd9a 948 {
8b11009b 949 label_for_bb[bb->index].label = label;
6de9cd9a
DN
950 continue;
951 }
952
953 /* If we did see a label for the current block already, but it
954 is an artificially created label, replace it if the current
955 label is a user defined label. */
8b11009b
ZD
956 if (!DECL_ARTIFICIAL (label)
957 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
6de9cd9a 958 {
8b11009b 959 label_for_bb[bb->index].label = label;
6de9cd9a
DN
960 break;
961 }
962 }
963 }
964
f698d217
SB
965 /* Now redirect all jumps/branches to the selected label.
966 First do so for each block ending in a control statement. */
6de9cd9a
DN
967 FOR_EACH_BB (bb)
968 {
969 tree stmt = last_stmt (bb);
970 if (!stmt)
971 continue;
972
973 switch (TREE_CODE (stmt))
974 {
975 case COND_EXPR:
976 {
977 tree true_branch, false_branch;
6de9cd9a
DN
978
979 true_branch = COND_EXPR_THEN (stmt);
980 false_branch = COND_EXPR_ELSE (stmt);
6de9cd9a 981
a9b77cd1
ZD
982 if (true_branch)
983 GOTO_DESTINATION (true_branch)
984 = main_block_label (GOTO_DESTINATION (true_branch));
985 if (false_branch)
986 GOTO_DESTINATION (false_branch)
987 = main_block_label (GOTO_DESTINATION (false_branch));
6de9cd9a
DN
988
989 break;
990 }
6531d1be 991
6de9cd9a
DN
992 case SWITCH_EXPR:
993 {
994 size_t i;
995 tree vec = SWITCH_LABELS (stmt);
996 size_t n = TREE_VEC_LENGTH (vec);
6531d1be 997
6de9cd9a
DN
998 /* Replace all destination labels. */
999 for (i = 0; i < n; ++i)
92b6dff3
JL
1000 {
1001 tree elt = TREE_VEC_ELT (vec, i);
1002 tree label = main_block_label (CASE_LABEL (elt));
d6be0d7f 1003 CASE_LABEL (elt) = label;
92b6dff3 1004 }
6de9cd9a
DN
1005 break;
1006 }
1007
f667741c
SB
1008 /* We have to handle GOTO_EXPRs until they're removed, and we don't
1009 remove them until after we've created the CFG edges. */
1010 case GOTO_EXPR:
242229bb
JH
1011 if (! computed_goto_p (stmt))
1012 {
1013 GOTO_DESTINATION (stmt)
1014 = main_block_label (GOTO_DESTINATION (stmt));
1015 break;
1016 }
f667741c 1017
6de9cd9a
DN
1018 default:
1019 break;
1020 }
1021 }
1022
f698d217
SB
1023 for_each_eh_region (update_eh_label);
1024
6de9cd9a 1025 /* Finally, purge dead labels. All user-defined labels and labels that
cea0f4f1
AP
1026 can be the target of non-local gotos and labels which have their
1027 address taken are preserved. */
6de9cd9a
DN
1028 FOR_EACH_BB (bb)
1029 {
1030 block_stmt_iterator i;
8b11009b 1031 tree label_for_this_bb = label_for_bb[bb->index].label;
6de9cd9a 1032
8b11009b 1033 if (!label_for_this_bb)
6de9cd9a
DN
1034 continue;
1035
8b11009b
ZD
1036 /* If the main label of the block is unused, we may still remove it. */
1037 if (!label_for_bb[bb->index].used)
1038 label_for_this_bb = NULL;
1039
6de9cd9a
DN
1040 for (i = bsi_start (bb); !bsi_end_p (i); )
1041 {
1042 tree label, stmt = bsi_stmt (i);
1043
1044 if (TREE_CODE (stmt) != LABEL_EXPR)
1045 break;
1046
1047 label = LABEL_EXPR_LABEL (stmt);
1048
1049 if (label == label_for_this_bb
1050 || ! DECL_ARTIFICIAL (label)
cea0f4f1
AP
1051 || DECL_NONLOCAL (label)
1052 || FORCED_LABEL (label))
6de9cd9a
DN
1053 bsi_next (&i);
1054 else
736432ee 1055 bsi_remove (&i, true);
6de9cd9a
DN
1056 }
1057 }
1058
1059 free (label_for_bb);
1060}
1061
f667741c
SB
1062/* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1063 and scan the sorted vector of cases. Combine the ones jumping to the
1064 same label.
1065 E.g., three separate entries 1: 2: 3: become one entry 1..3: */
1066
165b54c3 1067void
f667741c
SB
1068group_case_labels (void)
1069{
1070 basic_block bb;
1071
1072 FOR_EACH_BB (bb)
1073 {
1074 tree stmt = last_stmt (bb);
1075 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1076 {
1077 tree labels = SWITCH_LABELS (stmt);
1078 int old_size = TREE_VEC_LENGTH (labels);
1079 int i, j, new_size = old_size;
b7814a18
RG
1080 tree default_case = NULL_TREE;
1081 tree default_label = NULL_TREE;
29c4d22b 1082
66efeafc 1083 /* The default label is always the last case in a switch
b7814a18
RG
1084 statement after gimplification if it was not optimized
1085 away. */
1086 if (!CASE_LOW (TREE_VEC_ELT (labels, old_size - 1))
1087 && !CASE_HIGH (TREE_VEC_ELT (labels, old_size - 1)))
1088 {
1089 default_case = TREE_VEC_ELT (labels, old_size - 1);
1090 default_label = CASE_LABEL (default_case);
1091 old_size--;
1092 }
f667741c 1093
b7814a18 1094 /* Look for possible opportunities to merge cases. */
f667741c 1095 i = 0;
b7814a18 1096 while (i < old_size)
f667741c 1097 {
ed9cef22 1098 tree base_case, base_label, base_high;
f667741c
SB
1099 base_case = TREE_VEC_ELT (labels, i);
1100
1e128c5f 1101 gcc_assert (base_case);
f667741c 1102 base_label = CASE_LABEL (base_case);
31e9eea2
SB
1103
1104 /* Discard cases that have the same destination as the
1105 default case. */
1106 if (base_label == default_label)
1107 {
1108 TREE_VEC_ELT (labels, i) = NULL_TREE;
1109 i++;
29c4d22b 1110 new_size--;
31e9eea2
SB
1111 continue;
1112 }
1113
f667741c
SB
1114 base_high = CASE_HIGH (base_case) ?
1115 CASE_HIGH (base_case) : CASE_LOW (base_case);
d717e500 1116 i++;
f667741c
SB
1117 /* Try to merge case labels. Break out when we reach the end
1118 of the label vector or when we cannot merge the next case
1119 label with the current one. */
b7814a18 1120 while (i < old_size)
f667741c 1121 {
d717e500 1122 tree merge_case = TREE_VEC_ELT (labels, i);
f667741c
SB
1123 tree merge_label = CASE_LABEL (merge_case);
1124 tree t = int_const_binop (PLUS_EXPR, base_high,
1125 integer_one_node, 1);
1126
1127 /* Merge the cases if they jump to the same place,
1128 and their ranges are consecutive. */
1129 if (merge_label == base_label
1130 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1131 {
1132 base_high = CASE_HIGH (merge_case) ?
1133 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1134 CASE_HIGH (base_case) = base_high;
1135 TREE_VEC_ELT (labels, i) = NULL_TREE;
1136 new_size--;
d717e500 1137 i++;
f667741c
SB
1138 }
1139 else
1140 break;
1141 }
1142 }
1143
1144 /* Compress the case labels in the label vector, and adjust the
1145 length of the vector. */
1146 for (i = 0, j = 0; i < new_size; i++)
1147 {
1148 while (! TREE_VEC_ELT (labels, j))
1149 j++;
1150 TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1151 }
1152 TREE_VEC_LENGTH (labels) = new_size;
1153 }
1154 }
1155}
6de9cd9a
DN
1156
1157/* Checks whether we can merge block B into block A. */
1158
1159static bool
b48d0358 1160tree_can_merge_blocks_p (basic_block a, basic_block b)
6de9cd9a 1161{
9678086d 1162 const_tree stmt;
b48d0358 1163 block_stmt_iterator bsi;
38965eb2 1164 tree phi;
6de9cd9a 1165
c5cbcccf 1166 if (!single_succ_p (a))
6de9cd9a
DN
1167 return false;
1168
c5cbcccf 1169 if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
6de9cd9a
DN
1170 return false;
1171
c5cbcccf 1172 if (single_succ (a) != b)
6de9cd9a
DN
1173 return false;
1174
c5cbcccf 1175 if (!single_pred_p (b))
6de9cd9a
DN
1176 return false;
1177
26e75214
KH
1178 if (b == EXIT_BLOCK_PTR)
1179 return false;
6531d1be 1180
6de9cd9a
DN
1181 /* If A ends with a statement causing exceptions or something similar, we
1182 cannot merge the blocks. */
75547801
KG
1183 /* This CONST_CAST is okay because last_stmt doesn't modify its
1184 argument and the return value is assigned to a const_tree. */
b48d0358 1185 stmt = last_stmt (CONST_CAST_BB (a));
6de9cd9a
DN
1186 if (stmt && stmt_ends_bb_p (stmt))
1187 return false;
1188
1189 /* Do not allow a block with only a non-local label to be merged. */
1190 if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1191 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1192 return false;
1193
38965eb2 1194 /* It must be possible to eliminate all phi nodes in B. If ssa form
8f8bb1d2
ZD
1195 is not up-to-date, we cannot eliminate any phis; however, if only
1196 some symbols as whole are marked for renaming, this is not a problem,
1197 as phi nodes for those symbols are irrelevant in updating anyway. */
38965eb2
ZD
1198 phi = phi_nodes (b);
1199 if (phi)
1200 {
8f8bb1d2 1201 if (name_mappings_registered_p ())
38965eb2
ZD
1202 return false;
1203
1204 for (; phi; phi = PHI_CHAIN (phi))
1205 if (!is_gimple_reg (PHI_RESULT (phi))
1206 && !may_propagate_copy (PHI_RESULT (phi), PHI_ARG_DEF (phi, 0)))
1207 return false;
1208 }
6de9cd9a
DN
1209
1210 /* Do not remove user labels. */
b48d0358 1211 for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
6de9cd9a 1212 {
b48d0358 1213 stmt = bsi_stmt (bsi);
6de9cd9a
DN
1214 if (TREE_CODE (stmt) != LABEL_EXPR)
1215 break;
1216 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1217 return false;
1218 }
1219
2b271002
ZD
1220 /* Protect the loop latches. */
1221 if (current_loops
1222 && b->loop_father->latch == b)
1223 return false;
1224
6de9cd9a
DN
1225 return true;
1226}
1227
38965eb2
ZD
1228/* Replaces all uses of NAME by VAL. */
1229
684aaf29 1230void
38965eb2
ZD
1231replace_uses_by (tree name, tree val)
1232{
1233 imm_use_iterator imm_iter;
1234 use_operand_p use;
1235 tree stmt;
1236 edge e;
38965eb2 1237
6c00f606 1238 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
38965eb2 1239 {
cfaab3a9
DN
1240 if (TREE_CODE (stmt) != PHI_NODE)
1241 push_stmt_changes (&stmt);
1242
6c00f606
AM
1243 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1244 {
1245 replace_exp (use, val);
38965eb2 1246
6c00f606 1247 if (TREE_CODE (stmt) == PHI_NODE)
38965eb2 1248 {
6c00f606
AM
1249 e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
1250 if (e->flags & EDGE_ABNORMAL)
1251 {
1252 /* This can only occur for virtual operands, since
1253 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1254 would prevent replacement. */
1255 gcc_assert (!is_gimple_reg (name));
1256 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1257 }
38965eb2
ZD
1258 }
1259 }
cfaab3a9 1260
6c00f606
AM
1261 if (TREE_CODE (stmt) != PHI_NODE)
1262 {
1263 tree rhs;
9af0df6b 1264
6c00f606 1265 fold_stmt_inplace (stmt);
672987e8
ZD
1266 if (cfgcleanup_altered_bbs)
1267 bitmap_set_bit (cfgcleanup_altered_bbs, bb_for_stmt (stmt)->index);
cfaab3a9
DN
1268
1269 /* FIXME. This should go in pop_stmt_changes. */
6c00f606
AM
1270 rhs = get_rhs (stmt);
1271 if (TREE_CODE (rhs) == ADDR_EXPR)
1272 recompute_tree_invariant_for_addr_expr (rhs);
9af0df6b 1273
6c00f606 1274 maybe_clean_or_replace_eh_stmt (stmt, stmt);
cfaab3a9
DN
1275
1276 pop_stmt_changes (&stmt);
6c00f606 1277 }
38965eb2 1278 }
6531d1be 1279
40b448ef 1280 gcc_assert (has_zero_uses (name));
d5ab5675
ZD
1281
1282 /* Also update the trees stored in loop structures. */
1283 if (current_loops)
1284 {
1285 struct loop *loop;
42fd6772 1286 loop_iterator li;
d5ab5675 1287
42fd6772 1288 FOR_EACH_LOOP (li, loop, 0)
d5ab5675 1289 {
42fd6772 1290 substitute_in_loop_info (loop, name, val);
d5ab5675
ZD
1291 }
1292 }
38965eb2 1293}
6de9cd9a
DN
1294
1295/* Merge block B into block A. */
1296
1297static void
1298tree_merge_blocks (basic_block a, basic_block b)
1299{
1300 block_stmt_iterator bsi;
1301 tree_stmt_iterator last;
38965eb2 1302 tree phi;
6de9cd9a
DN
1303
1304 if (dump_file)
1305 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1306
c4f548b8
DN
1307 /* Remove all single-valued PHI nodes from block B of the form
1308 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
38965eb2
ZD
1309 bsi = bsi_last (a);
1310 for (phi = phi_nodes (b); phi; phi = phi_nodes (b))
1311 {
1312 tree def = PHI_RESULT (phi), use = PHI_ARG_DEF (phi, 0);
1313 tree copy;
d7f0e25c
ZD
1314 bool may_replace_uses = may_propagate_copy (def, use);
1315
7c8eb293
ZD
1316 /* In case we maintain loop closed ssa form, do not propagate arguments
1317 of loop exit phi nodes. */
d7f0e25c 1318 if (current_loops
f87000d0 1319 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
d7f0e25c
ZD
1320 && is_gimple_reg (def)
1321 && TREE_CODE (use) == SSA_NAME
1322 && a->loop_father != b->loop_father)
1323 may_replace_uses = false;
1324
1325 if (!may_replace_uses)
38965eb2
ZD
1326 {
1327 gcc_assert (is_gimple_reg (def));
1328
128a79fb 1329 /* Note that just emitting the copies is fine -- there is no problem
38965eb2
ZD
1330 with ordering of phi nodes. This is because A is the single
1331 predecessor of B, therefore results of the phi nodes cannot
1332 appear as arguments of the phi nodes. */
939409af 1333 copy = build_gimple_modify_stmt (def, use);
38965eb2 1334 bsi_insert_after (&bsi, copy, BSI_NEW_STMT);
38965eb2 1335 SSA_NAME_DEF_STMT (def) = copy;
611021e1 1336 remove_phi_node (phi, NULL, false);
38965eb2
ZD
1337 }
1338 else
611021e1 1339 {
d0f76c4b
RG
1340 /* If we deal with a PHI for virtual operands, we can simply
1341 propagate these without fussing with folding or updating
1342 the stmt. */
1343 if (!is_gimple_reg (def))
1344 {
1345 imm_use_iterator iter;
1346 use_operand_p use_p;
1347 tree stmt;
1348
1349 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1350 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1351 SET_USE (use_p, use);
1352 }
1353 else
1354 replace_uses_by (def, use);
611021e1
RK
1355 remove_phi_node (phi, NULL, true);
1356 }
38965eb2
ZD
1357 }
1358
6de9cd9a
DN
1359 /* Ensure that B follows A. */
1360 move_block_after (b, a);
1361
c5cbcccf 1362 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1e128c5f 1363 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
6de9cd9a
DN
1364
1365 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1366 for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1367 {
1368 if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
be477406
JL
1369 {
1370 tree label = bsi_stmt (bsi);
1371
736432ee 1372 bsi_remove (&bsi, false);
be477406
JL
1373 /* Now that we can thread computed gotos, we might have
1374 a situation where we have a forced label in block B.
1375 However, the label at the start of block B might still be
1376 used in other ways (think about the runtime checking for
1377 Fortran assigned gotos). So we cannot just delete the
1378 label. Instead we move the label to the start of block A. */
1379 if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
1380 {
1381 block_stmt_iterator dest_bsi = bsi_start (a);
1382 bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
1383 }
1384 }
6de9cd9a
DN
1385 else
1386 {
0a4fe58f 1387 change_bb_for_stmt (bsi_stmt (bsi), a);
6de9cd9a
DN
1388 bsi_next (&bsi);
1389 }
1390 }
1391
1392 /* Merge the chains. */
7506e1cb
ZD
1393 last = tsi_last (bb_stmt_list (a));
1394 tsi_link_after (&last, bb_stmt_list (b), TSI_NEW_STMT);
1395 set_bb_stmt_list (b, NULL_TREE);
672987e8
ZD
1396
1397 if (cfgcleanup_altered_bbs)
1398 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
6de9cd9a
DN
1399}
1400
1401
bc23502b
PB
1402/* Return the one of the two successors of BB that is not reached by a
1403 complex edge, if there is one. Else, return BB. We use
1404 this in optimizations that use post-dominators for their heuristics,
1405 to catch the cases in C++ where function calls are involved. */
6531d1be 1406
bc23502b 1407basic_block
6531d1be 1408single_noncomplex_succ (basic_block bb)
bc23502b
PB
1409{
1410 edge e0, e1;
1411 if (EDGE_COUNT (bb->succs) != 2)
1412 return bb;
6531d1be 1413
bc23502b
PB
1414 e0 = EDGE_SUCC (bb, 0);
1415 e1 = EDGE_SUCC (bb, 1);
1416 if (e0->flags & EDGE_COMPLEX)
1417 return e1->dest;
1418 if (e1->flags & EDGE_COMPLEX)
1419 return e0->dest;
6531d1be 1420
bc23502b 1421 return bb;
6531d1be 1422}
bc23502b
PB
1423
1424
6de9cd9a
DN
1425/* Walk the function tree removing unnecessary statements.
1426
1427 * Empty statement nodes are removed
1428
1429 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1430
1431 * Unnecessary COND_EXPRs are removed
1432
1433 * Some unnecessary BIND_EXPRs are removed
1434
1435 Clearly more work could be done. The trick is doing the analysis
1436 and removal fast enough to be a net improvement in compile times.
1437
1438 Note that when we remove a control structure such as a COND_EXPR,
1439 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1440 to ensure we eliminate all the useless code. */
1441
1442struct rus_data
1443{
1444 tree *last_goto;
1445 bool repeat;
1446 bool may_throw;
1447 bool may_branch;
1448 bool has_label;
1449};
1450
1451static void remove_useless_stmts_1 (tree *, struct rus_data *);
1452
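/* Emit a -Wunreachable-code warning for STMT, or for the first construct
   nested within it that has a usable source location.  Return true if a
   warning was issued, false otherwise.  */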
1453static bool
1454remove_useless_stmts_warn_notreached (tree stmt)
1455{
9506ac2b 1456 if (EXPR_HAS_LOCATION (stmt))
6de9cd9a 1457 {
9506ac2b 1458 location_t loc = EXPR_LOCATION (stmt);
43e05e45
SB
1459 if (LOCATION_LINE (loc) > 0)
1460 {
c5409249 1461 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
43e05e45
SB
1462 return true;
1463 }
6de9cd9a
DN
1464 }
1465
1466 switch (TREE_CODE (stmt))
1467 {
1468 case STATEMENT_LIST:
1469 {
1470 tree_stmt_iterator i;
1471 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1472 if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1473 return true;
1474 }
1475 break;
1476
1477 case COND_EXPR:
1478 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1479 return true;
1480 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1481 return true;
1482 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1483 return true;
1484 break;
1485
1486 case TRY_FINALLY_EXPR:
1487 case TRY_CATCH_EXPR:
1488 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1489 return true;
1490 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1491 return true;
1492 break;
1493
1494 case CATCH_EXPR:
1495 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1496 case EH_FILTER_EXPR:
1497 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1498 case BIND_EXPR:
1499 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1500
1501 default:
1502 /* Not a live container. */
1503 break;
1504 }
1505
1506 return false;
1507}
1508
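/* Helper for remove_useless_stmts_1.  Simplify the COND_EXPR at *STMT_P:
   fold its condition, remove the whole statement when neither arm has side
   effects, drop arms that are statically unreachable, and collapse
   branches to a common destination.  DATA holds the state of the walk.  */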
1509static void
1510remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1511{
1512 tree then_clause, else_clause, cond;
1513 bool save_has_label, then_has_label, else_has_label;
1514
1515 save_has_label = data->has_label;
1516 data->has_label = false;
1517 data->last_goto = NULL;
1518
1519 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1520
1521 then_has_label = data->has_label;
1522 data->has_label = false;
1523 data->last_goto = NULL;
1524
1525 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1526
1527 else_has_label = data->has_label;
1528 data->has_label = save_has_label | then_has_label | else_has_label;
1529
6de9cd9a
DN
1530 then_clause = COND_EXPR_THEN (*stmt_p);
1531 else_clause = COND_EXPR_ELSE (*stmt_p);
18faa5da 1532 cond = fold (COND_EXPR_COND (*stmt_p));
6de9cd9a
DN
1533
1534 /* If neither arm does anything at all, we can remove the whole IF. */
1535 if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1536 {
1537 *stmt_p = build_empty_stmt ();
1538 data->repeat = true;
1539 }
1540
1541 /* If there are no reachable statements in an arm, then we can
1542 zap the entire conditional. */
1543 else if (integer_nonzerop (cond) && !else_has_label)
1544 {
1545 if (warn_notreached)
1546 remove_useless_stmts_warn_notreached (else_clause);
1547 *stmt_p = then_clause;
1548 data->repeat = true;
1549 }
1550 else if (integer_zerop (cond) && !then_has_label)
1551 {
1552 if (warn_notreached)
1553 remove_useless_stmts_warn_notreached (then_clause);
1554 *stmt_p = else_clause;
1555 data->repeat = true;
1556 }
1557
1558 /* Check a couple of simple things on then/else with single stmts. */
1559 else
1560 {
1561 tree then_stmt = expr_only (then_clause);
1562 tree else_stmt = expr_only (else_clause);
1563
1564 /* Notice branches to a common destination. */
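      /* For instance (illustration only), "if (p) goto L; else goto L;"
	 can be collapsed to a plain "goto L;".  */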
1565 if (then_stmt && else_stmt
1566 && TREE_CODE (then_stmt) == GOTO_EXPR
1567 && TREE_CODE (else_stmt) == GOTO_EXPR
1568 && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1569 {
1570 *stmt_p = then_stmt;
1571 data->repeat = true;
1572 }
1573
1574 /* If the THEN/ELSE clause merely assigns a value to a variable or
1575 parameter which is already known to contain that value, then
1576 remove the useless THEN/ELSE clause. */
1577 else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1578 {
1579 if (else_stmt
07beea0d
AH
1580 && TREE_CODE (else_stmt) == GIMPLE_MODIFY_STMT
1581 && GIMPLE_STMT_OPERAND (else_stmt, 0) == cond
1582 && integer_zerop (GIMPLE_STMT_OPERAND (else_stmt, 1)))
6de9cd9a
DN
1583 COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1584 }
1585 else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1586 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1587 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1588 && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1589 {
1590 tree stmt = (TREE_CODE (cond) == EQ_EXPR
1591 ? then_stmt : else_stmt);
1592 tree *location = (TREE_CODE (cond) == EQ_EXPR
1593 ? &COND_EXPR_THEN (*stmt_p)
1594 : &COND_EXPR_ELSE (*stmt_p));
1595
1596 if (stmt
07beea0d
AH
1597 && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
1598 && GIMPLE_STMT_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1599 && GIMPLE_STMT_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
6de9cd9a
DN
1600 *location = alloc_stmt_list ();
1601 }
1602 }
1603
1604 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1605 would be re-introduced during lowering. */
1606 data->last_goto = NULL;
1607}
1608
1609
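/* Likewise for the TRY_FINALLY_EXPR at *STMT_P: when either operand is
   empty, or when the body can neither branch nor throw, the wrapper can be
   removed and the remaining statements strung together.  */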
1610static void
1611remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1612{
1613 bool save_may_branch, save_may_throw;
1614 bool this_may_branch, this_may_throw;
1615
1616 /* Collect may_branch and may_throw information for the body only. */
1617 save_may_branch = data->may_branch;
1618 save_may_throw = data->may_throw;
1619 data->may_branch = false;
1620 data->may_throw = false;
1621 data->last_goto = NULL;
1622
1623 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1624
1625 this_may_branch = data->may_branch;
1626 this_may_throw = data->may_throw;
1627 data->may_branch |= save_may_branch;
1628 data->may_throw |= save_may_throw;
1629 data->last_goto = NULL;
1630
1631 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1632
1633 /* If the body is empty, then we can emit the FINALLY block without
1634 the enclosing TRY_FINALLY_EXPR. */
1635 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1636 {
1637 *stmt_p = TREE_OPERAND (*stmt_p, 1);
1638 data->repeat = true;
1639 }
1640
1641 /* If the handler is empty, then we can emit the TRY block without
1642 the enclosing TRY_FINALLY_EXPR. */
1643 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1644 {
1645 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1646 data->repeat = true;
1647 }
1648
1649 /* If the body neither throws, nor branches, then we can safely
1650 string the TRY and FINALLY blocks together. */
1651 else if (!this_may_branch && !this_may_throw)
1652 {
1653 tree stmt = *stmt_p;
1654 *stmt_p = TREE_OPERAND (stmt, 0);
1655 append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1656 data->repeat = true;
1657 }
1658}
1659
1660
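/* Likewise for the TRY_CATCH_EXPR at *STMT_P: drop it entirely when the
   body cannot throw, and inspect the handler to decide whether exceptions
   may still propagate past this point.  */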
1661static void
1662remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1663{
1664 bool save_may_throw, this_may_throw;
1665 tree_stmt_iterator i;
1666 tree stmt;
1667
1668 /* Collect may_throw information for the body only. */
1669 save_may_throw = data->may_throw;
1670 data->may_throw = false;
1671 data->last_goto = NULL;
1672
1673 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1674
1675 this_may_throw = data->may_throw;
1676 data->may_throw = save_may_throw;
1677
1678 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1679 if (!this_may_throw)
1680 {
1681 if (warn_notreached)
1682 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1683 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1684 data->repeat = true;
1685 return;
1686 }
1687
1688 /* Process the catch clause specially. We may be able to tell that
1689 no exceptions propagate past this point. */
1690
1691 this_may_throw = true;
1692 i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1693 stmt = tsi_stmt (i);
1694 data->last_goto = NULL;
1695
1696 switch (TREE_CODE (stmt))
1697 {
1698 case CATCH_EXPR:
1699 for (; !tsi_end_p (i); tsi_next (&i))
1700 {
1701 stmt = tsi_stmt (i);
1702 /* If we catch all exceptions, then the body does not
1703 propagate exceptions past this point. */
1704 if (CATCH_TYPES (stmt) == NULL)
1705 this_may_throw = false;
1706 data->last_goto = NULL;
1707 remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1708 }
1709 break;
1710
1711 case EH_FILTER_EXPR:
1712 if (EH_FILTER_MUST_NOT_THROW (stmt))
1713 this_may_throw = false;
1714 else if (EH_FILTER_TYPES (stmt) == NULL)
1715 this_may_throw = false;
1716 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1717 break;
1718
1719 default:
1720 /* Otherwise this is a cleanup. */
1721 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1722
1723 /* If the cleanup is empty, then we can emit the TRY block without
1724 the enclosing TRY_CATCH_EXPR. */
1725 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1726 {
1727 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1728 data->repeat = true;
1729 }
1730 break;
1731 }
1732 data->may_throw |= this_may_throw;
1733}
1734
1735
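/* Likewise for the BIND_EXPR at *STMT_P: after processing its body, a
   BIND_EXPR that declares no variables and is not the outermost one of the
   current function (or of an inlined function) is replaced by its body.  */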
1736static void
1737remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1738{
1739 tree block;
1740
1741 /* First remove anything underneath the BIND_EXPR. */
1742 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1743
1744 /* If the BIND_EXPR has no variables, then we can pull everything
1745 up one level and remove the BIND_EXPR, unless this is the toplevel
1746 BIND_EXPR for the current function or an inlined function.
1747
1748 When this situation occurs we will want to apply this
1749 optimization again. */
1750 block = BIND_EXPR_BLOCK (*stmt_p);
1751 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1752 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1753 && (! block
1754 || ! BLOCK_ABSTRACT_ORIGIN (block)
1755 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1756 != FUNCTION_DECL)))
1757 {
1758 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1759 data->repeat = true;
1760 }
1761}
1762
1763
1764static void
1765remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1766{
1767 tree dest = GOTO_DESTINATION (*stmt_p);
1768
1769 data->may_branch = true;
1770 data->last_goto = NULL;
1771
1772 /* Record the last goto expr, so that we can delete it if unnecessary. */
1773 if (TREE_CODE (dest) == LABEL_DECL)
1774 data->last_goto = stmt_p;
1775}
1776
1777
1778static void
1779remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1780{
1781 tree label = LABEL_EXPR_LABEL (*stmt_p);
1782
1783 data->has_label = true;
1784
1785 /* We do want to jump across non-local label receiver code. */
1786 if (DECL_NONLOCAL (label))
1787 data->last_goto = NULL;
1788
1789 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1790 {
1791 *data->last_goto = build_empty_stmt ();
1792 data->repeat = true;
1793 }
1794
1795 /* ??? Add something here to delete unused labels. */
1796}
1797
1798
1799/* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on the
 1800 CALL_EXPR. This allows us to eliminate redundant or useless
 1801 calls to "const" functions.
 1802
 1803 The gimplifier already does the same operation, but we may only notice
 1804 that functions are const or pure after their calls have been gimplified,
 1805 so we need to update the flag here. */
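 /* Illustrative sketch (the declaration below is hypothetical, not from
    this file):

      int sq (int) __attribute__ ((const));
      ...
      sq (x);    /+ value unused +/

    once TREE_SIDE_EFFECTS is cleared on such a CALL_EXPR, later passes are
    free to treat the unused call as removable dead code. */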
1806
1807static void
1808update_call_expr_flags (tree call)
1809{
1810 tree decl = get_callee_fndecl (call);
becfd6e5 1811 int flags;
6de9cd9a
DN
1812 if (!decl)
1813 return;
becfd6e5
KZ
1814 flags = call_expr_flags (call);
1815 if (flags & (ECF_CONST | ECF_PURE) && !(flags & ECF_LOOPING_CONST_OR_PURE))
1816 TREE_SIDE_EFFECTS (call) = 0;
1817 if (TREE_NOTHROW (decl))
1818 TREE_NOTHROW (call) = 1;
1819}
1820
1821
1822/* T is CALL_EXPR. Set current_function_calls_* flags. */
1823
1824void
1825notice_special_calls (tree t)
1826{
1827 int flags = call_expr_flags (t);
1828
1829 if (flags & ECF_MAY_BE_ALLOCA)
e3b5732b 1830 cfun->calls_alloca = true;
6de9cd9a 1831 if (flags & ECF_RETURNS_TWICE)
e3b5732b 1832 cfun->calls_setjmp = true;
6de9cd9a
DN
1833}
1834
1835
1836/* Clear flags set by notice_special_calls. Used by dead code removal
1837 to update the flags. */
1838
1839void
1840clear_special_calls (void)
1841{
e3b5732b
JH
1842 cfun->calls_alloca = false;
1843 cfun->calls_setjmp = false;
6de9cd9a
DN
1844}
1845
1846
1847static void
1848remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1849{
cd709752 1850 tree t = *tp, op;
6de9cd9a
DN
1851
1852 switch (TREE_CODE (t))
1853 {
1854 case COND_EXPR:
1855 remove_useless_stmts_cond (tp, data);
1856 break;
1857
1858 case TRY_FINALLY_EXPR:
1859 remove_useless_stmts_tf (tp, data);
1860 break;
1861
1862 case TRY_CATCH_EXPR:
1863 remove_useless_stmts_tc (tp, data);
1864 break;
1865
1866 case BIND_EXPR:
1867 remove_useless_stmts_bind (tp, data);
1868 break;
1869
1870 case GOTO_EXPR:
1871 remove_useless_stmts_goto (tp, data);
1872 break;
1873
1874 case LABEL_EXPR:
1875 remove_useless_stmts_label (tp, data);
1876 break;
1877
1878 case RETURN_EXPR:
53e782e5 1879 fold_stmt (tp);
6de9cd9a
DN
1880 data->last_goto = NULL;
1881 data->may_branch = true;
1882 break;
1883
1884 case CALL_EXPR:
53e782e5 1885 fold_stmt (tp);
6de9cd9a
DN
1886 data->last_goto = NULL;
1887 notice_special_calls (t);
1888 update_call_expr_flags (t);
1889 if (tree_could_throw_p (t))
1890 data->may_throw = true;
1891 break;
1892
1893 case MODIFY_EXPR:
07beea0d
AH
1894 gcc_unreachable ();
1895
1896 case GIMPLE_MODIFY_STMT:
6de9cd9a 1897 data->last_goto = NULL;
53e782e5 1898 fold_stmt (tp);
cd709752
RH
1899 op = get_call_expr_in (t);
1900 if (op)
6de9cd9a 1901 {
1902 update_call_expr_flags (op);
1903 notice_special_calls (op);
6de9cd9a
DN
1904 }
1905 if (tree_could_throw_p (t))
1906 data->may_throw = true;
1907 break;
1908
1909 case STATEMENT_LIST:
1910 {
1911 tree_stmt_iterator i = tsi_start (t);
1912 while (!tsi_end_p (i))
1913 {
1914 t = tsi_stmt (i);
1915 if (IS_EMPTY_STMT (t))
1916 {
1917 tsi_delink (&i);
1918 continue;
1919 }
6531d1be 1920
6de9cd9a
DN
1921 remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1922
1923 t = tsi_stmt (i);
1924 if (TREE_CODE (t) == STATEMENT_LIST)
1925 {
1926 tsi_link_before (&i, t, TSI_SAME_STMT);
1927 tsi_delink (&i);
1928 }
1929 else
1930 tsi_next (&i);
1931 }
1932 }
1933 break;
8e14584d 1934 case ASM_EXPR:
53e782e5
AP
1935 fold_stmt (tp);
1936 data->last_goto = NULL;
1937 break;
6de9cd9a 1938
3088d404 1939 case OMP_PARALLEL:
a68ab351 1940 case OMP_TASK:
3088d404
JJ
1941 /* Make sure the outermost BIND_EXPR in OMP_BODY isn't removed
1942 as useless. */
a68ab351 1943 remove_useless_stmts_1 (&BIND_EXPR_BODY (OMP_TASKREG_BODY (*tp)), data);
3088d404
JJ
1944 data->last_goto = NULL;
1945 break;
1946
1947 case OMP_SECTIONS:
1948 case OMP_SINGLE:
1949 case OMP_SECTION:
a68ab351 1950 case OMP_MASTER:
3088d404
JJ
1951 case OMP_ORDERED:
1952 case OMP_CRITICAL:
1953 remove_useless_stmts_1 (&OMP_BODY (*tp), data);
1954 data->last_goto = NULL;
1955 break;
1956
1957 case OMP_FOR:
1958 remove_useless_stmts_1 (&OMP_FOR_BODY (*tp), data);
1959 data->last_goto = NULL;
1960 if (OMP_FOR_PRE_BODY (*tp))
1961 {
1962 remove_useless_stmts_1 (&OMP_FOR_PRE_BODY (*tp), data);
1963 data->last_goto = NULL;
1964 }
1965 break;
1966
6de9cd9a
DN
1967 default:
1968 data->last_goto = NULL;
1969 break;
1970 }
1971}
1972
c2924966 1973static unsigned int
6de9cd9a
DN
1974remove_useless_stmts (void)
1975{
1976 struct rus_data data;
1977
1978 clear_special_calls ();
1979
1980 do
1981 {
1982 memset (&data, 0, sizeof (data));
1983 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1984 }
1985 while (data.repeat);
c2924966 1986 return 0;
6de9cd9a
DN
1987}
1988
1989
8ddbbcae 1990struct gimple_opt_pass pass_remove_useless_stmts =
6de9cd9a 1991{
8ddbbcae
JH
1992 {
1993 GIMPLE_PASS,
6de9cd9a
DN
1994 "useless", /* name */
1995 NULL, /* gate */
1996 remove_useless_stmts, /* execute */
1997 NULL, /* sub */
1998 NULL, /* next */
1999 0, /* static_pass_number */
2000 0, /* tv_id */
9e5a3e6c
RH
2001 PROP_gimple_any, /* properties_required */
2002 0, /* properties_provided */
6de9cd9a
DN
2003 0, /* properties_destroyed */
2004 0, /* todo_flags_start */
8ddbbcae
JH
2005 TODO_dump_func /* todo_flags_finish */
2006 }
6de9cd9a
DN
2007};
2008
6de9cd9a
DN
2009/* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2010
2011static void
2012remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2013{
2014 tree phi;
2015
2016 /* Since this block is no longer reachable, we can just delete all
2017 of its PHI nodes. */
2018 phi = phi_nodes (bb);
2019 while (phi)
2020 {
17192884 2021 tree next = PHI_CHAIN (phi);
9b3b55a1 2022 remove_phi_node (phi, NULL_TREE, true);
6de9cd9a
DN
2023 phi = next;
2024 }
2025
2026 /* Remove edges to BB's successors. */
628f6a4e 2027 while (EDGE_COUNT (bb->succs) > 0)
d0d2cc21 2028 remove_edge (EDGE_SUCC (bb, 0));
6de9cd9a
DN
2029}
2030
2031
2032/* Remove statements of basic block BB. */
2033
2034static void
2035remove_bb (basic_block bb)
2036{
2037 block_stmt_iterator i;
dbce1570 2038 source_location loc = UNKNOWN_LOCATION;
6de9cd9a
DN
2039
2040 if (dump_file)
2041 {
2042 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2043 if (dump_flags & TDF_DETAILS)
2044 {
2045 dump_bb (bb, dump_file, 0);
2046 fprintf (dump_file, "\n");
2047 }
2048 }
2049
2b271002
ZD
2050 if (current_loops)
2051 {
2052 struct loop *loop = bb->loop_father;
2053
598ec7bd
ZD
2054 /* If a loop gets removed, clean up the information associated
2055 with it. */
2b271002
ZD
2056 if (loop->latch == bb
2057 || loop->header == bb)
598ec7bd 2058 free_numbers_of_iterations_estimates_loop (loop);
2b271002
ZD
2059 }
2060
6de9cd9a 2061 /* Remove all the instructions in the block. */
7506e1cb 2062 if (bb_stmt_list (bb) != NULL_TREE)
6de9cd9a 2063 {
7506e1cb 2064 for (i = bsi_start (bb); !bsi_end_p (i);)
77568960 2065 {
7506e1cb
ZD
2066 tree stmt = bsi_stmt (i);
2067 if (TREE_CODE (stmt) == LABEL_EXPR
2068 && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt))
2069 || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))))
2070 {
2071 basic_block new_bb;
2072 block_stmt_iterator new_bsi;
2073
2074 /* A non-reachable non-local label may still be referenced.
2075 But it no longer needs to carry the extra semantics of
2076 non-locality. */
2077 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
2078 {
2079 DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)) = 0;
2080 FORCED_LABEL (LABEL_EXPR_LABEL (stmt)) = 1;
2081 }
bb1ecfe8 2082
7506e1cb
ZD
2083 new_bb = bb->prev_bb;
2084 new_bsi = bsi_start (new_bb);
2085 bsi_remove (&i, false);
2086 bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
2087 }
2088 else
bb1ecfe8 2089 {
7506e1cb
ZD
2090 /* Release SSA definitions if we are in SSA. Note that we
2091 may be called when not in SSA. For example,
2092 final_cleanup calls this function via
2093 cleanup_tree_cfg. */
2094 if (gimple_in_ssa_p (cfun))
2095 release_defs (stmt);
2096
2097 bsi_remove (&i, true);
bb1ecfe8 2098 }
6531d1be 2099
7506e1cb
ZD
2100 /* Don't warn for removed gotos. Gotos are often removed due to
2101 jump threading, thus resulting in bogus warnings. Not great,
2102 since this way we lose warnings for gotos in the original
2103 program that are indeed unreachable. */
2104 if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
2105 {
2106 if (EXPR_HAS_LOCATION (stmt))
2107 loc = EXPR_LOCATION (stmt);
7506e1cb 2108 }
43e05e45 2109 }
6de9cd9a
DN
2110 }
2111
2112 /* If requested, give a warning that the first statement in the
 2113 block is unreachable. We walk statements forwards in the loop
 2114 above and keep only the first location seen, so LOC refers to the
 2115 first statement in the block that has location information. */
5ffeb913 2116 if (loc > BUILTINS_LOCATION && LOCATION_LINE (loc) > 0)
44c21c7f 2117 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
6de9cd9a
DN
2118
2119 remove_phi_nodes_and_edges_for_unreachable_block (bb);
7506e1cb 2120 bb->il.tree = NULL;
6de9cd9a
DN
2121}
2122
6de9cd9a 2123
35920270
KH
2124/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2125 predicate VAL, return the edge that will be taken out of the block.
2126 If VAL does not match a unique edge, NULL is returned. */
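 /* For instance (illustration only): for a block ending in "if (x > 0)"
    with VAL being the integer constant 1, the true edge is returned; for a
    SWITCH_EXPR the edge leading to the matching case label (or to the
    default label) is returned; a VAL that is not a gimple invariant
    yields NULL. */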
6de9cd9a
DN
2127
2128edge
2129find_taken_edge (basic_block bb, tree val)
2130{
2131 tree stmt;
2132
2133 stmt = last_stmt (bb);
2134
1e128c5f
GB
2135 gcc_assert (stmt);
2136 gcc_assert (is_ctrl_stmt (stmt));
65f4323d 2137 gcc_assert (val);
6de9cd9a 2138
be477406 2139 if (! is_gimple_min_invariant (val))
6de9cd9a
DN
2140 return NULL;
2141
2142 if (TREE_CODE (stmt) == COND_EXPR)
2143 return find_taken_edge_cond_expr (bb, val);
2144
2145 if (TREE_CODE (stmt) == SWITCH_EXPR)
2146 return find_taken_edge_switch_expr (bb, val);
2147
be477406 2148 if (computed_goto_p (stmt))
2149 {
2150 /* Only optimize if the argument is a label; if the argument is
 2151 not a label then we cannot construct a proper CFG.
2152
2153 It may be the case that we only need to allow the LABEL_REF to
2154 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2155 appear inside a LABEL_EXPR just to be safe. */
2156 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2157 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2158 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2159 return NULL;
2160 }
be477406 2161
35920270 2162 gcc_unreachable ();
6de9cd9a
DN
2163}
2164
be477406
JL
2165/* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2166 statement, determine which of the outgoing edges will be taken out of the
2167 block. Return NULL if either edge may be taken. */
2168
2169static edge
2170find_taken_edge_computed_goto (basic_block bb, tree val)
2171{
2172 basic_block dest;
2173 edge e = NULL;
2174
2175 dest = label_to_block (val);
2176 if (dest)
2177 {
2178 e = find_edge (bb, dest);
2179 gcc_assert (e != NULL);
2180 }
2181
2182 return e;
2183}
6de9cd9a
DN
2184
2185/* Given a constant value VAL and the entry block BB to a COND_EXPR
2186 statement, determine which of the two edges will be taken out of the
2187 block. Return NULL if either edge may be taken. */
2188
2189static edge
2190find_taken_edge_cond_expr (basic_block bb, tree val)
2191{
2192 edge true_edge, false_edge;
2193
2194 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
6531d1be 2195
f1b19062 2196 gcc_assert (TREE_CODE (val) == INTEGER_CST);
6e682d7e 2197 return (integer_zerop (val) ? false_edge : true_edge);
6de9cd9a
DN
2198}
2199
fca01525 2200/* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
6de9cd9a
DN
2201 statement, determine which edge will be taken out of the block. Return
2202 NULL if any edge may be taken. */
2203
2204static edge
2205find_taken_edge_switch_expr (basic_block bb, tree val)
2206{
2207 tree switch_expr, taken_case;
2208 basic_block dest_bb;
2209 edge e;
2210
6de9cd9a
DN
2211 switch_expr = last_stmt (bb);
2212 taken_case = find_case_label_for_value (switch_expr, val);
2213 dest_bb = label_to_block (CASE_LABEL (taken_case));
2214
2215 e = find_edge (bb, dest_bb);
1e128c5f 2216 gcc_assert (e);
6de9cd9a
DN
2217 return e;
2218}
2219
2220
f667741c
SB
2221/* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2222 We can make optimal use here of the fact that the case labels are
2223 sorted: We can do a binary search for a case matching VAL. */
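 /* For example (an illustrative sketch): with sorted labels
    { case 1: , case 5 ... 9: , case 42: , default: } and VAL == 7, the
    binary search narrows onto the "case 5 ... 9" entry because
    CASE_LOW (5) <= 7 and CASE_HIGH (9) >= 7; when no label matches, the
    default case, stored as the last element of SWITCH_LABELS, is
    returned. */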
6de9cd9a
DN
2224
2225static tree
2226find_case_label_for_value (tree switch_expr, tree val)
2227{
2228 tree vec = SWITCH_LABELS (switch_expr);
f667741c
SB
2229 size_t low, high, n = TREE_VEC_LENGTH (vec);
2230 tree default_case = TREE_VEC_ELT (vec, n - 1);
6de9cd9a 2231
f667741c 2232 for (low = -1, high = n - 1; high - low > 1; )
6de9cd9a 2233 {
f667741c 2234 size_t i = (high + low) / 2;
6de9cd9a 2235 tree t = TREE_VEC_ELT (vec, i);
f667741c
SB
2236 int cmp;
2237
2238 /* Cache the result of comparing CASE_LOW and val. */
2239 cmp = tree_int_cst_compare (CASE_LOW (t), val);
6de9cd9a 2240
f667741c
SB
2241 if (cmp > 0)
2242 high = i;
2243 else
2244 low = i;
2245
2246 if (CASE_HIGH (t) == NULL)
6de9cd9a 2247 {
2248 /* A single-valued case label. */
2249 if (cmp == 0)
6de9cd9a
DN
2250 return t;
2251 }
2252 else
2253 {
2254 /* A case range. We can only handle integer ranges. */
f667741c 2255 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
6de9cd9a
DN
2256 return t;
2257 }
2258 }
2259
6de9cd9a
DN
2260 return default_case;
2261}
2262
2263
6de9cd9a
DN
2264
2265
6de9cd9a
DN
2266/*---------------------------------------------------------------------------
2267 Debugging functions
2268---------------------------------------------------------------------------*/
2269
2270/* Dump tree-specific information of block BB to file OUTF. */
2271
2272void
2273tree_dump_bb (basic_block bb, FILE *outf, int indent)
2274{
38635499 2275 dump_generic_bb (outf, bb, indent, TDF_VOPS|TDF_MEMSYMS);
6de9cd9a
DN
2276}
2277
2278
2279/* Dump a basic block on stderr. */
2280
2281void
2282debug_tree_bb (basic_block bb)
2283{
2284 dump_bb (bb, stderr, 0);
2285}
2286
2287
2288/* Dump basic block with index N on stderr. */
2289
2290basic_block
2291debug_tree_bb_n (int n)
2292{
2293 debug_tree_bb (BASIC_BLOCK (n));
2294 return BASIC_BLOCK (n);
6531d1be 2295}
6de9cd9a
DN
2296
2297
2298/* Dump the CFG on stderr.
2299
2300 FLAGS are the same used by the tree dumping functions
6531d1be 2301 (see TDF_* in tree-pass.h). */
6de9cd9a
DN
2302
2303void
2304debug_tree_cfg (int flags)
2305{
2306 dump_tree_cfg (stderr, flags);
2307}
2308
2309
2310/* Dump the program showing basic block boundaries on the given FILE.
2311
2312 FLAGS are the same used by the tree dumping functions (see TDF_* in
2313 tree.h). */
2314
2315void
2316dump_tree_cfg (FILE *file, int flags)
2317{
2318 if (flags & TDF_DETAILS)
2319 {
2320 const char *funcname
673fda6b 2321 = lang_hooks.decl_printable_name (current_function_decl, 2);
6de9cd9a
DN
2322
2323 fputc ('\n', file);
2324 fprintf (file, ";; Function %s\n\n", funcname);
2325 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2326 n_basic_blocks, n_edges, last_basic_block);
2327
2328 brief_dump_cfg (file);
2329 fprintf (file, "\n");
2330 }
2331
2332 if (flags & TDF_STATS)
2333 dump_cfg_stats (file);
2334
2335 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2336}
2337
2338
2339/* Dump CFG statistics on FILE. */
2340
2341void
2342dump_cfg_stats (FILE *file)
2343{
2344 static long max_num_merged_labels = 0;
2345 unsigned long size, total = 0;
7b0cab99 2346 long num_edges;
6de9cd9a
DN
2347 basic_block bb;
2348 const char * const fmt_str = "%-30s%-13s%12s\n";
f7fda749 2349 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
cac50d94 2350 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
6de9cd9a
DN
2351 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2352 const char *funcname
673fda6b 2353 = lang_hooks.decl_printable_name (current_function_decl, 2);
6de9cd9a
DN
2354
2355
2356 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2357
2358 fprintf (file, "---------------------------------------------------------\n");
2359 fprintf (file, fmt_str, "", " Number of ", "Memory");
2360 fprintf (file, fmt_str, "", " instances ", "used ");
2361 fprintf (file, "---------------------------------------------------------\n");
2362
2363 size = n_basic_blocks * sizeof (struct basic_block_def);
2364 total += size;
f7fda749
RH
2365 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2366 SCALE (size), LABEL (size));
6de9cd9a 2367
7b0cab99 2368 num_edges = 0;
6de9cd9a 2369 FOR_EACH_BB (bb)
7b0cab99
JH
2370 num_edges += EDGE_COUNT (bb->succs);
2371 size = num_edges * sizeof (struct edge_def);
6de9cd9a 2372 total += size;
cac50d94 2373 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
6de9cd9a 2374
6de9cd9a
DN
2375 fprintf (file, "---------------------------------------------------------\n");
2376 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2377 LABEL (total));
2378 fprintf (file, "---------------------------------------------------------\n");
2379 fprintf (file, "\n");
2380
2381 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2382 max_num_merged_labels = cfg_stats.num_merged_labels;
2383
2384 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2385 cfg_stats.num_merged_labels, max_num_merged_labels);
2386
2387 fprintf (file, "\n");
2388}
2389
2390
2391/* Dump CFG statistics on stderr. Keep extern so that it's always
2392 linked in the final executable. */
2393
2394void
2395debug_cfg_stats (void)
2396{
2397 dump_cfg_stats (stderr);
2398}
2399
2400
2401/* Dump the flowgraph to a .vcg FILE. */
2402
2403static void
2404tree_cfg2vcg (FILE *file)
2405{
2406 edge e;
628f6a4e 2407 edge_iterator ei;
6de9cd9a
DN
2408 basic_block bb;
2409 const char *funcname
673fda6b 2410 = lang_hooks.decl_printable_name (current_function_decl, 2);
6de9cd9a
DN
2411
2412 /* Write the file header. */
2413 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2414 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2415 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2416
2417 /* Write blocks and edges. */
628f6a4e 2418 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
6de9cd9a
DN
2419 {
2420 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2421 e->dest->index);
2422
2423 if (e->flags & EDGE_FAKE)
2424 fprintf (file, " linestyle: dotted priority: 10");
2425 else
2426 fprintf (file, " linestyle: solid priority: 100");
2427
2428 fprintf (file, " }\n");
2429 }
2430 fputc ('\n', file);
2431
2432 FOR_EACH_BB (bb)
2433 {
2434 enum tree_code head_code, end_code;
2435 const char *head_name, *end_name;
2436 int head_line = 0;
2437 int end_line = 0;
2438 tree first = first_stmt (bb);
2439 tree last = last_stmt (bb);
2440
2441 if (first)
2442 {
2443 head_code = TREE_CODE (first);
2444 head_name = tree_code_name[head_code];
2445 head_line = get_lineno (first);
2446 }
2447 else
2448 head_name = "no-statement";
2449
2450 if (last)
2451 {
2452 end_code = TREE_CODE (last);
2453 end_name = tree_code_name[end_code];
2454 end_line = get_lineno (last);
2455 }
2456 else
2457 end_name = "no-statement";
2458
2459 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2460 bb->index, bb->index, head_name, head_line, end_name,
2461 end_line);
2462
628f6a4e 2463 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
2464 {
2465 if (e->dest == EXIT_BLOCK_PTR)
2466 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2467 else
2468 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2469
2470 if (e->flags & EDGE_FAKE)
2471 fprintf (file, " priority: 10 linestyle: dotted");
2472 else
2473 fprintf (file, " priority: 100 linestyle: solid");
2474
2475 fprintf (file, " }\n");
2476 }
2477
2478 if (bb->next_bb != EXIT_BLOCK_PTR)
2479 fputc ('\n', file);
2480 }
2481
2482 fputs ("}\n\n", file);
2483}
2484
2485
2486
2487/*---------------------------------------------------------------------------
2488 Miscellaneous helpers
2489---------------------------------------------------------------------------*/
2490
2491/* Return true if T represents a stmt that always transfers control. */
2492
2493bool
6ea2b70d 2494is_ctrl_stmt (const_tree t)
6de9cd9a
DN
2495{
2496 return (TREE_CODE (t) == COND_EXPR
2497 || TREE_CODE (t) == SWITCH_EXPR
2498 || TREE_CODE (t) == GOTO_EXPR
2499 || TREE_CODE (t) == RETURN_EXPR
2500 || TREE_CODE (t) == RESX_EXPR);
2501}
2502
2503
2504/* Return true if T is a statement that may alter the flow of control
2505 (e.g., a call to a non-returning function). */
2506
2507bool
9678086d 2508is_ctrl_altering_stmt (const_tree t)
6de9cd9a 2509{
9678086d 2510 const_tree call;
6de9cd9a 2511
1e128c5f 2512 gcc_assert (t);
0e014996 2513 call = get_call_expr_in (CONST_CAST_TREE (t));
cd709752 2514 if (call)
6de9cd9a 2515 {
6de9cd9a
DN
2516 /* A non-pure/const CALL_EXPR alters flow control if the current
2517 function has nonlocal labels. */
e3b5732b 2518 if (TREE_SIDE_EFFECTS (call) && cfun->has_nonlocal_label)
6de9cd9a
DN
2519 return true;
2520
2521 /* A CALL_EXPR also alters control flow if it does not return. */
6e14af16 2522 if (call_expr_flags (call) & ECF_NORETURN)
6de9cd9a 2523 return true;
6de9cd9a
DN
2524 }
2525
50674e96 2526 /* OpenMP directives alter control flow. */
bed575d5 2527 if (OMP_DIRECTIVE_P (t))
50674e96
DN
2528 return true;
2529
6de9cd9a
DN
2530 /* If a statement can throw, it alters control flow. */
2531 return tree_can_throw_internal (t);
2532}
2533
2534
2535/* Return true if T is a computed goto. */
2536
c6c6b7aa 2537static bool
6ea2b70d 2538computed_goto_p (const_tree t)
6de9cd9a
DN
2539{
2540 return (TREE_CODE (t) == GOTO_EXPR
2541 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2542}
2543
2544
4f6c2131 2545/* Return true if T is a simple local goto. */
6de9cd9a
DN
2546
2547bool
6ea2b70d 2548simple_goto_p (const_tree t)
6de9cd9a 2549{
4f6c2131
EB
2550 return (TREE_CODE (t) == GOTO_EXPR
2551 && TREE_CODE (GOTO_DESTINATION (t)) == LABEL_DECL);
2552}
2553
2554
2555/* Return true if T can make an abnormal transfer of control flow.
2556 Transfers of control flow associated with EH are excluded. */
2557
2558bool
6ea2b70d 2559tree_can_make_abnormal_goto (const_tree t)
4f6c2131
EB
2560{
2561 if (computed_goto_p (t))
2562 return true;
07beea0d
AH
2563 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
2564 t = GIMPLE_STMT_OPERAND (t, 1);
4f6c2131
EB
2565 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2566 t = TREE_OPERAND (t, 0);
2567 if (TREE_CODE (t) == CALL_EXPR)
e3b5732b 2568 return TREE_SIDE_EFFECTS (t) && cfun->has_nonlocal_label;
4f6c2131 2569 return false;
6de9cd9a
DN
2570}
2571
2572
2573/* Return true if T should start a new basic block. PREV_T is the
2574 statement preceding T. It is used when T is a label or a case label.
2575 Labels should only start a new basic block if their previous statement
2576 wasn't a label. Otherwise, a sequence of labels would generate
 2577 unnecessary basic blocks that contain only a single label. */
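 /* Illustrative example (hypothetical input): given

      L1:
      L2: x = 1;

    only L1 starts a new basic block; L2 is merged into it and counted in
    cfg_stats.num_merged_labels. A nonlocal or forced label in L2's place
    would still start its own block, as checked below. */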
2578
2579static inline bool
6ea2b70d 2580stmt_starts_bb_p (const_tree t, const_tree prev_t)
6de9cd9a 2581{
6de9cd9a
DN
2582 if (t == NULL_TREE)
2583 return false;
2584
2585 /* LABEL_EXPRs start a new basic block only if the preceding
2586 statement wasn't a label of the same type. This prevents the
2587 creation of consecutive blocks that have nothing but a single
2588 label. */
229cc11f 2589 if (TREE_CODE (t) == LABEL_EXPR)
6de9cd9a
DN
2590 {
2591 /* Nonlocal and computed GOTO targets always start a new block. */
229cc11f
KH
2592 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2593 || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
6de9cd9a
DN
2594 return true;
2595
229cc11f 2596 if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
6de9cd9a
DN
2597 {
2598 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2599 return true;
2600
2601 cfg_stats.num_merged_labels++;
2602 return false;
2603 }
2604 else
2605 return true;
2606 }
2607
2608 return false;
2609}
2610
2611
2612/* Return true if T should end a basic block. */
2613
2614bool
9678086d 2615stmt_ends_bb_p (const_tree t)
6de9cd9a
DN
2616{
2617 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2618}
2619
242229bb 2620/* Remove block annotations and other datastructures. */
6de9cd9a
DN
2621
2622void
242229bb 2623delete_tree_cfg_annotations (void)
6de9cd9a 2624{
3a40c18a
JH
2625 basic_block bb;
2626 block_stmt_iterator bsi;
2627
2628 /* Remove annotations from every tree in the function. */
2629 FOR_EACH_BB (bb)
2630 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2631 {
2632 tree stmt = bsi_stmt (bsi);
2633 ggc_free (stmt->base.ann);
2634 stmt->base.ann = NULL;
2635 }
6de9cd9a 2636 label_to_block_map = NULL;
6de9cd9a
DN
2637}
2638
2639
2640/* Return the first statement in basic block BB. */
2641
2642tree
2643first_stmt (basic_block bb)
2644{
2645 block_stmt_iterator i = bsi_start (bb);
2646 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2647}
2648
6de9cd9a
DN
2649/* Return the last statement in basic block BB. */
2650
2651tree
2652last_stmt (basic_block bb)
2653{
2654 block_stmt_iterator b = bsi_last (bb);
2655 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2656}
2657
6de9cd9a
DN
2658/* Return the last statement of an otherwise empty block. Return NULL
2659 if the block is totally empty, or if it contains more than one
2660 statement. */
2661
2662tree
2663last_and_only_stmt (basic_block bb)
2664{
2665 block_stmt_iterator i = bsi_last (bb);
2666 tree last, prev;
2667
2668 if (bsi_end_p (i))
2669 return NULL_TREE;
2670
2671 last = bsi_stmt (i);
2672 bsi_prev (&i);
2673 if (bsi_end_p (i))
2674 return last;
2675
2676 /* Empty statements should no longer appear in the instruction stream.
2677 Everything that might have appeared before should be deleted by
2678 remove_useless_stmts, and the optimizers should just bsi_remove
2679 instead of smashing with build_empty_stmt.
2680
2681 Thus the only thing that should appear here in a block containing
2682 one executable statement is a label. */
2683 prev = bsi_stmt (i);
2684 if (TREE_CODE (prev) == LABEL_EXPR)
2685 return last;
2686 else
2687 return NULL_TREE;
2688}
2689
2690
2691/* Mark BB as the basic block holding statement T. */
2692
2693void
2694set_bb_for_stmt (tree t, basic_block bb)
2695{
30d396e3
ZD
2696 if (TREE_CODE (t) == PHI_NODE)
2697 PHI_BB (t) = bb;
2698 else if (TREE_CODE (t) == STATEMENT_LIST)
6de9cd9a
DN
2699 {
2700 tree_stmt_iterator i;
2701 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2702 set_bb_for_stmt (tsi_stmt (i), bb);
2703 }
2704 else
2705 {
2706 stmt_ann_t ann = get_stmt_ann (t);
2707 ann->bb = bb;
2708
50674e96
DN
2709 /* If the statement is a label, add the label to block-to-labels map
2710 so that we can speed up edge creation for GOTO_EXPRs. */
2711 if (TREE_CODE (t) == LABEL_EXPR)
6de9cd9a
DN
2712 {
2713 int uid;
2714
2715 t = LABEL_EXPR_LABEL (t);
2716 uid = LABEL_DECL_UID (t);
2717 if (uid == -1)
2718 {
e597f337 2719 unsigned old_len = VEC_length (basic_block, label_to_block_map);
cb91fab0 2720 LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
e597f337
KH
2721 if (old_len <= (unsigned) uid)
2722 {
2723 unsigned new_len = 3 * uid / 2;
2724
a590ac65
KH
2725 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
2726 new_len);
e597f337 2727 }
6de9cd9a
DN
2728 }
2729 else
2730 /* We're moving an existing label. Make sure that we've
2731 removed it from the old block. */
2732 gcc_assert (!bb
2733 || !VEC_index (basic_block, label_to_block_map, uid));
2734 VEC_replace (basic_block, label_to_block_map, uid, bb);
6de9cd9a
DN
2735 }
2736 }
2737}
2738
0a4fe58f
JH
2739/* Faster version of set_bb_for_stmt that assume that statement is being moved
2740 from one basic block to another.
2741 For BB splitting we can run into quadratic case, so performance is quite
de1e45c3 2742 important and knowing that the tables are big enough, change_bb_for_stmt
0a4fe58f
JH
2743 can inline as leaf function. */
2744static inline void
2745change_bb_for_stmt (tree t, basic_block bb)
2746{
2747 get_stmt_ann (t)->bb = bb;
2748 if (TREE_CODE (t) == LABEL_EXPR)
2749 VEC_replace (basic_block, label_to_block_map,
2750 LABEL_DECL_UID (LABEL_EXPR_LABEL (t)), bb);
2751}
2752
8b11a64c
ZD
2753/* Finds iterator for STMT. */
2754
2755extern block_stmt_iterator
1a1804c2 2756bsi_for_stmt (tree stmt)
8b11a64c
ZD
2757{
2758 block_stmt_iterator bsi;
2759
2760 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2761 if (bsi_stmt (bsi) == stmt)
2762 return bsi;
2763
1e128c5f 2764 gcc_unreachable ();
8b11a64c 2765}
6de9cd9a 2766
f430bae8
AM
2767/* Mark statement T as modified, and update it. */
2768static inline void
2769update_modified_stmts (tree t)
2770{
ed1a2abd
JH
2771 if (!ssa_operands_active ())
2772 return;
f430bae8
AM
2773 if (TREE_CODE (t) == STATEMENT_LIST)
2774 {
2775 tree_stmt_iterator i;
2776 tree stmt;
2777 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2778 {
2779 stmt = tsi_stmt (i);
2780 update_stmt_if_modified (stmt);
2781 }
2782 }
2783 else
2784 update_stmt_if_modified (t);
2785}
2786
6de9cd9a
DN
2787/* Insert statement (or statement list) T before the statement
2788 pointed-to by iterator I. M specifies how to update iterator I
2789 after insertion (see enum bsi_iterator_update). */
2790
2791void
2792bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2793{
2794 set_bb_for_stmt (t, i->bb);
f430bae8 2795 update_modified_stmts (t);
6de9cd9a
DN
2796 tsi_link_before (&i->tsi, t, m);
2797}
2798
2799
2800/* Insert statement (or statement list) T after the statement
2801 pointed-to by iterator I. M specifies how to update iterator I
2802 after insertion (see enum bsi_iterator_update). */
2803
2804void
2805bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2806{
2807 set_bb_for_stmt (t, i->bb);
f430bae8 2808 update_modified_stmts (t);
6de9cd9a
DN
2809 tsi_link_after (&i->tsi, t, m);
2810}
2811
2812
2813/* Remove the statement pointed to by iterator I. The iterator is updated
6531d1be 2814 to the next statement.
736432ee
JL
2815
2816 When REMOVE_EH_INFO is true we remove the statement pointed to by
2817 iterator I from the EH tables. Otherwise we do not modify the EH
2818 tables.
2819
2820 Generally, REMOVE_EH_INFO should be true when the statement is going to
2821 be removed from the IL and not reinserted elsewhere. */
6de9cd9a
DN
2822
2823void
736432ee 2824bsi_remove (block_stmt_iterator *i, bool remove_eh_info)
6de9cd9a
DN
2825{
2826 tree t = bsi_stmt (*i);
2827 set_bb_for_stmt (t, NULL);
f430bae8 2828 delink_stmt_imm_use (t);
6de9cd9a 2829 tsi_delink (&i->tsi);
f430bae8 2830 mark_stmt_modified (t);
736432ee 2831 if (remove_eh_info)
6946b3f7
JH
2832 {
2833 remove_stmt_from_eh_region (t);
2834 gimple_remove_stmt_histograms (cfun, t);
2835 }
6de9cd9a
DN
2836}
2837
2838
2839/* Move the statement at FROM so it comes right after the statement at TO. */
2840
6531d1be 2841void
6de9cd9a
DN
2842bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2843{
2844 tree stmt = bsi_stmt (*from);
736432ee 2845 bsi_remove (from, false);
18965703
ZD
2846 /* We must have BSI_NEW_STMT here, as bsi_move_after is sometimes used to
2847 move statements to an empty block. */
2848 bsi_insert_after (to, stmt, BSI_NEW_STMT);
6531d1be 2849}
6de9cd9a
DN
2850
2851
2852/* Move the statement at FROM so it comes right before the statement at TO. */
2853
6531d1be 2854void
6de9cd9a
DN
2855bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2856{
2857 tree stmt = bsi_stmt (*from);
736432ee 2858 bsi_remove (from, false);
18965703
ZD
2859 /* For consistency with bsi_move_after, it might be better to have
2860 BSI_NEW_STMT here; however, that breaks several places that expect
2861 that TO does not change. */
6de9cd9a
DN
2862 bsi_insert_before (to, stmt, BSI_SAME_STMT);
2863}
2864
2865
2866/* Move the statement at FROM to the end of basic block BB. */
2867
2868void
2869bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2870{
2871 block_stmt_iterator last = bsi_last (bb);
6531d1be 2872
6de9cd9a
DN
2873 /* Have to check bsi_end_p because it could be an empty block. */
2874 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2875 bsi_move_before (from, &last);
2876 else
2877 bsi_move_after (from, &last);
2878}
2879
2880
2881/* Replace the contents of the statement pointed to by iterator BSI
736432ee
JL
2882 with STMT. If UPDATE_EH_INFO is true, the exception handling
2883 information of the original statement is moved to the new statement. */
6de9cd9a
DN
2884
2885void
736432ee 2886bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool update_eh_info)
6de9cd9a
DN
2887{
2888 int eh_region;
2889 tree orig_stmt = bsi_stmt (*bsi);
2890
ff39b79b
JH
2891 if (stmt == orig_stmt)
2892 return;
6de9cd9a
DN
2893 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
2894 set_bb_for_stmt (stmt, bsi->bb);
2895
2896 /* Preserve EH region information from the original statement, if
2897 requested by the caller. */
736432ee 2898 if (update_eh_info)
6de9cd9a
DN
2899 {
2900 eh_region = lookup_stmt_eh_region (orig_stmt);
2901 if (eh_region >= 0)
59bb84ef 2902 {
736432ee 2903 remove_stmt_from_eh_region (orig_stmt);
59bb84ef
JL
2904 add_stmt_to_eh_region (stmt, eh_region);
2905 }
6de9cd9a
DN
2906 }
2907
ff39b79b
JH
2908 gimple_duplicate_stmt_histograms (cfun, stmt, cfun, orig_stmt);
2909 gimple_remove_stmt_histograms (cfun, orig_stmt);
b1ca239f 2910 delink_stmt_imm_use (orig_stmt);
6de9cd9a 2911 *bsi_stmt_ptr (*bsi) = stmt;
f430bae8
AM
2912 mark_stmt_modified (stmt);
2913 update_modified_stmts (stmt);
6de9cd9a
DN
2914}
2915
2916
2917/* Insert the statement pointed-to by BSI into edge E. Every attempt
2918 is made to place the statement in an existing basic block, but
2919 sometimes that isn't possible. When it isn't possible, the edge is
2920 split and the statement is added to the new block.
2921
2922 In all cases, the returned *BSI points to the correct location. The
2923 return value is true if insertion should be done after the location,
82b85a85
ZD
2924 or false if it should be done before the location. If new basic block
2925 has to be created, it is stored in *NEW_BB. */
6de9cd9a
DN
2926
2927static bool
82b85a85
ZD
2928tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
2929 basic_block *new_bb)
6de9cd9a
DN
2930{
2931 basic_block dest, src;
2932 tree tmp;
2933
2934 dest = e->dest;
2935 restart:
2936
2937 /* If the destination has one predecessor which has no PHI nodes,
6531d1be 2938 insert there. Except for the exit block.
6de9cd9a
DN
2939
2940 The requirement for no PHI nodes could be relaxed. Basically we
2941 would have to examine the PHIs to prove that none of them used
e28d0cfb 2942 the value set by the statement we want to insert on E. That
6de9cd9a 2943 hardly seems worth the effort. */
c5cbcccf 2944 if (single_pred_p (dest)
6de9cd9a
DN
2945 && ! phi_nodes (dest)
2946 && dest != EXIT_BLOCK_PTR)
2947 {
2948 *bsi = bsi_start (dest);
2949 if (bsi_end_p (*bsi))
2950 return true;
2951
2952 /* Make sure we insert after any leading labels. */
2953 tmp = bsi_stmt (*bsi);
2954 while (TREE_CODE (tmp) == LABEL_EXPR)
2955 {
2956 bsi_next (bsi);
2957 if (bsi_end_p (*bsi))
2958 break;
2959 tmp = bsi_stmt (*bsi);
2960 }
2961
2962 if (bsi_end_p (*bsi))
2963 {
2964 *bsi = bsi_last (dest);
2965 return true;
2966 }
2967 else
2968 return false;
2969 }
2970
2971 /* If the source has one successor, the edge is not abnormal and
2972 the last statement does not end a basic block, insert there.
2973 Except for the entry block. */
2974 src = e->src;
2975 if ((e->flags & EDGE_ABNORMAL) == 0
c5cbcccf 2976 && single_succ_p (src)
6de9cd9a
DN
2977 && src != ENTRY_BLOCK_PTR)
2978 {
2979 *bsi = bsi_last (src);
2980 if (bsi_end_p (*bsi))
2981 return true;
2982
2983 tmp = bsi_stmt (*bsi);
2984 if (!stmt_ends_bb_p (tmp))
2985 return true;
ce068299
JH
2986
2987 /* Insert code just before returning the value. We may need to decompose
2988 the return in case it contains a non-trivial operand. */
2989 if (TREE_CODE (tmp) == RETURN_EXPR)
2990 {
2991 tree op = TREE_OPERAND (tmp, 0);
7802250d 2992 if (op && !is_gimple_val (op))
ce068299 2993 {
07beea0d 2994 gcc_assert (TREE_CODE (op) == GIMPLE_MODIFY_STMT);
ce068299 2995 bsi_insert_before (bsi, op, BSI_NEW_STMT);
07beea0d 2996 TREE_OPERAND (tmp, 0) = GIMPLE_STMT_OPERAND (op, 0);
ce068299
JH
2997 }
2998 bsi_prev (bsi);
2999 return true;
3000 }
6de9cd9a
DN
3001 }
3002
3003 /* Otherwise, create a new basic block, and split this edge. */
3004 dest = split_edge (e);
82b85a85
ZD
3005 if (new_bb)
3006 *new_bb = dest;
c5cbcccf 3007 e = single_pred_edge (dest);
6de9cd9a
DN
3008 goto restart;
3009}
3010
3011
3012/* This routine will commit all pending edge insertions, creating any new
8e731e4e 3013 basic blocks which are necessary. */
6de9cd9a
DN
3014
3015void
8e731e4e 3016bsi_commit_edge_inserts (void)
6de9cd9a
DN
3017{
3018 basic_block bb;
3019 edge e;
628f6a4e 3020 edge_iterator ei;
6de9cd9a 3021
c5cbcccf 3022 bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
6de9cd9a
DN
3023
3024 FOR_EACH_BB (bb)
628f6a4e 3025 FOR_EACH_EDGE (e, ei, bb->succs)
edfaf675 3026 bsi_commit_one_edge_insert (e, NULL);
6de9cd9a
DN
3027}
3028
3029
edfaf675
AM
3030/* Commit insertions pending at edge E. If a new block is created, set NEW_BB
3031 to this block, otherwise set it to NULL. */
6de9cd9a 3032
edfaf675
AM
3033void
3034bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
6de9cd9a 3035{
edfaf675
AM
3036 if (new_bb)
3037 *new_bb = NULL;
6de9cd9a
DN
3038 if (PENDING_STMT (e))
3039 {
3040 block_stmt_iterator bsi;
3041 tree stmt = PENDING_STMT (e);
3042
3043 PENDING_STMT (e) = NULL_TREE;
3044
edfaf675 3045 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
6de9cd9a
DN
3046 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3047 else
3048 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3049 }
3050}
3051
3052
3053/* Add STMT to the pending list of edge E. No actual insertion is
3054 made until a call to bsi_commit_edge_inserts () is made. */
3055
3056void
3057bsi_insert_on_edge (edge e, tree stmt)
3058{
3059 append_to_statement_list (stmt, &PENDING_STMT (e));
3060}
3061
adb35797
KH
3062/* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new
3063 block has to be created, it is returned. */
82b85a85
ZD
3064
3065basic_block
3066bsi_insert_on_edge_immediate (edge e, tree stmt)
3067{
3068 block_stmt_iterator bsi;
3069 basic_block new_bb = NULL;
3070
1e128c5f 3071 gcc_assert (!PENDING_STMT (e));
82b85a85
ZD
3072
3073 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3074 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3075 else
3076 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3077
3078 return new_bb;
3079}
6de9cd9a 3080
6de9cd9a
DN
3081/*---------------------------------------------------------------------------
3082 Tree specific functions for CFG manipulation
3083---------------------------------------------------------------------------*/
3084
4f7db7f7
KH
3085/* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3086
3087static void
3088reinstall_phi_args (edge new_edge, edge old_edge)
3089{
ea7e6d5a
AH
3090 tree phi;
3091 edge_var_map_vector v;
3092 edge_var_map *vm;
3093 int i;
4f7db7f7 3094
ea7e6d5a
AH
3095 v = redirect_edge_var_map_vector (old_edge);
3096 if (!v)
4f7db7f7 3097 return;
6531d1be 3098
ea7e6d5a
AH
3099 for (i = 0, phi = phi_nodes (new_edge->dest);
3100 VEC_iterate (edge_var_map, v, i, vm) && phi;
3101 i++, phi = PHI_CHAIN (phi))
4f7db7f7 3102 {
ea7e6d5a
AH
3103 tree result = redirect_edge_var_map_result (vm);
3104 tree arg = redirect_edge_var_map_def (vm);
4f7db7f7
KH
3105
3106 gcc_assert (result == PHI_RESULT (phi));
3107
d2e398df 3108 add_phi_arg (phi, arg, new_edge);
4f7db7f7
KH
3109 }
3110
ea7e6d5a 3111 redirect_edge_var_map_clear (old_edge);
4f7db7f7
KH
3112}
3113
2a8a8292 3114/* Returns the basic block after which the new basic block created
b9a66240
ZD
3115 by splitting edge EDGE_IN should be placed. Tries to keep the new block
3116 near its "logical" location. This is of most help to humans looking
3117 at debugging dumps. */
3118
3119static basic_block
3120split_edge_bb_loc (edge edge_in)
3121{
3122 basic_block dest = edge_in->dest;
3123
3124 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3125 return edge_in->src;
3126 else
3127 return dest->prev_bb;
3128}
3129
6de9cd9a
DN
3130/* Split a (typically critical) edge EDGE_IN. Return the new block.
3131 Abort on abnormal edges. */
3132
3133static basic_block
3134tree_split_edge (edge edge_in)
3135{
4741d956 3136 basic_block new_bb, after_bb, dest;
6de9cd9a 3137 edge new_edge, e;
6de9cd9a
DN
3138
3139 /* Abnormal edges cannot be split. */
1e128c5f 3140 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
6de9cd9a 3141
6de9cd9a
DN
3142 dest = edge_in->dest;
3143
b9a66240 3144 after_bb = split_edge_bb_loc (edge_in);
6de9cd9a
DN
3145
3146 new_bb = create_empty_bb (after_bb);
b829f3fa
JH
3147 new_bb->frequency = EDGE_FREQUENCY (edge_in);
3148 new_bb->count = edge_in->count;
6de9cd9a 3149 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
b829f3fa
JH
3150 new_edge->probability = REG_BR_PROB_BASE;
3151 new_edge->count = edge_in->count;
6de9cd9a 3152
1e128c5f 3153 e = redirect_edge_and_branch (edge_in, new_bb);
c7b852c8 3154 gcc_assert (e == edge_in);
4f7db7f7 3155 reinstall_phi_args (new_edge, e);
6de9cd9a
DN
3156
3157 return new_bb;
3158}
3159
6de9cd9a 3160/* Callback for walk_tree, check that all elements with address taken are
7a442a1d
SB
3161 properly noticed as such. The DATA is an int* that is 1 if TP was seen
3162 inside a PHI node. */
6de9cd9a
DN
3163
3164static tree
2fbe90f2 3165verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
6de9cd9a
DN
3166{
3167 tree t = *tp, x;
3168
3169 if (TYPE_P (t))
3170 *walk_subtrees = 0;
6531d1be 3171
e8ca4159 3172 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2fbe90f2 3173#define CHECK_OP(N, MSG) \
e8ca4159 3174 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2fbe90f2 3175 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
6de9cd9a
DN
3176
3177 switch (TREE_CODE (t))
3178 {
3179 case SSA_NAME:
3180 if (SSA_NAME_IN_FREE_LIST (t))
3181 {
3182 error ("SSA name in freelist but still referenced");
3183 return *tp;
3184 }
3185 break;
3186
0bca51f0
DN
3187 case ASSERT_EXPR:
3188 x = fold (ASSERT_EXPR_COND (t));
3189 if (x == boolean_false_node)
3190 {
3191 error ("ASSERT_EXPR with an always-false condition");
3192 return *tp;
3193 }
3194 break;
3195
6de9cd9a 3196 case MODIFY_EXPR:
07beea0d
AH
3197 gcc_unreachable ();
3198
3199 case GIMPLE_MODIFY_STMT:
3200 x = GIMPLE_STMT_OPERAND (t, 0);
6de9cd9a
DN
3201 if (TREE_CODE (x) == BIT_FIELD_REF
3202 && is_gimple_reg (TREE_OPERAND (x, 0)))
3203 {
3204 error ("GIMPLE register modified with BIT_FIELD_REF");
2fbe90f2 3205 return t;
6de9cd9a
DN
3206 }
3207 break;
3208
3209 case ADDR_EXPR:
81fc3052 3210 {
81fc3052
DB
3211 bool old_constant;
3212 bool old_side_effects;
81fc3052
DB
3213 bool new_constant;
3214 bool new_side_effects;
3215
51eed280
PB
3216 gcc_assert (is_gimple_address (t));
3217
81fc3052
DB
3218 old_constant = TREE_CONSTANT (t);
3219 old_side_effects = TREE_SIDE_EFFECTS (t);
3220
127203ac 3221 recompute_tree_invariant_for_addr_expr (t);
81fc3052
DB
3222 new_side_effects = TREE_SIDE_EFFECTS (t);
3223 new_constant = TREE_CONSTANT (t);
3224
81fc3052
DB
3225 if (old_constant != new_constant)
3226 {
3227 error ("constant not recomputed when ADDR_EXPR changed");
3228 return t;
3229 }
3230 if (old_side_effects != new_side_effects)
3231 {
3232 error ("side effects not recomputed when ADDR_EXPR changed");
3233 return t;
3234 }
3235
3236 /* Skip any references (they will be checked when we recurse down the
3237 tree) and ensure that any variable used as a prefix is marked
3238 addressable. */
3239 for (x = TREE_OPERAND (t, 0);
3240 handled_component_p (x);
3241 x = TREE_OPERAND (x, 0))
3242 ;
3243
3244 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3245 return NULL;
3246 if (!TREE_ADDRESSABLE (x))
3247 {
3248 error ("address taken, but ADDRESSABLE bit not set");
3249 return x;
3250 }
bdb69bee 3251
81fc3052
DB
3252 break;
3253 }
6de9cd9a
DN
3254
3255 case COND_EXPR:
a6234684 3256 x = COND_EXPR_COND (t);
d40055ab 3257 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
6de9cd9a 3258 {
d40055ab 3259 error ("non-integral used in condition");
6de9cd9a
DN
3260 return x;
3261 }
9c691961
AP
3262 if (!is_gimple_condexpr (x))
3263 {
ab532386 3264 error ("invalid conditional operand");
9c691961
AP
3265 return x;
3266 }
6de9cd9a
DN
3267 break;
3268
a134e5f3
TB
3269 case NON_LVALUE_EXPR:
3270 gcc_unreachable ();
3271
1043771b 3272 CASE_CONVERT:
6de9cd9a 3273 case FIX_TRUNC_EXPR:
6de9cd9a
DN
3274 case FLOAT_EXPR:
3275 case NEGATE_EXPR:
3276 case ABS_EXPR:
3277 case BIT_NOT_EXPR:
6de9cd9a 3278 case TRUTH_NOT_EXPR:
ab532386 3279 CHECK_OP (0, "invalid operand to unary operator");
6de9cd9a
DN
3280 break;
3281
3282 case REALPART_EXPR:
3283 case IMAGPART_EXPR:
2fbe90f2
RK
3284 case COMPONENT_REF:
3285 case ARRAY_REF:
3286 case ARRAY_RANGE_REF:
3287 case BIT_FIELD_REF:
3288 case VIEW_CONVERT_EXPR:
3289 /* We have a nest of references. Verify that each of the operands
3290 that determine where to reference is either a constant or a variable,
3291 verify that the base is valid, and then show we've already checked
3292 the subtrees. */
afe84921 3293 while (handled_component_p (t))
2fbe90f2
RK
3294 {
3295 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
ab532386 3296 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2fbe90f2
RK
3297 else if (TREE_CODE (t) == ARRAY_REF
3298 || TREE_CODE (t) == ARRAY_RANGE_REF)
3299 {
ab532386 3300 CHECK_OP (1, "invalid array index");
2fbe90f2 3301 if (TREE_OPERAND (t, 2))
ab532386 3302 CHECK_OP (2, "invalid array lower bound");
2fbe90f2 3303 if (TREE_OPERAND (t, 3))
ab532386 3304 CHECK_OP (3, "invalid array stride");
2fbe90f2
RK
3305 }
3306 else if (TREE_CODE (t) == BIT_FIELD_REF)
3307 {
e55f42fb
RG
3308 if (!host_integerp (TREE_OPERAND (t, 1), 1)
3309 || !host_integerp (TREE_OPERAND (t, 2), 1))
3310 {
3311 error ("invalid position or size operand to BIT_FIELD_REF");
3312 return t;
3313 }
fc0f49f3
RG
3314 else if (INTEGRAL_TYPE_P (TREE_TYPE (t))
3315 && (TYPE_PRECISION (TREE_TYPE (t))
3316 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
3317 {
3318 error ("integral result type precision does not match "
3319 "field size of BIT_FIELD_REF");
3320 return t;
3321 }
3322 if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
3323 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
3324 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
3325 {
3326 error ("mode precision of non-integral result does not "
3327 "match field size of BIT_FIELD_REF");
3328 return t;
3329 }
2fbe90f2
RK
3330 }
3331
3332 t = TREE_OPERAND (t, 0);
3333 }
3334
bb0c55f6 3335 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2fbe90f2 3336 {
ab532386 3337 error ("invalid reference prefix");
2fbe90f2
RK
3338 return t;
3339 }
3340 *walk_subtrees = 0;
6de9cd9a 3341 break;
5be014d5
AP
3342 case PLUS_EXPR:
3343 case MINUS_EXPR:
3344 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; they should be done using
3345 POINTER_PLUS_EXPR. */
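 /* I.e. (illustration): pointer arithmetic such as "p + 4" must be
    expressed in GIMPLE as POINTER_PLUS_EXPR <p, 4> with a sizetype
    offset; a PLUS_EXPR or MINUS_EXPR whose result type is a pointer is
    rejected here. */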
3346 if (POINTER_TYPE_P (TREE_TYPE (t)))
3347 {
3348 error ("invalid operand to plus/minus, type is a pointer");
3349 return t;
3350 }
3351 CHECK_OP (0, "invalid operand to binary operator");
3352 CHECK_OP (1, "invalid operand to binary operator");
3353 break;
6de9cd9a 3354
5be014d5
AP
3355 case POINTER_PLUS_EXPR:
3356 /* Check to make sure the first operand is a pointer or reference type. */
3357 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3358 {
3359 error ("invalid operand to pointer plus, first operand is not a pointer");
3360 return t;
3361 }
3362 /* Check to make sure the second operand is an integer with type of
3363 sizetype. */
36618b93
RG
3364 if (!useless_type_conversion_p (sizetype,
3365 TREE_TYPE (TREE_OPERAND (t, 1))))
5be014d5
AP
3366 {
3367 error ("invalid operand to pointer plus, second operand is not an "
3368 "integer with type of sizetype.");
3369 return t;
3370 }
3371 /* FALLTHROUGH */
6de9cd9a
DN
3372 case LT_EXPR:
3373 case LE_EXPR:
3374 case GT_EXPR:
3375 case GE_EXPR:
3376 case EQ_EXPR:
3377 case NE_EXPR:
3378 case UNORDERED_EXPR:
3379 case ORDERED_EXPR:
3380 case UNLT_EXPR:
3381 case UNLE_EXPR:
3382 case UNGT_EXPR:
3383 case UNGE_EXPR:
3384 case UNEQ_EXPR:
d1a7edaf 3385 case LTGT_EXPR:
6de9cd9a
DN
3386 case MULT_EXPR:
3387 case TRUNC_DIV_EXPR:
3388 case CEIL_DIV_EXPR:
3389 case FLOOR_DIV_EXPR:
3390 case ROUND_DIV_EXPR:
3391 case TRUNC_MOD_EXPR:
3392 case CEIL_MOD_EXPR:
3393 case FLOOR_MOD_EXPR:
3394 case ROUND_MOD_EXPR:
3395 case RDIV_EXPR:
3396 case EXACT_DIV_EXPR:
3397 case MIN_EXPR:
3398 case MAX_EXPR:
3399 case LSHIFT_EXPR:
3400 case RSHIFT_EXPR:
3401 case LROTATE_EXPR:
3402 case RROTATE_EXPR:
3403 case BIT_IOR_EXPR:
3404 case BIT_XOR_EXPR:
3405 case BIT_AND_EXPR:
ab532386
JM
3406 CHECK_OP (0, "invalid operand to binary operator");
3407 CHECK_OP (1, "invalid operand to binary operator");
6de9cd9a
DN
3408 break;
3409
84816907
JM
3410 case CONSTRUCTOR:
3411 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3412 *walk_subtrees = 0;
3413 break;
3414
6de9cd9a
DN
3415 default:
3416 break;
3417 }
3418 return NULL;
2fbe90f2
RK
3419
3420#undef CHECK_OP
6de9cd9a
DN
3421}
3422
7e98624c
RG
3423/* Verifies if EXPR is a valid GIMPLE unary expression. Returns true
3424 if there is an error, otherwise false. */
3425
3426static bool
ed7a4b4b 3427verify_gimple_unary_expr (const_tree expr)
7e98624c
RG
3428{
3429 tree op = TREE_OPERAND (expr, 0);
3430 tree type = TREE_TYPE (expr);
3431
3432 if (!is_gimple_val (op))
3433 {
3434 error ("invalid operand in unary expression");
3435 return true;
3436 }
3437
3438 /* For general unary expressions we have the operation's type
3439 as the effective type the operation is carried out on. So all
3440 we need to require is that the operand is trivially convertible
3441 to that type. */
3442 if (!useless_type_conversion_p (type, TREE_TYPE (op)))
3443 {
3444 error ("type mismatch in unary expression");
3445 debug_generic_expr (type);
3446 debug_generic_expr (TREE_TYPE (op));
3447 return true;
3448 }
3449
3450 return false;
3451}
3452
3453/* Verifies if EXPR is a valid GIMPLE binary expression. Returns true
3454 if there is an error, otherwise false. */
3455
3456static bool
ed7a4b4b 3457verify_gimple_binary_expr (const_tree expr)
7e98624c
RG
3458{
3459 tree op0 = TREE_OPERAND (expr, 0);
3460 tree op1 = TREE_OPERAND (expr, 1);
3461 tree type = TREE_TYPE (expr);
3462
3463 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3464 {
3465 error ("invalid operands in binary expression");
3466 return true;
3467 }
3468
3469 /* For general binary expressions we have the operation's type
3470 as the effective type the operation is carried out on. So all
3471 we need to require is that both operands are trivially convertible
3472 to that type. */
3473 if (!useless_type_conversion_p (type, TREE_TYPE (op0))
3474 || !useless_type_conversion_p (type, TREE_TYPE (op1)))
3475 {
3476 error ("type mismatch in binary expression");
3477 debug_generic_stmt (type);
3478 debug_generic_stmt (TREE_TYPE (op0));
3479 debug_generic_stmt (TREE_TYPE (op1));
3480 return true;
3481 }
3482
3483 return false;
3484}
3485
3486/* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3487 Returns true if there is an error, otherwise false. */
3488
3489static bool
3490verify_gimple_min_lval (tree expr)
3491{
3492 tree op;
3493
3494 if (is_gimple_id (expr))
3495 return false;
3496
3497 if (TREE_CODE (expr) != INDIRECT_REF
3498 && TREE_CODE (expr) != ALIGN_INDIRECT_REF
3499 && TREE_CODE (expr) != MISALIGNED_INDIRECT_REF)
3500 {
3501 error ("invalid expression for min lvalue");
3502 return true;
3503 }
3504
3505 op = TREE_OPERAND (expr, 0);
3506 if (!is_gimple_val (op))
3507 {
3508 error ("invalid operand in indirect reference");
3509 debug_generic_stmt (op);
3510 return true;
3511 }
3512 if (!useless_type_conversion_p (TREE_TYPE (expr),
3513 TREE_TYPE (TREE_TYPE (op))))
3514 {
3515 error ("type mismatch in indirect reference");
3516 debug_generic_stmt (TREE_TYPE (expr));
3517 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3518 return true;
3519 }
3520
3521 return false;
3522}
3523
3524/* Verify if EXPR is a valid GIMPLE reference expression. Returns true
3525 if there is an error, otherwise false. */
3526
3527static bool
3528verify_gimple_reference (tree expr)
3529{
3530 while (handled_component_p (expr))
3531 {
3532 tree op = TREE_OPERAND (expr, 0);
3533
3534 if (TREE_CODE (expr) == ARRAY_REF
3535 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3536 {
3537 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3538 || (TREE_OPERAND (expr, 2)
3539 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3540 || (TREE_OPERAND (expr, 3)
3541 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3542 {
3543 error ("invalid operands to array reference");
3544 debug_generic_stmt (expr);
3545 return true;
3546 }
3547 }
3548
3549 /* Verify if the reference array element types are compatible. */
3550 if (TREE_CODE (expr) == ARRAY_REF
3551 && !useless_type_conversion_p (TREE_TYPE (expr),
3552 TREE_TYPE (TREE_TYPE (op))))
3553 {
3554 error ("type mismatch in array reference");
3555 debug_generic_stmt (TREE_TYPE (expr));
3556 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3557 return true;
3558 }
3559 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3560 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3561 TREE_TYPE (TREE_TYPE (op))))
3562 {
3563 error ("type mismatch in array range reference");
3564 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3565 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3566 return true;
3567 }
3568
3569 if ((TREE_CODE (expr) == REALPART_EXPR
3570 || TREE_CODE (expr) == IMAGPART_EXPR)
3571 && !useless_type_conversion_p (TREE_TYPE (expr),
3572 TREE_TYPE (TREE_TYPE (op))))
3573 {
3574 error ("type mismatch in real/imagpart reference");
3575 debug_generic_stmt (TREE_TYPE (expr));
3576 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3577 return true;
3578 }
3579
3580 if (TREE_CODE (expr) == COMPONENT_REF
3581 && !useless_type_conversion_p (TREE_TYPE (expr),
3582 TREE_TYPE (TREE_OPERAND (expr, 1))))
3583 {
3584 error ("type mismatch in component reference");
3585 debug_generic_stmt (TREE_TYPE (expr));
3586 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3587 return true;
3588 }
3589
3590 /* For VIEW_CONVERT_EXPRs which are allowed here, too, there
3591 is nothing to verify. Gross mismatches at most invoke
3592 undefined behavior. */
3593
3594 expr = op;
3595 }
3596
3597 return verify_gimple_min_lval (expr);
3598}
3599
20dcff2a
RG
3600/* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3601 list of pointer-to types that is trivially convertible to DEST. */
3602
3603static bool
3604one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3605{
3606 tree src;
3607
3608 if (!TYPE_POINTER_TO (src_obj))
3609 return true;
3610
3611 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3612 if (useless_type_conversion_p (dest, src))
3613 return true;
3614
3615 return false;
3616}
3617
17d23165
RS
3618/* Return true if TYPE1 is a fixed-point type and if conversions to and
3619 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3620
3621static bool
3622valid_fixed_convert_types_p (tree type1, tree type2)
3623{
3624 return (FIXED_POINT_TYPE_P (type1)
3625 && (INTEGRAL_TYPE_P (type2)
3626 || SCALAR_FLOAT_TYPE_P (type2)
3627 || FIXED_POINT_TYPE_P (type2)));
3628}
3629
7e98624c
RG
3630/* Verify the GIMPLE expression EXPR. Returns true if there is an
3631 error, otherwise false. */
3632
3633static bool
3634verify_gimple_expr (tree expr)
3635{
3636 tree type = TREE_TYPE (expr);
3637
3638 if (is_gimple_val (expr))
3639 return false;
3640
3641 /* Special codes we cannot handle via their class. */
3642 switch (TREE_CODE (expr))
3643 {
1043771b 3644 CASE_CONVERT:
7e98624c
RG
3645 {
3646 tree op = TREE_OPERAND (expr, 0);
3647 if (!is_gimple_val (op))
3648 {
3649 error ("invalid operand in conversion");
3650 return true;
3651 }
3652
9822c455
RG
3653 /* Allow conversions between integral types and between
3654 pointer types. */
3655 if ((INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (op)))
3656 || (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (op))))
7e98624c
RG
3657 return false;
3658
3659 /* Allow conversions between integral types and pointers only if
3660 there is no sign or zero extension involved. */
3661 if (((POINTER_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (op)))
3662 || (POINTER_TYPE_P (TREE_TYPE (op)) && INTEGRAL_TYPE_P (type)))
a86907b2
RG
3663 && (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op))
 3664	      /* For targets where the precision of sizetype doesn't
3665 match that of pointers we need the following. */
3666 || type == sizetype || TREE_TYPE (op) == sizetype))
7e98624c
RG
3667 return false;
3668
3669 /* Allow conversion from integer to offset type and vice versa. */
3670 if ((TREE_CODE (type) == OFFSET_TYPE
3671 && TREE_CODE (TREE_TYPE (op)) == INTEGER_TYPE)
3672 || (TREE_CODE (type) == INTEGER_TYPE
3673 && TREE_CODE (TREE_TYPE (op)) == OFFSET_TYPE))
3674 return false;
3675
3676 /* Otherwise assert we are converting between types of the
3677 same kind. */
3678 if (TREE_CODE (type) != TREE_CODE (TREE_TYPE (op)))
3679 {
3680 error ("invalid types in nop conversion");
3681 debug_generic_expr (type);
3682 debug_generic_expr (TREE_TYPE (op));
3683 return true;
3684 }
3685
3686 return false;
3687 }
3688
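      /* Illustrative sketch, not part of the original source, using
	 hypothetical operands:

	     long l; int i; char *p; void *q; float f;

	     l = (long) i;    ok: integral to integral
	     q = (void *) p;  ok: pointer to pointer
	     l = (long) p;    ok only if long and the pointer have the same
			      precision (or sizetype is involved)
	     f = (float) i;   rejected here; an int-to-float conversion
			      must be expressed as a FLOAT_EXPR  */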
17d23165
RS
3689 case FIXED_CONVERT_EXPR:
3690 {
3691 tree op = TREE_OPERAND (expr, 0);
3692 if (!is_gimple_val (op))
3693 {
3694 error ("invalid operand in conversion");
3695 return true;
3696 }
3697
3698 if (!valid_fixed_convert_types_p (type, TREE_TYPE (op))
3699 && !valid_fixed_convert_types_p (TREE_TYPE (op), type))
3700 {
3701 error ("invalid types in fixed-point conversion");
3702 debug_generic_expr (type);
3703 debug_generic_expr (TREE_TYPE (op));
3704 return true;
3705 }
3706
3707 return false;
3708 }
3709
7e98624c
RG
3710 case FLOAT_EXPR:
3711 {
3712 tree op = TREE_OPERAND (expr, 0);
3713 if (!is_gimple_val (op))
3714 {
3715 error ("invalid operand in int to float conversion");
3716 return true;
3717 }
3718 if (!INTEGRAL_TYPE_P (TREE_TYPE (op))
3719 || !SCALAR_FLOAT_TYPE_P (type))
3720 {
3721 error ("invalid types in conversion to floating point");
3722 debug_generic_expr (type);
3723 debug_generic_expr (TREE_TYPE (op));
3724 return true;
3725 }
3726 return false;
3727 }
3728
3729 case FIX_TRUNC_EXPR:
3730 {
3731 tree op = TREE_OPERAND (expr, 0);
3732 if (!is_gimple_val (op))
3733 {
3734 error ("invalid operand in float to int conversion");
3735 return true;
3736 }
3737 if (!INTEGRAL_TYPE_P (type)
3738 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (op)))
3739 {
3740 error ("invalid types in conversion to integer");
3741 debug_generic_expr (type);
3742 debug_generic_expr (TREE_TYPE (op));
3743 return true;
3744 }
3745 return false;
3746 }
3747
3748 case COMPLEX_EXPR:
3749 {
3750 tree op0 = TREE_OPERAND (expr, 0);
3751 tree op1 = TREE_OPERAND (expr, 1);
3752 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3753 {
3754 error ("invalid operands in complex expression");
3755 return true;
3756 }
 3757	if (TREE_CODE (type) != COMPLEX_TYPE
3758 || !(TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
3759 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (op0)))
3760 || !(TREE_CODE (TREE_TYPE (op1)) == INTEGER_TYPE
3761 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (op1)))
3762 || !useless_type_conversion_p (TREE_TYPE (type),
3763 TREE_TYPE (op0))
3764 || !useless_type_conversion_p (TREE_TYPE (type),
3765 TREE_TYPE (op1)))
3766 {
3767 error ("type mismatch in complex expression");
3768 debug_generic_stmt (TREE_TYPE (expr));
3769 debug_generic_stmt (TREE_TYPE (op0));
3770 debug_generic_stmt (TREE_TYPE (op1));
3771 return true;
3772 }
3773 return false;
3774 }
3775
3776 case CONSTRUCTOR:
3777 {
3778 /* This is used like COMPLEX_EXPR but for vectors. */
3779 if (TREE_CODE (type) != VECTOR_TYPE)
3780 {
3781 error ("constructor not allowed for non-vector types");
3782 debug_generic_stmt (type);
3783 return true;
3784 }
3785 /* FIXME: verify constructor arguments. */
3786 return false;
3787 }
3788
3789 case LSHIFT_EXPR:
3790 case RSHIFT_EXPR:
3791 case LROTATE_EXPR:
3792 case RROTATE_EXPR:
3793 {
3794 tree op0 = TREE_OPERAND (expr, 0);
3795 tree op1 = TREE_OPERAND (expr, 1);
3796 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3797 {
3798 error ("invalid operands in shift expression");
3799 return true;
3800 }
 3801	if (TREE_CODE (TREE_TYPE (op1)) != INTEGER_TYPE
3802 || !useless_type_conversion_p (type, TREE_TYPE (op0)))
3803 {
3804 error ("type mismatch in shift expression");
3805 debug_generic_stmt (TREE_TYPE (expr));
3806 debug_generic_stmt (TREE_TYPE (op0));
3807 debug_generic_stmt (TREE_TYPE (op1));
3808 return true;
3809 }
3810 return false;
3811 }
3812
3813 case PLUS_EXPR:
3814 case MINUS_EXPR:
3815 {
3816 tree op0 = TREE_OPERAND (expr, 0);
3817 tree op1 = TREE_OPERAND (expr, 1);
3818 if (POINTER_TYPE_P (type)
3819 || POINTER_TYPE_P (TREE_TYPE (op0))
3820 || POINTER_TYPE_P (TREE_TYPE (op1)))
3821 {
3822 error ("invalid (pointer) operands to plus/minus");
3823 return true;
3824 }
3825 /* Continue with generic binary expression handling. */
3826 break;
3827 }
3828
3829 case POINTER_PLUS_EXPR:
3830 {
3831 tree op0 = TREE_OPERAND (expr, 0);
3832 tree op1 = TREE_OPERAND (expr, 1);
3833 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3834 {
3835 error ("invalid operands in pointer plus expression");
3836 return true;
3837 }
3838 if (!POINTER_TYPE_P (TREE_TYPE (op0))
7e98624c
RG
3839 || !useless_type_conversion_p (type, TREE_TYPE (op0))
3840 || !useless_type_conversion_p (sizetype, TREE_TYPE (op1)))
3841 {
3842 error ("type mismatch in pointer plus expression");
3843 debug_generic_stmt (type);
3844 debug_generic_stmt (TREE_TYPE (op0));
3845 debug_generic_stmt (TREE_TYPE (op1));
3846 return true;
3847 }
3848 return false;
3849 }
3850
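    /* Illustrative sketch, not part of the original source: the C
       expression p + i, with int *p and int i, is gimplified into
       something like (temporary names are hypothetical)

	   D.1 = (long unsigned int) i;    offset converted to sizetype
	   D.2 = D.1 * 4;
	   p.3 = p + D.2;                  POINTER_PLUS_EXPR

       which satisfies the checks above: the first operand is a pointer
       of the result type and the second operand has sizetype.  */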
3851 case COND_EXPR:
3852 {
3853 tree op0 = TREE_OPERAND (expr, 0);
3854 tree op1 = TREE_OPERAND (expr, 1);
3855 tree op2 = TREE_OPERAND (expr, 2);
3856 if ((!is_gimple_val (op1)
3857 && TREE_CODE (TREE_TYPE (op1)) != VOID_TYPE)
3858 || (!is_gimple_val (op2)
3859 && TREE_CODE (TREE_TYPE (op2)) != VOID_TYPE))
3860 {
3861 error ("invalid operands in conditional expression");
3862 return true;
3863 }
3864 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0))
3865 || (TREE_CODE (TREE_TYPE (op1)) != VOID_TYPE
3866 && !useless_type_conversion_p (type, TREE_TYPE (op1)))
3867 || (TREE_CODE (TREE_TYPE (op2)) != VOID_TYPE
3868 && !useless_type_conversion_p (type, TREE_TYPE (op2))))
3869 {
3870 error ("type mismatch in conditional expression");
3871 debug_generic_stmt (type);
3872 debug_generic_stmt (TREE_TYPE (op0));
3873 debug_generic_stmt (TREE_TYPE (op1));
3874 debug_generic_stmt (TREE_TYPE (op2));
3875 return true;
3876 }
3877 return verify_gimple_expr (op0);
3878 }
3879
3880 case ADDR_EXPR:
3881 {
3882 tree op = TREE_OPERAND (expr, 0);
7e98624c
RG
3883 if (!is_gimple_addressable (op))
3884 {
3885 error ("invalid operand in unary expression");
3886 return true;
3887 }
20dcff2a 3888 if (!one_pointer_to_useless_type_conversion_p (type, TREE_TYPE (op))
7e98624c
RG
3889 /* FIXME: a longstanding wart, &a == &a[0]. */
3890 && (TREE_CODE (TREE_TYPE (op)) != ARRAY_TYPE
20dcff2a
RG
3891 || !one_pointer_to_useless_type_conversion_p (type,
3892 TREE_TYPE (TREE_TYPE (op)))))
7e98624c
RG
3893 {
3894 error ("type mismatch in address expression");
3895 debug_generic_stmt (TREE_TYPE (expr));
8fc6f12f 3896 debug_generic_stmt (TYPE_POINTER_TO (TREE_TYPE (op)));
7e98624c
RG
3897 return true;
3898 }
3899
3900 return verify_gimple_reference (op);
3901 }
3902
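    /* Illustrative sketch, not part of the original source: for an
       operand declared as

	   int a[10];

       the natural type of &a is int (*)[10], but because of the
       longstanding &a == &a[0] wart mentioned above an ADDR_EXPR of a
       whose type is int *, i.e. a pointer to the element type instead
       of the array type, is accepted as well.  */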
3903 case TRUTH_ANDIF_EXPR:
3904 case TRUTH_ORIF_EXPR:
2893f753
RAE
3905 gcc_unreachable ();
3906
7e98624c
RG
3907 case TRUTH_AND_EXPR:
3908 case TRUTH_OR_EXPR:
3909 case TRUTH_XOR_EXPR:
3910 {
3911 tree op0 = TREE_OPERAND (expr, 0);
3912 tree op1 = TREE_OPERAND (expr, 1);
3913
3914 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3915 {
3916 error ("invalid operands in truth expression");
3917 return true;
3918 }
3919
3920 /* We allow any kind of integral typed argument and result. */
3921 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0))
3922 || !INTEGRAL_TYPE_P (TREE_TYPE (op1))
3923 || !INTEGRAL_TYPE_P (type))
3924 {
3925 error ("type mismatch in binary truth expression");
3926 debug_generic_stmt (type);
3927 debug_generic_stmt (TREE_TYPE (op0));
3928 debug_generic_stmt (TREE_TYPE (op1));
3929 return true;
3930 }
3931
3932 return false;
3933 }
3934
3935 case TRUTH_NOT_EXPR:
3936 {
3937 tree op = TREE_OPERAND (expr, 0);
3938
3939 if (!is_gimple_val (op))
3940 {
3941 error ("invalid operand in unary not");
3942 return true;
3943 }
3944
3945 /* For TRUTH_NOT_EXPR we can have any kind of integral
3946 typed arguments and results. */
3947 if (!INTEGRAL_TYPE_P (TREE_TYPE (op))
3948 || !INTEGRAL_TYPE_P (type))
3949 {
3950 error ("type mismatch in not expression");
3951 debug_generic_expr (TREE_TYPE (expr));
3952 debug_generic_expr (TREE_TYPE (op));
3953 return true;
3954 }
3955
3956 return false;
3957 }
3958
3959 case CALL_EXPR:
3960 /* FIXME. The C frontend passes unpromoted arguments in case it
3961 didn't see a function declaration before the call. */
becfd6e5
KZ
3962 {
3963 tree decl = CALL_EXPR_FN (expr);
3964
3965 if (TREE_CODE (decl) == FUNCTION_DECL
3966 && DECL_LOOPING_CONST_OR_PURE_P (decl)
3967 && (!DECL_PURE_P (decl))
3968 && (!TREE_READONLY (decl)))
3969 {
3970 error ("invalid pure const state for function");
3971 return true;
3972 }
3973 return false;
3974 }
7e98624c 3975
b691d4b0
RG
3976 case OBJ_TYPE_REF:
3977 /* FIXME. */
3978 return false;
3979
7e98624c
RG
3980 default:;
3981 }
3982
3983 /* Generic handling via classes. */
3984 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3985 {
3986 case tcc_unary:
3987 return verify_gimple_unary_expr (expr);
3988
3989 case tcc_binary:
3990 return verify_gimple_binary_expr (expr);
3991
3992 case tcc_reference:
3993 return verify_gimple_reference (expr);
3994
3995 case tcc_comparison:
3996 {
3997 tree op0 = TREE_OPERAND (expr, 0);
3998 tree op1 = TREE_OPERAND (expr, 1);
3999 if (!is_gimple_val (op0) || !is_gimple_val (op1))
4000 {
4001 error ("invalid operands in comparison expression");
4002 return true;
4003 }
 4004	/* For comparisons we do not have the operation's type as the
4005 effective type the comparison is carried out in. Instead
4006 we require that either the first operand is trivially
4007 convertible into the second, or the other way around.
4008 The resulting type of a comparison may be any integral type.
4009 Because we special-case pointers to void we allow
4010 comparisons of pointers with the same mode as well. */
4011 if ((!useless_type_conversion_p (TREE_TYPE (op0), TREE_TYPE (op1))
4012 && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0))
4013 && (!POINTER_TYPE_P (TREE_TYPE (op0))
4014 || !POINTER_TYPE_P (TREE_TYPE (op1))
4015 || TYPE_MODE (TREE_TYPE (op0)) != TYPE_MODE (TREE_TYPE (op1))))
4016 || !INTEGRAL_TYPE_P (type))
4017 {
4018 error ("type mismatch in comparison expression");
4019 debug_generic_stmt (TREE_TYPE (expr));
4020 debug_generic_stmt (TREE_TYPE (op0));
4021 debug_generic_stmt (TREE_TYPE (op1));
4022 return true;
4023 }
4024 break;
4025 }
4026
4027 default:
4028 gcc_unreachable ();
4029 }
4030
4031 return false;
4032}
4033
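/* Illustrative sketch, not part of the original source, of the
   comparison rule above, using hypothetical SSA names:

       int *p_1; void *q_2; _Bool b_3; int i_4;

       b_3 = p_1 == q_2;   ok: the pointer types need not interconvert
			   trivially as long as both are pointers with
			   the same mode
       i_4 = p_1 < q_2;    ok: any integral result type is allowed
       b_3 = p_1 == i_4;   rejected: type mismatch in comparison  */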
4034/* Verify the GIMPLE assignment statement STMT. Returns true if there
4035 is an error, otherwise false. */
4036
4037static bool
ed7a4b4b 4038verify_gimple_modify_stmt (const_tree stmt)
7e98624c
RG
4039{
4040 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
4041 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
4042
4043 gcc_assert (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT);
4044
4045 if (!useless_type_conversion_p (TREE_TYPE (lhs),
4046 TREE_TYPE (rhs)))
4047 {
4048 error ("non-trivial conversion at assignment");
4049 debug_generic_expr (TREE_TYPE (lhs));
4050 debug_generic_expr (TREE_TYPE (rhs));
4051 return true;
4052 }
4053
4054 /* Loads/stores from/to a variable are ok. */
4055 if ((is_gimple_val (lhs)
4056 && is_gimple_variable (rhs))
4057 || (is_gimple_val (rhs)
4058 && is_gimple_variable (lhs)))
4059 return false;
4060
4061 /* Aggregate copies are ok. */
4062 if (!is_gimple_reg_type (TREE_TYPE (lhs))
4063 && !is_gimple_reg_type (TREE_TYPE (rhs)))
4064 return false;
4065
4066 /* We might get 'loads' from a parameter which is not a gimple value. */
4067 if (TREE_CODE (rhs) == PARM_DECL)
4068 return verify_gimple_expr (lhs);
4069
4070 if (!is_gimple_variable (lhs)
4071 && verify_gimple_expr (lhs))
4072 return true;
4073
4074 if (!is_gimple_variable (rhs)
4075 && verify_gimple_expr (rhs))
4076 return true;
4077
4078 return false;
4079}
4080
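/* Illustrative sketch, not part of the original source, using
   hypothetical operands:

       int i; long l; int *p; struct S s, t;

       i = l;     rejected: non-trivial conversion at assignment; the
		  narrowing must be done by an explicit conversion
       i = *p;    ok: a load through a reference that passes
		  verify_gimple_expr
       s = t;     ok: aggregate copy between objects of the same type  */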
4081/* Verify the GIMPLE statement STMT. Returns true if there is an
4082 error, otherwise false. */
4083
4084static bool
4085verify_gimple_stmt (tree stmt)
4086{
4087 if (!is_gimple_stmt (stmt))
4088 {
4089 error ("is not a valid GIMPLE statement");
4090 return true;
4091 }
4092
4093 if (OMP_DIRECTIVE_P (stmt))
4094 {
4095 /* OpenMP directives are validated by the FE and never operated
4096 on by the optimizers. Furthermore, OMP_FOR may contain
4097 non-gimple expressions when the main index variable has had
4098 its address taken. This does not affect the loop itself
4099 because the header of an OMP_FOR is merely used to determine
 4100	 how to set up the parallel iteration.  */
4101 return false;
4102 }
4103
4104 switch (TREE_CODE (stmt))
4105 {
4106 case GIMPLE_MODIFY_STMT:
4107 return verify_gimple_modify_stmt (stmt);
4108
4109 case GOTO_EXPR:
4110 case LABEL_EXPR:
4111 return false;
4112
4113 case SWITCH_EXPR:
4114 if (!is_gimple_val (TREE_OPERAND (stmt, 0)))
4115 {
4116 error ("invalid operand to switch statement");
4117 debug_generic_expr (TREE_OPERAND (stmt, 0));
4118 }
4119 return false;
4120
4121 case RETURN_EXPR:
4122 {
4123 tree op = TREE_OPERAND (stmt, 0);
4124
4125 if (TREE_CODE (TREE_TYPE (stmt)) != VOID_TYPE)
4126 {
4127 error ("type error in return expression");
4128 return true;
4129 }
4130
4131 if (op == NULL_TREE
4132 || TREE_CODE (op) == RESULT_DECL)
4133 return false;
4134
4135 return verify_gimple_modify_stmt (op);
4136 }
4137
4138 case CALL_EXPR:
4139 case COND_EXPR:
4140 return verify_gimple_expr (stmt);
4141
4142 case NOP_EXPR:
4143 case CHANGE_DYNAMIC_TYPE_EXPR:
4144 case ASM_EXPR:
2e28e797 4145 case PREDICT_EXPR:
7e98624c
RG
4146 return false;
4147
4148 default:
4149 gcc_unreachable ();
4150 }
4151}
4152
7dc83ebc
RG
4153/* Verify the GIMPLE statements inside the statement list STMTS.
4154 Returns true if there were any errors. */
7e98624c 4155
7dc83ebc
RG
4156static bool
4157verify_gimple_2 (tree stmts)
7e98624c
RG
4158{
4159 tree_stmt_iterator tsi;
7dc83ebc 4160 bool err = false;
7e98624c
RG
4161
4162 for (tsi = tsi_start (stmts); !tsi_end_p (tsi); tsi_next (&tsi))
4163 {
4164 tree stmt = tsi_stmt (tsi);
4165
4166 switch (TREE_CODE (stmt))
4167 {
4168 case BIND_EXPR:
7dc83ebc 4169 err |= verify_gimple_2 (BIND_EXPR_BODY (stmt));
7e98624c
RG
4170 break;
4171
4172 case TRY_CATCH_EXPR:
4173 case TRY_FINALLY_EXPR:
7dc83ebc
RG
4174 err |= verify_gimple_2 (TREE_OPERAND (stmt, 0));
4175 err |= verify_gimple_2 (TREE_OPERAND (stmt, 1));
7e98624c
RG
4176 break;
4177
4178 case CATCH_EXPR:
7dc83ebc 4179 err |= verify_gimple_2 (CATCH_BODY (stmt));
7e98624c
RG
4180 break;
4181
4182 case EH_FILTER_EXPR:
7dc83ebc 4183 err |= verify_gimple_2 (EH_FILTER_FAILURE (stmt));
7e98624c
RG
4184 break;
4185
4186 default:
7dc83ebc
RG
4187 {
4188 bool err2 = verify_gimple_stmt (stmt);
4189 if (err2)
4190 debug_generic_expr (stmt);
4191 err |= err2;
4192 }
7e98624c
RG
4193 }
4194 }
7dc83ebc
RG
4195
4196 return err;
4197}
4198
4199
4200/* Verify the GIMPLE statements inside the statement list STMTS. */
4201
4202void
4203verify_gimple_1 (tree stmts)
4204{
4205 if (verify_gimple_2 (stmts))
4206 internal_error ("verify_gimple failed");
7e98624c
RG
4207}
4208
4209/* Verify the GIMPLE statements inside the current function. */
4210
4211void
4212verify_gimple (void)
4213{
4214 verify_gimple_1 (BIND_EXPR_BODY (DECL_SAVED_TREE (cfun->decl)));
4215}
6de9cd9a
DN
4216
4217/* Verify STMT, return true if STMT is not in GIMPLE form.
4218 TODO: Implement type checking. */
4219
4220static bool
1eaba2f2 4221verify_stmt (tree stmt, bool last_in_block)
6de9cd9a
DN
4222{
4223 tree addr;
4224
50674e96
DN
4225 if (OMP_DIRECTIVE_P (stmt))
4226 {
4227 /* OpenMP directives are validated by the FE and never operated
4228 on by the optimizers. Furthermore, OMP_FOR may contain
4229 non-gimple expressions when the main index variable has had
4230 its address taken. This does not affect the loop itself
4231 because the header of an OMP_FOR is merely used to determine
 4232	 how to set up the parallel iteration.  */
4233 return false;
4234 }
4235
6de9cd9a
DN
4236 if (!is_gimple_stmt (stmt))
4237 {
ab532386 4238 error ("is not a valid GIMPLE statement");
1eaba2f2 4239 goto fail;
6de9cd9a
DN
4240 }
4241
4242 addr = walk_tree (&stmt, verify_expr, NULL, NULL);
4243 if (addr)
4244 {
4245 debug_generic_stmt (addr);
2f9ea521
RG
4246 if (addr != stmt)
4247 {
4248 inform ("in statement");
4249 debug_generic_stmt (stmt);
4250 }
6de9cd9a
DN
4251 return true;
4252 }
4253
1eaba2f2
RH
4254 /* If the statement is marked as part of an EH region, then it is
4255 expected that the statement could throw. Verify that when we
4256 have optimizations that simplify statements such that we prove
4257 that they cannot throw, that we update other data structures
4258 to match. */
4259 if (lookup_stmt_eh_region (stmt) >= 0)
4260 {
4261 if (!tree_could_throw_p (stmt))
4262 {
ab532386 4263 error ("statement marked for throw, but doesn%'t");
1eaba2f2
RH
4264 goto fail;
4265 }
4266 if (!last_in_block && tree_can_throw_internal (stmt))
4267 {
ab532386 4268 error ("statement marked for throw in middle of block");
1eaba2f2
RH
4269 goto fail;
4270 }
4271 }
4272
6de9cd9a 4273 return false;
1eaba2f2
RH
4274
4275 fail:
4276 debug_generic_stmt (stmt);
4277 return true;
6de9cd9a
DN
4278}
4279
4280
4281/* Return true when the T can be shared. */
4282
4283static bool
4284tree_node_can_be_shared (tree t)
4285{
6615c446 4286 if (IS_TYPE_OR_DECL_P (t)
6de9cd9a 4287 || is_gimple_min_invariant (t)
5e23162d 4288 || TREE_CODE (t) == SSA_NAME
953ff289
DN
4289 || t == error_mark_node
4290 || TREE_CODE (t) == IDENTIFIER_NODE)
6de9cd9a
DN
4291 return true;
4292
92b6dff3
JL
4293 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4294 return true;
4295
44de5aeb 4296 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
953ff289
DN
4297 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
4298 || TREE_CODE (t) == COMPONENT_REF
4299 || TREE_CODE (t) == REALPART_EXPR
4300 || TREE_CODE (t) == IMAGPART_EXPR)
6de9cd9a
DN
4301 t = TREE_OPERAND (t, 0);
4302
4303 if (DECL_P (t))
4304 return true;
4305
4306 return false;
4307}
4308
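/* Illustrative sketch, not part of the original source: declarations,
   types, SSA_NAMEs and constants such as integer_zero_node are
   deliberately shared, as are simple references like a.b or a[2]
   whose base is a decl and whose indices are invariant.  A
   computational node such as a single PLUS_EXPR must not be reachable
   from two statements; the walk using verify_node_sharing in
   verify_stmts reports "incorrect sharing of tree nodes" when it
   meets such a node twice.  */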
4309
4310/* Called via walk_trees. Verify tree sharing. */
4311
4312static tree
4313verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
4314{
4437b50d 4315 struct pointer_set_t *visited = (struct pointer_set_t *) data;
6de9cd9a
DN
4316
4317 if (tree_node_can_be_shared (*tp))
4318 {
4319 *walk_subtrees = false;
4320 return NULL;
4321 }
4322
4437b50d
JH
4323 if (pointer_set_insert (visited, *tp))
4324 return *tp;
6de9cd9a
DN
4325
4326 return NULL;
4327}
4328
4329
07beea0d
AH
4330/* Helper function for verify_gimple_tuples. */
4331
4332static tree
4333verify_gimple_tuples_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4334 void *data ATTRIBUTE_UNUSED)
4335{
4336 switch (TREE_CODE (*tp))
4337 {
4338 case MODIFY_EXPR:
4339 error ("unexpected non-tuple");
4340 debug_tree (*tp);
4341 gcc_unreachable ();
4342 return NULL_TREE;
4343
4344 default:
4345 return NULL_TREE;
4346 }
4347}
4348
4349/* Verify that there are no trees that should have been converted to
4350 gimple tuples. Return true if T contains a node that should have
4351 been converted to a gimple tuple, but hasn't. */
4352
4353static bool
4354verify_gimple_tuples (tree t)
4355{
4356 return walk_tree (&t, verify_gimple_tuples_1, NULL, NULL) != NULL;
4357}
4358
4437b50d
JH
4359static bool eh_error_found;
4360static int
4361verify_eh_throw_stmt_node (void **slot, void *data)
4362{
4363 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4364 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4365
4366 if (!pointer_set_contains (visited, node->stmt))
4367 {
4368 error ("Dead STMT in EH table");
4369 debug_generic_stmt (node->stmt);
4370 eh_error_found = true;
4371 }
4372 return 0;
4373}
4374
6de9cd9a
DN
4375/* Verify the GIMPLE statement chain. */
4376
4377void
4378verify_stmts (void)
4379{
4380 basic_block bb;
4381 block_stmt_iterator bsi;
4382 bool err = false;
4437b50d 4383 struct pointer_set_t *visited, *visited_stmts;
6de9cd9a
DN
4384 tree addr;
4385
4386 timevar_push (TV_TREE_STMT_VERIFY);
4437b50d
JH
4387 visited = pointer_set_create ();
4388 visited_stmts = pointer_set_create ();
6de9cd9a
DN
4389
4390 FOR_EACH_BB (bb)
4391 {
4392 tree phi;
4393 int i;
4394
17192884 4395 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
6de9cd9a
DN
4396 {
4397 int phi_num_args = PHI_NUM_ARGS (phi);
4398
4437b50d 4399 pointer_set_insert (visited_stmts, phi);
8de1fc1b
KH
4400 if (bb_for_stmt (phi) != bb)
4401 {
ab532386 4402 error ("bb_for_stmt (phi) is set to a wrong basic block");
8de1fc1b
KH
4403 err |= true;
4404 }
4405
6de9cd9a
DN
4406 for (i = 0; i < phi_num_args; i++)
4407 {
4408 tree t = PHI_ARG_DEF (phi, i);
4409 tree addr;
4410
e9705dc5
AO
4411 if (!t)
4412 {
4413 error ("missing PHI def");
4414 debug_generic_stmt (phi);
4415 err |= true;
4416 continue;
4417 }
6de9cd9a
DN
4418 /* Addressable variables do have SSA_NAMEs but they
4419 are not considered gimple values. */
e9705dc5
AO
4420 else if (TREE_CODE (t) != SSA_NAME
4421 && TREE_CODE (t) != FUNCTION_DECL
220f1c29 4422 && !is_gimple_min_invariant (t))
6de9cd9a
DN
4423 {
4424 error ("PHI def is not a GIMPLE value");
4425 debug_generic_stmt (phi);
4426 debug_generic_stmt (t);
4427 err |= true;
4428 }
4429
4437b50d 4430 addr = walk_tree (&t, verify_node_sharing, visited, NULL);
6de9cd9a
DN
4431 if (addr)
4432 {
ab532386 4433 error ("incorrect sharing of tree nodes");
6de9cd9a
DN
4434 debug_generic_stmt (phi);
4435 debug_generic_stmt (addr);
4436 err |= true;
4437 }
4438 }
4439 }
4440
1eaba2f2 4441 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
6de9cd9a
DN
4442 {
4443 tree stmt = bsi_stmt (bsi);
8de1fc1b 4444
4437b50d 4445 pointer_set_insert (visited_stmts, stmt);
07beea0d
AH
4446 err |= verify_gimple_tuples (stmt);
4447
8de1fc1b
KH
4448 if (bb_for_stmt (stmt) != bb)
4449 {
ab532386 4450 error ("bb_for_stmt (stmt) is set to a wrong basic block");
8de1fc1b
KH
4451 err |= true;
4452 }
4453
1eaba2f2
RH
4454 bsi_next (&bsi);
4455 err |= verify_stmt (stmt, bsi_end_p (bsi));
4437b50d 4456 addr = walk_tree (&stmt, verify_node_sharing, visited, NULL);
6de9cd9a
DN
4457 if (addr)
4458 {
ab532386 4459 error ("incorrect sharing of tree nodes");
6de9cd9a
DN
4460 debug_generic_stmt (stmt);
4461 debug_generic_stmt (addr);
4462 err |= true;
4463 }
4464 }
4465 }
4437b50d
JH
4466 eh_error_found = false;
4467 if (get_eh_throw_stmt_table (cfun))
4468 htab_traverse (get_eh_throw_stmt_table (cfun),
4469 verify_eh_throw_stmt_node,
4470 visited_stmts);
6de9cd9a 4471
4437b50d 4472 if (err | eh_error_found)
ab532386 4473 internal_error ("verify_stmts failed");
6de9cd9a 4474
4437b50d
JH
4475 pointer_set_destroy (visited);
4476 pointer_set_destroy (visited_stmts);
6946b3f7 4477 verify_histograms ();
6de9cd9a
DN
4478 timevar_pop (TV_TREE_STMT_VERIFY);
4479}
4480
4481
4482/* Verifies that the flow information is OK. */
4483
4484static int
4485tree_verify_flow_info (void)
4486{
4487 int err = 0;
4488 basic_block bb;
4489 block_stmt_iterator bsi;
4490 tree stmt;
4491 edge e;
628f6a4e 4492 edge_iterator ei;
6de9cd9a 4493
7506e1cb 4494 if (ENTRY_BLOCK_PTR->il.tree)
6de9cd9a 4495 {
7506e1cb 4496 error ("ENTRY_BLOCK has IL associated with it");
6de9cd9a
DN
4497 err = 1;
4498 }
4499
7506e1cb 4500 if (EXIT_BLOCK_PTR->il.tree)
6de9cd9a 4501 {
7506e1cb 4502 error ("EXIT_BLOCK has IL associated with it");
6de9cd9a
DN
4503 err = 1;
4504 }
4505
628f6a4e 4506 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6de9cd9a
DN
4507 if (e->flags & EDGE_FALLTHRU)
4508 {
ab532386 4509 error ("fallthru to exit from bb %d", e->src->index);
6de9cd9a
DN
4510 err = 1;
4511 }
4512
4513 FOR_EACH_BB (bb)
4514 {
4515 bool found_ctrl_stmt = false;
4516
548414c6
KH
4517 stmt = NULL_TREE;
4518
6de9cd9a
DN
4519 /* Skip labels on the start of basic block. */
4520 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4521 {
548414c6
KH
4522 tree prev_stmt = stmt;
4523
4524 stmt = bsi_stmt (bsi);
4525
4526 if (TREE_CODE (stmt) != LABEL_EXPR)
6de9cd9a
DN
4527 break;
4528
548414c6
KH
4529 if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
4530 {
953ff289
DN
4531 error ("nonlocal label ");
4532 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
4533 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4534 bb->index);
548414c6
KH
4535 err = 1;
4536 }
4537
4538 if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
6de9cd9a 4539 {
953ff289
DN
4540 error ("label ");
4541 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
4542 fprintf (stderr, " to block does not match in bb %d",
4543 bb->index);
6de9cd9a
DN
4544 err = 1;
4545 }
4546
548414c6 4547 if (decl_function_context (LABEL_EXPR_LABEL (stmt))
6de9cd9a
DN
4548 != current_function_decl)
4549 {
953ff289
DN
4550 error ("label ");
4551 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
4552 fprintf (stderr, " has incorrect context in bb %d",
4553 bb->index);
6de9cd9a
DN
4554 err = 1;
4555 }
4556 }
4557
4558 /* Verify that body of basic block BB is free of control flow. */
4559 for (; !bsi_end_p (bsi); bsi_next (&bsi))
4560 {
4561 tree stmt = bsi_stmt (bsi);
4562
4563 if (found_ctrl_stmt)
4564 {
ab532386 4565 error ("control flow in the middle of basic block %d",
6de9cd9a
DN
4566 bb->index);
4567 err = 1;
4568 }
4569
4570 if (stmt_ends_bb_p (stmt))
4571 found_ctrl_stmt = true;
4572
4573 if (TREE_CODE (stmt) == LABEL_EXPR)
4574 {
953ff289
DN
4575 error ("label ");
4576 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
4577 fprintf (stderr, " in the middle of basic block %d", bb->index);
6de9cd9a
DN
4578 err = 1;
4579 }
4580 }
953ff289 4581
6de9cd9a
DN
4582 bsi = bsi_last (bb);
4583 if (bsi_end_p (bsi))
4584 continue;
4585
4586 stmt = bsi_stmt (bsi);
4587
cc7220fd
JH
4588 err |= verify_eh_edges (stmt);
4589
6de9cd9a
DN
4590 if (is_ctrl_stmt (stmt))
4591 {
628f6a4e 4592 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
4593 if (e->flags & EDGE_FALLTHRU)
4594 {
ab532386 4595 error ("fallthru edge after a control statement in bb %d",
6de9cd9a
DN
4596 bb->index);
4597 err = 1;
4598 }
4599 }
4600
36b24193
ZD
4601 if (TREE_CODE (stmt) != COND_EXPR)
4602 {
4603 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
4604 after anything else but if statement. */
4605 FOR_EACH_EDGE (e, ei, bb->succs)
4606 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4607 {
4608 error ("true/false edge after a non-COND_EXPR in bb %d",
4609 bb->index);
4610 err = 1;
4611 }
4612 }
4613
6de9cd9a
DN
4614 switch (TREE_CODE (stmt))
4615 {
4616 case COND_EXPR:
4617 {
4618 edge true_edge;
4619 edge false_edge;
a9b77cd1
ZD
4620
4621 if (COND_EXPR_THEN (stmt) != NULL_TREE
4622 || COND_EXPR_ELSE (stmt) != NULL_TREE)
6de9cd9a 4623 {
a9b77cd1
ZD
4624 error ("COND_EXPR with code in branches at the end of bb %d",
4625 bb->index);
6de9cd9a
DN
4626 err = 1;
4627 }
4628
4629 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4630
4631 if (!true_edge || !false_edge
4632 || !(true_edge->flags & EDGE_TRUE_VALUE)
4633 || !(false_edge->flags & EDGE_FALSE_VALUE)
4634 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4635 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
628f6a4e 4636 || EDGE_COUNT (bb->succs) >= 3)
6de9cd9a 4637 {
ab532386 4638 error ("wrong outgoing edge flags at end of bb %d",
6de9cd9a
DN
4639 bb->index);
4640 err = 1;
4641 }
6de9cd9a
DN
4642 }
4643 break;
4644
4645 case GOTO_EXPR:
4646 if (simple_goto_p (stmt))
4647 {
ab532386 4648 error ("explicit goto at end of bb %d", bb->index);
6531d1be 4649 err = 1;
6de9cd9a
DN
4650 }
4651 else
4652 {
6531d1be 4653 /* FIXME. We should double check that the labels in the
6de9cd9a 4654 destination blocks have their address taken. */
628f6a4e 4655 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
4656 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4657 | EDGE_FALSE_VALUE))
4658 || !(e->flags & EDGE_ABNORMAL))
4659 {
ab532386 4660 error ("wrong outgoing edge flags at end of bb %d",
6de9cd9a
DN
4661 bb->index);
4662 err = 1;
4663 }
4664 }
4665 break;
4666
4667 case RETURN_EXPR:
c5cbcccf
ZD
4668 if (!single_succ_p (bb)
4669 || (single_succ_edge (bb)->flags
4670 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4671 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
6de9cd9a 4672 {
ab532386 4673 error ("wrong outgoing edge flags at end of bb %d", bb->index);
6de9cd9a
DN
4674 err = 1;
4675 }
c5cbcccf 4676 if (single_succ (bb) != EXIT_BLOCK_PTR)
6de9cd9a 4677 {
ab532386 4678 error ("return edge does not point to exit in bb %d",
6de9cd9a
DN
4679 bb->index);
4680 err = 1;
4681 }
4682 break;
4683
4684 case SWITCH_EXPR:
4685 {
7853504d 4686 tree prev;
6de9cd9a
DN
4687 edge e;
4688 size_t i, n;
4689 tree vec;
4690
4691 vec = SWITCH_LABELS (stmt);
4692 n = TREE_VEC_LENGTH (vec);
4693
4694 /* Mark all the destination basic blocks. */
4695 for (i = 0; i < n; ++i)
4696 {
4697 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
4698 basic_block label_bb = label_to_block (lab);
4699
1e128c5f 4700 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
6de9cd9a
DN
4701 label_bb->aux = (void *)1;
4702 }
4703
7853504d
SB
4704 /* Verify that the case labels are sorted. */
4705 prev = TREE_VEC_ELT (vec, 0);
b7814a18 4706 for (i = 1; i < n; ++i)
7853504d
SB
4707 {
4708 tree c = TREE_VEC_ELT (vec, i);
4709 if (! CASE_LOW (c))
4710 {
b7814a18
RG
4711 if (i != n - 1)
4712 {
4713 error ("found default case not at end of case vector");
4714 err = 1;
4715 }
7853504d
SB
4716 continue;
4717 }
4718 if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
4719 {
953ff289 4720 error ("case labels not sorted: ");
7853504d
SB
4721 print_generic_expr (stderr, prev, 0);
4722 fprintf (stderr," is greater than ");
4723 print_generic_expr (stderr, c, 0);
4724 fprintf (stderr," but comes before it.\n");
4725 err = 1;
4726 }
4727 prev = c;
4728 }
b7814a18
RG
4729 /* VRP will remove the default case if it can prove it will
4730 never be executed. So do not verify there always exists
4731 a default case here. */
7853504d 4732
628f6a4e 4733 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
4734 {
4735 if (!e->dest->aux)
4736 {
ab532386 4737 error ("extra outgoing edge %d->%d",
6de9cd9a
DN
4738 bb->index, e->dest->index);
4739 err = 1;
4740 }
4741 e->dest->aux = (void *)2;
4742 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4743 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4744 {
ab532386 4745 error ("wrong outgoing edge flags at end of bb %d",
6de9cd9a
DN
4746 bb->index);
4747 err = 1;
4748 }
4749 }
4750
4751 /* Check that we have all of them. */
4752 for (i = 0; i < n; ++i)
4753 {
4754 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
4755 basic_block label_bb = label_to_block (lab);
4756
4757 if (label_bb->aux != (void *)2)
4758 {
ab532386 4759 error ("missing edge %i->%i",
6de9cd9a
DN
4760 bb->index, label_bb->index);
4761 err = 1;
4762 }
4763 }
4764
628f6a4e 4765 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
4766 e->dest->aux = (void *)0;
4767 }
4768
4769 default: ;
4770 }
4771 }
4772
2b28c07a 4773 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
6de9cd9a
DN
4774 verify_dominators (CDI_DOMINATORS);
4775
4776 return err;
4777}
4778
4779
f0b698c1 4780/* Updates phi nodes after creating a forwarder block joined
6de9cd9a
DN
4781 by edge FALLTHRU. */
4782
4783static void
4784tree_make_forwarder_block (edge fallthru)
4785{
4786 edge e;
628f6a4e 4787 edge_iterator ei;
6de9cd9a 4788 basic_block dummy, bb;
5ae71719 4789 tree phi, new_phi, var;
6de9cd9a
DN
4790
4791 dummy = fallthru->src;
4792 bb = fallthru->dest;
4793
c5cbcccf 4794 if (single_pred_p (bb))
6de9cd9a
DN
4795 return;
4796
cfaab3a9 4797 /* If we redirected a branch we must create new PHI nodes at the
6de9cd9a 4798 start of BB. */
17192884 4799 for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
6de9cd9a
DN
4800 {
4801 var = PHI_RESULT (phi);
4802 new_phi = create_phi_node (var, bb);
4803 SSA_NAME_DEF_STMT (var) = new_phi;
d00ad49b 4804 SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
d2e398df 4805 add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
6de9cd9a
DN
4806 }
4807
17192884 4808 /* Ensure that the PHI node chain is in the same order. */
5ae71719 4809 set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
6de9cd9a
DN
4810
4811 /* Add the arguments we have stored on edges. */
628f6a4e 4812 FOR_EACH_EDGE (e, ei, bb->preds)
6de9cd9a
DN
4813 {
4814 if (e == fallthru)
4815 continue;
4816
71882046 4817 flush_pending_stmts (e);
6de9cd9a
DN
4818 }
4819}
4820
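/* Illustrative sketch, not part of the original source: assume the
   edge from C was redirected so that it now enters BB directly while
   A and B still reach BB through the new forwarder DUMMY, and the
   original PHI node (now sitting in DUMMY) was

       x_1 = PHI <x_2(A), x_3(B), x_4(C)>

   After the code above DUMMY defines a renamed merge, roughly
   x_5 = PHI <x_2(A), x_3(B)>, while BB defines
   x_1 = PHI <x_5(FALLTHRU), x_4(C)>, the argument for C being
   re-added from the pending statements by flush_pending_stmts.  */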
4821
6de9cd9a
DN
 4822/* Return a non-special label at the head of basic block BB.
4823 Create one if it doesn't exist. */
4824
d7621d3c 4825tree
6de9cd9a
DN
4826tree_block_label (basic_block bb)
4827{
4828 block_stmt_iterator i, s = bsi_start (bb);
4829 bool first = true;
4830 tree label, stmt;
4831
4832 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
4833 {
4834 stmt = bsi_stmt (i);
4835 if (TREE_CODE (stmt) != LABEL_EXPR)
4836 break;
4837 label = LABEL_EXPR_LABEL (stmt);
4838 if (!DECL_NONLOCAL (label))
4839 {
4840 if (!first)
4841 bsi_move_before (&i, &s);
4842 return label;
4843 }
4844 }
4845
4846 label = create_artificial_label ();
4847 stmt = build1 (LABEL_EXPR, void_type_node, label);
4848 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
4849 return label;
4850}
4851
4852
4853/* Attempt to perform edge redirection by replacing a possibly complex
4854 jump instruction by a goto or by removing the jump completely.
4855 This can apply only if all edges now point to the same block. The
4856 parameters and return values are equivalent to
4857 redirect_edge_and_branch. */
4858
4859static edge
4860tree_try_redirect_by_replacing_jump (edge e, basic_block target)
4861{
4862 basic_block src = e->src;
6de9cd9a
DN
4863 block_stmt_iterator b;
4864 tree stmt;
6de9cd9a 4865
07b43a87
KH
4866 /* We can replace or remove a complex jump only when we have exactly
4867 two edges. */
4868 if (EDGE_COUNT (src->succs) != 2
4869 /* Verify that all targets will be TARGET. Specifically, the
4870 edge that is not E must also go to TARGET. */
4871 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
6de9cd9a
DN
4872 return NULL;
4873
4874 b = bsi_last (src);
4875 if (bsi_end_p (b))
4876 return NULL;
4877 stmt = bsi_stmt (b);
4878
4879 if (TREE_CODE (stmt) == COND_EXPR
4880 || TREE_CODE (stmt) == SWITCH_EXPR)
4881 {
736432ee 4882 bsi_remove (&b, true);
6de9cd9a
DN
4883 e = ssa_redirect_edge (e, target);
4884 e->flags = EDGE_FALLTHRU;
4885 return e;
4886 }
4887
4888 return NULL;
4889}
4890
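/* Illustrative sketch, not part of the original source: if a block
   ends in

       if (x_1 > 0) goto <L1>; else goto <L2>;

   and the edge that is not being redirected already reaches TARGET,
   redirecting the other edge to TARGET as well makes the condition
   pointless; the COND_EXPR (or SWITCH_EXPR) is removed and the block
   is left with a single fallthru edge to TARGET.  */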
4891
4892/* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4893 edge representing the redirected branch. */
4894
4895static edge
4896tree_redirect_edge_and_branch (edge e, basic_block dest)
4897{
4898 basic_block bb = e->src;
4899 block_stmt_iterator bsi;
4900 edge ret;
18965703 4901 tree stmt;
6de9cd9a 4902
4f6c2131 4903 if (e->flags & EDGE_ABNORMAL)
6de9cd9a
DN
4904 return NULL;
4905
6531d1be 4906 if (e->src != ENTRY_BLOCK_PTR
6de9cd9a
DN
4907 && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
4908 return ret;
4909
4910 if (e->dest == dest)
4911 return NULL;
4912
6de9cd9a
DN
4913 bsi = bsi_last (bb);
4914 stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
4915
4916 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
4917 {
4918 case COND_EXPR:
a9b77cd1 4919 /* For COND_EXPR, we only need to redirect the edge. */
6de9cd9a
DN
4920 break;
4921
4922 case GOTO_EXPR:
4923 /* No non-abnormal edges should lead from a non-simple goto, and
4924 simple ones should be represented implicitly. */
1e128c5f 4925 gcc_unreachable ();
6de9cd9a
DN
4926
4927 case SWITCH_EXPR:
4928 {
d6be0d7f 4929 tree cases = get_cases_for_edge (e, stmt);
18965703 4930 tree label = tree_block_label (dest);
6de9cd9a 4931
d6be0d7f
JL
4932 /* If we have a list of cases associated with E, then use it
4933 as it's a lot faster than walking the entire case vector. */
4934 if (cases)
6de9cd9a 4935 {
4edbbd3f 4936 edge e2 = find_edge (e->src, dest);
d6be0d7f
JL
4937 tree last, first;
4938
4939 first = cases;
4940 while (cases)
4941 {
4942 last = cases;
4943 CASE_LABEL (cases) = label;
4944 cases = TREE_CHAIN (cases);
4945 }
4946
4947 /* If there was already an edge in the CFG, then we need
4948 to move all the cases associated with E to E2. */
4949 if (e2)
4950 {
4951 tree cases2 = get_cases_for_edge (e2, stmt);
4952
4953 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4954 TREE_CHAIN (cases2) = first;
4955 }
6de9cd9a 4956 }
92b6dff3
JL
4957 else
4958 {
d6be0d7f
JL
4959 tree vec = SWITCH_LABELS (stmt);
4960 size_t i, n = TREE_VEC_LENGTH (vec);
4961
4962 for (i = 0; i < n; i++)
4963 {
4964 tree elt = TREE_VEC_ELT (vec, i);
4965
4966 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4967 CASE_LABEL (elt) = label;
4968 }
92b6dff3 4969 }
d6be0d7f 4970
92b6dff3 4971 break;
6de9cd9a 4972 }
6de9cd9a
DN
4973
4974 case RETURN_EXPR:
736432ee 4975 bsi_remove (&bsi, true);
6de9cd9a
DN
4976 e->flags |= EDGE_FALLTHRU;
4977 break;
4978
e5c95afe
ZD
4979 case OMP_RETURN:
4980 case OMP_CONTINUE:
4981 case OMP_SECTIONS_SWITCH:
4982 case OMP_FOR:
4983 /* The edges from OMP constructs can be simply redirected. */
4984 break;
4985
6de9cd9a
DN
4986 default:
4987 /* Otherwise it must be a fallthru edge, and we don't need to
4988 do anything besides redirecting it. */
1e128c5f 4989 gcc_assert (e->flags & EDGE_FALLTHRU);
6de9cd9a
DN
4990 break;
4991 }
4992
4993 /* Update/insert PHI nodes as necessary. */
4994
4995 /* Now update the edges in the CFG. */
4996 e = ssa_redirect_edge (e, dest);
4997
4998 return e;
4999}
5000
14fa2cc0
ZD
5001/* Returns true if it is possible to remove edge E by redirecting
5002 it to the destination of the other edge from E->src. */
5003
5004static bool
9678086d 5005tree_can_remove_branch_p (const_edge e)
14fa2cc0
ZD
5006{
5007 if (e->flags & EDGE_ABNORMAL)
5008 return false;
5009
5010 return true;
5011}
6de9cd9a
DN
5012
5013/* Simple wrapper, as we can always redirect fallthru edges. */
5014
5015static basic_block
5016tree_redirect_edge_and_branch_force (edge e, basic_block dest)
5017{
5018 e = tree_redirect_edge_and_branch (e, dest);
1e128c5f 5019 gcc_assert (e);
6de9cd9a
DN
5020
5021 return NULL;
5022}
5023
5024
5025/* Splits basic block BB after statement STMT (but at least after the
5026 labels). If STMT is NULL, BB is split just after the labels. */
5027
5028static basic_block
5029tree_split_block (basic_block bb, void *stmt)
5030{
597ae074
JH
5031 block_stmt_iterator bsi;
5032 tree_stmt_iterator tsi_tgt;
7506e1cb 5033 tree act, list;
6de9cd9a
DN
5034 basic_block new_bb;
5035 edge e;
628f6a4e 5036 edge_iterator ei;
6de9cd9a
DN
5037
5038 new_bb = create_empty_bb (bb);
5039
5040 /* Redirect the outgoing edges. */
628f6a4e
BE
5041 new_bb->succs = bb->succs;
5042 bb->succs = NULL;
5043 FOR_EACH_EDGE (e, ei, new_bb->succs)
6de9cd9a
DN
5044 e->src = new_bb;
5045
5046 if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
5047 stmt = NULL;
5048
5049 /* Move everything from BSI to the new basic block. */
5050 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
5051 {
5052 act = bsi_stmt (bsi);
5053 if (TREE_CODE (act) == LABEL_EXPR)
5054 continue;
5055
5056 if (!stmt)
5057 break;
5058
5059 if (stmt == act)
5060 {
5061 bsi_next (&bsi);
5062 break;
5063 }
5064 }
5065
597ae074
JH
5066 if (bsi_end_p (bsi))
5067 return new_bb;
5068
5069 /* Split the statement list - avoid re-creating new containers as this
5070 brings ugly quadratic memory consumption in the inliner.
5071 (We are still quadratic since we need to update stmt BB pointers,
5072 sadly.) */
7506e1cb
ZD
5073 list = tsi_split_statement_list_before (&bsi.tsi);
5074 set_bb_stmt_list (new_bb, list);
5075 for (tsi_tgt = tsi_start (list);
597ae074 5076 !tsi_end_p (tsi_tgt); tsi_next (&tsi_tgt))
0a4fe58f 5077 change_bb_for_stmt (tsi_stmt (tsi_tgt), new_bb);
6de9cd9a
DN
5078
5079 return new_bb;
5080}
5081
5082
5083/* Moves basic block BB after block AFTER. */
5084
5085static bool
5086tree_move_block_after (basic_block bb, basic_block after)
5087{
5088 if (bb->prev_bb == after)
5089 return true;
5090
5091 unlink_block (bb);
5092 link_block (bb, after);
5093
5094 return true;
5095}
5096
5097
5098/* Return true if basic_block can be duplicated. */
5099
5100static bool
9678086d 5101tree_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
6de9cd9a
DN
5102{
5103 return true;
5104}
5105
84d65814 5106
6de9cd9a
DN
5107/* Create a duplicate of the basic block BB. NOTE: This does not
5108 preserve SSA form. */
5109
5110static basic_block
5111tree_duplicate_bb (basic_block bb)
5112{
5113 basic_block new_bb;
5114 block_stmt_iterator bsi, bsi_tgt;
84d65814 5115 tree phi;
6de9cd9a
DN
5116
5117 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
b0382c67 5118
84d65814
DN
5119 /* Copy the PHI nodes. We ignore PHI node arguments here because
5120 the incoming edges have not been setup yet. */
bb29d951 5121 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
b0382c67 5122 {
84d65814
DN
5123 tree copy = create_phi_node (PHI_RESULT (phi), new_bb);
5124 create_new_def_for (PHI_RESULT (copy), copy, PHI_RESULT_PTR (copy));
b0382c67 5125 }
84d65814
DN
5126
5127 /* Keep the chain of PHI nodes in the same order so that they can be
5128 updated by ssa_redirect_edge. */
5ae71719 5129 set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
b0382c67 5130
6de9cd9a
DN
5131 bsi_tgt = bsi_start (new_bb);
5132 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
5133 {
84d65814
DN
5134 def_operand_p def_p;
5135 ssa_op_iter op_iter;
5136 tree stmt, copy;
cc7220fd 5137 int region;
6de9cd9a 5138
84d65814 5139 stmt = bsi_stmt (bsi);
6de9cd9a
DN
5140 if (TREE_CODE (stmt) == LABEL_EXPR)
5141 continue;
5142
84d65814
DN
5143 /* Create a new copy of STMT and duplicate STMT's virtual
5144 operands. */
5f240ec4 5145 copy = unshare_expr (stmt);
5f240ec4 5146 bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
84d65814 5147 copy_virtual_operands (copy, stmt);
cc7220fd
JH
5148 region = lookup_stmt_eh_region (stmt);
5149 if (region >= 0)
5150 add_stmt_to_eh_region (copy, region);
6946b3f7 5151 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
84d65814
DN
5152
5153 /* Create new names for all the definitions created by COPY and
5154 add replacement mappings for each new name. */
5155 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5156 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6de9cd9a
DN
5157 }
5158
5159 return new_bb;
5160}
5161
5f40b3cb
ZD
5162/* Adds phi node arguments for edge E_COPY after basic block duplication. */
5163
5164static void
5165add_phi_args_after_copy_edge (edge e_copy)
5166{
5167 basic_block bb, bb_copy = e_copy->src, dest;
5168 edge e;
5169 edge_iterator ei;
5170 tree phi, phi_copy, phi_next, def;
5171
5172 if (!phi_nodes (e_copy->dest))
5173 return;
5174
5175 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5176
5177 if (e_copy->dest->flags & BB_DUPLICATED)
5178 dest = get_bb_original (e_copy->dest);
5179 else
5180 dest = e_copy->dest;
5181
5182 e = find_edge (bb, dest);
5183 if (!e)
5184 {
5185 /* During loop unrolling the target of the latch edge is copied.
 5186	 In this case we are not looking for an edge to dest, but for
 5187	 the duplicated block whose original was dest.  */
5188 FOR_EACH_EDGE (e, ei, bb->succs)
5189 {
5190 if ((e->dest->flags & BB_DUPLICATED)
5191 && get_bb_original (e->dest) == dest)
5192 break;
5193 }
5194
5195 gcc_assert (e != NULL);
5196 }
5197
5198 for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
5199 phi;
5200 phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
5201 {
5202 phi_next = PHI_CHAIN (phi);
5203 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5204 add_phi_arg (phi_copy, def, e_copy);
5205 }
5206}
5207
84d65814 5208
42759f1e
ZD
5209/* Basic block BB_COPY was created by code duplication. Add phi node
5210 arguments for edges going out of BB_COPY. The blocks that were
6580ee77 5211 duplicated have BB_DUPLICATED set. */
42759f1e
ZD
5212
5213void
5214add_phi_args_after_copy_bb (basic_block bb_copy)
5215{
628f6a4e 5216 edge_iterator ei;
5f40b3cb 5217 edge e_copy;
42759f1e 5218
628f6a4e 5219 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
42759f1e 5220 {
5f40b3cb 5221 add_phi_args_after_copy_edge (e_copy);
42759f1e
ZD
5222 }
5223}
5224
5225/* Blocks in REGION_COPY array of length N_REGION were created by
5226 duplication of basic blocks. Add phi node arguments for edges
5f40b3cb
ZD
5227 going from these blocks. If E_COPY is not NULL, also add
 5228   phi node arguments for its destination.  */
42759f1e
ZD
5229
5230void
5f40b3cb
ZD
5231add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5232 edge e_copy)
42759f1e
ZD
5233{
5234 unsigned i;
5235
5236 for (i = 0; i < n_region; i++)
6580ee77 5237 region_copy[i]->flags |= BB_DUPLICATED;
42759f1e
ZD
5238
5239 for (i = 0; i < n_region; i++)
5240 add_phi_args_after_copy_bb (region_copy[i]);
5f40b3cb
ZD
5241 if (e_copy)
5242 add_phi_args_after_copy_edge (e_copy);
42759f1e
ZD
5243
5244 for (i = 0; i < n_region; i++)
6580ee77 5245 region_copy[i]->flags &= ~BB_DUPLICATED;
42759f1e
ZD
5246}
5247
42759f1e
ZD
5248/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5249 important exit edge EXIT. By important we mean that no SSA name defined
5250 inside region is live over the other exit edges of the region. All entry
5251 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5252 to the duplicate of the region. SSA form, dominance and loop information
5253 is updated. The new basic blocks are stored to REGION_COPY in the same
5254 order as they had in REGION, provided that REGION_COPY is not NULL.
5255 The function returns false if it is unable to copy the region,
5256 true otherwise. */
5257
5258bool
5259tree_duplicate_sese_region (edge entry, edge exit,
5260 basic_block *region, unsigned n_region,
5261 basic_block *region_copy)
5262{
66f97d31 5263 unsigned i;
42759f1e
ZD
5264 bool free_region_copy = false, copying_header = false;
5265 struct loop *loop = entry->dest->loop_father;
5266 edge exit_copy;
66f97d31 5267 VEC (basic_block, heap) *doms;
42759f1e 5268 edge redirected;
09bac500
JH
5269 int total_freq = 0, entry_freq = 0;
5270 gcov_type total_count = 0, entry_count = 0;
42759f1e
ZD
5271
5272 if (!can_copy_bbs_p (region, n_region))
5273 return false;
5274
5275 /* Some sanity checking. Note that we do not check for all possible
 5276	 misuses of the functions.  I.e. if you ask to copy something weird,
5277 it will work, but the state of structures probably will not be
5278 correct. */
42759f1e
ZD
5279 for (i = 0; i < n_region; i++)
5280 {
5281 /* We do not handle subloops, i.e. all the blocks must belong to the
5282 same loop. */
5283 if (region[i]->loop_father != loop)
5284 return false;
5285
5286 if (region[i] != entry->dest
5287 && region[i] == loop->header)
5288 return false;
5289 }
5290
561e8a90 5291 set_loop_copy (loop, loop);
42759f1e
ZD
5292
5293 /* In case the function is used for loop header copying (which is the primary
5294 use), ensure that EXIT and its copy will be new latch and entry edges. */
5295 if (loop->header == entry->dest)
5296 {
5297 copying_header = true;
561e8a90 5298 set_loop_copy (loop, loop_outer (loop));
42759f1e
ZD
5299
5300 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5301 return false;
5302
5303 for (i = 0; i < n_region; i++)
5304 if (region[i] != exit->src
5305 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5306 return false;
5307 }
5308
5309 if (!region_copy)
5310 {
858904db 5311 region_copy = XNEWVEC (basic_block, n_region);
42759f1e
ZD
5312 free_region_copy = true;
5313 }
5314
84d65814 5315 gcc_assert (!need_ssa_update_p ());
42759f1e 5316
5deaef19 5317 /* Record blocks outside the region that are dominated by something
42759f1e 5318 inside. */
66f97d31 5319 doms = NULL;
6580ee77
JH
5320 initialize_original_copy_tables ();
5321
66f97d31 5322 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
42759f1e 5323
09bac500
JH
5324 if (entry->dest->count)
5325 {
5326 total_count = entry->dest->count;
5327 entry_count = entry->count;
5328 /* Fix up corner cases, to avoid division by zero or creation of negative
5329 frequencies. */
5330 if (entry_count > total_count)
5331 entry_count = total_count;
5332 }
5333 else
5334 {
5335 total_freq = entry->dest->frequency;
5336 entry_freq = EDGE_FREQUENCY (entry);
5337 /* Fix up corner cases, to avoid division by zero or creation of negative
5338 frequencies. */
5339 if (total_freq == 0)
5340 total_freq = 1;
5341 else if (entry_freq > total_freq)
5342 entry_freq = total_freq;
5343 }
5deaef19 5344
b9a66240
ZD
5345 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5346 split_edge_bb_loc (entry));
09bac500
JH
5347 if (total_count)
5348 {
5349 scale_bbs_frequencies_gcov_type (region, n_region,
5350 total_count - entry_count,
5351 total_count);
5352 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6531d1be 5353 total_count);
09bac500
JH
5354 }
5355 else
5356 {
5357 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5358 total_freq);
5359 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5360 }
42759f1e
ZD
5361
5362 if (copying_header)
5363 {
5364 loop->header = exit->dest;
5365 loop->latch = exit->src;
5366 }
5367
5368 /* Redirect the entry and add the phi node arguments. */
6580ee77 5369 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
42759f1e 5370 gcc_assert (redirected != NULL);
71882046 5371 flush_pending_stmts (entry);
42759f1e
ZD
5372
5373 /* Concerning updating of dominators: We must recount dominators
84d65814
DN
5374 for entry block and its copy. Anything that is outside of the
5375 region, but was dominated by something inside needs recounting as
5376 well. */
42759f1e 5377 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
66f97d31
ZD
5378 VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
5379 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5f40b3cb 5380 VEC_free (basic_block, heap, doms);
42759f1e 5381
84d65814 5382 /* Add the other PHI node arguments. */
5f40b3cb
ZD
5383 add_phi_args_after_copy (region_copy, n_region, NULL);
5384
5385 /* Update the SSA web. */
5386 update_ssa (TODO_update_ssa);
5387
5388 if (free_region_copy)
5389 free (region_copy);
5390
5391 free_original_copy_tables ();
5392 return true;
5393}
5394
5395/* Duplicates REGION consisting of N_REGION blocks. The new blocks
5396 are stored to REGION_COPY in the same order in that they appear
5397 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5398 the region, EXIT an exit from it. The condition guarding EXIT
5399 is moved to ENTRY. Returns true if duplication succeeds, false
5400 otherwise.
5401
5402 For example,
5403
5404 some_code;
5405 if (cond)
5406 A;
5407 else
5408 B;
5409
5410 is transformed to
5411
5412 if (cond)
5413 {
5414 some_code;
5415 A;
5416 }
5417 else
5418 {
5419 some_code;
5420 B;
5421 }
5422*/
5423
5424bool
5425tree_duplicate_sese_tail (edge entry, edge exit,
5426 basic_block *region, unsigned n_region,
5427 basic_block *region_copy)
5428{
5429 unsigned i;
5430 bool free_region_copy = false;
5431 struct loop *loop = exit->dest->loop_father;
5432 struct loop *orig_loop = entry->dest->loop_father;
5433 basic_block switch_bb, entry_bb, nentry_bb;
5434 VEC (basic_block, heap) *doms;
5435 int total_freq = 0, exit_freq = 0;
5436 gcov_type total_count = 0, exit_count = 0;
5437 edge exits[2], nexits[2], e;
5438 block_stmt_iterator bsi;
5439 tree cond;
5440 edge sorig, snew;
5441
5442 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5443 exits[0] = exit;
5444 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5445
5446 if (!can_copy_bbs_p (region, n_region))
5447 return false;
5448
5449 /* Some sanity checking. Note that we do not check for all possible
 5450	 misuses of the functions.  I.e. if you ask to copy something weird
5451 (e.g., in the example, if there is a jump from inside to the middle
 5452	 of some_code, or some_code defines some of the values used in cond)
5453 it will work, but the resulting code will not be correct. */
5454 for (i = 0; i < n_region; i++)
5455 {
5456 /* We do not handle subloops, i.e. all the blocks must belong to the
5457 same loop. */
5458 if (region[i]->loop_father != orig_loop)
5459 return false;
5460
5461 if (region[i] == orig_loop->latch)
5462 return false;
5463 }
5464
5465 initialize_original_copy_tables ();
5466 set_loop_copy (orig_loop, loop);
5467
5468 if (!region_copy)
5469 {
5470 region_copy = XNEWVEC (basic_block, n_region);
5471 free_region_copy = true;
5472 }
5473
5474 gcc_assert (!need_ssa_update_p ());
5475
5476 /* Record blocks outside the region that are dominated by something
5477 inside. */
5478 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5479
5480 if (exit->src->count)
5481 {
5482 total_count = exit->src->count;
5483 exit_count = exit->count;
5484 /* Fix up corner cases, to avoid division by zero or creation of negative
5485 frequencies. */
5486 if (exit_count > total_count)
5487 exit_count = total_count;
5488 }
5489 else
5490 {
5491 total_freq = exit->src->frequency;
5492 exit_freq = EDGE_FREQUENCY (exit);
5493 /* Fix up corner cases, to avoid division by zero or creation of negative
5494 frequencies. */
5495 if (total_freq == 0)
5496 total_freq = 1;
5497 if (exit_freq > total_freq)
5498 exit_freq = total_freq;
5499 }
5500
5501 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5502 split_edge_bb_loc (exit));
5503 if (total_count)
5504 {
5505 scale_bbs_frequencies_gcov_type (region, n_region,
5506 total_count - exit_count,
5507 total_count);
5508 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5509 total_count);
5510 }
5511 else
5512 {
5513 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5514 total_freq);
5515 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5516 }
5517
5518 /* Create the switch block, and put the exit condition to it. */
5519 entry_bb = entry->dest;
5520 nentry_bb = get_bb_copy (entry_bb);
5521 if (!last_stmt (entry->src)
5522 || !stmt_ends_bb_p (last_stmt (entry->src)))
5523 switch_bb = entry->src;
5524 else
5525 switch_bb = split_edge (entry);
5526 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5527
5528 bsi = bsi_last (switch_bb);
5529 cond = last_stmt (exit->src);
5530 gcc_assert (TREE_CODE (cond) == COND_EXPR);
5531 bsi_insert_after (&bsi, unshare_expr (cond), BSI_NEW_STMT);
5532
5533 sorig = single_succ_edge (switch_bb);
5534 sorig->flags = exits[1]->flags;
5535 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5536
5537 /* Register the new edge from SWITCH_BB in loop exit lists. */
5538 rescan_loop_exit (snew, true, false);
5539
5540 /* Add the PHI node arguments. */
5541 add_phi_args_after_copy (region_copy, n_region, snew);
5542
5543 /* Get rid of now superfluous conditions and associated edges (and phi node
5544 arguments). */
5545 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
5546 PENDING_STMT (e) = NULL_TREE;
5547 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
5548 PENDING_STMT (e) = NULL_TREE;
5549
5550 /* Anything that is outside of the region, but was dominated by something
5551 inside needs to update dominance info. */
5552 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5553 VEC_free (basic_block, heap, doms);
42759f1e 5554
84d65814
DN
5555 /* Update the SSA web. */
5556 update_ssa (TODO_update_ssa);
42759f1e
ZD
5557
5558 if (free_region_copy)
5559 free (region_copy);
5560
6580ee77 5561 free_original_copy_tables ();
42759f1e
ZD
5562 return true;
5563}
6de9cd9a 5564
50674e96
DN
5565/*
5566DEF_VEC_P(basic_block);
5567DEF_VEC_ALLOC_P(basic_block,heap);
5568*/
5569
5570/* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5571 adding blocks when the dominator traversal reaches EXIT. This
5572 function silently assumes that ENTRY strictly dominates EXIT. */
5573
9f9f72aa 5574void
50674e96
DN
5575gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5576 VEC(basic_block,heap) **bbs_p)
5577{
5578 basic_block son;
5579
5580 for (son = first_dom_son (CDI_DOMINATORS, entry);
5581 son;
5582 son = next_dom_son (CDI_DOMINATORS, son))
5583 {
5584 VEC_safe_push (basic_block, heap, *bbs_p, son);
5585 if (son != exit)
5586 gather_blocks_in_sese_region (son, exit, bbs_p);
5587 }
5588}
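/* A minimal usage sketch (illustrative, mirroring the caller in
   move_sese_region_to_fn below): the caller pushes ENTRY itself, since the
   traversal above only visits ENTRY's dominator-tree descendants.

     VEC(basic_block,heap) *bbs = NULL;
     VEC_safe_push (basic_block, heap, bbs, entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
     ... use the VEC_length (basic_block, bbs) collected blocks ...
     VEC_free (basic_block, heap, bbs);  */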
5589
917948d3
ZD
5590/* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5591 The duplicates are recorded in VARS_MAP. */
5592
5593static void
5594replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5595 tree to_context)
5596{
5597 tree t = *tp, new_t;
5598 struct function *f = DECL_STRUCT_FUNCTION (to_context);
5599 void **loc;
5600
5601 if (DECL_CONTEXT (t) == to_context)
5602 return;
5603
5604 loc = pointer_map_contains (vars_map, t);
5605
5606 if (!loc)
5607 {
5608 loc = pointer_map_insert (vars_map, t);
5609
5610 if (SSA_VAR_P (t))
5611 {
5612 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
cb91fab0 5613 f->local_decls = tree_cons (NULL_TREE, new_t, f->local_decls);
917948d3
ZD
5614 }
5615 else
5616 {
5617 gcc_assert (TREE_CODE (t) == CONST_DECL);
5618 new_t = copy_node (t);
5619 }
5620 DECL_CONTEXT (new_t) = to_context;
5621
5622 *loc = new_t;
5623 }
5624 else
3d9a9f94 5625 new_t = (tree) *loc;
917948d3
ZD
5626
5627 *tp = new_t;
5628}
5629
5630/* Creates an ssa name in TO_CONTEXT equivalent to NAME.
5631 VARS_MAP maps old ssa names and var_decls to the new ones. */
5632
5633static tree
5634replace_ssa_name (tree name, struct pointer_map_t *vars_map,
5635 tree to_context)
5636{
5637 void **loc;
5638 tree new_name, decl = SSA_NAME_VAR (name);
5639
5640 gcc_assert (is_gimple_reg (name));
5641
5642 loc = pointer_map_contains (vars_map, name);
5643
5644 if (!loc)
5645 {
5646 replace_by_duplicate_decl (&decl, vars_map, to_context);
5647
5648 push_cfun (DECL_STRUCT_FUNCTION (to_context));
5649 if (gimple_in_ssa_p (cfun))
5650 add_referenced_var (decl);
5651
5652 new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
5653 if (SSA_NAME_IS_DEFAULT_DEF (name))
5654 set_default_def (decl, new_name);
5655 pop_cfun ();
5656
5657 loc = pointer_map_insert (vars_map, name);
5658 *loc = new_name;
5659 }
5660 else
3d9a9f94 5661 new_name = (tree) *loc;
917948d3
ZD
5662
5663 return new_name;
5664}
50674e96
DN
5665
5666struct move_stmt_d
5667{
b357f682
JJ
5668 tree orig_block;
5669 tree new_block;
50674e96
DN
5670 tree from_context;
5671 tree to_context;
917948d3 5672 struct pointer_map_t *vars_map;
fad41cd7 5673 htab_t new_label_map;
50674e96
DN
5674 bool remap_decls_p;
5675};
5676
5677/* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
b357f682
JJ
 5678 contained in *TP to NEW_BLOCK if it was previously ORIG_BLOCK, and change
 5679 the DECL_CONTEXT of every local variable referenced in *TP. */
50674e96
DN
5680
5681static tree
fad41cd7 5682move_stmt_r (tree *tp, int *walk_subtrees, void *data)
50674e96
DN
5683{
5684 struct move_stmt_d *p = (struct move_stmt_d *) data;
fad41cd7 5685 tree t = *tp;
50674e96 5686
b357f682
JJ
5687 if (EXPR_P (t) || GIMPLE_STMT_P (t))
5688 {
5689 tree block = TREE_BLOCK (t);
5690 if (p->orig_block == NULL_TREE
5691 || block == p->orig_block
5692 || block == NULL_TREE)
5693 TREE_BLOCK (t) = p->new_block;
5694#ifdef ENABLE_CHECKING
5695 else if (block != p->new_block)
5696 {
5697 while (block && block != p->orig_block)
5698 block = BLOCK_SUPERCONTEXT (block);
5699 gcc_assert (block);
5700 }
5701#endif
5702 }
50674e96 5703
bed575d5
RS
5704 if (OMP_DIRECTIVE_P (t)
5705 && TREE_CODE (t) != OMP_RETURN
5706 && TREE_CODE (t) != OMP_CONTINUE)
50674e96
DN
5707 {
5708 /* Do not remap variables inside OMP directives. Variables
5709 referenced in clauses and directive header belong to the
5710 parent function and should not be moved into the child
5711 function. */
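 /* For example (illustrative): the 'x' referenced by a shared(x)
 clause on an OMP directive seen here keeps its parent-function
 DECL_CONTEXT; it is deliberately left out of the remapping. */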
fad41cd7 5712 bool save_remap_decls_p = p->remap_decls_p;
50674e96 5713 p->remap_decls_p = false;
fad41cd7
RH
5714 *walk_subtrees = 0;
5715
5716 walk_tree (&OMP_BODY (t), move_stmt_r, p, NULL);
50674e96 5717
fad41cd7
RH
5718 p->remap_decls_p = save_remap_decls_p;
5719 }
917948d3 5720 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
50674e96 5721 {
917948d3
ZD
5722 if (TREE_CODE (t) == SSA_NAME)
5723 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
5724 else if (TREE_CODE (t) == LABEL_DECL)
fad41cd7
RH
5725 {
5726 if (p->new_label_map)
5727 {
5728 struct tree_map in, *out;
fc8600f9 5729 in.base.from = t;
3d9a9f94
KG
5730 out = (struct tree_map *)
5731 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
fad41cd7
RH
5732 if (out)
5733 *tp = t = out->to;
5734 }
50674e96 5735
fad41cd7
RH
5736 DECL_CONTEXT (t) = p->to_context;
5737 }
5738 else if (p->remap_decls_p)
50674e96 5739 {
917948d3
ZD
5740 /* Replace T with its duplicate. T should no longer appear in the
5741 parent function, so this looks wasteful; however, it may appear
5742 in referenced_vars, and more importantly, as virtual operands of
5743 statements, and in alias lists of other variables. It would be
5744 quite difficult to expunge it from all those places. ??? It might
5745 suffice to do this for addressable variables. */
5746 if ((TREE_CODE (t) == VAR_DECL
5747 && !is_global_var (t))
5748 || TREE_CODE (t) == CONST_DECL)
5749 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
5750
5751 if (SSA_VAR_P (t)
5752 && gimple_in_ssa_p (cfun))
fad41cd7 5753 {
917948d3
ZD
5754 push_cfun (DECL_STRUCT_FUNCTION (p->to_context));
5755 add_referenced_var (*tp);
5756 pop_cfun ();
fad41cd7 5757 }
50674e96 5758 }
917948d3 5759 *walk_subtrees = 0;
50674e96 5760 }
fad41cd7
RH
5761 else if (TYPE_P (t))
5762 *walk_subtrees = 0;
50674e96
DN
5763
5764 return NULL_TREE;
5765}
5766
917948d3
ZD
 5767/* Marks virtual operands of all statements in basic block BB for
 5768 renaming. */
5769
dea61d92
SP
5770void
5771mark_virtual_ops_in_bb (basic_block bb)
917948d3
ZD
5772{
5773 tree phi;
5774 block_stmt_iterator bsi;
dea61d92
SP
5775
5776 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
5777 mark_virtual_ops_for_renaming (phi);
5778
5779 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
5780 mark_virtual_ops_for_renaming (bsi_stmt (bsi));
5781}
5782
5783/* Marks virtual operands of all statements in basic blocks BBS for
5784 renaming. */
5785
5786static void
5787mark_virtual_ops_in_region (VEC (basic_block,heap) *bbs)
5788{
917948d3
ZD
5789 basic_block bb;
5790 unsigned i;
5791
5792 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
dea61d92 5793 mark_virtual_ops_in_bb (bb);
917948d3 5794}
50674e96
DN
5795
5796/* Move basic block BB from function CFUN to function DEST_FN. The
5797 block is moved out of the original linked list and placed after
5798 block AFTER in the new list. Also, the block is removed from the
5799 original array of blocks and placed in DEST_FN's array of blocks.
 5800 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
 5801 updated to reflect the moved edges.
6531d1be 5802
917948d3
ZD
5803 The local variables are remapped to new instances, VARS_MAP is used
5804 to record the mapping. */
50674e96
DN
5805
5806static void
5807move_block_to_fn (struct function *dest_cfun, basic_block bb,
5808 basic_block after, bool update_edge_count_p,
b357f682 5809 struct move_stmt_d *d, int eh_offset)
50674e96
DN
5810{
5811 struct control_flow_graph *cfg;
5812 edge_iterator ei;
5813 edge e;
5814 block_stmt_iterator si;
728b26bb 5815 unsigned old_len, new_len;
5f40b3cb 5816 tree phi, next_phi;
50674e96 5817
3722506a
ZD
5818 /* Remove BB from dominance structures. */
5819 delete_from_dominance_info (CDI_DOMINATORS, bb);
5f40b3cb
ZD
5820 if (current_loops)
5821 remove_bb_from_loops (bb);
3722506a 5822
50674e96
DN
5823 /* Link BB to the new linked list. */
5824 move_block_after (bb, after);
5825
5826 /* Update the edge count in the corresponding flowgraphs. */
5827 if (update_edge_count_p)
5828 FOR_EACH_EDGE (e, ei, bb->succs)
5829 {
5830 cfun->cfg->x_n_edges--;
5831 dest_cfun->cfg->x_n_edges++;
5832 }
5833
5834 /* Remove BB from the original basic block array. */
5835 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
5836 cfun->cfg->x_n_basic_blocks--;
5837
5838 /* Grow DEST_CFUN's basic block array if needed. */
5839 cfg = dest_cfun->cfg;
5840 cfg->x_n_basic_blocks++;
3722506a
ZD
5841 if (bb->index >= cfg->x_last_basic_block)
5842 cfg->x_last_basic_block = bb->index + 1;
50674e96 5843
728b26bb
DN
5844 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
5845 if ((unsigned) cfg->x_last_basic_block >= old_len)
50674e96 5846 {
728b26bb 5847 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
a590ac65
KH
5848 VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
5849 new_len);
50674e96
DN
5850 }
5851
5852 VEC_replace (basic_block, cfg->x_basic_block_info,
e0310afb 5853 bb->index, bb);
50674e96 5854
917948d3 5855 /* Remap the variables in phi nodes. */
5f40b3cb 5856 for (phi = phi_nodes (bb); phi; phi = next_phi)
917948d3
ZD
5857 {
5858 use_operand_p use;
5859 tree op = PHI_RESULT (phi);
5860 ssa_op_iter oi;
5861
5f40b3cb 5862 next_phi = PHI_CHAIN (phi);
917948d3 5863 if (!is_gimple_reg (op))
5f40b3cb
ZD
5864 {
5865 /* Remove the phi nodes for virtual operands (alias analysis will be
5866 run for the new function, anyway). */
5867 remove_phi_node (phi, NULL, true);
5868 continue;
5869 }
917948d3 5870
b357f682
JJ
5871 SET_PHI_RESULT (phi,
5872 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
917948d3
ZD
5873 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
5874 {
5875 op = USE_FROM_PTR (use);
5876 if (TREE_CODE (op) == SSA_NAME)
b357f682 5877 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
917948d3
ZD
5878 }
5879 }
5880
50674e96
DN
5881 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
5882 {
5883 tree stmt = bsi_stmt (si);
fad41cd7 5884 int region;
50674e96 5885
b357f682 5886 walk_tree (&stmt, move_stmt_r, d, NULL);
50674e96
DN
5887
5888 if (TREE_CODE (stmt) == LABEL_EXPR)
5889 {
50674e96
DN
5890 tree label = LABEL_EXPR_LABEL (stmt);
5891 int uid = LABEL_DECL_UID (label);
5892
5893 gcc_assert (uid > -1);
5894
5895 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
5896 if (old_len <= (unsigned) uid)
5897 {
728b26bb 5898 new_len = 3 * uid / 2;
a590ac65
KH
5899 VEC_safe_grow_cleared (basic_block, gc,
5900 cfg->x_label_to_block_map, new_len);
50674e96
DN
5901 }
5902
5903 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
5904 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
5905
5906 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
5907
cb91fab0
JH
5908 if (uid >= dest_cfun->cfg->last_label_uid)
5909 dest_cfun->cfg->last_label_uid = uid + 1;
50674e96 5910 }
fad41cd7
RH
5911 else if (TREE_CODE (stmt) == RESX_EXPR && eh_offset != 0)
5912 TREE_OPERAND (stmt, 0) =
5913 build_int_cst (NULL_TREE,
5914 TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0))
5915 + eh_offset);
5916
5917 region = lookup_stmt_eh_region (stmt);
5918 if (region >= 0)
5919 {
5920 add_stmt_to_eh_region_fn (dest_cfun, stmt, region + eh_offset);
5921 remove_stmt_from_eh_region (stmt);
6946b3f7
JH
5922 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
5923 gimple_remove_stmt_histograms (cfun, stmt);
fad41cd7 5924 }
917948d3 5925
5f40b3cb
ZD
5926 /* We cannot leave any operands allocated from the operand caches of
5927 the current function. */
5928 free_stmt_operands (stmt);
5929 push_cfun (dest_cfun);
917948d3 5930 update_stmt (stmt);
5f40b3cb 5931 pop_cfun ();
fad41cd7
RH
5932 }
5933}
5934
5935/* Examine the statements in BB (which is in SRC_CFUN); find and return
5936 the outermost EH region. Use REGION as the incoming base EH region. */
5937
5938static int
5939find_outermost_region_in_block (struct function *src_cfun,
5940 basic_block bb, int region)
5941{
5942 block_stmt_iterator si;
6531d1be 5943
fad41cd7
RH
5944 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
5945 {
5946 tree stmt = bsi_stmt (si);
5947 int stmt_region;
1799e5d5 5948
07ed51c9
JJ
5949 if (TREE_CODE (stmt) == RESX_EXPR)
5950 stmt_region = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
5951 else
5952 stmt_region = lookup_stmt_eh_region_fn (src_cfun, stmt);
7e2df4a1
JJ
5953 if (stmt_region > 0)
5954 {
5955 if (region < 0)
5956 region = stmt_region;
5957 else if (stmt_region != region)
5958 {
5959 region = eh_region_outermost (src_cfun, stmt_region, region);
5960 gcc_assert (region != -1);
5961 }
5962 }
50674e96 5963 }
fad41cd7
RH
5964
5965 return region;
50674e96
DN
5966}
5967
fad41cd7
RH
5968static tree
5969new_label_mapper (tree decl, void *data)
5970{
5971 htab_t hash = (htab_t) data;
5972 struct tree_map *m;
5973 void **slot;
5974
5975 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
5976
3d9a9f94 5977 m = XNEW (struct tree_map);
fad41cd7 5978 m->hash = DECL_UID (decl);
fc8600f9 5979 m->base.from = decl;
fad41cd7
RH
5980 m->to = create_artificial_label ();
5981 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
cb91fab0
JH
5982 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
5983 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
fad41cd7
RH
5984
5985 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
5986 gcc_assert (*slot == NULL);
5987
5988 *slot = m;
5989
5990 return m->to;
5991}
50674e96 5992
b357f682
JJ
5993/* Change DECL_CONTEXT of all BLOCK_VARS in block, including
5994 subblocks. */
5995
5996static void
5997replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
5998 tree to_context)
5999{
6000 tree *tp, t;
6001
6002 for (tp = &BLOCK_VARS (block); *tp; tp = &TREE_CHAIN (*tp))
6003 {
6004 t = *tp;
6005 replace_by_duplicate_decl (&t, vars_map, to_context);
6006 if (t != *tp)
6007 {
6008 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6009 {
6010 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6011 DECL_HAS_VALUE_EXPR_P (t) = 1;
6012 }
6013 TREE_CHAIN (t) = TREE_CHAIN (*tp);
6014 *tp = t;
6015 }
6016 }
6017
6018 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6019 replace_block_vars_by_duplicates (block, vars_map, to_context);
6020}
6021
50674e96
DN
6022/* Move a single-entry, single-exit region delimited by ENTRY_BB and
6023 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6024 single basic block in the original CFG and the new basic block is
6025 returned. DEST_CFUN must not have a CFG yet.
6026
6027 Note that the region need not be a pure SESE region. Blocks inside
6028 the region may contain calls to abort/exit. The only restriction
6029 is that ENTRY_BB should be the only entry point and it must
6030 dominate EXIT_BB.
6031
b357f682
JJ
 6032 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
 6033 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
 6034 to the new function.
6035
50674e96
DN
6036 All local variables referenced in the region are assumed to be in
6037 the corresponding BLOCK_VARS and unexpanded variable lists
6038 associated with DEST_CFUN. */
6039
6040basic_block
6041move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
b357f682 6042 basic_block exit_bb, tree orig_block)
50674e96 6043{
917948d3
ZD
6044 VEC(basic_block,heap) *bbs, *dom_bbs;
6045 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6046 basic_block after, bb, *entry_pred, *exit_succ, abb;
6047 struct function *saved_cfun = cfun;
fad41cd7 6048 int *entry_flag, *exit_flag, eh_offset;
917948d3 6049 unsigned *entry_prob, *exit_prob;
50674e96
DN
6050 unsigned i, num_entry_edges, num_exit_edges;
6051 edge e;
6052 edge_iterator ei;
fad41cd7 6053 htab_t new_label_map;
917948d3 6054 struct pointer_map_t *vars_map;
5f40b3cb 6055 struct loop *loop = entry_bb->loop_father;
b357f682 6056 struct move_stmt_d d;
50674e96
DN
6057
6058 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6059 region. */
6060 gcc_assert (entry_bb != exit_bb
2aee3e57
JJ
6061 && (!exit_bb
6062 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
50674e96 6063
917948d3
ZD
6064 /* Collect all the blocks in the region. Manually add ENTRY_BB
 6065 because it won't be added by gather_blocks_in_sese_region. */
50674e96
DN
6066 bbs = NULL;
6067 VEC_safe_push (basic_block, heap, bbs, entry_bb);
6068 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6069
917948d3
ZD
6070 /* The blocks that used to be dominated by something in BBS will now be
6071 dominated by the new block. */
6072 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6073 VEC_address (basic_block, bbs),
6074 VEC_length (basic_block, bbs));
6075
50674e96
DN
6076 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6077 the predecessor edges to ENTRY_BB and the successor edges to
6078 EXIT_BB so that we can re-attach them to the new basic block that
6079 will replace the region. */
6080 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6081 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
6082 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
917948d3 6083 entry_prob = XNEWVEC (unsigned, num_entry_edges);
50674e96
DN
6084 i = 0;
6085 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6086 {
917948d3 6087 entry_prob[i] = e->probability;
50674e96
DN
6088 entry_flag[i] = e->flags;
6089 entry_pred[i++] = e->src;
6090 remove_edge (e);
6091 }
6092
2aee3e57 6093 if (exit_bb)
50674e96 6094 {
2aee3e57
JJ
6095 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6096 exit_succ = (basic_block *) xcalloc (num_exit_edges,
6097 sizeof (basic_block));
6098 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
917948d3 6099 exit_prob = XNEWVEC (unsigned, num_exit_edges);
2aee3e57
JJ
6100 i = 0;
6101 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6102 {
917948d3 6103 exit_prob[i] = e->probability;
2aee3e57
JJ
6104 exit_flag[i] = e->flags;
6105 exit_succ[i++] = e->dest;
6106 remove_edge (e);
6107 }
6108 }
6109 else
6110 {
6111 num_exit_edges = 0;
6112 exit_succ = NULL;
6113 exit_flag = NULL;
917948d3 6114 exit_prob = NULL;
50674e96
DN
6115 }
6116
6117 /* Switch context to the child function to initialize DEST_FN's CFG. */
6118 gcc_assert (dest_cfun->cfg == NULL);
917948d3 6119 push_cfun (dest_cfun);
fad41cd7 6120
50674e96 6121 init_empty_tree_cfg ();
fad41cd7
RH
6122
6123 /* Initialize EH information for the new function. */
6124 eh_offset = 0;
6125 new_label_map = NULL;
6126 if (saved_cfun->eh)
6127 {
6128 int region = -1;
6129
6130 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
6131 region = find_outermost_region_in_block (saved_cfun, bb, region);
6132
6133 init_eh_for_function ();
6134 if (region != -1)
6135 {
6136 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6137 eh_offset = duplicate_eh_regions (saved_cfun, new_label_mapper,
6138 new_label_map, region, 0);
6139 }
6140 }
6141
917948d3
ZD
6142 pop_cfun ();
6143
6144 /* The ssa form for virtual operands in the source function will have to
6145 be repaired. We do not care for the real operands -- the sese region
6146 must be closed with respect to those. */
6147 mark_virtual_ops_in_region (bbs);
50674e96
DN
6148
6149 /* Move blocks from BBS into DEST_CFUN. */
6150 gcc_assert (VEC_length (basic_block, bbs) >= 2);
6151 after = dest_cfun->cfg->x_entry_block_ptr;
917948d3 6152 vars_map = pointer_map_create ();
b357f682
JJ
6153
6154 memset (&d, 0, sizeof (d));
6155 d.vars_map = vars_map;
6156 d.from_context = cfun->decl;
6157 d.to_context = dest_cfun->decl;
6158 d.new_label_map = new_label_map;
6159 d.remap_decls_p = true;
6160 d.orig_block = orig_block;
6161 d.new_block = DECL_INITIAL (dest_cfun->decl);
6162
50674e96
DN
6163 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
6164 {
6165 /* No need to update edge counts on the last block. It has
6166 already been updated earlier when we detached the region from
6167 the original CFG. */
b357f682 6168 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d, eh_offset);
50674e96
DN
6169 after = bb;
6170 }
6171
b357f682
JJ
6172 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6173 if (orig_block)
6174 {
6175 tree block;
6176 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6177 == NULL_TREE);
6178 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6179 = BLOCK_SUBBLOCKS (orig_block);
6180 for (block = BLOCK_SUBBLOCKS (orig_block);
6181 block; block = BLOCK_CHAIN (block))
6182 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6183 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6184 }
6185
6186 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6187 vars_map, dest_cfun->decl);
6188
fad41cd7
RH
6189 if (new_label_map)
6190 htab_delete (new_label_map);
917948d3 6191 pointer_map_destroy (vars_map);
50674e96
DN
6192
6193 /* Rewire the entry and exit blocks. The successor to the entry
6194 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6195 the child function. Similarly, the predecessor of DEST_FN's
6196 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6197 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
 6198 various CFG manipulation functions get to the right CFG.
6199
6200 FIXME, this is silly. The CFG ought to become a parameter to
6201 these helpers. */
917948d3 6202 push_cfun (dest_cfun);
50674e96 6203 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
2aee3e57
JJ
6204 if (exit_bb)
6205 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
917948d3 6206 pop_cfun ();
50674e96
DN
6207
6208 /* Back in the original function, the SESE region has disappeared,
6209 create a new basic block in its place. */
6210 bb = create_empty_bb (entry_pred[0]);
5f40b3cb
ZD
6211 if (current_loops)
6212 add_bb_to_loop (bb, loop);
50674e96 6213 for (i = 0; i < num_entry_edges; i++)
917948d3
ZD
6214 {
6215 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6216 e->probability = entry_prob[i];
6217 }
50674e96
DN
6218
6219 for (i = 0; i < num_exit_edges; i++)
917948d3
ZD
6220 {
6221 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6222 e->probability = exit_prob[i];
6223 }
6224
6225 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6226 for (i = 0; VEC_iterate (basic_block, dom_bbs, i, abb); i++)
6227 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6228 VEC_free (basic_block, heap, dom_bbs);
50674e96 6229
2aee3e57
JJ
6230 if (exit_bb)
6231 {
917948d3 6232 free (exit_prob);
2aee3e57
JJ
6233 free (exit_flag);
6234 free (exit_succ);
6235 }
917948d3 6236 free (entry_prob);
50674e96
DN
6237 free (entry_flag);
6238 free (entry_pred);
50674e96
DN
6239 VEC_free (basic_block, heap, bbs);
6240
6241 return bb;
6242}
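/* Illustrative (hypothetical) caller sketch, assuming CHILD_FN is a
   FUNCTION_DECL whose struct function already exists but has no CFG yet:

     basic_block stub_bb
       = move_sese_region_to_fn (DECL_STRUCT_FUNCTION (child_fn),
                                 region_entry_bb, region_exit_bb,
                                 child_block);

   On return, STUB_BB is the single block left in the parent's CFG where
   the moved region used to be; the region itself now lives in CHILD_FN's
   freshly built CFG. */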
6243
84d65814 6244
6de9cd9a
DN
6245/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h) */
6246
6247void
6248dump_function_to_file (tree fn, FILE *file, int flags)
6249{
6250 tree arg, vars, var;
459ffad3 6251 struct function *dsf;
6de9cd9a
DN
6252 bool ignore_topmost_bind = false, any_var = false;
6253 basic_block bb;
6254 tree chain;
6531d1be 6255
673fda6b 6256 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
6de9cd9a
DN
6257
6258 arg = DECL_ARGUMENTS (fn);
6259 while (arg)
6260 {
2f9ea521
RG
6261 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6262 fprintf (file, " ");
6de9cd9a 6263 print_generic_expr (file, arg, dump_flags);
3e894af1
KZ
6264 if (flags & TDF_VERBOSE)
6265 print_node (file, "", arg, 4);
6de9cd9a
DN
6266 if (TREE_CHAIN (arg))
6267 fprintf (file, ", ");
6268 arg = TREE_CHAIN (arg);
6269 }
6270 fprintf (file, ")\n");
6271
3e894af1
KZ
6272 if (flags & TDF_VERBOSE)
6273 print_node (file, "", fn, 2);
6274
459ffad3
EB
6275 dsf = DECL_STRUCT_FUNCTION (fn);
6276 if (dsf && (flags & TDF_DETAILS))
6277 dump_eh_tree (file, dsf);
6278
6de9cd9a
DN
6279 if (flags & TDF_RAW)
6280 {
6281 dump_node (fn, TDF_SLIM | flags, file);
6282 return;
6283 }
6284
953ff289 6285 /* Switch CFUN to point to FN. */
db2960f4 6286 push_cfun (DECL_STRUCT_FUNCTION (fn));
953ff289 6287
6de9cd9a
DN
6288 /* When GIMPLE is lowered, the variables are no longer available in
6289 BIND_EXPRs, so display them separately. */
cb91fab0 6290 if (cfun && cfun->decl == fn && cfun->local_decls)
6de9cd9a
DN
6291 {
6292 ignore_topmost_bind = true;
6293
6294 fprintf (file, "{\n");
cb91fab0 6295 for (vars = cfun->local_decls; vars; vars = TREE_CHAIN (vars))
6de9cd9a
DN
6296 {
6297 var = TREE_VALUE (vars);
6298
6299 print_generic_decl (file, var, flags);
3e894af1
KZ
6300 if (flags & TDF_VERBOSE)
6301 print_node (file, "", var, 4);
6de9cd9a
DN
6302 fprintf (file, "\n");
6303
6304 any_var = true;
6305 }
6306 }
6307
32a87d45 6308 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
6de9cd9a
DN
6309 {
6310 /* Make a CFG based dump. */
878f99d2 6311 check_bb_profile (ENTRY_BLOCK_PTR, file);
6de9cd9a
DN
6312 if (!ignore_topmost_bind)
6313 fprintf (file, "{\n");
6314
6315 if (any_var && n_basic_blocks)
6316 fprintf (file, "\n");
6317
6318 FOR_EACH_BB (bb)
6319 dump_generic_bb (file, bb, 2, flags);
6531d1be 6320
6de9cd9a 6321 fprintf (file, "}\n");
878f99d2 6322 check_bb_profile (EXIT_BLOCK_PTR, file);
6de9cd9a
DN
6323 }
6324 else
6325 {
6326 int indent;
6327
6328 /* Make a tree based dump. */
6329 chain = DECL_SAVED_TREE (fn);
6330
953ff289 6331 if (chain && TREE_CODE (chain) == BIND_EXPR)
6de9cd9a
DN
6332 {
6333 if (ignore_topmost_bind)
6334 {
6335 chain = BIND_EXPR_BODY (chain);
6336 indent = 2;
6337 }
6338 else
6339 indent = 0;
6340 }
6341 else
6342 {
6343 if (!ignore_topmost_bind)
6344 fprintf (file, "{\n");
6345 indent = 2;
6346 }
6347
6348 if (any_var)
6349 fprintf (file, "\n");
6350
6351 print_generic_stmt_indented (file, chain, flags, indent);
6352 if (ignore_topmost_bind)
6353 fprintf (file, "}\n");
6354 }
6355
6356 fprintf (file, "\n\n");
953ff289
DN
6357
6358 /* Restore CFUN. */
db2960f4 6359 pop_cfun ();
953ff289
DN
6360}
6361
6362
6363/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
6364
6365void
6366debug_function (tree fn, int flags)
6367{
6368 dump_function_to_file (fn, stderr, flags);
6de9cd9a
DN
6369}
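/* Illustrative (hypothetical) debugger session:

     (gdb) call debug_function (current_function_decl, 0)
     (gdb) call debug_function (fndecl, TDF_DETAILS)

   Both print the function to stderr via dump_function_to_file; TDF_DETAILS
   additionally dumps the EH region tree when one exists. */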
6370
6371
d7770457 6372/* Print on FILE the indexes for the predecessors of basic_block BB. */
6de9cd9a
DN
6373
6374static void
628f6a4e 6375print_pred_bbs (FILE *file, basic_block bb)
6de9cd9a 6376{
628f6a4e
BE
6377 edge e;
6378 edge_iterator ei;
6379
6380 FOR_EACH_EDGE (e, ei, bb->preds)
d7770457 6381 fprintf (file, "bb_%d ", e->src->index);
6de9cd9a
DN
6382}
6383
6384
d7770457 6385/* Print on FILE the indexes for the successors of basic_block BB. */
6de9cd9a
DN
6386
6387static void
628f6a4e 6388print_succ_bbs (FILE *file, basic_block bb)
6de9cd9a 6389{
628f6a4e
BE
6390 edge e;
6391 edge_iterator ei;
6392
6393 FOR_EACH_EDGE (e, ei, bb->succs)
d7770457 6394 fprintf (file, "bb_%d ", e->dest->index);
6de9cd9a
DN
6395}
6396
0c8efed8
SP
6397/* Print to FILE the basic block BB following the VERBOSITY level. */
6398
6399void
6400print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
6401{
6402 char *s_indent = (char *) alloca ((size_t) indent + 1);
6403 memset ((void *) s_indent, ' ', (size_t) indent);
6404 s_indent[indent] = '\0';
6405
6406 /* Print basic_block's header. */
6407 if (verbosity >= 2)
6408 {
6409 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
6410 print_pred_bbs (file, bb);
6411 fprintf (file, "}, succs = {");
6412 print_succ_bbs (file, bb);
6413 fprintf (file, "})\n");
6414 }
6415
6416 /* Print basic_block's body. */
6417 if (verbosity >= 3)
6418 {
6419 fprintf (file, "%s {\n", s_indent);
6420 tree_dump_bb (bb, file, indent + 4);
6421 fprintf (file, "%s }\n", s_indent);
6422 }
6423}
6424
6425static void print_loop_and_siblings (FILE *, struct loop *, int, int);
6de9cd9a 6426
0c8efed8
SP
6427/* Pretty print LOOP on FILE, indented INDENT spaces. Following
6428 VERBOSITY level this outputs the contents of the loop, or just its
6429 structure. */
6de9cd9a
DN
6430
6431static void
0c8efed8 6432print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
6de9cd9a
DN
6433{
6434 char *s_indent;
6435 basic_block bb;
6531d1be 6436
6de9cd9a
DN
6437 if (loop == NULL)
6438 return;
6439
6440 s_indent = (char *) alloca ((size_t) indent + 1);
6441 memset ((void *) s_indent, ' ', (size_t) indent);
6442 s_indent[indent] = '\0';
6443
0c8efed8
SP
6444 /* Print loop's header. */
6445 fprintf (file, "%sloop_%d (header = %d, latch = %d", s_indent,
6446 loop->num, loop->header->index, loop->latch->index);
6447 fprintf (file, ", niter = ");
6448 print_generic_expr (file, loop->nb_iterations, 0);
6531d1be 6449
0c8efed8
SP
6450 if (loop->any_upper_bound)
6451 {
6452 fprintf (file, ", upper_bound = ");
6453 dump_double_int (file, loop->nb_iterations_upper_bound, true);
6454 }
6531d1be 6455
0c8efed8
SP
6456 if (loop->any_estimate)
6457 {
6458 fprintf (file, ", estimate = ");
6459 dump_double_int (file, loop->nb_iterations_estimate, true);
6460 }
6461 fprintf (file, ")\n");
6462
6463 /* Print loop's body. */
6464 if (verbosity >= 1)
6465 {
6466 fprintf (file, "%s{\n", s_indent);
6467 FOR_EACH_BB (bb)
6468 if (bb->loop_father == loop)
6469 print_loops_bb (file, bb, indent, verbosity);
6470
6471 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
6472 fprintf (file, "%s}\n", s_indent);
6473 }
6de9cd9a
DN
6474}
6475
0c8efed8
SP
6476/* Print the LOOP and its sibling loops on FILE, indented INDENT
6477 spaces. Following VERBOSITY level this outputs the contents of the
6478 loop, or just its structure. */
6479
6480static void
6481print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
6482{
6483 if (loop == NULL)
6484 return;
6485
6486 print_loop (file, loop, indent, verbosity);
6487 print_loop_and_siblings (file, loop->next, indent, verbosity);
6488}
6de9cd9a
DN
6489
6490/* Follow a CFG edge from the entry point of the program, and on entry
6491 of a loop, pretty print the loop structure on FILE. */
6492
6531d1be 6493void
0c8efed8 6494print_loops (FILE *file, int verbosity)
6de9cd9a
DN
6495{
6496 basic_block bb;
6531d1be 6497
24bd1a0b 6498 bb = BASIC_BLOCK (NUM_FIXED_BLOCKS);
6de9cd9a 6499 if (bb && bb->loop_father)
0c8efed8 6500 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
6de9cd9a
DN
6501}
6502
6503
0c8efed8
SP
6504/* Debugging loops structure at tree level, at some VERBOSITY level. */
6505
6506void
6507debug_loops (int verbosity)
6508{
6509 print_loops (stderr, verbosity);
6510}
6511
6512/* Print on stderr the code of LOOP, at some VERBOSITY level. */
6de9cd9a 6513
6531d1be 6514void
0c8efed8 6515debug_loop (struct loop *loop, int verbosity)
6de9cd9a 6516{
0c8efed8 6517 print_loop (stderr, loop, 0, verbosity);
6de9cd9a
DN
6518}
6519
0c8efed8
SP
6520/* Print on stderr the code of loop number NUM, at some VERBOSITY
6521 level. */
6522
6523void
6524debug_loop_num (unsigned num, int verbosity)
6525{
6526 debug_loop (get_loop (num), verbosity);
6527}
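/* Illustrative (hypothetical) debugger invocations for the entry points
   above:

     (gdb) call debug_loops (3)
     (gdb) call debug_loop_num (2, 1)

   Higher VERBOSITY prints progressively more: >= 1 descends into loop
   bodies and inner loops, >= 2 also prints each block's predecessor and
   successor lists, >= 3 also prints the statements of each block. */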
6de9cd9a
DN
6528
6529/* Return true if BB ends with a call, possibly followed by some
6530 instructions that must stay with the call. Return false,
6531 otherwise. */
6532
6533static bool
b48d0358 6534tree_block_ends_with_call_p (basic_block bb)
6de9cd9a 6535{
b48d0358 6536 block_stmt_iterator bsi = bsi_last (bb);
0e014996 6537 return get_call_expr_in (bsi_stmt (bsi)) != NULL;
6de9cd9a
DN
6538}
6539
6540
6541/* Return true if BB ends with a conditional branch. Return false,
6542 otherwise. */
6543
6544static bool
9678086d 6545tree_block_ends_with_condjump_p (const_basic_block bb)
6de9cd9a 6546{
75547801
KG
6547 /* This CONST_CAST is okay because last_stmt doesn't modify its
6548 argument and the return value is not modified. */
b1d5455a 6549 const_tree stmt = last_stmt (CONST_CAST_BB(bb));
9885da8e 6550 return (stmt && TREE_CODE (stmt) == COND_EXPR);
6de9cd9a
DN
6551}
6552
6553
6554/* Return true if we need to add fake edge to exit at statement T.
6555 Helper function for tree_flow_call_edges_add. */
6556
6557static bool
6558need_fake_edge_p (tree t)
6559{
23ef6d21
BE
6560 tree call, fndecl = NULL_TREE;
6561 int call_flags;
6de9cd9a
DN
6562
6563 /* NORETURN and LONGJMP calls already have an edge to exit.
321cf1f2 6564 CONST and PURE calls do not need one.
6de9cd9a
DN
6565 We don't currently check for CONST and PURE here, although
6566 it would be a good idea, because those attributes are
6567 figured out from the RTL in mark_constant_function, and
6568 the counter incrementation code from -fprofile-arcs
6569 leads to different results from -fbranch-probabilities. */
cd709752 6570 call = get_call_expr_in (t);
23ef6d21
BE
6571 if (call)
6572 {
6573 fndecl = get_callee_fndecl (call);
6574 call_flags = call_expr_flags (call);
6575 }
6576
6577 if (call && fndecl && DECL_BUILT_IN (fndecl)
6578 && (call_flags & ECF_NOTHROW)
6579 && !(call_flags & ECF_NORETURN)
6580 && !(call_flags & ECF_RETURNS_TWICE))
6581 return false;
6582
6583 if (call && !(call_flags & ECF_NORETURN))
6de9cd9a
DN
6584 return true;
6585
6586 if (TREE_CODE (t) == ASM_EXPR
6587 && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
6588 return true;
6589
6590 return false;
6591}
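/* A couple of illustrative cases for the predicate above: an ordinary call
   that is not known to be noreturn gets a fake edge, because it may never
   return (e.g. it could call exit); a nothrow built-in that is neither
   noreturn nor returns_twice does not; a volatile or old-style asm does. */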
6592
6593
 6594/* Add fake edges to the function exit for any non-constant and
 6595 non-noreturn calls, and for volatile inline assembly, in the bitmap of blocks
6596 specified by BLOCKS or to the whole CFG if BLOCKS is zero. Return
6597 the number of blocks that were split.
6598
6599 The goal is to expose cases in which entering a basic block does
6600 not imply that all subsequent instructions must be executed. */
6601
6602static int
6603tree_flow_call_edges_add (sbitmap blocks)
6604{
6605 int i;
6606 int blocks_split = 0;
6607 int last_bb = last_basic_block;
6608 bool check_last_block = false;
6609
24bd1a0b 6610 if (n_basic_blocks == NUM_FIXED_BLOCKS)
6de9cd9a
DN
6611 return 0;
6612
6613 if (! blocks)
6614 check_last_block = true;
6615 else
6616 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
6617
6618 /* In the last basic block, before epilogue generation, there will be
6619 a fallthru edge to EXIT. Special care is required if the last insn
6620 of the last basic block is a call because make_edge folds duplicate
6621 edges, which would result in the fallthru edge also being marked
6622 fake, which would result in the fallthru edge being removed by
6623 remove_fake_edges, which would result in an invalid CFG.
6624
6625 Moreover, we can't elide the outgoing fake edge, since the block
6626 profiler needs to take this into account in order to solve the minimal
6627 spanning tree in the case that the call doesn't return.
6628
6629 Handle this by adding a dummy instruction in a new last basic block. */
6630 if (check_last_block)
6631 {
6632 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
6633 block_stmt_iterator bsi = bsi_last (bb);
6634 tree t = NULL_TREE;
6635 if (!bsi_end_p (bsi))
6636 t = bsi_stmt (bsi);
6637
6a60530d 6638 if (t && need_fake_edge_p (t))
6de9cd9a
DN
6639 {
6640 edge e;
6641
9ff3d2de
JL
6642 e = find_edge (bb, EXIT_BLOCK_PTR);
6643 if (e)
6644 {
6645 bsi_insert_on_edge (e, build_empty_stmt ());
6646 bsi_commit_edge_inserts ();
6647 }
6de9cd9a
DN
6648 }
6649 }
6650
 6651 /* Now add fake edges to the function exit for any non-constant
6652 calls since there is no way that we can determine if they will
6653 return or not... */
6654 for (i = 0; i < last_bb; i++)
6655 {
6656 basic_block bb = BASIC_BLOCK (i);
6657 block_stmt_iterator bsi;
6658 tree stmt, last_stmt;
6659
6660 if (!bb)
6661 continue;
6662
6663 if (blocks && !TEST_BIT (blocks, i))
6664 continue;
6665
6666 bsi = bsi_last (bb);
6667 if (!bsi_end_p (bsi))
6668 {
6669 last_stmt = bsi_stmt (bsi);
6670 do
6671 {
6672 stmt = bsi_stmt (bsi);
6673 if (need_fake_edge_p (stmt))
6674 {
6675 edge e;
6676 /* The handling above of the final block before the
6677 epilogue should be enough to verify that there is
6678 no edge to the exit block in CFG already.
6679 Calling make_edge in such case would cause us to
6680 mark that edge as fake and remove it later. */
6681#ifdef ENABLE_CHECKING
6682 if (stmt == last_stmt)
628f6a4e 6683 {
9ff3d2de
JL
6684 e = find_edge (bb, EXIT_BLOCK_PTR);
6685 gcc_assert (e == NULL);
628f6a4e 6686 }
6de9cd9a
DN
6687#endif
6688
6689 /* Note that the following may create a new basic block
6690 and renumber the existing basic blocks. */
6691 if (stmt != last_stmt)
6692 {
6693 e = split_block (bb, stmt);
6694 if (e)
6695 blocks_split++;
6696 }
6697 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
6698 }
6699 bsi_prev (&bsi);
6700 }
6701 while (!bsi_end_p (bsi));
6702 }
6703 }
6704
6705 if (blocks_split)
6706 verify_flow_info ();
6707
6708 return blocks_split;
6709}
6710
4f6c2131
EB
6711/* Purge dead abnormal call edges from basic block BB. */
6712
6713bool
6714tree_purge_dead_abnormal_call_edges (basic_block bb)
6715{
6716 bool changed = tree_purge_dead_eh_edges (bb);
6717
e3b5732b 6718 if (cfun->has_nonlocal_label)
4f6c2131
EB
6719 {
6720 tree stmt = last_stmt (bb);
6721 edge_iterator ei;
6722 edge e;
6723
6724 if (!(stmt && tree_can_make_abnormal_goto (stmt)))
6725 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6726 {
6727 if (e->flags & EDGE_ABNORMAL)
6728 {
6729 remove_edge (e);
6730 changed = true;
6731 }
6732 else
6733 ei_next (&ei);
6734 }
6735
6736 /* See tree_purge_dead_eh_edges below. */
6737 if (changed)
6738 free_dominance_info (CDI_DOMINATORS);
6739 }
6740
6741 return changed;
6742}
6743
672987e8
ZD
6744/* Stores all basic blocks dominated by BB to DOM_BBS. */
6745
6746static void
6747get_all_dominated_blocks (basic_block bb, VEC (basic_block, heap) **dom_bbs)
6748{
6749 basic_block son;
6750
6751 VEC_safe_push (basic_block, heap, *dom_bbs, bb);
6752 for (son = first_dom_son (CDI_DOMINATORS, bb);
6753 son;
6754 son = next_dom_son (CDI_DOMINATORS, son))
6755 get_all_dominated_blocks (son, dom_bbs);
6756}
6757
6758/* Removes edge E and all the blocks dominated by it, and updates dominance
6759 information. The IL in E->src needs to be updated separately.
 6760 If dominance info is not available, only the edge E is removed. */
6761
6762void
6763remove_edge_and_dominated_blocks (edge e)
6764{
6765 VEC (basic_block, heap) *bbs_to_remove = NULL;
6766 VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
6767 bitmap df, df_idom;
6768 edge f;
6769 edge_iterator ei;
6770 bool none_removed = false;
6771 unsigned i;
6772 basic_block bb, dbb;
6773 bitmap_iterator bi;
6774
2b28c07a 6775 if (!dom_info_available_p (CDI_DOMINATORS))
672987e8
ZD
6776 {
6777 remove_edge (e);
6778 return;
6779 }
6780
6781 /* No updating is needed for edges to exit. */
6782 if (e->dest == EXIT_BLOCK_PTR)
6783 {
6784 if (cfgcleanup_altered_bbs)
6785 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6786 remove_edge (e);
6787 return;
6788 }
6789
6790 /* First, we find the basic blocks to remove. If E->dest has a predecessor
6791 that is not dominated by E->dest, then this set is empty. Otherwise,
6792 all the basic blocks dominated by E->dest are removed.
6793
6794 Also, to DF_IDOM we store the immediate dominators of the blocks in
6795 the dominance frontier of E (i.e., of the successors of the
6796 removed blocks, if there are any, and of E->dest otherwise). */
6797 FOR_EACH_EDGE (f, ei, e->dest->preds)
6798 {
6799 if (f == e)
6800 continue;
6801
6802 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
6803 {
6804 none_removed = true;
6805 break;
6806 }
6807 }
6808
6809 df = BITMAP_ALLOC (NULL);
6810 df_idom = BITMAP_ALLOC (NULL);
6811
6812 if (none_removed)
6813 bitmap_set_bit (df_idom,
6814 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
6815 else
6816 {
6817 get_all_dominated_blocks (e->dest, &bbs_to_remove);
6818 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6819 {
6820 FOR_EACH_EDGE (f, ei, bb->succs)
6821 {
6822 if (f->dest != EXIT_BLOCK_PTR)
6823 bitmap_set_bit (df, f->dest->index);
6824 }
6825 }
6826 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6827 bitmap_clear_bit (df, bb->index);
6828
6829 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
6830 {
6831 bb = BASIC_BLOCK (i);
6832 bitmap_set_bit (df_idom,
6833 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
6834 }
6835 }
6836
6837 if (cfgcleanup_altered_bbs)
6838 {
6839 /* Record the set of the altered basic blocks. */
6840 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6841 bitmap_ior_into (cfgcleanup_altered_bbs, df);
6842 }
6843
6844 /* Remove E and the cancelled blocks. */
6845 if (none_removed)
6846 remove_edge (e);
6847 else
6848 {
6849 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6850 delete_basic_block (bb);
6851 }
6852
6853 /* Update the dominance information. The immediate dominator may change only
6854 for blocks whose immediate dominator belongs to DF_IDOM:
6855
6856 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
 6857 removal. Let Z be an arbitrary block such that idom(Z) = Y and
6858 Z dominates X after the removal. Before removal, there exists a path P
6859 from Y to X that avoids Z. Let F be the last edge on P that is
6860 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
6861 dominates W, and because of P, Z does not dominate W), and W belongs to
6862 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
6863 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
6864 {
6865 bb = BASIC_BLOCK (i);
6866 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
6867 dbb;
6868 dbb = next_dom_son (CDI_DOMINATORS, dbb))
6869 VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
6870 }
6871
66f97d31 6872 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
672987e8
ZD
6873
6874 BITMAP_FREE (df);
6875 BITMAP_FREE (df_idom);
6876 VEC_free (basic_block, heap, bbs_to_remove);
6877 VEC_free (basic_block, heap, bbs_to_fix_dom);
6878}
6879
4f6c2131
EB
6880/* Purge dead EH edges from basic block BB. */
6881
1eaba2f2
RH
6882bool
6883tree_purge_dead_eh_edges (basic_block bb)
6884{
6885 bool changed = false;
628f6a4e
BE
6886 edge e;
6887 edge_iterator ei;
1eaba2f2
RH
6888 tree stmt = last_stmt (bb);
6889
6890 if (stmt && tree_can_throw_internal (stmt))
6891 return false;
6892
628f6a4e 6893 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
1eaba2f2 6894 {
1eaba2f2
RH
6895 if (e->flags & EDGE_EH)
6896 {
672987e8 6897 remove_edge_and_dominated_blocks (e);
1eaba2f2
RH
6898 changed = true;
6899 }
628f6a4e
BE
6900 else
6901 ei_next (&ei);
1eaba2f2
RH
6902 }
6903
6904 return changed;
6905}
6906
6907bool
6ea2b70d 6908tree_purge_all_dead_eh_edges (const_bitmap blocks)
1eaba2f2
RH
6909{
6910 bool changed = false;
3cd8c58a 6911 unsigned i;
87c476a2 6912 bitmap_iterator bi;
1eaba2f2 6913
87c476a2
ZD
6914 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
6915 {
6916 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
6917 }
1eaba2f2
RH
6918
6919 return changed;
6920}
6de9cd9a 6921
a100ac1e
KH
6922/* This function is called whenever a new edge is created or
6923 redirected. */
6924
6925static void
6926tree_execute_on_growing_pred (edge e)
6927{
6928 basic_block bb = e->dest;
6929
6930 if (phi_nodes (bb))
6931 reserve_phi_args_for_new_edge (bb);
6932}
6933
e51546f8
KH
6934/* This function is called immediately before edge E is removed from
6935 the edge vector E->dest->preds. */
6936
6937static void
6938tree_execute_on_shrinking_pred (edge e)
6939{
6940 if (phi_nodes (e->dest))
6941 remove_phi_args (e);
6942}
6943
1cb7dfc3
MH
6944/*---------------------------------------------------------------------------
6945 Helper functions for Loop versioning
6946 ---------------------------------------------------------------------------*/
6947
6948/* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
6949 of 'first'. Both of them are dominated by 'new_head' basic block. When
6950 'new_head' was created by 'second's incoming edge it received phi arguments
6951 on the edge by split_edge(). Later, additional edge 'e' was created to
6531d1be
BF
 6952 connect 'new_head' and 'first'. Now this routine adds on this additional
 6953 edge 'e' the phi args that the edge from 'new_head' to 'second' received
1cb7dfc3
MH
 6954 as part of the edge splitting.
6955*/
6956
6957static void
6958tree_lv_adjust_loop_header_phi (basic_block first, basic_block second,
6959 basic_block new_head, edge e)
6960{
6961 tree phi1, phi2;
d0e12fc6
KH
6962 edge e2 = find_edge (new_head, second);
6963
6964 /* Because NEW_HEAD has been created by splitting SECOND's incoming
6965 edge, we should always have an edge from NEW_HEAD to SECOND. */
6966 gcc_assert (e2 != NULL);
1cb7dfc3
MH
6967
6968 /* Browse all 'second' basic block phi nodes and add phi args to
6969 edge 'e' for 'first' head. PHI args are always in correct order. */
6970
6531d1be
BF
6971 for (phi2 = phi_nodes (second), phi1 = phi_nodes (first);
6972 phi2 && phi1;
1cb7dfc3
MH
6973 phi2 = PHI_CHAIN (phi2), phi1 = PHI_CHAIN (phi1))
6974 {
d0e12fc6
KH
6975 tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
6976 add_phi_arg (phi1, def, e);
1cb7dfc3
MH
6977 }
6978}
6979
6531d1be
BF
 6980/* Adds an if-else statement to COND_BB with condition COND_EXPR.
 6981 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
1cb7dfc3
MH
6982 the destination of the ELSE part. */
6983static void
a9b77cd1
ZD
6984tree_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
6985 basic_block second_head ATTRIBUTE_UNUSED,
6986 basic_block cond_bb, void *cond_e)
1cb7dfc3
MH
6987{
6988 block_stmt_iterator bsi;
1cb7dfc3
MH
6989 tree new_cond_expr = NULL_TREE;
6990 tree cond_expr = (tree) cond_e;
6991 edge e0;
6992
6993 /* Build new conditional expr */
a9b77cd1
ZD
6994 new_cond_expr = build3 (COND_EXPR, void_type_node, cond_expr,
6995 NULL_TREE, NULL_TREE);
1cb7dfc3 6996
6531d1be
BF
6997 /* Add new cond in cond_bb. */
6998 bsi = bsi_start (cond_bb);
1cb7dfc3
MH
6999 bsi_insert_after (&bsi, new_cond_expr, BSI_NEW_STMT);
7000 /* Adjust edges appropriately to connect new head with first head
7001 as well as second head. */
7002 e0 = single_succ_edge (cond_bb);
7003 e0->flags &= ~EDGE_FALLTHRU;
7004 e0->flags |= EDGE_FALSE_VALUE;
7005}
7006
6de9cd9a
DN
7007struct cfg_hooks tree_cfg_hooks = {
7008 "tree",
7009 tree_verify_flow_info,
7010 tree_dump_bb, /* dump_bb */
7011 create_bb, /* create_basic_block */
7012 tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
7013 tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
14fa2cc0 7014 tree_can_remove_branch_p, /* can_remove_branch_p */
6de9cd9a
DN
7015 remove_bb, /* delete_basic_block */
7016 tree_split_block, /* split_block */
7017 tree_move_block_after, /* move_block_after */
7018 tree_can_merge_blocks_p, /* can_merge_blocks_p */
7019 tree_merge_blocks, /* merge_blocks */
7020 tree_predict_edge, /* predict_edge */
7021 tree_predicted_by_p, /* predicted_by_p */
7022 tree_can_duplicate_bb_p, /* can_duplicate_block_p */
7023 tree_duplicate_bb, /* duplicate_block */
7024 tree_split_edge, /* split_edge */
 7025 tree_make_forwarder_block, /* make_forwarder_block */
7026 NULL, /* tidy_fallthru_edge */
7027 tree_block_ends_with_call_p, /* block_ends_with_call_p */
7028 tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
d9d4706f 7029 tree_flow_call_edges_add, /* flow_call_edges_add */
a100ac1e 7030 tree_execute_on_growing_pred, /* execute_on_growing_pred */
e51546f8 7031 tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
1cb7dfc3
MH
7032 tree_duplicate_loop_to_header_edge, /* duplicate loop for trees */
7033 tree_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
7034 tree_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
7035 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
6531d1be 7036 flush_pending_stmts /* flush_pending_stmts */
6de9cd9a
DN
7037};
7038
7039
7040/* Split all critical edges. */
7041
c2924966 7042static unsigned int
6de9cd9a
DN
7043split_critical_edges (void)
7044{
7045 basic_block bb;
7046 edge e;
628f6a4e 7047 edge_iterator ei;
6de9cd9a 7048
d6be0d7f
JL
7049 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7050 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7051 mappings around the calls to split_edge. */
7052 start_recording_case_labels ();
6de9cd9a
DN
7053 FOR_ALL_BB (bb)
7054 {
628f6a4e 7055 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
7056 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
7057 {
7058 split_edge (e);
7059 }
7060 }
d6be0d7f 7061 end_recording_case_labels ();
c2924966 7062 return 0;
6de9cd9a
DN
7063}
7064
8ddbbcae 7065struct gimple_opt_pass pass_split_crit_edges =
6de9cd9a 7066{
8ddbbcae
JH
7067 {
7068 GIMPLE_PASS,
5d44aeed 7069 "crited", /* name */
6de9cd9a
DN
7070 NULL, /* gate */
7071 split_critical_edges, /* execute */
7072 NULL, /* sub */
7073 NULL, /* next */
7074 0, /* static_pass_number */
7075 TV_TREE_SPLIT_EDGES, /* tv_id */
7076 PROP_cfg, /* properties required */
7077 PROP_no_crit_edges, /* properties_provided */
7078 0, /* properties_destroyed */
7079 0, /* todo_flags_start */
8ddbbcae
JH
7080 TODO_dump_func /* todo_flags_finish */
7081 }
6de9cd9a 7082};
26277d41
PB
7083
7084\f
7085/* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
 7086 a temporary, making sure to register it for renaming if necessary,
7087 and finally return the temporary. Put the statements to compute
7088 EXP before the current statement in BSI. */
7089
7090tree
7091gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
7092{
7093 tree t, new_stmt, orig_stmt;
7094
7095 if (is_gimple_val (exp))
7096 return exp;
7097
7098 t = make_rename_temp (type, NULL);
939409af 7099 new_stmt = build_gimple_modify_stmt (t, exp);
26277d41
PB
7100
7101 orig_stmt = bsi_stmt (*bsi);
7102 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
7103 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
7104
7105 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5cd4ec7f 7106 if (gimple_in_ssa_p (cfun))
cfaab3a9 7107 mark_symbols_for_renaming (new_stmt);
26277d41
PB
7108
7109 return t;
7110}
7111
7112/* Build a ternary operation and gimplify it. Emit code before BSI.
7113 Return the gimple_val holding the result. */
7114
7115tree
7116gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
7117 tree type, tree a, tree b, tree c)
7118{
7119 tree ret;
7120
987b67bc 7121 ret = fold_build3 (code, type, a, b, c);
26277d41
PB
7122 STRIP_NOPS (ret);
7123
7124 return gimplify_val (bsi, type, ret);
7125}
7126
7127/* Build a binary operation and gimplify it. Emit code before BSI.
7128 Return the gimple_val holding the result. */
7129
7130tree
7131gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
7132 tree type, tree a, tree b)
7133{
7134 tree ret;
7135
987b67bc 7136 ret = fold_build2 (code, type, a, b);
26277d41
PB
7137 STRIP_NOPS (ret);
7138
7139 return gimplify_val (bsi, type, ret);
7140}
7141
7142/* Build a unary operation and gimplify it. Emit code before BSI.
7143 Return the gimple_val holding the result. */
7144
7145tree
7146gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
7147 tree a)
7148{
7149 tree ret;
7150
987b67bc 7151 ret = fold_build1 (code, type, a);
26277d41
PB
7152 STRIP_NOPS (ret);
7153
7154 return gimplify_val (bsi, type, ret);
7155}
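/* Illustrative (hypothetical) use of the helpers above, emitting the
   computation of (a + b) * c as GIMPLE temporaries before the statement at
   *BSI:

     tree sum = gimplify_build2 (bsi, PLUS_EXPR, type, a, b);
     tree res = gimplify_build2 (bsi, MULT_EXPR, type, sum, c);

   Each returned tree is a GIMPLE value that subsequent statements may use
   directly. */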
7156
7157
6de9cd9a
DN
7158\f
7159/* Emit return warnings. */
7160
c2924966 7161static unsigned int
6de9cd9a
DN
7162execute_warn_function_return (void)
7163{
9506ac2b 7164 source_location location;
6de9cd9a
DN
7165 tree last;
7166 edge e;
628f6a4e 7167 edge_iterator ei;
6de9cd9a 7168
6de9cd9a
DN
7169 /* If we have a path to EXIT, then we do return. */
7170 if (TREE_THIS_VOLATILE (cfun->decl)
628f6a4e 7171 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
6de9cd9a 7172 {
9506ac2b 7173 location = UNKNOWN_LOCATION;
628f6a4e 7174 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6de9cd9a
DN
7175 {
7176 last = last_stmt (e->src);
7177 if (TREE_CODE (last) == RETURN_EXPR
9506ac2b 7178 && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
6de9cd9a
DN
7179 break;
7180 }
9506ac2b
PB
7181 if (location == UNKNOWN_LOCATION)
7182 location = cfun->function_end_locus;
d4ee4d25 7183 warning (0, "%H%<noreturn%> function does return", &location);
6de9cd9a
DN
7184 }
7185
7186 /* If we see "return;" in some basic block, then we do reach the end
7187 without returning a value. */
7188 else if (warn_return_type
089efaa4 7189 && !TREE_NO_WARNING (cfun->decl)
628f6a4e 7190 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
6de9cd9a
DN
7191 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
7192 {
628f6a4e 7193 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6de9cd9a
DN
7194 {
7195 tree last = last_stmt (e->src);
7196 if (TREE_CODE (last) == RETURN_EXPR
0c9b182b
JJ
7197 && TREE_OPERAND (last, 0) == NULL
7198 && !TREE_NO_WARNING (last))
6de9cd9a 7199 {
9506ac2b
PB
7200 location = EXPR_LOCATION (last);
7201 if (location == UNKNOWN_LOCATION)
7202 location = cfun->function_end_locus;
c5409249 7203 warning (OPT_Wreturn_type, "%Hcontrol reaches end of non-void function", &location);
089efaa4 7204 TREE_NO_WARNING (cfun->decl) = 1;
6de9cd9a
DN
7205 break;
7206 }
7207 }
7208 }
c2924966 7209 return 0;
6de9cd9a
DN
7210}
7211
7212
7213/* Given a basic block B which ends with a conditional and has
7214 precisely two successors, determine which of the edges is taken if
7215 the conditional is true and which is taken if the conditional is
7216 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
7217
7218void
7219extract_true_false_edges_from_block (basic_block b,
7220 edge *true_edge,
7221 edge *false_edge)
7222{
628f6a4e 7223 edge e = EDGE_SUCC (b, 0);
6de9cd9a
DN
7224
7225 if (e->flags & EDGE_TRUE_VALUE)
7226 {
7227 *true_edge = e;
628f6a4e 7228 *false_edge = EDGE_SUCC (b, 1);
6de9cd9a
DN
7229 }
7230 else
7231 {
7232 *false_edge = e;
628f6a4e 7233 *true_edge = EDGE_SUCC (b, 1);
6de9cd9a
DN
7234 }
7235}
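/* Illustrative (hypothetical) use of the helper above for a block B that
   ends in a COND_EXPR:

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (b, &true_edge, &false_edge);

   TRUE_EDGE is taken when the condition evaluates to true, FALSE_EDGE
   otherwise. */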
7236
8ddbbcae 7237struct gimple_opt_pass pass_warn_function_return =
6de9cd9a 7238{
8ddbbcae
JH
7239 {
7240 GIMPLE_PASS,
6de9cd9a
DN
7241 NULL, /* name */
7242 NULL, /* gate */
7243 execute_warn_function_return, /* execute */
7244 NULL, /* sub */
7245 NULL, /* next */
7246 0, /* static_pass_number */
7247 0, /* tv_id */
00bfee6f 7248 PROP_cfg, /* properties_required */
6de9cd9a
DN
7249 0, /* properties_provided */
7250 0, /* properties_destroyed */
7251 0, /* todo_flags_start */
8ddbbcae
JH
7252 0 /* todo_flags_finish */
7253 }
6de9cd9a 7254};
aa313ed4
JH
7255
7256/* Emit noreturn warnings. */
7257
c2924966 7258static unsigned int
aa313ed4
JH
7259execute_warn_function_noreturn (void)
7260{
7261 if (warn_missing_noreturn
7262 && !TREE_THIS_VOLATILE (cfun->decl)
7263 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
e8924938 7264 && !lang_hooks.missing_noreturn_ok_p (cfun->decl))
3176a0c2
DD
7265 warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
7266 "for attribute %<noreturn%>",
aa313ed4 7267 cfun->decl);
c2924966 7268 return 0;
aa313ed4
JH
7269}
7270
8ddbbcae 7271struct gimple_opt_pass pass_warn_function_noreturn =
aa313ed4 7272{
8ddbbcae
JH
7273 {
7274 GIMPLE_PASS,
aa313ed4
JH
7275 NULL, /* name */
7276 NULL, /* gate */
7277 execute_warn_function_noreturn, /* execute */
7278 NULL, /* sub */
7279 NULL, /* next */
7280 0, /* static_pass_number */
7281 0, /* tv_id */
7282 PROP_cfg, /* properties_required */
7283 0, /* properties_provided */
7284 0, /* properties_destroyed */
7285 0, /* todo_flags_start */
8ddbbcae
JH
7286 0 /* todo_flags_finish */
7287 }
aa313ed4 7288};