[gcc.git] / gcc / tree-cfg.c
1/* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 2, or (at your option)
10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING. If not, write to
19the Free Software Foundation, 59 Temple Place - Suite 330,
20Boston, MA 02111-1307, USA. */
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "tree.h"
27#include "rtl.h"
28#include "tm_p.h"
29#include "hard-reg-set.h"
30#include "basic-block.h"
31#include "output.h"
32#include "errors.h"
33#include "flags.h"
34#include "function.h"
35#include "expr.h"
36#include "ggc.h"
37#include "langhooks.h"
38#include "diagnostic.h"
39#include "tree-flow.h"
40#include "timevar.h"
41#include "tree-dump.h"
42#include "tree-pass.h"
43#include "toplev.h"
44#include "except.h"
45#include "cfgloop.h"
 46#include "cfglayout.h"
47
48/* This file contains functions for building the Control Flow Graph (CFG)
49 for a function tree. */
50
51/* Local declarations. */
52
53/* Initial capacity for the basic block array. */
54static const int initial_cfg_capacity = 20;
55
56/* Mapping of labels to their associated blocks. This can greatly speed up
57 building of the CFG in code with lots of gotos. */
58static GTY(()) varray_type label_to_block_map;
59
60/* CFG statistics. */
61struct cfg_stats_d
62{
63 long num_merged_labels;
64};
65
66static struct cfg_stats_d cfg_stats;
67
68/* Nonzero if we found a computed goto while building basic blocks. */
69static bool found_computed_goto;
70
71/* Basic blocks and flowgraphs. */
72static basic_block create_bb (void *, void *, basic_block);
73static void create_block_annotation (basic_block);
74static void free_blocks_annotations (void);
75static void clear_blocks_annotations (void);
76static void make_blocks (tree);
77static void factor_computed_gotos (void);
78
79/* Edges. */
80static void make_edges (void);
81static void make_ctrl_stmt_edges (basic_block);
82static void make_exit_edges (basic_block);
83static void make_cond_expr_edges (basic_block);
84static void make_switch_expr_edges (basic_block);
85static void make_goto_expr_edges (basic_block);
86static edge tree_redirect_edge_and_branch (edge, basic_block);
87static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
88static void split_critical_edges (void);
89
90/* Various helpers. */
91static inline bool stmt_starts_bb_p (tree, tree);
92static int tree_verify_flow_info (void);
93static void tree_make_forwarder_block (edge);
94static bool thread_jumps (void);
95static bool tree_forwarder_block_p (basic_block);
96static void bsi_commit_edge_inserts_1 (edge e);
97static void tree_cfg2vcg (FILE *);
98
99/* Flowgraph optimization and cleanup. */
100static void tree_merge_blocks (basic_block, basic_block);
101static bool tree_can_merge_blocks_p (basic_block, basic_block);
102static void remove_bb (basic_block);
103static bool cleanup_control_flow (void);
104static bool cleanup_control_expr_graph (basic_block, block_stmt_iterator);
105static edge find_taken_edge_cond_expr (basic_block, tree);
106static edge find_taken_edge_switch_expr (basic_block, tree);
107static tree find_case_label_for_value (tree, tree);
108static bool phi_alternatives_equal (basic_block, edge, edge);
109
110
111/*---------------------------------------------------------------------------
112 Create basic blocks
113---------------------------------------------------------------------------*/
114
115/* Entry point to the CFG builder for trees. TP points to the list of
116 statements to be added to the flowgraph. */
117
118static void
119build_tree_cfg (tree *tp)
120{
121 /* Register specific tree functions. */
122 tree_register_cfg_hooks ();
123
124 /* Initialize rbi_pool. */
125 alloc_rbi_pool ();
126
127 /* Initialize the basic block array. */
128 init_flow ();
 129 profile_status = PROFILE_ABSENT;
130 n_basic_blocks = 0;
131 last_basic_block = 0;
132 VARRAY_BB_INIT (basic_block_info, initial_cfg_capacity, "basic_block_info");
133 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
134
135 /* Build a mapping of labels to their associated blocks. */
136 VARRAY_BB_INIT (label_to_block_map, initial_cfg_capacity,
137 "label to block map");
138
139 ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
140 EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
141
142 found_computed_goto = 0;
143 make_blocks (*tp);
144
145 /* Computed gotos are hell to deal with, especially if there are
146 lots of them with a large number of destinations. So we factor
147 them to a common computed goto location before we build the
148 edge list. After we convert back to normal form, we will un-factor
149 the computed gotos since factoring introduces an unwanted jump. */
150 if (found_computed_goto)
151 factor_computed_gotos ();
152
 153 /* Make sure there is always at least one block, even if it's empty. */
154 if (n_basic_blocks == 0)
155 create_empty_bb (ENTRY_BLOCK_PTR);
156
157 create_block_annotation (ENTRY_BLOCK_PTR);
158 create_block_annotation (EXIT_BLOCK_PTR);
159
160 /* Adjust the size of the array. */
161 VARRAY_GROW (basic_block_info, n_basic_blocks);
162
163 /* To speed up statement iterator walks, we first purge dead labels. */
164 cleanup_dead_labels ();
165
166 /* Group case nodes to reduce the number of edges.
167 We do this after cleaning up dead labels because otherwise we miss
168 a lot of obvious case merging opportunities. */
169 group_case_labels ();
170
171 /* Create the edges of the flowgraph. */
172 make_edges ();
173
174 /* Debugging dumps. */
175
176 /* Write the flowgraph to a VCG file. */
177 {
178 int local_dump_flags;
179 FILE *dump_file = dump_begin (TDI_vcg, &local_dump_flags);
180 if (dump_file)
181 {
182 tree_cfg2vcg (dump_file);
183 dump_end (TDI_vcg, dump_file);
184 }
185 }
186
187 /* Dump a textual representation of the flowgraph. */
188 if (dump_file)
189 dump_tree_cfg (dump_file, dump_flags);
190}
191
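/* Entry point of the CFG construction pass; builds the flowgraph for the
   current function's saved tree. */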
192static void
193execute_build_cfg (void)
194{
195 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
196}
197
198struct tree_opt_pass pass_build_cfg =
199{
200 "cfg", /* name */
201 NULL, /* gate */
202 execute_build_cfg, /* execute */
203 NULL, /* sub */
204 NULL, /* next */
205 0, /* static_pass_number */
206 TV_TREE_CFG, /* tv_id */
207 PROP_gimple_leh, /* properties_required */
208 PROP_cfg, /* properties_provided */
209 0, /* properties_destroyed */
210 0, /* todo_flags_start */
211 TODO_verify_stmts, /* todo_flags_finish */
212 0 /* letter */
213};
214
215/* Search the CFG for any computed gotos. If found, factor them to a
216 common computed goto site. Also record the location of that site so
217 that we can un-factor the gotos after we have converted back to
218 normal form. */
219
220static void
221factor_computed_gotos (void)
222{
223 basic_block bb;
224 tree factored_label_decl = NULL;
225 tree var = NULL;
226 tree factored_computed_goto_label = NULL;
227 tree factored_computed_goto = NULL;
228
229 /* We know there are one or more computed gotos in this function.
230 Examine the last statement in each basic block to see if the block
231 ends with a computed goto. */
232
233 FOR_EACH_BB (bb)
234 {
235 block_stmt_iterator bsi = bsi_last (bb);
236 tree last;
237
238 if (bsi_end_p (bsi))
239 continue;
240 last = bsi_stmt (bsi);
241
242 /* Ignore the computed goto we create when we factor the original
243 computed gotos. */
244 if (last == factored_computed_goto)
245 continue;
246
247 /* If the last statement is a computed goto, factor it. */
248 if (computed_goto_p (last))
249 {
250 tree assignment;
251
252 /* The first time we find a computed goto we need to create
253 the factored goto block and the variable each original
254 computed goto will use for their goto destination. */
255 if (! factored_computed_goto)
256 {
257 basic_block new_bb = create_empty_bb (bb);
258 block_stmt_iterator new_bsi = bsi_start (new_bb);
259
260 /* Create the destination of the factored goto. Each original
261 computed goto will put its desired destination into this
262 variable and jump to the label we create immediately
263 below. */
264 var = create_tmp_var (ptr_type_node, "gotovar");
265
266 /* Build a label for the new block which will contain the
267 factored computed goto. */
268 factored_label_decl = create_artificial_label ();
269 factored_computed_goto_label
270 = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
271 bsi_insert_after (&new_bsi, factored_computed_goto_label,
272 BSI_NEW_STMT);
273
274 /* Build our new computed goto. */
275 factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
276 bsi_insert_after (&new_bsi, factored_computed_goto,
277 BSI_NEW_STMT);
278 }
279
280 /* Copy the original computed goto's destination into VAR. */
281 assignment = build (MODIFY_EXPR, ptr_type_node,
282 var, GOTO_DESTINATION (last));
283 bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
284
285 /* And re-vector the computed goto to the new destination. */
286 GOTO_DESTINATION (last) = factored_label_decl;
287 }
288 }
289}
290
291
292/* Create annotations for a single basic block. */
293
294static void
295create_block_annotation (basic_block bb)
296{
297 /* Verify that the tree_annotations field is clear. */
 298 gcc_assert (!bb->tree_annotations);
299 bb->tree_annotations = ggc_alloc_cleared (sizeof (struct bb_ann_d));
300}
301
302
303/* Free the annotations for all the basic blocks. */
304
305static void free_blocks_annotations (void)
306{
307 clear_blocks_annotations ();
308}
309
310
311/* Clear the annotations for all the basic blocks. */
312
313static void
314clear_blocks_annotations (void)
315{
316 basic_block bb;
317
318 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
319 bb->tree_annotations = NULL;
320}
321
322
323/* Build a flowgraph for the statement_list STMT_LIST. */
324
325static void
326make_blocks (tree stmt_list)
327{
328 tree_stmt_iterator i = tsi_start (stmt_list);
329 tree stmt = NULL;
330 bool start_new_block = true;
331 bool first_stmt_of_list = true;
332 basic_block bb = ENTRY_BLOCK_PTR;
333
334 while (!tsi_end_p (i))
335 {
336 tree prev_stmt;
337
338 prev_stmt = stmt;
339 stmt = tsi_stmt (i);
340
341 /* If the statement starts a new basic block or if we have determined
342 in a previous pass that we need to create a new block for STMT, do
343 so now. */
344 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
345 {
346 if (!first_stmt_of_list)
347 stmt_list = tsi_split_statement_list_before (&i);
348 bb = create_basic_block (stmt_list, NULL, bb);
349 start_new_block = false;
350 }
351
352 /* Now add STMT to BB and create the subgraphs for special statement
353 codes. */
354 set_bb_for_stmt (stmt, bb);
355
356 if (computed_goto_p (stmt))
357 found_computed_goto = true;
358
359 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
360 next iteration. */
361 if (stmt_ends_bb_p (stmt))
362 start_new_block = true;
363
364 tsi_next (&i);
365 first_stmt_of_list = false;
366 }
367}
368
369
370/* Create and return a new empty basic block after bb AFTER. */
371
372static basic_block
373create_bb (void *h, void *e, basic_block after)
374{
375 basic_block bb;
376
 377 gcc_assert (!e);
378
379 /* Create and initialize a new basic block. */
380 bb = alloc_block ();
381 memset (bb, 0, sizeof (*bb));
382
383 bb->index = last_basic_block;
384 bb->flags = BB_NEW;
385 bb->stmt_list = h ? h : alloc_stmt_list ();
386
387 /* Add the new block to the linked list of blocks. */
388 link_block (bb, after);
389
390 /* Grow the basic block array if needed. */
391 if ((size_t) last_basic_block == VARRAY_SIZE (basic_block_info))
392 {
393 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
394 VARRAY_GROW (basic_block_info, new_size);
395 }
396
397 /* Add the newly created block to the array. */
398 BASIC_BLOCK (last_basic_block) = bb;
399
400 create_block_annotation (bb);
401
402 n_basic_blocks++;
403 last_basic_block++;
404
405 initialize_bb_rbi (bb);
406 return bb;
407}
408
409
410/*---------------------------------------------------------------------------
411 Edge creation
412---------------------------------------------------------------------------*/
413
414/* Join all the blocks in the flowgraph. */
415
416static void
417make_edges (void)
418{
419 basic_block bb;
420
421 /* Create an edge from entry to the first block with executable
422 statements in it. */
423 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
424
425 /* Traverse basic block array placing edges. */
426 FOR_EACH_BB (bb)
427 {
428 tree first = first_stmt (bb);
429 tree last = last_stmt (bb);
430
431 if (first)
432 {
433 /* Edges for statements that always alter flow control. */
434 if (is_ctrl_stmt (last))
435 make_ctrl_stmt_edges (bb);
436
437 /* Edges for statements that sometimes alter flow control. */
438 if (is_ctrl_altering_stmt (last))
439 make_exit_edges (bb);
440 }
441
442 /* Finally, if no edges were created above, this is a regular
443 basic block that only needs a fallthru edge. */
 444 if (EDGE_COUNT (bb->succs) == 0)
445 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
446 }
447
448 /* We do not care about fake edges, so remove any that the CFG
449 builder inserted for completeness. */
 450 remove_fake_exit_edges ();
 451
452 /* Clean up the graph and warn for unreachable code. */
453 cleanup_tree_cfg ();
454}
455
456
457/* Create edges for control statement at basic block BB. */
458
459static void
460make_ctrl_stmt_edges (basic_block bb)
461{
462 tree last = last_stmt (bb);
 463
 464 gcc_assert (last);
465 switch (TREE_CODE (last))
466 {
467 case GOTO_EXPR:
468 make_goto_expr_edges (bb);
469 break;
470
471 case RETURN_EXPR:
472 make_edge (bb, EXIT_BLOCK_PTR, 0);
473 break;
474
475 case COND_EXPR:
476 make_cond_expr_edges (bb);
477 break;
478
479 case SWITCH_EXPR:
480 make_switch_expr_edges (bb);
481 break;
482
483 case RESX_EXPR:
484 make_eh_edges (last);
485 /* Yet another NORETURN hack. */
 486 if (EDGE_COUNT (bb->succs) == 0)
487 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
488 break;
489
490 default:
 491 gcc_unreachable ();
492 }
493}
494
495
496/* Create exit edges for statements in block BB that alter the flow of
497 control. Statements that alter the control flow are 'goto', 'return'
498 and calls to non-returning functions. */
499
500static void
501make_exit_edges (basic_block bb)
502{
 503 tree last = last_stmt (bb), op;
 504
 505 gcc_assert (last);
506 switch (TREE_CODE (last))
507 {
508 case CALL_EXPR:
509 /* If this function receives a nonlocal goto, then we need to
510 make edges from this call site to all the nonlocal goto
511 handlers. */
512 if (TREE_SIDE_EFFECTS (last)
513 && current_function_has_nonlocal_label)
514 make_goto_expr_edges (bb);
515
516 /* If this statement has reachable exception handlers, then
517 create abnormal edges to them. */
518 make_eh_edges (last);
519
520 /* Some calls are known not to return. For such calls we create
521 a fake edge.
522
523 We really need to revamp how we build edges so that it's not
524 such a bloody pain to avoid creating edges for this case since
525 all we do is remove these edges when we're done building the
526 CFG. */
527 if (call_expr_flags (last) & (ECF_NORETURN | ECF_LONGJMP))
528 {
529 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
530 return;
531 }
532
533 /* Don't forget the fall-thru edge. */
534 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
535 break;
536
537 case MODIFY_EXPR:
538 /* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the CALL_EXPR
539 may have an abnormal edge. Search the RHS for this case and
540 create any required edges. */
541 op = get_call_expr_in (last);
542 if (op && TREE_SIDE_EFFECTS (op)
543 && current_function_has_nonlocal_label)
544 make_goto_expr_edges (bb);
545
546 make_eh_edges (last);
547 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
548 break;
549
550 default:
 551 gcc_unreachable ();
552 }
553}
554
555
556/* Create the edges for a COND_EXPR starting at block BB.
557 At this point, both clauses must contain only simple gotos. */
558
559static void
560make_cond_expr_edges (basic_block bb)
561{
562 tree entry = last_stmt (bb);
563 basic_block then_bb, else_bb;
564 tree then_label, else_label;
565
566 gcc_assert (entry);
567 gcc_assert (TREE_CODE (entry) == COND_EXPR);
568
569 /* Entry basic blocks for each component. */
570 then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
571 else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
572 then_bb = label_to_block (then_label);
573 else_bb = label_to_block (else_label);
574
575 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
576 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
577}
578
579
580/* Create the edges for a SWITCH_EXPR starting at block BB.
581 At this point, the switch body has been lowered and the
582 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
583
584static void
585make_switch_expr_edges (basic_block bb)
586{
587 tree entry = last_stmt (bb);
588 size_t i, n;
589 tree vec;
590
591 vec = SWITCH_LABELS (entry);
592 n = TREE_VEC_LENGTH (vec);
593
594 for (i = 0; i < n; ++i)
595 {
596 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
597 basic_block label_bb = label_to_block (lab);
598 make_edge (bb, label_bb, 0);
599 }
600}
601
602
603/* Return the basic block holding label DEST. */
604
605basic_block
606label_to_block (tree dest)
607{
608 int uid = LABEL_DECL_UID (dest);
609
610 /* We would die hard when faced by an undefined label. Emit a label to
611 the very first basic block. This will hopefully make even the dataflow
612 and undefined variable warnings quite right. */
613 if ((errorcount || sorrycount) && uid < 0)
614 {
615 block_stmt_iterator bsi = bsi_start (BASIC_BLOCK (0));
616 tree stmt;
617
618 stmt = build1 (LABEL_EXPR, void_type_node, dest);
619 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
620 uid = LABEL_DECL_UID (dest);
621 }
622 return VARRAY_BB (label_to_block_map, uid);
623}
624
625
626/* Create edges for a goto statement at block BB. */
627
628static void
629make_goto_expr_edges (basic_block bb)
630{
631 tree goto_t, dest;
632 basic_block target_bb;
633 int for_call;
634 block_stmt_iterator last = bsi_last (bb);
635
636 goto_t = bsi_stmt (last);
637
638 /* If the last statement is not a GOTO (i.e., it is a RETURN_EXPR,
639 CALL_EXPR or MODIFY_EXPR), then the edge is an abnormal edge resulting
640 from a nonlocal goto. */
641 if (TREE_CODE (goto_t) != GOTO_EXPR)
642 {
643 dest = error_mark_node;
644 for_call = 1;
645 }
646 else
647 {
648 dest = GOTO_DESTINATION (goto_t);
649 for_call = 0;
650
651 /* A GOTO to a local label creates normal edges. */
652 if (simple_goto_p (goto_t))
653 {
 654 edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
655#ifdef USE_MAPPED_LOCATION
656 e->goto_locus = EXPR_LOCATION (goto_t);
657#else
 658 e->goto_locus = EXPR_LOCUS (goto_t);
 659#endif
660 bsi_remove (&last);
661 return;
662 }
663
 664 /* Nothing more to do for nonlocal gotos. */
665 if (TREE_CODE (dest) == LABEL_DECL)
666 return;
667
668 /* Computed gotos remain. */
669 }
670
671 /* Look for the block starting with the destination label. In the
672 case of a computed goto, make an edge to any label block we find
673 in the CFG. */
674 FOR_EACH_BB (target_bb)
675 {
676 block_stmt_iterator bsi;
677
678 for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
679 {
680 tree target = bsi_stmt (bsi);
681
682 if (TREE_CODE (target) != LABEL_EXPR)
683 break;
684
685 if (
686 /* Computed GOTOs. Make an edge to every label block that has
687 been marked as a potential target for a computed goto. */
688 (FORCED_LABEL (LABEL_EXPR_LABEL (target)) && for_call == 0)
689 /* Nonlocal GOTO target. Make an edge to every label block
690 that has been marked as a potential target for a nonlocal
691 goto. */
692 || (DECL_NONLOCAL (LABEL_EXPR_LABEL (target)) && for_call == 1))
693 {
694 make_edge (bb, target_bb, EDGE_ABNORMAL);
695 break;
696 }
697 }
698 }
699
700 /* Degenerate case of computed goto with no labels. */
 701 if (!for_call && EDGE_COUNT (bb->succs) == 0)
702 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
703}
704
705
706/*---------------------------------------------------------------------------
707 Flowgraph analysis
708---------------------------------------------------------------------------*/
709
710/* Remove unreachable blocks and other miscellaneous clean up work. */
711
 712bool
713cleanup_tree_cfg (void)
714{
 715 bool retval = false;
716
717 timevar_push (TV_TREE_CLEANUP_CFG);
718
 719 retval = cleanup_control_flow ();
 720 retval |= delete_unreachable_blocks ();
 721 retval |= thread_jumps ();
 722
723#ifdef ENABLE_CHECKING
724 if (retval)
725 {
726 gcc_assert (!cleanup_control_flow ());
727 gcc_assert (!delete_unreachable_blocks ());
 728 gcc_assert (!thread_jumps ());
 729 }
730#endif
731
732 /* Merging the blocks creates no new opportunities for the other
733 optimizations, so do it here. */
734 merge_seq_blocks ();
735
736 compact_blocks ();
737
738#ifdef ENABLE_CHECKING
739 verify_flow_info ();
740#endif
741 timevar_pop (TV_TREE_CLEANUP_CFG);
 742 return retval;
743}
744
745
746/* Cleanup useless labels in basic blocks. This is something we wish
747 to do early because it allows us to group case labels before creating
748 the edges for the CFG, and it speeds up block statement iterators in
749 all passes later on.
750 We only run this pass once, running it more than once is probably not
751 profitable. */
752
753/* A map from basic block index to the leading label of that block. */
754static tree *label_for_bb;
755
756/* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
757static void
758update_eh_label (struct eh_region *region)
759{
760 tree old_label = get_eh_region_tree_label (region);
761 if (old_label)
762 {
763 tree new_label;
764 basic_block bb = label_to_block (old_label);
765
766 /* ??? After optimizing, there may be EH regions with labels
767 that have already been removed from the function body, so
768 there is no basic block for them. */
769 if (! bb)
770 return;
771
772 new_label = label_for_bb[bb->index];
773 set_eh_region_tree_label (region, new_label);
774 }
775}
776
777/* Given LABEL return the first label in the same basic block. */
778static tree
779main_block_label (tree label)
780{
781 basic_block bb = label_to_block (label);
782
783 /* label_to_block possibly inserted undefined label into the chain. */
784 if (!label_for_bb[bb->index])
785 label_for_bb[bb->index] = label;
786 return label_for_bb[bb->index];
787}
788
 789/* Cleanup redundant labels. This is a three-step process:
790 1) Find the leading label for each block.
791 2) Redirect all references to labels to the leading labels.
792 3) Cleanup all useless labels. */
 793
 794void
795cleanup_dead_labels (void)
796{
797 basic_block bb;
 798 label_for_bb = xcalloc (last_basic_block, sizeof (tree));
799
800 /* Find a suitable label for each block. We use the first user-defined
 801 label if there is one, or otherwise just the first label we see. */
802 FOR_EACH_BB (bb)
803 {
804 block_stmt_iterator i;
805
806 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
807 {
808 tree label, stmt = bsi_stmt (i);
809
810 if (TREE_CODE (stmt) != LABEL_EXPR)
811 break;
812
813 label = LABEL_EXPR_LABEL (stmt);
814
815 /* If we have not yet seen a label for the current block,
816 remember this one and see if there are more labels. */
817 if (! label_for_bb[bb->index])
818 {
819 label_for_bb[bb->index] = label;
820 continue;
821 }
822
823 /* If we did see a label for the current block already, but it
824 is an artificially created label, replace it if the current
825 label is a user defined label. */
826 if (! DECL_ARTIFICIAL (label)
827 && DECL_ARTIFICIAL (label_for_bb[bb->index]))
828 {
829 label_for_bb[bb->index] = label;
830 break;
831 }
832 }
833 }
834
835 /* Now redirect all jumps/branches to the selected label.
836 First do so for each block ending in a control statement. */
837 FOR_EACH_BB (bb)
838 {
839 tree stmt = last_stmt (bb);
840 if (!stmt)
841 continue;
842
843 switch (TREE_CODE (stmt))
844 {
845 case COND_EXPR:
846 {
847 tree true_branch, false_branch;
848
849 true_branch = COND_EXPR_THEN (stmt);
850 false_branch = COND_EXPR_ELSE (stmt);
 851
852 GOTO_DESTINATION (true_branch)
853 = main_block_label (GOTO_DESTINATION (true_branch));
854 GOTO_DESTINATION (false_branch)
855 = main_block_label (GOTO_DESTINATION (false_branch));
856
857 break;
858 }
859
860 case SWITCH_EXPR:
861 {
862 size_t i;
863 tree vec = SWITCH_LABELS (stmt);
864 size_t n = TREE_VEC_LENGTH (vec);
865
866 /* Replace all destination labels. */
867 for (i = 0; i < n; ++i)
868 CASE_LABEL (TREE_VEC_ELT (vec, i))
869 = main_block_label (CASE_LABEL (TREE_VEC_ELT (vec, i)));
870
871 break;
872 }
873
874 /* We have to handle GOTO_EXPRs until they're removed, and we don't
875 remove them until after we've created the CFG edges. */
876 case GOTO_EXPR:
877 if (! computed_goto_p (stmt))
878 {
879 GOTO_DESTINATION (stmt)
880 = main_block_label (GOTO_DESTINATION (stmt));
881 break;
882 }
 883
884 default:
885 break;
886 }
887 }
888
889 for_each_eh_region (update_eh_label);
890
891 /* Finally, purge dead labels. All user-defined labels and labels that
892 can be the target of non-local gotos are preserved. */
893 FOR_EACH_BB (bb)
894 {
895 block_stmt_iterator i;
896 tree label_for_this_bb = label_for_bb[bb->index];
897
898 if (! label_for_this_bb)
899 continue;
900
901 for (i = bsi_start (bb); !bsi_end_p (i); )
902 {
903 tree label, stmt = bsi_stmt (i);
904
905 if (TREE_CODE (stmt) != LABEL_EXPR)
906 break;
907
908 label = LABEL_EXPR_LABEL (stmt);
909
910 if (label == label_for_this_bb
911 || ! DECL_ARTIFICIAL (label)
912 || DECL_NONLOCAL (label))
913 bsi_next (&i);
914 else
915 bsi_remove (&i);
916 }
917 }
918
919 free (label_for_bb);
920}
921
922/* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
923 and scan the sorted vector of cases. Combine the ones jumping to the
924 same label.
925 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
926
 927void
928group_case_labels (void)
929{
930 basic_block bb;
931
932 FOR_EACH_BB (bb)
933 {
934 tree stmt = last_stmt (bb);
935 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
936 {
937 tree labels = SWITCH_LABELS (stmt);
938 int old_size = TREE_VEC_LENGTH (labels);
939 int i, j, new_size = old_size;
940 tree default_case = TREE_VEC_ELT (labels, old_size - 1);
941 tree default_label;
942
 943 /* The default label is always the last case in a switch
944 statement after gimplification. */
945 default_label = CASE_LABEL (default_case);
946
947 /* Look for possible opportunities to merge cases.
948 Ignore the last element of the label vector because it
949 must be the default case. */
950 i = 0;
 951 while (i < old_size - 1)
952 {
953 tree base_case, base_label, base_high, type;
954 base_case = TREE_VEC_ELT (labels, i);
955
 956 gcc_assert (base_case);
 957 base_label = CASE_LABEL (base_case);
958
959 /* Discard cases that have the same destination as the
960 default case. */
961 if (base_label == default_label)
962 {
963 TREE_VEC_ELT (labels, i) = NULL_TREE;
964 i++;
 965 new_size--;
966 continue;
967 }
968
969 type = TREE_TYPE (CASE_LOW (base_case));
970 base_high = CASE_HIGH (base_case) ?
971 CASE_HIGH (base_case) : CASE_LOW (base_case);
 972 i++;
973 /* Try to merge case labels. Break out when we reach the end
974 of the label vector or when we cannot merge the next case
975 label with the current one. */
 976 while (i < old_size - 1)
 977 {
 978 tree merge_case = TREE_VEC_ELT (labels, i);
979 tree merge_label = CASE_LABEL (merge_case);
980 tree t = int_const_binop (PLUS_EXPR, base_high,
981 integer_one_node, 1);
982
983 /* Merge the cases if they jump to the same place,
984 and their ranges are consecutive. */
985 if (merge_label == base_label
986 && tree_int_cst_equal (CASE_LOW (merge_case), t))
987 {
988 base_high = CASE_HIGH (merge_case) ?
989 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
990 CASE_HIGH (base_case) = base_high;
991 TREE_VEC_ELT (labels, i) = NULL_TREE;
992 new_size--;
 993 i++;
994 }
995 else
996 break;
997 }
998 }
999
1000 /* Compress the case labels in the label vector, and adjust the
1001 length of the vector. */
1002 for (i = 0, j = 0; i < new_size; i++)
1003 {
1004 while (! TREE_VEC_ELT (labels, j))
1005 j++;
1006 TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1007 }
1008 TREE_VEC_LENGTH (labels) = new_size;
1009 }
1010 }
1011}
1012
1013/* Checks whether we can merge block B into block A. */
1014
1015static bool
1016tree_can_merge_blocks_p (basic_block a, basic_block b)
1017{
1018 tree stmt;
1019 block_stmt_iterator bsi;
1020
 1021 if (EDGE_COUNT (a->succs) != 1)
1022 return false;
1023
 1024 if (EDGE_SUCC (a, 0)->flags & EDGE_ABNORMAL)
1025 return false;
1026
 1027 if (EDGE_SUCC (a, 0)->dest != b)
1028 return false;
1029
1030 if (b == EXIT_BLOCK_PTR)
1031 return false;
1032
 1033 if (EDGE_COUNT (b->preds) > 1)
1034 return false;
1035
1036 /* If A ends by a statement causing exceptions or something similar, we
1037 cannot merge the blocks. */
1038 stmt = last_stmt (a);
1039 if (stmt && stmt_ends_bb_p (stmt))
1040 return false;
1041
1042 /* Do not allow a block with only a non-local label to be merged. */
1043 if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1044 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1045 return false;
1046
1047 /* There may be no phi nodes at the start of b. Most of these degenerate
1048 phi nodes should be cleaned up by kill_redundant_phi_nodes. */
1049 if (phi_nodes (b))
1050 return false;
1051
1052 /* Do not remove user labels. */
1053 for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
1054 {
1055 stmt = bsi_stmt (bsi);
1056 if (TREE_CODE (stmt) != LABEL_EXPR)
1057 break;
1058 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1059 return false;
1060 }
1061
1062 return true;
1063}
1064
1065
1066/* Merge block B into block A. */
1067
1068static void
1069tree_merge_blocks (basic_block a, basic_block b)
1070{
1071 block_stmt_iterator bsi;
1072 tree_stmt_iterator last;
1073
1074 if (dump_file)
1075 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1076
1077 /* Ensure that B follows A. */
1078 move_block_after (b, a);
1079
 1080 gcc_assert (EDGE_SUCC (a, 0)->flags & EDGE_FALLTHRU);
 1081 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1082
1083 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1084 for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1085 {
1086 if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
1087 bsi_remove (&bsi);
1088 else
1089 {
1090 set_bb_for_stmt (bsi_stmt (bsi), a);
1091 bsi_next (&bsi);
1092 }
1093 }
1094
1095 /* Merge the chains. */
1096 last = tsi_last (a->stmt_list);
1097 tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
1098 b->stmt_list = NULL;
1099}
1100
1101
1102/* Walk the function tree removing unnecessary statements.
1103
1104 * Empty statement nodes are removed
1105
1106 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1107
1108 * Unnecessary COND_EXPRs are removed
1109
1110 * Some unnecessary BIND_EXPRs are removed
1111
1112 Clearly more work could be done. The trick is doing the analysis
1113 and removal fast enough to be a net improvement in compile times.
1114
1115 Note that when we remove a control structure such as a COND_EXPR
1116 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1117 to ensure we eliminate all the useless code. */
1118
1119struct rus_data
1120{
1121 tree *last_goto;
1122 bool repeat;
1123 bool may_throw;
1124 bool may_branch;
1125 bool has_label;
1126};
1127
1128static void remove_useless_stmts_1 (tree *, struct rus_data *);
1129
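/* Warn that STMT (or the first statement within it that has a location)
   will never be executed. Return true if a warning was issued. */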
1130static bool
1131remove_useless_stmts_warn_notreached (tree stmt)
1132{
 1133 if (EXPR_HAS_LOCATION (stmt))
 1134 {
1135 location_t loc = EXPR_LOCATION (stmt);
1136 warning ("%Hwill never be executed", &loc);
1137 return true;
1138 }
1139
1140 switch (TREE_CODE (stmt))
1141 {
1142 case STATEMENT_LIST:
1143 {
1144 tree_stmt_iterator i;
1145 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1146 if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1147 return true;
1148 }
1149 break;
1150
1151 case COND_EXPR:
1152 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1153 return true;
1154 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1155 return true;
1156 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1157 return true;
1158 break;
1159
1160 case TRY_FINALLY_EXPR:
1161 case TRY_CATCH_EXPR:
1162 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1163 return true;
1164 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1165 return true;
1166 break;
1167
1168 case CATCH_EXPR:
1169 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1170 case EH_FILTER_EXPR:
1171 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1172 case BIND_EXPR:
1173 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1174
1175 default:
1176 /* Not a live container. */
1177 break;
1178 }
1179
1180 return false;
1181}
1182
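/* Try to simplify or eliminate the COND_EXPR pointed to by STMT_P.
   DATA carries shared state for the useless statement removal pass. */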
1183static void
1184remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1185{
1186 tree then_clause, else_clause, cond;
1187 bool save_has_label, then_has_label, else_has_label;
1188
1189 save_has_label = data->has_label;
1190 data->has_label = false;
1191 data->last_goto = NULL;
1192
1193 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1194
1195 then_has_label = data->has_label;
1196 data->has_label = false;
1197 data->last_goto = NULL;
1198
1199 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1200
1201 else_has_label = data->has_label;
1202 data->has_label = save_has_label | then_has_label | else_has_label;
1203
1204 then_clause = COND_EXPR_THEN (*stmt_p);
1205 else_clause = COND_EXPR_ELSE (*stmt_p);
1206 cond = COND_EXPR_COND (*stmt_p);
1207
1208 /* If neither arm does anything at all, we can remove the whole IF. */
1209 if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1210 {
1211 *stmt_p = build_empty_stmt ();
1212 data->repeat = true;
1213 }
1214
1215 /* If there are no reachable statements in an arm, then we can
1216 zap the entire conditional. */
1217 else if (integer_nonzerop (cond) && !else_has_label)
1218 {
1219 if (warn_notreached)
1220 remove_useless_stmts_warn_notreached (else_clause);
1221 *stmt_p = then_clause;
1222 data->repeat = true;
1223 }
1224 else if (integer_zerop (cond) && !then_has_label)
1225 {
1226 if (warn_notreached)
1227 remove_useless_stmts_warn_notreached (then_clause);
1228 *stmt_p = else_clause;
1229 data->repeat = true;
1230 }
1231
1232 /* Check a couple of simple things on then/else with single stmts. */
1233 else
1234 {
1235 tree then_stmt = expr_only (then_clause);
1236 tree else_stmt = expr_only (else_clause);
1237
1238 /* Notice branches to a common destination. */
1239 if (then_stmt && else_stmt
1240 && TREE_CODE (then_stmt) == GOTO_EXPR
1241 && TREE_CODE (else_stmt) == GOTO_EXPR
1242 && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1243 {
1244 *stmt_p = then_stmt;
1245 data->repeat = true;
1246 }
1247
1248 /* If the THEN/ELSE clause merely assigns a value to a variable or
1249 parameter which is already known to contain that value, then
1250 remove the useless THEN/ELSE clause. */
1251 else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1252 {
1253 if (else_stmt
1254 && TREE_CODE (else_stmt) == MODIFY_EXPR
1255 && TREE_OPERAND (else_stmt, 0) == cond
1256 && integer_zerop (TREE_OPERAND (else_stmt, 1)))
1257 COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1258 }
1259 else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1260 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1261 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1262 && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1263 {
1264 tree stmt = (TREE_CODE (cond) == EQ_EXPR
1265 ? then_stmt : else_stmt);
1266 tree *location = (TREE_CODE (cond) == EQ_EXPR
1267 ? &COND_EXPR_THEN (*stmt_p)
1268 : &COND_EXPR_ELSE (*stmt_p));
1269
1270 if (stmt
1271 && TREE_CODE (stmt) == MODIFY_EXPR
1272 && TREE_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1273 && TREE_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
1274 *location = alloc_stmt_list ();
1275 }
1276 }
1277
1278 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1279 would be re-introduced during lowering. */
1280 data->last_goto = NULL;
1281}
1282
1283
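/* Try to simplify or eliminate the TRY_FINALLY_EXPR pointed to by STMT_P.
   DATA carries shared state for the useless statement removal pass. */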
1284static void
1285remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1286{
1287 bool save_may_branch, save_may_throw;
1288 bool this_may_branch, this_may_throw;
1289
1290 /* Collect may_branch and may_throw information for the body only. */
1291 save_may_branch = data->may_branch;
1292 save_may_throw = data->may_throw;
1293 data->may_branch = false;
1294 data->may_throw = false;
1295 data->last_goto = NULL;
1296
1297 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1298
1299 this_may_branch = data->may_branch;
1300 this_may_throw = data->may_throw;
1301 data->may_branch |= save_may_branch;
1302 data->may_throw |= save_may_throw;
1303 data->last_goto = NULL;
1304
1305 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1306
1307 /* If the body is empty, then we can emit the FINALLY block without
1308 the enclosing TRY_FINALLY_EXPR. */
1309 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1310 {
1311 *stmt_p = TREE_OPERAND (*stmt_p, 1);
1312 data->repeat = true;
1313 }
1314
1315 /* If the handler is empty, then we can emit the TRY block without
1316 the enclosing TRY_FINALLY_EXPR. */
1317 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1318 {
1319 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1320 data->repeat = true;
1321 }
1322
1323 /* If the body neither throws, nor branches, then we can safely
1324 string the TRY and FINALLY blocks together. */
1325 else if (!this_may_branch && !this_may_throw)
1326 {
1327 tree stmt = *stmt_p;
1328 *stmt_p = TREE_OPERAND (stmt, 0);
1329 append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1330 data->repeat = true;
1331 }
1332}
1333
1334
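/* Try to simplify or eliminate the TRY_CATCH_EXPR pointed to by STMT_P.
   DATA carries shared state for the useless statement removal pass. */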
1335static void
1336remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1337{
1338 bool save_may_throw, this_may_throw;
1339 tree_stmt_iterator i;
1340 tree stmt;
1341
1342 /* Collect may_throw information for the body only. */
1343 save_may_throw = data->may_throw;
1344 data->may_throw = false;
1345 data->last_goto = NULL;
1346
1347 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1348
1349 this_may_throw = data->may_throw;
1350 data->may_throw = save_may_throw;
1351
1352 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1353 if (!this_may_throw)
1354 {
1355 if (warn_notreached)
1356 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1357 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1358 data->repeat = true;
1359 return;
1360 }
1361
1362 /* Process the catch clause specially. We may be able to tell that
1363 no exceptions propagate past this point. */
1364
1365 this_may_throw = true;
1366 i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1367 stmt = tsi_stmt (i);
1368 data->last_goto = NULL;
1369
1370 switch (TREE_CODE (stmt))
1371 {
1372 case CATCH_EXPR:
1373 for (; !tsi_end_p (i); tsi_next (&i))
1374 {
1375 stmt = tsi_stmt (i);
1376 /* If we catch all exceptions, then the body does not
1377 propagate exceptions past this point. */
1378 if (CATCH_TYPES (stmt) == NULL)
1379 this_may_throw = false;
1380 data->last_goto = NULL;
1381 remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1382 }
1383 break;
1384
1385 case EH_FILTER_EXPR:
1386 if (EH_FILTER_MUST_NOT_THROW (stmt))
1387 this_may_throw = false;
1388 else if (EH_FILTER_TYPES (stmt) == NULL)
1389 this_may_throw = false;
1390 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1391 break;
1392
1393 default:
1394 /* Otherwise this is a cleanup. */
1395 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1396
1397 /* If the cleanup is empty, then we can emit the TRY block without
1398 the enclosing TRY_CATCH_EXPR. */
1399 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1400 {
1401 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1402 data->repeat = true;
1403 }
1404 break;
1405 }
1406 data->may_throw |= this_may_throw;
1407}
1408
1409
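/* Try to eliminate the BIND_EXPR pointed to by STMT_P if it declares no
   variables. DATA carries shared state for the useless statement removal
   pass. */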
1410static void
1411remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1412{
1413 tree block;
1414
1415 /* First remove anything underneath the BIND_EXPR. */
1416 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1417
1418 /* If the BIND_EXPR has no variables, then we can pull everything
1419 up one level and remove the BIND_EXPR, unless this is the toplevel
1420 BIND_EXPR for the current function or an inlined function.
1421
1422 When this situation occurs we will want to apply this
1423 optimization again. */
1424 block = BIND_EXPR_BLOCK (*stmt_p);
1425 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1426 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1427 && (! block
1428 || ! BLOCK_ABSTRACT_ORIGIN (block)
1429 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1430 != FUNCTION_DECL)))
1431 {
1432 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1433 data->repeat = true;
1434 }
1435}
1436
1437
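/* Process the GOTO_EXPR pointed to by STMT_P, recording it so that a
   following label can delete it if it turns out to be redundant. */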
1438static void
1439remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1440{
1441 tree dest = GOTO_DESTINATION (*stmt_p);
1442
1443 data->may_branch = true;
1444 data->last_goto = NULL;
1445
1446 /* Record the last goto expr, so that we can delete it if unnecessary. */
1447 if (TREE_CODE (dest) == LABEL_DECL)
1448 data->last_goto = stmt_p;
1449}
1450
1451
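/* Process the LABEL_EXPR pointed to by STMT_P; delete the immediately
   preceding goto if it merely jumps to this label. */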
1452static void
1453remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1454{
1455 tree label = LABEL_EXPR_LABEL (*stmt_p);
1456
1457 data->has_label = true;
1458
1459 /* We do want to jump across non-local label receiver code. */
1460 if (DECL_NONLOCAL (label))
1461 data->last_goto = NULL;
1462
1463 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1464 {
1465 *data->last_goto = build_empty_stmt ();
1466 data->repeat = true;
1467 }
1468
1469 /* ??? Add something here to delete unused labels. */
1470}
1471
1472
1473/* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
1474 decl. This allows us to eliminate redundant or useless
1475 calls to "const" functions.
1476
1477 Gimplifier already does the same operation, but we may notice functions
1478 being const and pure once their calls has been gimplified, so we need
1479 to update the flag. */
1480
1481static void
1482update_call_expr_flags (tree call)
1483{
1484 tree decl = get_callee_fndecl (call);
1485 if (!decl)
1486 return;
1487 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1488 TREE_SIDE_EFFECTS (call) = 0;
1489 if (TREE_NOTHROW (decl))
1490 TREE_NOTHROW (call) = 1;
1491}
1492
1493
1494/* T is CALL_EXPR. Set current_function_calls_* flags. */
1495
1496void
1497notice_special_calls (tree t)
1498{
1499 int flags = call_expr_flags (t);
1500
1501 if (flags & ECF_MAY_BE_ALLOCA)
1502 current_function_calls_alloca = true;
1503 if (flags & ECF_RETURNS_TWICE)
1504 current_function_calls_setjmp = true;
1505}
1506
1507
1508/* Clear flags set by notice_special_calls. Used by dead code removal
1509 to update the flags. */
1510
1511void
1512clear_special_calls (void)
1513{
1514 current_function_calls_alloca = false;
1515 current_function_calls_setjmp = false;
1516}
1517
1518
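/* Worker for remove_useless_stmts. Dispatch on the code of *TP and
   recurse into statement containers. */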
1519static void
1520remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1521{
 1522 tree t = *tp, op;
1523
1524 switch (TREE_CODE (t))
1525 {
1526 case COND_EXPR:
1527 remove_useless_stmts_cond (tp, data);
1528 break;
1529
1530 case TRY_FINALLY_EXPR:
1531 remove_useless_stmts_tf (tp, data);
1532 break;
1533
1534 case TRY_CATCH_EXPR:
1535 remove_useless_stmts_tc (tp, data);
1536 break;
1537
1538 case BIND_EXPR:
1539 remove_useless_stmts_bind (tp, data);
1540 break;
1541
1542 case GOTO_EXPR:
1543 remove_useless_stmts_goto (tp, data);
1544 break;
1545
1546 case LABEL_EXPR:
1547 remove_useless_stmts_label (tp, data);
1548 break;
1549
1550 case RETURN_EXPR:
 1551 fold_stmt (tp);
1552 data->last_goto = NULL;
1553 data->may_branch = true;
1554 break;
1555
1556 case CALL_EXPR:
 1557 fold_stmt (tp);
1558 data->last_goto = NULL;
1559 notice_special_calls (t);
1560 update_call_expr_flags (t);
1561 if (tree_could_throw_p (t))
1562 data->may_throw = true;
1563 break;
1564
1565 case MODIFY_EXPR:
1566 data->last_goto = NULL;
 1567 fold_stmt (tp);
1568 op = get_call_expr_in (t);
1569 if (op)
 1570 {
1571 update_call_expr_flags (op);
1572 notice_special_calls (op);
1573 }
1574 if (tree_could_throw_p (t))
1575 data->may_throw = true;
1576 break;
1577
1578 case STATEMENT_LIST:
1579 {
1580 tree_stmt_iterator i = tsi_start (t);
1581 while (!tsi_end_p (i))
1582 {
1583 t = tsi_stmt (i);
1584 if (IS_EMPTY_STMT (t))
1585 {
1586 tsi_delink (&i);
1587 continue;
1588 }
1589
1590 remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1591
1592 t = tsi_stmt (i);
1593 if (TREE_CODE (t) == STATEMENT_LIST)
1594 {
1595 tsi_link_before (&i, t, TSI_SAME_STMT);
1596 tsi_delink (&i);
1597 }
1598 else
1599 tsi_next (&i);
1600 }
1601 }
1602 break;
1603 case SWITCH_EXPR:
1604 fold_stmt (tp);
1605 data->last_goto = NULL;
1606 break;
1607
1608 default:
1609 data->last_goto = NULL;
1610 break;
1611 }
1612}
1613
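/* Pass entry point: repeatedly walk the current function body removing
   useless statements until no further changes are made. */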
1614static void
1615remove_useless_stmts (void)
1616{
1617 struct rus_data data;
1618
1619 clear_special_calls ();
1620
1621 do
1622 {
1623 memset (&data, 0, sizeof (data));
1624 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1625 }
1626 while (data.repeat);
1627}
1628
1629
1630struct tree_opt_pass pass_remove_useless_stmts =
1631{
1632 "useless", /* name */
1633 NULL, /* gate */
1634 remove_useless_stmts, /* execute */
1635 NULL, /* sub */
1636 NULL, /* next */
1637 0, /* static_pass_number */
1638 0, /* tv_id */
1639 PROP_gimple_any, /* properties_required */
1640 0, /* properties_provided */
1641 0, /* properties_destroyed */
1642 0, /* todo_flags_start */
1643 TODO_dump_func, /* todo_flags_finish */
1644 0 /* letter */
1645};
1646
1647
1648/* Remove obviously useless statements in basic block BB. */
1649
1650static void
1651cfg_remove_useless_stmts_bb (basic_block bb)
1652{
1653 block_stmt_iterator bsi;
1654 tree stmt = NULL_TREE;
1655 tree cond, var = NULL_TREE, val = NULL_TREE;
1656 struct var_ann_d *ann;
1657
1658 /* Check whether we come here from a condition, and if so, get the
1659 condition. */
1660 if (EDGE_COUNT (bb->preds) != 1
1661 || !(EDGE_PRED (bb, 0)->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
1662 return;
1663
 1664 cond = COND_EXPR_COND (last_stmt (EDGE_PRED (bb, 0)->src));
1665
1666 if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1667 {
1668 var = cond;
 1669 val = (EDGE_PRED (bb, 0)->flags & EDGE_FALSE_VALUE
1670 ? boolean_false_node : boolean_true_node);
1671 }
1672 else if (TREE_CODE (cond) == TRUTH_NOT_EXPR
1673 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1674 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL))
1675 {
1676 var = TREE_OPERAND (cond, 0);
 1677 val = (EDGE_PRED (bb, 0)->flags & EDGE_FALSE_VALUE
1678 ? boolean_true_node : boolean_false_node);
1679 }
1680 else
1681 {
 1682 if (EDGE_PRED (bb, 0)->flags & EDGE_FALSE_VALUE)
1683 cond = invert_truthvalue (cond);
1684 if (TREE_CODE (cond) == EQ_EXPR
1685 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1686 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1687 && (TREE_CODE (TREE_OPERAND (cond, 1)) == VAR_DECL
1688 || TREE_CODE (TREE_OPERAND (cond, 1)) == PARM_DECL
1689 || TREE_CONSTANT (TREE_OPERAND (cond, 1))))
1690 {
1691 var = TREE_OPERAND (cond, 0);
1692 val = TREE_OPERAND (cond, 1);
1693 }
1694 else
1695 return;
1696 }
1697
1698 /* Only work for normal local variables. */
1699 ann = var_ann (var);
1700 if (!ann
1701 || ann->may_aliases
1702 || TREE_ADDRESSABLE (var))
1703 return;
1704
1705 if (! TREE_CONSTANT (val))
1706 {
1707 ann = var_ann (val);
1708 if (!ann
1709 || ann->may_aliases
1710 || TREE_ADDRESSABLE (val))
1711 return;
1712 }
1713
1714 /* Ignore floating point variables, since comparison behaves weird for
1715 them. */
1716 if (FLOAT_TYPE_P (TREE_TYPE (var)))
1717 return;
1718
1719 for (bsi = bsi_start (bb); !bsi_end_p (bsi);)
1720 {
1721 stmt = bsi_stmt (bsi);
1722
1723 /* If the THEN/ELSE clause merely assigns a value to a variable/parameter
1724 which is already known to contain that value, then remove the useless
1725 THEN/ELSE clause. */
1726 if (TREE_CODE (stmt) == MODIFY_EXPR
1727 && TREE_OPERAND (stmt, 0) == var
1728 && operand_equal_p (val, TREE_OPERAND (stmt, 1), 0))
1729 {
1730 bsi_remove (&bsi);
1731 continue;
1732 }
1733
1734 /* Invalidate the var if we encounter something that could modify it.
1735 Likewise for the value it was previously set to. Note that we only
1736 consider values that are either a VAR_DECL or PARM_DECL so we
1737 can test for conflict very simply. */
 1738 if (TREE_CODE (stmt) == ASM_EXPR
 1739 || (TREE_CODE (stmt) == MODIFY_EXPR
1740 && (TREE_OPERAND (stmt, 0) == var
1741 || TREE_OPERAND (stmt, 0) == val)))
1742 return;
1743
1744 bsi_next (&bsi);
1745 }
1746}
1747
1748
1749/* A CFG-aware version of remove_useless_stmts. */
1750
1751void
1752cfg_remove_useless_stmts (void)
1753{
1754 basic_block bb;
1755
1756#ifdef ENABLE_CHECKING
1757 verify_flow_info ();
1758#endif
1759
1760 FOR_EACH_BB (bb)
1761 {
1762 cfg_remove_useless_stmts_bb (bb);
1763 }
1764}
1765
1766
1767/* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1768
1769static void
1770remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1771{
1772 tree phi;
1773
1774 /* Since this block is no longer reachable, we can just delete all
1775 of its PHI nodes. */
1776 phi = phi_nodes (bb);
1777 while (phi)
1778 {
 1779 tree next = PHI_CHAIN (phi);
1780 remove_phi_node (phi, NULL_TREE, bb);
1781 phi = next;
1782 }
1783
1784 /* Remove edges to BB's successors. */
1785 while (EDGE_COUNT (bb->succs) > 0)
1786 ssa_remove_edge (EDGE_SUCC (bb, 0));
1787}
1788
1789
1790/* Remove statements of basic block BB. */
1791
1792static void
1793remove_bb (basic_block bb)
1794{
1795 block_stmt_iterator i;
 1796 source_locus loc = 0;
1797
1798 if (dump_file)
1799 {
1800 fprintf (dump_file, "Removing basic block %d\n", bb->index);
1801 if (dump_flags & TDF_DETAILS)
1802 {
1803 dump_bb (bb, dump_file, 0);
1804 fprintf (dump_file, "\n");
1805 }
1806 }
1807
1808 /* Remove all the instructions in the block. */
 1809 for (i = bsi_start (bb); !bsi_end_p (i);)
1810 {
1811 tree stmt = bsi_stmt (i);
1812 if (TREE_CODE (stmt) == LABEL_EXPR
1813 && FORCED_LABEL (LABEL_EXPR_LABEL (stmt)))
1814 {
1815 basic_block new_bb = bb->prev_bb;
1816 block_stmt_iterator new_bsi = bsi_after_labels (new_bb);
1817
1818 bsi_remove (&i);
1819 bsi_insert_after (&new_bsi, stmt, BSI_NEW_STMT);
1820 }
1821 else
1822 {
1823 release_defs (stmt);
 1824
1825 set_bb_for_stmt (stmt, NULL);
1826 bsi_remove (&i);
1827 }
1828
1829 /* Don't warn for removed gotos. Gotos are often removed due to
1830 jump threading, thus resulting in bogus warnings. Not great,
1831 since this way we lose warnings for gotos in the original
1832 program that are indeed unreachable. */
1833 if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
1834#ifdef USE_MAPPED_LOCATION
1835 loc = EXPR_LOCATION (stmt);
1836#else
 1837 loc = EXPR_LOCUS (stmt);
 1838#endif
1839 }
1840
1841 /* If requested, give a warning that the first statement in the
1842 block is unreachable. We walk statements backwards in the
1843 loop above, so the last statement we process is the first statement
1844 in the block. */
1845 if (warn_notreached && loc)
1846#ifdef USE_MAPPED_LOCATION
1847 warning ("%Hwill never be executed", &loc);
1848#else
 1849 warning ("%Hwill never be executed", loc);
 1850#endif
1851
1852 remove_phi_nodes_and_edges_for_unreachable_block (bb);
1853}
1854
1855/* Try to remove superfluous control structures. */
1856
1857static bool
1858cleanup_control_flow (void)
1859{
1860 basic_block bb;
1861 block_stmt_iterator bsi;
1862 bool retval = false;
1863 tree stmt;
1864
1865 FOR_EACH_BB (bb)
1866 {
1867 bsi = bsi_last (bb);
1868
1869 if (bsi_end_p (bsi))
1870 continue;
1871
1872 stmt = bsi_stmt (bsi);
1873 if (TREE_CODE (stmt) == COND_EXPR
1874 || TREE_CODE (stmt) == SWITCH_EXPR)
1875 retval |= cleanup_control_expr_graph (bb, bsi);
1876 }
1877 return retval;
1878}
1879
1880
1881/* Disconnect an unreachable block in the control expression starting
1882 at block BB. */
1883
1884static bool
1885cleanup_control_expr_graph (basic_block bb, block_stmt_iterator bsi)
1886{
1887 edge taken_edge;
1888 bool retval = false;
1889 tree expr = bsi_stmt (bsi), val;
1890
 1891 if (EDGE_COUNT (bb->succs) > 1)
 1892 {
1893 edge e;
1894 edge_iterator ei;
1895
1896 switch (TREE_CODE (expr))
1897 {
1898 case COND_EXPR:
1899 val = COND_EXPR_COND (expr);
1900 break;
1901
1902 case SWITCH_EXPR:
1903 val = SWITCH_COND (expr);
1904 if (TREE_CODE (val) != INTEGER_CST)
1905 return false;
1906 break;
1907
1908 default:
 1909 gcc_unreachable ();
1910 }
1911
1912 taken_edge = find_taken_edge (bb, val);
1913 if (!taken_edge)
1914 return false;
1915
1916 /* Remove all the edges except the one that is always executed. */
 1917 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
 1918 {
1919 if (e != taken_edge)
1920 {
1921 taken_edge->probability += e->probability;
1922 taken_edge->count += e->count;
1923 ssa_remove_edge (e);
1924 retval = true;
1925 }
1926 else
1927 ei_next (&ei);
1928 }
1929 if (taken_edge->probability > REG_BR_PROB_BASE)
1930 taken_edge->probability = REG_BR_PROB_BASE;
1931 }
1932 else
 1933 taken_edge = EDGE_SUCC (bb, 0);
1934
1935 bsi_remove (&bsi);
1936 taken_edge->flags = EDGE_FALLTHRU;
1937
1938 /* We removed some paths from the cfg. */
 1939 free_dominance_info (CDI_DOMINATORS);
1940
1941 return retval;
1942}
1943
1944
1945/* Given a control block BB and a predicate VAL, return the edge that
1946 will be taken out of the block. If VAL does not match a unique
 1947 edge, NULL is returned. */
1948
1949edge
1950find_taken_edge (basic_block bb, tree val)
1951{
1952 tree stmt;
1953
1954 stmt = last_stmt (bb);
1955
1e128c5f
GB
1956 gcc_assert (stmt);
1957 gcc_assert (is_ctrl_stmt (stmt));
6de9cd9a 1958
255cd731 1959 /* If VAL is a predicate of the form N RELOP N, where N is an
6a97296a
DN
1960 SSA_NAME, we can usually determine its truth value. */
1961 if (val && COMPARISON_CLASS_P (val))
1962 val = fold (val);
255cd731 1963
6de9cd9a
DN
1964 /* If VAL is not a constant, we can't determine which edge might
1965 be taken. */
1966 if (val == NULL || !really_constant_p (val))
1967 return NULL;
1968
1969 if (TREE_CODE (stmt) == COND_EXPR)
1970 return find_taken_edge_cond_expr (bb, val);
1971
1972 if (TREE_CODE (stmt) == SWITCH_EXPR)
1973 return find_taken_edge_switch_expr (bb, val);
1974
628f6a4e 1975 return EDGE_SUCC (bb, 0);
6de9cd9a
DN
1976}
1977
1978
1979/* Given a constant value VAL and the entry block BB to a COND_EXPR
1980 statement, determine which of the two edges will be taken out of the
1981 block. Return NULL if either edge may be taken. */
1982
1983static edge
1984find_taken_edge_cond_expr (basic_block bb, tree val)
1985{
1986 edge true_edge, false_edge;
1987
1988 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1989
1990 /* If both edges of the branch lead to the same basic block, it doesn't
1991 matter which edge is taken. */
1992 if (true_edge->dest == false_edge->dest)
1993 return true_edge;
1994
1995 /* Otherwise, try to determine which branch of the if() will be taken.
1996 If VAL is a constant but it can't be reduced to a 0 or a 1, then
1997 we don't really know which edge will be taken at runtime. This
1998 may happen when comparing addresses (e.g., if (&var1 == 4)). */
1999 if (integer_nonzerop (val))
2000 return true_edge;
2001 else if (integer_zerop (val))
2002 return false_edge;
2003 else
2004 return NULL;
2005}
2006
2007
2008/* Given a constant value VAL and the entry block BB to a SWITCH_EXPR
2009 statement, determine which edge will be taken out of the block. Return
2010 NULL if any edge may be taken. */
2011
2012static edge
2013find_taken_edge_switch_expr (basic_block bb, tree val)
2014{
2015 tree switch_expr, taken_case;
2016 basic_block dest_bb;
2017 edge e;
2018
2019 if (TREE_CODE (val) != INTEGER_CST)
2020 return NULL;
2021
2022 switch_expr = last_stmt (bb);
2023 taken_case = find_case_label_for_value (switch_expr, val);
2024 dest_bb = label_to_block (CASE_LABEL (taken_case));
2025
2026 e = find_edge (bb, dest_bb);
1e128c5f 2027 gcc_assert (e);
6de9cd9a
DN
2028 return e;
2029}
2030
2031
f667741c
SB
2032/* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2033 We can make optimal use here of the fact that the case labels are
2034 sorted: We can do a binary search for a case matching VAL. */
6de9cd9a
DN
2035
2036static tree
2037find_case_label_for_value (tree switch_expr, tree val)
2038{
2039 tree vec = SWITCH_LABELS (switch_expr);
f667741c
SB
2040 size_t low, high, n = TREE_VEC_LENGTH (vec);
2041 tree default_case = TREE_VEC_ELT (vec, n - 1);
6de9cd9a 2042
f667741c 2043 for (low = -1, high = n - 1; high - low > 1; )
6de9cd9a 2044 {
f667741c 2045 size_t i = (high + low) / 2;
6de9cd9a 2046 tree t = TREE_VEC_ELT (vec, i);
f667741c
SB
2047 int cmp;
2048
2049 /* Cache the result of comparing CASE_LOW and val. */
2050 cmp = tree_int_cst_compare (CASE_LOW (t), val);
6de9cd9a 2051
f667741c
SB
2052 if (cmp > 0)
2053 high = i;
2054 else
2055 low = i;
2056
2057 if (CASE_HIGH (t) == NULL)
6de9cd9a 2058 {
f667741c
SB
2059 /* A single-valued case label. */
2060 if (cmp == 0)
6de9cd9a
DN
2061 return t;
2062 }
2063 else
2064 {
2065 /* A case range. We can only handle integer ranges. */
f667741c 2066 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
6de9cd9a
DN
2067 return t;
2068 }
2069 }
2070
6de9cd9a
DN
2071 return default_case;
2072}
2073
2074
2075/* If all the PHI nodes in DEST have alternatives for E1 and E2 and
2076 those alternatives are equal in each of the PHI nodes, then return
2077 true, else return false. */
2078
2079static bool
2080phi_alternatives_equal (basic_block dest, edge e1, edge e2)
2081{
2082 tree phi, val1, val2;
2083 int n1, n2;
2084
17192884 2085 for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
6de9cd9a
DN
2086 {
2087 n1 = phi_arg_from_edge (phi, e1);
2088 n2 = phi_arg_from_edge (phi, e2);
2089
1e128c5f
GB
2090 gcc_assert (n1 >= 0);
2091 gcc_assert (n2 >= 0);
6de9cd9a
DN
2092
2093 val1 = PHI_ARG_DEF (phi, n1);
2094 val2 = PHI_ARG_DEF (phi, n2);
2095
2096 if (!operand_equal_p (val1, val2, 0))
2097 return false;
2098 }
2099
2100 return true;
2101}
2102
2103
6de9cd9a
DN
2104/*---------------------------------------------------------------------------
2105 Debugging functions
2106---------------------------------------------------------------------------*/
2107
2108/* Dump tree-specific information of block BB to file OUTF. */
2109
2110void
2111tree_dump_bb (basic_block bb, FILE *outf, int indent)
2112{
2113 dump_generic_bb (outf, bb, indent, TDF_VOPS);
2114}
2115
2116
2117/* Dump a basic block on stderr. */
2118
2119void
2120debug_tree_bb (basic_block bb)
2121{
2122 dump_bb (bb, stderr, 0);
2123}
2124
2125
2126/* Dump basic block with index N on stderr. */
2127
2128basic_block
2129debug_tree_bb_n (int n)
2130{
2131 debug_tree_bb (BASIC_BLOCK (n));
2132 return BASIC_BLOCK (n);
2133}
2134
2135
2136/* Dump the CFG on stderr.
2137
2138 FLAGS are the same used by the tree dumping functions
2139 (see TDF_* in tree.h). */
2140
2141void
2142debug_tree_cfg (int flags)
2143{
2144 dump_tree_cfg (stderr, flags);
2145}
2146
2147
2148/* Dump the program showing basic block boundaries on the given FILE.
2149
2150 FLAGS are the same used by the tree dumping functions (see TDF_* in
2151 tree.h). */
2152
2153void
2154dump_tree_cfg (FILE *file, int flags)
2155{
2156 if (flags & TDF_DETAILS)
2157 {
2158 const char *funcname
673fda6b 2159 = lang_hooks.decl_printable_name (current_function_decl, 2);
6de9cd9a
DN
2160
2161 fputc ('\n', file);
2162 fprintf (file, ";; Function %s\n\n", funcname);
2163 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2164 n_basic_blocks, n_edges, last_basic_block);
2165
2166 brief_dump_cfg (file);
2167 fprintf (file, "\n");
2168 }
2169
2170 if (flags & TDF_STATS)
2171 dump_cfg_stats (file);
2172
2173 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2174}
2175
2176
2177/* Dump CFG statistics on FILE. */
2178
2179void
2180dump_cfg_stats (FILE *file)
2181{
2182 static long max_num_merged_labels = 0;
2183 unsigned long size, total = 0;
f7fda749 2184 int n_edges;
6de9cd9a
DN
2185 basic_block bb;
2186 const char * const fmt_str = "%-30s%-13s%12s\n";
f7fda749 2187 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
6de9cd9a
DN
2188 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2189 const char *funcname
673fda6b 2190 = lang_hooks.decl_printable_name (current_function_decl, 2);
6de9cd9a
DN
2191
2192
2193 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2194
2195 fprintf (file, "---------------------------------------------------------\n");
2196 fprintf (file, fmt_str, "", " Number of ", "Memory");
2197 fprintf (file, fmt_str, "", " instances ", "used ");
2198 fprintf (file, "---------------------------------------------------------\n");
2199
2200 size = n_basic_blocks * sizeof (struct basic_block_def);
2201 total += size;
f7fda749
RH
2202 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2203 SCALE (size), LABEL (size));
6de9cd9a
DN
2204
2205 n_edges = 0;
2206 FOR_EACH_BB (bb)
628f6a4e 2207 n_edges += EDGE_COUNT (bb->succs);
6de9cd9a
DN
2208 size = n_edges * sizeof (struct edge_def);
2209 total += size;
2210 fprintf (file, fmt_str_1, "Edges", n_edges, SCALE (size), LABEL (size));
2211
2212 size = n_basic_blocks * sizeof (struct bb_ann_d);
2213 total += size;
2214 fprintf (file, fmt_str_1, "Basic block annotations", n_basic_blocks,
2215 SCALE (size), LABEL (size));
2216
2217 fprintf (file, "---------------------------------------------------------\n");
2218 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2219 LABEL (total));
2220 fprintf (file, "---------------------------------------------------------\n");
2221 fprintf (file, "\n");
2222
2223 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2224 max_num_merged_labels = cfg_stats.num_merged_labels;
2225
2226 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2227 cfg_stats.num_merged_labels, max_num_merged_labels);
2228
2229 fprintf (file, "\n");
2230}
2231
2232
2233/* Dump CFG statistics on stderr. Keep extern so that it's always
2234 linked in the final executable. */
2235
2236void
2237debug_cfg_stats (void)
2238{
2239 dump_cfg_stats (stderr);
2240}
2241
2242
2243/* Dump the flowgraph to a .vcg FILE. */
2244
2245static void
2246tree_cfg2vcg (FILE *file)
2247{
2248 edge e;
628f6a4e 2249 edge_iterator ei;
6de9cd9a
DN
2250 basic_block bb;
2251 const char *funcname
673fda6b 2252 = lang_hooks.decl_printable_name (current_function_decl, 2);
6de9cd9a
DN
2253
2254 /* Write the file header. */
2255 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2256 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2257 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2258
2259 /* Write blocks and edges. */
628f6a4e 2260 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
6de9cd9a
DN
2261 {
2262 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2263 e->dest->index);
2264
2265 if (e->flags & EDGE_FAKE)
2266 fprintf (file, " linestyle: dotted priority: 10");
2267 else
2268 fprintf (file, " linestyle: solid priority: 100");
2269
2270 fprintf (file, " }\n");
2271 }
2272 fputc ('\n', file);
2273
2274 FOR_EACH_BB (bb)
2275 {
2276 enum tree_code head_code, end_code;
2277 const char *head_name, *end_name;
2278 int head_line = 0;
2279 int end_line = 0;
2280 tree first = first_stmt (bb);
2281 tree last = last_stmt (bb);
2282
2283 if (first)
2284 {
2285 head_code = TREE_CODE (first);
2286 head_name = tree_code_name[head_code];
2287 head_line = get_lineno (first);
2288 }
2289 else
2290 head_name = "no-statement";
2291
2292 if (last)
2293 {
2294 end_code = TREE_CODE (last);
2295 end_name = tree_code_name[end_code];
2296 end_line = get_lineno (last);
2297 }
2298 else
2299 end_name = "no-statement";
2300
2301 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2302 bb->index, bb->index, head_name, head_line, end_name,
2303 end_line);
2304
628f6a4e 2305 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
2306 {
2307 if (e->dest == EXIT_BLOCK_PTR)
2308 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2309 else
2310 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2311
2312 if (e->flags & EDGE_FAKE)
2313 fprintf (file, " priority: 10 linestyle: dotted");
2314 else
2315 fprintf (file, " priority: 100 linestyle: solid");
2316
2317 fprintf (file, " }\n");
2318 }
2319
2320 if (bb->next_bb != EXIT_BLOCK_PTR)
2321 fputc ('\n', file);
2322 }
2323
2324 fputs ("}\n\n", file);
2325}
2326
2327
2328
2329/*---------------------------------------------------------------------------
2330 Miscellaneous helpers
2331---------------------------------------------------------------------------*/
2332
2333/* Return true if T represents a stmt that always transfers control. */
2334
2335bool
2336is_ctrl_stmt (tree t)
2337{
2338 return (TREE_CODE (t) == COND_EXPR
2339 || TREE_CODE (t) == SWITCH_EXPR
2340 || TREE_CODE (t) == GOTO_EXPR
2341 || TREE_CODE (t) == RETURN_EXPR
2342 || TREE_CODE (t) == RESX_EXPR);
2343}
2344
2345
2346/* Return true if T is a statement that may alter the flow of control
2347 (e.g., a call to a non-returning function). */
2348
2349bool
2350is_ctrl_altering_stmt (tree t)
2351{
cd709752 2352 tree call;
6de9cd9a 2353
1e128c5f 2354 gcc_assert (t);
cd709752
RH
2355 call = get_call_expr_in (t);
2356 if (call)
6de9cd9a 2357 {
6de9cd9a
DN
2358 /* A non-pure/const CALL_EXPR alters flow control if the current
2359 function has nonlocal labels. */
cd709752 2360 if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
6de9cd9a
DN
2361 return true;
2362
2363 /* A CALL_EXPR also alters control flow if it does not return. */
2364 if (call_expr_flags (call) & (ECF_NORETURN | ECF_LONGJMP))
2365 return true;
6de9cd9a
DN
2366 }
2367
2368 /* If a statement can throw, it alters control flow. */
2369 return tree_can_throw_internal (t);
2370}
2371
2372
2373/* Return true if T is a computed goto. */
2374
2375bool
2376computed_goto_p (tree t)
2377{
2378 return (TREE_CODE (t) == GOTO_EXPR
2379 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2380}
2381
2382
2383/* Checks whether EXPR is a simple local goto. */
2384
2385bool
2386simple_goto_p (tree expr)
2387{
ab8907ef
RH
2388 return (TREE_CODE (expr) == GOTO_EXPR
2389 && TREE_CODE (GOTO_DESTINATION (expr)) == LABEL_DECL);
6de9cd9a
DN
2390}
2391
2392
2393/* Return true if T should start a new basic block. PREV_T is the
2394 statement preceding T. It is used when T is a label or a case label.
2395 Labels should only start a new basic block if their previous statement
2396 wasn't a label. Otherwise, a sequence of labels would generate
2397 unnecessary basic blocks that only contain a single label. */
2398
2399static inline bool
2400stmt_starts_bb_p (tree t, tree prev_t)
2401{
2402 enum tree_code code;
2403
2404 if (t == NULL_TREE)
2405 return false;
2406
2407 /* LABEL_EXPRs start a new basic block only if the preceding
2408 statement wasn't a label of the same type. This prevents the
2409 creation of consecutive blocks that have nothing but a single
2410 label. */
2411 code = TREE_CODE (t);
2412 if (code == LABEL_EXPR)
2413 {
2414 /* Nonlocal and computed GOTO targets always start a new block. */
2415 if (code == LABEL_EXPR
2416 && (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2417 || FORCED_LABEL (LABEL_EXPR_LABEL (t))))
2418 return true;
2419
2420 if (prev_t && TREE_CODE (prev_t) == code)
2421 {
2422 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2423 return true;
2424
2425 cfg_stats.num_merged_labels++;
2426 return false;
2427 }
2428 else
2429 return true;
2430 }
2431
2432 return false;
2433}
2434
2435
2436/* Return true if T should end a basic block. */
2437
2438bool
2439stmt_ends_bb_p (tree t)
2440{
2441 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2442}
2443
2444
2445/* Add gotos that used to be represented implicitly in the CFG. */
2446
2447void
2448disband_implicit_edges (void)
2449{
2450 basic_block bb;
2451 block_stmt_iterator last;
2452 edge e;
628f6a4e 2453 edge_iterator ei;
eb4e1c01 2454 tree stmt, label;
6de9cd9a
DN
2455
2456 FOR_EACH_BB (bb)
2457 {
2458 last = bsi_last (bb);
2459 stmt = last_stmt (bb);
2460
2461 if (stmt && TREE_CODE (stmt) == COND_EXPR)
2462 {
2463 /* Remove superfluous gotos from COND_EXPR branches. Moved
2464 from cfg_remove_useless_stmts here since it violates the
2465 invariants for tree--cfg correspondence and thus fits better
2466 here where we do it anyway. */
628f6a4e 2467 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
2468 {
2469 if (e->dest != bb->next_bb)
2470 continue;
2471
2472 if (e->flags & EDGE_TRUE_VALUE)
2473 COND_EXPR_THEN (stmt) = build_empty_stmt ();
2474 else if (e->flags & EDGE_FALSE_VALUE)
2475 COND_EXPR_ELSE (stmt) = build_empty_stmt ();
2476 else
1e128c5f 2477 gcc_unreachable ();
6de9cd9a
DN
2478 e->flags |= EDGE_FALLTHRU;
2479 }
2480
2481 continue;
2482 }
2483
2484 if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
2485 {
2486 /* Remove the RETURN_EXPR if we may fall through to the exit
2487 instead. */
628f6a4e
BE
2488 gcc_assert (EDGE_COUNT (bb->succs) == 1);
2489 gcc_assert (EDGE_SUCC (bb, 0)->dest == EXIT_BLOCK_PTR);
6de9cd9a
DN
2490
2491 if (bb->next_bb == EXIT_BLOCK_PTR
2492 && !TREE_OPERAND (stmt, 0))
2493 {
2494 bsi_remove (&last);
628f6a4e 2495 EDGE_SUCC (bb, 0)->flags |= EDGE_FALLTHRU;
6de9cd9a
DN
2496 }
2497 continue;
2498 }
2499
2500 /* There can be no fallthru edge if the last statement is a control
2501 one. */
2502 if (stmt && is_ctrl_stmt (stmt))
2503 continue;
2504
2505 /* Find a fallthru edge and emit the goto if necessary. */
628f6a4e 2506 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
2507 if (e->flags & EDGE_FALLTHRU)
2508 break;
2509
62b857ea 2510 if (!e || e->dest == bb->next_bb)
6de9cd9a
DN
2511 continue;
2512
1e128c5f 2513 gcc_assert (e->dest != EXIT_BLOCK_PTR);
6de9cd9a
DN
2514 label = tree_block_label (e->dest);
2515
62b857ea 2516 stmt = build1 (GOTO_EXPR, void_type_node, label);
9506ac2b
PB
2517#ifdef USE_MAPPED_LOCATION
2518 SET_EXPR_LOCATION (stmt, e->goto_locus);
2519#else
62b857ea 2520 SET_EXPR_LOCUS (stmt, e->goto_locus);
9506ac2b 2521#endif
62b857ea 2522 bsi_insert_after (&last, stmt, BSI_NEW_STMT);
6de9cd9a
DN
2523 e->flags &= ~EDGE_FALLTHRU;
2524 }
2525}
2526
242229bb 2527/* Remove block annotations and other data structures. */
6de9cd9a
DN
2528
2529void
242229bb 2530delete_tree_cfg_annotations (void)
6de9cd9a 2531{
242229bb 2532 basic_block bb;
6de9cd9a
DN
2533 if (n_basic_blocks > 0)
2534 free_blocks_annotations ();
2535
6de9cd9a
DN
2536 label_to_block_map = NULL;
2537 free_rbi_pool ();
242229bb
JH
2538 FOR_EACH_BB (bb)
2539 bb->rbi = NULL;
6de9cd9a
DN
2540}
2541
2542
2543/* Return the first statement in basic block BB. */
2544
2545tree
2546first_stmt (basic_block bb)
2547{
2548 block_stmt_iterator i = bsi_start (bb);
2549 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2550}
2551
2552
2553/* Return the last statement in basic block BB. */
2554
2555tree
2556last_stmt (basic_block bb)
2557{
2558 block_stmt_iterator b = bsi_last (bb);
2559 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2560}
2561
2562
2563/* Return a pointer to the last statement in block BB. */
2564
2565tree *
2566last_stmt_ptr (basic_block bb)
2567{
2568 block_stmt_iterator last = bsi_last (bb);
2569 return !bsi_end_p (last) ? bsi_stmt_ptr (last) : NULL;
2570}
2571
2572
2573/* Return the last statement of an otherwise empty block. Return NULL
2574 if the block is totally empty, or if it contains more than one
2575 statement. */
2576
2577tree
2578last_and_only_stmt (basic_block bb)
2579{
2580 block_stmt_iterator i = bsi_last (bb);
2581 tree last, prev;
2582
2583 if (bsi_end_p (i))
2584 return NULL_TREE;
2585
2586 last = bsi_stmt (i);
2587 bsi_prev (&i);
2588 if (bsi_end_p (i))
2589 return last;
2590
2591 /* Empty statements should no longer appear in the instruction stream.
2592 Everything that might have appeared before should be deleted by
2593 remove_useless_stmts, and the optimizers should just bsi_remove
2594 instead of smashing with build_empty_stmt.
2595
2596 Thus the only thing that should appear here in a block containing
2597 one executable statement is a label. */
2598 prev = bsi_stmt (i);
2599 if (TREE_CODE (prev) == LABEL_EXPR)
2600 return last;
2601 else
2602 return NULL_TREE;
2603}
2604
2605
2606/* Mark BB as the basic block holding statement T. */
2607
2608void
2609set_bb_for_stmt (tree t, basic_block bb)
2610{
30d396e3
ZD
2611 if (TREE_CODE (t) == PHI_NODE)
2612 PHI_BB (t) = bb;
2613 else if (TREE_CODE (t) == STATEMENT_LIST)
6de9cd9a
DN
2614 {
2615 tree_stmt_iterator i;
2616 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2617 set_bb_for_stmt (tsi_stmt (i), bb);
2618 }
2619 else
2620 {
2621 stmt_ann_t ann = get_stmt_ann (t);
2622 ann->bb = bb;
2623
2624 /* If the statement is a label, add the label to block-to-labels map
2625 so that we can speed up edge creation for GOTO_EXPRs. */
2626 if (TREE_CODE (t) == LABEL_EXPR)
2627 {
2628 int uid;
2629
2630 t = LABEL_EXPR_LABEL (t);
2631 uid = LABEL_DECL_UID (t);
2632 if (uid == -1)
2633 {
2634 LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
2635 if (VARRAY_SIZE (label_to_block_map) <= (unsigned) uid)
2636 VARRAY_GROW (label_to_block_map, 3 * uid / 2);
2637 }
2638 else
1e128c5f
GB
2639 /* We're moving an existing label. Make sure that we've
2640 removed it from the old block. */
2641 gcc_assert (!bb || !VARRAY_BB (label_to_block_map, uid));
6de9cd9a
DN
2642 VARRAY_BB (label_to_block_map, uid) = bb;
2643 }
2644 }
2645}
2646
8b11a64c
ZD
2647/* Find the iterator for STMT. */
2648
2649extern block_stmt_iterator
1a1804c2 2650bsi_for_stmt (tree stmt)
8b11a64c
ZD
2651{
2652 block_stmt_iterator bsi;
2653
2654 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2655 if (bsi_stmt (bsi) == stmt)
2656 return bsi;
2657
1e128c5f 2658 gcc_unreachable ();
8b11a64c 2659}
6de9cd9a
DN
2660
2661/* Insert statement (or statement list) T before the statement
2662 pointed-to by iterator I. M specifies how to update iterator I
2663 after insertion (see enum bsi_iterator_update). */
2664
2665void
2666bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2667{
2668 set_bb_for_stmt (t, i->bb);
6de9cd9a 2669 tsi_link_before (&i->tsi, t, m);
68b9f53b 2670 modify_stmt (t);
6de9cd9a
DN
2671}
2672
2673
2674/* Insert statement (or statement list) T after the statement
2675 pointed-to by iterator I. M specifies how to update iterator I
2676 after insertion (see enum bsi_iterator_update). */
2677
2678void
2679bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2680{
2681 set_bb_for_stmt (t, i->bb);
6de9cd9a 2682 tsi_link_after (&i->tsi, t, m);
68b9f53b 2683 modify_stmt (t);
6de9cd9a
DN
2684}
2685
2686
2687/* Remove the statement pointed to by iterator I. The iterator is updated
2688 to the next statement. */
2689
2690void
2691bsi_remove (block_stmt_iterator *i)
2692{
2693 tree t = bsi_stmt (*i);
2694 set_bb_for_stmt (t, NULL);
6de9cd9a
DN
2695 tsi_delink (&i->tsi);
2696}
2697
2698
2699/* Move the statement at FROM so it comes right after the statement at TO. */
2700
2701void
2702bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2703{
2704 tree stmt = bsi_stmt (*from);
2705 bsi_remove (from);
2706 bsi_insert_after (to, stmt, BSI_SAME_STMT);
2707}
2708
2709
2710/* Move the statement at FROM so it comes right before the statement at TO. */
2711
2712void
2713bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2714{
2715 tree stmt = bsi_stmt (*from);
2716 bsi_remove (from);
2717 bsi_insert_before (to, stmt, BSI_SAME_STMT);
2718}
2719
2720
2721/* Move the statement at FROM to the end of basic block BB. */
2722
2723void
2724bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2725{
2726 block_stmt_iterator last = bsi_last (bb);
2727
2728 /* Have to check bsi_end_p because it could be an empty block. */
2729 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2730 bsi_move_before (from, &last);
2731 else
2732 bsi_move_after (from, &last);
2733}
2734
2735
2736/* Replace the contents of the statement pointed to by iterator BSI
2737 with STMT. If PRESERVE_EH_INFO is true, the exception handling
2738 information of the original statement is preserved. */
2739
2740void
2741bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool preserve_eh_info)
2742{
2743 int eh_region;
2744 tree orig_stmt = bsi_stmt (*bsi);
2745
2746 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
2747 set_bb_for_stmt (stmt, bsi->bb);
2748
2749 /* Preserve EH region information from the original statement, if
2750 requested by the caller. */
2751 if (preserve_eh_info)
2752 {
2753 eh_region = lookup_stmt_eh_region (orig_stmt);
2754 if (eh_region >= 0)
2755 add_stmt_to_eh_region (stmt, eh_region);
2756 }
2757
2758 *bsi_stmt_ptr (*bsi) = stmt;
2759 modify_stmt (stmt);
2760}
2761
2762
2763/* Insert the statement pointed-to by BSI into edge E. Every attempt
2764 is made to place the statement in an existing basic block, but
2765 sometimes that isn't possible. When it isn't possible, the edge is
2766 split and the statement is added to the new block.
2767
2768 In all cases, the returned *BSI points to the correct location. The
2769 return value is true if insertion should be done after the location,
82b85a85
ZD
2770 or false if it should be done before the location. If a new basic block
2771 has to be created, it is stored in *NEW_BB. */
6de9cd9a
DN
2772
2773static bool
82b85a85
ZD
2774tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
2775 basic_block *new_bb)
6de9cd9a
DN
2776{
2777 basic_block dest, src;
2778 tree tmp;
2779
2780 dest = e->dest;
2781 restart:
2782
2783 /* If the destination has one predecessor which has no PHI nodes,
2784 insert there. Except for the exit block.
2785
2786 The requirement for no PHI nodes could be relaxed. Basically we
2787 would have to examine the PHIs to prove that none of them used
2788 the value set by the statement we want to insert on E. That
2789 hardly seems worth the effort. */
628f6a4e 2790 if (EDGE_COUNT (dest->preds) == 1
6de9cd9a
DN
2791 && ! phi_nodes (dest)
2792 && dest != EXIT_BLOCK_PTR)
2793 {
2794 *bsi = bsi_start (dest);
2795 if (bsi_end_p (*bsi))
2796 return true;
2797
2798 /* Make sure we insert after any leading labels. */
2799 tmp = bsi_stmt (*bsi);
2800 while (TREE_CODE (tmp) == LABEL_EXPR)
2801 {
2802 bsi_next (bsi);
2803 if (bsi_end_p (*bsi))
2804 break;
2805 tmp = bsi_stmt (*bsi);
2806 }
2807
2808 if (bsi_end_p (*bsi))
2809 {
2810 *bsi = bsi_last (dest);
2811 return true;
2812 }
2813 else
2814 return false;
2815 }
2816
2817 /* If the source has one successor, the edge is not abnormal and
2818 the last statement does not end a basic block, insert there.
2819 Except for the entry block. */
2820 src = e->src;
2821 if ((e->flags & EDGE_ABNORMAL) == 0
628f6a4e 2822 && EDGE_COUNT (src->succs) == 1
6de9cd9a
DN
2823 && src != ENTRY_BLOCK_PTR)
2824 {
2825 *bsi = bsi_last (src);
2826 if (bsi_end_p (*bsi))
2827 return true;
2828
2829 tmp = bsi_stmt (*bsi);
2830 if (!stmt_ends_bb_p (tmp))
2831 return true;
ce068299
JH
2832
2833 /* Insert code just before returning the value. We may need to decompose
2834 the return in case it contains a non-trivial operand. */
2835 if (TREE_CODE (tmp) == RETURN_EXPR)
2836 {
2837 tree op = TREE_OPERAND (tmp, 0);
2838 if (!is_gimple_val (op))
2839 {
1e128c5f 2840 gcc_assert (TREE_CODE (op) == MODIFY_EXPR);
ce068299
JH
2841 bsi_insert_before (bsi, op, BSI_NEW_STMT);
2842 TREE_OPERAND (tmp, 0) = TREE_OPERAND (op, 0);
2843 }
2844 bsi_prev (bsi);
2845 return true;
2846 }
6de9cd9a
DN
2847 }
2848
2849 /* Otherwise, create a new basic block, and split this edge. */
2850 dest = split_edge (e);
82b85a85
ZD
2851 if (new_bb)
2852 *new_bb = dest;
628f6a4e 2853 e = EDGE_PRED (dest, 0);
6de9cd9a
DN
2854 goto restart;
2855}
2856
2857
2858/* This routine will commit all pending edge insertions, creating any new
2859 basic blocks which are necessary.
2860
2861 If specified, NEW_BLOCKS returns a count of the number of new basic
2862 blocks which were created. */
2863
2864void
2865bsi_commit_edge_inserts (int *new_blocks)
2866{
2867 basic_block bb;
2868 edge e;
2869 int blocks;
628f6a4e 2870 edge_iterator ei;
6de9cd9a
DN
2871
2872 blocks = n_basic_blocks;
2873
628f6a4e 2874 bsi_commit_edge_inserts_1 (EDGE_SUCC (ENTRY_BLOCK_PTR, 0));
6de9cd9a
DN
2875
2876 FOR_EACH_BB (bb)
628f6a4e 2877 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
2878 bsi_commit_edge_inserts_1 (e);
2879
2880 if (new_blocks)
2881 *new_blocks = n_basic_blocks - blocks;
2882}
2883
2884
2885/* Commit insertions pending at edge E. */
2886
2887static void
2888bsi_commit_edge_inserts_1 (edge e)
2889{
2890 if (PENDING_STMT (e))
2891 {
2892 block_stmt_iterator bsi;
2893 tree stmt = PENDING_STMT (e);
2894
2895 PENDING_STMT (e) = NULL_TREE;
2896
82b85a85 2897 if (tree_find_edge_insert_loc (e, &bsi, NULL))
6de9cd9a
DN
2898 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
2899 else
2900 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
2901 }
2902}
2903
2904
2905/* Add STMT to the pending list of edge E. No actual insertion is
2906 made until a call to bsi_commit_edge_inserts () is made. */
2907
2908void
2909bsi_insert_on_edge (edge e, tree stmt)
2910{
2911 append_to_statement_list (stmt, &PENDING_STMT (e));
2912}
2913
82b85a85
ZD
2914/* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new block has to
2915 be created, it is returned. */
2916
2917basic_block
2918bsi_insert_on_edge_immediate (edge e, tree stmt)
2919{
2920 block_stmt_iterator bsi;
2921 basic_block new_bb = NULL;
2922
1e128c5f 2923 gcc_assert (!PENDING_STMT (e));
82b85a85
ZD
2924
2925 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
2926 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
2927 else
2928 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
2929
2930 return new_bb;
2931}
6de9cd9a 2932
6de9cd9a
DN
2933/*---------------------------------------------------------------------------
2934 Tree specific functions for CFG manipulation
2935---------------------------------------------------------------------------*/
2936
2937/* Split a (typically critical) edge EDGE_IN. Return the new block.
2938 Abort on abnormal edges. */
2939
2940static basic_block
2941tree_split_edge (edge edge_in)
2942{
2943 basic_block new_bb, after_bb, dest, src;
2944 edge new_edge, e;
2945 tree phi;
2946 int i, num_elem;
628f6a4e 2947 edge_iterator ei;
6de9cd9a
DN
2948
2949 /* Abnormal edges cannot be split. */
1e128c5f 2950 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
6de9cd9a
DN
2951
2952 src = edge_in->src;
2953 dest = edge_in->dest;
2954
2955 /* Place the new block in the block list. Try to keep the new block
2956 near its "logical" location. This is of most help to humans looking
2957 at debugging dumps. */
628f6a4e 2958 FOR_EACH_EDGE (e, ei, dest->preds)
6de9cd9a
DN
2959 if (e->src->next_bb == dest)
2960 break;
2961 if (!e)
2962 after_bb = dest->prev_bb;
2963 else
2964 after_bb = edge_in->src;
2965
2966 new_bb = create_empty_bb (after_bb);
b829f3fa
JH
2967 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2968 new_bb->count = edge_in->count;
6de9cd9a 2969 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
b829f3fa
JH
2970 new_edge->probability = REG_BR_PROB_BASE;
2971 new_edge->count = edge_in->count;
6de9cd9a
DN
2972
2973 /* Find all the PHI arguments on the original edge, and change them to
2974 the new edge. Do it before redirection, so that the argument does not
2975 get removed. */
17192884 2976 for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
6de9cd9a
DN
2977 {
2978 num_elem = PHI_NUM_ARGS (phi);
2979 for (i = 0; i < num_elem; i++)
2980 if (PHI_ARG_EDGE (phi, i) == edge_in)
2981 {
2982 PHI_ARG_EDGE (phi, i) = new_edge;
2983 break;
2984 }
2985 }
2986
1e128c5f
GB
2987 e = redirect_edge_and_branch (edge_in, new_bb);
2988 gcc_assert (e);
2989 gcc_assert (!PENDING_STMT (edge_in));
6de9cd9a
DN
2990
2991 return new_bb;
2992}
2993
2994
2995/* Return true when BB has label LABEL in it. */
2996
2997static bool
2998has_label_p (basic_block bb, tree label)
2999{
3000 block_stmt_iterator bsi;
3001
3002 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3003 {
3004 tree stmt = bsi_stmt (bsi);
3005
3006 if (TREE_CODE (stmt) != LABEL_EXPR)
3007 return false;
3008 if (LABEL_EXPR_LABEL (stmt) == label)
3009 return true;
3010 }
3011 return false;
3012}
3013
3014
3015/* Callback for walk_tree: check that all elements whose address is taken
3016 are properly noticed as such. */
3017
3018static tree
2fbe90f2 3019verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
6de9cd9a
DN
3020{
3021 tree t = *tp, x;
3022
3023 if (TYPE_P (t))
3024 *walk_subtrees = 0;
2fbe90f2 3025
50b04185
RK
3026 /* Check operand N for being valid GIMPLE and give error MSG if not.
3027 We check for constants explicitly since they are not considered
3028 gimple invariants if they overflowed. */
2fbe90f2 3029#define CHECK_OP(N, MSG) \
6615c446
JO
3030 do { if (!CONSTANT_CLASS_P (TREE_OPERAND (t, N)) \
3031 && !is_gimple_val (TREE_OPERAND (t, N))) \
2fbe90f2 3032 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
6de9cd9a
DN
3033
3034 switch (TREE_CODE (t))
3035 {
3036 case SSA_NAME:
3037 if (SSA_NAME_IN_FREE_LIST (t))
3038 {
3039 error ("SSA name in freelist but still referenced");
3040 return *tp;
3041 }
3042 break;
3043
3044 case MODIFY_EXPR:
3045 x = TREE_OPERAND (t, 0);
3046 if (TREE_CODE (x) == BIT_FIELD_REF
3047 && is_gimple_reg (TREE_OPERAND (x, 0)))
3048 {
3049 error ("GIMPLE register modified with BIT_FIELD_REF");
2fbe90f2 3050 return t;
6de9cd9a
DN
3051 }
3052 break;
3053
3054 case ADDR_EXPR:
2fbe90f2
RK
3055 /* Skip any references (they will be checked when we recurse down the
3056 tree) and ensure that any variable used as a prefix is marked
3057 addressable. */
3058 for (x = TREE_OPERAND (t, 0);
3059 (handled_component_p (x)
3060 || TREE_CODE (x) == REALPART_EXPR
3061 || TREE_CODE (x) == IMAGPART_EXPR);
44de5aeb
RK
3062 x = TREE_OPERAND (x, 0))
3063 ;
3064
6de9cd9a
DN
3065 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3066 return NULL;
3067 if (!TREE_ADDRESSABLE (x))
3068 {
3069 error ("address taken, but ADDRESSABLE bit not set");
3070 return x;
3071 }
3072 break;
3073
3074 case COND_EXPR:
3075 x = TREE_OPERAND (t, 0);
3076 if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE)
3077 {
3078 error ("non-boolean used in condition");
3079 return x;
3080 }
3081 break;
3082
3083 case NOP_EXPR:
3084 case CONVERT_EXPR:
3085 case FIX_TRUNC_EXPR:
3086 case FIX_CEIL_EXPR:
3087 case FIX_FLOOR_EXPR:
3088 case FIX_ROUND_EXPR:
3089 case FLOAT_EXPR:
3090 case NEGATE_EXPR:
3091 case ABS_EXPR:
3092 case BIT_NOT_EXPR:
3093 case NON_LVALUE_EXPR:
3094 case TRUTH_NOT_EXPR:
2fbe90f2 3095 CHECK_OP (0, "Invalid operand to unary operator");
6de9cd9a
DN
3096 break;
3097
3098 case REALPART_EXPR:
3099 case IMAGPART_EXPR:
2fbe90f2
RK
3100 case COMPONENT_REF:
3101 case ARRAY_REF:
3102 case ARRAY_RANGE_REF:
3103 case BIT_FIELD_REF:
3104 case VIEW_CONVERT_EXPR:
3105 /* We have a nest of references. Verify that each of the operands
3106 that determine where to reference is either a constant or a variable,
3107 verify that the base is valid, and then show we've already checked
3108 the subtrees. */
3109 while (TREE_CODE (t) == REALPART_EXPR || TREE_CODE (t) == IMAGPART_EXPR
3110 || handled_component_p (t))
3111 {
3112 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3113 CHECK_OP (2, "Invalid COMPONENT_REF offset operator");
3114 else if (TREE_CODE (t) == ARRAY_REF
3115 || TREE_CODE (t) == ARRAY_RANGE_REF)
3116 {
3117 CHECK_OP (1, "Invalid array index.");
3118 if (TREE_OPERAND (t, 2))
3119 CHECK_OP (2, "Invalid array lower bound.");
3120 if (TREE_OPERAND (t, 3))
3121 CHECK_OP (3, "Invalid array stride.");
3122 }
3123 else if (TREE_CODE (t) == BIT_FIELD_REF)
3124 {
3125 CHECK_OP (1, "Invalid operand to BIT_FIELD_REF");
3126 CHECK_OP (2, "Invalid operand to BIT_FIELD_REF");
3127 }
3128
3129 t = TREE_OPERAND (t, 0);
3130 }
3131
6615c446 3132 if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
2fbe90f2
RK
3133 {
3134 error ("Invalid reference prefix.");
3135 return t;
3136 }
3137 *walk_subtrees = 0;
6de9cd9a
DN
3138 break;
3139
3140 case LT_EXPR:
3141 case LE_EXPR:
3142 case GT_EXPR:
3143 case GE_EXPR:
3144 case EQ_EXPR:
3145 case NE_EXPR:
3146 case UNORDERED_EXPR:
3147 case ORDERED_EXPR:
3148 case UNLT_EXPR:
3149 case UNLE_EXPR:
3150 case UNGT_EXPR:
3151 case UNGE_EXPR:
3152 case UNEQ_EXPR:
d1a7edaf 3153 case LTGT_EXPR:
6de9cd9a
DN
3154 case PLUS_EXPR:
3155 case MINUS_EXPR:
3156 case MULT_EXPR:
3157 case TRUNC_DIV_EXPR:
3158 case CEIL_DIV_EXPR:
3159 case FLOOR_DIV_EXPR:
3160 case ROUND_DIV_EXPR:
3161 case TRUNC_MOD_EXPR:
3162 case CEIL_MOD_EXPR:
3163 case FLOOR_MOD_EXPR:
3164 case ROUND_MOD_EXPR:
3165 case RDIV_EXPR:
3166 case EXACT_DIV_EXPR:
3167 case MIN_EXPR:
3168 case MAX_EXPR:
3169 case LSHIFT_EXPR:
3170 case RSHIFT_EXPR:
3171 case LROTATE_EXPR:
3172 case RROTATE_EXPR:
3173 case BIT_IOR_EXPR:
3174 case BIT_XOR_EXPR:
3175 case BIT_AND_EXPR:
50b04185
RK
3176 CHECK_OP (0, "Invalid operand to binary operator");
3177 CHECK_OP (1, "Invalid operand to binary operator");
6de9cd9a
DN
3178 break;
3179
3180 default:
3181 break;
3182 }
3183 return NULL;
2fbe90f2
RK
3184
3185#undef CHECK_OP
6de9cd9a
DN
3186}
3187
3188
3189/* Verify STMT, return true if STMT is not in GIMPLE form.
3190 TODO: Implement type checking. */
3191
3192static bool
1eaba2f2 3193verify_stmt (tree stmt, bool last_in_block)
6de9cd9a
DN
3194{
3195 tree addr;
3196
3197 if (!is_gimple_stmt (stmt))
3198 {
3199 error ("Is not a valid GIMPLE statement.");
1eaba2f2 3200 goto fail;
6de9cd9a
DN
3201 }
3202
3203 addr = walk_tree (&stmt, verify_expr, NULL, NULL);
3204 if (addr)
3205 {
3206 debug_generic_stmt (addr);
3207 return true;
3208 }
3209
1eaba2f2
RH
3210 /* If the statement is marked as part of an EH region, then it is
3211 expected that the statement could throw. Verify that when we
3212 have optimizations that simplify statements such that we prove
3213 that they cannot throw, that we update other data structures
3214 to match. */
3215 if (lookup_stmt_eh_region (stmt) >= 0)
3216 {
3217 if (!tree_could_throw_p (stmt))
3218 {
971801ff 3219 error ("Statement marked for throw, but doesn%'t.");
1eaba2f2
RH
3220 goto fail;
3221 }
3222 if (!last_in_block && tree_can_throw_internal (stmt))
3223 {
3224 error ("Statement marked for throw in middle of block.");
3225 goto fail;
3226 }
3227 }
3228
6de9cd9a 3229 return false;
1eaba2f2
RH
3230
3231 fail:
3232 debug_generic_stmt (stmt);
3233 return true;
6de9cd9a
DN
3234}
3235
3236
3237/* Return true when T can be shared. */
3238
3239static bool
3240tree_node_can_be_shared (tree t)
3241{
6615c446 3242 if (IS_TYPE_OR_DECL_P (t)
6de9cd9a
DN
3243 /* We check for constants explicitly since they are not considered
3244 gimple invariants if they overflowed. */
6615c446 3245 || CONSTANT_CLASS_P (t)
6de9cd9a
DN
3246 || is_gimple_min_invariant (t)
3247 || TREE_CODE (t) == SSA_NAME)
3248 return true;
3249
44de5aeb 3250 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
6de9cd9a
DN
3251 /* We check for constants explicitly since they are not considered
3252 gimple invariants if they overflowed. */
6615c446 3253 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 1))
6de9cd9a
DN
3254 || is_gimple_min_invariant (TREE_OPERAND (t, 1))))
3255 || (TREE_CODE (t) == COMPONENT_REF
3256 || TREE_CODE (t) == REALPART_EXPR
3257 || TREE_CODE (t) == IMAGPART_EXPR))
3258 t = TREE_OPERAND (t, 0);
3259
3260 if (DECL_P (t))
3261 return true;
3262
3263 return false;
3264}
3265
3266
3267/* Called via walk_tree. Verify tree sharing. */
3268
3269static tree
3270verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
3271{
3272 htab_t htab = (htab_t) data;
3273 void **slot;
3274
3275 if (tree_node_can_be_shared (*tp))
3276 {
3277 *walk_subtrees = false;
3278 return NULL;
3279 }
3280
3281 slot = htab_find_slot (htab, *tp, INSERT);
3282 if (*slot)
3283 return *slot;
3284 *slot = *tp;
3285
3286 return NULL;
3287}
3288
3289
3290/* Verify the GIMPLE statement chain. */
3291
3292void
3293verify_stmts (void)
3294{
3295 basic_block bb;
3296 block_stmt_iterator bsi;
3297 bool err = false;
3298 htab_t htab;
3299 tree addr;
3300
3301 timevar_push (TV_TREE_STMT_VERIFY);
3302 htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
3303
3304 FOR_EACH_BB (bb)
3305 {
3306 tree phi;
3307 int i;
3308
17192884 3309 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
6de9cd9a
DN
3310 {
3311 int phi_num_args = PHI_NUM_ARGS (phi);
3312
3313 for (i = 0; i < phi_num_args; i++)
3314 {
3315 tree t = PHI_ARG_DEF (phi, i);
3316 tree addr;
3317
3318 /* Addressable variables do have SSA_NAMEs but they
3319 are not considered gimple values. */
3320 if (TREE_CODE (t) != SSA_NAME
3321 && TREE_CODE (t) != FUNCTION_DECL
3322 && !is_gimple_val (t))
3323 {
3324 error ("PHI def is not a GIMPLE value");
3325 debug_generic_stmt (phi);
3326 debug_generic_stmt (t);
3327 err |= true;
3328 }
3329
3330 addr = walk_tree (&t, verify_expr, NULL, NULL);
3331 if (addr)
3332 {
3333 debug_generic_stmt (addr);
3334 err |= true;
3335 }
3336
3337 addr = walk_tree (&t, verify_node_sharing, htab, NULL);
3338 if (addr)
3339 {
3340 error ("Incorrect sharing of tree nodes");
3341 debug_generic_stmt (phi);
3342 debug_generic_stmt (addr);
3343 err |= true;
3344 }
3345 }
3346 }
3347
1eaba2f2 3348 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
6de9cd9a
DN
3349 {
3350 tree stmt = bsi_stmt (bsi);
1eaba2f2
RH
3351 bsi_next (&bsi);
3352 err |= verify_stmt (stmt, bsi_end_p (bsi));
6de9cd9a
DN
3353 addr = walk_tree (&stmt, verify_node_sharing, htab, NULL);
3354 if (addr)
3355 {
3356 error ("Incorrect sharing of tree nodes");
3357 debug_generic_stmt (stmt);
3358 debug_generic_stmt (addr);
3359 err |= true;
3360 }
3361 }
3362 }
3363
3364 if (err)
3365 internal_error ("verify_stmts failed.");
3366
3367 htab_delete (htab);
3368 timevar_pop (TV_TREE_STMT_VERIFY);
3369}
3370
3371
3372/* Verifies that the flow information is OK. */
3373
3374static int
3375tree_verify_flow_info (void)
3376{
3377 int err = 0;
3378 basic_block bb;
3379 block_stmt_iterator bsi;
3380 tree stmt;
3381 edge e;
628f6a4e 3382 edge_iterator ei;
6de9cd9a
DN
3383
3384 if (ENTRY_BLOCK_PTR->stmt_list)
3385 {
3386 error ("ENTRY_BLOCK has a statement list associated with it\n");
3387 err = 1;
3388 }
3389
3390 if (EXIT_BLOCK_PTR->stmt_list)
3391 {
3392 error ("EXIT_BLOCK has a statement list associated with it\n");
3393 err = 1;
3394 }
3395
628f6a4e 3396 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6de9cd9a
DN
3397 if (e->flags & EDGE_FALLTHRU)
3398 {
3399 error ("Fallthru to exit from bb %d\n", e->src->index);
3400 err = 1;
3401 }
3402
3403 FOR_EACH_BB (bb)
3404 {
3405 bool found_ctrl_stmt = false;
3406
3407 /* Skip labels on the start of basic block. */
3408 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3409 {
3410 if (TREE_CODE (bsi_stmt (bsi)) != LABEL_EXPR)
3411 break;
3412
3413 if (label_to_block (LABEL_EXPR_LABEL (bsi_stmt (bsi))) != bb)
3414 {
77568960 3415 tree stmt = bsi_stmt (bsi);
6de9cd9a 3416 error ("Label %s to block does not match in bb %d\n",
77568960 3417 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
6de9cd9a
DN
3418 bb->index);
3419 err = 1;
3420 }
3421
3422 if (decl_function_context (LABEL_EXPR_LABEL (bsi_stmt (bsi)))
3423 != current_function_decl)
3424 {
77568960 3425 tree stmt = bsi_stmt (bsi);
6de9cd9a 3426 error ("Label %s has incorrect context in bb %d\n",
77568960 3427 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
6de9cd9a
DN
3428 bb->index);
3429 err = 1;
3430 }
3431 }
3432
3433 /* Verify that body of basic block BB is free of control flow. */
3434 for (; !bsi_end_p (bsi); bsi_next (&bsi))
3435 {
3436 tree stmt = bsi_stmt (bsi);
3437
3438 if (found_ctrl_stmt)
3439 {
3440 error ("Control flow in the middle of basic block %d\n",
3441 bb->index);
3442 err = 1;
3443 }
3444
3445 if (stmt_ends_bb_p (stmt))
3446 found_ctrl_stmt = true;
3447
3448 if (TREE_CODE (stmt) == LABEL_EXPR)
3449 {
3450 error ("Label %s in the middle of basic block %d\n",
3451 IDENTIFIER_POINTER (DECL_NAME (stmt)),
3452 bb->index);
3453 err = 1;
3454 }
3455 }
3456 bsi = bsi_last (bb);
3457 if (bsi_end_p (bsi))
3458 continue;
3459
3460 stmt = bsi_stmt (bsi);
3461
3462 if (is_ctrl_stmt (stmt))
3463 {
628f6a4e 3464 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
3465 if (e->flags & EDGE_FALLTHRU)
3466 {
3467 error ("Fallthru edge after a control statement in bb %d \n",
3468 bb->index);
3469 err = 1;
3470 }
3471 }
3472
3473 switch (TREE_CODE (stmt))
3474 {
3475 case COND_EXPR:
3476 {
3477 edge true_edge;
3478 edge false_edge;
3479 if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR
3480 || TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR)
3481 {
3482 error ("Structured COND_EXPR at the end of bb %d\n", bb->index);
3483 err = 1;
3484 }
3485
3486 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
3487
3488 if (!true_edge || !false_edge
3489 || !(true_edge->flags & EDGE_TRUE_VALUE)
3490 || !(false_edge->flags & EDGE_FALSE_VALUE)
3491 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3492 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
628f6a4e 3493 || EDGE_COUNT (bb->succs) >= 3)
6de9cd9a
DN
3494 {
3495 error ("Wrong outgoing edge flags at end of bb %d\n",
3496 bb->index);
3497 err = 1;
3498 }
3499
3500 if (!has_label_p (true_edge->dest,
3501 GOTO_DESTINATION (COND_EXPR_THEN (stmt))))
3502 {
971801ff 3503 error ("%<then%> label does not match edge at end of bb %d\n",
6de9cd9a
DN
3504 bb->index);
3505 err = 1;
3506 }
3507
3508 if (!has_label_p (false_edge->dest,
3509 GOTO_DESTINATION (COND_EXPR_ELSE (stmt))))
3510 {
971801ff 3511 error ("%<else%> label does not match edge at end of bb %d\n",
6de9cd9a
DN
3512 bb->index);
3513 err = 1;
3514 }
3515 }
3516 break;
3517
3518 case GOTO_EXPR:
3519 if (simple_goto_p (stmt))
3520 {
3521 error ("Explicit goto at end of bb %d\n", bb->index);
3522 err = 1;
3523 }
3524 else
3525 {
3526 /* FIXME. We should double check that the labels in the
3527 destination blocks have their address taken. */
628f6a4e 3528 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
3529 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
3530 | EDGE_FALSE_VALUE))
3531 || !(e->flags & EDGE_ABNORMAL))
3532 {
3533 error ("Wrong outgoing edge flags at end of bb %d\n",
3534 bb->index);
3535 err = 1;
3536 }
3537 }
3538 break;
3539
3540 case RETURN_EXPR:
628f6a4e
BE
3541 if (EDGE_COUNT (bb->succs) != 1
3542 || (EDGE_SUCC (bb, 0)->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
6de9cd9a
DN
3543 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3544 {
3545 error ("Wrong outgoing edge flags at end of bb %d\n", bb->index);
3546 err = 1;
3547 }
628f6a4e 3548 if (EDGE_SUCC (bb, 0)->dest != EXIT_BLOCK_PTR)
6de9cd9a
DN
3549 {
3550 error ("Return edge does not point to exit in bb %d\n",
3551 bb->index);
3552 err = 1;
3553 }
3554 break;
3555
3556 case SWITCH_EXPR:
3557 {
7853504d 3558 tree prev;
6de9cd9a
DN
3559 edge e;
3560 size_t i, n;
3561 tree vec;
3562
3563 vec = SWITCH_LABELS (stmt);
3564 n = TREE_VEC_LENGTH (vec);
3565
3566 /* Mark all the destination basic blocks. */
3567 for (i = 0; i < n; ++i)
3568 {
3569 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3570 basic_block label_bb = label_to_block (lab);
3571
1e128c5f 3572 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
6de9cd9a
DN
3573 label_bb->aux = (void *)1;
3574 }
3575
7853504d
SB
3576 /* Verify that the case labels are sorted. */
3577 prev = TREE_VEC_ELT (vec, 0);
3578 for (i = 1; i < n - 1; ++i)
3579 {
3580 tree c = TREE_VEC_ELT (vec, i);
3581 if (! CASE_LOW (c))
3582 {
3583 error ("Found default case not at end of case vector");
3584 err = 1;
3585 continue;
3586 }
3587 if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
3588 {
3589 error ("Case labels not sorted:\n ");
3590 print_generic_expr (stderr, prev, 0);
3591 fprintf (stderr," is greater than ");
3592 print_generic_expr (stderr, c, 0);
3593 fprintf (stderr," but comes before it.\n");
3594 err = 1;
3595 }
3596 prev = c;
3597 }
3598 if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
3599 {
3600 error ("No default case found at end of case vector");
3601 err = 1;
3602 }
3603
628f6a4e 3604 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
3605 {
3606 if (!e->dest->aux)
3607 {
3608 error ("Extra outgoing edge %d->%d\n",
3609 bb->index, e->dest->index);
3610 err = 1;
3611 }
3612 e->dest->aux = (void *)2;
3613 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
3614 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3615 {
3616 error ("Wrong outgoing edge flags at end of bb %d\n",
3617 bb->index);
3618 err = 1;
3619 }
3620 }
3621
3622 /* Check that we have all of them. */
3623 for (i = 0; i < n; ++i)
3624 {
3625 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3626 basic_block label_bb = label_to_block (lab);
3627
3628 if (label_bb->aux != (void *)2)
3629 {
3630 error ("Missing edge %i->%i\n",
3631 bb->index, label_bb->index);
3632 err = 1;
3633 }
3634 }
3635
628f6a4e 3636 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
3637 e->dest->aux = (void *)0;
3638 }
3639
3640 default: ;
3641 }
3642 }
3643
3644 if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY)
3645 verify_dominators (CDI_DOMINATORS);
3646
3647 return err;
3648}
3649
3650
f0b698c1 3651/* Updates phi nodes after creating a forwarder block joined
6de9cd9a
DN
3652 by edge FALLTHRU. */
3653
3654static void
3655tree_make_forwarder_block (edge fallthru)
3656{
3657 edge e;
628f6a4e 3658 edge_iterator ei;
6de9cd9a 3659 basic_block dummy, bb;
17192884 3660 tree phi, new_phi, var, prev, next;
6de9cd9a
DN
3661
3662 dummy = fallthru->src;
3663 bb = fallthru->dest;
3664
628f6a4e 3665 if (EDGE_COUNT (bb->preds) == 1)
6de9cd9a
DN
3666 return;
3667
3668 /* If we redirected a branch we must create new phi nodes at the
3669 start of BB. */
17192884 3670 for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
6de9cd9a
DN
3671 {
3672 var = PHI_RESULT (phi);
3673 new_phi = create_phi_node (var, bb);
3674 SSA_NAME_DEF_STMT (var) = new_phi;
d00ad49b 3675 SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
6de9cd9a
DN
3676 add_phi_arg (&new_phi, PHI_RESULT (phi), fallthru);
3677 }
3678
17192884
SB
3679 /* Ensure that the PHI node chain is in the same order. */
3680 prev = NULL;
3681 for (phi = phi_nodes (bb); phi; phi = next)
3682 {
3683 next = PHI_CHAIN (phi);
3684 PHI_CHAIN (phi) = prev;
3685 prev = phi;
3686 }
3687 set_phi_nodes (bb, prev);
6de9cd9a
DN
3688
3689 /* Add the arguments we have stored on edges. */
628f6a4e 3690 FOR_EACH_EDGE (e, ei, bb->preds)
6de9cd9a
DN
3691 {
3692 if (e == fallthru)
3693 continue;
3694
3695 for (phi = phi_nodes (bb), var = PENDING_STMT (e);
3696 phi;
17192884 3697 phi = PHI_CHAIN (phi), var = TREE_CHAIN (var))
6de9cd9a
DN
3698 add_phi_arg (&phi, TREE_VALUE (var), e);
3699
3700 PENDING_STMT (e) = NULL;
3701 }
3702}
3703
3704
3705/* Return true if basic block BB does nothing except pass control
3706 flow to another block, and we can safely insert a label at
10a52335
KH
3707 the start of the successor block.
3708
3709 As a precondition, we require that BB be not equal to
3710 ENTRY_BLOCK_PTR. */
6de9cd9a
DN
3711
3712static bool
3713tree_forwarder_block_p (basic_block bb)
3714{
3715 block_stmt_iterator bsi;
3716 edge e;
628f6a4e 3717 edge_iterator ei;
6de9cd9a 3718
10a52335 3719 /* BB must have a single outgoing edge. */
628f6a4e 3720 if (EDGE_COUNT (bb->succs) != 1
10a52335
KH
3721 /* BB can not have any PHI nodes. This could potentially be
3722 relaxed early in compilation if we re-rewrote the variables
3723 appearing in any PHI nodes in forwarder blocks. */
3724 || phi_nodes (bb)
3725 /* BB may not be a predecessor of EXIT_BLOCK_PTR. */
628f6a4e 3726 || EDGE_SUCC (bb, 0)->dest == EXIT_BLOCK_PTR
10a52335
KH
3727 /* BB may not have an abnormal outgoing edge. */
3728 || (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL))
78b6731d 3729 return false;
6de9cd9a 3730
10a52335
KH
3731#if ENABLE_CHECKING
3732 gcc_assert (bb != ENTRY_BLOCK_PTR);
3733#endif
3734
6de9cd9a 3735 /* Successors of the entry block are not forwarders. */
628f6a4e 3736 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
6de9cd9a 3737 if (e->dest == bb)
78b6731d 3738 return false;
6de9cd9a 3739
6de9cd9a
DN
3740 /* Now walk through the statements. We can ignore labels, anything else
3741 means this is not a forwarder block. */
3742 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3743 {
3744 tree stmt = bsi_stmt (bsi);
3745
3746 switch (TREE_CODE (stmt))
3747 {
3748 case LABEL_EXPR:
3749 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
3750 return false;
3751 break;
3752
3753 default:
6de9cd9a
DN
3754 return false;
3755 }
3756 }
3757
3758 return true;
3759}
3760
072269d8
KH
3761/* Thread jumps from BB. */
3762
3763static bool
3764thread_jumps_from_bb (basic_block bb)
3765{
3766 edge_iterator ei;
3767 edge e;
3768 bool retval = false;
3769
3770 /* Examine each of our block's successors to see if it is
3771 forwardable. */
3772 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3773 {
3774 int freq;
3775 gcov_type count;
3776 edge last, old;
3777 basic_block dest, tmp, curr, old_dest;
3778 tree phi;
3779 int arg;
3780
3781 /* If the edge is abnormal or its destination is not
3782 forwardable, then there's nothing to do. */
3783 if ((e->flags & EDGE_ABNORMAL)
3784 || !bb_ann (e->dest)->forwardable)
3785 {
3786 ei_next (&ei);
3787 continue;
3788 }
3789
072269d8
KH
3790 /* Now walk through as many forwarder blocks as possible to find
3791 the ultimate destination we want to thread our jump to. */
3792 last = EDGE_SUCC (e->dest, 0);
3793 bb_ann (e->dest)->forwardable = 0;
3794 for (dest = EDGE_SUCC (e->dest, 0)->dest;
3795 bb_ann (dest)->forwardable;
3796 last = EDGE_SUCC (dest, 0),
3797 dest = EDGE_SUCC (dest, 0)->dest)
3798 bb_ann (dest)->forwardable = 0;
3799
3800 /* Reset the forwardable marks to 1. */
3801 for (tmp = e->dest;
3802 tmp != dest;
3803 tmp = EDGE_SUCC (tmp, 0)->dest)
3804 bb_ann (tmp)->forwardable = 1;
3805
3806 if (dest == e->dest)
3807 {
3808 ei_next (&ei);
3809 continue;
3810 }
3811
3812 old = find_edge (bb, dest);
3813 if (old)
3814 {
3815 /* If there already is an edge, check whether the values in
3816 phi nodes differ. */
3817 if (!phi_alternatives_equal (dest, last, old))
3818 {
3819 /* The previous block is a forwarder. Redirect our jump
3820 to that target instead since we know it has no PHI
3821 nodes that will need updating. */
3822 dest = last->src;
3823
3824 /* That might mean that no forwarding at all is
3825 possible. */
3826 if (dest == e->dest)
3827 {
3828 ei_next (&ei);
3829 continue;
3830 }
3831
3832 old = find_edge (bb, dest);
3833 }
3834 }
3835
3836 /* Perform the redirection. */
3837 retval = true;
385efa80
KH
3838 count = e->count;
3839 freq = EDGE_FREQUENCY (e);
072269d8
KH
3840 old_dest = e->dest;
3841 e = redirect_edge_and_branch (e, dest);
3842
3843 /* Update the profile. */
3844 if (profile_status != PROFILE_ABSENT)
3845 for (curr = old_dest;
3846 curr != dest;
3847 curr = EDGE_SUCC (curr, 0)->dest)
3848 {
3849 curr->frequency -= freq;
3850 if (curr->frequency < 0)
3851 curr->frequency = 0;
3852 curr->count -= count;
3853 if (curr->count < 0)
3854 curr->count = 0;
3855 EDGE_SUCC (curr, 0)->count -= count;
3856 if (EDGE_SUCC (curr, 0)->count < 0)
3857 EDGE_SUCC (curr, 0)->count = 0;
3858 }
3859
3860 if (!old)
3861 {
3862 /* Update PHI nodes. We know that the new argument should
3863 have the same value as the argument associated with LAST.
3864 Otherwise we would have changed our target block
3865 above. */
3866 for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
3867 {
3868 arg = phi_arg_from_edge (phi, last);
3869 gcc_assert (arg >= 0);
3870 add_phi_arg (&phi, PHI_ARG_DEF (phi, arg), e);
3871 }
3872 }
3873
3874 /* Remove the unreachable blocks (observe that if all blocks
3875 were reachable before, only those in the path we threaded
3876 over that did not have any predecessor outside of the path
3877 become unreachable). */
3878 for (; old_dest != dest; old_dest = tmp)
3879 {
3880 tmp = EDGE_SUCC (old_dest, 0)->dest;
3881
3882 if (EDGE_COUNT (old_dest->preds) > 0)
3883 break;
3884
3885 delete_basic_block (old_dest);
3886 }
3887
3888 /* Update the dominators. */
3889 if (dom_info_available_p (CDI_DOMINATORS))
3890 {
3891 /* If the dominator of the destination was in the
3892 path, set its dominator to the start of the
3893 redirected edge. */
3894 if (get_immediate_dominator (CDI_DOMINATORS, old_dest) == NULL)
3895 set_immediate_dominator (CDI_DOMINATORS, old_dest, bb);
3896
3897 /* Now proceed as if we had forwarded over just one edge at a
3898 time. The algorithm for forwarding edge S --> A over
3899 edge A --> B is then
3900
3901 if (idom (B) == A
3902 && !dominated_by (S, B))
3903 idom (B) = idom (A);
3904 recount_idom (A); */
3905
3906 for (; old_dest != dest; old_dest = tmp)
3907 {
3908 basic_block dom;
3909
3910 tmp = EDGE_SUCC (old_dest, 0)->dest;
3911
3912 if (get_immediate_dominator (CDI_DOMINATORS, tmp) == old_dest
3913 && !dominated_by_p (CDI_DOMINATORS, bb, tmp))
3914 {
3915 dom = get_immediate_dominator (CDI_DOMINATORS, old_dest);
3916 set_immediate_dominator (CDI_DOMINATORS, tmp, dom);
3917 }
3918
3919 dom = recount_dominator (CDI_DOMINATORS, old_dest);
3920 set_immediate_dominator (CDI_DOMINATORS, old_dest, dom);
3921 }
3922 }
3923 }
3924
3925 return retval;
3926}
3927
6de9cd9a
DN
3928
3929/* Thread jumps over empty statements.
3930
3931 This code should _not_ thread over obviously equivalent conditions
2abacef0
KH
3932 as that requires nontrivial updates to the SSA graph.
3933
3934 As a precondition, we require that all basic blocks be reachable.
3935 That is, there should be no opportunities left for
3936 delete_unreachable_blocks. */
072269d8 3937
6de9cd9a
DN
3938static bool
3939thread_jumps (void)
3940{
072269d8 3941 basic_block bb;
6de9cd9a 3942 bool retval = false;
afc3f396 3943 basic_block *worklist = xmalloc (sizeof (basic_block) * last_basic_block);
08445125 3944 unsigned int size = 0;
6de9cd9a
DN
3945
3946 FOR_EACH_BB (bb)
08445125
KH
3947 {
3948 bb_ann (bb)->forwardable = tree_forwarder_block_p (bb);
3949 bb->flags &= ~BB_VISITED;
3950 }
6de9cd9a 3951
af88d4ec
KH
3952 /* We pretend to have ENTRY_BLOCK_PTR in WORKLIST. This way,
3953 ENTRY_BLOCK_PTR will never be entered into WORKLIST. */
3954 ENTRY_BLOCK_PTR->flags |= BB_VISITED;
3955
afc3f396
KH
3956 /* Initialize WORKLIST by putting non-forwarder blocks that
3957 immediately precede forwarder blocks because those are the ones
3958 that we know we can thread jumps from. We use BB_VISITED to
3959 indicate whether a given basic block is in WORKLIST or not,
3960 thereby avoiding duplicates in WORKLIST. */
08445125 3961 FOR_EACH_BB (bb)
6de9cd9a 3962 {
08445125
KH
3963 edge_iterator ei;
3964 edge e;
3965
3966 /* We are not interested in finding non-forwarder blocks
3967 directly. We want to find non-forwarder blocks as
3968 predecessors of a forwarder block. */
3969 if (!bb_ann (bb)->forwardable)
3970 continue;
3971
3972 /* Now we know BB is a forwarder block. Visit each of its
3973 incoming edges and add to WORKLIST all non-forwarder blocks
3974 among BB's predecessors. */
3975 FOR_EACH_EDGE (e, ei, bb->preds)
6de9cd9a 3976 {
0b371c72
KH
3977 /* We don't want to put a duplicate into WORKLIST. */
3978 if ((e->src->flags & BB_VISITED) == 0
3979 /* We are not interested in threading jumps from a forwarder
3980 block. */
3981 && !bb_ann (e->src)->forwardable)
08445125
KH
3982 {
3983 e->src->flags |= BB_VISITED;
afc3f396 3984 worklist[size] = e->src;
08445125
KH
3985 size++;
3986 }
3987 }
3988 }
6de9cd9a 3989
08445125
KH
3990 /* Now let's drain WORKLIST. */
3991 while (size > 0)
3992 {
3993 size--;
afc3f396 3994 bb = worklist[size];
08445125
KH
3995
3996 /* BB is no longer in WORKLIST, so clear BB_VISITED. */
3997 bb->flags &= ~BB_VISITED;
3998
3999 if (thread_jumps_from_bb (bb))
4000 {
4001 retval = true;
4002
4003 if (tree_forwarder_block_p (bb))
628f6a4e 4004 {
08445125
KH
4005 edge_iterator ej;
4006 edge f;
8a807136 4007
08445125
KH
4008 bb_ann (bb)->forwardable = true;
4009
4010 /* Attempts to thread through BB may have been blocked
4011 because BB was not a forwarder block before. Now
4012 that BB is a forwarder block, we should revisit BB's
4013 predecessors. */
4014 FOR_EACH_EDGE (f, ej, bb->preds)
4015 {
0b371c72
KH
4016 /* We don't want to put a duplicate into WORKLIST. */
4017 if ((f->src->flags & BB_VISITED) == 0
4018 /* We are not interested in threading jumps from a
4019 forwarder block. */
4020 && !bb_ann (f->src)->forwardable)
08445125
KH
4021 {
4022 f->src->flags |= BB_VISITED;
afc3f396 4023 worklist[size] = f->src;
08445125
KH
4024 size++;
4025 }
4026 }
8a807136 4027 }
e61d7b78 4028 }
6de9cd9a 4029 }
08445125 4030
af88d4ec
KH
4031 ENTRY_BLOCK_PTR->flags &= ~BB_VISITED;
4032
08445125 4033 free (worklist);
6de9cd9a
DN
4034
4035 return retval;
4036}
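/* A compiled-out sketch of the worklist discipline used by thread_jumps
   above: a block enters WORKLIST at most once because BB_VISITED doubles
   as the "already queued" marker, and the flag is cleared again when the
   block is popped.  The helper name is hypothetical.  */
#if 0
static void
worklist_push_once (basic_block *worklist, unsigned int *size, basic_block b)
{
  if ((b->flags & BB_VISITED) == 0
      /* Forwarder blocks are never interesting sources of jump threads.  */
      && !bb_ann (b)->forwardable)
    {
      b->flags |= BB_VISITED;
      worklist[*size] = b;
      (*size)++;
    }
}
#endif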
4037
4038
4039/* Return a non-special label at the head of basic block BB.
4040 Create one if it doesn't exist. */
4041
d7621d3c 4042tree
6de9cd9a
DN
4043tree_block_label (basic_block bb)
4044{
4045 block_stmt_iterator i, s = bsi_start (bb);
4046 bool first = true;
4047 tree label, stmt;
4048
4049 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
4050 {
4051 stmt = bsi_stmt (i);
4052 if (TREE_CODE (stmt) != LABEL_EXPR)
4053 break;
4054 label = LABEL_EXPR_LABEL (stmt);
4055 if (!DECL_NONLOCAL (label))
4056 {
4057 if (!first)
4058 bsi_move_before (&i, &s);
4059 return label;
4060 }
4061 }
4062
4063 label = create_artificial_label ();
4064 stmt = build1 (LABEL_EXPR, void_type_node, label);
4065 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
4066 return label;
4067}
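/* A compiled-out sketch of the typical consumer of tree_block_label, in the
   style of the edge redirection code below: fetch (or create) the label of
   the new destination block and point the relevant GOTO_DESTINATION at it.
   COND_STMT and DEST are hypothetical.  */
#if 0
static void
retarget_then_arm (tree cond_stmt, basic_block dest)
{
  tree label = tree_block_label (dest);
  GOTO_DESTINATION (COND_EXPR_THEN (cond_stmt)) = label;
}
#endif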
4068
4069
4070/* Attempt to perform edge redirection by replacing a possibly complex
4071 jump instruction by a goto or by removing the jump completely.
4072 This can apply only if all edges now point to the same block. The
4073 parameters and return values are equivalent to
4074 redirect_edge_and_branch. */
4075
4076static edge
4077tree_try_redirect_by_replacing_jump (edge e, basic_block target)
4078{
4079 basic_block src = e->src;
4080 edge tmp;
4081 block_stmt_iterator b;
4082 tree stmt;
628f6a4e 4083 edge_iterator ei;
6de9cd9a
DN
4084
4085 /* Verify that all targets will be TARGET. */
628f6a4e 4086 FOR_EACH_EDGE (tmp, ei, src->succs)
6de9cd9a
DN
4087 if (tmp->dest != target && tmp != e)
4088 break;
4089
4090 if (tmp)
4091 return NULL;
4092
4093 b = bsi_last (src);
4094 if (bsi_end_p (b))
4095 return NULL;
4096 stmt = bsi_stmt (b);
4097
4098 if (TREE_CODE (stmt) == COND_EXPR
4099 || TREE_CODE (stmt) == SWITCH_EXPR)
4100 {
4101 bsi_remove (&b);
4102 e = ssa_redirect_edge (e, target);
4103 e->flags = EDGE_FALLTHRU;
4104 return e;
4105 }
4106
4107 return NULL;
4108}
4109
4110
4111/* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4112 edge representing the redirected branch. */
4113
4114static edge
4115tree_redirect_edge_and_branch (edge e, basic_block dest)
4116{
4117 basic_block bb = e->src;
4118 block_stmt_iterator bsi;
4119 edge ret;
4120 tree label, stmt;
4121
4122 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4123 return NULL;
4124
4125 if (e->src != ENTRY_BLOCK_PTR
4126 && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
4127 return ret;
4128
4129 if (e->dest == dest)
4130 return NULL;
4131
4132 label = tree_block_label (dest);
4133
4134 bsi = bsi_last (bb);
4135 stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
4136
4137 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
4138 {
4139 case COND_EXPR:
4140 stmt = (e->flags & EDGE_TRUE_VALUE
4141 ? COND_EXPR_THEN (stmt)
4142 : COND_EXPR_ELSE (stmt));
4143 GOTO_DESTINATION (stmt) = label;
4144 break;
4145
4146 case GOTO_EXPR:
4147 /* No non-abnormal edges should lead from a non-simple goto, and
4148 simple ones should be represented implicitly. */
1e128c5f 4149 gcc_unreachable ();
6de9cd9a
DN
4150
4151 case SWITCH_EXPR:
4152 {
4153 tree vec = SWITCH_LABELS (stmt);
4154 size_t i, n = TREE_VEC_LENGTH (vec);
4155
4156 for (i = 0; i < n; ++i)
4157 {
4158 tree elt = TREE_VEC_ELT (vec, i);
4159 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4160 CASE_LABEL (elt) = label;
4161 }
4162 }
4163 break;
4164
4165 case RETURN_EXPR:
4166 bsi_remove (&bsi);
4167 e->flags |= EDGE_FALLTHRU;
4168 break;
4169
4170 default:
4171 /* Otherwise it must be a fallthru edge, and we don't need to
4172 do anything besides redirecting it. */
1e128c5f 4173 gcc_assert (e->flags & EDGE_FALLTHRU);
6de9cd9a
DN
4174 break;
4175 }
4176
4177 /* Update/insert PHI nodes as necessary. */
4178
4179 /* Now update the edges in the CFG. */
4180 e = ssa_redirect_edge (e, dest);
4181
4182 return e;
4183}
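/* A compiled-out sketch of how callers reach the routine above: through the
   generic hook redirect_edge_and_branch, as the jump threader and
   tree_duplicate_sese_region below do.  A NULL return means the edge could
   not be redirected and must be handled by the caller; E and NEW_DEST are
   hypothetical.  */
#if 0
static bool
try_redirect (edge e, basic_block new_dest)
{
  edge redirected = redirect_edge_and_branch (e, new_dest);
  return redirected != NULL;
}
#endif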
4184
4185
4186/* Simple wrapper, as we can always redirect fallthru edges. */
4187
4188static basic_block
4189tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4190{
4191 e = tree_redirect_edge_and_branch (e, dest);
1e128c5f 4192 gcc_assert (e);
6de9cd9a
DN
4193
4194 return NULL;
4195}
4196
4197
4198/* Splits basic block BB after statement STMT (but at least after the
4199 labels). If STMT is NULL, BB is split just after the labels. */
4200
4201static basic_block
4202tree_split_block (basic_block bb, void *stmt)
4203{
4204 block_stmt_iterator bsi, bsi_tgt;
4205 tree act;
4206 basic_block new_bb;
4207 edge e;
628f6a4e 4208 edge_iterator ei;
6de9cd9a
DN
4209
4210 new_bb = create_empty_bb (bb);
4211
4212 /* Redirect the outgoing edges. */
628f6a4e
BE
4213 new_bb->succs = bb->succs;
4214 bb->succs = NULL;
4215 FOR_EACH_EDGE (e, ei, new_bb->succs)
6de9cd9a
DN
4216 e->src = new_bb;
4217
4218 if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
4219 stmt = NULL;
4220
4221 /* Move everything from BSI to the new basic block. */
4222 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4223 {
4224 act = bsi_stmt (bsi);
4225 if (TREE_CODE (act) == LABEL_EXPR)
4226 continue;
4227
4228 if (!stmt)
4229 break;
4230
4231 if (stmt == act)
4232 {
4233 bsi_next (&bsi);
4234 break;
4235 }
4236 }
4237
4238 bsi_tgt = bsi_start (new_bb);
4239 while (!bsi_end_p (bsi))
4240 {
4241 act = bsi_stmt (bsi);
4242 bsi_remove (&bsi);
4243 bsi_insert_after (&bsi_tgt, act, BSI_NEW_STMT);
4244 }
4245
4246 return new_bb;
4247}
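/* A compiled-out sketch of the usual way the splitter above is reached:
   through the CFG hook split_block, as tree_flow_call_edges_add does
   further below.  The statements following STMT move to a new block
   reachable through the returned edge; BB and STMT are hypothetical.  */
#if 0
static void
add_fake_exit_edge_after (basic_block bb, tree stmt)
{
  edge e = split_block (bb, stmt);
  if (e)
    /* BB now ends right after STMT, so a fake edge to the exit block
       models the possibility that STMT does not return.  */
    make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
}
#endif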
4248
4249
4250/* Moves basic block BB after block AFTER. */
4251
4252static bool
4253tree_move_block_after (basic_block bb, basic_block after)
4254{
4255 if (bb->prev_bb == after)
4256 return true;
4257
4258 unlink_block (bb);
4259 link_block (bb, after);
4260
4261 return true;
4262}
4263
4264
4265/* Return true if basic_block can be duplicated. */
4266
4267static bool
4268tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
4269{
4270 return true;
4271}
4272
6de9cd9a
DN
4273/* Create a duplicate of the basic block BB. NOTE: This does not
4274 preserve SSA form. */
4275
4276static basic_block
4277tree_duplicate_bb (basic_block bb)
4278{
4279 basic_block new_bb;
4280 block_stmt_iterator bsi, bsi_tgt;
4c124b4c
AM
4281 tree phi, val;
4282 ssa_op_iter op_iter;
6de9cd9a
DN
4283
4284 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
b0382c67 4285
42759f1e
ZD
4286 /* First copy the phi nodes. We do not copy phi node arguments here,
4287 since the edges are not ready yet. Keep the chain of phi nodes in
4288 the same order, so that we can add them later. */
b0382c67
ZD
4289 for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
4290 {
4291 mark_for_rewrite (PHI_RESULT (phi));
42759f1e 4292 create_phi_node (PHI_RESULT (phi), new_bb);
b0382c67 4293 }
42759f1e 4294 set_phi_nodes (new_bb, nreverse (phi_nodes (new_bb)));
b0382c67 4295
6de9cd9a
DN
4296 bsi_tgt = bsi_start (new_bb);
4297 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4298 {
4299 tree stmt = bsi_stmt (bsi);
5f240ec4 4300 tree copy;
6de9cd9a
DN
4301
4302 if (TREE_CODE (stmt) == LABEL_EXPR)
4303 continue;
4304
b0382c67
ZD
4305 /* Record the definitions. */
4306 get_stmt_operands (stmt);
4307
4c124b4c
AM
4308 FOR_EACH_SSA_TREE_OPERAND (val, stmt, op_iter, SSA_OP_ALL_DEFS)
4309 mark_for_rewrite (val);
b0382c67 4310
5f240ec4
ZD
4311 copy = unshare_expr (stmt);
4312
4313 /* Copy also the virtual operands. */
4314 get_stmt_ann (copy);
4315 copy_virtual_operands (copy, stmt);
4316
4317 bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
6de9cd9a
DN
4318 }
4319
4320 return new_bb;
4321}
4322
42759f1e
ZD
4323/* Basic block BB_COPY was created by code duplication. Add phi node
4324 arguments for edges going out of BB_COPY. The blocks that were
4325 duplicated have rbi->duplicated set to one. */
4326
4327void
4328add_phi_args_after_copy_bb (basic_block bb_copy)
4329{
4330 basic_block bb, dest;
4331 edge e, e_copy;
628f6a4e 4332 edge_iterator ei;
42759f1e
ZD
4333 tree phi, phi_copy, phi_next, def;
4334
4335 bb = bb_copy->rbi->original;
4336
628f6a4e 4337 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
42759f1e
ZD
4338 {
4339 if (!phi_nodes (e_copy->dest))
4340 continue;
4341
4342 if (e_copy->dest->rbi->duplicated)
4343 dest = e_copy->dest->rbi->original;
4344 else
4345 dest = e_copy->dest;
4346
4347 e = find_edge (bb, dest);
4348 if (!e)
4349 {
4350 /* During loop unrolling the target of the latch edge is copied.
4351 In this case we are not looking for edge to dest, but to
4352 duplicated block whose original was dest. */
628f6a4e 4353 FOR_EACH_EDGE (e, ei, bb->succs)
42759f1e
ZD
4354 if (e->dest->rbi->duplicated
4355 && e->dest->rbi->original == dest)
4356 break;
4357
4358 gcc_assert (e != NULL);
4359 }
4360
4361 for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
4362 phi;
4363 phi = phi_next, phi_copy = TREE_CHAIN (phi_copy))
4364 {
4365 phi_next = TREE_CHAIN (phi);
4366
4367 gcc_assert (PHI_RESULT (phi) == PHI_RESULT (phi_copy));
4368 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4369 add_phi_arg (&phi_copy, def, e_copy);
4370 }
4371 }
4372}
4373
4374/* Blocks in REGION_COPY array of length N_REGION were created by
4375 duplication of basic blocks. Add phi node arguments for edges
4376 going from these blocks. */
4377
4378void
4379add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
4380{
4381 unsigned i;
4382
4383 for (i = 0; i < n_region; i++)
4384 region_copy[i]->rbi->duplicated = 1;
4385
4386 for (i = 0; i < n_region; i++)
4387 add_phi_args_after_copy_bb (region_copy[i]);
4388
4389 for (i = 0; i < n_region; i++)
4390 region_copy[i]->rbi->duplicated = 0;
4391}
4392
4393/* Maps the old ssa name FROM_NAME to TO_NAME. */
4394
4395struct ssa_name_map_entry
4396{
4397 tree from_name;
4398 tree to_name;
4399};
4400
4401/* Hash function for ssa_name_map_entry. */
4402
4403static hashval_t
4404ssa_name_map_entry_hash (const void *entry)
4405{
4406 const struct ssa_name_map_entry *en = entry;
4407 return SSA_NAME_VERSION (en->from_name);
4408}
4409
4410/* Equality function for ssa_name_map_entry. */
4411
4412static int
4413ssa_name_map_entry_eq (const void *in_table, const void *ssa_name)
4414{
4415 const struct ssa_name_map_entry *en = in_table;
4416
4417 return en->from_name == ssa_name;
4418}
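/* A compiled-out sketch of how the map filled in by allocate_ssa_names below
   is meant to be consulted: hash on the SSA version number, compare on the
   SSA_NAME pointer, exactly as the rewrite_to_new_ssa_names_* routines do.
   MAP and NAME are hypothetical.  */
#if 0
static tree
lookup_new_ssa_name (htab_t map, tree name)
{
  struct ssa_name_map_entry *entry
    = htab_find_with_hash (map, name, SSA_NAME_VERSION (name));
  return entry ? entry->to_name : NULL_TREE;
}
#endif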
4419
4420/* Allocate duplicates of ssa names in list DEFINITIONS and store the mapping
4421 to MAP. */
4422
4423void
4424allocate_ssa_names (bitmap definitions, htab_t *map)
4425{
4426 tree name;
4427 struct ssa_name_map_entry *entry;
4428 PTR *slot;
4429 unsigned ver;
87c476a2 4430 bitmap_iterator bi;
42759f1e
ZD
4431
4432 if (!*map)
4433 *map = htab_create (10, ssa_name_map_entry_hash,
4434 ssa_name_map_entry_eq, free);
87c476a2 4435 EXECUTE_IF_SET_IN_BITMAP (definitions, 0, ver, bi)
42759f1e
ZD
4436 {
4437 name = ssa_name (ver);
4438 slot = htab_find_slot_with_hash (*map, name, SSA_NAME_VERSION (name),
4439 INSERT);
4440 if (*slot)
4441 entry = *slot;
4442 else
4443 {
4444 entry = xmalloc (sizeof (struct ssa_name_map_entry));
4445 entry->from_name = name;
4446 *slot = entry;
4447 }
4448 entry->to_name = duplicate_ssa_name (name, SSA_NAME_DEF_STMT (name));
87c476a2 4449 }
42759f1e
ZD
4450}
4451
4452/* Rewrite the definition DEF in statement STMT to new ssa name as specified
4453 by the mapping MAP. */
4454
4455static void
4456rewrite_to_new_ssa_names_def (def_operand_p def, tree stmt, htab_t map)
4457{
4458 tree name = DEF_FROM_PTR (def);
4459 struct ssa_name_map_entry *entry;
4460
4461 gcc_assert (TREE_CODE (name) == SSA_NAME);
4462
4463 entry = htab_find_with_hash (map, name, SSA_NAME_VERSION (name));
4464 if (!entry)
4465 return;
4466
4467 SET_DEF (def, entry->to_name);
4468 SSA_NAME_DEF_STMT (entry->to_name) = stmt;
4469}
4470
4471/* Rewrite the USE to new ssa name as specified by the mapping MAP. */
4472
4473static void
4474rewrite_to_new_ssa_names_use (use_operand_p use, htab_t map)
4475{
4476 tree name = USE_FROM_PTR (use);
4477 struct ssa_name_map_entry *entry;
4478
4479 if (TREE_CODE (name) != SSA_NAME)
4480 return;
4481
4482 entry = htab_find_with_hash (map, name, SSA_NAME_VERSION (name));
4483 if (!entry)
4484 return;
4485
4486 SET_USE (use, entry->to_name);
4487}
4488
4489/* Rewrite the ssa names in basic block BB to new ones as specified by the
4490 mapping MAP. */
4491
4492void
4493rewrite_to_new_ssa_names_bb (basic_block bb, htab_t map)
4494{
4495 unsigned i;
4496 edge e;
628f6a4e 4497 edge_iterator ei;
42759f1e
ZD
4498 tree phi, stmt;
4499 block_stmt_iterator bsi;
4500 use_optype uses;
4501 vuse_optype vuses;
4502 def_optype defs;
4503 v_may_def_optype v_may_defs;
4504 v_must_def_optype v_must_defs;
4505 stmt_ann_t ann;
4506
628f6a4e 4507 FOR_EACH_EDGE (e, ei, bb->preds)
42759f1e
ZD
4508 if (e->flags & EDGE_ABNORMAL)
4509 break;
4510
4511 for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
4512 {
4513 rewrite_to_new_ssa_names_def (PHI_RESULT_PTR (phi), phi, map);
4514 if (e)
4515 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)) = 1;
4516 }
4517
4518 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4519 {
4520 stmt = bsi_stmt (bsi);
4521 get_stmt_operands (stmt);
4522 ann = stmt_ann (stmt);
4523
4524 uses = USE_OPS (ann);
4525 for (i = 0; i < NUM_USES (uses); i++)
4526 rewrite_to_new_ssa_names_use (USE_OP_PTR (uses, i), map);
4527
4528 defs = DEF_OPS (ann);
4529 for (i = 0; i < NUM_DEFS (defs); i++)
4530 rewrite_to_new_ssa_names_def (DEF_OP_PTR (defs, i), stmt, map);
4531
4532 vuses = VUSE_OPS (ann);
4533 for (i = 0; i < NUM_VUSES (vuses); i++)
4534 rewrite_to_new_ssa_names_use (VUSE_OP_PTR (vuses, i), map);
4535
4536 v_may_defs = V_MAY_DEF_OPS (ann);
4537 for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
4538 {
4539 rewrite_to_new_ssa_names_use
4540 (V_MAY_DEF_OP_PTR (v_may_defs, i), map);
4541 rewrite_to_new_ssa_names_def
4542 (V_MAY_DEF_RESULT_PTR (v_may_defs, i), stmt, map);
4543 }
4544
4545 v_must_defs = V_MUST_DEF_OPS (ann);
4546 for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
52328bf6
DB
4547 {
4548 rewrite_to_new_ssa_names_def
4549 (V_MUST_DEF_RESULT_PTR (v_must_defs, i), stmt, map);
4550 rewrite_to_new_ssa_names_use
4551 (V_MUST_DEF_KILL_PTR (v_must_defs, i), map);
4552 }
42759f1e
ZD
4553 }
4554
628f6a4e 4555 FOR_EACH_EDGE (e, ei, bb->succs)
42759f1e
ZD
4556 for (phi = phi_nodes (e->dest); phi; phi = TREE_CHAIN (phi))
4557 {
4558 rewrite_to_new_ssa_names_use
4559 (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), map);
4560
4561 if (e->flags & EDGE_ABNORMAL)
4562 {
4563 tree op = PHI_ARG_DEF_FROM_EDGE (phi, e);
4564 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op) = 1;
4565 }
4566 }
4567}
4568
4569/* Rewrite the ssa names in N_REGION blocks REGION to the new ones as specified
4570 by the mapping MAP. */
4571
4572void
4573rewrite_to_new_ssa_names (basic_block *region, unsigned n_region, htab_t map)
4574{
4575 unsigned r;
4576
4577 for (r = 0; r < n_region; r++)
4578 rewrite_to_new_ssa_names_bb (region[r], map);
4579}
4580
4581/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
4582 important exit edge EXIT. By important we mean that no SSA name defined
4583 inside region is live over the other exit edges of the region. All entry
4584 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
4585 to the duplicate of the region. SSA form, dominance and loop information
4586 is updated. The new basic blocks are stored to REGION_COPY in the same
4587 order as they had in REGION, provided that REGION_COPY is not NULL.
4588 The function returns false if it is unable to copy the region,
4589 true otherwise. */
4590
4591bool
4592tree_duplicate_sese_region (edge entry, edge exit,
4593 basic_block *region, unsigned n_region,
4594 basic_block *region_copy)
4595{
4596 unsigned i, n_doms, ver;
4597 bool free_region_copy = false, copying_header = false;
4598 struct loop *loop = entry->dest->loop_father;
4599 edge exit_copy;
4600 bitmap definitions;
4601 tree phi, var;
4602 basic_block *doms;
4603 htab_t ssa_name_map = NULL;
4604 edge redirected;
87c476a2 4605 bitmap_iterator bi;
42759f1e
ZD
4606
4607 if (!can_copy_bbs_p (region, n_region))
4608 return false;
4609
4610 /* Some sanity checking. Note that we do not check for all possible
4611 misuses of the functions. I.e. if you ask to copy something weird,
4612 it will work, but the state of structures probably will not be
4613 correct. */
4614
4615 for (i = 0; i < n_region; i++)
4616 {
4617 /* We do not handle subloops, i.e. all the blocks must belong to the
4618 same loop. */
4619 if (region[i]->loop_father != loop)
4620 return false;
4621
4622 if (region[i] != entry->dest
4623 && region[i] == loop->header)
4624 return false;
4625 }
4626
4627 loop->copy = loop;
4628
4629 /* In case the function is used for loop header copying (which is the primary
4630 use), ensure that EXIT and its copy will be new latch and entry edges. */
4631 if (loop->header == entry->dest)
4632 {
4633 copying_header = true;
4634 loop->copy = loop->outer;
4635
4636 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
4637 return false;
4638
4639 for (i = 0; i < n_region; i++)
4640 if (region[i] != exit->src
4641 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
4642 return false;
4643 }
4644
4645 if (!region_copy)
4646 {
4647 region_copy = xmalloc (sizeof (basic_block) * n_region);
4648 free_region_copy = true;
4649 }
4650
4651 gcc_assert (!any_marked_for_rewrite_p ());
4652
4653 /* Record blocks outside the region that are duplicated by something
4654 inside. */
4655 doms = xmalloc (sizeof (basic_block) * n_basic_blocks);
4656 n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);
4657
4658 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop);
4659 definitions = marked_ssa_names ();
4660
4661 if (copying_header)
4662 {
4663 loop->header = exit->dest;
4664 loop->latch = exit->src;
4665 }
4666
4667 /* Redirect the entry and add the phi node arguments. */
4668 redirected = redirect_edge_and_branch (entry, entry->dest->rbi->copy);
4669 gcc_assert (redirected != NULL);
4670 for (phi = phi_nodes (entry->dest), var = PENDING_STMT (entry);
4671 phi;
4672 phi = TREE_CHAIN (phi), var = TREE_CHAIN (var))
4673 add_phi_arg (&phi, TREE_VALUE (var), entry);
4674 PENDING_STMT (entry) = NULL;
4675
4676 /* Concerning updating of dominators: We must recount dominators
4677 for entry block and its copy. Anything that is outside of the region, but
4678 was dominated by something inside needs recounting as well. */
4679 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
4680 doms[n_doms++] = entry->dest->rbi->original;
4681 iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
4682 free (doms);
4683
4684 /* Add the other phi node arguments. */
4685 add_phi_args_after_copy (region_copy, n_region);
4686
4687 /* Add phi nodes for definitions at exit. TODO -- once we have immediate
4688 uses, it should be possible to emit phi nodes just for definitions that
4689 are used outside region. */
87c476a2 4690 EXECUTE_IF_SET_IN_BITMAP (definitions, 0, ver, bi)
42759f1e
ZD
4691 {
4692 tree name = ssa_name (ver);
4693
4694 phi = create_phi_node (name, exit->dest);
4695 add_phi_arg (&phi, name, exit);
4696 add_phi_arg (&phi, name, exit_copy);
4697
4698 SSA_NAME_DEF_STMT (name) = phi;
87c476a2 4699 }
42759f1e
ZD
4700
4701 /* And create new definitions inside region and its copy. TODO -- once we
4702 have immediate uses, it might be better to leave definitions in region
4703 unchanged, create new ssa names for phi nodes on exit, and rewrite
4704 the uses, to avoid changing the copied region. */
4705 allocate_ssa_names (definitions, &ssa_name_map);
4706 rewrite_to_new_ssa_names (region, n_region, ssa_name_map);
4707 allocate_ssa_names (definitions, &ssa_name_map);
4708 rewrite_to_new_ssa_names (region_copy, n_region, ssa_name_map);
4709 htab_delete (ssa_name_map);
4710
4711 if (free_region_copy)
4712 free (region_copy);
4713
4714 unmark_all_for_rewrite ();
4715 BITMAP_XFREE (definitions);
4716
4717 return true;
4718}
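/* A compiled-out sketch of the call shape for the primary use described
   above (loop header copying).  How ENTRY, EXIT and the region are chosen
   here is hypothetical; only the argument layout of
   tree_duplicate_sese_region is taken from the definition above.  */
#if 0
static bool
copy_loop_header_sketch (struct loop *loop, edge entry, edge exit)
{
  basic_block region[1];

  /* The region to duplicate is just the loop header, entered by ENTRY and
     left by EXIT.  Passing NULL lets the function allocate REGION_COPY.  */
  region[0] = loop->header;
  return tree_duplicate_sese_region (entry, exit, region, 1, NULL);
}
#endif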
6de9cd9a
DN
4719
4720/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h) */
4721
4722void
4723dump_function_to_file (tree fn, FILE *file, int flags)
4724{
4725 tree arg, vars, var;
4726 bool ignore_topmost_bind = false, any_var = false;
4727 basic_block bb;
4728 tree chain;
4729
673fda6b 4730 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
6de9cd9a
DN
4731
4732 arg = DECL_ARGUMENTS (fn);
4733 while (arg)
4734 {
4735 print_generic_expr (file, arg, dump_flags);
4736 if (TREE_CHAIN (arg))
4737 fprintf (file, ", ");
4738 arg = TREE_CHAIN (arg);
4739 }
4740 fprintf (file, ")\n");
4741
4742 if (flags & TDF_RAW)
4743 {
4744 dump_node (fn, TDF_SLIM | flags, file);
4745 return;
4746 }
4747
4748 /* When GIMPLE is lowered, the variables are no longer available in
4749 BIND_EXPRs, so display them separately. */
4750 if (cfun && cfun->unexpanded_var_list)
4751 {
4752 ignore_topmost_bind = true;
4753
4754 fprintf (file, "{\n");
4755 for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
4756 {
4757 var = TREE_VALUE (vars);
4758
4759 print_generic_decl (file, var, flags);
4760 fprintf (file, "\n");
4761
4762 any_var = true;
4763 }
4764 }
4765
4766 if (basic_block_info)
4767 {
4768 /* Make a CFG based dump. */
878f99d2 4769 check_bb_profile (ENTRY_BLOCK_PTR, file);
6de9cd9a
DN
4770 if (!ignore_topmost_bind)
4771 fprintf (file, "{\n");
4772
4773 if (any_var && n_basic_blocks)
4774 fprintf (file, "\n");
4775
4776 FOR_EACH_BB (bb)
4777 dump_generic_bb (file, bb, 2, flags);
4778
4779 fprintf (file, "}\n");
878f99d2 4780 check_bb_profile (EXIT_BLOCK_PTR, file);
6de9cd9a
DN
4781 }
4782 else
4783 {
4784 int indent;
4785
4786 /* Make a tree based dump. */
4787 chain = DECL_SAVED_TREE (fn);
4788
4789 if (TREE_CODE (chain) == BIND_EXPR)
4790 {
4791 if (ignore_topmost_bind)
4792 {
4793 chain = BIND_EXPR_BODY (chain);
4794 indent = 2;
4795 }
4796 else
4797 indent = 0;
4798 }
4799 else
4800 {
4801 if (!ignore_topmost_bind)
4802 fprintf (file, "{\n");
4803 indent = 2;
4804 }
4805
4806 if (any_var)
4807 fprintf (file, "\n");
4808
4809 print_generic_stmt_indented (file, chain, flags, indent);
4810 if (ignore_topmost_bind)
4811 fprintf (file, "}\n");
4812 }
4813
4814 fprintf (file, "\n\n");
4815}
4816
4817
4818/* Pretty print of the loops intermediate representation. */
4819static void print_loop (FILE *, struct loop *, int);
628f6a4e
BE
4820static void print_pred_bbs (FILE *, basic_block bb);
4821static void print_succ_bbs (FILE *, basic_block bb);
6de9cd9a
DN
4822
4823
4824/* Print the indexes of the predecessors of basic block BB on FILE. */
4825
4826static void
628f6a4e 4827print_pred_bbs (FILE *file, basic_block bb)
6de9cd9a 4828{
628f6a4e
BE
4829 edge e;
4830 edge_iterator ei;
4831
4832 FOR_EACH_EDGE (e, ei, bb->preds)
6de9cd9a 4833 fprintf (file, "bb_%d", e->src->index);
6de9cd9a
DN
4834}
4835
4836
4837/* Print the indexes of the successors of basic block BB on FILE. */
4838
4839static void
628f6a4e 4840print_succ_bbs (FILE *file, basic_block bb)
6de9cd9a 4841{
628f6a4e
BE
4842 edge e;
4843 edge_iterator ei;
4844
4845 FOR_EACH_EDGE (e, ei, bb->succs)
4846 fprintf (file, "bb_%d", e->dest->index);
6de9cd9a
DN
4847}
4848
4849
4850/* Pretty print LOOP on FILE, indented INDENT spaces. */
4851
4852static void
4853print_loop (FILE *file, struct loop *loop, int indent)
4854{
4855 char *s_indent;
4856 basic_block bb;
4857
4858 if (loop == NULL)
4859 return;
4860
4861 s_indent = (char *) alloca ((size_t) indent + 1);
4862 memset ((void *) s_indent, ' ', (size_t) indent);
4863 s_indent[indent] = '\0';
4864
4865 /* Print the loop's header. */
4866 fprintf (file, "%sloop_%d\n", s_indent, loop->num);
4867
4868 /* Print the loop's body. */
4869 fprintf (file, "%s{\n", s_indent);
4870 FOR_EACH_BB (bb)
4871 if (bb->loop_father == loop)
4872 {
4873 /* Print the basic_block's header. */
4874 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
628f6a4e 4875 print_pred_bbs (file, bb);
6de9cd9a 4876 fprintf (file, "}, succs = {");
628f6a4e 4877 print_succ_bbs (file, bb);
6de9cd9a
DN
4878 fprintf (file, "})\n");
4879
4880 /* Print the basic_block's body. */
4881 fprintf (file, "%s {\n", s_indent);
4882 tree_dump_bb (bb, file, indent + 4);
4883 fprintf (file, "%s }\n", s_indent);
4884 }
4885
4886 print_loop (file, loop->inner, indent + 2);
4887 fprintf (file, "%s}\n", s_indent);
4888 print_loop (file, loop->next, indent);
4889}
4890
4891
4892/* Pretty print on FILE the loop structure of the current function,
4893 starting from the loop that contains the entry basic block. */
4894
4895void
4896print_loop_ir (FILE *file)
4897{
4898 basic_block bb;
4899
4900 bb = BASIC_BLOCK (0);
4901 if (bb && bb->loop_father)
4902 print_loop (file, bb->loop_father, 0);
4903}
4904
4905
4906/* Debugging loops structure at tree level. */
4907
4908void
4909debug_loop_ir (void)
4910{
4911 print_loop_ir (stderr);
4912}
4913
4914
4915/* Return true if BB ends with a call, possibly followed by some
4916 instructions that must stay with the call. Return false,
4917 otherwise. */
4918
4919static bool
4920tree_block_ends_with_call_p (basic_block bb)
4921{
4922 block_stmt_iterator bsi = bsi_last (bb);
cd709752 4923 return get_call_expr_in (bsi_stmt (bsi)) != NULL;
6de9cd9a
DN
4924}
4925
4926
4927/* Return true if BB ends with a conditional branch. Return false,
4928 otherwise. */
4929
4930static bool
4931tree_block_ends_with_condjump_p (basic_block bb)
4932{
4933 tree stmt = tsi_stmt (bsi_last (bb).tsi);
4934 return (TREE_CODE (stmt) == COND_EXPR);
4935}
4936
4937
4938/* Return true if we need to add fake edge to exit at statement T.
4939 Helper function for tree_flow_call_edges_add. */
4940
4941static bool
4942need_fake_edge_p (tree t)
4943{
cd709752 4944 tree call;
6de9cd9a
DN
4945
4946 /* NORETURN and LONGJMP calls already have an edge to exit.
4947 CONST, PURE and ALWAYS_RETURN calls do not need one.
4948 We don't currently check for CONST and PURE here, although
4949 it would be a good idea, because those attributes are
4950 figured out from the RTL in mark_constant_function, and
4951 the counter incrementation code from -fprofile-arcs
4952 leads to different results from -fbranch-probabilities. */
cd709752
RH
4953 call = get_call_expr_in (t);
4954 if (call
4955 && !(call_expr_flags (call) &
4956 (ECF_NORETURN | ECF_LONGJMP | ECF_ALWAYS_RETURN)))
6de9cd9a
DN
4957 return true;
4958
4959 if (TREE_CODE (t) == ASM_EXPR
4960 && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
4961 return true;
4962
4963 return false;
4964}
4965
4966
4967/* Add fake edges to the function exit for any non constant and non
4968 noreturn calls, volatile inline assembly in the bitmap of blocks
4969 specified by BLOCKS or to the whole CFG if BLOCKS is zero. Return
4970 the number of blocks that were split.
4971
4972 The goal is to expose cases in which entering a basic block does
4973 not imply that all subsequent instructions must be executed. */
4974
4975static int
4976tree_flow_call_edges_add (sbitmap blocks)
4977{
4978 int i;
4979 int blocks_split = 0;
4980 int last_bb = last_basic_block;
4981 bool check_last_block = false;
4982
4983 if (n_basic_blocks == 0)
4984 return 0;
4985
4986 if (! blocks)
4987 check_last_block = true;
4988 else
4989 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
4990
4991 /* In the last basic block, before epilogue generation, there will be
4992 a fallthru edge to EXIT. Special care is required if the last insn
4993 of the last basic block is a call because make_edge folds duplicate
4994 edges, which would result in the fallthru edge also being marked
4995 fake, which would result in the fallthru edge being removed by
4996 remove_fake_edges, which would result in an invalid CFG.
4997
4998 Moreover, we can't elide the outgoing fake edge, since the block
4999 profiler needs to take this into account in order to solve the minimal
5000 spanning tree in the case that the call doesn't return.
5001
5002 Handle this by adding a dummy instruction in a new last basic block. */
5003 if (check_last_block)
5004 {
628f6a4e 5005 edge_iterator ei;
6de9cd9a
DN
5006 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
5007 block_stmt_iterator bsi = bsi_last (bb);
5008 tree t = NULL_TREE;
5009 if (!bsi_end_p (bsi))
5010 t = bsi_stmt (bsi);
5011
5012 if (need_fake_edge_p (t))
5013 {
5014 edge e;
5015
628f6a4e 5016 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
5017 if (e->dest == EXIT_BLOCK_PTR)
5018 {
5019 bsi_insert_on_edge (e, build_empty_stmt ());
5020 bsi_commit_edge_inserts ((int *)NULL);
5021 break;
5022 }
5023 }
5024 }
5025
5026 /* Now add fake edges to the function exit for any non constant
5027 calls since there is no way that we can determine if they will
5028 return or not... */
5029 for (i = 0; i < last_bb; i++)
5030 {
5031 basic_block bb = BASIC_BLOCK (i);
5032 block_stmt_iterator bsi;
5033 tree stmt, last_stmt;
5034
5035 if (!bb)
5036 continue;
5037
5038 if (blocks && !TEST_BIT (blocks, i))
5039 continue;
5040
5041 bsi = bsi_last (bb);
5042 if (!bsi_end_p (bsi))
5043 {
5044 last_stmt = bsi_stmt (bsi);
5045 do
5046 {
5047 stmt = bsi_stmt (bsi);
5048 if (need_fake_edge_p (stmt))
5049 {
5050 edge e;
5051 /* The handling above of the final block before the
5052 epilogue should be enough to verify that there is
5053 no edge to the exit block in CFG already.
5054 Calling make_edge in such case would cause us to
5055 mark that edge as fake and remove it later. */
5056#ifdef ENABLE_CHECKING
5057 if (stmt == last_stmt)
628f6a4e
BE
5058 {
5059 edge_iterator ei;
5060 FOR_EACH_EDGE (e, ei, bb->succs)
5061 gcc_assert (e->dest != EXIT_BLOCK_PTR);
5062 }
6de9cd9a
DN
5063#endif
5064
5065 /* Note that the following may create a new basic block
5066 and renumber the existing basic blocks. */
5067 if (stmt != last_stmt)
5068 {
5069 e = split_block (bb, stmt);
5070 if (e)
5071 blocks_split++;
5072 }
5073 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
5074 }
5075 bsi_prev (&bsi);
5076 }
5077 while (!bsi_end_p (bsi));
5078 }
5079 }
5080
5081 if (blocks_split)
5082 verify_flow_info ();
5083
5084 return blocks_split;
5085}
5086
1eaba2f2
RH
5087bool
5088tree_purge_dead_eh_edges (basic_block bb)
5089{
5090 bool changed = false;
628f6a4e
BE
5091 edge e;
5092 edge_iterator ei;
1eaba2f2
RH
5093 tree stmt = last_stmt (bb);
5094
5095 if (stmt && tree_can_throw_internal (stmt))
5096 return false;
5097
628f6a4e 5098 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
1eaba2f2 5099 {
1eaba2f2
RH
5100 if (e->flags & EDGE_EH)
5101 {
5102 ssa_remove_edge (e);
5103 changed = true;
5104 }
628f6a4e
BE
5105 else
5106 ei_next (&ei);
1eaba2f2
RH
5107 }
5108
69d49802
JJ
5109 /* Removal of dead EH edges might change dominators of not
5110 just immediate successors. E.g. when bb1 is changed so that
5111 it no longer can throw and bb1->bb3 and bb1->bb4 are dead
5112 eh edges purged by this function in:
5113            0
5114           / \
5115          v   v
5116          1-->2
5117         / \  |
5118        v   v |
5119       3-->4  |
5120        \     v
5121         --->5
5122             |
5123             -
5124 idom(bb5) must be recomputed. For now just free the dominance
5125 info. */
5126 if (changed)
5127 free_dominance_info (CDI_DOMINATORS);
5128
1eaba2f2
RH
5129 return changed;
5130}
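/* A compiled-out sketch of the intended use of the routine above: after a
   transformation makes the last statement of BB non-throwing, purge the now
   dead EH edges and, since the dominance info was freed, recompute it if
   the pass still needs it.  BB is hypothetical.  */
#if 0
static void
after_stmt_no_longer_throws (basic_block bb)
{
  if (tree_purge_dead_eh_edges (bb))
    calculate_dominance_info (CDI_DOMINATORS);
}
#endif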
5131
5132bool
5133tree_purge_all_dead_eh_edges (bitmap blocks)
5134{
5135 bool changed = false;
5136 size_t i;
87c476a2 5137 bitmap_iterator bi;
1eaba2f2 5138
87c476a2
ZD
5139 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
5140 {
5141 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
5142 }
1eaba2f2
RH
5143
5144 return changed;
5145}
6de9cd9a
DN
5146
5147struct cfg_hooks tree_cfg_hooks = {
5148 "tree",
5149 tree_verify_flow_info,
5150 tree_dump_bb, /* dump_bb */
5151 create_bb, /* create_basic_block */
5152 tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
5153 tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
5154 remove_bb, /* delete_basic_block */
5155 tree_split_block, /* split_block */
5156 tree_move_block_after, /* move_block_after */
5157 tree_can_merge_blocks_p, /* can_merge_blocks_p */
5158 tree_merge_blocks, /* merge_blocks */
5159 tree_predict_edge, /* predict_edge */
5160 tree_predicted_by_p, /* predicted_by_p */
5161 tree_can_duplicate_bb_p, /* can_duplicate_block_p */
5162 tree_duplicate_bb, /* duplicate_block */
5163 tree_split_edge, /* split_edge */
5164 tree_make_forwarder_block, /* make_forward_block */
5165 NULL, /* tidy_fallthru_edge */
5166 tree_block_ends_with_call_p, /* block_ends_with_call_p */
5167 tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
5168 tree_flow_call_edges_add /* flow_call_edges_add */
5169};
5170
5171
5172/* Split all critical edges. */
5173
5174static void
5175split_critical_edges (void)
5176{
5177 basic_block bb;
5178 edge e;
628f6a4e 5179 edge_iterator ei;
6de9cd9a
DN
5180
5181 FOR_ALL_BB (bb)
5182 {
628f6a4e 5183 FOR_EACH_EDGE (e, ei, bb->succs)
6de9cd9a
DN
5184 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
5185 {
5186 split_edge (e);
5187 }
5188 }
5189}
5190
5191struct tree_opt_pass pass_split_crit_edges =
5192{
5d44aeed 5193 "crited", /* name */
6de9cd9a
DN
5194 NULL, /* gate */
5195 split_critical_edges, /* execute */
5196 NULL, /* sub */
5197 NULL, /* next */
5198 0, /* static_pass_number */
5199 TV_TREE_SPLIT_EDGES, /* tv_id */
5200 PROP_cfg, /* properties required */
5201 PROP_no_crit_edges, /* properties_provided */
5202 0, /* properties_destroyed */
5203 0, /* todo_flags_start */
9f8628ba
PB
5204 TODO_dump_func, /* todo_flags_finish */
5205 0 /* letter */
6de9cd9a 5206};
26277d41
PB
5207
5208\f
5209/* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
5210 a temporary, make sure and register it to be renamed if necessary,
5211 and finally return the temporary. Put the statements to compute
5212 EXP before the current statement in BSI. */
5213
5214tree
5215gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
5216{
5217 tree t, new_stmt, orig_stmt;
5218
5219 if (is_gimple_val (exp))
5220 return exp;
5221
5222 t = make_rename_temp (type, NULL);
5223 new_stmt = build (MODIFY_EXPR, type, t, exp);
5224
5225 orig_stmt = bsi_stmt (*bsi);
5226 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
5227 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
5228
5229 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5230
5231 return t;
5232}
5233
5234/* Build a ternary operation and gimplify it. Emit code before BSI.
5235 Return the gimple_val holding the result. */
5236
5237tree
5238gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
5239 tree type, tree a, tree b, tree c)
5240{
5241 tree ret;
5242
5243 ret = fold (build3 (code, type, a, b, c));
5244 STRIP_NOPS (ret);
5245
5246 return gimplify_val (bsi, type, ret);
5247}
5248
5249/* Build a binary operation and gimplify it. Emit code before BSI.
5250 Return the gimple_val holding the result. */
5251
5252tree
5253gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
5254 tree type, tree a, tree b)
5255{
5256 tree ret;
5257
5258 ret = fold (build2 (code, type, a, b));
5259 STRIP_NOPS (ret);
5260
5261 return gimplify_val (bsi, type, ret);
5262}
5263
5264/* Build a unary operation and gimplify it. Emit code before BSI.
5265 Return the gimple_val holding the result. */
5266
5267tree
5268gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
5269 tree a)
5270{
5271 tree ret;
5272
5273 ret = fold (build1 (code, type, a));
5274 STRIP_NOPS (ret);
5275
5276 return gimplify_val (bsi, type, ret);
5277}
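/* A compiled-out sketch of composing the helpers above: emit GIMPLE code
   computing (A + B) * C before BSI and return the temporary holding the
   result.  BSI, TYPE, A, B and C are hypothetical.  */
#if 0
static tree
emit_a_plus_b_times_c (block_stmt_iterator *bsi, tree type,
		       tree a, tree b, tree c)
{
  tree sum = gimplify_build2 (bsi, PLUS_EXPR, type, a, b);
  return gimplify_build2 (bsi, MULT_EXPR, type, sum, c);
}
#endif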
5278
5279
6de9cd9a
DN
5280\f
5281/* Emit return warnings. */
5282
5283static void
5284execute_warn_function_return (void)
5285{
9506ac2b
PB
5286#ifdef USE_MAPPED_LOCATION
5287 source_location location;
5288#else
6de9cd9a 5289 location_t *locus;
9506ac2b 5290#endif
6de9cd9a
DN
5291 tree last;
5292 edge e;
628f6a4e 5293 edge_iterator ei;
6de9cd9a
DN
5294
5295 if (warn_missing_noreturn
5296 && !TREE_THIS_VOLATILE (cfun->decl)
628f6a4e 5297 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
6de9cd9a 5298 && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
971801ff
JM
5299 warning ("%Jfunction might be possible candidate for "
5300 "attribute %<noreturn%>",
6de9cd9a
DN
5301 cfun->decl);
5302
5303 /* If we have a path to EXIT, then we do return. */
5304 if (TREE_THIS_VOLATILE (cfun->decl)
628f6a4e 5305 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
6de9cd9a 5306 {
9506ac2b
PB
5307#ifdef USE_MAPPED_LOCATION
5308 location = UNKNOWN_LOCATION;
5309#else
6de9cd9a 5310 locus = NULL;
9506ac2b 5311#endif
628f6a4e 5312 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6de9cd9a
DN
5313 {
5314 last = last_stmt (e->src);
5315 if (TREE_CODE (last) == RETURN_EXPR
9506ac2b
PB
5316#ifdef USE_MAPPED_LOCATION
5317 && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
5318#else
6de9cd9a 5319 && (locus = EXPR_LOCUS (last)) != NULL)
9506ac2b 5320#endif
6de9cd9a
DN
5321 break;
5322 }
9506ac2b
PB
5323#ifdef USE_MAPPED_LOCATION
5324 if (location == UNKNOWN_LOCATION)
5325 location = cfun->function_end_locus;
971801ff 5326 warning ("%H%<noreturn%> function does return", &location);
9506ac2b 5327#else
6de9cd9a
DN
5328 if (!locus)
5329 locus = &cfun->function_end_locus;
971801ff 5330 warning ("%H%<noreturn%> function does return", locus);
9506ac2b 5331#endif
6de9cd9a
DN
5332 }
5333
5334 /* If we see "return;" in some basic block, then we do reach the end
5335 without returning a value. */
5336 else if (warn_return_type
628f6a4e 5337 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
6de9cd9a
DN
5338 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
5339 {
628f6a4e 5340 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6de9cd9a
DN
5341 {
5342 tree last = last_stmt (e->src);
5343 if (TREE_CODE (last) == RETURN_EXPR
5344 && TREE_OPERAND (last, 0) == NULL)
5345 {
9506ac2b
PB
5346#ifdef USE_MAPPED_LOCATION
5347 location = EXPR_LOCATION (last);
5348 if (location == UNKNOWN_LOCATION)
5349 location = cfun->function_end_locus;
5350 warning ("%Hcontrol reaches end of non-void function", &location);
5351#else
6de9cd9a
DN
5352 locus = EXPR_LOCUS (last);
5353 if (!locus)
5354 locus = &cfun->function_end_locus;
5355 warning ("%Hcontrol reaches end of non-void function", locus);
9506ac2b 5356#endif
6de9cd9a
DN
5357 break;
5358 }
5359 }
5360 }
5361}
5362
5363
5364/* Given a basic block B which ends with a conditional and has
5365 precisely two successors, determine which of the edges is taken if
5366 the conditional is true and which is taken if the conditional is
5367 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
5368
5369void
5370extract_true_false_edges_from_block (basic_block b,
5371 edge *true_edge,
5372 edge *false_edge)
5373{
628f6a4e 5374 edge e = EDGE_SUCC (b, 0);
6de9cd9a
DN
5375
5376 if (e->flags & EDGE_TRUE_VALUE)
5377 {
5378 *true_edge = e;
628f6a4e 5379 *false_edge = EDGE_SUCC (b, 1);
6de9cd9a
DN
5380 }
5381 else
5382 {
5383 *false_edge = e;
628f6a4e 5384 *true_edge = EDGE_SUCC (b, 1);
6de9cd9a
DN
5385 }
5386}
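/* A compiled-out sketch of the usual call pattern for the helper above when
   a pass needs to know which successor of a COND_EXPR block is the "then"
   destination.  BB is hypothetical.  */
#if 0
static basic_block
then_destination (basic_block bb)
{
  edge true_edge, false_edge;

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  return true_edge->dest;
}
#endif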
5387
5388struct tree_opt_pass pass_warn_function_return =
5389{
5390 NULL, /* name */
5391 NULL, /* gate */
5392 execute_warn_function_return, /* execute */
5393 NULL, /* sub */
5394 NULL, /* next */
5395 0, /* static_pass_number */
5396 0, /* tv_id */
00bfee6f 5397 PROP_cfg, /* properties_required */
6de9cd9a
DN
5398 0, /* properties_provided */
5399 0, /* properties_destroyed */
5400 0, /* todo_flags_start */
9f8628ba
PB
5401 0, /* todo_flags_finish */
5402 0 /* letter */
6de9cd9a
DN
5403};
5404
5405#include "gt-tree-cfg.h"