/* Control flow functions for trees.
   Copyright (C) 2001-2022 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "tree-ssa-dce.h"
#include "omp-general.h"
#include "omp-expand.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"
#include "selftest.h"
#include "opts.h"
#include "asan.h"
#include "profile.h"
#include "sreal.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently look up all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;
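
/* Illustrative sketch (not from the GCC sources): given

     switch (x) { case 1: case 2: goto L1; default: goto L2; }

   the edge from the switch block to L1's block would map to the chain
   "case 1 -> case 2" linked through CASE_CHAIN, so redirecting that
   edge can update both CASE_LABEL_EXPRs without rescanning the whole
   case vector.  */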

/* OpenMP region idxs for blocks during cfg pass.  */
static vec<int> bb_to_omp_idx;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  int location_line;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return item->location_line;
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return a->location_line == b->location_line;
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (const gcond *, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity, true);

  /* Build a mapping of labels to their associated blocks.  */
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity, true);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}
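
/* Illustrative sketch (not from the GCC sources): for

     int f (int c) { if (c) return 1; return 0; }

   make_blocks carves the statement list into blocks at the
   GIMPLE_COND and at the labels of the two return arms,
   cleanup_dead_labels and group_case_labels tidy the result, and
   make_edges then wires ENTRY to the test block, the test block to
   both arms, and each arm to EXIT.  */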

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the
   annotations come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, class loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_unroll_kind:
	  loop->unroll
	    = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
	  cfun->has_unroll = true;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	case annot_expr_parallel_kind:
	  loop->can_be_parallel = true;
	  loop->safelen = INT_MAX;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
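
/* Illustrative sketch (not from the GCC sources): with
   "#pragma GCC ivdep" on a loop, its exit test is gimplified roughly
   as

     _2 = .ANNOTATE (_1, annot_expr_ivdep_kind, 0);
     if (_2 != 0) goto body; else goto exit;

   and the function above sets loop->safelen = INT_MAX, then rewrites
   the .ANNOTATE call into the plain copy "_2 = _1".  */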

/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  for (auto loop : loops_list (cfun, 0))
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);

      /* Push the global flag_finite_loops state down to individual loops.  */
      loop->finite_p = flag_finite_loops;
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_unroll_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	    case annot_expr_parallel_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();

  bb_to_omp_idx.release ();

  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
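
/* Illustrative sketch (not from the GCC sources):

     void f (void *p) { goto *p; }		// computed goto
     void g (void) { goto out; out:; }		// simple goto

   computed_goto_p returns true for the GIMPLE_GOTO in f, whose
   destination is a pointer value rather than a LABEL_DECL, and false
   for the one in g.  */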

/* Returns true if the sequence of statements STMTS only contains
   a call to __builtin_unreachable ().  */

bool
gimple_seq_unreachable_p (gimple_seq stmts)
{
  if (stmts == NULL
      /* Return false if -fsanitize=unreachable, we don't want to
	 optimize away those calls, but rather turn them into
	 __ubsan_handle_builtin_unreachable () or __builtin_trap ()
	 later.  */
      || sanitize_flags_p (SANITIZE_UNREACHABLE))
    return false;

  gimple_stmt_iterator gsi = gsi_last (stmts);

  if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    return false;

  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL
	  && !is_gimple_debug (stmt)
	  && !gimple_clobber_p (stmt))
	return false;
    }
  return true;
}
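
/* Illustrative sketch (not from the GCC sources): the predicate above
   accepts a sequence such as

     <L1>:
     x ={v} {CLOBBER};
     __builtin_unreachable ();

   since labels, debug stmts and clobbers before the trailing call are
   ignored, but rejects the sequence as soon as any other real
   statement precedes the call.  */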

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	return gimple_seq_unreachable_p (bb_seq (other_bb));
    }
  return false;
}


/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* BUILT_IN_RETURN call is same as return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
	 as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
	  && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}
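
/* Illustrative sketch (not from the GCC sources): a call to abort ()
   is ECF_NORETURN, and a call that can be returned to abnormally (a
   nonlocal-goto receiver situation) can make an abnormal goto, so
   both are marked GF_CALL_CTRL_ALTERING and terminate their basic
   blocks, whereas an ordinary call such as puts ("x") is left
   unmarked and does not end a block.  */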

/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  gimple *prev_stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      /* PREV_STMT should only be set to a debug stmt if the debug
	 stmt is before nondebug stmts.  Once stmt reaches a nondebug
	 nonlabel, prev_stmt will be set to it, so that
	 stmt_starts_bb_p will know to start a new block if a label is
	 found.  However, if stmt was a label after debug stmts only,
	 keep the label in prev_stmt even if we find further debug
	 stmts, for there may be other labels after them, and they
	 should land in the same block.  */
      if (!prev_stmt || !stmt || !is_gimple_debug (stmt))
	prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	  prev_stmt = NULL;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}
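
/* Illustrative sketch (not from the GCC sources): the lowered
   sequence

     a = 1;
     if (c) goto L1; else goto L2;
     L1: b = 2;
     L2: return b;

   is cut after the GIMPLE_COND (it ends a block, per stmt_ends_bb_p)
   and before each label (labels start a block, per stmt_starts_bb_p),
   yielding three basic blocks chained after BB.  */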

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  /* Look for debug markers right before labels, and move the debug
     stmts after the labels.  Accepting labels among debug markers
     adds no value, just complexity; if we wanted to annotate labels
     with view numbers (so sequencing among markers would matter) or
     somesuch, we're probably better off still moving the labels, but
     adding other debug annotations in their original positions or
     emitting nonbind or bind markers associated with the labels in
     the original position of the labels.

     Moving labels would probably be simpler, but we can't do that:
     moving labels assigns label ids to them, and doing so because of
     debug markers makes for -fcompare-debug and possibly even codegen
     differences.  So, we have to move the debug stmts instead.  To
     that end, we scan SEQ backwards, marking the position of the
     latest (earliest we find) label, and moving debug stmts that are
     not separated from it by nondebug nonlabel stmts after the
     label.  */
  if (MAY_HAVE_DEBUG_MARKER_STMTS)
    {
      gimple_stmt_iterator label = gsi_none ();

      for (gimple_stmt_iterator i = gsi_last (seq); !gsi_end_p (i); gsi_prev (&i))
	{
	  gimple *stmt = gsi_stmt (i);

	  /* If this is the first label we encounter (latest in SEQ)
	     before nondebug stmts, record its position.  */
	  if (is_a <glabel *> (stmt))
	    {
	      if (gsi_end_p (label))
		label = i;
	      continue;
	    }

	  /* Without a recorded label position to move debug stmts to,
	     there's nothing to do.  */
	  if (gsi_end_p (label))
	    continue;

	  /* Move the debug stmt at I after LABEL.  */
	  if (is_gimple_debug (stmt))
	    {
	      gcc_assert (gimple_debug_nonbind_marker_p (stmt));
	      /* As STMT is removed, I advances to the stmt after
		 STMT, so the gsi_prev in the for "increment"
		 expression gets us to the stmt we're to visit after
		 STMT.  LABEL, however, would advance to the moved
		 stmt if we passed it to gsi_move_after, so pass it a
		 copy instead, so as to keep LABEL pointing to the
		 LABEL.  */
	      gimple_stmt_iterator copy = label;
	      gsi_move_after (&i, &copy);
	      continue;
	    }

	  /* There aren't any (more?) debug stmts before label, so
	     there isn't anything else to move after it.  */
	  label = gsi_none ();
	}
    }

  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}

/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   last_basic_block_for_fn (cfun) + 1);

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}


/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block
   containing an IFN_ABNORMAL_DISPATCHER internal call, return
   the dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
	  return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block with an
   ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs, basic_block for_bb,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (!bb_to_omp_idx.is_empty ())
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx.is_empty ())
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (!bb_to_omp_idx.is_empty ()
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						  1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (!bb_to_omp_idx.is_empty ()
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
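
/* Illustrative sketch (not from the GCC sources): two computed gotos

     goto *p;   ...   goto *q;

   are factored so that each site becomes

     gotovar = p; goto <factored>;

   and a single dispatcher block "<factored>: goto gotovar;" carries
   the abnormal edges to every FORCED_LABEL, instead of every goto
   site needing an edge to every potential target.  */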

/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   returns 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
	gtransaction *txn = as_a <gtransaction *> (last);
	tree label1 = gimple_transaction_label_norm (txn);
	tree label2 = gimple_transaction_label_uninst (txn);

	if (label1)
	  make_edge (bb, label_to_block (cfun, label1), EDGE_FALLTHRU);
	if (label2)
	  make_edge (bb, label_to_block (cfun, label2),
		     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

	tree label3 = gimple_transaction_label_over (txn);
	if (gimple_transaction_subcode (txn)
	    & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
	  make_edge (bb, label_to_block (cfun, label3), EDGE_TM_ABORT);

	fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (!bb_to_omp_idx.is_empty ())
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx.is_empty ())
	bb_to_omp_idx.safe_grow_cleared (n_basic_blocks_for_fn (cfun), true);
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (!bb_to_omp_idx.is_empty ())
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_goto,
				       true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_call,
					 false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_call,
				       false);
	    }
	}

      if (!bb_to_omp_idx.is_empty ())
	XDELETE (dispatcher_bbs);
    }

  omp_free_regions ();
}

/* Add SEQ after GSI.  Start new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      profile_count cnt = profile_count::zero ();
      bool all = true;

      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->count ().initialized_p ())
	    cnt += e->count ();
	  else
	    all = false;
	}
      tree_guess_outgoing_edge_probabilities (bb);
      if (all || profile_status_for_fn (cfun) == PROFILE_READ)
	bb->count = cnt;

      bb = bb->next_bb;
    }
  return true;
}

/* Find the next available discriminator value for LINE.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (int line)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.location_line = line;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (&item, line, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->location_line = line;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
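
/* Illustrative sketch (not from the GCC sources): for

     if (p) x = 1; else x = 2;	// everything on source line 42

   the first call to next_discriminator_for_locus (42) returns 1 and
   the second returns 2, letting a sample-based profiler attribute
   counts to the two arms even though they share line 42.  */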

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, expanded_location *from, location_t locus2)
{
  expanded_location to;

  if (locus1 == locus2)
    return true;

  to = expand_location (locus2);

  if (from->line != to.line)
    return false;
  if (from->file == to.file)
    return true;
  return (from->file != NULL
	  && to.file != NULL
	  && filename_cmp (from->file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      expanded_location locus_e = expand_location (locus);

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, &locus_e,
				     gimple_location (first)))
	      || (last && same_line_p (locus, &locus_e,
				       gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator
		  = next_discriminator_for_locus (locus_e.line);
	      else
		e->dest->discriminator
		  = next_discriminator_for_locus (locus_e.line);
	    }
	}
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (cfun, then_label);
  else_bb = label_to_block (cfun, else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
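
/* Illustrative sketch (not from the GCC sources): the lowered form

     if (a < b) goto <D.1>; else goto <D.2>;

   still names its destinations via the labels <D.1> and <D.2>; the
   function above converts them into EDGE_TRUE_VALUE and
   EDGE_FALSE_VALUE edges and clears both labels, since the CFG now
   encodes the targets.  */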

/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */

void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (cfun, lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      basic_block label_bb = gimple_switch_label_bb (cfun, entry, i);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (cfun, dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (cfun, label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After that we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
};

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label, label_record *label_for_bb)
{
  basic_block bb = label_to_block (cfun, label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted an undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (label_record *label_for_bb)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad, label_for_bb);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    lp->post_landing_pad = lab;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab, label_for_bb);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab, label_for_bb);
	break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
   1) Find the leading label for each block.
   2) Redirect all references to labels to the leading labels.
   3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_record *label_for_bb = XCNEWVEC (struct label_record,
					 last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons), label_for_bb);
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label, label_for_bb);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *txn = as_a <gtransaction *> (stmt);

	    label = gimple_transaction_label_norm (txn);
	    if (label)
	      {
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  gimple_transaction_set_label_norm (txn, new_label);
	      }

	    label = gimple_transaction_label_uninst (txn);
	    if (label)
	      {
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  gimple_transaction_set_label_uninst (txn, new_label);
	      }

	    label = gimple_transaction_label_over (txn);
	    if (label)
	      {
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  gimple_transaction_set_label_over (txn, new_label);
	      }
	  }
	  break;

	case GIMPLE_OMP_METADIRECTIVE:
	  {
	    for (unsigned i = 0; i < gimple_num_ops (stmt); i++)
	      {
		label = gimple_omp_metadirective_label (stmt, i);
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  gimple_omp_metadirective_set_label (stmt, i, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh (label_for_bb);

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    {
	      gcc_checking_assert (EH_LANDING_PAD_NR (label) == 0);
	      gsi_remove (&i, true);
	    }
	}
    }

  free (label_for_bb);
}
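
/* Illustrative sketch (not from the GCC sources): if a block begins
   with

     L1: L2: L3: x = 1;

   and only L2 is user-defined, L2 is chosen as the block's main
   label, every goto, switch case and EH reference naming L1 or L3 is
   redirected to L2, and the now-unreferenced artificial labels L1 and
   L3 are deleted.  */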
1733
1734 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1735 the ones jumping to the same label.
1736 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1737
1738 bool
1739 group_case_labels_stmt (gswitch *stmt)
1740 {
1741 int old_size = gimple_switch_num_labels (stmt);
1742 int i, next_index, new_size;
1743 basic_block default_bb = NULL;
1744 hash_set<tree> *removed_labels = NULL;
1745
1746 default_bb = gimple_switch_default_bb (cfun, stmt);
1747
1748 /* Look for possible opportunities to merge cases. */
1749 new_size = i = 1;
1750 while (i < old_size)
1751 {
1752 tree base_case, base_high;
1753 basic_block base_bb;
1754
1755 base_case = gimple_switch_label (stmt, i);
1756
1757 gcc_assert (base_case);
1758 base_bb = label_to_block (cfun, CASE_LABEL (base_case));
1759
1760 /* Discard cases that have the same destination as the default case or
1761 whose destination blocks have already been removed as unreachable. */
1762 if (base_bb == NULL
1763 || base_bb == default_bb
1764 || (removed_labels
1765 && removed_labels->contains (CASE_LABEL (base_case))))
1766 {
1767 i++;
1768 continue;
1769 }
1770
1771 base_high = CASE_HIGH (base_case)
1772 ? CASE_HIGH (base_case)
1773 : CASE_LOW (base_case);
1774 next_index = i + 1;
1775
1776 /* Try to merge case labels. Break out when we reach the end
1777 of the label vector or when we cannot merge the next case
1778 label with the current one. */
1779 while (next_index < old_size)
1780 {
1781 tree merge_case = gimple_switch_label (stmt, next_index);
1782 basic_block merge_bb = label_to_block (cfun, CASE_LABEL (merge_case));
1783 wide_int bhp1 = wi::to_wide (base_high) + 1;
1784
1785 /* Merge the cases if they jump to the same place,
1786 and their ranges are consecutive. */
1787 if (merge_bb == base_bb
1788 && (removed_labels == NULL
1789 || !removed_labels->contains (CASE_LABEL (merge_case)))
1790 && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
1791 {
1792 base_high
1793 = (CASE_HIGH (merge_case)
1794 ? CASE_HIGH (merge_case) : CASE_LOW (merge_case));
1795 CASE_HIGH (base_case) = base_high;
1796 next_index++;
1797 }
1798 else
1799 break;
1800 }
1801
1802 /* Discard cases that have an unreachable destination block. */
1803 if (EDGE_COUNT (base_bb->succs) == 0
1804 && gimple_seq_unreachable_p (bb_seq (base_bb))
1805 /* Don't optimize this if __builtin_unreachable () is the
1806 implicitly added one by the C++ FE too early, before
1807 -Wreturn-type can be diagnosed. We'll optimize it later
1808 during switchconv pass or any other cfg cleanup. */
1809 && (gimple_in_ssa_p (cfun)
1810 || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb)))
1811 != BUILTINS_LOCATION)))
1812 {
1813 edge base_edge = find_edge (gimple_bb (stmt), base_bb);
1814 if (base_edge != NULL)
1815 {
1816 for (gimple_stmt_iterator gsi = gsi_start_bb (base_bb);
1817 !gsi_end_p (gsi); gsi_next (&gsi))
1818 if (glabel *stmt = dyn_cast <glabel *> (gsi_stmt (gsi)))
1819 {
1820 if (FORCED_LABEL (gimple_label_label (stmt))
1821 || DECL_NONLOCAL (gimple_label_label (stmt)))
1822 {
1823 /* Forced/non-local labels aren't going to be removed,
1824 but they will be moved to some neighbouring basic
1825 block. If some later case label refers to one of
1826 those labels, we should throw that case away rather
1827 than keeping it around and refering to some random
1828 other basic block without an edge to it. */
1829 if (removed_labels == NULL)
1830 removed_labels = new hash_set<tree>;
1831 removed_labels->add (gimple_label_label (stmt));
1832 }
1833 }
1834 else
1835 break;
1836 remove_edge_and_dominated_blocks (base_edge);
1837 }
1838 i = next_index;
1839 continue;
1840 }
1841
1842 if (new_size < i)
1843 gimple_switch_set_label (stmt, new_size,
1844 gimple_switch_label (stmt, i));
1845 i = next_index;
1846 new_size++;
1847 }
1848
1849 gcc_assert (new_size <= old_size);
1850
1851 if (new_size < old_size)
1852 gimple_switch_set_num_labels (stmt, new_size);
1853
1854 delete removed_labels;
1855 return new_size < old_size;
1856 }
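
/* As an illustration (hypothetical GIMPLE, not taken from a testcase),
   group_case_labels_stmt would rewrite

     switch (x_1) <default: D, case 1: L, case 2: L, case 3: L, case 7: D>

   into

     switch (x_1) <default: D, case 1 ... 3: L>

   because cases 1, 2 and 3 are consecutive and share the destination L,
   while case 7 jumps to the same block as the default label and is
   therefore discarded.  */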
1857
1858 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1859 and scan the sorted vector of cases. Combine the ones jumping to the
1860 same label. */
1861
1862 bool
1863 group_case_labels (void)
1864 {
1865 basic_block bb;
1866 bool changed = false;
1867
1868 FOR_EACH_BB_FN (bb, cfun)
1869 {
1870 gimple *stmt = last_stmt (bb);
1871 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1872 changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
1873 }
1874
1875 return changed;
1876 }
1877
1878 /* Checks whether we can merge block B into block A. */
1879
1880 static bool
1881 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1882 {
1883 gimple *stmt;
1884
1885 if (!single_succ_p (a))
1886 return false;
1887
1888 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1889 return false;
1890
1891 if (single_succ (a) != b)
1892 return false;
1893
1894 if (!single_pred_p (b))
1895 return false;
1896
1897 if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1898 || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1899 return false;
1900
1901 /* If A ends with a statement causing exceptions or something similar, we
1902 cannot merge the blocks. */
1903 stmt = last_stmt (a);
1904 if (stmt && stmt_ends_bb_p (stmt))
1905 return false;
1906
1907 /* Do not allow a block with only a non-local label to be merged. */
1908 if (stmt)
1909 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1910 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1911 return false;
1912
1913 /* Examine the labels at the beginning of B. */
1914 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1915 gsi_next (&gsi))
1916 {
1917 tree lab;
1918 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1919 if (!label_stmt)
1920 break;
1921 lab = gimple_label_label (label_stmt);
1922
1923 /* Do not remove user forced labels or for -O0 any user labels. */
1924 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1925 return false;
1926 }
1927
1928 /* Protect simple loop latches. We only want to avoid merging
1929 the latch with the loop header or with a block in another
1930 loop in this case. */
1931 if (current_loops
1932 && b->loop_father->latch == b
1933 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1934 && (b->loop_father->header == a
1935 || b->loop_father != a->loop_father))
1936 return false;
1937
1938 /* It must be possible to eliminate all phi nodes in B. If ssa form
1939 is not up-to-date and a name-mapping is registered, we cannot eliminate
1940 any phis. Symbols marked for renaming are never a problem though. */
1941 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1942 gsi_next (&gsi))
1943 {
1944 gphi *phi = gsi.phi ();
1945 /* Technically only new names matter. */
1946 if (name_registered_for_update_p (PHI_RESULT (phi)))
1947 return false;
1948 }
1949
1950 /* When not optimizing, don't merge if we'd lose goto_locus. */
1951 if (!optimize
1952 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1953 {
1954 location_t goto_locus = single_succ_edge (a)->goto_locus;
1955 gimple_stmt_iterator prev, next;
1956 prev = gsi_last_nondebug_bb (a);
1957 next = gsi_after_labels (b);
1958 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1959 gsi_next_nondebug (&next);
1960 if ((gsi_end_p (prev)
1961 || gimple_location (gsi_stmt (prev)) != goto_locus)
1962 && (gsi_end_p (next)
1963 || gimple_location (gsi_stmt (next)) != goto_locus))
1964 return false;
1965 }
1966
1967 return true;
1968 }
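
/* A sketch of the intended calling convention for the predicate above
   (hypothetical caller; the real ones live in the CFG cleanup code):

     if (gimple_can_merge_blocks_p (a, b))
       merge_blocks (a, b);

   where merge_blocks is the cfghooks wrapper that eventually dispatches
   to gimple_merge_blocks below; the merge itself assumes all of the
   checks above have passed.  */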
1969
1970 /* Replaces all uses of NAME by VAL. */
1971
1972 void
1973 replace_uses_by (tree name, tree val)
1974 {
1975 imm_use_iterator imm_iter;
1976 use_operand_p use;
1977 gimple *stmt;
1978 edge e;
1979
1980 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1981 {
1982 /* Mark the block if we change the last stmt in it. */
1983 if (cfgcleanup_altered_bbs
1984 && stmt_ends_bb_p (stmt))
1985 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1986
1987 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1988 {
1989 replace_exp (use, val);
1990
1991 if (gimple_code (stmt) == GIMPLE_PHI)
1992 {
1993 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1994 PHI_ARG_INDEX_FROM_USE (use));
1995 if (e->flags & EDGE_ABNORMAL
1996 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1997 {
1998 /* This can only occur for virtual operands, since
1999 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
2000 would prevent replacement. */
2001 gcc_checking_assert (virtual_operand_p (name));
2002 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
2003 }
2004 }
2005 }
2006
2007 if (gimple_code (stmt) != GIMPLE_PHI)
2008 {
2009 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2010 gimple *orig_stmt = stmt;
2011 size_t i;
2012
2013 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
2014 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
2015 only change something from non-invariant to invariant, and only
2016 when propagating constants. */
2017 if (is_gimple_min_invariant (val))
2018 for (i = 0; i < gimple_num_ops (stmt); i++)
2019 {
2020 tree op = gimple_op (stmt, i);
2021 /* Operands may be empty here. For example, the labels
2022 of a GIMPLE_COND are nulled out following the creation
2023 of the corresponding CFG edges. */
2024 if (op && TREE_CODE (op) == ADDR_EXPR)
2025 recompute_tree_invariant_for_addr_expr (op);
2026 }
2027
2028 if (fold_stmt (&gsi))
2029 stmt = gsi_stmt (gsi);
2030
2031 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
2032 gimple_purge_dead_eh_edges (gimple_bb (stmt));
2033
2034 update_stmt (stmt);
2035 }
2036 }
2037
2038 gcc_checking_assert (has_zero_uses (name));
2039
2040 /* Also update the trees stored in loop structures. */
2041 if (current_loops)
2042 {
2043 for (auto loop : loops_list (cfun, 0))
2044 substitute_in_loop_info (loop, name, val);
2045 }
2046 }
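
/* For example (hypothetical SSA fragment), replace_uses_by (b_2, 5)
   would turn

     a_1 = b_2 + 1;
     if (b_2 > 7) goto <bb 3>; else goto <bb 4>;

   into roughly

     a_1 = 6;
     if (0 != 0) goto <bb 3>; else goto <bb 4>;

   since fold_stmt is run on every touched statement; a later CFG
   cleanup can then remove the dead edge to <bb 3>.  */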
2047
2048 /* Merge block B into block A. */
2049
2050 static void
2051 gimple_merge_blocks (basic_block a, basic_block b)
2052 {
2053 gimple_stmt_iterator last, gsi;
2054 gphi_iterator psi;
2055
2056 if (dump_file)
2057 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
2058
2059 /* Remove all single-valued PHI nodes from block B of the form
2060 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
2061 gsi = gsi_last_bb (a);
2062 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
2063 {
2064 gimple *phi = gsi_stmt (psi);
2065 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
2066 gimple *copy;
2067 bool may_replace_uses = (virtual_operand_p (def)
2068 || may_propagate_copy (def, use));
2069
2070 /* In case we maintain loop closed ssa form, do not propagate arguments
2071 of loop exit phi nodes. */
2072 if (current_loops
2073 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
2074 && !virtual_operand_p (def)
2075 && TREE_CODE (use) == SSA_NAME
2076 && a->loop_father != b->loop_father)
2077 may_replace_uses = false;
2078
2079 if (!may_replace_uses)
2080 {
2081 gcc_assert (!virtual_operand_p (def));
2082
2083 /* Note that just emitting the copies is fine -- there is no problem
2084 with ordering of phi nodes. This is because A is the single
2085 predecessor of B, therefore results of the phi nodes cannot
2086 appear as arguments of the phi nodes. */
2087 copy = gimple_build_assign (def, use);
2088 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
2089 remove_phi_node (&psi, false);
2090 }
2091 else
2092 {
2093 /* If we deal with a PHI for virtual operands, we can simply
2094 propagate these without fussing with folding or updating
2095 the stmt. */
2096 if (virtual_operand_p (def))
2097 {
2098 imm_use_iterator iter;
2099 use_operand_p use_p;
2100 gimple *stmt;
2101
2102 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2103 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2104 SET_USE (use_p, use);
2105
2106 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2107 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2108 }
2109 else
2110 replace_uses_by (def, use);
2111
2112 remove_phi_node (&psi, true);
2113 }
2114 }
2115
2116 /* Ensure that B follows A. */
2117 move_block_after (b, a);
2118
2119 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2120 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2121
2122 /* Remove labels from B and set gimple_bb to A for other statements. */
2123 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2124 {
2125 gimple *stmt = gsi_stmt (gsi);
2126 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2127 {
2128 tree label = gimple_label_label (label_stmt);
2129 int lp_nr;
2130
2131 gsi_remove (&gsi, false);
2132
2133 /* Now that we can thread computed gotos, we might have
2134 a situation where we have a forced label in block B.
2135 However, the label at the start of block B might still be
2136 used in other ways (think about the runtime checking for
2137 Fortran assigned gotos). So we cannot just delete the
2138 label. Instead we move the label to the start of block A. */
2139 if (FORCED_LABEL (label))
2140 {
2141 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2142 tree first_label = NULL_TREE;
2143 if (!gsi_end_p (dest_gsi))
2144 if (glabel *first_label_stmt
2145 = dyn_cast <glabel *> (gsi_stmt (dest_gsi)))
2146 first_label = gimple_label_label (first_label_stmt);
2147 if (first_label
2148 && (DECL_NONLOCAL (first_label)
2149 || EH_LANDING_PAD_NR (first_label) != 0))
2150 gsi_insert_after (&dest_gsi, stmt, GSI_NEW_STMT);
2151 else
2152 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2153 }
2154 /* Other user labels are kept around in the form of a debug stmt. */
2155 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_BIND_STMTS)
2156 {
2157 gimple *dbg = gimple_build_debug_bind (label,
2158 integer_zero_node,
2159 stmt);
2160 gimple_debug_bind_reset_value (dbg);
2161 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2162 }
2163
2164 lp_nr = EH_LANDING_PAD_NR (label);
2165 if (lp_nr)
2166 {
2167 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2168 lp->post_landing_pad = NULL;
2169 }
2170 }
2171 else
2172 {
2173 gimple_set_bb (stmt, a);
2174 gsi_next (&gsi);
2175 }
2176 }
2177
2178 /* When merging two BBs, if their counts are different, the larger count
2179 is selected as the new bb count. This is to handle inconsistent
2180 profiles. */
2181 if (a->loop_father == b->loop_father)
2182 {
2183 a->count = a->count.merge (b->count);
2184 }
2185
2186 /* Merge the sequences. */
2187 last = gsi_last_bb (a);
2188 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2189 set_bb_seq (b, NULL);
2190
2191 if (cfgcleanup_altered_bbs)
2192 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2193 }
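
/* A sketch of the PHI handling above on a hypothetical input, where A
   is the single predecessor of B:

     <bb B>:
       # x_3 = PHI <x_1(A)>
       use (x_3);

   If may_propagate_copy allows it, uses of x_3 are rewritten to x_1
   and the PHI disappears; otherwise the merge emits an explicit copy

       x_3 = x_1;

   at the end of A before the statements of B are appended.  */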
2194
2195
2196 /* Return the one of the two successors of BB that is not reachable by a
2197 complex edge, if there is one. Otherwise, return BB. We use
2198 this in optimizations that use post-dominators for their heuristics,
2199 to catch the cases in C++ where function calls are involved. */
2200
2201 basic_block
2202 single_noncomplex_succ (basic_block bb)
2203 {
2204 edge e0, e1;
2205 if (EDGE_COUNT (bb->succs) != 2)
2206 return bb;
2207
2208 e0 = EDGE_SUCC (bb, 0);
2209 e1 = EDGE_SUCC (bb, 1);
2210 if (e0->flags & EDGE_COMPLEX)
2211 return e1->dest;
2212 if (e1->flags & EDGE_COMPLEX)
2213 return e0->dest;
2214
2215 return bb;
2216 }
2217
2218 /* CALL is a GIMPLE_CALL. Set current_function_calls_* flags. */
2219
2220 void
2221 notice_special_calls (gcall *call)
2222 {
2223 int flags = gimple_call_flags (call);
2224
2225 if (flags & ECF_MAY_BE_ALLOCA)
2226 cfun->calls_alloca = true;
2227 if (flags & ECF_RETURNS_TWICE)
2228 cfun->calls_setjmp = true;
2229 }
2230
2231
2232 /* Clear flags set by notice_special_calls. Used by dead code removal
2233 to update the flags. */
2234
2235 void
2236 clear_special_calls (void)
2237 {
2238 cfun->calls_alloca = false;
2239 cfun->calls_setjmp = false;
2240 }
2241
2242 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2243
2244 static void
2245 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2246 {
2247 /* Since this block is no longer reachable, we can just delete all
2248 of its PHI nodes. */
2249 remove_phi_nodes (bb);
2250
2251 /* Remove edges to BB's successors. */
2252 while (EDGE_COUNT (bb->succs) > 0)
2253 remove_edge (EDGE_SUCC (bb, 0));
2254 }
2255
2256
2257 /* Remove statements of basic block BB. */
2258
2259 static void
2260 remove_bb (basic_block bb)
2261 {
2262 gimple_stmt_iterator i;
2263
2264 if (dump_file)
2265 {
2266 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2267 if (dump_flags & TDF_DETAILS)
2268 {
2269 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2270 fprintf (dump_file, "\n");
2271 }
2272 }
2273
2274 if (current_loops)
2275 {
2276 class loop *loop = bb->loop_father;
2277
2278 /* If a loop gets removed, clean up the information associated
2279 with it. */
2280 if (loop->latch == bb
2281 || loop->header == bb)
2282 free_numbers_of_iterations_estimates (loop);
2283 }
2284
2285 /* Remove all the instructions in the block. */
2286 if (bb_seq (bb) != NULL)
2287 {
2288 /* Walk backwards so as to get a chance to substitute all
2289 released DEFs into debug stmts. See
2290 eliminate_unnecessary_stmts() in tree-ssa-dce.cc for more
2291 details. */
2292 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2293 {
2294 gimple *stmt = gsi_stmt (i);
2295 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2296 if (label_stmt
2297 && (FORCED_LABEL (gimple_label_label (label_stmt))
2298 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2299 {
2300 basic_block new_bb;
2301 gimple_stmt_iterator new_gsi;
2302
2303 /* A non-reachable non-local label may still be referenced.
2304 But it no longer needs to carry the extra semantics of
2305 non-locality. */
2306 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2307 {
2308 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2309 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2310 }
2311
2312 new_bb = bb->prev_bb;
2313 /* Don't move any labels into ENTRY block. */
2314 if (new_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2315 {
2316 new_bb = single_succ (new_bb);
2317 gcc_assert (new_bb != bb);
2318 }
2319 if ((unsigned) bb->index < bb_to_omp_idx.length ()
2320 && ((unsigned) new_bb->index >= bb_to_omp_idx.length ()
2321 || (bb_to_omp_idx[bb->index]
2322 != bb_to_omp_idx[new_bb->index])))
2323 {
2324 /* During cfg pass make sure to put orphaned labels
2325 into the right OMP region. */
2326 unsigned int i;
2327 int idx;
2328 new_bb = NULL;
2329 FOR_EACH_VEC_ELT (bb_to_omp_idx, i, idx)
2330 if (i >= NUM_FIXED_BLOCKS
2331 && idx == bb_to_omp_idx[bb->index]
2332 && i != (unsigned) bb->index)
2333 {
2334 new_bb = BASIC_BLOCK_FOR_FN (cfun, i);
2335 break;
2336 }
2337 if (new_bb == NULL)
2338 {
2339 new_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2340 gcc_assert (new_bb != bb);
2341 }
2342 }
2343 new_gsi = gsi_after_labels (new_bb);
2344 gsi_remove (&i, false);
2345 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2346 }
2347 else
2348 {
2349 /* Release SSA definitions. */
2350 release_defs (stmt);
2351 gsi_remove (&i, true);
2352 }
2353
2354 if (gsi_end_p (i))
2355 i = gsi_last_bb (bb);
2356 else
2357 gsi_prev (&i);
2358 }
2359 }
2360
2361 if ((unsigned) bb->index < bb_to_omp_idx.length ())
2362 bb_to_omp_idx[bb->index] = -1;
2363 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2364 bb->il.gimple.seq = NULL;
2365 bb->il.gimple.phi_nodes = NULL;
2366 }
2367
2368
2369 /* Given a basic block BB and a value VAL for use in the final statement
2370 of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
2371 the edge that will be taken out of the block.
2372 If VAL is NULL_TREE, then the current value of the final statement's
2373 predicate or index is used.
2374 If the value does not match a unique edge, NULL is returned. */
2375
2376 edge
2377 find_taken_edge (basic_block bb, tree val)
2378 {
2379 gimple *stmt;
2380
2381 stmt = last_stmt (bb);
2382
2383 /* Handle ENTRY and EXIT. */
2384 if (!stmt)
2385 return NULL;
2386
2387 if (gimple_code (stmt) == GIMPLE_COND)
2388 return find_taken_edge_cond_expr (as_a <gcond *> (stmt), val);
2389
2390 if (gimple_code (stmt) == GIMPLE_SWITCH)
2391 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), val);
2392
2393 if (computed_goto_p (stmt))
2394 {
2395 /* Only optimize if the argument is a label; if the argument is
2396 not a label then we cannot construct a proper CFG.
2397
2398 It may be the case that we only need to allow the LABEL_REF to
2399 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2400 appear inside a LABEL_EXPR just to be safe. */
2401 if (val
2402 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2403 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2404 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2405 }
2406
2407 /* Otherwise we only know the taken successor edge if it's unique. */
2408 return single_succ_p (bb) ? single_succ_edge (bb) : NULL;
2409 }
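
/* For instance (hypothetical block), if BB ends in

     if (x_1 > 10) goto <bb 5>; else goto <bb 6>;

   then find_taken_edge (bb, integer_zero_node) returns the edge to
   <bb 6>, while find_taken_edge (bb, NULL_TREE) returns NULL because
   the current predicate is not a compile-time constant.  */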
2410
2411 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2412 statement, determine which of the outgoing edges will be taken out of the
2413 block. Return NULL if either edge may be taken. */
2414
2415 static edge
2416 find_taken_edge_computed_goto (basic_block bb, tree val)
2417 {
2418 basic_block dest;
2419 edge e = NULL;
2420
2421 dest = label_to_block (cfun, val);
2422 if (dest)
2423 e = find_edge (bb, dest);
2424
2425 /* It's possible for find_edge to return NULL here on invalid code
2426 that abuses the labels-as-values extension (e.g. code that attempts to
2427 jump *between* functions via stored labels-as-values; PR 84136).
2428 If so, then we simply return that NULL for the edge.
2429 We don't currently have a way of detecting such invalid code, so we
2430 can't assert that it was the case when a NULL edge occurs here. */
2431
2432 return e;
2433 }
2434
2435 /* Given COND_STMT and a constant value VAL for use as the predicate,
2436 determine which of the two edges will be taken out of
2437 the statement's block. Return NULL if either edge may be taken.
2438 If VAL is NULL_TREE, then the current value of COND_STMT's predicate
2439 is used. */
2440
2441 static edge
2442 find_taken_edge_cond_expr (const gcond *cond_stmt, tree val)
2443 {
2444 edge true_edge, false_edge;
2445
2446 if (val == NULL_TREE)
2447 {
2448 /* Use the current value of the predicate. */
2449 if (gimple_cond_true_p (cond_stmt))
2450 val = integer_one_node;
2451 else if (gimple_cond_false_p (cond_stmt))
2452 val = integer_zero_node;
2453 else
2454 return NULL;
2455 }
2456 else if (TREE_CODE (val) != INTEGER_CST)
2457 return NULL;
2458
2459 extract_true_false_edges_from_block (gimple_bb (cond_stmt),
2460 &true_edge, &false_edge);
2461
2462 return (integer_zerop (val) ? false_edge : true_edge);
2463 }
2464
2465 /* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
2466 which edge will be taken out of the statement's block. Return NULL if any
2467 edge may be taken.
2468 If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
2469 is used. */
2470
2471 edge
2472 find_taken_edge_switch_expr (const gswitch *switch_stmt, tree val)
2473 {
2474 basic_block dest_bb;
2475 edge e;
2476 tree taken_case;
2477
2478 if (gimple_switch_num_labels (switch_stmt) == 1)
2479 taken_case = gimple_switch_default_label (switch_stmt);
2480 else
2481 {
2482 if (val == NULL_TREE)
2483 val = gimple_switch_index (switch_stmt);
2484 if (TREE_CODE (val) != INTEGER_CST)
2485 return NULL;
2486 else
2487 taken_case = find_case_label_for_value (switch_stmt, val);
2488 }
2489 dest_bb = label_to_block (cfun, CASE_LABEL (taken_case));
2490
2491 e = find_edge (gimple_bb (switch_stmt), dest_bb);
2492 gcc_assert (e);
2493 return e;
2494 }
2495
2496
2497 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2498 We can make optimal use here of the fact that the case labels are
2499 sorted: We can do a binary search for a case matching VAL. */
2500
2501 tree
2502 find_case_label_for_value (const gswitch *switch_stmt, tree val)
2503 {
2504 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2505 tree default_case = gimple_switch_default_label (switch_stmt);
2506
2507 for (low = 0, high = n; high - low > 1; )
2508 {
2509 size_t i = (high + low) / 2;
2510 tree t = gimple_switch_label (switch_stmt, i);
2511 int cmp;
2512
2513 /* Cache the result of comparing CASE_LOW and val. */
2514 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2515
2516 if (cmp > 0)
2517 high = i;
2518 else
2519 low = i;
2520
2521 if (CASE_HIGH (t) == NULL)
2522 {
2523 /* A single-valued case label. */
2524 if (cmp == 0)
2525 return t;
2526 }
2527 else
2528 {
2529 /* A case range. We can only handle integer ranges. */
2530 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2531 return t;
2532 }
2533 }
2534
2535 return default_case;
2536 }
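
/* E.g. for the (hypothetical) sorted label vector

     index 0: default    index 1: case 2    index 2: case 5 ... 9

   a query with VAL == 7 narrows the search to index 2, sees
   CASE_LOW <= 7 <= CASE_HIGH and returns the range case, while
   VAL == 3 ends up at index 1, fails the equality test on the
   single-valued case and falls back to the default case.  Index 0 is
   never probed directly, which is fine because the default label has
   no CASE_LOW.  */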
2537
2538
2539 /* Dump a basic block on stderr. */
2540
2541 void
2542 gimple_debug_bb (basic_block bb)
2543 {
2544 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2545 }
2546
2547
2548 /* Dump basic block with index N on stderr. */
2549
2550 basic_block
2551 gimple_debug_bb_n (int n)
2552 {
2553 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2554 return BASIC_BLOCK_FOR_FN (cfun, n);
2555 }
2556
2557
2558 /* Dump the CFG on stderr.
2559
2560 FLAGS are the same used by the tree dumping functions
2561 (see TDF_* in dumpfile.h). */
2562
2563 void
2564 gimple_debug_cfg (dump_flags_t flags)
2565 {
2566 gimple_dump_cfg (stderr, flags);
2567 }
2568
2569
2570 /* Dump the program showing basic block boundaries on the given FILE.
2571
2572 FLAGS are the same used by the tree dumping functions (see TDF_* in
2573 tree.h). */
2574
2575 void
2576 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2577 {
2578 if (flags & TDF_DETAILS)
2579 {
2580 dump_function_header (file, current_function_decl, flags);
2581 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2582 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2583 last_basic_block_for_fn (cfun));
2584
2585 brief_dump_cfg (file, flags);
2586 fprintf (file, "\n");
2587 }
2588
2589 if (flags & TDF_STATS)
2590 dump_cfg_stats (file);
2591
2592 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2593 }
2594
2595
2596 /* Dump CFG statistics on FILE. */
2597
2598 void
2599 dump_cfg_stats (FILE *file)
2600 {
2601 static long max_num_merged_labels = 0;
2602 unsigned long size, total = 0;
2603 long num_edges;
2604 basic_block bb;
2605 const char * const fmt_str = "%-30s%-13s%12s\n";
2606 const char * const fmt_str_1 = "%-30s%13d" PRsa (11) "\n";
2607 const char * const fmt_str_2 = "%-30s%13ld" PRsa (11) "\n";
2608 const char * const fmt_str_3 = "%-43s" PRsa (11) "\n";
2609 const char *funcname = current_function_name ();
2610
2611 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2612
2613 fprintf (file, "---------------------------------------------------------\n");
2614 fprintf (file, fmt_str, "", " Number of ", "Memory");
2615 fprintf (file, fmt_str, "", " instances ", "used ");
2616 fprintf (file, "---------------------------------------------------------\n");
2617
2618 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2619 total += size;
2620 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2621 SIZE_AMOUNT (size));
2622
2623 num_edges = 0;
2624 FOR_EACH_BB_FN (bb, cfun)
2625 num_edges += EDGE_COUNT (bb->succs);
2626 size = num_edges * sizeof (class edge_def);
2627 total += size;
2628 fprintf (file, fmt_str_2, "Edges", num_edges, SIZE_AMOUNT (size));
2629
2630 fprintf (file, "---------------------------------------------------------\n");
2631 fprintf (file, fmt_str_3, "Total memory used by CFG data",
2632 SIZE_AMOUNT (total));
2633 fprintf (file, "---------------------------------------------------------\n");
2634 fprintf (file, "\n");
2635
2636 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2637 max_num_merged_labels = cfg_stats.num_merged_labels;
2638
2639 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2640 cfg_stats.num_merged_labels, max_num_merged_labels);
2641
2642 fprintf (file, "\n");
2643 }
2644
2645
2646 /* Dump CFG statistics on stderr. Keep extern so that it's always
2647 linked in the final executable. */
2648
2649 DEBUG_FUNCTION void
2650 debug_cfg_stats (void)
2651 {
2652 dump_cfg_stats (stderr);
2653 }
2654
2655 /*---------------------------------------------------------------------------
2656 Miscellaneous helpers
2657 ---------------------------------------------------------------------------*/
2658
2659 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2660 flow. Transfers of control flow associated with EH are excluded. */
2661
2662 static bool
2663 call_can_make_abnormal_goto (gimple *t)
2664 {
2665 /* If the function has no non-local labels, then a call cannot make an
2666 abnormal transfer of control. */
2667 if (!cfun->has_nonlocal_label
2668 && !cfun->calls_setjmp)
2669 return false;
2670
2671 /* Likewise if the call has no side effects. */
2672 if (!gimple_has_side_effects (t))
2673 return false;
2674
2675 /* Likewise if the called function is leaf. */
2676 if (gimple_call_flags (t) & ECF_LEAF)
2677 return false;
2678
2679 return true;
2680 }
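
/* For example (hypothetical translation unit): if the function calls
   setjmp, then in

     setjmp (buf);
     g ();

   the call to g () may return to the setjmp receiver via longjmp and
   so can make an abnormal transfer of control, whereas a call known to
   be free of side effects or marked leaf cannot.  */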
2681
2682
2683 /* Return true if T can make an abnormal transfer of control flow.
2684 Transfers of control flow associated with EH are excluded. */
2685
2686 bool
2687 stmt_can_make_abnormal_goto (gimple *t)
2688 {
2689 if (computed_goto_p (t))
2690 return true;
2691 if (is_gimple_call (t))
2692 return call_can_make_abnormal_goto (t);
2693 return false;
2694 }
2695
2696
2697 /* Return true if T represents a stmt that always transfers control. */
2698
2699 bool
2700 is_ctrl_stmt (gimple *t)
2701 {
2702 switch (gimple_code (t))
2703 {
2704 case GIMPLE_COND:
2705 case GIMPLE_SWITCH:
2706 case GIMPLE_GOTO:
2707 case GIMPLE_RETURN:
2708 case GIMPLE_RESX:
2709 return true;
2710 default:
2711 return false;
2712 }
2713 }
2714
2715
2716 /* Return true if T is a statement that may alter the flow of control
2717 (e.g., a call to a non-returning function). */
2718
2719 bool
2720 is_ctrl_altering_stmt (gimple *t)
2721 {
2722 gcc_assert (t);
2723
2724 switch (gimple_code (t))
2725 {
2726 case GIMPLE_CALL:
2727 /* Per stmt call flag indicates whether the call could alter
2728 control flow. */
2729 if (gimple_call_ctrl_altering_p (t))
2730 return true;
2731 break;
2732
2733 case GIMPLE_EH_DISPATCH:
2734 /* EH_DISPATCH branches to the individual catch handlers at
2735 this level of a try or allowed-exceptions region. It can
2736 fallthru to the next statement as well. */
2737 return true;
2738
2739 case GIMPLE_ASM:
2740 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2741 return true;
2742 break;
2743
2744 CASE_GIMPLE_OMP:
2745 /* OpenMP directives alter control flow. */
2746 return true;
2747
2748 case GIMPLE_TRANSACTION:
2749 /* A transaction start alters control flow. */
2750 return true;
2751
2752 default:
2753 break;
2754 }
2755
2756 /* If a statement can throw, it alters control flow. */
2757 return stmt_can_throw_internal (cfun, t);
2758 }
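
/* Hypothetical examples of how the two predicates above classify
   statements:

     return x_1;         is_ctrl_stmt: always transfers control
     foo ();             is_ctrl_altering_stmt, when the call is marked
                         via gimple_call_ctrl_altering_p or can throw
                         internally
     x_1 = y_2 + z_3;    neither, assuming it cannot throw  */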
2759
2760
2761 /* Return true if T is a simple local goto. */
2762
2763 bool
2764 simple_goto_p (gimple *t)
2765 {
2766 return (gimple_code (t) == GIMPLE_GOTO
2767 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2768 }
2769
2770
2771 /* Return true if STMT should start a new basic block. PREV_STMT is
2772 the statement preceding STMT. It is used when STMT is a label or a
2773 case label. Labels should only start a new basic block if their
2774 previous statement wasn't a label. Otherwise, a sequence of labels
2775 would generate unnecessary basic blocks that only contain a single
2776 label. */
2777
2778 static inline bool
2779 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2780 {
2781 if (stmt == NULL)
2782 return false;
2783
2784 /* PREV_STMT is only set to a debug stmt if the debug stmt is before
2785 any nondebug stmts in the block. We don't want to start another
2786 block in this case: the debug stmt will already have started the
2787 one STMT would start if we weren't outputting debug stmts. */
2788 if (prev_stmt && is_gimple_debug (prev_stmt))
2789 return false;
2790
2791 /* Labels start a new basic block only if the preceding statement
2792 wasn't a label of the same type. This prevents the creation of
2793 consecutive blocks that have nothing but a single label. */
2794 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2795 {
2796 /* Nonlocal and computed GOTO targets always start a new block. */
2797 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2798 || FORCED_LABEL (gimple_label_label (label_stmt)))
2799 return true;
2800
2801 if (glabel *plabel = safe_dyn_cast <glabel *> (prev_stmt))
2802 {
2803 if (DECL_NONLOCAL (gimple_label_label (plabel))
2804 || !DECL_ARTIFICIAL (gimple_label_label (plabel)))
2805 return true;
2806
2807 cfg_stats.num_merged_labels++;
2808 return false;
2809 }
2810 else
2811 return true;
2812 }
2813 else if (gimple_code (stmt) == GIMPLE_CALL)
2814 {
2815 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2816 /* setjmp acts similar to a nonlocal GOTO target and thus should
2817 start a new block. */
2818 return true;
2819 if (gimple_call_internal_p (stmt, IFN_PHI)
2820 && prev_stmt
2821 && gimple_code (prev_stmt) != GIMPLE_LABEL
2822 && (gimple_code (prev_stmt) != GIMPLE_CALL
2823 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2824 /* PHI nodes start a new block unless preceded by a label
2825 or another PHI. */
2826 return true;
2827 }
2828
2829 return false;
2830 }
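
/* Illustration (hypothetical statement sequence):

     <artificial label L1>:   starts a block
     <artificial label L2>:   merged into L1's block; bumps the
                              num_merged_labels statistic
     x_1 = 1;
     <user label L3>:         starts a new block, since the preceding
                              statement is not a label

   whereas a FORCED_LABEL or DECL_NONLOCAL label always starts its own
   block regardless of what precedes it.  */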
2831
2832
2833 /* Return true if T should end a basic block. */
2834
2835 bool
2836 stmt_ends_bb_p (gimple *t)
2837 {
2838 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2839 }
2840
2841 /* Remove block annotations and other data structures. */
2842
2843 void
2844 delete_tree_cfg_annotations (struct function *fn)
2845 {
2846 vec_free (label_to_block_map_for_fn (fn));
2847 }
2848
2849 /* Return the virtual PHI in BB, or NULL if there is none. */
2850
2851 gphi *
2852 get_virtual_phi (basic_block bb)
2853 {
2854 for (gphi_iterator gsi = gsi_start_phis (bb);
2855 !gsi_end_p (gsi);
2856 gsi_next (&gsi))
2857 {
2858 gphi *phi = gsi.phi ();
2859
2860 if (virtual_operand_p (PHI_RESULT (phi)))
2861 return phi;
2862 }
2863
2864 return NULL;
2865 }
2866
2867 /* Return the first statement in basic block BB. */
2868
2869 gimple *
2870 first_stmt (basic_block bb)
2871 {
2872 gimple_stmt_iterator i = gsi_start_bb (bb);
2873 gimple *stmt = NULL;
2874
2875 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2876 {
2877 gsi_next (&i);
2878 stmt = NULL;
2879 }
2880 return stmt;
2881 }
2882
2883 /* Return the first non-label statement in basic block BB. */
2884
2885 static gimple *
2886 first_non_label_stmt (basic_block bb)
2887 {
2888 gimple_stmt_iterator i = gsi_start_bb (bb);
2889 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2890 gsi_next (&i);
2891 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2892 }
2893
2894 /* Return the last statement in basic block BB. */
2895
2896 gimple *
2897 last_stmt (basic_block bb)
2898 {
2899 gimple_stmt_iterator i = gsi_last_bb (bb);
2900 gimple *stmt = NULL;
2901
2902 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2903 {
2904 gsi_prev (&i);
2905 stmt = NULL;
2906 }
2907 return stmt;
2908 }
2909
2910 /* Return the last statement of an otherwise empty block. Return NULL
2911 if the block is totally empty, or if it contains more than one
2912 statement. */
2913
2914 gimple *
2915 last_and_only_stmt (basic_block bb)
2916 {
2917 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2918 gimple *last, *prev;
2919
2920 if (gsi_end_p (i))
2921 return NULL;
2922
2923 last = gsi_stmt (i);
2924 gsi_prev_nondebug (&i);
2925 if (gsi_end_p (i))
2926 return last;
2927
2928 /* Empty statements should no longer appear in the instruction stream.
2929 Everything that might have appeared before should be deleted by
2930 remove_useless_stmts, and the optimizers should just gsi_remove
2931 instead of smashing with build_empty_stmt.
2932
2933 Thus the only thing that should appear here in a block containing
2934 one executable statement is a label. */
2935 prev = gsi_stmt (i);
2936 if (gimple_code (prev) == GIMPLE_LABEL)
2937 return last;
2938 else
2939 return NULL;
2940 }
2941
2942 /* Returns the basic block after which the new basic block created
2943 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2944 near its "logical" location. This is of most help to humans looking
2945 at debugging dumps. */
2946
2947 basic_block
2948 split_edge_bb_loc (edge edge_in)
2949 {
2950 basic_block dest = edge_in->dest;
2951 basic_block dest_prev = dest->prev_bb;
2952
2953 if (dest_prev)
2954 {
2955 edge e = find_edge (dest_prev, dest);
2956 if (e && !(e->flags & EDGE_COMPLEX))
2957 return edge_in->src;
2958 }
2959 return dest_prev;
2960 }
2961
2962 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2963 Abort on abnormal edges. */
2964
2965 static basic_block
2966 gimple_split_edge (edge edge_in)
2967 {
2968 basic_block new_bb, after_bb, dest;
2969 edge new_edge, e;
2970
2971 /* Abnormal edges cannot be split. */
2972 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2973
2974 dest = edge_in->dest;
2975
2976 after_bb = split_edge_bb_loc (edge_in);
2977
2978 new_bb = create_empty_bb (after_bb);
2979 new_bb->count = edge_in->count ();
2980
2981 /* We want to avoid re-allocating PHIs when we first
2982 add the fallthru edge from new_bb to dest, but we also
2983 want to avoid changing PHI argument order when
2984 first redirecting edge_in away from dest. The former
2985 would avoid changing PHI argument order only because new
2986 arguments are added last and the redirection then swaps the
2987 last argument back into place by means of an unordered remove.
2988 So hack around things by temporarily removing all PHIs
2989 from the destination during the edge redirection and then
2990 making sure the edges stay in order. */
2991 gimple_seq saved_phis = phi_nodes (dest);
2992 unsigned old_dest_idx = edge_in->dest_idx;
2993 set_phi_nodes (dest, NULL);
2994 new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
2995 e = redirect_edge_and_branch (edge_in, new_bb);
2996 gcc_assert (e == edge_in && new_edge->dest_idx == old_dest_idx);
2997 /* set_phi_nodes sets the BB of the PHI nodes, so do it manually here. */
2998 dest->il.gimple.phi_nodes = saved_phis;
2999
3000 return new_bb;
3001 }
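
/* For instance (hypothetical CFG), splitting the critical edge A->C
   where C also has a predecessor B yields A->N->C with N empty.  Any
   PHI argument in C that was associated with the A->C edge is now
   associated with the N->C edge at the same dest_idx, which is exactly
   what the assertion above checks.  */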
3002
3003
3004 /* Verify properties of the address expression T whose base should be
3005 TREE_ADDRESSABLE if VERIFY_ADDRESSABLE is true. */
3006
3007 static bool
3008 verify_address (tree t, bool verify_addressable)
3009 {
3010 bool old_constant;
3011 bool old_side_effects;
3012 bool new_constant;
3013 bool new_side_effects;
3014
3015 old_constant = TREE_CONSTANT (t);
3016 old_side_effects = TREE_SIDE_EFFECTS (t);
3017
3018 recompute_tree_invariant_for_addr_expr (t);
3019 new_side_effects = TREE_SIDE_EFFECTS (t);
3020 new_constant = TREE_CONSTANT (t);
3021
3022 if (old_constant != new_constant)
3023 {
3024 error ("constant not recomputed when %<ADDR_EXPR%> changed");
3025 return true;
3026 }
3027 if (old_side_effects != new_side_effects)
3028 {
3029 error ("side effects not recomputed when %<ADDR_EXPR%> changed");
3030 return true;
3031 }
3032
3033 tree base = TREE_OPERAND (t, 0);
3034 while (handled_component_p (base))
3035 base = TREE_OPERAND (base, 0);
3036
3037 if (!(VAR_P (base)
3038 || TREE_CODE (base) == PARM_DECL
3039 || TREE_CODE (base) == RESULT_DECL))
3040 return false;
3041
3042 if (verify_addressable && !TREE_ADDRESSABLE (base))
3043 {
3044 error ("address taken but %<TREE_ADDRESSABLE%> bit not set");
3045 return true;
3046 }
3047
3048 return false;
3049 }
3050
3051
3052 /* Verify if EXPR is a valid GIMPLE reference expression. If
3053 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3054 if there is an error, otherwise false. */
3055
3056 static bool
3057 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3058 {
3059 const char *code_name = get_tree_code_name (TREE_CODE (expr));
3060
3061 if (TREE_CODE (expr) == REALPART_EXPR
3062 || TREE_CODE (expr) == IMAGPART_EXPR
3063 || TREE_CODE (expr) == BIT_FIELD_REF)
3064 {
3065 tree op = TREE_OPERAND (expr, 0);
3066 if (!is_gimple_reg_type (TREE_TYPE (expr)))
3067 {
3068 error ("non-scalar %qs", code_name);
3069 return true;
3070 }
3071
3072 if (TREE_CODE (expr) == BIT_FIELD_REF)
3073 {
3074 tree t1 = TREE_OPERAND (expr, 1);
3075 tree t2 = TREE_OPERAND (expr, 2);
3076 poly_uint64 size, bitpos;
3077 if (!poly_int_tree_p (t1, &size)
3078 || !poly_int_tree_p (t2, &bitpos)
3079 || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
3080 || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
3081 {
3082 error ("invalid position or size operand to %qs", code_name);
3083 return true;
3084 }
3085 if (INTEGRAL_TYPE_P (TREE_TYPE (expr))
3086 && maybe_ne (TYPE_PRECISION (TREE_TYPE (expr)), size))
3087 {
3088 error ("integral result type precision does not match "
3089 "field size of %qs", code_name);
3090 return true;
3091 }
3092 else if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3093 && TYPE_MODE (TREE_TYPE (expr)) != BLKmode
3094 && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (expr))),
3095 size))
3096 {
3097 error ("mode size of non-integral result does not "
3098 "match field size of %qs",
3099 code_name);
3100 return true;
3101 }
3102 if (INTEGRAL_TYPE_P (TREE_TYPE (op))
3103 && !type_has_mode_precision_p (TREE_TYPE (op)))
3104 {
3105 error ("%qs of non-mode-precision operand", code_name);
3106 return true;
3107 }
3108 if (!AGGREGATE_TYPE_P (TREE_TYPE (op))
3109 && maybe_gt (size + bitpos,
3110 tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (op)))))
3111 {
3112 error ("position plus size exceeds size of referenced object in "
3113 "%qs", code_name);
3114 return true;
3115 }
3116 }
3117
3118 if ((TREE_CODE (expr) == REALPART_EXPR
3119 || TREE_CODE (expr) == IMAGPART_EXPR)
3120 && !useless_type_conversion_p (TREE_TYPE (expr),
3121 TREE_TYPE (TREE_TYPE (op))))
3122 {
3123 error ("type mismatch in %qs reference", code_name);
3124 debug_generic_stmt (TREE_TYPE (expr));
3125 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3126 return true;
3127 }
3128 expr = op;
3129 }
3130
3131 while (handled_component_p (expr))
3132 {
3133 code_name = get_tree_code_name (TREE_CODE (expr));
3134
3135 if (TREE_CODE (expr) == REALPART_EXPR
3136 || TREE_CODE (expr) == IMAGPART_EXPR
3137 || TREE_CODE (expr) == BIT_FIELD_REF)
3138 {
3139 error ("non-top-level %qs", code_name);
3140 return true;
3141 }
3142
3143 tree op = TREE_OPERAND (expr, 0);
3144
3145 if (TREE_CODE (expr) == ARRAY_REF
3146 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3147 {
3148 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3149 || (TREE_OPERAND (expr, 2)
3150 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3151 || (TREE_OPERAND (expr, 3)
3152 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3153 {
3154 error ("invalid operands to %qs", code_name);
3155 debug_generic_stmt (expr);
3156 return true;
3157 }
3158 }
3159
3160 /* Verify if the reference array element types are compatible. */
3161 if (TREE_CODE (expr) == ARRAY_REF
3162 && !useless_type_conversion_p (TREE_TYPE (expr),
3163 TREE_TYPE (TREE_TYPE (op))))
3164 {
3165 error ("type mismatch in %qs", code_name);
3166 debug_generic_stmt (TREE_TYPE (expr));
3167 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3168 return true;
3169 }
3170 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3171 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3172 TREE_TYPE (TREE_TYPE (op))))
3173 {
3174 error ("type mismatch in %qs", code_name);
3175 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3176 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3177 return true;
3178 }
3179
3180 if (TREE_CODE (expr) == COMPONENT_REF)
3181 {
3182 if (TREE_OPERAND (expr, 2)
3183 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3184 {
3185 error ("invalid %qs offset operator", code_name);
3186 return true;
3187 }
3188 if (!useless_type_conversion_p (TREE_TYPE (expr),
3189 TREE_TYPE (TREE_OPERAND (expr, 1))))
3190 {
3191 error ("type mismatch in %qs", code_name);
3192 debug_generic_stmt (TREE_TYPE (expr));
3193 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3194 return true;
3195 }
3196 }
3197
3198 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3199 {
3200 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3201 that their operand is not an SSA name or an invariant when
3202 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3203 bug). Otherwise there is nothing to verify, gross mismatches at
3204 most invoke undefined behavior. */
3205 if (require_lvalue
3206 && (TREE_CODE (op) == SSA_NAME
3207 || is_gimple_min_invariant (op)))
3208 {
3209 error ("conversion of %qs on the left hand side of %qs",
3210 get_tree_code_name (TREE_CODE (op)), code_name);
3211 debug_generic_stmt (expr);
3212 return true;
3213 }
3214 else if (TREE_CODE (op) == SSA_NAME
3215 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3216 {
3217 error ("conversion of register to a different size in %qs",
3218 code_name);
3219 debug_generic_stmt (expr);
3220 return true;
3221 }
3222 else if (!handled_component_p (op))
3223 return false;
3224 }
3225
3226 expr = op;
3227 }
3228
3229 code_name = get_tree_code_name (TREE_CODE (expr));
3230
3231 if (TREE_CODE (expr) == MEM_REF)
3232 {
3233 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0))
3234 || (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
3235 && verify_address (TREE_OPERAND (expr, 0), false)))
3236 {
3237 error ("invalid address operand in %qs", code_name);
3238 debug_generic_stmt (expr);
3239 return true;
3240 }
3241 if (!poly_int_tree_p (TREE_OPERAND (expr, 1))
3242 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3243 {
3244 error ("invalid offset operand in %qs", code_name);
3245 debug_generic_stmt (expr);
3246 return true;
3247 }
3248 if (MR_DEPENDENCE_CLIQUE (expr) != 0
3249 && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
3250 {
3251 error ("invalid clique in %qs", code_name);
3252 debug_generic_stmt (expr);
3253 return true;
3254 }
3255 }
3256 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3257 {
3258 if (!TMR_BASE (expr)
3259 || !is_gimple_mem_ref_addr (TMR_BASE (expr))
3260 || (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
3261 && verify_address (TMR_BASE (expr), false)))
3262 {
3263 error ("invalid address operand in %qs", code_name);
3264 return true;
3265 }
3266 if (!TMR_OFFSET (expr)
3267 || !poly_int_tree_p (TMR_OFFSET (expr))
3268 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3269 {
3270 error ("invalid offset operand in %qs", code_name);
3271 debug_generic_stmt (expr);
3272 return true;
3273 }
3274 if (MR_DEPENDENCE_CLIQUE (expr) != 0
3275 && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
3276 {
3277 error ("invalid clique in %qs", code_name);
3278 debug_generic_stmt (expr);
3279 return true;
3280 }
3281 }
3282 else if (TREE_CODE (expr) == INDIRECT_REF)
3283 {
3284 error ("%qs in gimple IL", code_name);
3285 debug_generic_stmt (expr);
3286 return true;
3287 }
3288
3289 if (!require_lvalue
3290 && (TREE_CODE (expr) == SSA_NAME || is_gimple_min_invariant (expr)))
3291 return false;
3292
3293 if (TREE_CODE (expr) != SSA_NAME && is_gimple_id (expr))
3294 return false;
3295
3296 if (TREE_CODE (expr) != TARGET_MEM_REF
3297 && TREE_CODE (expr) != MEM_REF)
3298 {
3299 error ("invalid expression for min lvalue");
3300 return true;
3301 }
3302
3303 return false;
3304 }
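
/* Hypothetical examples for the MEM_REF checks above:

     MEM_REF[&a, 4]      OK: the address operand is an ADDR_EXPR and
                         the offset is a pointer-typed constant
     MEM_REF[p_1, i_2]   invalid: the offset operand must be a
                         poly-int constant, not an SSA name
     INDIRECT_REF <p_1>  invalid: INDIRECT_REF never appears in gimple
                         IL; such dereferences are MEM_REFs instead  */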
3305
3306 /* Returns true if there is one pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3307 list of pointer-to types that is trivially convertible to DEST. */
3308
3309 static bool
3310 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3311 {
3312 tree src;
3313
3314 if (!TYPE_POINTER_TO (src_obj))
3315 return true;
3316
3317 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3318 if (useless_type_conversion_p (dest, src))
3319 return true;
3320
3321 return false;
3322 }
3323
3324 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3325 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3326
3327 static bool
3328 valid_fixed_convert_types_p (tree type1, tree type2)
3329 {
3330 return (FIXED_POINT_TYPE_P (type1)
3331 && (INTEGRAL_TYPE_P (type2)
3332 || SCALAR_FLOAT_TYPE_P (type2)
3333 || FIXED_POINT_TYPE_P (type2)));
3334 }
3335
3336 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3337 is a problem, otherwise false. */
3338
3339 static bool
3340 verify_gimple_call (gcall *stmt)
3341 {
3342 tree fn = gimple_call_fn (stmt);
3343 tree fntype, fndecl;
3344 unsigned i;
3345
3346 if (gimple_call_internal_p (stmt))
3347 {
3348 if (fn)
3349 {
3350 error ("gimple call has two targets");
3351 debug_generic_stmt (fn);
3352 return true;
3353 }
3354 }
3355 else
3356 {
3357 if (!fn)
3358 {
3359 error ("gimple call has no target");
3360 return true;
3361 }
3362 }
3363
3364 if (fn && !is_gimple_call_addr (fn))
3365 {
3366 error ("invalid function in gimple call");
3367 debug_generic_stmt (fn);
3368 return true;
3369 }
3370
3371 if (fn
3372 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3373 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3374 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3375 {
3376 error ("non-function in gimple call");
3377 return true;
3378 }
3379
3380 fndecl = gimple_call_fndecl (stmt);
3381 if (fndecl
3382 && TREE_CODE (fndecl) == FUNCTION_DECL
3383 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3384 && !DECL_PURE_P (fndecl)
3385 && !TREE_READONLY (fndecl))
3386 {
3387 error ("invalid pure const state for function");
3388 return true;
3389 }
3390
3391 tree lhs = gimple_call_lhs (stmt);
3392 if (lhs
3393 && (!is_gimple_reg (lhs)
3394 && (!is_gimple_lvalue (lhs)
3395 || verify_types_in_gimple_reference
3396 (TREE_CODE (lhs) == WITH_SIZE_EXPR
3397 ? TREE_OPERAND (lhs, 0) : lhs, true))))
3398 {
3399 error ("invalid LHS in gimple call");
3400 return true;
3401 }
3402
3403 if (gimple_call_ctrl_altering_p (stmt)
3404 && gimple_call_noreturn_p (stmt)
3405 && should_remove_lhs_p (lhs))
3406 {
3407 error ("LHS in %<noreturn%> call");
3408 return true;
3409 }
3410
3411 fntype = gimple_call_fntype (stmt);
3412 if (fntype
3413 && lhs
3414 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3415 /* ??? At least C++ misses conversions at assignments from
3416 void * call results.
3417 For now simply allow arbitrary pointer type conversions. */
3418 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3419 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3420 {
3421 error ("invalid conversion in gimple call");
3422 debug_generic_stmt (TREE_TYPE (lhs));
3423 debug_generic_stmt (TREE_TYPE (fntype));
3424 return true;
3425 }
3426
3427 if (gimple_call_chain (stmt)
3428 && !is_gimple_val (gimple_call_chain (stmt)))
3429 {
3430 error ("invalid static chain in gimple call");
3431 debug_generic_stmt (gimple_call_chain (stmt));
3432 return true;
3433 }
3434
3435 /* If there is a static chain argument, the call should either be
3436 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3437 if (gimple_call_chain (stmt)
3438 && fndecl
3439 && !DECL_STATIC_CHAIN (fndecl))
3440 {
3441 error ("static chain with function that doesn%'t use one");
3442 return true;
3443 }
3444
3445 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3446 {
3447 switch (DECL_FUNCTION_CODE (fndecl))
3448 {
3449 case BUILT_IN_UNREACHABLE:
3450 case BUILT_IN_TRAP:
3451 if (gimple_call_num_args (stmt) > 0)
3452 {
3453 /* Built-in unreachable with parameters might not be caught by
3454 undefined behavior sanitizer. Front ends do check that users do
3455 not call them that way, but we also produce calls to
3456 __builtin_unreachable internally, for example when IPA figures
3457 out a call cannot happen in a legal program. In such cases,
3458 we must make sure arguments are stripped off. */
3459 error ("%<__builtin_unreachable%> or %<__builtin_trap%> call "
3460 "with arguments");
3461 return true;
3462 }
3463 break;
3464 default:
3465 break;
3466 }
3467 }
3468
3469 /* For a call to .DEFERRED_INIT,
3470 LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL)
3471 we should guarantee that when the 1st argument is a constant, it is
3472 the same as the size of the LHS. */
3473
3474 if (gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
3475 {
3476 tree size_of_arg0 = gimple_call_arg (stmt, 0);
3477 tree size_of_lhs = TYPE_SIZE_UNIT (TREE_TYPE (lhs));
3478
3479 if (TREE_CODE (lhs) == SSA_NAME)
3480 lhs = SSA_NAME_VAR (lhs);
3481
3482 poly_uint64 size_from_arg0, size_from_lhs;
3483 bool is_constant_size_arg0 = poly_int_tree_p (size_of_arg0,
3484 &size_from_arg0);
3485 bool is_constant_size_lhs = poly_int_tree_p (size_of_lhs,
3486 &size_from_lhs);
3487 if (is_constant_size_arg0 && is_constant_size_lhs)
3488 if (maybe_ne (size_from_arg0, size_from_lhs))
3489 {
3490 error ("%<DEFERRED_INIT%> calls should have same "
3491 "constant size for the first argument and LHS");
3492 return true;
3493 }
3494 }
3495
3496 /* ??? The C frontend passes unpromoted arguments in case it
3497 didn't see a function declaration before the call. So for now
3498 leave the call arguments mostly unverified. Once we gimplify
3499 unit-at-a-time we have a chance to fix this. */
3500 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3501 {
3502 tree arg = gimple_call_arg (stmt, i);
3503 if ((is_gimple_reg_type (TREE_TYPE (arg))
3504 && !is_gimple_val (arg))
3505 || (!is_gimple_reg_type (TREE_TYPE (arg))
3506 && !is_gimple_lvalue (arg)))
3507 {
3508 error ("invalid argument to gimple call");
3509 debug_generic_expr (arg);
3510 return true;
3511 }
3512 if (!is_gimple_reg (arg))
3513 {
3514 if (TREE_CODE (arg) == WITH_SIZE_EXPR)
3515 arg = TREE_OPERAND (arg, 0);
3516 if (verify_types_in_gimple_reference (arg, false))
3517 return true;
3518 }
3519 }
3520
3521 return false;
3522 }
3523
3524 /* Verifies the gimple comparison with the result type TYPE and
3525 the operands OP0 and OP1; the comparison code is CODE. */
3526
3527 static bool
3528 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3529 {
3530 tree op0_type = TREE_TYPE (op0);
3531 tree op1_type = TREE_TYPE (op1);
3532
3533 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3534 {
3535 error ("invalid operands in gimple comparison");
3536 return true;
3537 }
3538
3539 /* For comparisons we do not have the operation's type as the
3540 effective type the comparison is carried out in. Instead
3541 we require that either the first operand is trivially
3542 convertible into the second, or the other way around. */
3543 if (!useless_type_conversion_p (op0_type, op1_type)
3544 && !useless_type_conversion_p (op1_type, op0_type))
3545 {
3546 error ("mismatching comparison operand types");
3547 debug_generic_expr (op0_type);
3548 debug_generic_expr (op1_type);
3549 return true;
3550 }
3551
3552 /* The resulting type of a comparison may be an effective boolean type. */
3553 if (INTEGRAL_TYPE_P (type)
3554 && (TREE_CODE (type) == BOOLEAN_TYPE
3555 || TYPE_PRECISION (type) == 1))
3556 {
3557 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3558 || TREE_CODE (op1_type) == VECTOR_TYPE)
3559 && code != EQ_EXPR && code != NE_EXPR
3560 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3561 && !VECTOR_INTEGER_TYPE_P (op0_type))
3562 {
3563 error ("unsupported operation or type for vector comparison"
3564 " returning a boolean");
3565 debug_generic_expr (op0_type);
3566 debug_generic_expr (op1_type);
3567 return true;
3568 }
3569 }
3570 /* Or a boolean vector type with the same element count
3571 as the comparison operand types. */
3572 else if (TREE_CODE (type) == VECTOR_TYPE
3573 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3574 {
3575 if (TREE_CODE (op0_type) != VECTOR_TYPE
3576 || TREE_CODE (op1_type) != VECTOR_TYPE)
3577 {
3578 error ("non-vector operands in vector comparison");
3579 debug_generic_expr (op0_type);
3580 debug_generic_expr (op1_type);
3581 return true;
3582 }
3583
3584 if (maybe_ne (TYPE_VECTOR_SUBPARTS (type),
3585 TYPE_VECTOR_SUBPARTS (op0_type)))
3586 {
3587 error ("invalid vector comparison resulting type");
3588 debug_generic_expr (type);
3589 return true;
3590 }
3591 }
3592 else
3593 {
3594 error ("bogus comparison result type");
3595 debug_generic_expr (type);
3596 return true;
3597 }
3598
3599 return false;
3600 }
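
/* Hypothetical examples of result types the checks above accept or
   reject:

     _Bool b_1 = a_2 == c_3;          OK: effective boolean result
     vector(4) <signed-boolean:32>
       m_1 = v_2 < v_3;               OK: boolean vector result with
                                      the same number of subparts
     int i_1 = v_2 < v_3;             rejected for relational codes
                                      when the operands are e.g. float
                                      vectors rather than integer
                                      vectors  */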
3601
3602 /* Verify a gimple assignment statement STMT with a unary rhs.
3603 Returns true if anything is wrong. */
3604
3605 static bool
3606 verify_gimple_assign_unary (gassign *stmt)
3607 {
3608 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3609 tree lhs = gimple_assign_lhs (stmt);
3610 tree lhs_type = TREE_TYPE (lhs);
3611 tree rhs1 = gimple_assign_rhs1 (stmt);
3612 tree rhs1_type = TREE_TYPE (rhs1);
3613
3614 if (!is_gimple_reg (lhs))
3615 {
3616 error ("non-register as LHS of unary operation");
3617 return true;
3618 }
3619
3620 if (!is_gimple_val (rhs1))
3621 {
3622 error ("invalid operand in unary operation");
3623 return true;
3624 }
3625
3626 const char* const code_name = get_tree_code_name (rhs_code);
3627
3628 /* First handle conversions. */
3629 switch (rhs_code)
3630 {
3631 CASE_CONVERT:
3632 {
3633 /* Allow conversions between vectors with the same number of elements,
3634 provided that the conversion is OK for the element types too. */
3635 if (VECTOR_TYPE_P (lhs_type)
3636 && VECTOR_TYPE_P (rhs1_type)
3637 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
3638 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3639 {
3640 lhs_type = TREE_TYPE (lhs_type);
3641 rhs1_type = TREE_TYPE (rhs1_type);
3642 }
3643 else if (VECTOR_TYPE_P (lhs_type) || VECTOR_TYPE_P (rhs1_type))
3644 {
3645 error ("invalid vector types in nop conversion");
3646 debug_generic_expr (lhs_type);
3647 debug_generic_expr (rhs1_type);
3648 return true;
3649 }
3650
3651 /* Allow conversions from pointer type to integral type only if
3652 there is no sign or zero extension involved.
3653 For targets where the precision of ptrofftype doesn't match that
3654 of pointers we allow conversions to types where
3655 POINTERS_EXTEND_UNSIGNED specifies how that works. */
3656 if ((POINTER_TYPE_P (lhs_type)
3657 && INTEGRAL_TYPE_P (rhs1_type))
3658 || (POINTER_TYPE_P (rhs1_type)
3659 && INTEGRAL_TYPE_P (lhs_type)
3660 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3661 #if defined(POINTERS_EXTEND_UNSIGNED)
3662 || (TYPE_MODE (rhs1_type) == ptr_mode
3663 && (TYPE_PRECISION (lhs_type)
3664 == BITS_PER_WORD /* word_mode */
3665 || (TYPE_PRECISION (lhs_type)
3666 == GET_MODE_PRECISION (Pmode))))
3667 #endif
3668 )))
3669 return false;
3670
3671 /* Allow conversion from integral to offset type and vice versa. */
3672 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3673 && INTEGRAL_TYPE_P (rhs1_type))
3674 || (INTEGRAL_TYPE_P (lhs_type)
3675 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3676 return false;
3677
3678 /* Otherwise assert we are converting between types of the
3679 same kind. */
3680 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3681 {
3682 error ("invalid types in nop conversion");
3683 debug_generic_expr (lhs_type);
3684 debug_generic_expr (rhs1_type);
3685 return true;
3686 }
3687
3688 return false;
3689 }
3690
3691 case ADDR_SPACE_CONVERT_EXPR:
3692 {
3693 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3694 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3695 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3696 {
3697 error ("invalid types in address space conversion");
3698 debug_generic_expr (lhs_type);
3699 debug_generic_expr (rhs1_type);
3700 return true;
3701 }
3702
3703 return false;
3704 }
3705
3706 case FIXED_CONVERT_EXPR:
3707 {
3708 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3709 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3710 {
3711 error ("invalid types in fixed-point conversion");
3712 debug_generic_expr (lhs_type);
3713 debug_generic_expr (rhs1_type);
3714 return true;
3715 }
3716
3717 return false;
3718 }
3719
3720 case FLOAT_EXPR:
3721 {
3722 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3723 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3724 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3725 {
3726 error ("invalid types in conversion to floating-point");
3727 debug_generic_expr (lhs_type);
3728 debug_generic_expr (rhs1_type);
3729 return true;
3730 }
3731
3732 return false;
3733 }
3734
3735 case FIX_TRUNC_EXPR:
3736 {
3737 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3738 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3739 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3740 {
3741 error ("invalid types in conversion to integer");
3742 debug_generic_expr (lhs_type);
3743 debug_generic_expr (rhs1_type);
3744 return true;
3745 }
3746
3747 return false;
3748 }
3749
3750 case VEC_UNPACK_HI_EXPR:
3751 case VEC_UNPACK_LO_EXPR:
3752 case VEC_UNPACK_FLOAT_HI_EXPR:
3753 case VEC_UNPACK_FLOAT_LO_EXPR:
3754 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3755 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3756 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3757 || TREE_CODE (lhs_type) != VECTOR_TYPE
3758 || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3759 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type)))
3760 || (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3761 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3762 || ((rhs_code == VEC_UNPACK_HI_EXPR
3763 || rhs_code == VEC_UNPACK_LO_EXPR)
3764 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3765 != INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3766 || ((rhs_code == VEC_UNPACK_FLOAT_HI_EXPR
3767 || rhs_code == VEC_UNPACK_FLOAT_LO_EXPR)
3768 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3769 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))))
3770 || ((rhs_code == VEC_UNPACK_FIX_TRUNC_HI_EXPR
3771 || rhs_code == VEC_UNPACK_FIX_TRUNC_LO_EXPR)
3772 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3773 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))))
3774 || (maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
3775 2 * GET_MODE_SIZE (element_mode (rhs1_type)))
3776 && (!VECTOR_BOOLEAN_TYPE_P (lhs_type)
3777 || !VECTOR_BOOLEAN_TYPE_P (rhs1_type)))
3778 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (lhs_type),
3779 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3780 {
3781 error ("type mismatch in %qs expression", code_name);
3782 debug_generic_expr (lhs_type);
3783 debug_generic_expr (rhs1_type);
3784 return true;
3785 }
3786
3787 return false;
3788
3789 case NEGATE_EXPR:
3790 case ABS_EXPR:
3791 case BIT_NOT_EXPR:
3792 case PAREN_EXPR:
3793 case CONJ_EXPR:
3794 /* Disallow pointer and offset types for many of the unary gimple codes. */
3795 if (POINTER_TYPE_P (lhs_type)
3796 || TREE_CODE (lhs_type) == OFFSET_TYPE)
3797 {
3798 error ("invalid types for %qs", code_name);
3799 debug_generic_expr (lhs_type);
3800 debug_generic_expr (rhs1_type);
3801 return true;
3802 }
3803 break;
3804
3805 case ABSU_EXPR:
3806 if (!ANY_INTEGRAL_TYPE_P (lhs_type)
3807 || !TYPE_UNSIGNED (lhs_type)
3808 || !ANY_INTEGRAL_TYPE_P (rhs1_type)
3809 || TYPE_UNSIGNED (rhs1_type)
3810 || element_precision (lhs_type) != element_precision (rhs1_type))
3811 {
3812 error ("invalid types for %qs", code_name);
3813 debug_generic_expr (lhs_type);
3814 debug_generic_expr (rhs1_type);
3815 return true;
3816 }
3817 return false;
3818
3819 case VEC_DUPLICATE_EXPR:
3820 if (TREE_CODE (lhs_type) != VECTOR_TYPE
3821 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
3822 {
3823 error ("%qs should be from a scalar to a like vector", code_name);
3824 debug_generic_expr (lhs_type);
3825 debug_generic_expr (rhs1_type);
3826 return true;
3827 }
3828 return false;
3829
3830 default:
3831 gcc_unreachable ();
3832 }
3833
3834 /* For the remaining codes assert there is no conversion involved. */
3835 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3836 {
3837 error ("non-trivial conversion in unary operation");
3838 debug_generic_expr (lhs_type);
3839 debug_generic_expr (rhs1_type);
3840 return true;
3841 }
3842
3843 return false;
3844 }
3845
3846 /* Verify a gimple assignment statement STMT with a binary rhs.
3847 Returns true if anything is wrong. */
3848
3849 static bool
3850 verify_gimple_assign_binary (gassign *stmt)
3851 {
3852 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3853 tree lhs = gimple_assign_lhs (stmt);
3854 tree lhs_type = TREE_TYPE (lhs);
3855 tree rhs1 = gimple_assign_rhs1 (stmt);
3856 tree rhs1_type = TREE_TYPE (rhs1);
3857 tree rhs2 = gimple_assign_rhs2 (stmt);
3858 tree rhs2_type = TREE_TYPE (rhs2);
3859
3860 if (!is_gimple_reg (lhs))
3861 {
3862 error ("non-register as LHS of binary operation");
3863 return true;
3864 }
3865
3866 if (!is_gimple_val (rhs1)
3867 || !is_gimple_val (rhs2))
3868 {
3869 error ("invalid operands in binary operation");
3870 return true;
3871 }
3872
3873 const char* const code_name = get_tree_code_name (rhs_code);
3874
3875 /* First handle operations that involve different types. */
3876 switch (rhs_code)
3877 {
3878 case COMPLEX_EXPR:
3879 {
3880 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3881 || !(INTEGRAL_TYPE_P (rhs1_type)
3882 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3883 || !(INTEGRAL_TYPE_P (rhs2_type)
3884 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3885 {
3886 error ("type mismatch in %qs", code_name);
3887 debug_generic_expr (lhs_type);
3888 debug_generic_expr (rhs1_type);
3889 debug_generic_expr (rhs2_type);
3890 return true;
3891 }
3892
3893 return false;
3894 }
3895
3896 case LSHIFT_EXPR:
3897 case RSHIFT_EXPR:
3898 case LROTATE_EXPR:
3899 case RROTATE_EXPR:
3900 {
3901 /* Shifts and rotates are ok on integral types, fixed point
3902 types and integer vector types. */
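/* For example, i_2 = i_1 << 3 and v4_2 = v4_1 << 3 are accepted,
   as is v4_2 = v4_1 << v4_3 (a vector shifted element-wise by a
   vector of counts); shifting a float is rejected below.  */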
3903 if ((!INTEGRAL_TYPE_P (rhs1_type)
3904 && !FIXED_POINT_TYPE_P (rhs1_type)
3905 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3906 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3907 || (!INTEGRAL_TYPE_P (rhs2_type)
3908 /* Vector shifts of vectors are also ok. */
3909 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3910 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3911 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3912 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3913 || !useless_type_conversion_p (lhs_type, rhs1_type))
3914 {
3915 error ("type mismatch in %qs", code_name);
3916 debug_generic_expr (lhs_type);
3917 debug_generic_expr (rhs1_type);
3918 debug_generic_expr (rhs2_type);
3919 return true;
3920 }
3921
3922 return false;
3923 }
3924
3925 case WIDEN_LSHIFT_EXPR:
3926 {
3927 if (!INTEGRAL_TYPE_P (lhs_type)
3928 || !INTEGRAL_TYPE_P (rhs1_type)
3929 || TREE_CODE (rhs2) != INTEGER_CST
3930 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3931 {
3932 error ("type mismatch in %qs", code_name);
3933 debug_generic_expr (lhs_type);
3934 debug_generic_expr (rhs1_type);
3935 debug_generic_expr (rhs2_type);
3936 return true;
3937 }
3938
3939 return false;
3940 }
3941
3942 case VEC_WIDEN_LSHIFT_HI_EXPR:
3943 case VEC_WIDEN_LSHIFT_LO_EXPR:
3944 {
3945 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3946 || TREE_CODE (lhs_type) != VECTOR_TYPE
3947 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3948 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3949 || TREE_CODE (rhs2) != INTEGER_CST
3950 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3951 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3952 {
3953 error ("type mismatch in %qs", code_name);
3954 debug_generic_expr (lhs_type);
3955 debug_generic_expr (rhs1_type);
3956 debug_generic_expr (rhs2_type);
3957 return true;
3958 }
3959
3960 return false;
3961 }
3962
3963 case WIDEN_PLUS_EXPR:
3964 case WIDEN_MINUS_EXPR:
3965 case PLUS_EXPR:
3966 case MINUS_EXPR:
3967 {
3968 tree lhs_etype = lhs_type;
3969 tree rhs1_etype = rhs1_type;
3970 tree rhs2_etype = rhs2_type;
3971 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3972 {
3973 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3974 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3975 {
3976 error ("invalid non-vector operands to %qs", code_name);
3977 return true;
3978 }
3979 lhs_etype = TREE_TYPE (lhs_type);
3980 rhs1_etype = TREE_TYPE (rhs1_type);
3981 rhs2_etype = TREE_TYPE (rhs2_type);
3982 }
3983 if (POINTER_TYPE_P (lhs_etype)
3984 || POINTER_TYPE_P (rhs1_etype)
3985 || POINTER_TYPE_P (rhs2_etype))
3986 {
3987 error ("invalid (pointer) operands %qs", code_name);
3988 return true;
3989 }
3990
3991 /* Continue with generic binary expression handling. */
3992 break;
3993 }
3994
3995 case POINTER_PLUS_EXPR:
3996 {
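/* The canonical form is ptr_2 = ptr_1 + off_3, where off_3 has
   ptrofftype (sizetype); the offset is never a pointer and the
   result type must match the pointer operand.  */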
3997 if (!POINTER_TYPE_P (rhs1_type)
3998 || !useless_type_conversion_p (lhs_type, rhs1_type)
3999 || !ptrofftype_p (rhs2_type))
4000 {
4001 error ("type mismatch in %qs", code_name);
4002 debug_generic_stmt (lhs_type);
4003 debug_generic_stmt (rhs1_type);
4004 debug_generic_stmt (rhs2_type);
4005 return true;
4006 }
4007
4008 return false;
4009 }
4010
4011 case POINTER_DIFF_EXPR:
4012 {
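/* The canonical form is d_3 = p_1 - q_2, where p_1 and q_2 are
   pointers of the same mode and d_3 is a signed integer of the
   pointers' precision, e.g. ptrdiff_t.  */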
4013 if (!POINTER_TYPE_P (rhs1_type)
4014 || !POINTER_TYPE_P (rhs2_type)
4015 /* Because we special-case pointers to void, we allow the difference
4016 of arbitrary pointers with the same mode. */
4017 || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)
4018 || !INTEGRAL_TYPE_P (lhs_type)
4019 || TYPE_UNSIGNED (lhs_type)
4020 || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
4021 {
4022 error ("type mismatch in %qs", code_name);
4023 debug_generic_stmt (lhs_type);
4024 debug_generic_stmt (rhs1_type);
4025 debug_generic_stmt (rhs2_type);
4026 return true;
4027 }
4028
4029 return false;
4030 }
4031
4032 case TRUTH_ANDIF_EXPR:
4033 case TRUTH_ORIF_EXPR:
4034 case TRUTH_AND_EXPR:
4035 case TRUTH_OR_EXPR:
4036 case TRUTH_XOR_EXPR:
4037
4038 gcc_unreachable ();
4039
4040 case LT_EXPR:
4041 case LE_EXPR:
4042 case GT_EXPR:
4043 case GE_EXPR:
4044 case EQ_EXPR:
4045 case NE_EXPR:
4046 case UNORDERED_EXPR:
4047 case ORDERED_EXPR:
4048 case UNLT_EXPR:
4049 case UNLE_EXPR:
4050 case UNGT_EXPR:
4051 case UNGE_EXPR:
4052 case UNEQ_EXPR:
4053 case LTGT_EXPR:
4054 /* Comparisons are also binary, but the result type is not
4055 connected to the operand types. */
4056 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
4057
4058 case WIDEN_MULT_EXPR:
4059 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
4060 return true;
4061 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
4062 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
4063
4064 case WIDEN_SUM_EXPR:
4065 {
4066 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4067 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4068 && ((!INTEGRAL_TYPE_P (rhs1_type)
4069 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4070 || (!INTEGRAL_TYPE_P (lhs_type)
4071 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4072 || !useless_type_conversion_p (lhs_type, rhs2_type)
4073 || maybe_lt (GET_MODE_SIZE (element_mode (rhs2_type)),
4074 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4075 {
4076 error ("type mismatch in %qs", code_name);
4077 debug_generic_expr (lhs_type);
4078 debug_generic_expr (rhs1_type);
4079 debug_generic_expr (rhs2_type);
4080 return true;
4081 }
4082 return false;
4083 }
4084
4085 case VEC_WIDEN_MINUS_HI_EXPR:
4086 case VEC_WIDEN_MINUS_LO_EXPR:
4087 case VEC_WIDEN_PLUS_HI_EXPR:
4088 case VEC_WIDEN_PLUS_LO_EXPR:
4089 case VEC_WIDEN_MULT_HI_EXPR:
4090 case VEC_WIDEN_MULT_LO_EXPR:
4091 case VEC_WIDEN_MULT_EVEN_EXPR:
4092 case VEC_WIDEN_MULT_ODD_EXPR:
4093 {
4094 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4095 || TREE_CODE (lhs_type) != VECTOR_TYPE
4096 || !types_compatible_p (rhs1_type, rhs2_type)
4097 || maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
4098 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4099 {
4100 error ("type mismatch in %qs", code_name);
4101 debug_generic_expr (lhs_type);
4102 debug_generic_expr (rhs1_type);
4103 debug_generic_expr (rhs2_type);
4104 return true;
4105 }
4106 return false;
4107 }
4108
4109 case VEC_PACK_TRUNC_EXPR:
4110 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concat
4111 vector boolean types. */
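/* E.g. two 4-element boolean masks may be concatenated into one
   8-element mask; the subparts check below enforces that the
   element counts add up.  */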
4112 if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
4113 && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4114 && types_compatible_p (rhs1_type, rhs2_type)
4115 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
4116 2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
4117 return false;
4118
4119 /* Fallthru. */
4120 case VEC_PACK_SAT_EXPR:
4121 case VEC_PACK_FIX_TRUNC_EXPR:
4122 {
4123 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4124 || TREE_CODE (lhs_type) != VECTOR_TYPE
4125 || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
4126 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
4127 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
4128 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4129 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
4130 || !types_compatible_p (rhs1_type, rhs2_type)
4131 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4132 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4133 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4134 TYPE_VECTOR_SUBPARTS (lhs_type)))
4135 {
4136 error ("type mismatch in %qs", code_name);
4137 debug_generic_expr (lhs_type);
4138 debug_generic_expr (rhs1_type);
4139 debug_generic_expr (rhs2_type);
4140 return true;
4141 }
4142
4143 return false;
4144 }
4145
4146 case VEC_PACK_FLOAT_EXPR:
4147 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4148 || TREE_CODE (lhs_type) != VECTOR_TYPE
4149 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4150 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))
4151 || !types_compatible_p (rhs1_type, rhs2_type)
4152 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4153 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4154 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4155 TYPE_VECTOR_SUBPARTS (lhs_type)))
4156 {
4157 error ("type mismatch in %qs", code_name);
4158 debug_generic_expr (lhs_type);
4159 debug_generic_expr (rhs1_type);
4160 debug_generic_expr (rhs2_type);
4161 return true;
4162 }
4163
4164 return false;
4165
4166 case MULT_EXPR:
4167 case MULT_HIGHPART_EXPR:
4168 case TRUNC_DIV_EXPR:
4169 case CEIL_DIV_EXPR:
4170 case FLOOR_DIV_EXPR:
4171 case ROUND_DIV_EXPR:
4172 case TRUNC_MOD_EXPR:
4173 case CEIL_MOD_EXPR:
4174 case FLOOR_MOD_EXPR:
4175 case ROUND_MOD_EXPR:
4176 case RDIV_EXPR:
4177 case EXACT_DIV_EXPR:
4178 /* Disallow pointer and offset types for many of the binary gimple codes. */
4179 if (POINTER_TYPE_P (lhs_type)
4180 || TREE_CODE (lhs_type) == OFFSET_TYPE)
4181 {
4182 error ("invalid types for %qs", code_name);
4183 debug_generic_expr (lhs_type);
4184 debug_generic_expr (rhs1_type);
4185 debug_generic_expr (rhs2_type);
4186 return true;
4187 }
4188 /* Continue with generic binary expression handling. */
4189 break;
4190
4191 case MIN_EXPR:
4192 case MAX_EXPR:
4193 case BIT_IOR_EXPR:
4194 case BIT_XOR_EXPR:
4195 case BIT_AND_EXPR:
4196 /* Continue with generic binary expression handling. */
4197 break;
4198
4199 case VEC_SERIES_EXPR:
4200 if (!useless_type_conversion_p (rhs1_type, rhs2_type))
4201 {
4202 error ("type mismatch in %qs", code_name);
4203 debug_generic_expr (rhs1_type);
4204 debug_generic_expr (rhs2_type);
4205 return true;
4206 }
4207 if (TREE_CODE (lhs_type) != VECTOR_TYPE
4208 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
4209 {
4210 error ("vector type expected in %qs", code_name);
4211 debug_generic_expr (lhs_type);
4212 return true;
4213 }
4214 return false;
4215
4216 default:
4217 gcc_unreachable ();
4218 }
4219
4220 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4221 || !useless_type_conversion_p (lhs_type, rhs2_type))
4222 {
4223 error ("type mismatch in binary expression");
4224 debug_generic_stmt (lhs_type);
4225 debug_generic_stmt (rhs1_type);
4226 debug_generic_stmt (rhs2_type);
4227 return true;
4228 }
4229
4230 return false;
4231 }
4232
4233 /* Verify a gimple assignment statement STMT with a ternary rhs.
4234 Returns true if anything is wrong. */
4235
4236 static bool
4237 verify_gimple_assign_ternary (gassign *stmt)
4238 {
4239 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4240 tree lhs = gimple_assign_lhs (stmt);
4241 tree lhs_type = TREE_TYPE (lhs);
4242 tree rhs1 = gimple_assign_rhs1 (stmt);
4243 tree rhs1_type = TREE_TYPE (rhs1);
4244 tree rhs2 = gimple_assign_rhs2 (stmt);
4245 tree rhs2_type = TREE_TYPE (rhs2);
4246 tree rhs3 = gimple_assign_rhs3 (stmt);
4247 tree rhs3_type = TREE_TYPE (rhs3);
4248
4249 if (!is_gimple_reg (lhs))
4250 {
4251 error ("non-register as LHS of ternary operation");
4252 return true;
4253 }
4254
4255 if ((rhs_code == COND_EXPR
4256 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4257 || !is_gimple_val (rhs2)
4258 || !is_gimple_val (rhs3))
4259 {
4260 error ("invalid operands in ternary operation");
4261 return true;
4262 }
4263
4264 const char* const code_name = get_tree_code_name (rhs_code);
4265
4266 /* First handle operations that involve different types. */
4267 switch (rhs_code)
4268 {
4269 case WIDEN_MULT_PLUS_EXPR:
4270 case WIDEN_MULT_MINUS_EXPR:
4271 if ((!INTEGRAL_TYPE_P (rhs1_type)
4272 && !FIXED_POINT_TYPE_P (rhs1_type))
4273 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4274 || !useless_type_conversion_p (lhs_type, rhs3_type)
4275 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4276 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4277 {
4278 error ("type mismatch in %qs", code_name);
4279 debug_generic_expr (lhs_type);
4280 debug_generic_expr (rhs1_type);
4281 debug_generic_expr (rhs2_type);
4282 debug_generic_expr (rhs3_type);
4283 return true;
4284 }
4285 break;
4286
4287 case VEC_COND_EXPR:
4288 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4289 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4290 TYPE_VECTOR_SUBPARTS (lhs_type)))
4291 {
4292 error ("the first argument of a %qs must be of a "
4293 "boolean vector type of the same number of elements "
4294 "as the result", code_name);
4295 debug_generic_expr (lhs_type);
4296 debug_generic_expr (rhs1_type);
4297 return true;
4298 }
4299 if (!is_gimple_val (rhs1))
4300 return true;
4301 /* Fallthrough. */
4302 case COND_EXPR:
4303 if (!is_gimple_val (rhs1)
4304 && (!is_gimple_condexpr (rhs1)
4305 || verify_gimple_comparison (TREE_TYPE (rhs1),
4306 TREE_OPERAND (rhs1, 0),
4307 TREE_OPERAND (rhs1, 1),
4308 TREE_CODE (rhs1))))
4309 return true;
4310 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4311 || !useless_type_conversion_p (lhs_type, rhs3_type))
4312 {
4313 error ("type mismatch in %qs", code_name);
4314 debug_generic_expr (lhs_type);
4315 debug_generic_expr (rhs2_type);
4316 debug_generic_expr (rhs3_type);
4317 return true;
4318 }
4319 break;
4320
4321 case VEC_PERM_EXPR:
4322 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4323 || !useless_type_conversion_p (lhs_type, rhs2_type))
4324 {
4325 error ("type mismatch in %qs", code_name);
4326 debug_generic_expr (lhs_type);
4327 debug_generic_expr (rhs1_type);
4328 debug_generic_expr (rhs2_type);
4329 debug_generic_expr (rhs3_type);
4330 return true;
4331 }
4332
4333 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4334 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4335 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4336 {
4337 error ("vector types expected in %qs", code_name);
4338 debug_generic_expr (lhs_type);
4339 debug_generic_expr (rhs1_type);
4340 debug_generic_expr (rhs2_type);
4341 debug_generic_expr (rhs3_type);
4342 return true;
4343 }
4344
4345 if (maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4346 TYPE_VECTOR_SUBPARTS (rhs2_type))
4347 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs2_type),
4348 TYPE_VECTOR_SUBPARTS (rhs3_type))
4349 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs3_type),
4350 TYPE_VECTOR_SUBPARTS (lhs_type)))
4351 {
4352 error ("vectors with different element number found in %qs",
4353 code_name);
4354 debug_generic_expr (lhs_type);
4355 debug_generic_expr (rhs1_type);
4356 debug_generic_expr (rhs2_type);
4357 debug_generic_expr (rhs3_type);
4358 return true;
4359 }
4360
4361 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4362 || (TREE_CODE (rhs3) != VECTOR_CST
4363 && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
4364 (TREE_TYPE (rhs3_type)))
4365 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
4366 (TREE_TYPE (rhs1_type))))))
4367 {
4368 error ("invalid mask type in %qs", code_name);
4369 debug_generic_expr (lhs_type);
4370 debug_generic_expr (rhs1_type);
4371 debug_generic_expr (rhs2_type);
4372 debug_generic_expr (rhs3_type);
4373 return true;
4374 }
4375
4376 return false;
4377
4378 case SAD_EXPR:
4379 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4380 || !useless_type_conversion_p (lhs_type, rhs3_type)
4381 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4382 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4383 {
4384 error ("type mismatch in %qs", code_name);
4385 debug_generic_expr (lhs_type);
4386 debug_generic_expr (rhs1_type);
4387 debug_generic_expr (rhs2_type);
4388 debug_generic_expr (rhs3_type);
4389 return true;
4390 }
4391
4392 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4393 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4394 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4395 {
4396 error ("vector types expected in %qs", code_name);
4397 debug_generic_expr (lhs_type);
4398 debug_generic_expr (rhs1_type);
4399 debug_generic_expr (rhs2_type);
4400 debug_generic_expr (rhs3_type);
4401 return true;
4402 }
4403
4404 return false;
4405
4406 case BIT_INSERT_EXPR:
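/* BIT_INSERT_EXPR <container, value, bitpos> replaces bits of the
   container: a bit-field insert into an integer, a single element
   insert into a vector, or an aligned sub-vector insert.  The
   checks below enforce exactly those forms.  */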
4407 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4408 {
4409 error ("type mismatch in %qs", code_name);
4410 debug_generic_expr (lhs_type);
4411 debug_generic_expr (rhs1_type);
4412 return true;
4413 }
4414 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4415 && INTEGRAL_TYPE_P (rhs2_type))
4416 /* Vector element insert. */
4417 || (VECTOR_TYPE_P (rhs1_type)
4418 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))
4419 /* Aligned sub-vector insert. */
4420 || (VECTOR_TYPE_P (rhs1_type)
4421 && VECTOR_TYPE_P (rhs2_type)
4422 && types_compatible_p (TREE_TYPE (rhs1_type),
4423 TREE_TYPE (rhs2_type))
4424 && multiple_p (TYPE_VECTOR_SUBPARTS (rhs1_type),
4425 TYPE_VECTOR_SUBPARTS (rhs2_type))
4426 && multiple_p (wi::to_poly_offset (rhs3),
4427 wi::to_poly_offset (TYPE_SIZE (rhs2_type))))))
4428 {
4429 error ("not allowed type combination in %qs", code_name);
4430 debug_generic_expr (rhs1_type);
4431 debug_generic_expr (rhs2_type);
4432 return true;
4433 }
4434 if (! tree_fits_uhwi_p (rhs3)
4435 || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4436 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4437 {
4438 error ("invalid position or size in %qs", code_name);
4439 return true;
4440 }
4441 if (INTEGRAL_TYPE_P (rhs1_type)
4442 && !type_has_mode_precision_p (rhs1_type))
4443 {
4444 error ("%qs into non-mode-precision operand", code_name);
4445 return true;
4446 }
4447 if (INTEGRAL_TYPE_P (rhs1_type))
4448 {
4449 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4450 if (bitpos >= TYPE_PRECISION (rhs1_type)
4451 || (bitpos + TYPE_PRECISION (rhs2_type)
4452 > TYPE_PRECISION (rhs1_type)))
4453 {
4454 error ("insertion out of range in %qs", code_name);
4455 return true;
4456 }
4457 }
4458 else if (VECTOR_TYPE_P (rhs1_type))
4459 {
4460 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4461 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4462 if (bitpos % bitsize != 0)
4463 {
4464 error ("%qs not at element boundary", code_name);
4465 return true;
4466 }
4467 }
4468 return false;
4469
4470 case DOT_PROD_EXPR:
4471 {
4472 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4473 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4474 && ((!INTEGRAL_TYPE_P (rhs1_type)
4475 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4476 || (!INTEGRAL_TYPE_P (lhs_type)
4477 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4478 /* rhs1_type and rhs2_type may differ in sign. */
4479 || !tree_nop_conversion_p (rhs1_type, rhs2_type)
4480 || !useless_type_conversion_p (lhs_type, rhs3_type)
4481 || maybe_lt (GET_MODE_SIZE (element_mode (rhs3_type)),
4482 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4483 {
4484 error ("type mismatch in %qs", code_name);
4485 debug_generic_expr (lhs_type);
4486 debug_generic_expr (rhs1_type);
4487 debug_generic_expr (rhs2_type);
4488 return true;
4489 }
4490 return false;
4491 }
4492
4493 case REALIGN_LOAD_EXPR:
4494 /* FIXME. */
4495 return false;
4496
4497 default:
4498 gcc_unreachable ();
4499 }
4500 return false;
4501 }
4502
4503 /* Verify a gimple assignment statement STMT with a single rhs.
4504 Returns true if anything is wrong. */
4505
4506 static bool
4507 verify_gimple_assign_single (gassign *stmt)
4508 {
4509 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4510 tree lhs = gimple_assign_lhs (stmt);
4511 tree lhs_type = TREE_TYPE (lhs);
4512 tree rhs1 = gimple_assign_rhs1 (stmt);
4513 tree rhs1_type = TREE_TYPE (rhs1);
4514 bool res = false;
4515
4516 const char* const code_name = get_tree_code_name (rhs_code);
4517
4518 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4519 {
4520 error ("non-trivial conversion in %qs", code_name);
4521 debug_generic_expr (lhs_type);
4522 debug_generic_expr (rhs1_type);
4523 return true;
4524 }
4525
4526 if (gimple_clobber_p (stmt)
4527 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4528 {
4529 error ("%qs LHS in clobber statement",
4530 get_tree_code_name (TREE_CODE (lhs)));
4531 debug_generic_expr (lhs);
4532 return true;
4533 }
4534
4535 if (TREE_CODE (lhs) == WITH_SIZE_EXPR)
4536 {
4537 error ("%qs LHS in assignment statement",
4538 get_tree_code_name (TREE_CODE (lhs)));
4539 debug_generic_expr (lhs);
4540 return true;
4541 }
4542
4543 if (handled_component_p (lhs)
4544 || TREE_CODE (lhs) == MEM_REF
4545 || TREE_CODE (lhs) == TARGET_MEM_REF)
4546 res |= verify_types_in_gimple_reference (lhs, true);
4547
4548 /* Special codes we cannot handle via their class. */
4549 switch (rhs_code)
4550 {
4551 case ADDR_EXPR:
4552 {
4553 tree op = TREE_OPERAND (rhs1, 0);
4554 if (!is_gimple_addressable (op))
4555 {
4556 error ("invalid operand in %qs", code_name);
4557 return true;
4558 }
4559
4560 /* Technically there is no longer a need for matching types, but
4561 gimple hygiene asks for this check. In LTO we can end up
4562 combining incompatible units and thus with addresses of
4563 globals that change their type to a common one. */
4564 if (!in_lto_p
4565 && !types_compatible_p (TREE_TYPE (op),
4566 TREE_TYPE (TREE_TYPE (rhs1)))
4567 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4568 TREE_TYPE (op)))
4569 {
4570 error ("type mismatch in %qs", code_name);
4571 debug_generic_stmt (TREE_TYPE (rhs1));
4572 debug_generic_stmt (TREE_TYPE (op));
4573 return true;
4574 }
4575
4576 return (verify_address (rhs1, true)
4577 || verify_types_in_gimple_reference (op, true));
4578 }
4579
4580 /* tcc_reference */
4581 case INDIRECT_REF:
4582 error ("%qs in gimple IL", code_name);
4583 return true;
4584
4585 case COMPONENT_REF:
4586 case BIT_FIELD_REF:
4587 case ARRAY_REF:
4588 case ARRAY_RANGE_REF:
4589 case VIEW_CONVERT_EXPR:
4590 case REALPART_EXPR:
4591 case IMAGPART_EXPR:
4592 case TARGET_MEM_REF:
4593 case MEM_REF:
4594 if (!is_gimple_reg (lhs)
4595 && is_gimple_reg_type (TREE_TYPE (lhs)))
4596 {
4597 error ("invalid RHS for gimple memory store: %qs", code_name);
4598 debug_generic_stmt (lhs);
4599 debug_generic_stmt (rhs1);
4600 return true;
4601 }
4602 return res || verify_types_in_gimple_reference (rhs1, false);
4603
4604 /* tcc_constant */
4605 case SSA_NAME:
4606 case INTEGER_CST:
4607 case REAL_CST:
4608 case FIXED_CST:
4609 case COMPLEX_CST:
4610 case VECTOR_CST:
4611 case STRING_CST:
4612 return res;
4613
4614 /* tcc_declaration */
4615 case CONST_DECL:
4616 return res;
4617 case VAR_DECL:
4618 case PARM_DECL:
4619 if (!is_gimple_reg (lhs)
4620 && !is_gimple_reg (rhs1)
4621 && is_gimple_reg_type (TREE_TYPE (lhs)))
4622 {
4623 error ("invalid RHS for gimple memory store: %qs", code_name);
4624 debug_generic_stmt (lhs);
4625 debug_generic_stmt (rhs1);
4626 return true;
4627 }
4628 return res;
4629
4630 case CONSTRUCTOR:
4631 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4632 {
4633 unsigned int i;
4634 tree elt_i, elt_v, elt_t = NULL_TREE;
4635
4636 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4637 return res;
4638 /* For vector CONSTRUCTORs we require that either it is an empty
4639 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4640 (then the element count must be correct to cover the whole
4641 outer vector and the index must be NULL on all elements), or it is
4642 a CONSTRUCTOR of scalar elements, where we as an exception allow
4643 a smaller number of elements (assuming zero filling) and
4644 consecutive indexes as compared to NULL indexes (such
4645 CONSTRUCTORs can appear in the IL from FEs). */
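/* E.g. for a 4-element vector type, { 1, 2 } is a valid
   scalar-element CONSTRUCTOR (the remaining lanes are implicitly
   zero), whereas a CONSTRUCTOR of two 2-element vectors must
   cover all four lanes exactly.  */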
4646 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4647 {
4648 if (elt_t == NULL_TREE)
4649 {
4650 elt_t = TREE_TYPE (elt_v);
4651 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4652 {
4653 tree elt_t = TREE_TYPE (elt_v);
4654 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4655 TREE_TYPE (elt_t)))
4656 {
4657 error ("incorrect type of vector %qs elements",
4658 code_name);
4659 debug_generic_stmt (rhs1);
4660 return true;
4661 }
4662 else if (maybe_ne (CONSTRUCTOR_NELTS (rhs1)
4663 * TYPE_VECTOR_SUBPARTS (elt_t),
4664 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4665 {
4666 error ("incorrect number of vector %qs elements",
4667 code_name);
4668 debug_generic_stmt (rhs1);
4669 return true;
4670 }
4671 }
4672 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4673 elt_t))
4674 {
4675 error ("incorrect type of vector %qs elements",
4676 code_name);
4677 debug_generic_stmt (rhs1);
4678 return true;
4679 }
4680 else if (maybe_gt (CONSTRUCTOR_NELTS (rhs1),
4681 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4682 {
4683 error ("incorrect number of vector %qs elements",
4684 code_name);
4685 debug_generic_stmt (rhs1);
4686 return true;
4687 }
4688 }
4689 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4690 {
4691 error ("incorrect type of vector CONSTRUCTOR elements");
4692 debug_generic_stmt (rhs1);
4693 return true;
4694 }
4695 if (elt_i != NULL_TREE
4696 && (TREE_CODE (elt_t) == VECTOR_TYPE
4697 || TREE_CODE (elt_i) != INTEGER_CST
4698 || compare_tree_int (elt_i, i) != 0))
4699 {
4700 error ("vector %qs with non-NULL element index",
4701 code_name);
4702 debug_generic_stmt (rhs1);
4703 return true;
4704 }
4705 if (!is_gimple_val (elt_v))
4706 {
4707 error ("vector %qs element is not a GIMPLE value",
4708 code_name);
4709 debug_generic_stmt (rhs1);
4710 return true;
4711 }
4712 }
4713 }
4714 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4715 {
4716 error ("non-vector %qs with elements", code_name);
4717 debug_generic_stmt (rhs1);
4718 return true;
4719 }
4720 return res;
4721
4722 case ASSERT_EXPR:
4723 /* FIXME. */
4724 rhs1 = fold (ASSERT_EXPR_COND (rhs1));
4725 if (rhs1 == boolean_false_node)
4726 {
4727 error ("%qs with an always-false condition", code_name);
4728 debug_generic_stmt (rhs1);
4729 return true;
4730 }
4731 break;
4732
4733 case WITH_SIZE_EXPR:
4734 error ("%qs RHS in assignment statement",
4735 get_tree_code_name (rhs_code));
4736 debug_generic_expr (rhs1);
4737 return true;
4738
4739 case OBJ_TYPE_REF:
4740 /* FIXME. */
4741 return res;
4742
4743 default:;
4744 }
4745
4746 return res;
4747 }
4748
4749 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4750 is a problem, otherwise false. */
4751
4752 static bool
4753 verify_gimple_assign (gassign *stmt)
4754 {
4755 switch (gimple_assign_rhs_class (stmt))
4756 {
4757 case GIMPLE_SINGLE_RHS:
4758 return verify_gimple_assign_single (stmt);
4759
4760 case GIMPLE_UNARY_RHS:
4761 return verify_gimple_assign_unary (stmt);
4762
4763 case GIMPLE_BINARY_RHS:
4764 return verify_gimple_assign_binary (stmt);
4765
4766 case GIMPLE_TERNARY_RHS:
4767 return verify_gimple_assign_ternary (stmt);
4768
4769 default:
4770 gcc_unreachable ();
4771 }
4772 }
4773
4774 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4775 is a problem, otherwise false. */
4776
4777 static bool
4778 verify_gimple_return (greturn *stmt)
4779 {
4780 tree op = gimple_return_retval (stmt);
4781 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4782
4783 /* We cannot test for present return values as we do not fix up missing
4784 return values from the original source. */
4785 if (op == NULL)
4786 return false;
4787
4788 if (!is_gimple_val (op)
4789 && TREE_CODE (op) != RESULT_DECL)
4790 {
4791 error ("invalid operand in return statement");
4792 debug_generic_stmt (op);
4793 return true;
4794 }
4795
4796 if ((TREE_CODE (op) == RESULT_DECL
4797 && DECL_BY_REFERENCE (op))
4798 || (TREE_CODE (op) == SSA_NAME
4799 && SSA_NAME_VAR (op)
4800 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4801 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4802 op = TREE_TYPE (op);
4803
4804 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4805 {
4806 error ("invalid conversion in return statement");
4807 debug_generic_stmt (restype);
4808 debug_generic_stmt (TREE_TYPE (op));
4809 return true;
4810 }
4811
4812 return false;
4813 }
4814
4815
4816 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4817 is a problem, otherwise false. */
4818
4819 static bool
4820 verify_gimple_goto (ggoto *stmt)
4821 {
4822 tree dest = gimple_goto_dest (stmt);
4823
4824 /* ??? We have two canonical forms of direct goto destinations, a
4825 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
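/* A computed goto such as goto *p_1 therefore has a pointer-typed
   destination, while a direct goto carries the bare LABEL_DECL.  */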
4826 if (TREE_CODE (dest) != LABEL_DECL
4827 && (!is_gimple_val (dest)
4828 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4829 {
4830 error ("goto destination is neither a label nor a pointer");
4831 return true;
4832 }
4833
4834 return false;
4835 }
4836
4837 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4838 is a problem, otherwise false. */
4839
4840 static bool
4841 verify_gimple_switch (gswitch *stmt)
4842 {
4843 unsigned int i, n;
4844 tree elt, prev_upper_bound = NULL_TREE;
4845 tree index_type, elt_type = NULL_TREE;
4846
4847 if (!is_gimple_val (gimple_switch_index (stmt)))
4848 {
4849 error ("invalid operand to switch statement");
4850 debug_generic_stmt (gimple_switch_index (stmt));
4851 return true;
4852 }
4853
4854 index_type = TREE_TYPE (gimple_switch_index (stmt));
4855 if (! INTEGRAL_TYPE_P (index_type))
4856 {
4857 error ("non-integral type switch statement");
4858 debug_generic_expr (index_type);
4859 return true;
4860 }
4861
4862 elt = gimple_switch_label (stmt, 0);
4863 if (CASE_LOW (elt) != NULL_TREE
4864 || CASE_HIGH (elt) != NULL_TREE
4865 || CASE_CHAIN (elt) != NULL_TREE)
4866 {
4867 error ("invalid default case label in switch statement");
4868 debug_generic_expr (elt);
4869 return true;
4870 }
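/* E.g. a well-formed case vector is { default:, case 1:, case 5 ... 7: }:
   the default comes first, every other label has a CASE_LOW, any
   range has CASE_LOW < CASE_HIGH, and labels are sorted and
   non-overlapping.  The loop below checks each of those properties.  */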
4871
4872 n = gimple_switch_num_labels (stmt);
4873 for (i = 1; i < n; i++)
4874 {
4875 elt = gimple_switch_label (stmt, i);
4876
4877 if (CASE_CHAIN (elt))
4878 {
4879 error ("invalid %<CASE_CHAIN%>");
4880 debug_generic_expr (elt);
4881 return true;
4882 }
4883 if (! CASE_LOW (elt))
4884 {
4885 error ("invalid case label in switch statement");
4886 debug_generic_expr (elt);
4887 return true;
4888 }
4889 if (CASE_HIGH (elt)
4890 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4891 {
4892 error ("invalid case range in switch statement");
4893 debug_generic_expr (elt);
4894 return true;
4895 }
4896
4897 if (! elt_type)
4898 {
4899 elt_type = TREE_TYPE (CASE_LOW (elt));
4900 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4901 {
4902 error ("type precision mismatch in switch statement");
4903 return true;
4904 }
4905 }
4906 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4907 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4908 {
4909 error ("type mismatch for case label in switch statement");
4910 debug_generic_expr (elt);
4911 return true;
4912 }
4913
4914 if (prev_upper_bound)
4915 {
4916 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4917 {
4918 error ("case labels not sorted in switch statement");
4919 return true;
4920 }
4921 }
4922
4923 prev_upper_bound = CASE_HIGH (elt);
4924 if (! prev_upper_bound)
4925 prev_upper_bound = CASE_LOW (elt);
4926 }
4927
4928 return false;
4929 }
4930
4931 /* Verify a gimple debug statement STMT.
4932 Returns true if anything is wrong. */
4933
4934 static bool
4935 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4936 {
4937 /* There isn't much that could be wrong in a gimple debug stmt. A
4938 gimple debug bind stmt, for example, maps a tree (usually a
4939 VAR_DECL or a PARM_DECL, but possibly a scalarized component or
4940 member of an aggregate type) to another tree that can be an
4941 arbitrary expression. These stmts expand into debug insns, and
4942 are converted to debug notes by var-tracking.cc. */
4943 return false;
4944 }
4945
4946 /* Verify a gimple label statement STMT.
4947 Returns true if anything is wrong. */
4948
4949 static bool
4950 verify_gimple_label (glabel *stmt)
4951 {
4952 tree decl = gimple_label_label (stmt);
4953 int uid;
4954 bool err = false;
4955
4956 if (TREE_CODE (decl) != LABEL_DECL)
4957 return true;
4958 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4959 && DECL_CONTEXT (decl) != current_function_decl)
4960 {
4961 error ("label context is not the current function declaration");
4962 err |= true;
4963 }
4964
4965 uid = LABEL_DECL_UID (decl);
4966 if (cfun->cfg
4967 && (uid == -1
4968 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4969 {
4970 error ("incorrect entry in %<label_to_block_map%>");
4971 err |= true;
4972 }
4973
4974 uid = EH_LANDING_PAD_NR (decl);
4975 if (uid)
4976 {
4977 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4978 if (decl != lp->post_landing_pad)
4979 {
4980 error ("incorrect setting of landing pad number");
4981 err |= true;
4982 }
4983 }
4984
4985 return err;
4986 }
4987
4988 /* Verify a gimple cond statement STMT.
4989 Returns true if anything is wrong. */
4990
4991 static bool
4992 verify_gimple_cond (gcond *stmt)
4993 {
4994 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4995 {
4996 error ("invalid comparison code in gimple cond");
4997 return true;
4998 }
4999 if (!(!gimple_cond_true_label (stmt)
5000 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
5001 || !(!gimple_cond_false_label (stmt)
5002 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
5003 {
5004 error ("invalid labels in gimple cond");
5005 return true;
5006 }
5007
5008 return verify_gimple_comparison (boolean_type_node,
5009 gimple_cond_lhs (stmt),
5010 gimple_cond_rhs (stmt),
5011 gimple_cond_code (stmt));
5012 }
5013
5014 /* Verify the GIMPLE statement STMT. Returns true if there is an
5015 error, otherwise false. */
5016
5017 static bool
5018 verify_gimple_stmt (gimple *stmt)
5019 {
5020 switch (gimple_code (stmt))
5021 {
5022 case GIMPLE_ASSIGN:
5023 return verify_gimple_assign (as_a <gassign *> (stmt));
5024
5025 case GIMPLE_LABEL:
5026 return verify_gimple_label (as_a <glabel *> (stmt));
5027
5028 case GIMPLE_CALL:
5029 return verify_gimple_call (as_a <gcall *> (stmt));
5030
5031 case GIMPLE_COND:
5032 return verify_gimple_cond (as_a <gcond *> (stmt));
5033
5034 case GIMPLE_GOTO:
5035 return verify_gimple_goto (as_a <ggoto *> (stmt));
5036
5037 case GIMPLE_SWITCH:
5038 return verify_gimple_switch (as_a <gswitch *> (stmt));
5039
5040 case GIMPLE_RETURN:
5041 return verify_gimple_return (as_a <greturn *> (stmt));
5042
5043 case GIMPLE_ASM:
5044 return false;
5045
5046 case GIMPLE_TRANSACTION:
5047 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
5048
5049 /* Tuples that do not have tree operands. */
5050 case GIMPLE_NOP:
5051 case GIMPLE_PREDICT:
5052 case GIMPLE_RESX:
5053 case GIMPLE_EH_DISPATCH:
5054 case GIMPLE_EH_MUST_NOT_THROW:
5055 return false;
5056
5057 CASE_GIMPLE_OMP:
5058 /* OpenMP directives are validated by the FE and never operated
5059 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
5060 non-gimple expressions when the main index variable has had
5061 its address taken. This does not affect the loop itself
5062 because the header of a GIMPLE_OMP_FOR is merely used to determine
5063 how to set up the parallel iteration. */
5064 return false;
5065
5066 case GIMPLE_DEBUG:
5067 return verify_gimple_debug (stmt);
5068
5069 default:
5070 gcc_unreachable ();
5071 }
5072 }
5073
5074 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
5075 and false otherwise. */
5076
5077 static bool
5078 verify_gimple_phi (gphi *phi)
5079 {
5080 bool err = false;
5081 unsigned i;
5082 tree phi_result = gimple_phi_result (phi);
5083 bool virtual_p;
5084
5085 if (!phi_result)
5086 {
5087 error ("invalid %<PHI%> result");
5088 return true;
5089 }
5090
5091 virtual_p = virtual_operand_p (phi_result);
5092 if (TREE_CODE (phi_result) != SSA_NAME
5093 || (virtual_p
5094 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
5095 {
5096 error ("invalid %<PHI%> result");
5097 err = true;
5098 }
5099
5100 for (i = 0; i < gimple_phi_num_args (phi); i++)
5101 {
5102 tree t = gimple_phi_arg_def (phi, i);
5103
5104 if (!t)
5105 {
5106 error ("missing %<PHI%> def");
5107 err |= true;
5108 continue;
5109 }
5110 /* Addressable variables do have SSA_NAMEs but they
5111 are not considered gimple values. */
5112 else if ((TREE_CODE (t) == SSA_NAME
5113 && virtual_p != virtual_operand_p (t))
5114 || (virtual_p
5115 && (TREE_CODE (t) != SSA_NAME
5116 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
5117 || (!virtual_p
5118 && !is_gimple_val (t)))
5119 {
5120 error ("invalid %<PHI%> argument");
5121 debug_generic_expr (t);
5122 err |= true;
5123 }
5124 #ifdef ENABLE_TYPES_CHECKING
5125 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
5126 {
5127 error ("incompatible types in %<PHI%> argument %u", i);
5128 debug_generic_stmt (TREE_TYPE (phi_result));
5129 debug_generic_stmt (TREE_TYPE (t));
5130 err |= true;
5131 }
5132 #endif
5133 }
5134
5135 return err;
5136 }
5137
5138 /* Verify the GIMPLE statements inside the sequence STMTS. */
5139
5140 static bool
5141 verify_gimple_in_seq_2 (gimple_seq stmts)
5142 {
5143 gimple_stmt_iterator ittr;
5144 bool err = false;
5145
5146 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
5147 {
5148 gimple *stmt = gsi_stmt (ittr);
5149
5150 switch (gimple_code (stmt))
5151 {
5152 case GIMPLE_BIND:
5153 err |= verify_gimple_in_seq_2 (
5154 gimple_bind_body (as_a <gbind *> (stmt)));
5155 break;
5156
5157 case GIMPLE_TRY:
5158 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
5159 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
5160 break;
5161
5162 case GIMPLE_EH_FILTER:
5163 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
5164 break;
5165
5166 case GIMPLE_EH_ELSE:
5167 {
5168 geh_else *eh_else = as_a <geh_else *> (stmt);
5169 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
5170 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
5171 }
5172 break;
5173
5174 case GIMPLE_CATCH:
5175 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
5176 as_a <gcatch *> (stmt)));
5177 break;
5178
5179 case GIMPLE_TRANSACTION:
5180 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
5181 break;
5182
5183 default:
5184 {
5185 bool err2 = verify_gimple_stmt (stmt);
5186 if (err2)
5187 debug_gimple_stmt (stmt);
5188 err |= err2;
5189 }
5190 }
5191 }
5192
5193 return err;
5194 }
5195
5196 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5197 is a problem, otherwise false. */
5198
5199 static bool
5200 verify_gimple_transaction (gtransaction *stmt)
5201 {
5202 tree lab;
5203
5204 lab = gimple_transaction_label_norm (stmt);
5205 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5206 return true;
5207 lab = gimple_transaction_label_uninst (stmt);
5208 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5209 return true;
5210 lab = gimple_transaction_label_over (stmt);
5211 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5212 return true;
5213
5214 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5215 }
5216
5217
5218 /* Verify the GIMPLE statements inside the statement list STMTS. */
5219
5220 DEBUG_FUNCTION void
5221 verify_gimple_in_seq (gimple_seq stmts)
5222 {
5223 timevar_push (TV_TREE_STMT_VERIFY);
5224 if (verify_gimple_in_seq_2 (stmts))
5225 internal_error ("%<verify_gimple%> failed");
5226 timevar_pop (TV_TREE_STMT_VERIFY);
5227 }
5228
5229 /* Return true when T can be shared. */
5230
5231 static bool
5232 tree_node_can_be_shared (tree t)
5233 {
5234 if (IS_TYPE_OR_DECL_P (t)
5235 || TREE_CODE (t) == SSA_NAME
5236 || TREE_CODE (t) == IDENTIFIER_NODE
5237 || TREE_CODE (t) == CASE_LABEL_EXPR
5238 || is_gimple_min_invariant (t))
5239 return true;
5240
5241 if (t == error_mark_node)
5242 return true;
5243
5244 return false;
5245 }
5246
5247 /* Called via walk_tree. Verify tree sharing. */
5248
5249 static tree
5250 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5251 {
5252 hash_set<void *> *visited = (hash_set<void *> *) data;
5253
5254 if (tree_node_can_be_shared (*tp))
5255 {
5256 *walk_subtrees = false;
5257 return NULL;
5258 }
5259
5260 if (visited->add (*tp))
5261 return *tp;
5262
5263 return NULL;
5264 }
5265
5266 /* Called via walk_gimple_stmt. Verify tree sharing. */
5267
5268 static tree
5269 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5270 {
5271 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5272 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5273 }
5274
5275 static bool eh_error_found;
5276 bool
5277 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5278 hash_set<gimple *> *visited)
5279 {
5280 if (!visited->contains (stmt))
5281 {
5282 error ("dead statement in EH table");
5283 debug_gimple_stmt (stmt);
5284 eh_error_found = true;
5285 }
5286 return true;
5287 }
5288
5289 /* Verify that location LOC's block is in BLOCKS. */
5290
5291 static bool
5292 verify_location (hash_set<tree> *blocks, location_t loc)
5293 {
5294 tree block = LOCATION_BLOCK (loc);
5295 if (block != NULL_TREE
5296 && !blocks->contains (block))
5297 {
5298 error ("location references block not in block tree");
5299 return true;
5300 }
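/* Recursing on BLOCK_SOURCE_LOCATION below verifies the whole
   chain of locations hanging off the block.  */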
5301 if (block != NULL_TREE)
5302 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5303 return false;
5304 }
5305
5306 /* Called via walk_tree. Verify that expressions have no blocks. */
5307
5308 static tree
5309 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5310 {
5311 if (!EXPR_P (*tp))
5312 {
5313 *walk_subtrees = false;
5314 return NULL;
5315 }
5316
5317 location_t loc = EXPR_LOCATION (*tp);
5318 if (LOCATION_BLOCK (loc) != NULL)
5319 return *tp;
5320
5321 return NULL;
5322 }
5323
5324 /* Called via walk_tree. Verify locations of expressions. */
5325
5326 static tree
5327 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5328 {
5329 hash_set<tree> *blocks = (hash_set<tree> *) data;
5330 tree t = *tp;
5331
5332 /* ??? This doesn't really belong here but there's no good place to
5333 stick this remainder of old verify_expr. */
5334 /* ??? This barfs on debug stmts which contain binds to vars with
5335 different function context. */
5336 #if 0
5337 if (VAR_P (t)
5338 || TREE_CODE (t) == PARM_DECL
5339 || TREE_CODE (t) == RESULT_DECL)
5340 {
5341 tree context = decl_function_context (t);
5342 if (context != cfun->decl
5343 && !SCOPE_FILE_SCOPE_P (context)
5344 && !TREE_STATIC (t)
5345 && !DECL_EXTERNAL (t))
5346 {
5347 error ("local declaration from a different function");
5348 return t;
5349 }
5350 }
5351 #endif
5352
5353 if (VAR_P (t) && DECL_HAS_DEBUG_EXPR_P (t))
5354 {
5355 tree x = DECL_DEBUG_EXPR (t);
5356 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5357 if (addr)
5358 return addr;
5359 }
5360 if ((VAR_P (t)
5361 || TREE_CODE (t) == PARM_DECL
5362 || TREE_CODE (t) == RESULT_DECL)
5363 && DECL_HAS_VALUE_EXPR_P (t))
5364 {
5365 tree x = DECL_VALUE_EXPR (t);
5366 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5367 if (addr)
5368 return addr;
5369 }
5370
5371 if (!EXPR_P (t))
5372 {
5373 *walk_subtrees = false;
5374 return NULL;
5375 }
5376
5377 location_t loc = EXPR_LOCATION (t);
5378 if (verify_location (blocks, loc))
5379 return t;
5380
5381 return NULL;
5382 }
5383
5384 /* Called via walk_gimple_op. Verify locations of expressions. */
5385
5386 static tree
5387 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5388 {
5389 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5390 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5391 }
5392
5393 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5394
5395 static void
5396 collect_subblocks (hash_set<tree> *blocks, tree block)
5397 {
5398 tree t;
5399 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5400 {
5401 blocks->add (t);
5402 collect_subblocks (blocks, t);
5403 }
5404 }
5405
5406 /* Disable warnings about missing quoting in GCC diagnostics for
5407 the verification errors. Their format strings don't follow
5408 GCC diagnostic conventions and trigger an ICE in the end. */
5409 #if __GNUC__ >= 10
5410 # pragma GCC diagnostic push
5411 # pragma GCC diagnostic ignored "-Wformat-diag"
5412 #endif
5413
5414 /* Verify the GIMPLE statements in the CFG of FN. */
5415
5416 DEBUG_FUNCTION void
5417 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5418 {
5419 basic_block bb;
5420 bool err = false;
5421
5422 timevar_push (TV_TREE_STMT_VERIFY);
5423 hash_set<void *> visited;
5424 hash_set<gimple *> visited_throwing_stmts;
5425
5426 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5427 hash_set<tree> blocks;
5428 if (DECL_INITIAL (fn->decl))
5429 {
5430 blocks.add (DECL_INITIAL (fn->decl));
5431 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5432 }
5433
5434 FOR_EACH_BB_FN (bb, fn)
5435 {
5436 gimple_stmt_iterator gsi;
5437 edge_iterator ei;
5438 edge e;
5439
5440 for (gphi_iterator gpi = gsi_start_phis (bb);
5441 !gsi_end_p (gpi);
5442 gsi_next (&gpi))
5443 {
5444 gphi *phi = gpi.phi ();
5445 bool err2 = false;
5446 unsigned i;
5447
5448 if (gimple_bb (phi) != bb)
5449 {
5450 error ("gimple_bb (phi) is set to a wrong basic block");
5451 err2 = true;
5452 }
5453
5454 err2 |= verify_gimple_phi (phi);
5455
5456 /* Only PHI arguments have locations. */
5457 if (gimple_location (phi) != UNKNOWN_LOCATION)
5458 {
5459 error ("PHI node with location");
5460 err2 = true;
5461 }
5462
5463 for (i = 0; i < gimple_phi_num_args (phi); i++)
5464 {
5465 tree arg = gimple_phi_arg_def (phi, i);
5466 tree addr = walk_tree (&arg, verify_node_sharing_1,
5467 &visited, NULL);
5468 if (addr)
5469 {
5470 error ("incorrect sharing of tree nodes");
5471 debug_generic_expr (addr);
5472 err2 |= true;
5473 }
5474 location_t loc = gimple_phi_arg_location (phi, i);
5475 if (virtual_operand_p (gimple_phi_result (phi))
5476 && loc != UNKNOWN_LOCATION)
5477 {
5478 error ("virtual PHI with argument locations");
5479 err2 = true;
5480 }
5481 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5482 if (addr)
5483 {
5484 debug_generic_expr (addr);
5485 err2 = true;
5486 }
5487 err2 |= verify_location (&blocks, loc);
5488 }
5489
5490 if (err2)
5491 debug_gimple_stmt (phi);
5492 err |= err2;
5493 }
5494
5495 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5496 {
5497 gimple *stmt = gsi_stmt (gsi);
5498 bool err2 = false;
5499 struct walk_stmt_info wi;
5500 tree addr;
5501 int lp_nr;
5502
5503 if (gimple_bb (stmt) != bb)
5504 {
5505 error ("gimple_bb (stmt) is set to a wrong basic block");
5506 err2 = true;
5507 }
5508
5509 err2 |= verify_gimple_stmt (stmt);
5510 err2 |= verify_location (&blocks, gimple_location (stmt));
5511
5512 memset (&wi, 0, sizeof (wi));
5513 wi.info = (void *) &visited;
5514 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5515 if (addr)
5516 {
5517 error ("incorrect sharing of tree nodes");
5518 debug_generic_expr (addr);
5519 err2 |= true;
5520 }
5521
5522 memset (&wi, 0, sizeof (wi));
5523 wi.info = (void *) &blocks;
5524 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5525 if (addr)
5526 {
5527 debug_generic_expr (addr);
5528 err2 |= true;
5529 }
5530
5531 /* If the statement is marked as part of an EH region, then it is
5532 expected that the statement could throw. When an optimization
5533 simplifies a statement such that we can prove it cannot throw,
5534 verify that the other data structures were updated
5535 to match. */
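/* E.g. an optimization that proves a call cannot throw must also
   remove the call's entry from the EH table and purge the now-dead
   EH edges.  */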
5536 lp_nr = lookup_stmt_eh_lp (stmt);
5537 if (lp_nr != 0)
5538 visited_throwing_stmts.add (stmt);
5539 if (lp_nr > 0)
5540 {
5541 if (!stmt_could_throw_p (cfun, stmt))
5542 {
5543 if (verify_nothrow)
5544 {
5545 error ("statement marked for throw, but doesn%'t");
5546 err2 |= true;
5547 }
5548 }
5549 else if (!gsi_one_before_end_p (gsi))
5550 {
5551 error ("statement marked for throw in middle of block");
5552 err2 |= true;
5553 }
5554 }
5555
5556 if (err2)
5557 debug_gimple_stmt (stmt);
5558 err |= err2;
5559 }
5560
5561 FOR_EACH_EDGE (e, ei, bb->succs)
5562 if (e->goto_locus != UNKNOWN_LOCATION)
5563 err |= verify_location (&blocks, e->goto_locus);
5564 }
5565
5566 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5567 eh_error_found = false;
5568 if (eh_table)
5569 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5570 (&visited_throwing_stmts);
5571
5572 if (err || eh_error_found)
5573 internal_error ("verify_gimple failed");
5574
5575 verify_histograms ();
5576 timevar_pop (TV_TREE_STMT_VERIFY);
5577 }
5578
5579
5580 /* Verifies that the flow information is OK. */
5581
5582 static int
5583 gimple_verify_flow_info (void)
5584 {
5585 int err = 0;
5586 basic_block bb;
5587 gimple_stmt_iterator gsi;
5588 gimple *stmt;
5589 edge e;
5590 edge_iterator ei;
5591
5592 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5593 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5594 {
5595 error ("ENTRY_BLOCK has IL associated with it");
5596 err = 1;
5597 }
5598
5599 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5600 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5601 {
5602 error ("EXIT_BLOCK has IL associated with it");
5603 err = 1;
5604 }
5605
5606 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5607 if (e->flags & EDGE_FALLTHRU)
5608 {
5609 error ("fallthru to exit from bb %d", e->src->index);
5610 err = 1;
5611 }
5612
5613 FOR_EACH_BB_FN (bb, cfun)
5614 {
5615 bool found_ctrl_stmt = false;
5616
5617 stmt = NULL;
5618
5619 /* Skip labels at the start of the basic block. */
5620 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5621 {
5622 tree label;
5623 gimple *prev_stmt = stmt;
5624
5625 stmt = gsi_stmt (gsi);
5626
5627 if (gimple_code (stmt) != GIMPLE_LABEL)
5628 break;
5629
5630 label = gimple_label_label (as_a <glabel *> (stmt));
5631 if (prev_stmt && DECL_NONLOCAL (label))
5632 {
5633 error ("nonlocal label %qD is not first in a sequence "
5634 "of labels in bb %d", label, bb->index);
5635 err = 1;
5636 }
5637
5638 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5639 {
5640 error ("EH landing pad label %qD is not first in a sequence "
5641 "of labels in bb %d", label, bb->index);
5642 err = 1;
5643 }
5644
5645 if (label_to_block (cfun, label) != bb)
5646 {
5647 error ("label %qD to block does not match in bb %d",
5648 label, bb->index);
5649 err = 1;
5650 }
5651
5652 if (decl_function_context (label) != current_function_decl)
5653 {
5654 error ("label %qD has incorrect context in bb %d",
5655 label, bb->index);
5656 err = 1;
5657 }
5658 }
5659
5660 /* Verify that the body of basic block BB is free of control flow. */
5661 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5662 {
5663 gimple *stmt = gsi_stmt (gsi);
5664
5665 if (found_ctrl_stmt)
5666 {
5667 error ("control flow in the middle of basic block %d",
5668 bb->index);
5669 err = 1;
5670 }
5671
5672 if (stmt_ends_bb_p (stmt))
5673 found_ctrl_stmt = true;
5674
5675 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5676 {
5677 error ("label %qD in the middle of basic block %d",
5678 gimple_label_label (label_stmt), bb->index);
5679 err = 1;
5680 }
5681 }
5682
5683 gsi = gsi_last_nondebug_bb (bb);
5684 if (gsi_end_p (gsi))
5685 continue;
5686
5687 stmt = gsi_stmt (gsi);
5688
5689 if (gimple_code (stmt) == GIMPLE_LABEL)
5690 continue;
5691
5692 err |= verify_eh_edges (stmt);
5693
5694 if (is_ctrl_stmt (stmt))
5695 {
5696 FOR_EACH_EDGE (e, ei, bb->succs)
5697 if (e->flags & EDGE_FALLTHRU)
5698 {
5699 error ("fallthru edge after a control statement in bb %d",
5700 bb->index);
5701 err = 1;
5702 }
5703 }
5704
5705 if (gimple_code (stmt) != GIMPLE_COND)
5706 {
5707 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5708 after anything other than a GIMPLE_COND statement. */
5709 FOR_EACH_EDGE (e, ei, bb->succs)
5710 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5711 {
5712 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5713 bb->index);
5714 err = 1;
5715 }
5716 }
5717
5718 switch (gimple_code (stmt))
5719 {
5720 case GIMPLE_COND:
5721 {
5722 edge true_edge;
5723 edge false_edge;
5724
5725 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5726
5727 if (!true_edge
5728 || !false_edge
5729 || !(true_edge->flags & EDGE_TRUE_VALUE)
5730 || !(false_edge->flags & EDGE_FALSE_VALUE)
5731 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5732 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5733 || EDGE_COUNT (bb->succs) >= 3)
5734 {
5735 error ("wrong outgoing edge flags at end of bb %d",
5736 bb->index);
5737 err = 1;
5738 }
5739 }
5740 break;
5741
5742 case GIMPLE_GOTO:
5743 if (simple_goto_p (stmt))
5744 {
5745 error ("explicit goto at end of bb %d", bb->index);
5746 err = 1;
5747 }
5748 else
5749 {
5750 /* FIXME. We should double check that the labels in the
5751 destination blocks have their address taken. */
5752 FOR_EACH_EDGE (e, ei, bb->succs)
5753 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5754 | EDGE_FALSE_VALUE))
5755 || !(e->flags & EDGE_ABNORMAL))
5756 {
5757 error ("wrong outgoing edge flags at end of bb %d",
5758 bb->index);
5759 err = 1;
5760 }
5761 }
5762 break;
5763
5764 case GIMPLE_CALL:
5765 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5766 break;
5767 /* fallthru */
5768 case GIMPLE_RETURN:
5769 if (!single_succ_p (bb)
5770 || (single_succ_edge (bb)->flags
5771 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5772 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5773 {
5774 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5775 err = 1;
5776 }
5777 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5778 {
5779 error ("return edge does not point to exit in bb %d",
5780 bb->index);
5781 err = 1;
5782 }
5783 break;
5784
5785 case GIMPLE_SWITCH:
5786 {
5787 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5788 tree prev;
5789 edge e;
5790 size_t i, n;
5791
5792 n = gimple_switch_num_labels (switch_stmt);
5793
5794 /* Mark all the destination basic blocks. */
5795 for (i = 0; i < n; ++i)
5796 {
5797 basic_block label_bb = gimple_switch_label_bb (cfun, switch_stmt, i);
5798 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5799 label_bb->aux = (void *)1;
5800 }
5801
5802 /* Verify that the case labels are sorted. */
5803 prev = gimple_switch_label (switch_stmt, 0);
5804 for (i = 1; i < n; ++i)
5805 {
5806 tree c = gimple_switch_label (switch_stmt, i);
5807 if (!CASE_LOW (c))
5808 {
5809 error ("found default case not at the start of "
5810 "case vector");
5811 err = 1;
5812 continue;
5813 }
5814 if (CASE_LOW (prev)
5815 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5816 {
5817 error ("case labels not sorted: ");
5818 print_generic_expr (stderr, prev);
5819 	    fprintf (stderr, " is greater than ");
5820 	    print_generic_expr (stderr, c);
5821 	    fprintf (stderr, " but comes before it.\n");
5822 err = 1;
5823 }
5824 prev = c;
5825 }
5826 /* VRP will remove the default case if it can prove it will
5827 never be executed. So do not verify there always exists
5828 a default case here. */
5829
5830 FOR_EACH_EDGE (e, ei, bb->succs)
5831 {
5832 if (!e->dest->aux)
5833 {
5834 error ("extra outgoing edge %d->%d",
5835 bb->index, e->dest->index);
5836 err = 1;
5837 }
5838
5839 e->dest->aux = (void *)2;
5840 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5841 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5842 {
5843 error ("wrong outgoing edge flags at end of bb %d",
5844 bb->index);
5845 err = 1;
5846 }
5847 }
5848
5849 /* Check that we have all of them. */
5850 for (i = 0; i < n; ++i)
5851 {
5852 basic_block label_bb = gimple_switch_label_bb (cfun,
5853 switch_stmt, i);
5854
5855 if (label_bb->aux != (void *)2)
5856 {
5857 error ("missing edge %i->%i", bb->index, label_bb->index);
5858 err = 1;
5859 }
5860 }
5861
5862 FOR_EACH_EDGE (e, ei, bb->succs)
5863 e->dest->aux = (void *)0;
5864 }
5865 break;
5866
5867 case GIMPLE_EH_DISPATCH:
5868 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5869 break;
5870
5871 default:
5872 break;
5873 }
5874 }
5875
5876 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5877 verify_dominators (CDI_DOMINATORS);
5878
5879 return err;
5880 }
5881
5882 #if __GNUC__ >= 10
5883 # pragma GCC diagnostic pop
5884 #endif
5885
5886 /* Updates phi nodes after creating a forwarder block joined
5887 by edge FALLTHRU. */
5888
5889 static void
5890 gimple_make_forwarder_block (edge fallthru)
5891 {
5892 edge e;
5893 edge_iterator ei;
5894 basic_block dummy, bb;
5895 tree var;
5896 gphi_iterator gsi;
5897 bool forward_location_p;
5898
5899 dummy = fallthru->src;
5900 bb = fallthru->dest;
5901
5902 if (single_pred_p (bb))
5903 return;
5904
5905 /* We can forward location info if we have only one predecessor. */
5906 forward_location_p = single_pred_p (dummy);
5907
5908 /* If we redirected a branch we must create new PHI nodes at the
5909 start of BB. */
5910 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5911 {
5912 gphi *phi, *new_phi;
5913
5914 phi = gsi.phi ();
5915 var = gimple_phi_result (phi);
5916 new_phi = create_phi_node (var, bb);
5917 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5918 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5919 forward_location_p
5920 ? gimple_phi_arg_location (phi, 0) : UNKNOWN_LOCATION);
5921 }
5922
5923 /* Add the arguments we have stored on edges. */
5924 FOR_EACH_EDGE (e, ei, bb->preds)
5925 {
5926 if (e == fallthru)
5927 continue;
5928
5929 flush_pending_stmts (e);
5930 }
5931 }
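
/* For example (an illustrative sketch; the SSA names are hypothetical):
   if DUMMY holds

     x_1 = PHI <x_2(e1), x_3(e2)>

   then afterwards BB starts with a new PHI reached through FALLTHRU

     x_1 = PHI <x_4(FALLTHRU)>

   while the PHI left in DUMMY is renamed to define the fresh name

     x_4 = PHI <x_2(e1), x_3(e2)>

   and the remaining predecessors of BB get their arguments from the
   pending statements flushed above.  */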
5932
5933
5934 /* Return a non-special label in the head of basic block BB.
5935 Create one if it doesn't exist. */
5936
5937 tree
5938 gimple_block_label (basic_block bb)
5939 {
5940 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5941 bool first = true;
5942 tree label;
5943 glabel *stmt;
5944
5945 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5946 {
5947 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5948 if (!stmt)
5949 break;
5950 label = gimple_label_label (stmt);
5951 if (!DECL_NONLOCAL (label))
5952 {
5953 if (!first)
5954 gsi_move_before (&i, &s);
5955 return label;
5956 }
5957 }
5958
5959 label = create_artificial_label (UNKNOWN_LOCATION);
5960 stmt = gimple_build_label (label);
5961 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5962 return label;
5963 }
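
/* For example (an illustrative sketch; BB and the caller are
   hypothetical): a pass that needs a jump target for a block can write

     tree label = gimple_block_label (bb);

   and rely on the returned LABEL_DECL being bound at the head of BB,
   reusing an existing non-nonlocal label when one is already there.  */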
5964
5965
5966 /* Attempt to perform edge redirection by replacing a possibly complex
5967 jump instruction by a goto or by removing the jump completely.
5968 This can apply only if all edges now point to the same block. The
5969 parameters and return values are equivalent to
5970 redirect_edge_and_branch. */
5971
5972 static edge
5973 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5974 {
5975 basic_block src = e->src;
5976 gimple_stmt_iterator i;
5977 gimple *stmt;
5978
5979 /* We can replace or remove a complex jump only when we have exactly
5980 two edges. */
5981 if (EDGE_COUNT (src->succs) != 2
5982 /* Verify that all targets will be TARGET. Specifically, the
5983 edge that is not E must also go to TARGET. */
5984 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5985 return NULL;
5986
5987 i = gsi_last_bb (src);
5988 if (gsi_end_p (i))
5989 return NULL;
5990
5991 stmt = gsi_stmt (i);
5992
5993 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5994 {
5995 gsi_remove (&i, true);
5996 e = ssa_redirect_edge (e, target);
5997 e->flags = EDGE_FALLTHRU;
5998 return e;
5999 }
6000
6001 return NULL;
6002 }
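
/* For example (an illustrative sketch; the block numbers are
   hypothetical): given

     bb2: if (x_1 > 0) goto bb3; else goto bb4;

   redirecting the bb2->bb4 edge to bb3 makes both edges point at bb3,
   so the GIMPLE_COND is removed and the surviving edge becomes a
   single EDGE_FALLTHRU edge bb2->bb3.  */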
6003
6004
6005 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
6006 edge representing the redirected branch. */
6007
6008 static edge
6009 gimple_redirect_edge_and_branch (edge e, basic_block dest)
6010 {
6011 basic_block bb = e->src;
6012 gimple_stmt_iterator gsi;
6013 edge ret;
6014 gimple *stmt;
6015
6016 if (e->flags & EDGE_ABNORMAL)
6017 return NULL;
6018
6019 if (e->dest == dest)
6020 return NULL;
6021
6022 if (e->flags & EDGE_EH)
6023 return redirect_eh_edge (e, dest);
6024
6025 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
6026 {
6027 ret = gimple_try_redirect_by_replacing_jump (e, dest);
6028 if (ret)
6029 return ret;
6030 }
6031
6032 gsi = gsi_last_nondebug_bb (bb);
6033 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
6034
6035 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
6036 {
6037 case GIMPLE_COND:
6038     case GIMPLE_COND:
6039       /* For GIMPLE_COND, we only need to redirect the edge.  */
6039 break;
6040
6041 case GIMPLE_GOTO:
6042 /* No non-abnormal edges should lead from a non-simple goto, and
6043 simple ones should be represented implicitly. */
6044 gcc_unreachable ();
6045
6046 case GIMPLE_SWITCH:
6047 {
6048 gswitch *switch_stmt = as_a <gswitch *> (stmt);
6049 tree label = gimple_block_label (dest);
6050 tree cases = get_cases_for_edge (e, switch_stmt);
6051
6052 /* If we have a list of cases associated with E, then use it
6053 as it's a lot faster than walking the entire case vector. */
6054 if (cases)
6055 {
6056 edge e2 = find_edge (e->src, dest);
6057 tree last, first;
6058
6059 first = cases;
6060 while (cases)
6061 {
6062 last = cases;
6063 CASE_LABEL (cases) = label;
6064 cases = CASE_CHAIN (cases);
6065 }
6066
6067 /* If there was already an edge in the CFG, then we need
6068 to move all the cases associated with E to E2. */
6069 if (e2)
6070 {
6071 tree cases2 = get_cases_for_edge (e2, switch_stmt);
6072
6073 CASE_CHAIN (last) = CASE_CHAIN (cases2);
6074 CASE_CHAIN (cases2) = first;
6075 }
6076 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
6077 }
6078 else
6079 {
6080 size_t i, n = gimple_switch_num_labels (switch_stmt);
6081
6082 for (i = 0; i < n; i++)
6083 {
6084 tree elt = gimple_switch_label (switch_stmt, i);
6085 if (label_to_block (cfun, CASE_LABEL (elt)) == e->dest)
6086 CASE_LABEL (elt) = label;
6087 }
6088 }
6089 }
6090 break;
6091
6092 case GIMPLE_ASM:
6093 {
6094 gasm *asm_stmt = as_a <gasm *> (stmt);
6095 int i, n = gimple_asm_nlabels (asm_stmt);
6096 tree label = NULL;
6097
6098 for (i = 0; i < n; ++i)
6099 {
6100 tree cons = gimple_asm_label_op (asm_stmt, i);
6101 if (label_to_block (cfun, TREE_VALUE (cons)) == e->dest)
6102 {
6103 if (!label)
6104 label = gimple_block_label (dest);
6105 TREE_VALUE (cons) = label;
6106 }
6107 }
6108
6109 /* If we didn't find any label matching the former edge in the
6110 asm labels, we must be redirecting the fallthrough
6111 edge. */
6112 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
6113 }
6114 break;
6115
6116 case GIMPLE_RETURN:
6117 gsi_remove (&gsi, true);
6118 e->flags |= EDGE_FALLTHRU;
6119 break;
6120
6121 case GIMPLE_OMP_RETURN:
6122 case GIMPLE_OMP_CONTINUE:
6123 case GIMPLE_OMP_SECTIONS_SWITCH:
6124 case GIMPLE_OMP_FOR:
6125 /* The edges from OMP constructs can be simply redirected. */
6126 break;
6127
6128 case GIMPLE_EH_DISPATCH:
6129 if (!(e->flags & EDGE_FALLTHRU))
6130 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
6131 break;
6132
6133 case GIMPLE_TRANSACTION:
6134 if (e->flags & EDGE_TM_ABORT)
6135 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
6136 gimple_block_label (dest));
6137 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
6138 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
6139 gimple_block_label (dest));
6140 else
6141 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
6142 gimple_block_label (dest));
6143 break;
6144
6145 case GIMPLE_OMP_METADIRECTIVE:
6146 {
6147 for (unsigned i = 0; i < gimple_num_ops (stmt); i++)
6148 {
6149 tree label = gimple_omp_metadirective_label (stmt, i);
6150 if (label_to_block (cfun, label) == e->dest)
6151 gimple_omp_metadirective_set_label (stmt, i,
6152 gimple_block_label (dest));
6153 }
6154 }
6155 break;
6156
6157 default:
6158 /* Otherwise it must be a fallthru edge, and we don't need to
6159 do anything besides redirecting it. */
6160 gcc_assert (e->flags & EDGE_FALLTHRU);
6161 break;
6162 }
6163
6164 /* Update/insert PHI nodes as necessary. */
6165
6166 /* Now update the edges in the CFG. */
6167 e = ssa_redirect_edge (e, dest);
6168
6169 return e;
6170 }
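
/* For example (an illustrative sketch; the labels are hypothetical):
   redirecting the edge taken for "case 1" of

     switch (x_1) <default: L0, case 0: L0, case 1: L1>

   to a block whose head is labelled L2 rewrites the matching case
   vector entries to L2 (via the edge-to-cases cache when it is
   available) before ssa_redirect_edge moves the CFG edge itself.  */
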
6171
6172 /* Returns true if it is possible to remove edge E by redirecting
6173 it to the destination of the other edge from E->src. */
6174
6175 static bool
6176 gimple_can_remove_branch_p (const_edge e)
6177 {
6178 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
6179 return false;
6180
6181 return true;
6182 }
6183
6184 /* Simple wrapper, as we can always redirect fallthru edges. */
6185
6186 static basic_block
6187 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6188 {
6189 e = gimple_redirect_edge_and_branch (e, dest);
6190 gcc_assert (e);
6191
6192 return NULL;
6193 }
6194
6195
6196 /* Splits basic block BB after statement STMT (but at least after the
6197 labels). If STMT is NULL, BB is split just after the labels. */
6198
6199 static basic_block
6200 gimple_split_block (basic_block bb, void *stmt)
6201 {
6202 gimple_stmt_iterator gsi;
6203 gimple_stmt_iterator gsi_tgt;
6204 gimple_seq list;
6205 basic_block new_bb;
6206 edge e;
6207 edge_iterator ei;
6208
6209 new_bb = create_empty_bb (bb);
6210
6211 /* Redirect the outgoing edges. */
6212 new_bb->succs = bb->succs;
6213 bb->succs = NULL;
6214 FOR_EACH_EDGE (e, ei, new_bb->succs)
6215 e->src = new_bb;
6216
6217 /* Get a stmt iterator pointing to the first stmt to move. */
6218 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
6219 gsi = gsi_after_labels (bb);
6220 else
6221 {
6222 gsi = gsi_for_stmt ((gimple *) stmt);
6223 gsi_next (&gsi);
6224 }
6225
6226 /* Move everything from GSI to the new basic block. */
6227 if (gsi_end_p (gsi))
6228 return new_bb;
6229
6230   /* Split the statement list - avoid creating new containers as this
6231 brings ugly quadratic memory consumption in the inliner.
6232 (We are still quadratic since we need to update stmt BB pointers,
6233 sadly.) */
6234 gsi_split_seq_before (&gsi, &list);
6235 set_bb_seq (new_bb, list);
6236 for (gsi_tgt = gsi_start (list);
6237 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
6238 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6239
6240 return new_bb;
6241 }
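
/* For example (an illustrative sketch): callers normally reach this
   through the split_block hook and use the destination of the returned
   edge as the new block,

     basic_block new_bb = split_block (bb, stmt)->dest;

   after which BB keeps the labels and everything up to and including
   STMT, and NEW_BB receives BB's old successor edges.  */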
6242
6243
6244 /* Moves basic block BB after block AFTER. */
6245
6246 static bool
6247 gimple_move_block_after (basic_block bb, basic_block after)
6248 {
6249 if (bb->prev_bb == after)
6250 return true;
6251
6252 unlink_block (bb);
6253 link_block (bb, after);
6254
6255 return true;
6256 }
6257
6258
6259 /* Return TRUE if block BB has no executable statements, otherwise return
6260 FALSE. */
6261
6262 static bool
6263 gimple_empty_block_p (basic_block bb)
6264 {
6265 /* BB must have no executable statements. */
6266 gimple_stmt_iterator gsi = gsi_after_labels (bb);
6267 if (phi_nodes (bb))
6268 return false;
6269 while (!gsi_end_p (gsi))
6270 {
6271 gimple *stmt = gsi_stmt (gsi);
6272 if (is_gimple_debug (stmt))
6273 ;
6274 else if (gimple_code (stmt) == GIMPLE_NOP
6275 || gimple_code (stmt) == GIMPLE_PREDICT)
6276 ;
6277 else
6278 return false;
6279 gsi_next (&gsi);
6280 }
6281 return true;
6282 }
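
/* For example (an illustrative sketch): a block containing only

     <L1>:
     # DEBUG x => 0
     GIMPLE_PREDICT

   counts as empty, while any PHI node or a real statement such as a
   GIMPLE_ASSIGN makes it non-empty.  */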
6283
6284
6285 /* Split a basic block if it ends with a conditional branch and if the
6286 other part of the block is not empty. */
6287
6288 static basic_block
6289 gimple_split_block_before_cond_jump (basic_block bb)
6290 {
6291 gimple *last, *split_point;
6292 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6293 if (gsi_end_p (gsi))
6294 return NULL;
6295 last = gsi_stmt (gsi);
6296 if (gimple_code (last) != GIMPLE_COND
6297 && gimple_code (last) != GIMPLE_SWITCH)
6298 return NULL;
6299 gsi_prev (&gsi);
6300 split_point = gsi_stmt (gsi);
6301 return split_block (bb, split_point)->dest;
6302 }
6303
6304
6305 /* Return true if basic_block can be duplicated. */
6306
6307 static bool
6308 gimple_can_duplicate_bb_p (const_basic_block bb)
6309 {
6310 gimple *last = last_stmt (CONST_CAST_BB (bb));
6311
6312 /* Do checks that can only fail for the last stmt, to minimize the work in the
6313 stmt loop. */
6314   if (last)
6315     {
6316       /* A transaction is a single entry multiple exit region.  It
6317 	 must be duplicated in its entirety or not at all.  */
6318       if (gimple_code (last) == GIMPLE_TRANSACTION)
6319 	return false;
6320       /* An IFN_UNIQUE call must be duplicated as part of its group,
6321 	 or not at all.  */
6322       if (is_gimple_call (last)
6323 	  && gimple_call_internal_p (last)
6324 	  && gimple_call_internal_unique_p (last))
6325 	return false;
6326     }
6327
6328 for (gimple_stmt_iterator gsi = gsi_start_bb (CONST_CAST_BB (bb));
6329 !gsi_end_p (gsi); gsi_next (&gsi))
6330 {
6331 gimple *g = gsi_stmt (gsi);
6332
6333 /* An IFN_GOMP_SIMT_ENTER_ALLOC/IFN_GOMP_SIMT_EXIT call must be
6334 duplicated as part of its group, or not at all.
6335 The IFN_GOMP_SIMT_VOTE_ANY and IFN_GOMP_SIMT_XCHG_* are part of such a
6336 group, so the same holds there. */
6337 if (is_gimple_call (g)
6338 && (gimple_call_internal_p (g, IFN_GOMP_SIMT_ENTER_ALLOC)
6339 || gimple_call_internal_p (g, IFN_GOMP_SIMT_EXIT)
6340 || gimple_call_internal_p (g, IFN_GOMP_SIMT_VOTE_ANY)
6341 || gimple_call_internal_p (g, IFN_GOMP_SIMT_XCHG_BFLY)
6342 || gimple_call_internal_p (g, IFN_GOMP_SIMT_XCHG_IDX)))
6343 return false;
6344 }
6345
6346 return true;
6347 }
6348
6349 /* Create a duplicate of the basic block BB. NOTE: This does not
6350 preserve SSA form. */
6351
6352 static basic_block
6353 gimple_duplicate_bb (basic_block bb, copy_bb_data *id)
6354 {
6355 basic_block new_bb;
6356 gimple_stmt_iterator gsi_tgt;
6357
6358 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6359
6360 /* Copy the PHI nodes. We ignore PHI node arguments here because
6361 the incoming edges have not been setup yet. */
6362 for (gphi_iterator gpi = gsi_start_phis (bb);
6363 !gsi_end_p (gpi);
6364 gsi_next (&gpi))
6365 {
6366 gphi *phi, *copy;
6367 phi = gpi.phi ();
6368 copy = create_phi_node (NULL_TREE, new_bb);
6369 create_new_def_for (gimple_phi_result (phi), copy,
6370 gimple_phi_result_ptr (copy));
6371 gimple_set_uid (copy, gimple_uid (phi));
6372 }
6373
6374 gsi_tgt = gsi_start_bb (new_bb);
6375 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6376 !gsi_end_p (gsi);
6377 gsi_next (&gsi))
6378 {
6379 def_operand_p def_p;
6380 ssa_op_iter op_iter;
6381 tree lhs;
6382 gimple *stmt, *copy;
6383
6384 stmt = gsi_stmt (gsi);
6385 if (gimple_code (stmt) == GIMPLE_LABEL)
6386 continue;
6387
6388 /* Don't duplicate label debug stmts. */
6389 if (gimple_debug_bind_p (stmt)
6390 && TREE_CODE (gimple_debug_bind_get_var (stmt))
6391 == LABEL_DECL)
6392 continue;
6393
6394 /* Create a new copy of STMT and duplicate STMT's virtual
6395 operands. */
6396 copy = gimple_copy (stmt);
6397 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6398
6399 maybe_duplicate_eh_stmt (copy, stmt);
6400 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6401
6402 /* When copying around a stmt writing into a local non-user
6403 	 aggregate, make sure it won't share a stack slot with other
6404 vars. */
6405 lhs = gimple_get_lhs (stmt);
6406 if (lhs && TREE_CODE (lhs) != SSA_NAME)
6407 {
6408 tree base = get_base_address (lhs);
6409 if (base
6410 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6411 && DECL_IGNORED_P (base)
6412 && !TREE_STATIC (base)
6413 && !DECL_EXTERNAL (base)
6414 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6415 DECL_NONSHAREABLE (base) = 1;
6416 }
6417
6418       /* If requested, remap dependence info of cliques brought in
6419 via inlining. */
6420 if (id)
6421 for (unsigned i = 0; i < gimple_num_ops (copy); ++i)
6422 {
6423 tree op = gimple_op (copy, i);
6424 if (!op)
6425 continue;
6426 if (TREE_CODE (op) == ADDR_EXPR
6427 || TREE_CODE (op) == WITH_SIZE_EXPR)
6428 op = TREE_OPERAND (op, 0);
6429 while (handled_component_p (op))
6430 op = TREE_OPERAND (op, 0);
6431 if ((TREE_CODE (op) == MEM_REF
6432 || TREE_CODE (op) == TARGET_MEM_REF)
6433 && MR_DEPENDENCE_CLIQUE (op) > 1
6434 && MR_DEPENDENCE_CLIQUE (op) != bb->loop_father->owned_clique)
6435 {
6436 if (!id->dependence_map)
6437 id->dependence_map = new hash_map<dependence_hash,
6438 unsigned short>;
6439 bool existed;
6440 unsigned short &newc = id->dependence_map->get_or_insert
6441 (MR_DEPENDENCE_CLIQUE (op), &existed);
6442 if (!existed)
6443 {
6444 gcc_assert (MR_DEPENDENCE_CLIQUE (op) <= cfun->last_clique);
6445 newc = ++cfun->last_clique;
6446 }
6447 MR_DEPENDENCE_CLIQUE (op) = newc;
6448 }
6449 }
6450
6451 /* Create new names for all the definitions created by COPY and
6452 add replacement mappings for each new name. */
6453 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6454 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6455 }
6456
6457 return new_bb;
6458 }
6459
6460 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6461
6462 static void
6463 add_phi_args_after_copy_edge (edge e_copy)
6464 {
6465 basic_block bb, bb_copy = e_copy->src, dest;
6466 edge e;
6467 edge_iterator ei;
6468 gphi *phi, *phi_copy;
6469 tree def;
6470 gphi_iterator psi, psi_copy;
6471
6472 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6473 return;
6474
6475 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6476
6477 if (e_copy->dest->flags & BB_DUPLICATED)
6478 dest = get_bb_original (e_copy->dest);
6479 else
6480 dest = e_copy->dest;
6481
6482 e = find_edge (bb, dest);
6483 if (!e)
6484 {
6485 /* During loop unrolling the target of the latch edge is copied.
6486 	 In this case we are not looking for the edge to DEST, but for
6487 	 the edge to the duplicated block whose original was DEST.  */
6488 FOR_EACH_EDGE (e, ei, bb->succs)
6489 {
6490 if ((e->dest->flags & BB_DUPLICATED)
6491 && get_bb_original (e->dest) == dest)
6492 break;
6493 }
6494
6495 gcc_assert (e != NULL);
6496 }
6497
6498 for (psi = gsi_start_phis (e->dest),
6499 psi_copy = gsi_start_phis (e_copy->dest);
6500 !gsi_end_p (psi);
6501 gsi_next (&psi), gsi_next (&psi_copy))
6502 {
6503 phi = psi.phi ();
6504 phi_copy = psi_copy.phi ();
6505 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6506 add_phi_arg (phi_copy, def, e_copy,
6507 gimple_phi_arg_location_from_edge (phi, e));
6508 }
6509 }
6510
6511
6512 /* Basic block BB_COPY was created by code duplication. Add phi node
6513 arguments for edges going out of BB_COPY. The blocks that were
6514 duplicated have BB_DUPLICATED set. */
6515
6516 void
6517 add_phi_args_after_copy_bb (basic_block bb_copy)
6518 {
6519 edge e_copy;
6520 edge_iterator ei;
6521
6522 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6523 {
6524 add_phi_args_after_copy_edge (e_copy);
6525 }
6526 }
6527
6528 /* Blocks in REGION_COPY array of length N_REGION were created by
6529 duplication of basic blocks. Add phi node arguments for edges
6530 going from these blocks. If E_COPY is not NULL, also add
6531    phi node arguments for its destination.  */
6532
6533 void
6534 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6535 edge e_copy)
6536 {
6537 unsigned i;
6538
6539 for (i = 0; i < n_region; i++)
6540 region_copy[i]->flags |= BB_DUPLICATED;
6541
6542 for (i = 0; i < n_region; i++)
6543 add_phi_args_after_copy_bb (region_copy[i]);
6544 if (e_copy)
6545 add_phi_args_after_copy_edge (e_copy);
6546
6547 for (i = 0; i < n_region; i++)
6548 region_copy[i]->flags &= ~BB_DUPLICATED;
6549 }
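
/* For example (an illustrative sketch of one possible calling
   sequence, not a fixed contract): a caller duplicating a group of
   blocks by hand might

     1. call initialize_original_copy_tables (),
     2. copy each block with duplicate_block so the original/copy
	pairs are recorded, and wire up the edges between the copies,
     3. call add_phi_args_after_copy (copies, n_copies, NULL)

   so that PHI arguments on the edges leaving the copies are taken
   from the corresponding original edges.  */
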
6550
6551 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6552 important exit edge EXIT. By important we mean that no SSA name defined
6553    inside the region is live over the other exit edges of the region.  All entry
6554 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6555 to the duplicate of the region. Dominance and loop information is
6556 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6557 UPDATE_DOMINANCE is false then we assume that the caller will update the
6558 dominance information after calling this function. The new basic
6559 blocks are stored to REGION_COPY in the same order as they had in REGION,
6560 provided that REGION_COPY is not NULL.
6561 The function returns false if it is unable to copy the region,
6562 true otherwise. */
6563
6564 bool
6565 gimple_duplicate_sese_region (edge entry, edge exit,
6566 basic_block *region, unsigned n_region,
6567 basic_block *region_copy,
6568 bool update_dominance)
6569 {
6570 unsigned i;
6571 bool free_region_copy = false, copying_header = false;
6572 class loop *loop = entry->dest->loop_father;
6573 edge exit_copy;
6574 edge redirected;
6575 profile_count total_count = profile_count::uninitialized ();
6576 profile_count entry_count = profile_count::uninitialized ();
6577
6578 if (!can_copy_bbs_p (region, n_region))
6579 return false;
6580
6581 /* Some sanity checking. Note that we do not check for all possible
6582      misuses of the function.  I.e. if you ask to copy something weird,
6583      it will work, but the state of the structures probably will not be
6584      correct.  */
6585 for (i = 0; i < n_region; i++)
6586 {
6587 /* We do not handle subloops, i.e. all the blocks must belong to the
6588 same loop. */
6589 if (region[i]->loop_father != loop)
6590 return false;
6591
6592 if (region[i] != entry->dest
6593 && region[i] == loop->header)
6594 return false;
6595 }
6596
6597 /* In case the function is used for loop header copying (which is the primary
6598      use), ensure that EXIT and its copy will be the new latch and entry edges.  */
6599 if (loop->header == entry->dest)
6600 {
6601 copying_header = true;
6602
6603 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6604 return false;
6605
6606 for (i = 0; i < n_region; i++)
6607 if (region[i] != exit->src
6608 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6609 return false;
6610 }
6611
6612 initialize_original_copy_tables ();
6613
6614 if (copying_header)
6615 set_loop_copy (loop, loop_outer (loop));
6616 else
6617 set_loop_copy (loop, loop);
6618
6619 if (!region_copy)
6620 {
6621 region_copy = XNEWVEC (basic_block, n_region);
6622 free_region_copy = true;
6623 }
6624
6625 /* Record blocks outside the region that are dominated by something
6626 inside. */
6627 auto_vec<basic_block> doms;
6628 if (update_dominance)
6629 {
6630 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6631 }
6632
6633 if (entry->dest->count.initialized_p ())
6634 {
6635 total_count = entry->dest->count;
6636 entry_count = entry->count ();
6637 /* Fix up corner cases, to avoid division by zero or creation of negative
6638 frequencies. */
6639 if (entry_count > total_count)
6640 entry_count = total_count;
6641 }
6642
6643 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6644 split_edge_bb_loc (entry), update_dominance);
6645 if (total_count.initialized_p () && entry_count.initialized_p ())
6646 {
6647 scale_bbs_frequencies_profile_count (region, n_region,
6648 total_count - entry_count,
6649 total_count);
6650 scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6651 total_count);
6652 }
6653
6654 if (copying_header)
6655 {
6656 loop->header = exit->dest;
6657 loop->latch = exit->src;
6658 }
6659
6660 /* Redirect the entry and add the phi node arguments. */
6661 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6662 gcc_assert (redirected != NULL);
6663 flush_pending_stmts (entry);
6664
6665 /* Concerning updating of dominators: We must recount dominators
6666 for entry block and its copy. Anything that is outside of the
6667 region, but was dominated by something inside needs recounting as
6668 well. */
6669 if (update_dominance)
6670 {
6671 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6672 doms.safe_push (get_bb_original (entry->dest));
6673 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6674 }
6675
6676 /* Add the other PHI node arguments. */
6677 add_phi_args_after_copy (region_copy, n_region, NULL);
6678
6679 if (free_region_copy)
6680 free (region_copy);
6681
6682 free_original_copy_tables ();
6683 return true;
6684 }
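
/* For example (an illustrative sketch): for loop header copying, the
   primary use named above, ENTRY is the preheader edge and EXIT the
   edge from the header that leaves the loop, so a caller might write

     gimple_duplicate_sese_region (loop_preheader_edge (loop), exit,
				   bbs, n_bbs, NULL, true);

   after which the copied header runs once before the loop and EXIT's
   source has become the new latch.  */
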
6685
6686 /* Checks if BB is part of the region defined by N_REGION BBS. */
6687 static bool
6688 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6689 {
6690 unsigned int n;
6691
6692 for (n = 0; n < n_region; n++)
6693 {
6694 if (bb == bbs[n])
6695 return true;
6696 }
6697 return false;
6698 }
6699
6700 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6701    are stored to REGION_COPY in the same order in which they appear
6702 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6703 the region, EXIT an exit from it. The condition guarding EXIT
6704 is moved to ENTRY. Returns true if duplication succeeds, false
6705 otherwise.
6706
6707 For example,
6708
6709 some_code;
6710 if (cond)
6711 A;
6712 else
6713 B;
6714
6715 is transformed to
6716
6717 if (cond)
6718 {
6719 some_code;
6720 A;
6721 }
6722 else
6723 {
6724 some_code;
6725 B;
6726 }
6727 */
6728
6729 bool
6730 gimple_duplicate_sese_tail (edge entry, edge exit,
6731 basic_block *region, unsigned n_region,
6732 basic_block *region_copy)
6733 {
6734 unsigned i;
6735 bool free_region_copy = false;
6736 class loop *loop = exit->dest->loop_father;
6737 class loop *orig_loop = entry->dest->loop_father;
6738 basic_block switch_bb, entry_bb, nentry_bb;
6739 profile_count total_count = profile_count::uninitialized (),
6740 exit_count = profile_count::uninitialized ();
6741 edge exits[2], nexits[2], e;
6742 gimple_stmt_iterator gsi;
6743 gimple *cond_stmt;
6744 edge sorig, snew;
6745 basic_block exit_bb;
6746 gphi_iterator psi;
6747 gphi *phi;
6748 tree def;
6749 class loop *target, *aloop, *cloop;
6750
6751 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6752 exits[0] = exit;
6753 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6754
6755 if (!can_copy_bbs_p (region, n_region))
6756 return false;
6757
6758 initialize_original_copy_tables ();
6759 set_loop_copy (orig_loop, loop);
6760
6761   target = loop;
6762 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6763 {
6764 if (bb_part_of_region_p (aloop->header, region, n_region))
6765 {
6766 cloop = duplicate_loop (aloop, target);
6767 duplicate_subloops (aloop, cloop);
6768 }
6769 }
6770
6771 if (!region_copy)
6772 {
6773 region_copy = XNEWVEC (basic_block, n_region);
6774 free_region_copy = true;
6775 }
6776
6777 gcc_assert (!need_ssa_update_p (cfun));
6778
6779 /* Record blocks outside the region that are dominated by something
6780 inside. */
6781 auto_vec<basic_block> doms = get_dominated_by_region (CDI_DOMINATORS, region,
6782 n_region);
6783
6784 total_count = exit->src->count;
6785 exit_count = exit->count ();
6786 /* Fix up corner cases, to avoid division by zero or creation of negative
6787 frequencies. */
6788 if (exit_count > total_count)
6789 exit_count = total_count;
6790
6791 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6792 split_edge_bb_loc (exit), true);
6793 if (total_count.initialized_p () && exit_count.initialized_p ())
6794 {
6795 scale_bbs_frequencies_profile_count (region, n_region,
6796 total_count - exit_count,
6797 total_count);
6798 scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6799 total_count);
6800 }
6801
6802 /* Create the switch block, and put the exit condition to it. */
6803 entry_bb = entry->dest;
6804 nentry_bb = get_bb_copy (entry_bb);
6805 if (!last_stmt (entry->src)
6806 || !stmt_ends_bb_p (last_stmt (entry->src)))
6807 switch_bb = entry->src;
6808 else
6809 switch_bb = split_edge (entry);
6810 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6811
6812 gsi = gsi_last_bb (switch_bb);
6813 cond_stmt = last_stmt (exit->src);
6814 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6815 cond_stmt = gimple_copy (cond_stmt);
6816
6817 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6818
6819 sorig = single_succ_edge (switch_bb);
6820 sorig->flags = exits[1]->flags;
6821 sorig->probability = exits[1]->probability;
6822 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6823 snew->probability = exits[0]->probability;
6824
6825
6826 /* Register the new edge from SWITCH_BB in loop exit lists. */
6827 rescan_loop_exit (snew, true, false);
6828
6829 /* Add the PHI node arguments. */
6830 add_phi_args_after_copy (region_copy, n_region, snew);
6831
6832 /* Get rid of now superfluous conditions and associated edges (and phi node
6833 arguments). */
6834 exit_bb = exit->dest;
6835
6836 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6837 PENDING_STMT (e) = NULL;
6838
6839 /* The latch of ORIG_LOOP was copied, and so was the backedge
6840 to the original header. We redirect this backedge to EXIT_BB. */
6841 for (i = 0; i < n_region; i++)
6842 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6843 {
6844 gcc_assert (single_succ_edge (region_copy[i]));
6845 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6846 PENDING_STMT (e) = NULL;
6847 for (psi = gsi_start_phis (exit_bb);
6848 !gsi_end_p (psi);
6849 gsi_next (&psi))
6850 {
6851 phi = psi.phi ();
6852 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6853 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6854 }
6855 }
6856 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6857 PENDING_STMT (e) = NULL;
6858
6859 /* Anything that is outside of the region, but was dominated by something
6860 inside needs to update dominance info. */
6861 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6862 /* Update the SSA web. */
6863 update_ssa (TODO_update_ssa);
6864
6865 if (free_region_copy)
6866 free (region_copy);
6867
6868 free_original_copy_tables ();
6869 return true;
6870 }
6871
6872 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6873 adding blocks when the dominator traversal reaches EXIT. This
6874 function silently assumes that ENTRY strictly dominates EXIT. */
6875
6876 void
6877 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6878 vec<basic_block> *bbs_p)
6879 {
6880 basic_block son;
6881
6882 for (son = first_dom_son (CDI_DOMINATORS, entry);
6883 son;
6884 son = next_dom_son (CDI_DOMINATORS, son))
6885 {
6886 bbs_p->safe_push (son);
6887 if (son != exit)
6888 gather_blocks_in_sese_region (son, exit, bbs_p);
6889 }
6890 }
6891
6892 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6893 The duplicates are recorded in VARS_MAP. */
6894
6895 static void
6896 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6897 tree to_context)
6898 {
6899 tree t = *tp, new_t;
6900 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6901
6902 if (DECL_CONTEXT (t) == to_context)
6903 return;
6904
6905 bool existed;
6906 tree &loc = vars_map->get_or_insert (t, &existed);
6907
6908 if (!existed)
6909 {
6910 if (SSA_VAR_P (t))
6911 {
6912 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6913 add_local_decl (f, new_t);
6914 }
6915 else
6916 {
6917 gcc_assert (TREE_CODE (t) == CONST_DECL);
6918 new_t = copy_node (t);
6919 }
6920 DECL_CONTEXT (new_t) = to_context;
6921
6922 loc = new_t;
6923 }
6924 else
6925 new_t = loc;
6926
6927 *tp = new_t;
6928 }
6929
6930
6931 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6932 VARS_MAP maps old ssa names and var_decls to the new ones. */
6933
6934 static tree
6935 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6936 tree to_context)
6937 {
6938 tree new_name;
6939
6940 gcc_assert (!virtual_operand_p (name));
6941
6942 tree *loc = vars_map->get (name);
6943
6944 if (!loc)
6945 {
6946 tree decl = SSA_NAME_VAR (name);
6947 if (decl)
6948 {
6949 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6950 replace_by_duplicate_decl (&decl, vars_map, to_context);
6951 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6952 decl, SSA_NAME_DEF_STMT (name));
6953 }
6954 else
6955 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6956 name, SSA_NAME_DEF_STMT (name));
6957
6958 /* Now that we've used the def stmt to define new_name, make sure it
6959 doesn't define name anymore. */
6960 SSA_NAME_DEF_STMT (name) = NULL;
6961
6962 vars_map->put (name, new_name);
6963 }
6964 else
6965 new_name = *loc;
6966
6967 return new_name;
6968 }
6969
6970 struct move_stmt_d
6971 {
6972 tree orig_block;
6973 tree new_block;
6974 tree from_context;
6975 tree to_context;
6976 hash_map<tree, tree> *vars_map;
6977 htab_t new_label_map;
6978 hash_map<void *, void *> *eh_map;
6979 bool remap_decls_p;
6980 };
6981
6982 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6983    contained in *TP if it was previously ORIG_BLOCK, and change the
6984 DECL_CONTEXT of every local variable referenced in *TP. */
6985
6986 static tree
6987 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6988 {
6989 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6990 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6991 tree t = *tp;
6992
6993 if (EXPR_P (t))
6994 {
6995 tree block = TREE_BLOCK (t);
6996 if (block == NULL_TREE)
6997 ;
6998 else if (block == p->orig_block
6999 || p->orig_block == NULL_TREE)
7000 {
7001 /* tree_node_can_be_shared says we can share invariant
7002 	     addresses but unshare_expr copies them anyway.  Make sure
7003 to unshare before adjusting the block in place - we do not
7004 always see a copy here. */
7005 if (TREE_CODE (t) == ADDR_EXPR
7006 && is_gimple_min_invariant (t))
7007 *tp = t = unshare_expr (t);
7008 TREE_SET_BLOCK (t, p->new_block);
7009 }
7010 else if (flag_checking)
7011 {
7012 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
7013 block = BLOCK_SUPERCONTEXT (block);
7014 gcc_assert (block == p->orig_block);
7015 }
7016 }
7017 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
7018 {
7019 if (TREE_CODE (t) == SSA_NAME)
7020 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
7021 else if (TREE_CODE (t) == PARM_DECL
7022 && gimple_in_ssa_p (cfun))
7023 *tp = *(p->vars_map->get (t));
7024 else if (TREE_CODE (t) == LABEL_DECL)
7025 {
7026 if (p->new_label_map)
7027 {
7028 struct tree_map in, *out;
7029 in.base.from = t;
7030 out = (struct tree_map *)
7031 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
7032 if (out)
7033 *tp = t = out->to;
7034 }
7035
7036 /* For FORCED_LABELs we can end up with references from other
7037 functions if some SESE regions are outlined. It is UB to
7038 jump in between them, but they could be used just for printing
7039 addresses etc. In that case, DECL_CONTEXT on the label should
7040 be the function containing the glabel stmt with that LABEL_DECL,
7041 	 rather than whatever function a reference to the label was
7042 	 last seen in.  */
7043 if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
7044 DECL_CONTEXT (t) = p->to_context;
7045 }
7046 else if (p->remap_decls_p)
7047 {
7048 /* Replace T with its duplicate. T should no longer appear in the
7049 parent function, so this looks wasteful; however, it may appear
7050 in referenced_vars, and more importantly, as virtual operands of
7051 statements, and in alias lists of other variables. It would be
7052 quite difficult to expunge it from all those places. ??? It might
7053 suffice to do this for addressable variables. */
7054 if ((VAR_P (t) && !is_global_var (t))
7055 || TREE_CODE (t) == CONST_DECL)
7056 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
7057 }
7058 *walk_subtrees = 0;
7059 }
7060 else if (TYPE_P (t))
7061 *walk_subtrees = 0;
7062
7063 return NULL_TREE;
7064 }
7065
7066 /* Helper for move_stmt_r. Given an EH region number for the source
7067    function, map that to the duplicate EH region number in the dest.  */
7068
7069 static int
7070 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
7071 {
7072 eh_region old_r, new_r;
7073
7074 old_r = get_eh_region_from_number (old_nr);
7075 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
7076
7077 return new_r->index;
7078 }
7079
7080 /* Similar, but operate on INTEGER_CSTs. */
7081
7082 static tree
7083 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
7084 {
7085 int old_nr, new_nr;
7086
7087 old_nr = tree_to_shwi (old_t_nr);
7088 new_nr = move_stmt_eh_region_nr (old_nr, p);
7089
7090 return build_int_cst (integer_type_node, new_nr);
7091 }
7092
7093 /* Like move_stmt_op, but for gimple statements.
7094
7095 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
7096 contained in the current statement in *GSI_P and change the
7097 DECL_CONTEXT of every local variable referenced in the current
7098 statement. */
7099
7100 static tree
7101 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
7102 struct walk_stmt_info *wi)
7103 {
7104 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
7105 gimple *stmt = gsi_stmt (*gsi_p);
7106 tree block = gimple_block (stmt);
7107
7108 if (block == p->orig_block
7109 || (p->orig_block == NULL_TREE
7110 && block != NULL_TREE))
7111 gimple_set_block (stmt, p->new_block);
7112
7113 switch (gimple_code (stmt))
7114 {
7115 case GIMPLE_CALL:
7116 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
7117 {
7118 tree r, fndecl = gimple_call_fndecl (stmt);
7119 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
7120 switch (DECL_FUNCTION_CODE (fndecl))
7121 {
7122 case BUILT_IN_EH_COPY_VALUES:
7123 r = gimple_call_arg (stmt, 1);
7124 r = move_stmt_eh_region_tree_nr (r, p);
7125 gimple_call_set_arg (stmt, 1, r);
7126 /* FALLTHRU */
7127
7128 case BUILT_IN_EH_POINTER:
7129 case BUILT_IN_EH_FILTER:
7130 r = gimple_call_arg (stmt, 0);
7131 r = move_stmt_eh_region_tree_nr (r, p);
7132 gimple_call_set_arg (stmt, 0, r);
7133 break;
7134
7135 default:
7136 break;
7137 }
7138 }
7139 break;
7140
7141 case GIMPLE_RESX:
7142 {
7143 gresx *resx_stmt = as_a <gresx *> (stmt);
7144 int r = gimple_resx_region (resx_stmt);
7145 r = move_stmt_eh_region_nr (r, p);
7146 gimple_resx_set_region (resx_stmt, r);
7147 }
7148 break;
7149
7150 case GIMPLE_EH_DISPATCH:
7151 {
7152 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
7153 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
7154 r = move_stmt_eh_region_nr (r, p);
7155 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
7156 }
7157 break;
7158
7159 case GIMPLE_OMP_RETURN:
7160 case GIMPLE_OMP_CONTINUE:
7161 break;
7162
7163 case GIMPLE_LABEL:
7164 {
7165 /* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
7166 so that such labels can be referenced from other regions.
7167 Make sure to update it when seeing a GIMPLE_LABEL though,
7168 that is the owner of the label. */
7169 walk_gimple_op (stmt, move_stmt_op, wi);
7170 *handled_ops_p = true;
7171 tree label = gimple_label_label (as_a <glabel *> (stmt));
7172 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
7173 DECL_CONTEXT (label) = p->to_context;
7174 }
7175 break;
7176
7177 default:
7178 if (is_gimple_omp (stmt))
7179 {
7180 /* Do not remap variables inside OMP directives. Variables
7181 referenced in clauses and directive header belong to the
7182 parent function and should not be moved into the child
7183 function. */
7184 bool save_remap_decls_p = p->remap_decls_p;
7185 p->remap_decls_p = false;
7186 *handled_ops_p = true;
7187
7188 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
7189 move_stmt_op, wi);
7190
7191 p->remap_decls_p = save_remap_decls_p;
7192 }
7193 break;
7194 }
7195
7196 return NULL_TREE;
7197 }
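
/* For example (an illustrative sketch; D is a filled-in move_stmt_d):
   the two walkers above are driven per statement as

     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     wi.info = &d;
     walk_gimple_stmt (&gsi, move_stmt_r, move_stmt_op, &wi);

   which is exactly how move_block_to_fn uses them below.  */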
7198
7199 /* Move basic block BB from function CFUN to function DEST_FN. The
7200 block is moved out of the original linked list and placed after
7201 block AFTER in the new list. Also, the block is removed from the
7202 original array of blocks and placed in DEST_FN's array of blocks.
7203    If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
7204 updated to reflect the moved edges.
7205
7206 The local variables are remapped to new instances, VARS_MAP is used
7207 to record the mapping. */
7208
7209 static void
7210 move_block_to_fn (struct function *dest_cfun, basic_block bb,
7211 basic_block after, bool update_edge_count_p,
7212 struct move_stmt_d *d)
7213 {
7214 struct control_flow_graph *cfg;
7215 edge_iterator ei;
7216 edge e;
7217 gimple_stmt_iterator si;
7218 unsigned old_len;
7219
7220 /* Remove BB from dominance structures. */
7221 delete_from_dominance_info (CDI_DOMINATORS, bb);
7222
7223 /* Move BB from its current loop to the copy in the new function. */
7224 if (current_loops)
7225 {
7226 class loop *new_loop = (class loop *)bb->loop_father->aux;
7227 if (new_loop)
7228 bb->loop_father = new_loop;
7229 }
7230
7231 /* Link BB to the new linked list. */
7232 move_block_after (bb, after);
7233
7234 /* Update the edge count in the corresponding flowgraphs. */
7235 if (update_edge_count_p)
7236 FOR_EACH_EDGE (e, ei, bb->succs)
7237 {
7238 cfun->cfg->x_n_edges--;
7239 dest_cfun->cfg->x_n_edges++;
7240 }
7241
7242 /* Remove BB from the original basic block array. */
7243 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
7244 cfun->cfg->x_n_basic_blocks--;
7245
7246 /* Grow DEST_CFUN's basic block array if needed. */
7247 cfg = dest_cfun->cfg;
7248 cfg->x_n_basic_blocks++;
7249 if (bb->index >= cfg->x_last_basic_block)
7250 cfg->x_last_basic_block = bb->index + 1;
7251
7252 old_len = vec_safe_length (cfg->x_basic_block_info);
7253 if ((unsigned) cfg->x_last_basic_block >= old_len)
7254 vec_safe_grow_cleared (cfg->x_basic_block_info,
7255 cfg->x_last_basic_block + 1);
7256
7257 (*cfg->x_basic_block_info)[bb->index] = bb;
7258
7259 /* Remap the variables in phi nodes. */
7260 for (gphi_iterator psi = gsi_start_phis (bb);
7261 !gsi_end_p (psi); )
7262 {
7263 gphi *phi = psi.phi ();
7264 use_operand_p use;
7265 tree op = PHI_RESULT (phi);
7266 ssa_op_iter oi;
7267 unsigned i;
7268
7269 if (virtual_operand_p (op))
7270 {
7271 /* Remove the phi nodes for virtual operands (alias analysis will be
7272 run for the new function, anyway). But replace all uses that
7273 might be outside of the region we move. */
7274 use_operand_p use_p;
7275 imm_use_iterator iter;
7276 gimple *use_stmt;
7277 FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
7278 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7279 SET_USE (use_p, SSA_NAME_VAR (op));
7280 remove_phi_node (&psi, true);
7281 continue;
7282 }
7283
7284 SET_PHI_RESULT (phi,
7285 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7286 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
7287 {
7288 op = USE_FROM_PTR (use);
7289 if (TREE_CODE (op) == SSA_NAME)
7290 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7291 }
7292
7293 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
7294 {
7295 location_t locus = gimple_phi_arg_location (phi, i);
7296 tree block = LOCATION_BLOCK (locus);
7297
7298 if (locus == UNKNOWN_LOCATION)
7299 continue;
7300 if (d->orig_block == NULL_TREE || block == d->orig_block)
7301 {
7302 locus = set_block (locus, d->new_block);
7303 gimple_phi_arg_set_location (phi, i, locus);
7304 }
7305 }
7306
7307 gsi_next (&psi);
7308 }
7309
7310 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7311 {
7312 gimple *stmt = gsi_stmt (si);
7313 struct walk_stmt_info wi;
7314
7315 memset (&wi, 0, sizeof (wi));
7316 wi.info = d;
7317 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
7318
7319 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
7320 {
7321 tree label = gimple_label_label (label_stmt);
7322 int uid = LABEL_DECL_UID (label);
7323
7324 gcc_assert (uid > -1);
7325
7326 old_len = vec_safe_length (cfg->x_label_to_block_map);
7327 if (old_len <= (unsigned) uid)
7328 vec_safe_grow_cleared (cfg->x_label_to_block_map, uid + 1);
7329
7330 (*cfg->x_label_to_block_map)[uid] = bb;
7331 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
7332
7333 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
7334
7335 if (uid >= dest_cfun->cfg->last_label_uid)
7336 dest_cfun->cfg->last_label_uid = uid + 1;
7337 }
7338
7339 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
7340 remove_stmt_from_eh_lp_fn (cfun, stmt);
7341
7342 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
7343 gimple_remove_stmt_histograms (cfun, stmt);
7344
7345 /* We cannot leave any operands allocated from the operand caches of
7346 the current function. */
7347 free_stmt_operands (cfun, stmt);
7348 push_cfun (dest_cfun);
7349 update_stmt (stmt);
7350 if (is_gimple_call (stmt))
7351 notice_special_calls (as_a <gcall *> (stmt));
7352 pop_cfun ();
7353 }
7354
7355 FOR_EACH_EDGE (e, ei, bb->succs)
7356 if (e->goto_locus != UNKNOWN_LOCATION)
7357 {
7358 tree block = LOCATION_BLOCK (e->goto_locus);
7359 if (d->orig_block == NULL_TREE
7360 || block == d->orig_block)
7361 e->goto_locus = set_block (e->goto_locus, d->new_block);
7362 }
7363 }
7364
7365 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7366 the outermost EH region. Use REGION as the incoming base EH region.
7367 If there is no single outermost region, return NULL and set *ALL to
7368 true. */
7369
7370 static eh_region
7371 find_outermost_region_in_block (struct function *src_cfun,
7372 basic_block bb, eh_region region,
7373 bool *all)
7374 {
7375 gimple_stmt_iterator si;
7376
7377 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7378 {
7379 gimple *stmt = gsi_stmt (si);
7380 eh_region stmt_region;
7381 int lp_nr;
7382
7383 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7384 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7385 if (stmt_region)
7386 {
7387 if (region == NULL)
7388 region = stmt_region;
7389 else if (stmt_region != region)
7390 {
7391 region = eh_region_outermost (src_cfun, stmt_region, region);
7392 if (region == NULL)
7393 {
7394 *all = true;
7395 return NULL;
7396 }
7397 }
7398 }
7399 }
7400
7401 return region;
7402 }
7403
7404 static tree
7405 new_label_mapper (tree decl, void *data)
7406 {
7407 htab_t hash = (htab_t) data;
7408 struct tree_map *m;
7409 void **slot;
7410
7411 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7412
7413 m = XNEW (struct tree_map);
7414 m->hash = DECL_UID (decl);
7415 m->base.from = decl;
7416 m->to = create_artificial_label (UNKNOWN_LOCATION);
7417 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7418 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7419 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7420
7421 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7422 gcc_assert (*slot == NULL);
7423
7424 *slot = m;
7425
7426 return m->to;
7427 }
7428
7429 /* Tree walker to replace the decls used inside value expressions by
7430 duplicates. */
7431
7432 static tree
7433 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7434 {
7435 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7436
7437 switch (TREE_CODE (*tp))
7438 {
7439 case VAR_DECL:
7440 case PARM_DECL:
7441 case RESULT_DECL:
7442 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7443 break;
7444 default:
7445 break;
7446 }
7447
7448 if (IS_TYPE_OR_DECL_P (*tp))
7449 *walk_subtrees = false;
7450
7451 return NULL;
7452 }
7453
7454 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7455 subblocks. */
7456
7457 static void
7458 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7459 tree to_context)
7460 {
7461 tree *tp, t;
7462
7463 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7464 {
7465 t = *tp;
7466 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7467 continue;
7468 replace_by_duplicate_decl (&t, vars_map, to_context);
7469 if (t != *tp)
7470 {
7471 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7472 {
7473 tree x = DECL_VALUE_EXPR (*tp);
7474 struct replace_decls_d rd = { vars_map, to_context };
7475 	      x = unshare_expr (x);
7476 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7477 SET_DECL_VALUE_EXPR (t, x);
7478 DECL_HAS_VALUE_EXPR_P (t) = 1;
7479 }
7480 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7481 *tp = t;
7482 }
7483 }
7484
7485 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7486 replace_block_vars_by_duplicates (block, vars_map, to_context);
7487 }
7488
7489 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7490 from FN1 to FN2. */
7491
7492 static void
7493 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7494 class loop *loop)
7495 {
7496 /* Discard it from the old loop array. */
7497 (*get_loops (fn1))[loop->num] = NULL;
7498
7499 /* Place it in the new loop array, assigning it a new number. */
7500 loop->num = number_of_loops (fn2);
7501 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7502
7503 /* Recurse to children. */
7504 for (loop = loop->inner; loop; loop = loop->next)
7505 fixup_loop_arrays_after_move (fn1, fn2, loop);
7506 }
7507
7508 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7509 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7510
7511 DEBUG_FUNCTION void
7512 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7513 {
7514 basic_block bb;
7515 edge_iterator ei;
7516 edge e;
7517 bitmap bbs = BITMAP_ALLOC (NULL);
7518 int i;
7519
7520 gcc_assert (entry != NULL);
7521 gcc_assert (entry != exit);
7522 gcc_assert (bbs_p != NULL);
7523
7524 gcc_assert (bbs_p->length () > 0);
7525
7526 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7527 bitmap_set_bit (bbs, bb->index);
7528
7529 gcc_assert (bitmap_bit_p (bbs, entry->index));
7530 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7531
7532 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7533 {
7534 if (bb == entry)
7535 {
7536 gcc_assert (single_pred_p (entry));
7537 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7538 }
7539 else
7540 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7541 {
7542 e = ei_edge (ei);
7543 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7544 }
7545
7546 if (bb == exit)
7547 {
7548 gcc_assert (single_succ_p (exit));
7549 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7550 }
7551 else
7552 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7553 {
7554 e = ei_edge (ei);
7555 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7556 }
7557 }
7558
7559 BITMAP_FREE (bbs);
7560 }
7561
7562 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7563
7564 bool
7565 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7566 {
7567 bitmap release_names = (bitmap)data;
7568
7569 if (TREE_CODE (from) != SSA_NAME)
7570 return true;
7571
7572 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7573 return true;
7574 }
7575
7576 /* Return LOOP_DIST_ALIAS call if present in BB. */
7577
7578 static gimple *
7579 find_loop_dist_alias (basic_block bb)
7580 {
7581 gimple *g = last_stmt (bb);
7582 if (g == NULL || gimple_code (g) != GIMPLE_COND)
7583 return NULL;
7584
7585 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7586 gsi_prev (&gsi);
7587 if (gsi_end_p (gsi))
7588 return NULL;
7589
7590 g = gsi_stmt (gsi);
7591 if (gimple_call_internal_p (g, IFN_LOOP_DIST_ALIAS))
7592 return g;
7593 return NULL;
7594 }
7595
7596 /* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
7597    to VALUE and update any immediate uses of its LHS.  */
7598
7599 void
7600 fold_loop_internal_call (gimple *g, tree value)
7601 {
7602 tree lhs = gimple_call_lhs (g);
7603 use_operand_p use_p;
7604 imm_use_iterator iter;
7605 gimple *use_stmt;
7606 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7607
7608 replace_call_with_value (&gsi, value);
7609 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
7610 {
7611 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7612 SET_USE (use_p, value);
7613 update_stmt (use_stmt);
7614 }
7615 }
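
/* For example (an illustrative sketch): once a pass decides a
   versioning condition is dead, the guard can be resolved as

     gimple *g = find_loop_dist_alias (bb);
     if (g)
       fold_loop_internal_call (g, boolean_false_node);

   which replaces the internal call with the constant and propagates
   it into the GIMPLE_COND that consumes it.  */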
7616
7617 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7618 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7619 single basic block in the original CFG and the new basic block is
7620 returned. DEST_CFUN must not have a CFG yet.
7621
7622 Note that the region need not be a pure SESE region. Blocks inside
7623 the region may contain calls to abort/exit. The only restriction
7624 is that ENTRY_BB should be the only entry point and it must
7625 dominate EXIT_BB.
7626
7627 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7628    function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7629 to the new function.
7630
7631 All local variables referenced in the region are assumed to be in
7632 the corresponding BLOCK_VARS and unexpanded variable lists
7633 associated with DEST_CFUN.
7634
7635 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7636 reimplement move_sese_region_to_fn by duplicating the region rather than
7637 moving it. */
7638
7639 basic_block
7640 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7641 basic_block exit_bb, tree orig_block)
7642 {
7643 vec<basic_block> bbs;
7644 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7645 basic_block after, bb, *entry_pred, *exit_succ, abb;
7646 struct function *saved_cfun = cfun;
7647 int *entry_flag, *exit_flag;
7648 profile_probability *entry_prob, *exit_prob;
7649 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7650 edge e;
7651 edge_iterator ei;
7652 htab_t new_label_map;
7653 hash_map<void *, void *> *eh_map;
7654 class loop *loop = entry_bb->loop_father;
7655 class loop *loop0 = get_loop (saved_cfun, 0);
7656 struct move_stmt_d d;
7657
7658 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7659 region. */
7660 gcc_assert (entry_bb != exit_bb
7661 && (!exit_bb
7662 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7663
7664 /* Collect all the blocks in the region. Manually add ENTRY_BB
7665 because it won't be added by dfs_enumerate_from. */
7666 bbs.create (0);
7667 bbs.safe_push (entry_bb);
7668 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7669
7670 if (flag_checking)
7671 verify_sese (entry_bb, exit_bb, &bbs);
7672
7673 /* The blocks that used to be dominated by something in BBS will now be
7674 dominated by the new block. */
7675 auto_vec<basic_block> dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7676 bbs.address (),
7677 bbs.length ());
7678
7679 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7680 the predecessor edges to ENTRY_BB and the successor edges to
7681 EXIT_BB so that we can re-attach them to the new basic block that
7682 will replace the region. */
7683 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7684 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7685 entry_flag = XNEWVEC (int, num_entry_edges);
7686 entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7687 i = 0;
7688 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7689 {
7690 entry_prob[i] = e->probability;
7691 entry_flag[i] = e->flags;
7692 entry_pred[i++] = e->src;
7693 remove_edge (e);
7694 }
7695
7696 if (exit_bb)
7697 {
7698 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7699 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7700 exit_flag = XNEWVEC (int, num_exit_edges);
7701 exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7702 i = 0;
7703 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7704 {
7705 exit_prob[i] = e->probability;
7706 exit_flag[i] = e->flags;
7707 exit_succ[i++] = e->dest;
7708 remove_edge (e);
7709 }
7710 }
7711 else
7712 {
7713 num_exit_edges = 0;
7714 exit_succ = NULL;
7715 exit_flag = NULL;
7716 exit_prob = NULL;
7717 }
7718
7719 /* Switch context to the child function to initialize DEST_FN's CFG. */
7720 gcc_assert (dest_cfun->cfg == NULL);
7721 push_cfun (dest_cfun);
7722
7723 init_empty_tree_cfg ();
7724
7725 /* Initialize EH information for the new function. */
7726 eh_map = NULL;
7727 new_label_map = NULL;
7728 if (saved_cfun->eh)
7729 {
7730 eh_region region = NULL;
7731 bool all = false;
7732
7733 FOR_EACH_VEC_ELT (bbs, i, bb)
7734 {
7735 region = find_outermost_region_in_block (saved_cfun, bb, region, &all);
7736 if (all)
7737 break;
7738 }
7739
7740 init_eh_for_function ();
7741 if (region != NULL || all)
7742 {
7743 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7744 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7745 new_label_mapper, new_label_map);
7746 }
7747 }
7748
7749 /* Initialize an empty loop tree. */
7750 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7751 init_loops_structure (dest_cfun, loops, 1);
7752 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7753 set_loops_for_fn (dest_cfun, loops);
7754
7755 vec<loop_p, va_gc> *larray = get_loops (saved_cfun)->copy ();
7756
7757 /* Move the outlined loop tree part. */
7758 num_nodes = bbs.length ();
7759 FOR_EACH_VEC_ELT (bbs, i, bb)
7760 {
7761 if (bb->loop_father->header == bb)
7762 {
7763 class loop *this_loop = bb->loop_father;
7764 /* Avoid the need to remap SSA names used in nb_iterations. */
7765 free_numbers_of_iterations_estimates (this_loop);
7766 class loop *outer = loop_outer (this_loop);
7767 if (outer == loop
7768 /* If the SESE region contains some bbs ending with
7769 a noreturn call, those are considered to belong
7770 to the outermost loop in saved_cfun, rather than
7771 the entry_bb's loop_father. */
7772 || outer == loop0)
7773 {
7774 if (outer != loop)
7775 num_nodes -= this_loop->num_nodes;
7776 flow_loop_tree_node_remove (bb->loop_father);
7777 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7778 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7779 }
7780 }
7781 else if (bb->loop_father == loop0 && loop0 != loop)
7782 num_nodes--;
7783
7784 /* Remove loop exits from the outlined region. */
7785 if (loops_for_fn (saved_cfun)->exits)
7786 FOR_EACH_EDGE (e, ei, bb->succs)
7787 {
7788 struct loops *l = loops_for_fn (saved_cfun);
7789 loop_exit **slot
7790 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7791 NO_INSERT);
7792 if (slot)
7793 l->exits->clear_slot (slot);
7794 }
7795 }
7796
7797 /* Adjust the number of blocks in the tree root of the outlined part. */
7798 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7799
7800 /* Setup a mapping to be used by move_block_to_fn. */
7801 loop->aux = current_loops->tree_root;
7802 loop0->aux = current_loops->tree_root;
7803
7804 /* Fix up orig_loop_num. If the loop referenced in it has been moved
7805 to dest_cfun, update the orig_loop_num field, otherwise clear it. */
7806 signed char *moved_orig_loop_num = NULL;
7807 for (auto dloop : loops_list (dest_cfun, 0))
7808 if (dloop->orig_loop_num)
7809 {
7810 if (moved_orig_loop_num == NULL)
7811 moved_orig_loop_num
7812 = XCNEWVEC (signed char, vec_safe_length (larray));
7813 if ((*larray)[dloop->orig_loop_num] != NULL
7814 && get_loop (saved_cfun, dloop->orig_loop_num) == NULL)
7815 {
7816 if (moved_orig_loop_num[dloop->orig_loop_num] >= 0
7817 && moved_orig_loop_num[dloop->orig_loop_num] < 2)
7818 moved_orig_loop_num[dloop->orig_loop_num]++;
7819 dloop->orig_loop_num = (*larray)[dloop->orig_loop_num]->num;
7820 }
7821 else
7822 {
7823 moved_orig_loop_num[dloop->orig_loop_num] = -1;
7824 dloop->orig_loop_num = 0;
7825 }
7826 }
7827 pop_cfun ();
7828
7829 if (moved_orig_loop_num)
7830 {
7831 FOR_EACH_VEC_ELT (bbs, i, bb)
7832 {
7833 gimple *g = find_loop_dist_alias (bb);
7834 if (g == NULL)
7835 continue;
7836
7837 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7838 gcc_assert (orig_loop_num
7839 && (unsigned) orig_loop_num < vec_safe_length (larray));
7840 if (moved_orig_loop_num[orig_loop_num] == 2)
7841 {
7842 /* If we have moved both loops with this orig_loop_num into
7843 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
7844 too, update the first argument. */
7845 gcc_assert ((*larray)[orig_loop_num] != NULL
7846 && (get_loop (saved_cfun, orig_loop_num) == NULL));
7847 tree t = build_int_cst (integer_type_node,
7848 (*larray)[orig_loop_num]->num);
7849 gimple_call_set_arg (g, 0, t);
7850 update_stmt (g);
7851 /* Make sure the following loop will not update it. */
7852 moved_orig_loop_num[orig_loop_num] = 0;
7853 }
7854 else
7855 /* Otherwise at least one of the loops stayed in saved_cfun.
7856 Remove the LOOP_DIST_ALIAS call. */
7857 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7858 }
7859 FOR_EACH_BB_FN (bb, saved_cfun)
7860 {
7861 gimple *g = find_loop_dist_alias (bb);
7862 if (g == NULL)
7863 continue;
7864 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7865 gcc_assert (orig_loop_num
7866 && (unsigned) orig_loop_num < vec_safe_length (larray));
7867 if (moved_orig_loop_num[orig_loop_num])
7868 /* LOOP_DIST_ALIAS call remained in saved_cfun, if at least one
7869 of the corresponding loops was moved, remove it. */
7870 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7871 }
7872 XDELETEVEC (moved_orig_loop_num);
7873 }
7874 ggc_free (larray);
7875
7876 /* Move blocks from BBS into DEST_CFUN. */
7877 gcc_assert (bbs.length () >= 2);
7878 after = dest_cfun->cfg->x_entry_block_ptr;
7879 hash_map<tree, tree> vars_map;
7880
7881 memset (&d, 0, sizeof (d));
7882 d.orig_block = orig_block;
7883 d.new_block = DECL_INITIAL (dest_cfun->decl);
7884 d.from_context = cfun->decl;
7885 d.to_context = dest_cfun->decl;
7886 d.vars_map = &vars_map;
7887 d.new_label_map = new_label_map;
7888 d.eh_map = eh_map;
7889 d.remap_decls_p = true;
7890
7891 if (gimple_in_ssa_p (cfun))
7892 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7893 {
7894 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7895 set_ssa_default_def (dest_cfun, arg, narg);
7896 vars_map.put (arg, narg);
7897 }
7898
7899 FOR_EACH_VEC_ELT (bbs, i, bb)
7900 {
7901 /* No need to update edge counts on the last block. It has
7902 already been updated earlier when we detached the region from
7903 the original CFG. */
7904 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7905 after = bb;
7906 }
7907
7908 /* Adjust the maximum clique used. */
7909 dest_cfun->last_clique = saved_cfun->last_clique;
7910
7911 loop->aux = NULL;
7912 loop0->aux = NULL;
7913 /* Loop sizes are no longer correct, fix them up. */
7914 loop->num_nodes -= num_nodes;
7915 for (class loop *outer = loop_outer (loop);
7916 outer; outer = loop_outer (outer))
7917 outer->num_nodes -= num_nodes;
7918 loop0->num_nodes -= bbs.length () - num_nodes;
7919
7920 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7921 {
7922 class loop *aloop;
7923 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7924 if (aloop != NULL)
7925 {
7926 if (aloop->simduid)
7927 {
7928 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7929 d.to_context);
7930 dest_cfun->has_simduid_loops = true;
7931 }
7932 if (aloop->force_vectorize)
7933 dest_cfun->has_force_vectorize_loops = true;
7934 }
7935 }
7936
7937 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7938 if (orig_block)
7939 {
7940 tree block;
7941 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7942 == NULL_TREE);
7943 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7944 = BLOCK_SUBBLOCKS (orig_block);
7945 for (block = BLOCK_SUBBLOCKS (orig_block);
7946 block; block = BLOCK_CHAIN (block))
7947 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7948 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7949 }
7950
7951 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7952 &vars_map, dest_cfun->decl);
7953
7954 if (new_label_map)
7955 htab_delete (new_label_map);
7956 if (eh_map)
7957 delete eh_map;
7958
7959 /* We need to release ssa-names in a defined order, so first find them,
7960 and then iterate in ascending version order. */
7961 bitmap release_names = BITMAP_ALLOC (NULL);
7962 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7963 bitmap_iterator bi;
7964 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7965 release_ssa_name (ssa_name (i));
7966 BITMAP_FREE (release_names);
7967
7968 /* Rewire the entry and exit blocks. The successor to the entry
7969 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7970 the child function. Similarly, the predecessor of DEST_FN's
7971 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7972 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7973 various CFG manipulation function get to the right CFG.
7974
7975 FIXME, this is silly. The CFG ought to become a parameter to
7976 these helpers. */
7977 push_cfun (dest_cfun);
7978 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
7979 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7980 if (exit_bb)
7981 {
7982 make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7983 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
7984 }
7985 else
7986 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
7987 pop_cfun ();
7988
7989 /* Back in the original function, the SESE region has disappeared,
7990 create a new basic block in its place. */
7991 bb = create_empty_bb (entry_pred[0]);
7992 if (current_loops)
7993 add_bb_to_loop (bb, loop);
7994 for (i = 0; i < num_entry_edges; i++)
7995 {
7996 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7997 e->probability = entry_prob[i];
7998 }
7999
8000 for (i = 0; i < num_exit_edges; i++)
8001 {
8002 e = make_edge (bb, exit_succ[i], exit_flag[i]);
8003 e->probability = exit_prob[i];
8004 }
8005
8006 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
8007 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
8008 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
8009
8010 if (exit_bb)
8011 {
8012 free (exit_prob);
8013 free (exit_flag);
8014 free (exit_succ);
8015 }
8016 free (entry_prob);
8017 free (entry_flag);
8018 free (entry_pred);
8019 bbs.release ();
8020
8021 return bb;
8022 }
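
/* Usage sketch (illustrative, modeled on the OpenMP expander): outline the
   region [ENTRY_BB, EXIT_BB] with outermost lexical scope BLOCK into
   CHILD_FN.  CHILD_FN is an assumption of this example; it must be a
   FUNCTION_DECL whose struct function does not have a CFG yet.  */

static basic_block ATTRIBUTE_UNUSED
example_outline_region (tree child_fn, basic_block entry_bb,
			basic_block exit_bb, tree block)
{
  struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  gcc_assert (child_cfun && child_cfun->cfg == NULL);
  /* The whole region collapses to a single block in the parent.  */
  return move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);
}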
8023
8024 /* Dump default def DEF to file FILE using FLAGS and indentation
8025 SPC. */
8026
8027 static void
8028 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
8029 {
8030 for (int i = 0; i < spc; ++i)
8031 fprintf (file, " ");
8032 dump_ssaname_info_to_file (file, def, spc);
8033
8034 print_generic_expr (file, TREE_TYPE (def), flags);
8035 fprintf (file, " ");
8036 print_generic_expr (file, def, flags);
8037 fprintf (file, " = ");
8038 print_generic_expr (file, SSA_NAME_VAR (def), flags);
8039 fprintf (file, ";\n");
8040 }
8041
8042 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
8043
8044 static void
8045 print_no_sanitize_attr_value (FILE *file, tree value)
8046 {
8047 unsigned int flags = tree_to_uhwi (value);
8048 bool first = true;
8049 for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
8050 {
8051 if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
8052 {
8053 if (!first)
8054 fprintf (file, " | ");
8055 fprintf (file, "%s", sanitizer_opts[i].name);
8056 first = false;
8057 }
8058 }
8059 }
8060
8061 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
8062 */
8063
8064 void
8065 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
8066 {
8067 tree arg, var, old_current_fndecl = current_function_decl;
8068 struct function *dsf;
8069 bool ignore_topmost_bind = false, any_var = false;
8070 basic_block bb;
8071 tree chain;
8072 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
8073 && decl_is_tm_clone (fndecl));
8074 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
8075
8076 tree fntype = TREE_TYPE (fndecl);
8077 tree attrs[] = { DECL_ATTRIBUTES (fndecl), TYPE_ATTRIBUTES (fntype) };
8078
8079 for (int i = 0; i != 2; ++i)
8080 {
8081 if (!attrs[i])
8082 continue;
8083
8084 fprintf (file, "__attribute__((");
8085
8086 bool first = true;
8087 tree chain;
8088 for (chain = attrs[i]; chain; first = false, chain = TREE_CHAIN (chain))
8089 {
8090 if (!first)
8091 fprintf (file, ", ");
8092
8093 tree name = get_attribute_name (chain);
8094 print_generic_expr (file, name, dump_flags);
8095 if (TREE_VALUE (chain) != NULL_TREE)
8096 {
8097 fprintf (file, " (");
8098
8099 if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
8100 print_no_sanitize_attr_value (file, TREE_VALUE (chain));
8101 else
8102 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
8103 fprintf (file, ")");
8104 }
8105 }
8106
8107 fprintf (file, "))\n");
8108 }
8109
8110 current_function_decl = fndecl;
8111 if (flags & TDF_GIMPLE)
8112 {
8113 static bool hotness_bb_param_printed = false;
8114 if (profile_info != NULL
8115 && !hotness_bb_param_printed)
8116 {
8117 hotness_bb_param_printed = true;
8118 fprintf (file,
8119 "/* --param=gimple-fe-computed-hot-bb-threshold=%" PRId64
8120 " */\n", get_hot_bb_threshold ());
8121 }
8122
8123 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
8124 dump_flags | TDF_SLIM);
8125 fprintf (file, " __GIMPLE (%s",
8126 (fun->curr_properties & PROP_ssa) ? "ssa"
8127 : (fun->curr_properties & PROP_cfg) ? "cfg"
8128 : "");
8129
8130 if (fun && fun->cfg)
8131 {
8132 basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (fun);
8133 if (bb->count.initialized_p ())
8134 fprintf (file, ",%s(%" PRIu64 ")",
8135 profile_quality_as_string (bb->count.quality ()),
8136 bb->count.value ());
8137 if (dump_flags & TDF_UID)
8138 fprintf (file, ")\n%sD_%u (", function_name (fun),
8139 DECL_UID (fndecl));
8140 else
8141 fprintf (file, ")\n%s (", function_name (fun));
8142 }
8143 }
8144 else
8145 {
8146 print_generic_expr (file, TREE_TYPE (fntype), dump_flags);
8147 if (dump_flags & TDF_UID)
8148 fprintf (file, " %sD.%u %s(", function_name (fun), DECL_UID (fndecl),
8149 tmclone ? "[tm-clone] " : "");
8150 else
8151 fprintf (file, " %s %s(", function_name (fun),
8152 tmclone ? "[tm-clone] " : "");
8153 }
8154
8155 arg = DECL_ARGUMENTS (fndecl);
8156 while (arg)
8157 {
8158 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
8159 fprintf (file, " ");
8160 print_generic_expr (file, arg, dump_flags);
8161 if (DECL_CHAIN (arg))
8162 fprintf (file, ", ");
8163 arg = DECL_CHAIN (arg);
8164 }
8165 fprintf (file, ")\n");
8166
8167 dsf = DECL_STRUCT_FUNCTION (fndecl);
8168 if (dsf && (flags & TDF_EH))
8169 dump_eh_tree (file, dsf);
8170
8171 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
8172 {
8173 dump_node (fndecl, TDF_SLIM | flags, file);
8174 current_function_decl = old_current_fndecl;
8175 return;
8176 }
8177
8178 /* When GIMPLE is lowered, the variables are no longer available in
8179 BIND_EXPRs, so display them separately. */
8180 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
8181 {
8182 unsigned ix;
8183 ignore_topmost_bind = true;
8184
8185 fprintf (file, "{\n");
8186 if (gimple_in_ssa_p (fun)
8187 && (flags & TDF_ALIAS))
8188 {
8189 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
8190 arg = DECL_CHAIN (arg))
8191 {
8192 tree def = ssa_default_def (fun, arg);
8193 if (def)
8194 dump_default_def (file, def, 2, flags);
8195 }
8196
8197 tree res = DECL_RESULT (fun->decl);
8198 if (res != NULL_TREE
8199 && DECL_BY_REFERENCE (res))
8200 {
8201 tree def = ssa_default_def (fun, res);
8202 if (def)
8203 dump_default_def (file, def, 2, flags);
8204 }
8205
8206 tree static_chain = fun->static_chain_decl;
8207 if (static_chain != NULL_TREE)
8208 {
8209 tree def = ssa_default_def (fun, static_chain);
8210 if (def)
8211 dump_default_def (file, def, 2, flags);
8212 }
8213 }
8214
8215 if (!vec_safe_is_empty (fun->local_decls))
8216 FOR_EACH_LOCAL_DECL (fun, ix, var)
8217 {
8218 print_generic_decl (file, var, flags);
8219 fprintf (file, "\n");
8220
8221 any_var = true;
8222 }
8223
8224 tree name;
8225
8226 if (gimple_in_ssa_p (fun))
8227 FOR_EACH_SSA_NAME (ix, name, fun)
8228 {
8229 if (!SSA_NAME_VAR (name)
8230 /* SSA names whose decl has no name still get
8231 dumped as _N; list those explicitly as well even
8232 though we've dumped the decl declaration as D.xxx
8233 above. */
8234 || !SSA_NAME_IDENTIFIER (name))
8235 {
8236 fprintf (file, " ");
8237 print_generic_expr (file, TREE_TYPE (name), flags);
8238 fprintf (file, " ");
8239 print_generic_expr (file, name, flags);
8240 fprintf (file, ";\n");
8241
8242 any_var = true;
8243 }
8244 }
8245 }
8246
8247 if (fun && fun->decl == fndecl
8248 && fun->cfg
8249 && basic_block_info_for_fn (fun))
8250 {
8251 /* If the CFG has been built, emit a CFG-based dump. */
8252 if (!ignore_topmost_bind)
8253 fprintf (file, "{\n");
8254
8255 if (any_var && n_basic_blocks_for_fn (fun))
8256 fprintf (file, "\n");
8257
8258 FOR_EACH_BB_FN (bb, fun)
8259 dump_bb (file, bb, 2, flags);
8260
8261 fprintf (file, "}\n");
8262 }
8263 else if (fun && (fun->curr_properties & PROP_gimple_any))
8264 {
8265 /* The function is now in GIMPLE form but the CFG has not been
8266 built yet. Emit the single sequence of GIMPLE statements
8267 that make up its body. */
8268 gimple_seq body = gimple_body (fndecl);
8269
8270 if (gimple_seq_first_stmt (body)
8271 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
8272 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
8273 print_gimple_seq (file, body, 0, flags);
8274 else
8275 {
8276 if (!ignore_topmost_bind)
8277 fprintf (file, "{\n");
8278
8279 if (any_var)
8280 fprintf (file, "\n");
8281
8282 print_gimple_seq (file, body, 2, flags);
8283 fprintf (file, "}\n");
8284 }
8285 }
8286 else
8287 {
8288 int indent;
8289
8290 /* Make a tree based dump. */
8291 chain = DECL_SAVED_TREE (fndecl);
8292 if (chain && TREE_CODE (chain) == BIND_EXPR)
8293 {
8294 if (ignore_topmost_bind)
8295 {
8296 chain = BIND_EXPR_BODY (chain);
8297 indent = 2;
8298 }
8299 else
8300 indent = 0;
8301 }
8302 else
8303 {
8304 if (!ignore_topmost_bind)
8305 {
8306 fprintf (file, "{\n");
8307 /* No topmost bind, pretend it's ignored for later. */
8308 ignore_topmost_bind = true;
8309 }
8310 indent = 2;
8311 }
8312
8313 if (any_var)
8314 fprintf (file, "\n");
8315
8316 print_generic_stmt_indented (file, chain, flags, indent);
8317 if (ignore_topmost_bind)
8318 fprintf (file, "}\n");
8319 }
8320
8321 if (flags & TDF_ENUMERATE_LOCALS)
8322 dump_enumerated_decls (file, flags);
8323 fprintf (file, "\n\n");
8324
8325 current_function_decl = old_current_fndecl;
8326 }
8327
8328 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
8329
8330 DEBUG_FUNCTION void
8331 debug_function (tree fn, dump_flags_t flags)
8332 {
8333 dump_function_to_file (fn, stderr, flags);
8334 }
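
/* For example, the current function can be printed from a debugger with

     (gdb) call debug_function (cfun->decl, TDF_SLIM)

   and further TDF_* flags (TDF_VOPS, TDF_ALIAS, ...) can be OR'ed in as
   needed.  */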
8335
8336
8337 /* Print on FILE the indexes for the predecessors of basic_block BB. */
8338
8339 static void
8340 print_pred_bbs (FILE *file, basic_block bb)
8341 {
8342 edge e;
8343 edge_iterator ei;
8344
8345 FOR_EACH_EDGE (e, ei, bb->preds)
8346 fprintf (file, "bb_%d ", e->src->index);
8347 }
8348
8349
8350 /* Print on FILE the indexes for the successors of basic_block BB. */
8351
8352 static void
8353 print_succ_bbs (FILE *file, basic_block bb)
8354 {
8355 edge e;
8356 edge_iterator ei;
8357
8358 FOR_EACH_EDGE (e, ei, bb->succs)
8359 fprintf (file, "bb_%d ", e->dest->index);
8360 }
8361
8362 /* Print to FILE the basic block BB following the VERBOSITY level. */
8363
8364 void
8365 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
8366 {
8367 char *s_indent = (char *) alloca ((size_t) indent + 1);
8368 memset ((void *) s_indent, ' ', (size_t) indent);
8369 s_indent[indent] = '\0';
8370
8371 /* Print basic_block's header. */
8372 if (verbosity >= 2)
8373 {
8374 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
8375 print_pred_bbs (file, bb);
8376 fprintf (file, "}, succs = {");
8377 print_succ_bbs (file, bb);
8378 fprintf (file, "})\n");
8379 }
8380
8381 /* Print basic_block's body. */
8382 if (verbosity >= 3)
8383 {
8384 fprintf (file, "%s {\n", s_indent);
8385 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
8386 fprintf (file, "%s }\n", s_indent);
8387 }
8388 }
8389
8390 static void print_loop_and_siblings (FILE *, class loop *, int, int);
8391
8392 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
8393 VERBOSITY level this outputs the contents of the loop, or just its
8394 structure. */
8395
8396 static void
8397 print_loop (FILE *file, class loop *loop, int indent, int verbosity)
8398 {
8399 char *s_indent;
8400 basic_block bb;
8401
8402 if (loop == NULL)
8403 return;
8404
8405 s_indent = (char *) alloca ((size_t) indent + 1);
8406 memset ((void *) s_indent, ' ', (size_t) indent);
8407 s_indent[indent] = '\0';
8408
8409 /* Print loop's header. */
8410 fprintf (file, "%sloop_%d (", s_indent, loop->num);
8411 if (loop->header)
8412 fprintf (file, "header = %d", loop->header->index);
8413 else
8414 {
8415 fprintf (file, "deleted)\n");
8416 return;
8417 }
8418 if (loop->latch)
8419 fprintf (file, ", latch = %d", loop->latch->index);
8420 else
8421 fprintf (file, ", multiple latches");
8422 fprintf (file, ", niter = ");
8423 print_generic_expr (file, loop->nb_iterations);
8424
8425 if (loop->any_upper_bound)
8426 {
8427 fprintf (file, ", upper_bound = ");
8428 print_decu (loop->nb_iterations_upper_bound, file);
8429 }
8430 if (loop->any_likely_upper_bound)
8431 {
8432 fprintf (file, ", likely_upper_bound = ");
8433 print_decu (loop->nb_iterations_likely_upper_bound, file);
8434 }
8435
8436 if (loop->any_estimate)
8437 {
8438 fprintf (file, ", estimate = ");
8439 print_decu (loop->nb_iterations_estimate, file);
8440 }
8441 if (loop->unroll)
8442 fprintf (file, ", unroll = %d", loop->unroll);
8443 fprintf (file, ")\n");
8444
8445 /* Print loop's body. */
8446 if (verbosity >= 1)
8447 {
8448 fprintf (file, "%s{\n", s_indent);
8449 FOR_EACH_BB_FN (bb, cfun)
8450 if (bb->loop_father == loop)
8451 print_loops_bb (file, bb, indent, verbosity);
8452
8453 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
8454 fprintf (file, "%s}\n", s_indent);
8455 }
8456 }
8457
8458 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8459 spaces. Following VERBOSITY level this outputs the contents of the
8460 loop, or just its structure. */
8461
8462 static void
8463 print_loop_and_siblings (FILE *file, class loop *loop, int indent,
8464 int verbosity)
8465 {
8466 if (loop == NULL)
8467 return;
8468
8469 print_loop (file, loop, indent, verbosity);
8470 print_loop_and_siblings (file, loop->next, indent, verbosity);
8471 }
8472
8473 /* Follow a CFG edge from the entry point of the program, and on entry
8474 of a loop, pretty print the loop structure on FILE. */
8475
8476 void
8477 print_loops (FILE *file, int verbosity)
8478 {
8479 basic_block bb;
8480
8481 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8482 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8483 if (bb && bb->loop_father)
8484 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
8485 }
8486
8487 /* Dump a loop. */
8488
8489 DEBUG_FUNCTION void
8490 debug (class loop &ref)
8491 {
8492 print_loop (stderr, &ref, 0, /*verbosity*/0);
8493 }
8494
8495 DEBUG_FUNCTION void
8496 debug (class loop *ptr)
8497 {
8498 if (ptr)
8499 debug (*ptr);
8500 else
8501 fprintf (stderr, "<nil>\n");
8502 }
8503
8504 /* Dump a loop verbosely. */
8505
8506 DEBUG_FUNCTION void
8507 debug_verbose (class loop &ref)
8508 {
8509 print_loop (stderr, &ref, 0, /*verbosity*/3);
8510 }
8511
8512 DEBUG_FUNCTION void
8513 debug_verbose (class loop *ptr)
8514 {
8515 if (ptr)
8516 debug_verbose (*ptr);
8517 else
8518 fprintf (stderr, "<nil>\n");
8519 }
8520
8521
8522 /* Debugging loops structure at tree level, at some VERBOSITY level. */
8523
8524 DEBUG_FUNCTION void
8525 debug_loops (int verbosity)
8526 {
8527 print_loops (stderr, verbosity);
8528 }
8529
8530 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8531
8532 DEBUG_FUNCTION void
8533 debug_loop (class loop *loop, int verbosity)
8534 {
8535 print_loop (stderr, loop, 0, verbosity);
8536 }
8537
8538 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8539 level. */
8540
8541 DEBUG_FUNCTION void
8542 debug_loop_num (unsigned num, int verbosity)
8543 {
8544 debug_loop (get_loop (cfun, num), verbosity);
8545 }
8546
8547 /* Return true if BB ends with a call, possibly followed by some
8548 instructions that must stay with the call. Return false
8549 otherwise. */
8550
8551 static bool
8552 gimple_block_ends_with_call_p (basic_block bb)
8553 {
8554 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8555 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8556 }
8557
8558
8559 /* Return true if BB ends with a conditional branch. Return false
8560 otherwise. */
8561
8562 static bool
8563 gimple_block_ends_with_condjump_p (const_basic_block bb)
8564 {
8565 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8566 return (stmt && gimple_code (stmt) == GIMPLE_COND);
8567 }
8568
8569
8570 /* Return true if statement T may terminate execution of BB in ways not
8571 explicitly represented in the CFG. */
8572
8573 bool
8574 stmt_can_terminate_bb_p (gimple *t)
8575 {
8576 tree fndecl = NULL_TREE;
8577 int call_flags = 0;
8578
8579 /* An EH exception not handled internally terminates execution of the whole
8580 function. */
8581 if (stmt_can_throw_external (cfun, t))
8582 return true;
8583
8584 /* NORETURN and LONGJMP calls already have an edge to exit.
8585 CONST and PURE calls do not need one.
8586 We don't currently check for CONST and PURE here, although
8587 it would be a good idea, because those attributes are
8588 figured out from the RTL in mark_constant_function, and
8589 the counter incrementation code from -fprofile-arcs
8590 leads to different results from -fbranch-probabilities. */
8591 if (is_gimple_call (t))
8592 {
8593 fndecl = gimple_call_fndecl (t);
8594 call_flags = gimple_call_flags (t);
8595 }
8596
8597 if (is_gimple_call (t)
8598 && fndecl
8599 && fndecl_built_in_p (fndecl)
8600 && (call_flags & ECF_NOTHROW)
8601 && !(call_flags & ECF_RETURNS_TWICE)
8602 /* fork() doesn't really return twice, but the effect of
8603 wrapping it in __gcov_fork() which calls __gcov_dump() and
8604 __gcov_reset() and clears the counters before forking has the same
8605 effect as returning twice. Force a fake edge. */
8606 && !fndecl_built_in_p (fndecl, BUILT_IN_FORK))
8607 return false;
8608
8609 if (is_gimple_call (t))
8610 {
8611 edge_iterator ei;
8612 edge e;
8613 basic_block bb;
8614
8615 if (call_flags & (ECF_PURE | ECF_CONST)
8616 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8617 return false;
8618
8619 /* A function call may do a longjmp, terminate the program, or do other
8620 things. Special-case noreturn calls: for those, termination is already
8621 sufficiently represented by the lack of (non-fake) edges out of T. */
8622 if (!(call_flags & ECF_NORETURN))
8623 return true;
8624
8625 bb = gimple_bb (t);
8626 FOR_EACH_EDGE (e, ei, bb->succs)
8627 if ((e->flags & EDGE_FAKE) == 0)
8628 return true;
8629 }
8630
8631 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8632 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8633 return true;
8634
8635 return false;
8636 }
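
/* Usage sketch (illustrative): a cheap variant that inspects only BB's
   last real statement.  Note that gimple_flow_call_edges_add below walks
   every statement instead, since a call in the middle of BB can also
   terminate it.  */

static bool ATTRIBUTE_UNUSED
example_last_stmt_can_terminate_bb_p (basic_block bb)
{
  gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
  return !gsi_end_p (gsi) && stmt_can_terminate_bb_p (gsi_stmt (gsi));
}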
8637
8638
8639 /* Add fake edges to the function exit for any non-constant and
8640 non-noreturn calls (or noreturn calls with EH/abnormal edges), and
8641 for volatile inline assembly, in the bitmap of blocks specified by BLOCKS
8642 or to the whole CFG if BLOCKS is zero. Return the number of blocks
8643 that were split.
8644
8645 The goal is to expose cases in which entering a basic block does
8646 not imply that all subsequent instructions must be executed. */
8647
8648 static int
8649 gimple_flow_call_edges_add (sbitmap blocks)
8650 {
8651 int i;
8652 int blocks_split = 0;
8653 int last_bb = last_basic_block_for_fn (cfun);
8654 bool check_last_block = false;
8655
8656 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8657 return 0;
8658
8659 if (! blocks)
8660 check_last_block = true;
8661 else
8662 check_last_block = bitmap_bit_p (blocks,
8663 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8664
8665 /* In the last basic block, before epilogue generation, there will be
8666 a fallthru edge to EXIT. Special care is required if the last stmt
8667 of the last basic block is a call because make_edge folds duplicate
8668 edges, which would result in the fallthru edge also being marked
8669 fake, which would result in the fallthru edge being removed by
8670 remove_fake_edges, which would result in an invalid CFG.
8671
8672 Moreover, we can't elide the outgoing fake edge, since the block
8673 profiler needs to take this into account in order to solve the minimal
8674 spanning tree in the case that the call doesn't return.
8675
8676 Handle this by adding a dummy instruction in a new last basic block. */
8677 if (check_last_block)
8678 {
8679 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8680 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8681 gimple *t = NULL;
8682
8683 if (!gsi_end_p (gsi))
8684 t = gsi_stmt (gsi);
8685
8686 if (t && stmt_can_terminate_bb_p (t))
8687 {
8688 edge e;
8689
8690 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8691 if (e)
8692 {
8693 gsi_insert_on_edge (e, gimple_build_nop ());
8694 gsi_commit_edge_inserts ();
8695 }
8696 }
8697 }
8698
8699 /* Now add fake edges to the function exit for any non-constant
8700 calls since there is no way that we can determine if they will
8701 return or not... */
8702 for (i = 0; i < last_bb; i++)
8703 {
8704 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8705 gimple_stmt_iterator gsi;
8706 gimple *stmt, *last_stmt;
8707
8708 if (!bb)
8709 continue;
8710
8711 if (blocks && !bitmap_bit_p (blocks, i))
8712 continue;
8713
8714 gsi = gsi_last_nondebug_bb (bb);
8715 if (!gsi_end_p (gsi))
8716 {
8717 last_stmt = gsi_stmt (gsi);
8718 do
8719 {
8720 stmt = gsi_stmt (gsi);
8721 if (stmt_can_terminate_bb_p (stmt))
8722 {
8723 edge e;
8724
8725 /* The handling above of the final block before the
8726 epilogue should be enough to verify that there is
8727 no edge to the exit block in CFG already.
8728 Calling make_edge in such case would cause us to
8729 mark that edge as fake and remove it later. */
8730 if (flag_checking && stmt == last_stmt)
8731 {
8732 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8733 gcc_assert (e == NULL);
8734 }
8735
8736 /* Note that the following may create a new basic block
8737 and renumber the existing basic blocks. */
8738 if (stmt != last_stmt)
8739 {
8740 e = split_block (bb, stmt);
8741 if (e)
8742 blocks_split++;
8743 }
8744 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8745 e->probability = profile_probability::guessed_never ();
8746 }
8747 gsi_prev (&gsi);
8748 }
8749 while (!gsi_end_p (gsi));
8750 }
8751 }
8752
8753 if (blocks_split)
8754 checking_verify_flow_info ();
8755
8756 return blocks_split;
8757 }
8758
8759 /* Removes edge E and all the blocks dominated by it, and updates dominance
8760 information. The IL in E->src needs to be updated separately.
8761 If dominance info is not available, only the edge E is removed. */
8762
8763 void
8764 remove_edge_and_dominated_blocks (edge e)
8765 {
8766 vec<basic_block> bbs_to_fix_dom = vNULL;
8767 edge f;
8768 edge_iterator ei;
8769 bool none_removed = false;
8770 unsigned i;
8771 basic_block bb, dbb;
8772 bitmap_iterator bi;
8773
8774 /* If we are removing a path inside a non-root loop, that may change
8775 loop ownership of blocks or remove loops; mark loops for fixup. */
8776 if (current_loops
8777 && loop_outer (e->src->loop_father) != NULL
8778 && e->src->loop_father == e->dest->loop_father)
8779 loops_state_set (LOOPS_NEED_FIXUP);
8780
8781 if (!dom_info_available_p (CDI_DOMINATORS))
8782 {
8783 remove_edge (e);
8784 return;
8785 }
8786
8787 /* No updating is needed for edges to exit. */
8788 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8789 {
8790 if (cfgcleanup_altered_bbs)
8791 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8792 remove_edge (e);
8793 return;
8794 }
8795
8796 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8797 that is not dominated by E->dest, then this set is empty. Otherwise,
8798 all the basic blocks dominated by E->dest are removed.
8799
8800 Also, to DF_IDOM we store the immediate dominators of the blocks in
8801 the dominance frontier of E (i.e., of the successors of the
8802 removed blocks, if there are any, and of E->dest otherwise). */
8803 FOR_EACH_EDGE (f, ei, e->dest->preds)
8804 {
8805 if (f == e)
8806 continue;
8807
8808 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8809 {
8810 none_removed = true;
8811 break;
8812 }
8813 }
8814
8815 auto_bitmap df, df_idom;
8816 auto_vec<basic_block> bbs_to_remove;
8817 if (none_removed)
8818 bitmap_set_bit (df_idom,
8819 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8820 else
8821 {
8822 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8823 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8824 {
8825 FOR_EACH_EDGE (f, ei, bb->succs)
8826 {
8827 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8828 bitmap_set_bit (df, f->dest->index);
8829 }
8830 }
8831 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8832 bitmap_clear_bit (df, bb->index);
8833
8834 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8835 {
8836 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8837 bitmap_set_bit (df_idom,
8838 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8839 }
8840 }
8841
8842 if (cfgcleanup_altered_bbs)
8843 {
8844 /* Record the set of the altered basic blocks. */
8845 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8846 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8847 }
8848
8849 /* Remove E and the cancelled blocks. */
8850 if (none_removed)
8851 remove_edge (e);
8852 else
8853 {
8854 /* Walk backwards so as to get a chance to substitute all
8855 released DEFs into debug stmts. See
8856 eliminate_unnecessary_stmts() in tree-ssa-dce.cc for more
8857 details. */
8858 for (i = bbs_to_remove.length (); i-- > 0; )
8859 delete_basic_block (bbs_to_remove[i]);
8860 }
8861
8862 /* Update the dominance information. The immediate dominator may change only
8863 for blocks whose immediate dominator belongs to DF_IDOM:
8864
8865 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8866 removal. Let Z be an arbitrary block such that idom(Z) = Y and
8867 Z dominates X after the removal. Before removal, there exists a path P
8868 from Y to X that avoids Z. Let F be the last edge on P that is
8869 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8870 dominates W, and because of P, Z does not dominate W), and W belongs to
8871 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8872 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8873 {
8874 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8875 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8876 dbb;
8877 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8878 bbs_to_fix_dom.safe_push (dbb);
8879 }
8880
8881 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8882
8883 bbs_to_fix_dom.release ();
8884 }
8885
8886 /* Purge dead EH edges from basic block BB. */
8887
8888 bool
8889 gimple_purge_dead_eh_edges (basic_block bb)
8890 {
8891 bool changed = false;
8892 edge e;
8893 edge_iterator ei;
8894 gimple *stmt = last_stmt (bb);
8895
8896 if (stmt && stmt_can_throw_internal (cfun, stmt))
8897 return false;
8898
8899 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8900 {
8901 if (e->flags & EDGE_EH)
8902 {
8903 remove_edge_and_dominated_blocks (e);
8904 changed = true;
8905 }
8906 else
8907 ei_next (&ei);
8908 }
8909
8910 return changed;
8911 }
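
/* Usage sketch (illustrative): the common idiom in passes that simplify a
   statement so that it can no longer throw.  Returns true if the caller
   should schedule a CFG cleanup.  */

static bool ATTRIBUTE_UNUSED
example_note_stmt_no_longer_throws (gimple *stmt)
{
  /* Drop STMT from the EH table if it cannot throw anymore, then purge
     the EH edges that thereby became dead.  */
  return (maybe_clean_eh_stmt (stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)));
}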
8912
8913 /* Purge dead EH edges from basic block listed in BLOCKS. */
8914
8915 bool
8916 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8917 {
8918 bool changed = false;
8919 unsigned i;
8920 bitmap_iterator bi;
8921
8922 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8923 {
8924 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8925
8926 /* Earlier gimple_purge_dead_eh_edges could have removed
8927 this basic block already. */
8928 gcc_assert (bb || changed);
8929 if (bb != NULL)
8930 changed |= gimple_purge_dead_eh_edges (bb);
8931 }
8932
8933 return changed;
8934 }
8935
8936 /* Purge dead abnormal call edges from basic block BB. */
8937
8938 bool
8939 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8940 {
8941 bool changed = false;
8942 edge e;
8943 edge_iterator ei;
8944 gimple *stmt = last_stmt (bb);
8945
8946 if (stmt && stmt_can_make_abnormal_goto (stmt))
8947 return false;
8948
8949 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8950 {
8951 if (e->flags & EDGE_ABNORMAL)
8952 {
8953 if (e->flags & EDGE_FALLTHRU)
8954 e->flags &= ~EDGE_ABNORMAL;
8955 else
8956 remove_edge_and_dominated_blocks (e);
8957 changed = true;
8958 }
8959 else
8960 ei_next (&ei);
8961 }
8962
8963 return changed;
8964 }
8965
8966 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
8967
8968 bool
8969 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8970 {
8971 bool changed = false;
8972 unsigned i;
8973 bitmap_iterator bi;
8974
8975 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8976 {
8977 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8978
8979 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8980 this basic block already. */
8981 gcc_assert (bb || changed);
8982 if (bb != NULL)
8983 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8984 }
8985
8986 return changed;
8987 }
8988
8989 /* This function is called whenever a new edge is created or
8990 redirected. */
8991
8992 static void
8993 gimple_execute_on_growing_pred (edge e)
8994 {
8995 basic_block bb = e->dest;
8996
8997 if (!gimple_seq_empty_p (phi_nodes (bb)))
8998 reserve_phi_args_for_new_edge (bb);
8999 }
9000
9001 /* This function is called immediately before edge E is removed from
9002 the edge vector E->dest->preds. */
9003
9004 static void
9005 gimple_execute_on_shrinking_pred (edge e)
9006 {
9007 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
9008 remove_phi_args (e);
9009 }
9010
9011 /*---------------------------------------------------------------------------
9012 Helper functions for Loop versioning
9013 ---------------------------------------------------------------------------*/
9014
9015 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
9016 of 'first'. Both of them are dominated by 'new_head' basic block. When
9017 'new_head' was created by splitting 'second's incoming edge, the edge
9018 from 'new_head' to 'second' received phi arguments from split_edge ().
9019 Later, an additional edge 'e' was created to connect 'new_head' and
9020 'first'. Now this routine adds to edge 'e' the same phi args that the
9021 'new_head'-to-'second' edge received as part of the edge splitting. */
9022
9023 static void
9024 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
9025 basic_block new_head, edge e)
9026 {
9027 gphi *phi1, *phi2;
9028 gphi_iterator psi1, psi2;
9029 tree def;
9030 edge e2 = find_edge (new_head, second);
9031
9032 /* Because NEW_HEAD has been created by splitting SECOND's incoming
9033 edge, we should always have an edge from NEW_HEAD to SECOND. */
9034 gcc_assert (e2 != NULL);
9035
9036 /* Browse all 'second' basic block phi nodes and add phi args to
9037 edge 'e' for 'first' head. PHI args are always in correct order. */
9038
9039 for (psi2 = gsi_start_phis (second),
9040 psi1 = gsi_start_phis (first);
9041 !gsi_end_p (psi2) && !gsi_end_p (psi1);
9042 gsi_next (&psi2), gsi_next (&psi1))
9043 {
9044 phi1 = psi1.phi ();
9045 phi2 = psi2.phi ();
9046 def = PHI_ARG_DEF (phi2, e2->dest_idx);
9047 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
9048 }
9049 }
9050
9051
9052 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
9053 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
9054 the destination of the ELSE part. */
9055
9056 static void
9057 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
9058 basic_block second_head ATTRIBUTE_UNUSED,
9059 basic_block cond_bb, void *cond_e)
9060 {
9061 gimple_stmt_iterator gsi;
9062 gimple *new_cond_expr;
9063 tree cond_expr = (tree) cond_e;
9064 edge e0;
9065
9066 /* Build new conditional expr */
9067 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
9068 NULL_TREE, NULL_TREE);
9069
9070 /* Add new cond in cond_bb. */
9071 gsi = gsi_last_bb (cond_bb);
9072 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
9073
9074 /* Adjust edges appropriately to connect new head with first head
9075 as well as second head. */
9076 e0 = single_succ_edge (cond_bb);
9077 e0->flags &= ~EDGE_FALLTHRU;
9078 e0->flags |= EDGE_FALSE_VALUE;
9079 }
9080
9081
9082 /* Do book-keeping of basic block BB for the profile consistency checker.
9083 Store the counts in RECORD. */
9084 static void
9085 gimple_account_profile_record (basic_block bb,
9086 struct profile_record *record)
9087 {
9088 gimple_stmt_iterator i;
9089 for (i = gsi_start_nondebug_after_labels_bb (bb); !gsi_end_p (i);
9090 gsi_next_nondebug (&i))
9091 {
9092 record->size
9093 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
9094 if (profile_info)
9095 {
9096 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa ().initialized_p ()
9097 && ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa ().nonzero_p ()
9098 && bb->count.ipa ().initialized_p ())
9099 record->time
9100 += estimate_num_insns (gsi_stmt (i),
9101 &eni_time_weights)
9102 * bb->count.ipa ().to_gcov_type ();
9103 }
9104 else if (bb->count.initialized_p ()
9105 && ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.initialized_p ())
9106 record->time
9107 += estimate_num_insns
9108 (gsi_stmt (i),
9109 &eni_time_weights)
9110 * bb->count.to_sreal_scale
9111 (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count).to_double ();
9112 else
9113 record->time
9114 += estimate_num_insns (gsi_stmt (i), &eni_time_weights);
9115 }
9116 }
9117
9118 struct cfg_hooks gimple_cfg_hooks = {
9119 "gimple",
9120 gimple_verify_flow_info,
9121 gimple_dump_bb, /* dump_bb */
9122 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
9123 create_bb, /* create_basic_block */
9124 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
9125 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
9126 gimple_can_remove_branch_p, /* can_remove_branch_p */
9127 remove_bb, /* delete_basic_block */
9128 gimple_split_block, /* split_block */
9129 gimple_move_block_after, /* move_block_after */
9130 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
9131 gimple_merge_blocks, /* merge_blocks */
9132 gimple_predict_edge, /* predict_edge */
9133 gimple_predicted_by_p, /* predicted_by_p */
9134 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
9135 gimple_duplicate_bb, /* duplicate_block */
9136 gimple_split_edge, /* split_edge */
9137 gimple_make_forwarder_block, /* make_forwarder_block */
9138 NULL, /* tidy_fallthru_edge */
9139 NULL, /* force_nonfallthru */
9140 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
9141 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
9142 gimple_flow_call_edges_add, /* flow_call_edges_add */
9143 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
9144 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
9145 gimple_duplicate_loop_body_to_header_edge, /* duplicate loop for trees */
9146 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
9147 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
9148 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
9149 flush_pending_stmts, /* flush_pending_stmts */
9150 gimple_empty_block_p, /* block_empty_p */
9151 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
9152 gimple_account_profile_record,
9153 };
9154
9155
9156 /* Split all critical edges. Split some extra (not necessarily critical) edges
9157 if FOR_EDGE_INSERTION_P is true. */
9158
9159 unsigned int
9160 split_critical_edges (bool for_edge_insertion_p /* = false */)
9161 {
9162 basic_block bb;
9163 edge e;
9164 edge_iterator ei;
9165
9166 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
9167 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
9168 mappings around the calls to split_edge. */
9169 start_recording_case_labels ();
9170 FOR_ALL_BB_FN (bb, cfun)
9171 {
9172 FOR_EACH_EDGE (e, ei, bb->succs)
9173 {
9174 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
9175 split_edge (e);
9176 /* PRE inserts statements to edges and expects that
9177 since split_critical_edges was done beforehand, committing edge
9178 insertions will not split more edges. In addition to critical
9179 edges we must split edges that have multiple successors and
9180 end by control flow statements, such as RESX.
9181 Go ahead and split them too. This matches the logic in
9182 gimple_find_edge_insert_loc. */
9183 else if (for_edge_insertion_p
9184 && (!single_pred_p (e->dest)
9185 || !gimple_seq_empty_p (phi_nodes (e->dest))
9186 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
9187 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
9188 && !(e->flags & EDGE_ABNORMAL))
9189 {
9190 gimple_stmt_iterator gsi;
9191
9192 gsi = gsi_last_bb (e->src);
9193 if (!gsi_end_p (gsi)
9194 && stmt_ends_bb_p (gsi_stmt (gsi))
9195 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
9196 && !gimple_call_builtin_p (gsi_stmt (gsi),
9197 BUILT_IN_RETURN)))
9198 split_edge (e);
9199 }
9200 }
9201 }
9202 end_recording_case_labels ();
9203 return 0;
9204 }
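
/* Usage sketch (illustrative): once split_critical_edges (true) has run,
   a pass can queue statements on edges and commit them knowing that the
   commit itself will not have to split any further edges.  */

static void ATTRIBUTE_UNUSED
example_insert_stmt_on_edge (edge e, gimple *stmt)
{
  gsi_insert_on_edge (e, stmt);
  gsi_commit_edge_inserts ();
}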
9205
9206 namespace {
9207
9208 const pass_data pass_data_split_crit_edges =
9209 {
9210 GIMPLE_PASS, /* type */
9211 "crited", /* name */
9212 OPTGROUP_NONE, /* optinfo_flags */
9213 TV_TREE_SPLIT_EDGES, /* tv_id */
9214 PROP_cfg, /* properties_required */
9215 PROP_no_crit_edges, /* properties_provided */
9216 0, /* properties_destroyed */
9217 0, /* todo_flags_start */
9218 0, /* todo_flags_finish */
9219 };
9220
9221 class pass_split_crit_edges : public gimple_opt_pass
9222 {
9223 public:
9224 pass_split_crit_edges (gcc::context *ctxt)
9225 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
9226 {}
9227
9228 /* opt_pass methods: */
9229 virtual unsigned int execute (function *) { return split_critical_edges (); }
9230
9231 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
9232 }; // class pass_split_crit_edges
9233
9234 } // anon namespace
9235
9236 gimple_opt_pass *
9237 make_pass_split_crit_edges (gcc::context *ctxt)
9238 {
9239 return new pass_split_crit_edges (ctxt);
9240 }
9241
9242
9243 /* Insert COND expression which is GIMPLE_COND after STMT
9244 in basic block BB with appropriate basic block split
9245 and creation of a new conditionally executed basic block.
9246 Update profile so the new bb is visited with probability PROB.
9247 Return created basic block. */
9248 basic_block
9249 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
9250 profile_probability prob)
9251 {
9252 edge fall = split_block (bb, stmt);
9253 gimple_stmt_iterator iter = gsi_last_bb (bb);
9254 basic_block new_bb;
9255
9256 /* Insert cond statement. */
9257 gcc_assert (gimple_code (cond) == GIMPLE_COND);
9258 if (gsi_end_p (iter))
9259 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
9260 else
9261 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
9262
9263 /* Create conditionally executed block. */
9264 new_bb = create_empty_bb (bb);
9265 edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
9266 e->probability = prob;
9267 new_bb->count = e->count ();
9268 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
9269
9270 /* Fix edge for split bb. */
9271 fall->flags = EDGE_FALSE_VALUE;
9272 fall->probability -= e->probability;
9273
9274 /* Update dominance info. */
9275 if (dom_info_available_p (CDI_DOMINATORS))
9276 {
9277 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
9278 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
9279 }
9280
9281 /* Update loop info. */
9282 if (current_loops)
9283 add_bb_to_loop (new_bb, bb->loop_father);
9284
9285 return new_bb;
9286 }
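
/* Usage sketch (illustrative; FLAG is an assumption of this example):
   after STMT, branch into a new block only when FLAG is nonzero, with
   the new block predicted as unlikely.  */

static basic_block ATTRIBUTE_UNUSED
example_insert_unlikely_guard (basic_block bb, gimple *stmt, tree flag)
{
  gcond *cond = gimple_build_cond (NE_EXPR, flag,
				   build_zero_cst (TREE_TYPE (flag)),
				   NULL_TREE, NULL_TREE);
  return insert_cond_bb (bb, stmt, cond, profile_probability::unlikely ());
}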
9287
9288
9289 \f
9290 /* Given a basic block B which ends with a conditional and has
9291 precisely two successors, determine which of the edges is taken if
9292 the conditional is true and which is taken if the conditional is
9293 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
9294
9295 void
9296 extract_true_false_edges_from_block (basic_block b,
9297 edge *true_edge,
9298 edge *false_edge)
9299 {
9300 edge e = EDGE_SUCC (b, 0);
9301
9302 if (e->flags & EDGE_TRUE_VALUE)
9303 {
9304 *true_edge = e;
9305 *false_edge = EDGE_SUCC (b, 1);
9306 }
9307 else
9308 {
9309 *false_edge = e;
9310 *true_edge = EDGE_SUCC (b, 1);
9311 }
9312 }
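
/* Usage sketch (illustrative): find the block executed when the condition
   ending COND_BB evaluates to true.  Assumes COND_BB indeed ends in a
   GIMPLE_COND and therefore has exactly two successors.  */

static basic_block ATTRIBUTE_UNUSED
example_then_block (basic_block cond_bb)
{
  edge true_edge, false_edge;
  extract_true_false_edges_from_block (cond_bb, &true_edge, &false_edge);
  return true_edge->dest;
}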
9313
9314
9315 /* From a controlling predicate in the immediate dominator DOM of
9316 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
9317 predicate evaluates to true and false and store them to
9318 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
9319 they are non-NULL. Returns true if the edges can be determined,
9320 else return false. */
9321
9322 bool
9323 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
9324 edge *true_controlled_edge,
9325 edge *false_controlled_edge)
9326 {
9327 basic_block bb = phiblock;
9328 edge true_edge, false_edge, tem;
9329 edge e0 = NULL, e1 = NULL;
9330
9331 /* We have to verify that one edge into the PHI node is dominated
9332 by the true edge of the predicate block and the other edge
9333 dominated by the false edge. This ensures that the PHI argument
9334 we are going to take is completely determined by the path we
9335 take from the predicate block.
9336 We can only use BB dominance checks below if the destination of
9337 the true/false edges are dominated by their edge, thus only
9338 have a single predecessor. */
9339 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
9340 tem = EDGE_PRED (bb, 0);
9341 if (tem == true_edge
9342 || (single_pred_p (true_edge->dest)
9343 && (tem->src == true_edge->dest
9344 || dominated_by_p (CDI_DOMINATORS,
9345 tem->src, true_edge->dest))))
9346 e0 = tem;
9347 else if (tem == false_edge
9348 || (single_pred_p (false_edge->dest)
9349 && (tem->src == false_edge->dest
9350 || dominated_by_p (CDI_DOMINATORS,
9351 tem->src, false_edge->dest))))
9352 e1 = tem;
9353 else
9354 return false;
9355 tem = EDGE_PRED (bb, 1);
9356 if (tem == true_edge
9357 || (single_pred_p (true_edge->dest)
9358 && (tem->src == true_edge->dest
9359 || dominated_by_p (CDI_DOMINATORS,
9360 tem->src, true_edge->dest))))
9361 e0 = tem;
9362 else if (tem == false_edge
9363 || (single_pred_p (false_edge->dest)
9364 && (tem->src == false_edge->dest
9365 || dominated_by_p (CDI_DOMINATORS,
9366 tem->src, false_edge->dest))))
9367 e1 = tem;
9368 else
9369 return false;
9370 if (!e0 || !e1)
9371 return false;
9372
9373 if (true_controlled_edge)
9374 *true_controlled_edge = e0;
9375 if (false_controlled_edge)
9376 *false_controlled_edge = e1;
9377
9378 return true;
9379 }
9380
9381 /* Generate a range test LHS CODE RHS that determines whether INDEX is in the
9382 range [LOW, HIGH]. Place the associated stmts in BB before its last stmt. */
9383
9384 void
9385 generate_range_test (basic_block bb, tree index, tree low, tree high,
9386 tree *lhs, tree *rhs)
9387 {
9388 tree type = TREE_TYPE (index);
9389 tree utype = range_check_type (type);
9390
9391 low = fold_convert (utype, low);
9392 high = fold_convert (utype, high);
9393
9394 gimple_seq seq = NULL;
9395 index = gimple_convert (&seq, utype, index);
9396 *lhs = gimple_build (&seq, MINUS_EXPR, utype, index, low);
9397 *rhs = const_binop (MINUS_EXPR, utype, high, low);
9398
9399 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9400 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
9401 }
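
/* The generated test relies on the classic unsigned wraparound trick: in
   UTYPE, LOW <= INDEX && INDEX <= HIGH is equivalent to the single
   comparison INDEX - LOW <= HIGH - LOW, because an INDEX below LOW wraps
   around to a huge value.  E.g. for the range [3, 10] and INDEX == 2,
   INDEX - LOW wraps to the maximum value of UTYPE, which exceeds
   HIGH - LOW == 7, so the combined test correctly fails.  */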
9402
9403 /* Return the basic block that belongs to label numbered INDEX
9404 of a switch statement. */
9405
9406 basic_block
9407 gimple_switch_label_bb (function *ifun, gswitch *gs, unsigned index)
9408 {
9409 return label_to_block (ifun, CASE_LABEL (gimple_switch_label (gs, index)));
9410 }
9411
9412 /* Return the default basic block of a switch statement. */
9413
9414 basic_block
9415 gimple_switch_default_bb (function *ifun, gswitch *gs)
9416 {
9417 return gimple_switch_label_bb (ifun, gs, 0);
9418 }
9419
9420 /* Return the edge that belongs to label numbered INDEX
9421 of a switch statement. */
9422
9423 edge
9424 gimple_switch_edge (function *ifun, gswitch *gs, unsigned index)
9425 {
9426 return find_edge (gimple_bb (gs), gimple_switch_label_bb (ifun, gs, index));
9427 }
9428
9429 /* Return the default edge of a switch statement. */
9430
9431 edge
9432 gimple_switch_default_edge (function *ifun, gswitch *gs)
9433 {
9434 return gimple_switch_edge (ifun, gs, 0);
9435 }
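
/* Usage sketch (illustrative): walk all outgoing targets of a switch.
   Label 0 is always the default label, so the loop starts there.  */

static void ATTRIBUTE_UNUSED
example_dump_switch_targets (function *ifun, gswitch *gs)
{
  for (unsigned i = 0; i < gimple_switch_num_labels (gs); ++i)
    fprintf (stderr, "label %u -> bb %d\n", i,
	     gimple_switch_label_bb (ifun, gs, i)->index);
}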
9436
9437 /* Return true if the only executable statement in BB is a GIMPLE_COND. */
9438
9439 bool
9440 cond_only_block_p (basic_block bb)
9441 {
9442 /* BB must have no executable statements. */
9443 gimple_stmt_iterator gsi = gsi_after_labels (bb);
9444 if (phi_nodes (bb))
9445 return false;
9446 while (!gsi_end_p (gsi))
9447 {
9448 gimple *stmt = gsi_stmt (gsi);
9449 if (is_gimple_debug (stmt))
9450 ;
9451 else if (gimple_code (stmt) == GIMPLE_NOP
9452 || gimple_code (stmt) == GIMPLE_PREDICT
9453 || gimple_code (stmt) == GIMPLE_COND)
9454 ;
9455 else
9456 return false;
9457 gsi_next (&gsi);
9458 }
9459 return true;
9460 }
9461
9462
9463 /* Emit return warnings. */
9464
9465 namespace {
9466
9467 const pass_data pass_data_warn_function_return =
9468 {
9469 GIMPLE_PASS, /* type */
9470 "*warn_function_return", /* name */
9471 OPTGROUP_NONE, /* optinfo_flags */
9472 TV_NONE, /* tv_id */
9473 PROP_cfg, /* properties_required */
9474 0, /* properties_provided */
9475 0, /* properties_destroyed */
9476 0, /* todo_flags_start */
9477 0, /* todo_flags_finish */
9478 };
9479
9480 class pass_warn_function_return : public gimple_opt_pass
9481 {
9482 public:
9483 pass_warn_function_return (gcc::context *ctxt)
9484 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
9485 {}
9486
9487 /* opt_pass methods: */
9488 virtual unsigned int execute (function *);
9489
9490 }; // class pass_warn_function_return
9491
9492 unsigned int
9493 pass_warn_function_return::execute (function *fun)
9494 {
9495 location_t location;
9496 gimple *last;
9497 edge e;
9498 edge_iterator ei;
9499
9500 if (!targetm.warn_func_return (fun->decl))
9501 return 0;
9502
9503 /* If we have a path to EXIT, then we do return. */
9504 if (TREE_THIS_VOLATILE (fun->decl)
9505 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
9506 {
9507 location = UNKNOWN_LOCATION;
9508 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
9509 (e = ei_safe_edge (ei)); )
9510 {
9511 last = last_stmt (e->src);
9512 if ((gimple_code (last) == GIMPLE_RETURN
9513 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
9514 && location == UNKNOWN_LOCATION
9515 && ((location = LOCATION_LOCUS (gimple_location (last)))
9516 != UNKNOWN_LOCATION)
9517 && !optimize)
9518 break;
9519 /* When optimizing, replace return stmts in noreturn functions
9520 with a __builtin_unreachable () call. */
9521 if (optimize && gimple_code (last) == GIMPLE_RETURN)
9522 {
9523 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9524 gimple *new_stmt = gimple_build_call (fndecl, 0);
9525 gimple_set_location (new_stmt, gimple_location (last));
9526 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9527 gsi_replace (&gsi, new_stmt, true);
9528 remove_edge (e);
9529 }
9530 else
9531 ei_next (&ei);
9532 }
9533 if (location == UNKNOWN_LOCATION)
9534 location = cfun->function_end_locus;
9535 warning_at (location, 0, "%<noreturn%> function does return");
9536 }
9537
9538 /* If we see "return;" in some basic block, then we do reach the end
9539 without returning a value. */
9540 else if (warn_return_type > 0
9541 && !warning_suppressed_p (fun->decl, OPT_Wreturn_type)
9542 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
9543 {
9544 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
9545 {
9546 gimple *last = last_stmt (e->src);
9547 greturn *return_stmt = dyn_cast <greturn *> (last);
9548 if (return_stmt
9549 && gimple_return_retval (return_stmt) == NULL
9550 && !warning_suppressed_p (last, OPT_Wreturn_type))
9551 {
9552 location = gimple_location (last);
9553 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9554 location = fun->function_end_locus;
9555 if (warning_at (location, OPT_Wreturn_type,
9556 "control reaches end of non-void function"))
9557 suppress_warning (fun->decl, OPT_Wreturn_type);
9558 break;
9559 }
9560 }
9561 /* The C++ FE turns fallthrough from the end of a non-void function
9562 into a __builtin_unreachable () call with BUILTINS_LOCATION.
9563 Recognize those too. */
9564 basic_block bb;
9565 if (!warning_suppressed_p (fun->decl, OPT_Wreturn_type))
9566 FOR_EACH_BB_FN (bb, fun)
9567 if (EDGE_COUNT (bb->succs) == 0)
9568 {
9569 gimple *last = last_stmt (bb);
9570 const enum built_in_function ubsan_missing_ret
9571 = BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
9572 if (last
9573 && ((LOCATION_LOCUS (gimple_location (last))
9574 == BUILTINS_LOCATION
9575 && gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE))
9576 || gimple_call_builtin_p (last, ubsan_missing_ret)))
9577 {
9578 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9579 gsi_prev_nondebug (&gsi);
9580 gimple *prev = gsi_stmt (gsi);
9581 if (prev == NULL)
9582 location = UNKNOWN_LOCATION;
9583 else
9584 location = gimple_location (prev);
9585 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9586 location = fun->function_end_locus;
9587 if (warning_at (location, OPT_Wreturn_type,
9588 "control reaches end of non-void function"))
9589 suppress_warning (fun->decl, OPT_Wreturn_type);
9590 break;
9591 }
9592 }
9593 }
9594 return 0;
9595 }
9596
9597 } // anon namespace
9598
9599 gimple_opt_pass *
9600 make_pass_warn_function_return (gcc::context *ctxt)
9601 {
9602 return new pass_warn_function_return (ctxt);
9603 }
9604
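/* For reference, hand-written C illustrations of the two diagnostics
   emitted by the pass above:

     __attribute__ ((noreturn)) void
     fatal (void)
     {
     }                    // warning: 'noreturn' function does return

     int
     maybe (int x)
     {
       if (x)
         return 1;
     }                    // warning: control reaches end of non-void
                          // function [-Wreturn-type]
*/
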
9605 /* Walk a gimplified function and warn about calls whose return value is
9606 ignored when the callee is declared with attribute((warn_unused_result)).
9607 This is done before inlining, so we don't have to worry about that. */
9608
9609 static void
9610 do_warn_unused_result (gimple_seq seq)
9611 {
9612 tree fdecl, ftype;
9613 gimple_stmt_iterator i;
9614
9615 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
9616 {
9617 gimple *g = gsi_stmt (i);
9618
9619 switch (gimple_code (g))
9620 {
9621 case GIMPLE_BIND:
9622 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
9623 break;
9624 case GIMPLE_TRY:
9625 do_warn_unused_result (gimple_try_eval (g));
9626 do_warn_unused_result (gimple_try_cleanup (g));
9627 break;
9628 case GIMPLE_CATCH:
9629 do_warn_unused_result (gimple_catch_handler (
9630 as_a <gcatch *> (g)));
9631 break;
9632 case GIMPLE_EH_FILTER:
9633 do_warn_unused_result (gimple_eh_filter_failure (g));
9634 break;
9635
9636 case GIMPLE_CALL:
9637 if (gimple_call_lhs (g))
9638 break;
9639 if (gimple_call_internal_p (g))
9640 break;
9641
9642 /* This is a naked call, as opposed to a GIMPLE_CALL with an
9643 LHS. All calls whose value is ignored should be
9644 represented like this. Look for the attribute. */
9645 fdecl = gimple_call_fndecl (g);
9646 ftype = gimple_call_fntype (g);
9647
9648 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
9649 {
9650 location_t loc = gimple_location (g);
9651
9652 if (fdecl)
9653 warning_at (loc, OPT_Wunused_result,
9654 "ignoring return value of %qD "
9655 "declared with attribute %<warn_unused_result%>",
9656 fdecl);
9657 else
9658 warning_at (loc, OPT_Wunused_result,
9659 "ignoring return value of function "
9660 "declared with attribute %<warn_unused_result%>");
9661 }
9662 break;
9663
9664 default:
9665 /* Not a container, not a call, or a call whose value is used. */
9666 break;
9667 }
9668 }
9669 }
9670
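/* For reference, a hand-written illustration of the diagnostic above:

     __attribute__ ((warn_unused_result)) int try_lock (void);

     void
     f (void)
     {
       try_lock ();  // warning: ignoring return value of 'try_lock'
     }

   Giving the call an LHS (assigning its result to something)
   suppresses the warning, which is exactly the gimple_call_lhs check
   above; a plain (void) cast does not suppress it in GCC.  */
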
9671 namespace {
9672
9673 const pass_data pass_data_warn_unused_result =
9674 {
9675 GIMPLE_PASS, /* type */
9676 "*warn_unused_result", /* name */
9677 OPTGROUP_NONE, /* optinfo_flags */
9678 TV_NONE, /* tv_id */
9679 PROP_gimple_any, /* properties_required */
9680 0, /* properties_provided */
9681 0, /* properties_destroyed */
9682 0, /* todo_flags_start */
9683 0, /* todo_flags_finish */
9684 };
9685
9686 class pass_warn_unused_result : public gimple_opt_pass
9687 {
9688 public:
9689 pass_warn_unused_result (gcc::context *ctxt)
9690 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9691 {}
9692
9693 /* opt_pass methods: */
9694 virtual bool gate (function *) { return flag_warn_unused_result; }
9695 virtual unsigned int execute (function *)
9696 {
9697 do_warn_unused_result (gimple_body (current_function_decl));
9698 return 0;
9699 }
9700
9701 }; // class pass_warn_unused_result
9702
9703 } // anon namespace
9704
9705 gimple_opt_pass *
9706 make_pass_warn_unused_result (gcc::context *ctxt)
9707 {
9708 return new pass_warn_unused_result (ctxt);
9709 }
9710
9711 /* Maybe remove stores to variables we marked write-only.
9712 Return true if a store was removed. */
9713 static bool
9714 maybe_remove_writeonly_store (gimple_stmt_iterator &gsi, gimple *stmt,
9715 bitmap dce_ssa_names)
9716 {
9717 /* Keep the access when the store has side effects, e.g. when the
9718 source is volatile. */
9719 if (!gimple_store_p (stmt)
9720 || gimple_has_side_effects (stmt)
9721 || optimize_debug)
9722 return false;
9723
9724 tree lhs = get_base_address (gimple_get_lhs (stmt));
9725
9726 if (!VAR_P (lhs)
9727 || (!TREE_STATIC (lhs) && !DECL_EXTERNAL (lhs))
9728 || !varpool_node::get (lhs)->writeonly)
9729 return false;
9730
9731 if (dump_file && (dump_flags & TDF_DETAILS))
9732 {
9733 fprintf (dump_file, "Removing statement, writes"
9734 " to write only var:\n");
9735 print_gimple_stmt (dump_file, stmt, 0,
9736 TDF_VOPS|TDF_MEMSYMS);
9737 }
9738
9739 /* Mark the RHS SSA name so its defining statement is checked by simple DCE. */
9740 if (gimple_assign_single_p (stmt))
9741 {
9742 tree rhs = gimple_assign_rhs1 (stmt);
9743 if (TREE_CODE (rhs) == SSA_NAME
9744 && !SSA_NAME_IS_DEFAULT_DEF (rhs))
9745 bitmap_set_bit (dce_ssa_names, SSA_NAME_VERSION (rhs));
9746 }
9747 unlink_stmt_vdef (stmt);
9748 gsi_remove (&gsi, true);
9749 release_defs (stmt);
9750 return true;
9751 }
9752
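/* As an illustration of the helper above, for a hypothetical unit

     static int counter;
     void tick (int x) { counter = x; }

   where IPA analysis has proven COUNTER write-only (it is static and
   never read), the store in TICK is removed here and the RHS SSA
   name, unless it is a default definition, is queued for simple
   DCE.  */
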
9753 /* IPA passes, compilation of earlier functions or inlining
9754 might have changed some properties, such as marking functions nothrow,
9755 pure, const or noreturn.
9756 Remove redundant edges and basic blocks, and create new ones if necessary. */
9757
9758 unsigned int
9759 execute_fixup_cfg (void)
9760 {
9761 basic_block bb;
9762 gimple_stmt_iterator gsi;
9763 int todo = 0;
9764 cgraph_node *node = cgraph_node::get (current_function_decl);
9765 /* Same scaling is also done by ipa_merge_profiles. */
9766 profile_count num = node->count;
9767 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
9768 bool scale = num.initialized_p () && !(num == den);
9769 auto_bitmap dce_ssa_names;
9770
9771 if (scale)
9772 {
9773 profile_count::adjust_for_ipa_scaling (&num, &den);
9774 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9775 EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9776 = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
9777 }
9778
9779 FOR_EACH_BB_FN (bb, cfun)
9780 {
9781 if (scale)
9782 bb->count = bb->count.apply_scale (num, den);
9783 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9784 {
9785 gimple *stmt = gsi_stmt (gsi);
9786 tree decl = is_gimple_call (stmt)
9787 ? gimple_call_fndecl (stmt)
9788 : NULL;
9789 if (decl)
9790 {
9791 int flags = gimple_call_flags (stmt);
9792 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9793 {
9794 if (gimple_purge_dead_abnormal_call_edges (bb))
9795 todo |= TODO_cleanup_cfg;
9796
9797 if (gimple_in_ssa_p (cfun))
9798 {
9799 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9800 update_stmt (stmt);
9801 }
9802 }
9803
9804 if (flags & ECF_NORETURN
9805 && fixup_noreturn_call (stmt))
9806 todo |= TODO_cleanup_cfg;
9807 }
9808
9809 /* Remove stores to variables we marked write-only. */
9810 if (maybe_remove_writeonly_store (gsi, stmt, dce_ssa_names))
9811 {
9812 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9813 continue;
9814 }
9815
9816 /* For calls we can simply remove the LHS when it stores
9817 to a write-only variable. */
9818 if (is_gimple_call (stmt)
9819 && gimple_get_lhs (stmt))
9820 {
9821 tree lhs = get_base_address (gimple_get_lhs (stmt));
9822
9823 if (VAR_P (lhs)
9824 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9825 && varpool_node::get (lhs)->writeonly)
9826 {
9827 gimple_call_set_lhs (stmt, NULL);
9828 update_stmt (stmt);
9829 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9830 }
9831 }
9832
9833 if (maybe_clean_eh_stmt (stmt)
9834 && gimple_purge_dead_eh_edges (bb))
9835 todo |= TODO_cleanup_cfg;
9836 gsi_next (&gsi);
9837 }
9838
9839 /* If we have a basic block with no successors that does not
9840 end with a control statement or a noreturn call, end it with
9841 a call to __builtin_unreachable (). This situation can occur
9842 when inlining a noreturn call that does in fact return. */
9843 if (EDGE_COUNT (bb->succs) == 0)
9844 {
9845 gimple *stmt = last_stmt (bb);
9846 if (!stmt
9847 || (!is_ctrl_stmt (stmt)
9848 && (!is_gimple_call (stmt)
9849 || !gimple_call_noreturn_p (stmt))))
9850 {
9851 if (stmt && is_gimple_call (stmt))
9852 gimple_call_set_ctrl_altering (stmt, false);
9853 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9854 stmt = gimple_build_call (fndecl, 0);
9855 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9856 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9857 if (!cfun->after_inlining)
9858 {
9859 gcall *call_stmt = dyn_cast <gcall *> (stmt);
9860 node->create_edge (cgraph_node::get_create (fndecl),
9861 call_stmt, bb->count);
9862 }
9863 }
9864 }
9865 }
9866 if (scale)
9867 {
9868 update_max_bb_count ();
9869 compute_function_frequency ();
9870 }
9871
9872 if (current_loops
9873 && (todo & TODO_cleanup_cfg))
9874 loops_state_set (LOOPS_NEED_FIXUP);
9875
9876 simple_dce_from_worklist (dce_ssa_names);
9877
9878 return todo;
9879 }
9880
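/* To make the profile scaling in execute_fixup_cfg concrete: if IPA
   profile merging set the cgraph node count to 200 while the body
   still carries an entry-block count of 100, NUM/DEN is 200/100 and
   each basic block count is doubled via apply_scale, keeping block
   counts consistent with the new entry count.  */
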
9881 namespace {
9882
9883 const pass_data pass_data_fixup_cfg =
9884 {
9885 GIMPLE_PASS, /* type */
9886 "fixup_cfg", /* name */
9887 OPTGROUP_NONE, /* optinfo_flags */
9888 TV_NONE, /* tv_id */
9889 PROP_cfg, /* properties_required */
9890 0, /* properties_provided */
9891 0, /* properties_destroyed */
9892 0, /* todo_flags_start */
9893 0, /* todo_flags_finish */
9894 };
9895
9896 class pass_fixup_cfg : public gimple_opt_pass
9897 {
9898 public:
9899 pass_fixup_cfg (gcc::context *ctxt)
9900 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
9901 {}
9902
9903 /* opt_pass methods: */
9904 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
9905 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
9906
9907 }; // class pass_fixup_cfg
9908
9909 } // anon namespace
9910
9911 gimple_opt_pass *
9912 make_pass_fixup_cfg (gcc::context *ctxt)
9913 {
9914 return new pass_fixup_cfg (ctxt);
9915 }
9916
9917 /* Garbage collection support for edge_def. */
9918
9919 extern void gt_ggc_mx (tree&);
9920 extern void gt_ggc_mx (gimple *&);
9921 extern void gt_ggc_mx (rtx&);
9922 extern void gt_ggc_mx (basic_block&);
9923
9924 static void
9925 gt_ggc_mx (rtx_insn *& x)
9926 {
9927 if (x)
9928 gt_ggc_mx_rtx_def ((void *) x);
9929 }
9930
9931 void
9932 gt_ggc_mx (edge_def *e)
9933 {
9934 tree block = LOCATION_BLOCK (e->goto_locus);
9935 gt_ggc_mx (e->src);
9936 gt_ggc_mx (e->dest);
9937 if (current_ir_type () == IR_GIMPLE)
9938 gt_ggc_mx (e->insns.g);
9939 else
9940 gt_ggc_mx (e->insns.r);
9941 gt_ggc_mx (block);
9942 }
9943
9944 /* PCH support for edge_def. */
9945
9946 extern void gt_pch_nx (tree&);
9947 extern void gt_pch_nx (gimple *&);
9948 extern void gt_pch_nx (rtx&);
9949 extern void gt_pch_nx (basic_block&);
9950
9951 static void
9952 gt_pch_nx (rtx_insn *& x)
9953 {
9954 if (x)
9955 gt_pch_nx_rtx_def ((void *) x);
9956 }
9957
9958 void
9959 gt_pch_nx (edge_def *e)
9960 {
9961 tree block = LOCATION_BLOCK (e->goto_locus);
9962 gt_pch_nx (e->src);
9963 gt_pch_nx (e->dest);
9964 if (current_ir_type () == IR_GIMPLE)
9965 gt_pch_nx (e->insns.g);
9966 else
9967 gt_pch_nx (e->insns.r);
9968 gt_pch_nx (block);
9969 }
9970
9971 void
9972 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9973 {
9974 tree block = LOCATION_BLOCK (e->goto_locus);
9975 op (&(e->src), NULL, cookie);
9976 op (&(e->dest), NULL, cookie);
9977 if (current_ir_type () == IR_GIMPLE)
9978 op (&(e->insns.g), NULL, cookie);
9979 else
9980 op (&(e->insns.r), NULL, cookie);
9981 op (&(block), &(block), cookie);
9982 }
9983
9984 #if CHECKING_P
9985
9986 namespace selftest {
9987
9988 /* Helper function for CFG selftests: create a dummy function decl
9989 and push it as cfun. */
9990
9991 static tree
9992 push_fndecl (const char *name)
9993 {
9994 tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
9995 /* FIXME: this uses input_location: */
9996 tree fndecl = build_fn_decl (name, fn_type);
9997 tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
9998 NULL_TREE, integer_type_node);
9999 DECL_RESULT (fndecl) = retval;
10000 push_struct_function (fndecl);
10001 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10002 ASSERT_TRUE (fun != NULL);
10003 init_empty_tree_cfg_for_function (fun);
10004 ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
10005 ASSERT_EQ (0, n_edges_for_fn (fun));
10006 return fndecl;
10007 }
10008
10009 /* These tests directly create CFGs.
10010 Compare with the static fns within tree-cfg.cc:
10011 - build_gimple_cfg
10012 - make_blocks: calls create_basic_block (seq, bb);
10013 - make_edges. */
10014
10015 /* Verify a simple cfg of the form:
10016 ENTRY -> A -> B -> C -> EXIT. */
10017
10018 static void
10019 test_linear_chain ()
10020 {
10021 gimple_register_cfg_hooks ();
10022
10023 tree fndecl = push_fndecl ("cfg_test_linear_chain");
10024 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10025
10026 /* Create some empty blocks. */
10027 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
10028 basic_block bb_b = create_empty_bb (bb_a);
10029 basic_block bb_c = create_empty_bb (bb_b);
10030
10031 ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
10032 ASSERT_EQ (0, n_edges_for_fn (fun));
10033
10034 /* Create some edges: a simple linear chain of BBs. */
10035 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
10036 make_edge (bb_a, bb_b, 0);
10037 make_edge (bb_b, bb_c, 0);
10038 make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10039
10040 /* Verify the edges. */
10041 ASSERT_EQ (4, n_edges_for_fn (fun));
10042 ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
10043 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
10044 ASSERT_EQ (1, bb_a->preds->length ());
10045 ASSERT_EQ (1, bb_a->succs->length ());
10046 ASSERT_EQ (1, bb_b->preds->length ());
10047 ASSERT_EQ (1, bb_b->succs->length ());
10048 ASSERT_EQ (1, bb_c->preds->length ());
10049 ASSERT_EQ (1, bb_c->succs->length ());
10050 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
10051 ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
10052
10053 /* Verify the dominance information.
10054 Each BB in our simple chain should be dominated by the one before
10055 it. */
10056 calculate_dominance_info (CDI_DOMINATORS);
10057 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
10058 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
10059 auto_vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
10060 ASSERT_EQ (1, dom_by_b.length ());
10061 ASSERT_EQ (bb_c, dom_by_b[0]);
10062 free_dominance_info (CDI_DOMINATORS);
10063
10064 /* Similarly for post-dominance: each BB in our chain is post-dominated
10065 by the one after it. */
10066 calculate_dominance_info (CDI_POST_DOMINATORS);
10067 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
10068 ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
10069 auto_vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
10070 ASSERT_EQ (1, postdom_by_b.length ());
10071 ASSERT_EQ (bb_a, postdom_by_b[0]);
10072 free_dominance_info (CDI_POST_DOMINATORS);
10073
10074 pop_cfun ();
10075 }
10076
10077 /* Verify a simple CFG of the form:
10078 ENTRY
10079 |
10080 A
10081 / \
10082 /t \f
10083 B C
10084 \ /
10085 \ /
10086 D
10087 |
10088 EXIT. */
10089
10090 static void
10091 test_diamond ()
10092 {
10093 gimple_register_cfg_hooks ();
10094
10095 tree fndecl = push_fndecl ("cfg_test_diamond");
10096 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10097
10098 /* Create some empty blocks. */
10099 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
10100 basic_block bb_b = create_empty_bb (bb_a);
10101 basic_block bb_c = create_empty_bb (bb_a);
10102 basic_block bb_d = create_empty_bb (bb_b);
10103
10104 ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
10105 ASSERT_EQ (0, n_edges_for_fn (fun));
10106
10107 /* Create the edges. */
10108 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
10109 make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
10110 make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
10111 make_edge (bb_b, bb_d, 0);
10112 make_edge (bb_c, bb_d, 0);
10113 make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10114
10115 /* Verify the edges. */
10116 ASSERT_EQ (6, n_edges_for_fn (fun));
10117 ASSERT_EQ (1, bb_a->preds->length ());
10118 ASSERT_EQ (2, bb_a->succs->length ());
10119 ASSERT_EQ (1, bb_b->preds->length ());
10120 ASSERT_EQ (1, bb_b->succs->length ());
10121 ASSERT_EQ (1, bb_c->preds->length ());
10122 ASSERT_EQ (1, bb_c->succs->length ());
10123 ASSERT_EQ (2, bb_d->preds->length ());
10124 ASSERT_EQ (1, bb_d->succs->length ());
10125
10126 /* Verify the dominance information. */
10127 calculate_dominance_info (CDI_DOMINATORS);
10128 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
10129 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
10130 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
10131 auto_vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
10132 ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order. */
10133 dom_by_a.release ();
10134 auto_vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
10135 ASSERT_EQ (0, dom_by_b.length ());
10136 dom_by_b.release ();
10137 free_dominance_info (CDI_DOMINATORS);
10138
10139 /* Similarly for post-dominance. */
10140 calculate_dominance_info (CDI_POST_DOMINATORS);
10141 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
10142 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
10143 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
10144 auto_vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
10145 ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order. */
10146 postdom_by_d.release ();
10147 auto_vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
10148 ASSERT_EQ (0, postdom_by_b.length ());
10149 postdom_by_b.release ();
10150 free_dominance_info (CDI_POST_DOMINATORS);
10151
10152 pop_cfun ();
10153 }
10154
10155 /* Verify that we can handle a CFG containing a "complete" aka
10156 fully-connected subgraph (where each of A, B, C and D below has
10157 edges pointing to every other node and to itself).
10158 e.g.:
10159 ENTRY EXIT
10160 | ^
10161 | /
10162 | /
10163 | /
10164 V/
10165 A<--->B
10166 ^^ ^^
10167 | \ / |
10168 | X |
10169 | / \ |
10170 VV VV
10171 C<--->D
10172 */
10173
10174 static void
10175 test_fully_connected ()
10176 {
10177 gimple_register_cfg_hooks ();
10178
10179 tree fndecl = push_fndecl ("cfg_fully_connected");
10180 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10181
10182 const int n = 4;
10183
10184 /* Create some empty blocks. */
10185 auto_vec <basic_block> subgraph_nodes;
10186 for (int i = 0; i < n; i++)
10187 subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
10188
10189 ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
10190 ASSERT_EQ (0, n_edges_for_fn (fun));
10191
10192 /* Create the edges. */
10193 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
10194 make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10195 for (int i = 0; i < n; i++)
10196 for (int j = 0; j < n; j++)
10197 make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
10198
10199 /* Verify the edges. */
10200 ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
10201 /* The first one is linked to ENTRY/EXIT as well as itself and
10202 everything else. */
10203 ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
10204 ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
10205 /* The other ones in the subgraph are linked to everything in
10206 the subgraph (including themselves). */
10207 for (int i = 1; i < n; i++)
10208 {
10209 ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
10210 ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
10211 }
10212
10213 /* Verify the dominance information. */
10214 calculate_dominance_info (CDI_DOMINATORS);
10215 /* The initial block in the subgraph should be dominated by ENTRY. */
10216 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
10217 get_immediate_dominator (CDI_DOMINATORS,
10218 subgraph_nodes[0]));
10219 /* Every other block in the subgraph should be dominated by the
10220 initial block. */
10221 for (int i = 1; i < n; i++)
10222 ASSERT_EQ (subgraph_nodes[0],
10223 get_immediate_dominator (CDI_DOMINATORS,
10224 subgraph_nodes[i]));
10225 free_dominance_info (CDI_DOMINATORS);
10226
10227 /* Similarly for post-dominance. */
10228 calculate_dominance_info (CDI_POST_DOMINATORS);
10229 /* The initial block in the subgraph should be postdominated by EXIT. */
10230 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
10231 get_immediate_dominator (CDI_POST_DOMINATORS,
10232 subgraph_nodes[0]));
10233 /* Every other block in the subgraph should be postdominated by the
10234 initial block, since that leads to EXIT. */
10235 for (int i = 1; i < n; i++)
10236 ASSERT_EQ (subgraph_nodes[0],
10237 get_immediate_dominator (CDI_POST_DOMINATORS,
10238 subgraph_nodes[i]));
10239 free_dominance_info (CDI_POST_DOMINATORS);
10240
10241 pop_cfun ();
10242 }
10243
10244 /* Run all of the selftests within this file. */
10245
10246 void
10247 tree_cfg_cc_tests ()
10248 {
10249 test_linear_chain ();
10250 test_diamond ();
10251 test_fully_connected ();
10252 }
10253
10254 } // namespace selftest
10255
10256 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
10257 - loop
10258 - nested loops
10259 - switch statement (a block with many out-edges)
10260 - something that jumps to itself
10261 - etc */
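
/* An untested sketch of the "jumps to itself" entry in the list
   above, reusing the helpers in this file:

     static void
     test_self_loop ()
     {
       gimple_register_cfg_hooks ();
       tree fndecl = push_fndecl ("cfg_test_self_loop");
       function *fun = DECL_STRUCT_FUNCTION (fndecl);

       basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
       make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
       make_edge (bb_a, bb_a, 0);   // the self-loop
       make_edge (bb_a, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

       ASSERT_EQ (3, n_edges_for_fn (fun));
       ASSERT_EQ (2, bb_a->preds->length ());
       ASSERT_EQ (2, bb_a->succs->length ());

       pop_cfun ();
     }
*/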
10262
10263 #endif /* CHECKING_P */