1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 /* This file contains the data flow analysis pass of the compiler. It
24 computes data flow information which tells combine_instructions
25 which insns to consider combining and controls register allocation.
26
27 Additional data flow information that is too bulky to record is
28 generated during the analysis, and is used at that time to create
29 autoincrement and autodecrement addressing.
30
31 The first step is dividing the function into basic blocks.
32 find_basic_blocks does this. Then life_analysis determines
33 where each register is live and where it is dead.
34
35 ** find_basic_blocks **
36
37 find_basic_blocks divides the current function's rtl into basic
38 blocks and constructs the CFG. The blocks are recorded in the
39 basic_block_info array; the CFG exists in the edge structures
40 referenced by the blocks.
41
42 find_basic_blocks also finds any unreachable loops and deletes them.
43
44 ** life_analysis **
45
46 life_analysis is called immediately after find_basic_blocks.
47 It uses the basic block information to determine where each
48 hard or pseudo register is live.
49
50 ** live-register info **
51
52 The information about where each register is live is in two parts:
53 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
54
55 basic_block->global_live_at_start has an element for each basic
56 block, and the element is a bit-vector with a bit for each hard or
57 pseudo register. The bit is 1 if the register is live at the
58 beginning of the basic block.
59
60 Two types of elements can be added to an insn's REG_NOTES.
61 A REG_DEAD note is added to an insn's REG_NOTES for any register
62 that meets both of two conditions: The value in the register is not
63 needed in subsequent insns and the insn does not replace the value in
64 the register (in the case of multi-word hard registers, the value in
65 each register must be replaced by the insn to avoid a REG_DEAD note).
66
67 In the vast majority of cases, an object in a REG_DEAD note will be
68 used somewhere in the insn. The (rare) exception to this is if an
69 insn uses a multi-word hard register and only some of the registers are
70 needed in subsequent insns. In that case, REG_DEAD notes will be
71 provided for those hard registers that are not subsequently needed.
72 Partial REG_DEAD notes of this type do not occur when an insn sets
73 only some of the hard registers used in such a multi-word operand;
74 omitting REG_DEAD notes for objects stored in an insn is optional and
75 the desire to do so does not justify the complexity of the partial
76 REG_DEAD notes.
77
78 REG_UNUSED notes are added for each register that is set by the insn
79 but is unused subsequently (if every register set by the insn is unused
80 and the insn does not reference memory or have some other side-effect,
81 the insn is deleted instead). If only part of a multi-word hard
82 register is used in a subsequent insn, REG_UNUSED notes are made for
83 the parts that will not be used.
84
85 To determine which registers are live after any insn, one can
86 start from the beginning of the basic block and scan insns, noting
87 which registers are set by each insn and which die there.
88
89 ** Other actions of life_analysis **
90
91 life_analysis sets up the LOG_LINKS fields of insns because the
92 information needed to do so is readily available.
93
94 life_analysis deletes insns whose only effect is to store a value
95 that is never used.
96
97 life_analysis notices cases where a reference to a register as
98 a memory address can be combined with a preceding or following
99 incrementation or decrementation of the register. The separate
100 instruction to increment or decrement is deleted and the address
101 is changed to a POST_INC or similar rtx.
102
103 Each time an incrementing or decrementing address is created,
104 a REG_INC element is added to the insn's REG_NOTES list.
105
106 life_analysis fills in certain vectors containing information about
107 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
108 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
109
110 life_analysis sets current_function_sp_is_unchanging if the function
111 doesn't modify the stack pointer. */
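
/* For illustration only: given the notes described above, a
   hypothetical consumer could maintain the set of live registers
   while scanning a block forward from its head.  A minimal sketch,
   assuming a regset LIVE initialized from bb->global_live_at_start:

	rtx note;

	for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	  if (REG_NOTE_KIND (note) == REG_DEAD)
	    CLEAR_REGNO_REG_SET (live, REGNO (XEXP (note, 0)));

   followed by setting the bit for each register stored by INSN, and
   clearing it again for registers mentioned in REG_UNUSED notes.  */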
112
113 /* TODO:
114
115 Split out from life_analysis:
116 - local property discovery (bb->local_live, bb->local_set)
117 - global property computation
118 - log links creation
119 - pre/post modify transformation
120 */
121 \f
122 #include "config.h"
123 #include "system.h"
124 #include "tree.h"
125 #include "rtl.h"
126 #include "tm_p.h"
127 #include "basic-block.h"
128 #include "insn-config.h"
129 #include "regs.h"
130 #include "hard-reg-set.h"
131 #include "flags.h"
132 #include "output.h"
133 #include "function.h"
134 #include "except.h"
135 #include "toplev.h"
136 #include "recog.h"
137 #include "insn-flags.h"
138 #include "expr.h"
139
140 #include "obstack.h"
141
142 #define obstack_chunk_alloc xmalloc
143 #define obstack_chunk_free free
144
145
146 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
147 the stack pointer does not matter. The value is tested only in
148 functions that have frame pointers.
149 No definition is equivalent to always zero. */
150 #ifndef EXIT_IGNORE_STACK
151 #define EXIT_IGNORE_STACK 0
152 #endif
153
154 #ifndef HAVE_epilogue
155 #define HAVE_epilogue 0
156 #endif
157 #ifndef HAVE_prologue
158 #define HAVE_prologue 0
159 #endif
160 #ifndef HAVE_sibcall_epilogue
161 #define HAVE_sibcall_epilogue 0
162 #endif
163
164 /* The contents of the current function definition are allocated
165 in this obstack, and all are freed at the end of the function.
166 For top-level functions, this is temporary_obstack.
167 Separate obstacks are made for nested functions. */
168
169 extern struct obstack *function_obstack;
170
171 /* Number of basic blocks in the current function. */
172
173 int n_basic_blocks;
174
175 /* Number of edges in the current function. */
176
177 int n_edges;
178
179 /* The basic block array. */
180
181 varray_type basic_block_info;
182
183 /* The special entry and exit blocks. */
184
185 struct basic_block_def entry_exit_blocks[2]
186 = {{NULL, /* head */
187 NULL, /* end */
188 NULL, /* pred */
189 NULL, /* succ */
190 NULL, /* local_set */
191 NULL, /* global_live_at_start */
192 NULL, /* global_live_at_end */
193 NULL, /* aux */
194 ENTRY_BLOCK, /* index */
195 0, /* loop_depth */
196 -1, -1 /* eh_beg, eh_end */
197 },
198 {
199 NULL, /* head */
200 NULL, /* end */
201 NULL, /* pred */
202 NULL, /* succ */
203 NULL, /* local_set */
204 NULL, /* global_live_at_start */
205 NULL, /* global_live_at_end */
206 NULL, /* aux */
207 EXIT_BLOCK, /* index */
208 0, /* loop_depth */
209 -1, -1 /* eh_beg, eh_end */
210 }
211 };
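
/* These two structures are normally reached through the
   ENTRY_BLOCK_PTR and EXIT_BLOCK_PTR macros from basic-block.h rather
   than by name.  E.g. walking all edges out of the entry block:

	edge e;

	for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
	  ...
 */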
212
213 /* Nonzero if the second flow pass has completed. */
214 int flow2_completed;
215
216 /* Maximum register number used in this function, plus one. */
217
218 int max_regno;
219
220 /* Indexed by n, giving various register information */
221
222 varray_type reg_n_info;
223
224 /* Size of the reg_n_info table. */
225
226 unsigned int reg_n_max;
227
228 /* Element N is the next insn that uses (hard or pseudo) register number N
229 within the current basic block; or zero, if there is no such insn.
230 This is valid only during the final backward scan in propagate_block. */
231
232 static rtx *reg_next_use;
233
234 /* Size of a regset for the current function,
235 in (1) bytes and (2) elements. */
236
237 int regset_bytes;
238 int regset_size;
239
240 /* Regset of regs live when calls to `setjmp'-like functions happen. */
241 /* ??? Does this exist only for the setjmp-clobbered warning message? */
242
243 regset regs_live_at_setjmp;
244
245 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
246 that have to go in the same hard reg.
247 The first two regs in the list are a pair, and the next two
248 are another pair, etc. */
249 rtx regs_may_share;
250
251 /* Depth within loops of basic block being scanned for lifetime analysis,
252 plus one. This is the weight attached to references to registers. */
253
254 static int loop_depth;
255
256 /* During propagate_block, this is non-zero if the value of CC0 is live. */
257
258 static int cc0_live;
259
260 /* During propagate_block, this contains a list of all the MEMs we are
261 tracking for dead store elimination. */
262
263 static rtx mem_set_list;
264
265 /* Set of registers that may be eliminable. These are handled specially
266 in updating regs_ever_live. */
267
268 static HARD_REG_SET elim_reg_set;
269
270 /* The basic block structure for every insn, indexed by uid. */
271
272 varray_type basic_block_for_insn;
273
274 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
275 /* ??? Should probably be using LABEL_NUSES instead. It would take a
276 bit of surgery to be able to use or co-opt the routines in jump. */
277
278 static rtx label_value_list;
279
280 /* Forward declarations */
281 static int count_basic_blocks PARAMS ((rtx));
282 static rtx find_basic_blocks_1 PARAMS ((rtx));
283 static void clear_edges PARAMS ((void));
284 static void make_edges PARAMS ((rtx));
285 static void make_label_edge PARAMS ((sbitmap *, basic_block,
286 rtx, int));
287 static void make_eh_edge PARAMS ((sbitmap *, eh_nesting_info *,
288 basic_block, rtx, int));
289 static void mark_critical_edges PARAMS ((void));
290 static void move_stray_eh_region_notes PARAMS ((void));
291 static void record_active_eh_regions PARAMS ((rtx));
292
293 static void commit_one_edge_insertion PARAMS ((edge));
294
295 static void delete_unreachable_blocks PARAMS ((void));
296 static void delete_eh_regions PARAMS ((void));
297 static int can_delete_note_p PARAMS ((rtx));
298 static int delete_block PARAMS ((basic_block));
299 static void expunge_block PARAMS ((basic_block));
300 static int can_delete_label_p PARAMS ((rtx));
301 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
302 basic_block));
303 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
304 basic_block));
305 static void merge_blocks_nomove PARAMS ((basic_block, basic_block));
306 static int merge_blocks PARAMS ((edge, basic_block, basic_block));
307 static void try_merge_blocks PARAMS ((void));
308 static void tidy_fallthru_edge PARAMS ((edge, basic_block, basic_block));
309 static void tidy_fallthru_edges PARAMS ((void));
310 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
311 static void verify_wide_reg PARAMS ((int, rtx, rtx));
312 static void verify_local_live_at_start PARAMS ((regset, basic_block));
313 static int set_noop_p PARAMS ((rtx));
314 static int noop_move_p PARAMS ((rtx));
315 static void delete_noop_moves PARAMS ((rtx));
316 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
317 static void notice_stack_pointer_modification PARAMS ((rtx));
318 static void mark_reg PARAMS ((rtx, void *));
319 static void mark_regs_live_at_end PARAMS ((regset));
320 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
321 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
322 static void propagate_block PARAMS ((basic_block, regset,
323 regset, int));
324 static int insn_dead_p PARAMS ((rtx, regset, int, rtx));
325 static int libcall_dead_p PARAMS ((rtx, regset, rtx, rtx));
326 static void mark_set_regs PARAMS ((regset, regset, rtx,
327 rtx, regset, int));
328 static void mark_set_1 PARAMS ((regset, regset, rtx,
329 rtx, regset, int));
330 #ifdef AUTO_INC_DEC
331 static void find_auto_inc PARAMS ((regset, rtx, rtx));
332 static int try_pre_increment_1 PARAMS ((rtx));
333 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
334 #endif
335 static void mark_used_regs PARAMS ((regset, regset, rtx, int, rtx));
336 void dump_flow_info PARAMS ((FILE *));
337 void debug_flow_info PARAMS ((void));
338 static void dump_edge_info PARAMS ((FILE *, edge, int));
339
340 static void count_reg_sets_1 PARAMS ((rtx));
341 static void count_reg_sets PARAMS ((rtx));
342 static void count_reg_references PARAMS ((rtx));
343 static void invalidate_mems_from_autoinc PARAMS ((rtx));
344 static void remove_fake_successors PARAMS ((basic_block));
345 static void flow_nodes_print PARAMS ((const char *, const sbitmap, FILE *));
346 static void flow_exits_print PARAMS ((const char *, const edge *, int, FILE *));
347 static void flow_loops_cfg_dump PARAMS ((const struct loops *, FILE *));
348 static int flow_loop_nested_p PARAMS ((struct loop *, struct loop *));
349 static int flow_loop_exits_find PARAMS ((const sbitmap, edge **));
350 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
351 static int flow_depth_first_order_compute PARAMS ((int *));
352 static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
353 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
354 static void flow_loops_tree_build PARAMS ((struct loops *));
355 static int flow_loop_level_compute PARAMS ((struct loop *, int));
356 static int flow_loops_level_compute PARAMS ((struct loops *));
357 \f
358 /* Find basic blocks of the current function.
359 F is the first insn of the function and NREGS the number of register
360 numbers in use. */
361
362 void
363 find_basic_blocks (f, nregs, file)
364 rtx f;
365 int nregs ATTRIBUTE_UNUSED;
366 FILE *file ATTRIBUTE_UNUSED;
367 {
368 int max_uid;
369
370 /* Flush out existing data. */
371 if (basic_block_info != NULL)
372 {
373 int i;
374
375 clear_edges ();
376
377 /* Clear bb->aux on all extant basic blocks. We'll use this as a
378 tag for reuse during create_basic_block, just in case some pass
379 copies around basic block notes improperly. */
380 for (i = 0; i < n_basic_blocks; ++i)
381 BASIC_BLOCK (i)->aux = NULL;
382
383 VARRAY_FREE (basic_block_info);
384 }
385
386 n_basic_blocks = count_basic_blocks (f);
387
388 /* Size the basic block table. The actual structures will be allocated
389 by find_basic_blocks_1, since we want to keep the structure pointers
390 stable across calls to find_basic_blocks. */
391 /* ??? This whole issue would be much simpler if we called find_basic_blocks
392 exactly once, and thereafter we don't have a single long chain of
393 instructions at all until close to the end of compilation when we
394 actually lay them out. */
395
396 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
397
398 label_value_list = find_basic_blocks_1 (f);
399
400 /* Record the block to which an insn belongs. */
401 /* ??? This should be done another way, by which (perhaps) a label is
402 tagged directly with the basic block that it starts. It is used for
403 more than that currently, but IMO that is the only valid use. */
404
405 max_uid = get_max_uid ();
406 #ifdef AUTO_INC_DEC
407 /* Leave space for insns life_analysis makes in some cases for auto-inc.
408 These cases are rare, so we don't need too much space. */
409 max_uid += max_uid / 10;
410 #endif
411
412 compute_bb_for_insn (max_uid);
413
414 /* Discover the edges of our cfg. */
415 record_active_eh_regions (f);
416 make_edges (label_value_list);
417
418 /* Do very simple cleanup now, for the benefit of code that runs between
419 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
420 tidy_fallthru_edges ();
421
422 mark_critical_edges ();
423
424 #ifdef ENABLE_CHECKING
425 verify_flow_info ();
426 #endif
427 }
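
/* A minimal sketch of how a pass consumes these results: iterate the
   block array via BASIC_BLOCK and visit each block's insns in order.
   The bounds and accessors are the real ones; the loop body is a
   placeholder:

	int i;

	for (i = 0; i < n_basic_blocks; i++)
	  {
	    basic_block bb = BASIC_BLOCK (i);
	    rtx insn = bb->head;

	    while (1)
	      {
		... examine INSN here ...
		if (insn == bb->end)
		  break;
		insn = NEXT_INSN (insn);
	      }
	  }
 */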
428
429 /* Count the basic blocks of the function. */
430
431 static int
432 count_basic_blocks (f)
433 rtx f;
434 {
435 register rtx insn;
436 register RTX_CODE prev_code;
437 register int count = 0;
438 int eh_region = 0;
439 int call_had_abnormal_edge = 0;
440 rtx prev_call = NULL_RTX;
441
442 prev_code = JUMP_INSN;
443 for (insn = f; insn; insn = NEXT_INSN (insn))
444 {
445 register RTX_CODE code = GET_CODE (insn);
446
447 if (code == CODE_LABEL
448 || (GET_RTX_CLASS (code) == 'i'
449 && (prev_code == JUMP_INSN
450 || prev_code == BARRIER
451 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
452 {
453 count++;
454 }
455
456 /* Record whether this call created an edge. */
457 if (code == CALL_INSN)
458 {
459 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
460 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
461 prev_call = insn;
462 call_had_abnormal_edge = 0;
463
464 /* If there is an EH region or rethrow, we have an edge. */
465 if ((eh_region && region > 0)
466 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
467 call_had_abnormal_edge = 1;
468 else
469 {
470 /* If there is a nonlocal goto label and the specified
471 region number isn't -1, we have an edge. (0 means
472 no throw, but might have a nonlocal goto). */
473 if (nonlocal_goto_handler_labels && region >= 0)
474 call_had_abnormal_edge = 1;
475 }
476 }
477 else if (code != NOTE)
478 prev_call = NULL_RTX;
479
480 if (code != NOTE)
481 prev_code = code;
482 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
483 ++eh_region;
484 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
485 --eh_region;
486
487 }
488
489 /* The rest of the compiler works a bit more smoothly when we don't have to
490 check for the edge case of do-nothing functions with no basic blocks. */
491 if (count == 0)
492 {
493 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
494 count = 1;
495 }
496
497 return count;
498 }
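
/* As a worked example of the counting rule above, the stream

	insn 1
	jump_insn 2			ends block A
	barrier
	code_label 3			starts block B
	insn 4
	call_insn 5			in an active EH region
	insn 6				starts block C, since the
					preceding call had an
					abnormal edge

   is counted as three basic blocks: A = {1,2}, B = {3,4,5} and
   C = {6,...}.  */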
499
500 /* Find all basic blocks of the function whose first insn is F.
501
502 Collect and return a list of labels whose addresses are taken. This
503 list is used by make_edges to add edges for computed gotos. */
504
505 static rtx
506 find_basic_blocks_1 (f)
507 rtx f;
508 {
509 register rtx insn, next;
510 int call_has_abnormal_edge = 0;
511 int i = 0;
512 rtx bb_note = NULL_RTX;
513 rtx eh_list = NULL_RTX;
514 rtx label_value_list = NULL_RTX;
515 rtx head = NULL_RTX;
516 rtx end = NULL_RTX;
517
518 /* We process the instructions in a slightly different way than we did
519 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
520 closed out the previous block, so that it gets attached at the proper
521 place. Since this form should be equivalent to the previous,
522 count_basic_blocks continues to use the old form as a check. */
523
524 for (insn = f; insn; insn = next)
525 {
526 enum rtx_code code = GET_CODE (insn);
527
528 next = NEXT_INSN (insn);
529
530 if (code == CALL_INSN)
531 {
532 /* Record whether this call created an edge. */
533 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
534 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
535 call_has_abnormal_edge = 0;
536
537 /* If there is an EH region or rethrow, we have an edge. */
538 if ((eh_list && region > 0)
539 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
540 call_has_abnormal_edge = 1;
541 else
542 {
543 /* If there is a nonlocal goto label and the specified
544 region number isn't -1, we have an edge. (0 means
545 no throw, but might have a nonlocal goto). */
546 if (nonlocal_goto_handler_labels && region >= 0)
547 call_has_abnormal_edge = 1;
548 }
549 }
550
551 switch (code)
552 {
553 case NOTE:
554 {
555 int kind = NOTE_LINE_NUMBER (insn);
556
557 /* Keep a LIFO list of the currently active exception notes. */
558 if (kind == NOTE_INSN_EH_REGION_BEG)
559 eh_list = alloc_INSN_LIST (insn, eh_list);
560 else if (kind == NOTE_INSN_EH_REGION_END)
561 {
562 rtx t = eh_list;
563 eh_list = XEXP (eh_list, 1);
564 free_INSN_LIST_node (t);
565 }
566
567 /* Look for basic block notes with which to keep the
568 basic_block_info pointers stable. Unthread the note now;
569 we'll put it back at the right place in create_basic_block.
570 Or not at all if we've already found a note in this block. */
571 else if (kind == NOTE_INSN_BASIC_BLOCK)
572 {
573 if (bb_note == NULL_RTX)
574 bb_note = insn;
575 next = flow_delete_insn (insn);
576 }
577
578 break;
579 }
580
581 case CODE_LABEL:
582 /* A basic block starts at a label. If we've closed one off due
583 to a barrier or some such, no need to do it again. */
584 if (head != NULL_RTX)
585 {
586 /* While we now have edge lists with which other portions of
587 the compiler might determine a call ending a basic block
588 does not imply an abnormal edge, it will be a bit before
589 everything can be updated. So continue to emit a noop at
590 the end of such a block. */
591 if (GET_CODE (end) == CALL_INSN
592 && ! SIBLING_CALL_P (end))
593 {
594 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
595 end = emit_insn_after (nop, end);
596 }
597
598 create_basic_block (i++, head, end, bb_note);
599 bb_note = NULL_RTX;
600 }
601 head = end = insn;
602 break;
603
604 case JUMP_INSN:
605 /* A basic block ends at a jump. */
606 if (head == NULL_RTX)
607 head = insn;
608 else
609 {
610 /* ??? Make a special check for table jumps. The way this
611 happens is truly and amazingly gross. We are about to
612 create a basic block that contains just a code label and
613 an addr*vec jump insn. Worse, an addr_diff_vec creates
614 its own natural loop.
615
616 Prevent this bit of brain damage, pasting things together
617 correctly in make_edges.
618
619 The correct solution involves emitting the table directly
620 on the tablejump instruction as a note, or JUMP_LABEL. */
621
622 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
623 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
624 {
625 head = end = NULL;
626 n_basic_blocks--;
627 break;
628 }
629 }
630 end = insn;
631 goto new_bb_inclusive;
632
633 case BARRIER:
634 /* A basic block ends at a barrier. It may be that an unconditional
635 jump already closed the basic block -- no need to do it again. */
636 if (head == NULL_RTX)
637 break;
638
639 /* While we now have edge lists with which other portions of the
640 compiler might determine a call ending a basic block does not
641 imply an abnormal edge, it will be a bit before everything can
642 be updated. So continue to emit a noop at the end of such a
643 block. */
644 if (GET_CODE (end) == CALL_INSN
645 && ! SIBLING_CALL_P (end))
646 {
647 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
648 end = emit_insn_after (nop, end);
649 }
650 goto new_bb_exclusive;
651
652 case CALL_INSN:
653 /* A basic block ends at a call that can either throw or
654 do a non-local goto. */
655 if (call_has_abnormal_edge)
656 {
657 new_bb_inclusive:
658 if (head == NULL_RTX)
659 head = insn;
660 end = insn;
661
662 new_bb_exclusive:
663 create_basic_block (i++, head, end, bb_note);
664 head = end = NULL_RTX;
665 bb_note = NULL_RTX;
666 break;
667 }
668 /* FALLTHRU */
669
670 default:
671 if (GET_RTX_CLASS (code) == 'i')
672 {
673 if (head == NULL_RTX)
674 head = insn;
675 end = insn;
676 }
677 break;
678 }
679
680 if (GET_RTX_CLASS (code) == 'i')
681 {
682 rtx note;
683
684 /* Make a list of all labels referred to other than by jumps
685 (which just don't have the REG_LABEL notes).
686
687 Make a special exception for labels followed by an ADDR*VEC,
688 as this would be a part of the tablejump setup code.
689
690 Make a special exception for the eh_return_stub_label, which
691 we know isn't part of any otherwise visible control flow. */
692
693 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
694 if (REG_NOTE_KIND (note) == REG_LABEL)
695 {
696 rtx lab = XEXP (note, 0), next;
697
698 if (lab == eh_return_stub_label)
699 ;
700 else if ((next = next_nonnote_insn (lab)) != NULL
701 && GET_CODE (next) == JUMP_INSN
702 && (GET_CODE (PATTERN (next)) == ADDR_VEC
703 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
704 ;
705 else
706 label_value_list
707 = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
708 }
709 }
710 }
711
712 if (head != NULL_RTX)
713 create_basic_block (i++, head, end, bb_note);
714
715 if (i != n_basic_blocks)
716 abort ();
717
718 return label_value_list;
719 }
720
721 /* Tidy the CFG by deleting unreachable code and whatnot. */
722
723 void
724 cleanup_cfg (f)
725 rtx f;
726 {
727 delete_unreachable_blocks ();
728 move_stray_eh_region_notes ();
729 record_active_eh_regions (f);
730 try_merge_blocks ();
731 mark_critical_edges ();
732
733 /* Kill the data we won't maintain. */
734 label_value_list = NULL_RTX;
735 }
736
737 /* Create a new basic block consisting of the instructions between
738 HEAD and END inclusive. Reuses the note and basic block struct
739 in BB_NOTE, if any. */
740
741 void
742 create_basic_block (index, head, end, bb_note)
743 int index;
744 rtx head, end, bb_note;
745 {
746 basic_block bb;
747
748 if (bb_note
749 && ! RTX_INTEGRATED_P (bb_note)
750 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
751 && bb->aux == NULL)
752 {
753 /* If we found an existing note, thread it back onto the chain. */
754
755 if (GET_CODE (head) == CODE_LABEL)
756 add_insn_after (bb_note, head);
757 else
758 {
759 add_insn_before (bb_note, head);
760 head = bb_note;
761 }
762 }
763 else
764 {
765 /* Otherwise we must create a note and a basic block structure.
766 Since we allow basic block structs in rtl, give the struct
767 the same lifetime by allocating it off the function obstack
768 rather than using malloc. */
769
770 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
771 memset (bb, 0, sizeof (*bb));
772
773 if (GET_CODE (head) == CODE_LABEL)
774 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
775 else
776 {
777 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
778 head = bb_note;
779 }
780 NOTE_BASIC_BLOCK (bb_note) = bb;
781 }
782
783 /* Always include the bb note in the block. */
784 if (NEXT_INSN (end) == bb_note)
785 end = bb_note;
786
787 bb->head = head;
788 bb->end = end;
789 bb->index = index;
790 BASIC_BLOCK (index) = bb;
791
792 /* Tag the block so that we know it has been used when considering
793 other basic block notes. */
794 bb->aux = bb;
795 }
796 \f
797 /* Record the basic block containing each instruction, indexed by
798 INSN_UID, in the basic_block_for_insn varray. MAX is its size. */
799
800 void
801 compute_bb_for_insn (max)
802 int max;
803 {
804 int i;
805
806 if (basic_block_for_insn)
807 VARRAY_FREE (basic_block_for_insn);
808 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
809
810 for (i = 0; i < n_basic_blocks; ++i)
811 {
812 basic_block bb = BASIC_BLOCK (i);
813 rtx insn, end;
814
815 end = bb->end;
816 insn = bb->head;
817 while (1)
818 {
819 int uid = INSN_UID (insn);
820 if (uid < max)
821 VARRAY_BB (basic_block_for_insn, uid) = bb;
822 if (insn == end)
823 break;
824 insn = NEXT_INSN (insn);
825 }
826 }
827 }
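
/* Once the table is filled in, the reverse mapping is a simple array
   read via the BLOCK_FOR_INSN macro, e.g.

	basic_block bb = BLOCK_FOR_INSN (insn);

   This is only valid for insns that existed when the table was
   built; insns created afterwards must be entered explicitly with
   set_block_for_insn.  */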
828
829 /* Free the memory associated with the edge structures. */
830
831 static void
832 clear_edges ()
833 {
834 int i;
835 edge n, e;
836
837 for (i = 0; i < n_basic_blocks; ++i)
838 {
839 basic_block bb = BASIC_BLOCK (i);
840
841 for (e = bb->succ; e ; e = n)
842 {
843 n = e->succ_next;
844 free (e);
845 }
846
847 bb->succ = 0;
848 bb->pred = 0;
849 }
850
851 for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
852 {
853 n = e->succ_next;
854 free (e);
855 }
856
857 ENTRY_BLOCK_PTR->succ = 0;
858 EXIT_BLOCK_PTR->pred = 0;
859
860 n_edges = 0;
861 }
862
863 /* Identify the edges between basic blocks.
864
865 LABEL_VALUE_LIST is a list of labels whose addresses are taken other
866 than by jumps. Blocks that are otherwise unreachable may be reachable
867 via a computed jump or a non-local goto to one of these labels.
868 The exception regions active in each block are read from bb->eh_beg
869 and bb->eh_end, as set up by record_active_eh_regions. */
870
871 static void
872 make_edges (label_value_list)
873 rtx label_value_list;
874 {
875 int i;
876 eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
877 sbitmap *edge_cache = NULL;
878
879 /* Assume no computed jump; revise as we create edges. */
880 current_function_has_computed_jump = 0;
881
882 /* Heavy use of computed goto in machine-generated code can lead to
883 nearly fully-connected CFGs. In that case we spend a significant
884 amount of time searching the edge lists for duplicates. */
885 if (forced_labels || label_value_list)
886 {
887 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
888 sbitmap_vector_zero (edge_cache, n_basic_blocks);
889 }
890
891 /* By nature of the way these get numbered, block 0 is always the entry. */
892 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
893
894 for (i = 0; i < n_basic_blocks; ++i)
895 {
896 basic_block bb = BASIC_BLOCK (i);
897 rtx insn, x;
898 enum rtx_code code;
899 int force_fallthru = 0;
900
901 /* Examine the last instruction of the block, and discover the
902 ways we can leave the block. */
903
904 insn = bb->end;
905 code = GET_CODE (insn);
906
907 /* A branch. */
908 if (code == JUMP_INSN)
909 {
910 rtx tmp;
911
912 /* ??? Recognize a tablejump and do the right thing. */
913 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
914 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
915 && GET_CODE (tmp) == JUMP_INSN
916 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
917 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
918 {
919 rtvec vec;
920 int j;
921
922 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
923 vec = XVEC (PATTERN (tmp), 0);
924 else
925 vec = XVEC (PATTERN (tmp), 1);
926
927 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
928 make_label_edge (edge_cache, bb,
929 XEXP (RTVEC_ELT (vec, j), 0), 0);
930
931 /* Some targets (e.g., ARM) emit a conditional jump that also
932 contains the out-of-range target. Scan for these and
933 add an edge if necessary. */
934 if ((tmp = single_set (insn)) != NULL
935 && SET_DEST (tmp) == pc_rtx
936 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
937 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
938 make_label_edge (edge_cache, bb,
939 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
940
941 #ifdef CASE_DROPS_THROUGH
942 /* Silly VAXen. The ADDR_VEC is going to be in the way of
943 us naturally detecting fallthru into the next block. */
944 force_fallthru = 1;
945 #endif
946 }
947
948 /* If this is a computed jump, then mark it as reaching
949 everything on the label_value_list and forced_labels list. */
950 else if (computed_jump_p (insn))
951 {
952 current_function_has_computed_jump = 1;
953
954 for (x = label_value_list; x; x = XEXP (x, 1))
955 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
956
957 for (x = forced_labels; x; x = XEXP (x, 1))
958 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
959 }
960
961 /* A return insn creates an edge to the exit block. */
962 else if (returnjump_p (insn))
963 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
964
965 /* Otherwise, we have a plain conditional or unconditional jump. */
966 else
967 {
968 if (! JUMP_LABEL (insn))
969 abort ();
970 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
971 }
972 }
973
974 /* If this is a sibling call insn, then this is in effect a
975 combined call and return, and so we need an edge to the
976 exit block. No need to worry about EH edges, since we
977 wouldn't have created the sibling call in the first place. */
978
979 if (code == CALL_INSN && SIBLING_CALL_P (insn))
980 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
981 else
982
983 /* If this is a CALL_INSN, then mark it as reaching the active EH
984 handler for this CALL_INSN. If we're handling asynchronous
985 exceptions then any insn can reach any of the active handlers.
986
987 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
988
989 if (code == CALL_INSN || asynchronous_exceptions)
990 {
991 /* Add any appropriate EH edges. We do this unconditionally
992 since there may be a REG_EH_REGION or REG_EH_RETHROW note
993 on the call, and this needn't be within an EH region. */
994 make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);
995
996 /* If we have asynchronous exceptions, do the same for *all*
997 exception regions active in the block. */
998 if (asynchronous_exceptions
999 && bb->eh_beg != bb->eh_end)
1000 {
1001 if (bb->eh_beg >= 0)
1002 make_eh_edge (edge_cache, eh_nest_info, bb,
1003 NULL_RTX, bb->eh_beg);
1004
1005 for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
1006 if (GET_CODE (x) == NOTE
1007 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
1008 || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
1009 {
1010 int region = NOTE_EH_HANDLER (x);
1011 make_eh_edge (edge_cache, eh_nest_info, bb,
1012 NULL_RTX, region);
1013 }
1014 }
1015
1016 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1017 {
1018 /* ??? This could be made smarter: in some cases it's possible
1019 to tell that certain calls will not do a nonlocal goto.
1020
1021 For example, if the nested functions that do the nonlocal
1022 gotos do not have their addresses taken, then only calls to
1023 those functions or to other nested functions that use them
1024 could possibly do nonlocal gotos. */
1025 /* We do know that a REG_EH_REGION note with a value less
1026 than 0 is guaranteed not to perform a non-local goto. */
1027 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1028 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1029 for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1030 make_label_edge (edge_cache, bb, XEXP (x, 0),
1031 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1032 }
1033 }
1034
1035 /* We know something about the structure of the function __throw in
1036 libgcc2.c. It is the only function that ever contains eh_stub
1037 labels. It modifies its return address so that the last block
1038 returns to one of the eh_stub labels within it. So we have to
1039 make additional edges in the flow graph. */
1040 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1041 make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
1042
1043 /* Find out if we can drop through to the next block. */
1044 insn = next_nonnote_insn (insn);
1045 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1046 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1047 else if (i + 1 < n_basic_blocks)
1048 {
1049 rtx tmp = BLOCK_HEAD (i + 1);
1050 if (GET_CODE (tmp) == NOTE)
1051 tmp = next_nonnote_insn (tmp);
1052 if (force_fallthru || insn == tmp)
1053 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1054 }
1055 }
1056
1057 free_eh_nesting_info (eh_nest_info);
1058 if (edge_cache)
1059 sbitmap_vector_free (edge_cache);
1060 }
1061
1062 /* Create an edge between two basic blocks. FLAGS is auxiliary information
1063 about the edge; if the edge already exists, FLAGS is OR'd into its flags. */
1064
1065 void
1066 make_edge (edge_cache, src, dst, flags)
1067 sbitmap *edge_cache;
1068 basic_block src, dst;
1069 int flags;
1070 {
1071 int use_edge_cache;
1072 edge e;
1073
1074 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1075 many edges to them, and we didn't allocate memory for it. */
1076 use_edge_cache = (edge_cache
1077 && src != ENTRY_BLOCK_PTR
1078 && dst != EXIT_BLOCK_PTR);
1079
1080 /* Make sure we don't add duplicate edges. */
1081 if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
1082 for (e = src->succ; e ; e = e->succ_next)
1083 if (e->dest == dst)
1084 {
1085 e->flags |= flags;
1086 return;
1087 }
1088
1089 e = (edge) xcalloc (1, sizeof (*e));
1090 n_edges++;
1091
1092 e->succ_next = src->succ;
1093 e->pred_next = dst->pred;
1094 e->src = src;
1095 e->dest = dst;
1096 e->flags = flags;
1097
1098 src->succ = e;
1099 dst->pred = e;
1100
1101 if (use_edge_cache)
1102 SET_BIT (edge_cache[src->index], dst->index);
1103 }
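
/* The succ and pred chains built here are plain singly-linked lists,
   walked as in this sketch:

	edge e;

	for (e = bb->succ; e; e = e->succ_next)
	  if (e->flags & EDGE_FALLTHRU)
	    break;

   Each edge structure appears on exactly one succ list (that of
   e->src) and one pred list (that of e->dest), which is why
   clear_edges above frees an edge only while walking succ lists.  */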
1104
1105 /* Create an edge from a basic block to a label. */
1106
1107 static void
1108 make_label_edge (edge_cache, src, label, flags)
1109 sbitmap *edge_cache;
1110 basic_block src;
1111 rtx label;
1112 int flags;
1113 {
1114 if (GET_CODE (label) != CODE_LABEL)
1115 abort ();
1116
1117 /* If the label was never emitted, this insn is junk, but avoid a
1118 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1119 as a result of a syntax error and a diagnostic has already been
1120 printed. */
1121
1122 if (INSN_UID (label) == 0)
1123 return;
1124
1125 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1126 }
1127
1128 /* Create the edges generated by INSN in REGION. */
1129
1130 static void
1131 make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
1132 sbitmap *edge_cache;
1133 eh_nesting_info *eh_nest_info;
1134 basic_block src;
1135 rtx insn;
1136 int region;
1137 {
1138 handler_info **handler_list;
1139 int num, is_call;
1140
1141 is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1142 num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
1143 while (--num >= 0)
1144 {
1145 make_label_edge (edge_cache, src, handler_list[num]->handler_label,
1146 EDGE_ABNORMAL | EDGE_EH | is_call);
1147 }
1148 }
1149
1150 /* EH_REGION notes appearing between basic blocks are ambiguous, and even
1151 dangerous if we intend to move basic blocks around. Move such notes
1152 into the following block. */
1153
1154 static void
1155 move_stray_eh_region_notes ()
1156 {
1157 int i;
1158 basic_block b1, b2;
1159
1160 if (n_basic_blocks < 2)
1161 return;
1162
1163 b2 = BASIC_BLOCK (n_basic_blocks - 1);
1164 for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
1165 {
1166 rtx insn, next, list = NULL_RTX;
1167
1168 b1 = BASIC_BLOCK (i);
1169 for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
1170 {
1171 next = NEXT_INSN (insn);
1172 if (GET_CODE (insn) == NOTE
1173 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1174 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1175 {
1176 /* Unlink from the insn chain. */
1177 NEXT_INSN (PREV_INSN (insn)) = next;
1178 PREV_INSN (next) = PREV_INSN (insn);
1179
1180 /* Queue it. */
1181 NEXT_INSN (insn) = list;
1182 list = insn;
1183 }
1184 }
1185
1186 if (list == NULL_RTX)
1187 continue;
1188
1189 /* Find where to insert these things. */
1190 insn = b2->head;
1191 if (GET_CODE (insn) == CODE_LABEL)
1192 insn = NEXT_INSN (insn);
1193
1194 while (list)
1195 {
1196 next = NEXT_INSN (list);
1197 add_insn_after (list, insn);
1198 list = next;
1199 }
1200 }
1201 }
1202
1203 /* Recompute eh_beg/eh_end for each basic block. */
1204
1205 static void
1206 record_active_eh_regions (f)
1207 rtx f;
1208 {
1209 rtx insn, eh_list = NULL_RTX;
1210 int i = 0;
1211 basic_block bb = BASIC_BLOCK (0);
1212
1213 for (insn = f; insn ; insn = NEXT_INSN (insn))
1214 {
1215 if (bb->head == insn)
1216 bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1217
1218 if (GET_CODE (insn) == NOTE)
1219 {
1220 int kind = NOTE_LINE_NUMBER (insn);
1221 if (kind == NOTE_INSN_EH_REGION_BEG)
1222 eh_list = alloc_INSN_LIST (insn, eh_list);
1223 else if (kind == NOTE_INSN_EH_REGION_END)
1224 {
1225 rtx t = XEXP (eh_list, 1);
1226 free_INSN_LIST_node (eh_list);
1227 eh_list = t;
1228 }
1229 }
1230
1231 if (bb->end == insn)
1232 {
1233 bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1234 i += 1;
1235 if (i == n_basic_blocks)
1236 break;
1237 bb = BASIC_BLOCK (i);
1238 }
1239 }
1240 }
1241
1242 /* Identify critical edges and set the bits appropriately. */
1243
1244 static void
1245 mark_critical_edges ()
1246 {
1247 int i, n = n_basic_blocks;
1248 basic_block bb;
1249
1250 /* We begin with the entry block. This is not terribly important now,
1251 but could be if a front end (Fortran) implemented alternate entry
1252 points. */
1253 bb = ENTRY_BLOCK_PTR;
1254 i = -1;
1255
1256 while (1)
1257 {
1258 edge e;
1259
1260 /* (1) Critical edges must have a source with multiple successors. */
1261 if (bb->succ && bb->succ->succ_next)
1262 {
1263 for (e = bb->succ; e ; e = e->succ_next)
1264 {
1265 /* (2) Critical edges must have a destination with multiple
1266 predecessors. Note that we know there is at least one
1267 predecessor -- the edge we followed to get here. */
1268 if (e->dest->pred->pred_next)
1269 e->flags |= EDGE_CRITICAL;
1270 else
1271 e->flags &= ~EDGE_CRITICAL;
1272 }
1273 }
1274 else
1275 {
1276 for (e = bb->succ; e ; e = e->succ_next)
1277 e->flags &= ~EDGE_CRITICAL;
1278 }
1279
1280 if (++i >= n)
1281 break;
1282 bb = BASIC_BLOCK (i);
1283 }
1284 }
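
/* Restated as a predicate, the marking above is equivalent to this
   sketch (edge_is_critical_p is hypothetical, not part of the
   interface, and assumes E is a live edge):

	static int
	edge_is_critical_p (e)
	     edge e;
	{
	  return (e->src->succ->succ_next != NULL
		  && e->dest->pred->pred_next != NULL);
	}

   That is, the source has multiple successors and the destination
   multiple predecessors; no insn can be placed on such an edge
   without first splitting it.  */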
1285 \f
1286 /* Split a (typically critical) edge. Return the new block.
1287 Abort on abnormal edges.
1288
1289 ??? The code generally expects to be called on critical edges.
1290 The case of a block ending in an unconditional jump to a
1291 block with multiple predecessors is not handled optimally. */
1292
1293 basic_block
1294 split_edge (edge_in)
1295 edge edge_in;
1296 {
1297 basic_block old_pred, bb, old_succ;
1298 edge edge_out;
1299 rtx bb_note;
1300 int i, j;
1301
1302 /* Abnormal edges cannot be split. */
1303 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1304 abort ();
1305
1306 old_pred = edge_in->src;
1307 old_succ = edge_in->dest;
1308
1309 /* Remove the existing edge from the destination's pred list. */
1310 {
1311 edge *pp;
1312 for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1313 continue;
1314 *pp = edge_in->pred_next;
1315 edge_in->pred_next = NULL;
1316 }
1317
1318 /* Create the new structures. */
1319 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1320 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1321 n_edges++;
1322
1323 memset (bb, 0, sizeof (*bb));
1324 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1325 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1326
1327 /* ??? This info is likely going to be out of date very soon. */
1328 if (old_succ->global_live_at_start)
1329 {
1330 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1331 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1332 }
1333 else
1334 {
1335 CLEAR_REG_SET (bb->global_live_at_start);
1336 CLEAR_REG_SET (bb->global_live_at_end);
1337 }
1338
1339 /* Wire them up. */
1340 bb->pred = edge_in;
1341 bb->succ = edge_out;
1342
1343 edge_in->dest = bb;
1344 edge_in->flags &= ~EDGE_CRITICAL;
1345
1346 edge_out->pred_next = old_succ->pred;
1347 edge_out->succ_next = NULL;
1348 edge_out->src = bb;
1349 edge_out->dest = old_succ;
1350 edge_out->flags = EDGE_FALLTHRU;
1351 edge_out->probability = REG_BR_PROB_BASE;
1352
1353 old_succ->pred = edge_out;
1354
1355 /* Tricky case -- if there existed a fallthru into the successor
1356 (and we're not it) we must add a new unconditional jump around
1357 the new block we're actually interested in.
1358
1359 Further, if that edge is critical, this means a second new basic
1360 block must be created to hold it. In order to simplify correct
1361 insn placement, do this before we touch the existing basic block
1362 ordering for the block we were really wanting. */
1363 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1364 {
1365 edge e;
1366 for (e = edge_out->pred_next; e ; e = e->pred_next)
1367 if (e->flags & EDGE_FALLTHRU)
1368 break;
1369
1370 if (e)
1371 {
1372 basic_block jump_block;
1373 rtx pos;
1374
1375 if ((e->flags & EDGE_CRITICAL) == 0
1376 && e->src != ENTRY_BLOCK_PTR)
1377 {
1378 /* Non critical -- we can simply add a jump to the end
1379 of the existing predecessor. */
1380 jump_block = e->src;
1381 }
1382 else
1383 {
1384 /* We need a new block to hold the jump. The simplest
1385 way to do the bulk of the work here is to recursively
1386 call ourselves. */
1387 jump_block = split_edge (e);
1388 e = jump_block->succ;
1389 }
1390
1391 /* Now add the jump insn ... */
1392 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1393 jump_block->end);
1394 jump_block->end = pos;
1395 if (basic_block_for_insn)
1396 set_block_for_insn (pos, jump_block);
1397 emit_barrier_after (pos);
1398
1399 /* ... let jump know that label is in use, ... */
1400 JUMP_LABEL (pos) = old_succ->head;
1401 ++LABEL_NUSES (old_succ->head);
1402
1403 /* ... and clear fallthru on the outgoing edge. */
1404 e->flags &= ~EDGE_FALLTHRU;
1405
1406 /* Continue splitting the interesting edge. */
1407 }
1408 }
1409
1410 /* Place the new block just in front of the successor. */
1411 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1412 if (old_succ == EXIT_BLOCK_PTR)
1413 j = n_basic_blocks - 1;
1414 else
1415 j = old_succ->index;
1416 for (i = n_basic_blocks - 1; i > j; --i)
1417 {
1418 basic_block tmp = BASIC_BLOCK (i - 1);
1419 BASIC_BLOCK (i) = tmp;
1420 tmp->index = i;
1421 }
1422 BASIC_BLOCK (i) = bb;
1423 bb->index = i;
1424
1425 /* Create the basic block note.
1426
1427 Where we place the note can have a noticeable impact on the generated
1428 code. Consider this cfg:
1429
1430
1431              E
1432              |
1433              0
1434             / \
1435         +->1-->2--->E
1436         |  |
1437         +--+
1438
1439 If we need to insert an insn on the edge from block 0 to block 1,
1440 we want to ensure the instructions we insert are outside of any
1441 loop notes that physically sit between block 0 and block 1. Otherwise
1442 we confuse the loop optimizer into thinking the loop is a phony. */
1443 if (old_succ != EXIT_BLOCK_PTR
1444 && PREV_INSN (old_succ->head)
1445 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
1446 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
1447 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1448 PREV_INSN (old_succ->head));
1449 else if (old_succ != EXIT_BLOCK_PTR)
1450 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1451 else
1452 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1453 NOTE_BASIC_BLOCK (bb_note) = bb;
1454 bb->head = bb->end = bb_note;
1455
1456 /* Not quite simple -- for non-fallthru edges, we must adjust the
1457 predecessor's jump instruction to target our new block. */
1458 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1459 {
1460 rtx tmp, insn = old_pred->end;
1461 rtx old_label = old_succ->head;
1462 rtx new_label = gen_label_rtx ();
1463
1464 if (GET_CODE (insn) != JUMP_INSN)
1465 abort ();
1466
1467 /* ??? Recognize a tablejump and adjust all matching cases. */
1468 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1469 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1470 && GET_CODE (tmp) == JUMP_INSN
1471 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1472 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1473 {
1474 rtvec vec;
1475 int j;
1476
1477 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1478 vec = XVEC (PATTERN (tmp), 0);
1479 else
1480 vec = XVEC (PATTERN (tmp), 1);
1481
1482 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1483 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1484 {
1485 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1486 --LABEL_NUSES (old_label);
1487 ++LABEL_NUSES (new_label);
1488 }
1489
1490 /* Handle casesi dispatch insns. */
1491 if ((tmp = single_set (insn)) != NULL
1492 && SET_DEST (tmp) == pc_rtx
1493 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1494 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1495 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1496 {
1497 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1498 new_label);
1499 --LABEL_NUSES (old_label);
1500 ++LABEL_NUSES (new_label);
1501 }
1502 }
1503 else
1504 {
1505 /* This would have indicated an abnormal edge. */
1506 if (computed_jump_p (insn))
1507 abort ();
1508
1509 /* A return instruction can't be redirected. */
1510 if (returnjump_p (insn))
1511 abort ();
1512
1513 /* If the insn doesn't go where we think, we're confused. */
1514 if (JUMP_LABEL (insn) != old_label)
1515 abort ();
1516
1517 redirect_jump (insn, new_label);
1518 }
1519
1520 emit_label_before (new_label, bb_note);
1521 bb->head = new_label;
1522 }
1523
1524 return bb;
1525 }
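
/* A typical (hypothetical) use of split_edge, given a critical edge E
   on which some code must execute:

	basic_block new_bb = split_edge (e);

   NEW_BB then has E as its only predecessor and a single fallthru
   successor to the old destination, so insns may safely be emitted
   after NEW_BB->end.  Most callers should instead use
   insert_insn_on_edge below, which lets commit_edge_insertions
   decide whether a split is needed at all.  */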
1526
1527 /* Queue instructions for insertion on an edge between two basic blocks.
1528 The new instructions and basic blocks (if any) will not appear in the
1529 CFG until commit_edge_insertions is called. */
1530
1531 void
1532 insert_insn_on_edge (pattern, e)
1533 rtx pattern;
1534 edge e;
1535 {
1536 /* We cannot insert instructions on an abnormal critical edge.
1537 It will be easier to find the culprit if we die now. */
1538 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1539 == (EDGE_ABNORMAL|EDGE_CRITICAL))
1540 abort ();
1541
1542 if (e->insns == NULL_RTX)
1543 start_sequence ();
1544 else
1545 push_to_sequence (e->insns);
1546
1547 emit_insn (pattern);
1548
1549 e->insns = get_insns ();
1550 end_sequence ();
1551 }
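
/* The intended calling sequence, sketched with a purely illustrative
   move:

	insert_insn_on_edge (gen_move_insn (dest, src), e);
	... queue insns on any number of other edges ...
	commit_edge_insertions ();

   Nothing appears in the insn stream until commit_edge_insertions
   runs; each queued sequence is then placed in the source block, the
   destination block, or a new block made by split_edge, as decided
   by commit_one_edge_insertion.  */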
1552
1553 /* Update the CFG for the instructions queued on edge E. */
1554
1555 static void
1556 commit_one_edge_insertion (e)
1557 edge e;
1558 {
1559 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp;
1560 basic_block bb;
1561
1562 /* Pull the insns off the edge now since the edge might go away. */
1563 insns = e->insns;
1564 e->insns = NULL_RTX;
1565
1566 /* Figure out where to put these things. If the destination has
1567 one predecessor, insert there. Except for the exit block. */
1568 if (e->dest->pred->pred_next == NULL
1569 && e->dest != EXIT_BLOCK_PTR)
1570 {
1571 bb = e->dest;
1572
1573 /* Get the location correct wrt a code label, and "nice" wrt
1574 a basic block note, and before everything else. */
1575 tmp = bb->head;
1576 if (GET_CODE (tmp) == CODE_LABEL)
1577 tmp = NEXT_INSN (tmp);
1578 if (GET_CODE (tmp) == NOTE
1579 && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
1580 tmp = NEXT_INSN (tmp);
1581 if (tmp == bb->head)
1582 before = tmp;
1583 else
1584 after = PREV_INSN (tmp);
1585 }
1586
1587 /* If the source has one successor and the edge is not abnormal,
1588 insert there. Except for the entry block. */
1589 else if ((e->flags & EDGE_ABNORMAL) == 0
1590 && e->src->succ->succ_next == NULL
1591 && e->src != ENTRY_BLOCK_PTR)
1592 {
1593 bb = e->src;
1594 /* It is possible to have a non-simple jump here. Consider a target
1595 where some forms of unconditional jumps clobber a register. This
1596 happens on the fr30 for example.
1597
1598 We know this block has a single successor, so we can just emit
1599 the queued insns before the jump. */
1600 if (GET_CODE (bb->end) == JUMP_INSN)
1601 {
1602 before = bb->end;
1603 }
1604 else
1605 {
1606 /* We'd better be fallthru, or we've lost track of what's what. */
1607 if ((e->flags & EDGE_FALLTHRU) == 0)
1608 abort ();
1609
1610 after = bb->end;
1611 }
1612 }
1613
1614 /* Otherwise we must split the edge. */
1615 else
1616 {
1617 bb = split_edge (e);
1618 after = bb->end;
1619 }
1620
1621 /* Now that we've found the spot, do the insertion. */
1622
1623 /* Set the new block number for these insns, if structure is allocated. */
1624 if (basic_block_for_insn)
1625 {
1626 rtx i;
1627 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
1628 set_block_for_insn (i, bb);
1629 }
1630
1631 if (before)
1632 {
1633 emit_insns_before (insns, before);
1634 if (before == bb->head)
1635 bb->head = insns;
1636 }
1637 else
1638 {
1639 rtx last = emit_insns_after (insns, after);
1640 if (after == bb->end)
1641 {
1642 bb->end = last;
1643
1644 if (GET_CODE (last) == JUMP_INSN)
1645 {
1646 if (returnjump_p (last))
1647 {
1648 /* ??? Remove all outgoing edges from BB and add one
1649 for EXIT. This is not currently a problem because
1650 this only happens for the (single) epilogue, which
1651 already has a fallthru edge to EXIT. */
1652
1653 e = bb->succ;
1654 if (e->dest != EXIT_BLOCK_PTR
1655 || e->succ_next != NULL
1656 || (e->flags & EDGE_FALLTHRU) == 0)
1657 abort ();
1658 e->flags &= ~EDGE_FALLTHRU;
1659
1660 emit_barrier_after (last);
1661 }
1662 else
1663 abort ();
1664 }
1665 }
1666 }
1667 }
1668
1669 /* Update the CFG for all queued instructions. */
1670
1671 void
1672 commit_edge_insertions ()
1673 {
1674 int i;
1675 basic_block bb;
1676
1677 #ifdef ENABLE_CHECKING
1678 verify_flow_info ();
1679 #endif
1680
1681 i = -1;
1682 bb = ENTRY_BLOCK_PTR;
1683 while (1)
1684 {
1685 edge e, next;
1686
1687 for (e = bb->succ; e ; e = next)
1688 {
1689 next = e->succ_next;
1690 if (e->insns)
1691 commit_one_edge_insertion (e);
1692 }
1693
1694 if (++i >= n_basic_blocks)
1695 break;
1696 bb = BASIC_BLOCK (i);
1697 }
1698 }
1699 \f
1700 /* Delete all unreachable basic blocks. */
1701
1702 static void
1703 delete_unreachable_blocks ()
1704 {
1705 basic_block *worklist, *tos;
1706 int deleted_handler;
1707 edge e;
1708 int i, n;
1709
1710 n = n_basic_blocks;
1711 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
1712
1713 /* Use basic_block->aux as a marker. Clear them all. */
1714
1715 for (i = 0; i < n; ++i)
1716 BASIC_BLOCK (i)->aux = NULL;
1717
1718 /* Add our starting points to the worklist. Almost always there will
1719 be only one. It isn't inconceivable that we might one day directly
1720 support Fortran alternate entry points. */
1721
1722 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1723 {
1724 *tos++ = e->dest;
1725
1726 /* Mark the block with a handy non-null value. */
1727 e->dest->aux = e;
1728 }
1729
1730 /* Iterate: find everything reachable from what we've already seen. */
1731
1732 while (tos != worklist)
1733 {
1734 basic_block b = *--tos;
1735
1736 for (e = b->succ; e ; e = e->succ_next)
1737 if (!e->dest->aux)
1738 {
1739 *tos++ = e->dest;
1740 e->dest->aux = e;
1741 }
1742 }
1743
1744 /* Delete all unreachable basic blocks. Count down so that we don't
1745 interfere with the block renumbering that happens in delete_block. */
1746
1747 deleted_handler = 0;
1748
1749 for (i = n - 1; i >= 0; --i)
1750 {
1751 basic_block b = BASIC_BLOCK (i);
1752
1753 if (b->aux != NULL)
1754 /* This block was found. Tidy up the mark. */
1755 b->aux = NULL;
1756 else
1757 deleted_handler |= delete_block (b);
1758 }
1759
1760 tidy_fallthru_edges ();
1761
1762 /* If we deleted an exception handler, we may have EH region begin/end
1763 blocks to remove as well. */
1764 if (deleted_handler)
1765 delete_eh_regions ();
1766
1767 free (worklist);
1768 }
1769
1770 /* Find EH regions for which there is no longer a handler, and delete them. */
1771
1772 static void
1773 delete_eh_regions ()
1774 {
1775 rtx insn;
1776
1777 update_rethrow_references ();
1778
1779 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1780 if (GET_CODE (insn) == NOTE)
1781 {
1782 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1783 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
1784 {
1785 int num = NOTE_EH_HANDLER (insn);
1786 /* A NULL handler indicates a region is no longer needed,
1787 as long as its rethrow label isn't used. */
1788 if (get_first_handler (num) == NULL && ! rethrow_used (num))
1789 {
1790 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1791 NOTE_SOURCE_FILE (insn) = 0;
1792 }
1793 }
1794 }
1795 }
1796
1797 /* Return true if NOTE is not one of the ones that must be kept paired,
1798 so that we may simply delete them. */
1799
1800 static int
1801 can_delete_note_p (note)
1802 rtx note;
1803 {
1804 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1805 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
1806 }
1807
1808 /* Unlink a chain of insns between START and FINISH, leaving notes
1809 that must be paired. */
1810
1811 void
1812 flow_delete_insn_chain (start, finish)
1813 rtx start, finish;
1814 {
1815 /* Unchain the insns one by one. It would be quicker to delete all
1816 of these with a single unchaining, rather than one at a time, but
1817 we need to keep the NOTEs that must stay paired. */
1818
1819 rtx next;
1820
1821 while (1)
1822 {
1823 next = NEXT_INSN (start);
1824 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1825 ;
1826 else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
1827 ;
1828 else
1829 next = flow_delete_insn (start);
1830
1831 if (start == finish)
1832 break;
1833 start = next;
1834 }
1835 }
1836
1837 /* Delete the insns in a (non-live) block. We physically delete every
1838 non-deleted-note insn, and update the flow graph appropriately.
1839
1840 Return nonzero if we deleted an exception handler. */
1841
1842 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1843 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1844
1845 static int
1846 delete_block (b)
1847 basic_block b;
1848 {
1849 int deleted_handler = 0;
1850 rtx insn, end, tmp;
1851
1852 /* If the head of this block is a CODE_LABEL, then it might be the
1853 label for an exception handler which can't be reached.
1854
1855 We need to remove the label from the exception_handler_label list
1856 and remove the associated NOTE_INSN_EH_REGION_BEG and
1857 NOTE_INSN_EH_REGION_END notes. */
1858
1859 insn = b->head;
1860
1861 never_reached_warning (insn);
1862
1863 if (GET_CODE (insn) == CODE_LABEL)
1864 {
1865 rtx x, *prev = &exception_handler_labels;
1866
1867 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1868 {
1869 if (XEXP (x, 0) == insn)
1870 {
1871 /* Found a match, splice this label out of the EH label list. */
1872 *prev = XEXP (x, 1);
1873 XEXP (x, 1) = NULL_RTX;
1874 XEXP (x, 0) = NULL_RTX;
1875
1876 /* Remove the handler from all regions. */
1877 remove_handler (insn);
1878 deleted_handler = 1;
1879 break;
1880 }
1881 prev = &XEXP (x, 1);
1882 }
1883
1884 /* This label may be referenced by code solely for its value, or
1885 referenced by static data, or something. We have determined
1886 that it is not reachable, but cannot delete the label itself.
1887 Save code space and continue to delete the balance of the block,
1888 along with properly updating the cfg. */
1889 if (!can_delete_label_p (insn))
1890 {
1891 /* If we've only got one of these, skip the whole deleting
1892 insns thing. */
1893 if (insn == b->end)
1894 goto no_delete_insns;
1895 insn = NEXT_INSN (insn);
1896 }
1897 }
1898
1899 /* Include any jump table following the basic block. */
1900 end = b->end;
1901 if (GET_CODE (end) == JUMP_INSN
1902 && (tmp = JUMP_LABEL (end)) != NULL_RTX
1903 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1904 && GET_CODE (tmp) == JUMP_INSN
1905 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1906 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1907 end = tmp;
1908
1909 /* Include any barrier that may follow the basic block. */
1910 tmp = next_nonnote_insn (end);
1911 if (tmp && GET_CODE (tmp) == BARRIER)
1912 end = tmp;
1913
1914 /* Selectively delete the entire chain. */
1915 flow_delete_insn_chain (insn, end);
1916
1917 no_delete_insns:
1918
1919 /* Remove the edges into and out of this block. Note that there may
1920 indeed be edges in, if we are removing an unreachable loop. */
1921 {
1922 edge e, next, *q;
1923
1924 for (e = b->pred; e ; e = next)
1925 {
1926 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
1927 continue;
1928 *q = e->succ_next;
1929 next = e->pred_next;
1930 n_edges--;
1931 free (e);
1932 }
1933 for (e = b->succ; e ; e = next)
1934 {
1935 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
1936 continue;
1937 *q = e->pred_next;
1938 next = e->succ_next;
1939 n_edges--;
1940 free (e);
1941 }
1942
1943 b->pred = NULL;
1944 b->succ = NULL;
1945 }
1946
1947 /* Remove the basic block from the array, and compact behind it. */
1948 expunge_block (b);
1949
1950 return deleted_handler;
1951 }
1952
1953 /* Remove block B from the basic block array and compact behind it. */
1954
1955 static void
1956 expunge_block (b)
1957 basic_block b;
1958 {
1959 int i, n = n_basic_blocks;
1960
1961 for (i = b->index; i + 1 < n; ++i)
1962 {
1963 basic_block x = BASIC_BLOCK (i + 1);
1964 BASIC_BLOCK (i) = x;
1965 x->index = i;
1966 }
1967
1968 basic_block_info->num_elements--;
1969 n_basic_blocks--;
1970 }
1971
1972 /* Delete INSN by patching it out. Return the next insn. */
1973
1974 rtx
1975 flow_delete_insn (insn)
1976 rtx insn;
1977 {
1978 rtx prev = PREV_INSN (insn);
1979 rtx next = NEXT_INSN (insn);
1980 rtx note;
1981
1982 PREV_INSN (insn) = NULL_RTX;
1983 NEXT_INSN (insn) = NULL_RTX;
1984
1985 if (prev)
1986 NEXT_INSN (prev) = next;
1987 if (next)
1988 PREV_INSN (next) = prev;
1989 else
1990 set_last_insn (prev);
1991
1992 if (GET_CODE (insn) == CODE_LABEL)
1993 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
1994
1995 /* If deleting a jump, decrement the use count of the label. Deleting
1996 the label itself should happen in the normal course of block merging. */
1997 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1998 LABEL_NUSES (JUMP_LABEL (insn))--;
1999
2000 /* Also if deleting an insn that references a label. */
2001 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX)
2002 LABEL_NUSES (XEXP (note, 0))--;
2003
2004 return next;
2005 }
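/* Note that the insn is only unlinked from the chain, not freed;
   callers such as flow_delete_insn_chain keep walking via the
   returned NEXT.  */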
2006
2007 /* True if a given label can be deleted. */
2008
2009 static int
2010 can_delete_label_p (label)
2011 rtx label;
2012 {
2013 rtx x;
2014
2015 if (LABEL_PRESERVE_P (label))
2016 return 0;
2017
2018 for (x = forced_labels; x ; x = XEXP (x, 1))
2019 if (label == XEXP (x, 0))
2020 return 0;
2021 for (x = label_value_list; x ; x = XEXP (x, 1))
2022 if (label == XEXP (x, 0))
2023 return 0;
2024 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
2025 if (label == XEXP (x, 0))
2026 return 0;
2027
2028 /* User-declared labels must be preserved. */
2029 if (LABEL_NAME (label) != 0)
2030 return 0;
2031
2032 return 1;
2033 }
2034
2035 /* Blocks A and B are to be merged into a single block. A has no incoming
2036 fallthru edge, so it can be moved before B without adding or modifying
2037 any jumps (aside from the jump from A to B). */
2038
2039 static int
2040 merge_blocks_move_predecessor_nojumps (a, b)
2041 basic_block a, b;
2042 {
2043 rtx start, end, barrier;
2044 int index;
2045
2046 start = a->head;
2047 end = a->end;
2048
2049 /* We want to delete the BARRIER after the end of the insns we are
2050 going to move. If we don't find a BARRIER, then do nothing. This
2051 can happen in some cases if we have labels we cannot delete.
2052
2053 Similarly, do nothing if we cannot delete the label at the start
2054 of the target block. */
2055 barrier = next_nonnote_insn (end);
2056 if (GET_CODE (barrier) != BARRIER
2057 || (GET_CODE (b->head) == CODE_LABEL
2058 && ! can_delete_label_p (b->head)))
2059 return 0;
2060 else
2061 flow_delete_insn (barrier);
2062
2063 /* Move block and loop notes out of the chain so that we do not
2064 disturb their order.
2065
2066 ??? A better solution would be to squeeze out all the non-nested notes
2067 and adjust the block trees appropriately. Even better would be to have
2068 a tighter connection between block trees and rtl so that this is not
2069 necessary. */
2070 start = squeeze_notes (start, end);
2071
2072 /* Scramble the insn chain. */
2073 if (end != PREV_INSN (b->head))
2074 reorder_insns (start, end, PREV_INSN (b->head));
2075
2076 if (rtl_dump_file)
2077 {
2078 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2079 a->index, b->index);
2080 }
2081
2082 /* Swap the records for the two blocks around. Although we are deleting B,
2083 A is now where B was and we want to compact the BB array from where
2084 A used to be. */
2085 BASIC_BLOCK (a->index) = b;
2086 BASIC_BLOCK (b->index) = a;
2087 index = a->index;
2088 a->index = b->index;
2089 b->index = index;
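/* A worked example of the swap: if A is block 2 and B is block 5,
   then afterwards BASIC_BLOCK (2) == b, BASIC_BLOCK (5) == a,
   a->index == 5 and b->index == 2.  merge_blocks_nomove below then
   expunges B, so the array is compacted starting at index 2, which
   is A's old slot, as the comment above requires.  */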
2090
2091 /* Now blocks A and B are contiguous. Merge them. */
2092 merge_blocks_nomove (a, b);
2093
2094 return 1;
2095 }
2096
2097 /* Blocks A and B are to be merged into a single block. B has no outgoing
2098 fallthru edge, so it can be moved after A without adding or modifying
2099 any jumps (aside from the jump from A to B). */
2100
2101 static int
2102 merge_blocks_move_successor_nojumps (a, b)
2103 basic_block a, b;
2104 {
2105 rtx start, end, barrier;
2106
2107 start = b->head;
2108 end = b->end;
2109
2110 /* We want to delete the BARRIER after the end of the insns we are
2111 going to move. If we don't find a BARRIER, then do nothing. This
2112 can happen in some cases if we have labels we cannot delete.
2113
2114 Similarly, do nothing if we cannot delete the label at the start
2115 of the target block. */
2116 barrier = next_nonnote_insn (end);
2117 if (GET_CODE (barrier) != BARRIER
2118 || (GET_CODE (b->head) == CODE_LABEL
2119 && ! can_delete_label_p (b->head)))
2120 return 0;
2121 else
2122 flow_delete_insn (barrier);
2123
2124 /* Move block and loop notes out of the chain so that we do not
2125 disturb their order.
2126
2127 ??? A better solution would be to squeeze out all the non-nested notes
2128 and adjust the block trees appropriately. Even better would be to have
2129 a tighter connection between block trees and rtl so that this is not
2130 necessary. */
2131 start = squeeze_notes (start, end);
2132
2133 /* Scramble the insn chain. */
2134 reorder_insns (start, end, a->end);
2135
2136 /* Now blocks A and B are contiguous. Merge them. */
2137 merge_blocks_nomove (a, b);
2138
2139 if (rtl_dump_file)
2140 {
2141 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2142 b->index, a->index);
2143 }
2144
2145 return 1;
2146 }
2147
2148 /* Blocks A and B are to be merged into a single block. The insns
2149 are already contiguous, hence `nomove'. */
2150
2151 static void
2152 merge_blocks_nomove (a, b)
2153 basic_block a, b;
2154 {
2155 edge e;
2156 rtx b_head, b_end, a_end;
2157 int b_empty = 0;
2158
2159 /* If there was a CODE_LABEL beginning B, delete it. */
2160 b_head = b->head;
2161 b_end = b->end;
2162 if (GET_CODE (b_head) == CODE_LABEL)
2163 {
2164 /* Detect basic blocks with nothing but a label. This can happen
2165 in particular at the end of a function. */
2166 if (b_head == b_end)
2167 b_empty = 1;
2168 b_head = flow_delete_insn (b_head);
2169 }
2170
2171 /* Delete the basic block note. */
2172 if (GET_CODE (b_head) == NOTE
2173 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2174 {
2175 if (b_head == b_end)
2176 b_empty = 1;
2177 b_head = flow_delete_insn (b_head);
2178 }
2179
2180 /* If there was a jump out of A, delete it. */
2181 a_end = a->end;
2182 if (GET_CODE (a_end) == JUMP_INSN)
2183 {
2184 rtx prev;
2185
2186 prev = prev_nonnote_insn (a_end);
2187 if (!prev)
2188 prev = a->head;
2189
2190 #ifdef HAVE_cc0
2191 /* If this was a conditional jump, we need to also delete
2192 the insn that set cc0. */
2193
2194 if (prev && sets_cc0_p (prev))
2195 {
2196 rtx tmp = prev;
2197 prev = prev_nonnote_insn (prev);
2198 if (!prev)
2199 prev = a->head;
2200 flow_delete_insn (tmp);
2201 }
2202 #endif
2203
2204 /* Note that a->head != a->end, since we should have at least a
2205 bb note plus the jump, so prev != a_end. */
2206 flow_delete_insn (a_end);
2207 a_end = prev;
2208 }
2209
2210 /* By definition, there should only be one successor of A, and that is
2211 B. Free that edge struct. */
2212 n_edges--;
2213 free (a->succ);
2214
2215 /* Adjust the edges out of B for the new owner. */
2216 for (e = b->succ; e ; e = e->succ_next)
2217 e->src = a;
2218 a->succ = b->succ;
2219
2220 /* Reassociate the insns of B with A. */
2221 if (!b_empty)
2222 {
2223 BLOCK_FOR_INSN (b_head) = a;
2224 while (b_head != b_end)
2225 {
2226 b_head = NEXT_INSN (b_head);
2227 BLOCK_FOR_INSN (b_head) = a;
2228 }
2229 a_end = b_head;
2230 }
2231 a->end = a_end;
2232
2233 /* Compact the basic block array. */
2234 expunge_block (b);
2235 }
2236
2237 /* Attempt to merge basic blocks that are potentially non-adjacent.
2238 Return true iff the attempt succeeded. */
2239
2240 static int
2241 merge_blocks (e, b, c)
2242 edge e;
2243 basic_block b, c;
2244 {
2245 /* If B has a fallthru edge to C, no need to move anything. */
2246 if (e->flags & EDGE_FALLTHRU)
2247 {
2248 /* If a label still appears somewhere and we cannot delete the label,
2249 then we cannot merge the blocks. The edge was tidied already. */
2250
2251 rtx insn, stop = NEXT_INSN (c->head);
2252 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2253 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2254 return 0;
2255
2256 merge_blocks_nomove (b, c);
2257
2258 if (rtl_dump_file)
2259 {
2260 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2261 b->index, c->index);
2262 }
2263
2264 return 1;
2265 }
2266 else
2267 {
2268 edge tmp_edge;
2269 basic_block d;
2270 int c_has_outgoing_fallthru;
2271 int b_has_incoming_fallthru;
2272
2273 /* We must make sure not to munge the nesting of exception regions,
2274 lexical blocks, and loop notes.
2275
2276 The first is taken care of by requiring that the active eh
2277 region at the end of one block always matches the active eh
2278 region at the beginning of the next block.
2279
2280 The latter two are taken care of by squeezing out all the notes. */
2281
2282 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2283 executed, so we may want to treat blocks which have two out
2284 edges, one normal and one abnormal, as having only one edge for
2285 block merging purposes. */
2286
2287 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2288 if (tmp_edge->flags & EDGE_FALLTHRU)
2289 break;
2290 c_has_outgoing_fallthru = (tmp_edge != NULL);
2291
2292 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2293 if (tmp_edge->flags & EDGE_FALLTHRU)
2294 break;
2295 b_has_incoming_fallthru = (tmp_edge != NULL);
2296
2297 /* If B does not have an incoming fallthru, and the exception regions
2298 match, then it can be moved immediately before C without introducing
2299 or modifying jumps.
2300
2301 C cannot be the first block, so we do not have to worry about
2302 accessing a non-existent block. */
2303 d = BASIC_BLOCK (c->index - 1);
2304 if (! b_has_incoming_fallthru
2305 && d->eh_end == b->eh_beg
2306 && b->eh_end == c->eh_beg)
2307 return merge_blocks_move_predecessor_nojumps (b, c);
2308
2309 /* Otherwise, we're going to try to move C after B. Make sure the
2310 exception regions match.
2311
2312 If B is the last basic block, then we must not try to access the
2313 block structure for block B + 1. Luckily in that case we do not
2314 need to worry about matching exception regions. */
2315 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2316 if (b->eh_end == c->eh_beg
2317 && (d == NULL || c->eh_end == d->eh_beg))
2318 {
2319 /* If C does not have an outgoing fallthru, then it can be moved
2320 immediately after B without introducing or modifying jumps. */
2321 if (! c_has_outgoing_fallthru)
2322 return merge_blocks_move_successor_nojumps (b, c);
2323
2324 /* Otherwise, we'll need to insert an extra jump, and possibly
2325 a new block to contain it. */
2326 /* ??? Not implemented yet. */
2327 }
2328
2329 return 0;
2330 }
2331 }
2332
2333 /* Top level driver for merge_blocks. */
2334
2335 static void
2336 try_merge_blocks ()
2337 {
2338 int i;
2339
2340 /* Attempt to merge blocks as made possible by edge removal. If a block
2341 has only one successor, and the successor has only one predecessor,
2342 they may be combined. */
2343
2344 for (i = 0; i < n_basic_blocks; )
2345 {
2346 basic_block c, b = BASIC_BLOCK (i);
2347 edge s;
2348
2349 /* A loop because chains of blocks might be combinable. */
2350 while ((s = b->succ) != NULL
2351 && s->succ_next == NULL
2352 && (s->flags & EDGE_EH) == 0
2353 && (c = s->dest) != EXIT_BLOCK_PTR
2354 && c->pred->pred_next == NULL
2355 /* If the jump insn has side effects, we can't kill the edge. */
2356 && (GET_CODE (b->end) != JUMP_INSN
2357 || onlyjump_p (b->end))
2358 && merge_blocks (s, b, c))
2359 continue;
2360
2361 /* Don't get confused by the index shift caused by deleting blocks. */
2362 i = b->index + 1;
2363 }
2364 }
2365
2366 /* The given edge should potentially be a fallthru edge. If that is in
2367 fact true, delete the jump and barriers that are in the way. */
2368
2369 static void
2370 tidy_fallthru_edge (e, b, c)
2371 edge e;
2372 basic_block b, c;
2373 {
2374 rtx q;
2375
2376 /* ??? In a late-running flow pass, other folks may have deleted basic
2377 blocks by nopping out blocks, leaving multiple BARRIERs between here
2378 and the target label. They ought to be chastised and fixed.
2379
2380 We can also wind up with a sequence of undeletable labels between
2381 one block and the next.
2382
2383 So search through a sequence of barriers, labels, and notes for
2384 the head of block C and assert that we really do fall through. */
2385
2386 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2387 return;
2388
2389 /* Remove what will soon cease being the jump insn from the source block.
2390 If block B consisted only of this single jump, turn it into a deleted
2391 note. */
2392 q = b->end;
2393 if (GET_CODE (q) == JUMP_INSN)
2394 {
2395 #ifdef HAVE_cc0
2396 /* If this was a conditional jump, we need to also delete
2397 the insn that set cc0. */
2398 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2399 q = PREV_INSN (q);
2400 #endif
2401
2402 if (b->head == q)
2403 {
2404 PUT_CODE (q, NOTE);
2405 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2406 NOTE_SOURCE_FILE (q) = 0;
2407 }
2408 else
2409 b->end = q = PREV_INSN (q);
2410 }
2411
2412 /* Selectively unlink the sequence. */
2413 if (q != PREV_INSN (c->head))
2414 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2415
2416 e->flags |= EDGE_FALLTHRU;
2417 }
2418
2419 /* Fix up edges that now fall through, or rather should now fall through
2420 but previously required a jump around now deleted blocks. Simplify
2421 the search by only examining blocks numerically adjacent, since this
2422 is how find_basic_blocks created them. */
2423
2424 static void
2425 tidy_fallthru_edges ()
2426 {
2427 int i;
2428
2429 for (i = 1; i < n_basic_blocks; ++i)
2430 {
2431 basic_block b = BASIC_BLOCK (i - 1);
2432 basic_block c = BASIC_BLOCK (i);
2433 edge s;
2434
2435 /* We care about simple conditional or unconditional jumps with
2436 a single successor.
2437
2438 If we had a conditional branch to the next instruction when
2439 find_basic_blocks was called, then there will only be one
2440 out edge for the block which ended with the conditional
2441 branch (since we do not create duplicate edges).
2442
2443 Furthermore, the edge will be marked as a fallthru because we
2444 merge the flags for the duplicate edges. So we do not want to
2445 check that the edge is not a FALLTHRU edge. */
2446 if ((s = b->succ) != NULL
2447 && s->succ_next == NULL
2448 && s->dest == c
2449 /* If the jump insn has side effects, we can't tidy the edge. */
2450 && (GET_CODE (b->end) != JUMP_INSN
2451 || onlyjump_p (b->end)))
2452 tidy_fallthru_edge (s, b, c);
2453 }
2454 }
2455
2456 /* Discover and record the loop depth at the head of each basic block. */
2457
2458 void
2459 calculate_loop_depth (dump)
2460 FILE *dump;
2461 {
2462 struct loops loops;
2463
2464 /* The loop infrastructure does the real job for us. */
2465 flow_loops_find (&loops);
2466
2467 if (dump)
2468 flow_loops_dump (&loops, dump, 0);
2469
2470 flow_loops_free (&loops);
2471 }
2472 \f
2473 /* Perform data flow analysis.
2474 F is the first insn of the function and NREGS the number of register numbers
2475 in use. */
2476
2477 void
2478 life_analysis (f, nregs, file, remove_dead_code)
2479 rtx f;
2480 int nregs;
2481 FILE *file;
2482 int remove_dead_code;
2483 {
2484 #ifdef ELIMINABLE_REGS
2485 register int i;
2486 static struct { int from, to; } eliminables[] = ELIMINABLE_REGS;
2487 #endif
2488 int flags;
2489 sbitmap all_blocks;
2490
2491 /* Dead code elimination changes basic block structure and therefore
2492 breaks the SSA phi representation. Particularly, a phi node
2493 can have an alternative value for each incoming block, referenced
2494 by the block number. Removing dead code can bump entire blocks
2495 and therefore cause blocks to be renumbered, invalidating the
2496 numbering of phi alternatives. */
2497 if (remove_dead_code && in_ssa_form)
2498 abort ();
2499
2500 /* Record which registers will be eliminated. We use this in
2501 mark_used_regs. */
2502
2503 CLEAR_HARD_REG_SET (elim_reg_set);
2504
2505 #ifdef ELIMINABLE_REGS
2506 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2507 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2508 #else
2509 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2510 #endif
2511
2512 /* We want alias analysis information for local dead store elimination. */
2513 init_alias_analysis ();
2514
2515 if (! optimize)
2516 flags = PROP_DEATH_NOTES | PROP_REG_INFO;
2517 else
2518 {
2519 flags = PROP_FINAL;
2520 if (! remove_dead_code)
2521 flags &= ~(PROP_SCAN_DEAD_CODE | PROP_KILL_DEAD_CODE);
2522 }
2523
2524 /* The post-reload life analysis must have (on a global basis) the
2525 same registers live as was computed by reload itself. Otherwise
2526 elimination offsets and such may be incorrect.
2527
2528 Reload will mark some registers as live even though they do not
2529 appear in the rtl. */
2530 if (reload_completed)
2531 flags &= ~PROP_REG_INFO;
2532
2533 max_regno = nregs;
2534
2535 /* Always remove no-op moves. Do this before other processing so
2536 that we don't have to keep re-scanning them. */
2537 delete_noop_moves (f);
2538
2539 /* Some targets can emit simpler epilogues if they know that sp was
2540 never modified during the function. After reload, of course,
2541 we've already emitted the epilogue so there's no sense searching. */
2542 if (! reload_completed)
2543 notice_stack_pointer_modification (f);
2544
2545 /* Allocate and zero out data structures that will record the
2546 data from lifetime analysis. */
2547 allocate_reg_life_data ();
2548 allocate_bb_life_data ();
2549 reg_next_use = (rtx *) xcalloc (nregs, sizeof (rtx));
2550 all_blocks = sbitmap_alloc (n_basic_blocks);
2551 sbitmap_ones (all_blocks);
2552
2553 /* Find the set of registers live on function exit. */
2554 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2555
2556 /* "Update" life info from zero. It'd be nice to begin the
2557 relaxation with just the exit and noreturn blocks, but that set
2558 is not immediately handy. */
2559
2560 if (flags & PROP_REG_INFO)
2561 memset (regs_ever_live, 0, sizeof (regs_ever_live));
2562 update_life_info (all_blocks, UPDATE_LIFE_GLOBAL, flags);
2563
2564 /* Clean up. */
2565 sbitmap_free (all_blocks);
2566 free (reg_next_use);
2567 reg_next_use = NULL;
2568 end_alias_analysis ();
2569
2570 if (file)
2571 dump_flow_info (file);
2572
2573 free_basic_block_vars (1);
2574 }
2575
2576 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2577 Search for REGNO. If found, abort if it is not wider than word_mode. */
2578
2579 static int
2580 verify_wide_reg_1 (px, pregno)
2581 rtx *px;
2582 void *pregno;
2583 {
2584 rtx x = *px;
2585 unsigned int regno = *(int *) pregno;
2586
2587 if (GET_CODE (x) == REG && REGNO (x) == regno)
2588 {
2589 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2590 abort ();
2591 return 1;
2592 }
2593 return 0;
2594 }
2595
2596 /* A subroutine of verify_local_live_at_start. Search through insns
2597 between HEAD and END looking for register REGNO. */
2598
2599 static void
2600 verify_wide_reg (regno, head, end)
2601 int regno;
2602 rtx head, end;
2603 {
2604 while (1)
2605 {
2606 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2607 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2608 return;
2609 if (head == end)
2610 break;
2611 head = NEXT_INSN (head);
2612 }
2613
2614 /* We didn't find the register at all. Something's way screwy. */
2615 abort ();
2616 }
2617
2618 /* A subroutine of update_life_info. Verify that there are no untoward
2619 changes in live_at_start during a local update. */
2620
2621 static void
2622 verify_local_live_at_start (new_live_at_start, bb)
2623 regset new_live_at_start;
2624 basic_block bb;
2625 {
2626 if (reload_completed)
2627 {
2628 /* After reload, there are no pseudos, nor subregs of multi-word
2629 registers. The regsets should exactly match. */
2630 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2631 abort ();
2632 }
2633 else
2634 {
2635 int i;
2636
2637 /* Find the set of changed registers. */
2638 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2639
2640 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2641 {
2642 /* No registers should die. */
2643 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2644 abort ();
2645 /* Verify that the now-live register is wider than word_mode. */
2646 verify_wide_reg (i, bb->head, bb->end);
2647 });
2648 }
2649 }
2650
2651 /* Updates life information starting with the basic blocks set in BLOCKS.
2652
2653 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholing, we are only
2654 expecting local modifications to basic blocks. If we find extra
2655 registers live at the beginning of a block, then we either killed
2656 useful data, or we have a broken split that wants data not provided.
2657 If we find registers removed from live_at_start, that means we have
2658 a broken peephole that is killing a register it shouldn't.
2659
2660 ??? This is not true in one situation -- when a pre-reload splitter
2661 generates subregs of a multi-word pseudo, current life analysis will
2662 lose the kill. So we _can_ have a pseudo go live. How irritating.
2663
2664 BLOCK_FOR_INSN is assumed to be correct.
2665
2666 PROP_FLAGS should not contain PROP_LOG_LINKS unless the caller sets
2667 up reg_next_use. Including PROP_REG_INFO does not properly refresh
2668 regs_ever_live unless the caller resets it to zero. */
2669
2670 void
2671 update_life_info (blocks, extent, prop_flags)
2672 sbitmap blocks;
2673 enum update_life_extent extent;
2674 int prop_flags;
2675 {
2676 regset tmp;
2677 regset_head tmp_head;
2678 int i;
2679
2680 tmp = INITIALIZE_REG_SET (tmp_head);
2681
2682 /* For a global update, we go through the relaxation process again. */
2683 if (extent != UPDATE_LIFE_LOCAL)
2684 {
2685 calculate_global_regs_live (blocks, blocks,
2686 prop_flags & PROP_SCAN_DEAD_CODE);
2687
2688 /* If asked, remove notes from the blocks we'll update. */
2689 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2690 count_or_remove_death_notes (blocks, 1);
2691 }
2692
2693 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2694 {
2695 basic_block bb = BASIC_BLOCK (i);
2696
2697 COPY_REG_SET (tmp, bb->global_live_at_end);
2698 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2699
2700 if (extent == UPDATE_LIFE_LOCAL)
2701 verify_local_live_at_start (tmp, bb);
2702 });
2703
2704 FREE_REG_SET (tmp);
2705
2706 if (prop_flags & PROP_REG_INFO)
2707 {
2708 /* The only pseudos that are live at the beginning of the function
2709 are those that were not set anywhere in the function. local-alloc
2710 doesn't know how to handle these correctly, so mark them as not
2711 local to any one basic block. */
2712 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2713 FIRST_PSEUDO_REGISTER, i,
2714 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2715
2716 /* We have a problem with any pseudoreg that lives across the setjmp.
2717 ANSI says that if a user variable does not change in value between
2718 the setjmp and the longjmp, then the longjmp preserves it. This
2719 includes longjmp from a place where the pseudo appears dead.
2720 (In principle, the value still exists if it is in scope.)
2721 If the pseudo goes in a hard reg, some other value may occupy
2722 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2723 Conclusion: such a pseudo must not go in a hard reg. */
2724 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2725 FIRST_PSEUDO_REGISTER, i,
2726 {
2727 if (regno_reg_rtx[i] != 0)
2728 {
2729 REG_LIVE_LENGTH (i) = -1;
2730 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2731 }
2732 });
2733 }
2734 }
2735
2736 /* Free the variables allocated by find_basic_blocks.
2737
2738 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2739
2740 void
2741 free_basic_block_vars (keep_head_end_p)
2742 int keep_head_end_p;
2743 {
2744 if (basic_block_for_insn)
2745 {
2746 VARRAY_FREE (basic_block_for_insn);
2747 basic_block_for_insn = NULL;
2748 }
2749
2750 if (! keep_head_end_p)
2751 {
2752 clear_edges ();
2753 VARRAY_FREE (basic_block_info);
2754 n_basic_blocks = 0;
2755
2756 ENTRY_BLOCK_PTR->aux = NULL;
2757 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2758 EXIT_BLOCK_PTR->aux = NULL;
2759 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2760 }
2761 }
2762
2763 /* Return nonzero if the destination of SET equals the source. */
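/* For example, (set (reg 42) (reg 42)) is a no-op, as is a copy
   between SUBREGs of the same register at the same word offset,
   e.g. (set (subreg (reg 42) 1) (subreg (reg 42) 1)).  */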
2764 static int
2765 set_noop_p (set)
2766 rtx set;
2767 {
2768 rtx src = SET_SRC (set);
2769 rtx dst = SET_DEST (set);
2770
2771 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2772 {
2773 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2774 return 0;
2775 src = SUBREG_REG (src);
2776 dst = SUBREG_REG (dst);
2777 }
2778
2779 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2780 && REGNO (src) == REGNO (dst));
2781 }
2782
2783 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2784 value to itself. */
2785 static int
2786 noop_move_p (insn)
2787 rtx insn;
2788 {
2789 rtx pat = PATTERN (insn);
2790
2791 /* Insns carrying these notes are useful later on. */
2792 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2793 return 0;
2794
2795 if (GET_CODE (pat) == SET && set_noop_p (pat))
2796 return 1;
2797
2798 if (GET_CODE (pat) == PARALLEL)
2799 {
2800 int i;
2801 /* If nothing but SETs of registers to themselves,
2802 this insn can also be deleted. */
2803 for (i = 0; i < XVECLEN (pat, 0); i++)
2804 {
2805 rtx tem = XVECEXP (pat, 0, i);
2806
2807 if (GET_CODE (tem) == USE
2808 || GET_CODE (tem) == CLOBBER)
2809 continue;
2810
2811 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2812 return 0;
2813 }
2814
2815 return 1;
2816 }
2817 return 0;
2818 }
2819
2820 /* Delete any insns that copy a register to itself. */
2821
2822 static void
2823 delete_noop_moves (f)
2824 rtx f;
2825 {
2826 rtx insn;
2827 for (insn = f; insn; insn = NEXT_INSN (insn))
2828 {
2829 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2830 {
2831 PUT_CODE (insn, NOTE);
2832 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2833 NOTE_SOURCE_FILE (insn) = 0;
2834 }
2835 }
2836 }
2837
2838 /* Determine if the stack pointer is constant over the life of the function.
2839 Only useful before prologues have been emitted. */
2840
2841 static void
2842 notice_stack_pointer_modification_1 (x, pat, data)
2843 rtx x;
2844 rtx pat ATTRIBUTE_UNUSED;
2845 void *data ATTRIBUTE_UNUSED;
2846 {
2847 if (x == stack_pointer_rtx
2848 /* The stack pointer is only modified indirectly as the result
2849 of a push until later in flow. See the comments in rtl.texi
2850 regarding Embedded Side-Effects on Addresses. */
2851 || (GET_CODE (x) == MEM
2852 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2853 || GET_CODE (XEXP (x, 0)) == PRE_INC
2854 || GET_CODE (XEXP (x, 0)) == POST_DEC
2855 || GET_CODE (XEXP (x, 0)) == POST_INC)
2856 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2857 current_function_sp_is_unchanging = 0;
2858 }
2859
2860 static void
2861 notice_stack_pointer_modification (f)
2862 rtx f;
2863 {
2864 rtx insn;
2865
2866 /* Assume that the stack pointer is unchanging if alloca hasn't
2867 been used. */
2868 current_function_sp_is_unchanging = !current_function_calls_alloca;
2869 if (! current_function_sp_is_unchanging)
2870 return;
2871
2872 for (insn = f; insn; insn = NEXT_INSN (insn))
2873 {
2874 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2875 {
2876 /* Check if insn modifies the stack pointer. */
2877 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
2878 NULL);
2879 if (! current_function_sp_is_unchanging)
2880 return;
2881 }
2882 }
2883 }
2884
2885 /* Mark a register in SET. Hard registers in large modes get all
2886 of their component registers set as well. */
2887 static void
2888 mark_reg (reg, xset)
2889 rtx reg;
2890 void *xset;
2891 {
2892 regset set = (regset) xset;
2893 int regno = REGNO (reg);
2894
2895 if (GET_MODE (reg) == BLKmode)
2896 abort ();
2897
2898 SET_REGNO_REG_SET (set, regno);
2899 if (regno < FIRST_PSEUDO_REGISTER)
2900 {
2901 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2902 while (--n > 0)
2903 SET_REGNO_REG_SET (set, regno + n);
2904 }
2905 }
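/* For illustration, assuming a 32-bit target where
   HARD_REGNO_NREGS (r, DImode) == 2: marking a DImode value in hard
   reg r sets both bit r and bit r + 1 in the regset.  */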
2906
2907 /* Mark those regs which are needed at the end of the function as live
2908 at the end of the last basic block. */
2909 static void
2910 mark_regs_live_at_end (set)
2911 regset set;
2912 {
2913 int i;
2914
2915 /* If exiting needs the right stack value, consider the stack pointer
2916 live at the end of the function. */
2917 if ((HAVE_epilogue && reload_completed)
2918 || ! EXIT_IGNORE_STACK
2919 || (! FRAME_POINTER_REQUIRED
2920 && ! current_function_calls_alloca
2921 && flag_omit_frame_pointer)
2922 || current_function_sp_is_unchanging)
2923 {
2924 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2925 }
2926
2927 /* Mark the frame pointer if needed at the end of the function. If
2928 we end up eliminating it, it will be removed from the live list
2929 of each basic block by reload. */
2930
2931 if (! reload_completed || frame_pointer_needed)
2932 {
2933 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2934 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2935 /* If they are different, also mark the hard frame pointer as live. */
2936 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2937 #endif
2938 }
2939
2940 #ifdef PIC_OFFSET_TABLE_REGNUM
2941 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2942 /* Many architectures have a GP register even without flag_pic.
2943 Assume the pic register is not in use, or will be handled by
2944 other means, if it is not fixed. */
2945 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2946 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2947 #endif
2948 #endif
2949
2950 /* Mark all global registers, and all registers used by the epilogue
2951 as being live at the end of the function since they may be
2952 referenced by our caller. */
2953 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2954 if (global_regs[i]
2955 #ifdef EPILOGUE_USES
2956 || EPILOGUE_USES (i)
2957 #endif
2958 )
2959 SET_REGNO_REG_SET (set, i);
2960
2961 /* Mark all call-saved registers that we actually used. */
2962 if (HAVE_epilogue && reload_completed)
2963 {
2964 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2965 if (! call_used_regs[i] && regs_ever_live[i])
2966 SET_REGNO_REG_SET (set, i);
2967 }
2968
2969 /* Mark function return value. */
2970 diddle_return_value (mark_reg, set);
2971 }
2972
2973 /* Callback function for for_each_successor_phi. DATA is a regset.
2974 Sets SRC_REGNO, the regno of the phi alternative for phi node
2975 INSN, in the regset. */
2976
2977 static int
2978 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
2979 rtx insn ATTRIBUTE_UNUSED;
2980 int dest_regno ATTRIBUTE_UNUSED;
2981 int src_regno;
2982 void *data;
2983 {
2984 regset live = (regset) data;
2985 SET_REGNO_REG_SET (live, src_regno);
2986 return 0;
2987 }
2988
2989 /* Propagate global life info around the graph of basic blocks. Begin
2990 considering blocks with their corresponding bit set in BLOCKS_IN.
2991 BLOCKS_OUT is set for every block that was changed. */
2992
2993 static void
2994 calculate_global_regs_live (blocks_in, blocks_out, flags)
2995 sbitmap blocks_in, blocks_out;
2996 int flags;
2997 {
2998 basic_block *queue, *qhead, *qtail, *qend;
2999 regset tmp, new_live_at_end;
3000 regset_head tmp_head;
3001 regset_head new_live_at_end_head;
3002 int i;
3003
3004 tmp = INITIALIZE_REG_SET (tmp_head);
3005 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3006
3007 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3008 because the `head == tail' style test for an empty queue doesn't
3009 work with a full queue. */
3010 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3011 qtail = queue;
3012 qhead = qend = queue + n_basic_blocks + 2;
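/* The queue is used as a ring buffer: qhead chases qtail and both
   wrap from qend back to queue.  The two slots of slack ensure that
   even with all n_basic_blocks blocks plus ENTRY_BLOCK queued,
   qhead == qtail can only mean an empty queue, never a full one.  */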
3013
3014 /* Clear out the garbage that might be hanging out in bb->aux. */
3015 for (i = n_basic_blocks - 1; i >= 0; --i)
3016 BASIC_BLOCK (i)->aux = NULL;
3017
3018 /* Queue the blocks set in the initial mask. Do this in reverse block
3019 number order so that the first round is more likely to do useful
3020 work. We use AUX non-null to flag that the block is queued. */
3021 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3022 {
3023 basic_block bb = BASIC_BLOCK (i);
3024 *--qhead = bb;
3025 bb->aux = bb;
3026 });
3027
3028 sbitmap_zero (blocks_out);
3029
3030 while (qhead != qtail)
3031 {
3032 int rescan, changed;
3033 basic_block bb;
3034 edge e;
3035
3036 bb = *qhead++;
3037 if (qhead == qend)
3038 qhead = queue;
3039 bb->aux = NULL;
3040
3041 /* Begin by propagating live_at_start from the successor blocks. */
3042 CLEAR_REG_SET (new_live_at_end);
3043 for (e = bb->succ; e ; e = e->succ_next)
3044 {
3045 basic_block sb = e->dest;
3046 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3047 }
3048
3049 /* Regs used in phi nodes are not included in
3050 global_live_at_start, since they are live only along a
3051 particular edge. Set those regs that are live because of a
3052 phi node alternative corresponding to this particular block. */
3053 for_each_successor_phi (bb->index, &set_phi_alternative_reg,
3054 new_live_at_end);
3055
3056 if (bb == ENTRY_BLOCK_PTR)
3057 {
3058 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3059 continue;
3060 }
3061
3062 /* On our first pass through this block, we'll go ahead and continue.
3063 Recognize the first pass by local_set being NULL. On subsequent passes, we
3064 get to skip out early if live_at_end wouldn't have changed. */
3065
3066 if (bb->local_set == NULL)
3067 {
3068 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3069 rescan = 1;
3070 }
3071 else
3072 {
3073 /* If any bits were removed from live_at_end, we'll have to
3074 rescan the block. This wouldn't be necessary if we had
3075 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3076 local_live is really dependent on live_at_end. */
3077 CLEAR_REG_SET (tmp);
3078 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3079 new_live_at_end, BITMAP_AND_COMPL);
3080
3081 if (! rescan)
3082 {
3083 /* Find the set of changed bits. Take this opportunity
3084 to notice when this set is empty and exit early. */
3085 CLEAR_REG_SET (tmp);
3086 changed = bitmap_operation (tmp, bb->global_live_at_end,
3087 new_live_at_end, BITMAP_XOR);
3088 if (! changed)
3089 continue;
3090
3091 /* If any of the changed bits overlap with local_set,
3092 we'll have to rescan the block. Detect overlap by
3093 the AND with ~local_set turning off bits. */
3094 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3095 BITMAP_AND_COMPL);
3096 }
3097 }
3098
3099 /* Let our caller know that BB changed enough to require its
3100 death notes updated. */
3101 SET_BIT (blocks_out, bb->index);
3102
3103 if (! rescan)
3104 {
3105 /* Add to live_at_start the set of all registers in
3106 new_live_at_end that aren't in the old live_at_end. */
3107
3108 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3109 BITMAP_AND_COMPL);
3110 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3111
3112 changed = bitmap_operation (bb->global_live_at_start,
3113 bb->global_live_at_start,
3114 tmp, BITMAP_IOR);
3115 if (! changed)
3116 continue;
3117 }
3118 else
3119 {
3120 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3121
3122 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3123 into live_at_start. */
3124 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3125
3126 /* If live_at_start didn't change, no need to go further. */
3127 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3128 continue;
3129
3130 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3131 }
3132
3133 /* Queue all predecessors of BB so that we may re-examine
3134 their live_at_end. */
3135 for (e = bb->pred; e ; e = e->pred_next)
3136 {
3137 basic_block pb = e->src;
3138 if (pb->aux == NULL)
3139 {
3140 *qtail++ = pb;
3141 if (qtail == qend)
3142 qtail = queue;
3143 pb->aux = pb;
3144 }
3145 }
3146 }
3147
3148 FREE_REG_SET (tmp);
3149 FREE_REG_SET (new_live_at_end);
3150
3151 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3152 {
3153 basic_block bb = BASIC_BLOCK (i);
3154 FREE_REG_SET (bb->local_set);
3155 });
3156
3157 free (queue);
3158 }
3159 \f
3160 /* Subroutines of life analysis. */
3161
3162 /* Allocate the permanent data structures that represent the results
3163 of life analysis. Not static since used also for stupid life analysis. */
3164
3165 void
3166 allocate_bb_life_data ()
3167 {
3168 register int i;
3169
3170 for (i = 0; i < n_basic_blocks; i++)
3171 {
3172 basic_block bb = BASIC_BLOCK (i);
3173
3174 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3175 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3176 }
3177
3178 ENTRY_BLOCK_PTR->global_live_at_end
3179 = OBSTACK_ALLOC_REG_SET (function_obstack);
3180 EXIT_BLOCK_PTR->global_live_at_start
3181 = OBSTACK_ALLOC_REG_SET (function_obstack);
3182
3183 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3184 }
3185
3186 void
3187 allocate_reg_life_data ()
3188 {
3189 int i;
3190
3191 /* Recalculate the register space, in case it has grown. Old style
3192 vector oriented regsets would set regset_{size,bytes} here also. */
3193 allocate_reg_info (max_regno, FALSE, FALSE);
3194
3195 /* Reset all the data we'll collect in propagate_block and its
3196 subroutines. */
3197 for (i = 0; i < max_regno; i++)
3198 {
3199 REG_N_SETS (i) = 0;
3200 REG_N_REFS (i) = 0;
3201 REG_N_DEATHS (i) = 0;
3202 REG_N_CALLS_CROSSED (i) = 0;
3203 REG_LIVE_LENGTH (i) = 0;
3204 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3205 }
3206 }
3207
3208 /* Compute the registers live at the beginning of basic block BB
3209 from those live at the end.
3210
3211 When called, OLD contains those live at the end.
3212 On return, it contains those live at the beginning.
3213
3214 FLAGS is the set of PROP_* flags selecting the optional work to do.
3215 On a final pass (PROP_FINAL) we are not computing the life info
3216 (since that has already been done) but acting on it: we delete
3217 dead stores, set up the logical links and dead-variables lists of
3218 instructions, and merge instructions for autoincrement and
3219 autodecrement addresses.
3220
3221 SIGNIFICANT is non-null only the first time for each basic block.
3222 If it is non-null, it points to a regset in which we store
3223 a 1 for each register that is set within the block. */
3226
3227 static void
3228 propagate_block (bb, old, significant, flags)
3229 basic_block bb;
3230 regset old;
3231 regset significant;
3232 int flags;
3233 {
3234 register rtx insn;
3235 rtx prev;
3236 regset live;
3237 regset_head live_head;
3238 regset dead;
3239 regset_head dead_head;
3240
3241 /* Find the loop depth for this block. Ignore loop level changes in the
3242 middle of the basic block -- for register allocation purposes, the
3243 important uses will be in the blocks wholly contained within the loop
3244 not in the loop pre-header or post-trailer. */
3245 loop_depth = bb->loop_depth;
3246
3247 dead = INITIALIZE_REG_SET (dead_head);
3248 live = INITIALIZE_REG_SET (live_head);
3249
3250 cc0_live = 0;
3251
3252 if (flags & PROP_REG_INFO)
3253 {
3254 register int i;
3255
3256 /* Process the regs live at the end of the block.
3257 Mark them as not local to any one basic block. */
3258 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3259 {
3260 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
3261 });
3262 }
3263
3264 /* Scan the block an insn at a time from end to beginning. */
3265
3266 for (insn = bb->end; ; insn = prev)
3267 {
3268 prev = PREV_INSN (insn);
3269
3270 if (GET_CODE (insn) == NOTE)
3271 {
3272 /* If this is a call to `setjmp' et al,
3273 warn if any non-volatile datum is live. */
3274
3275 if ((flags & PROP_REG_INFO)
3276 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3277 IOR_REG_SET (regs_live_at_setjmp, old);
3278 }
3279
3280 /* Update the life-status of regs for this insn.
3281 First DEAD gets which regs are set in this insn
3282 then LIVE gets which regs are used in this insn.
3283 Then the regs live before the insn
3284 are those live after, with DEAD regs turned off,
3285 and then LIVE regs turned on. */
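/* As a concrete (hypothetical) example: for an insn whose pattern is
   (set (reg 1) (plus (reg 2) (reg 3))), DEAD = {r1} and LIVE =
   {r2, r3}, so the regs live before the insn are computed as
   (old - {r1}) union {r2, r3}.  */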
3286
3287 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3288 {
3289 register int i;
3290 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3291 int insn_is_dead = 0;
3292 int libcall_is_dead = 0;
3293
3294 if (flags & PROP_SCAN_DEAD_CODE)
3295 {
3296 insn_is_dead = insn_dead_p (PATTERN (insn), old, 0,
3297 REG_NOTES (insn));
3298 libcall_is_dead = (insn_is_dead && note != 0
3299 && libcall_dead_p (PATTERN (insn), old,
3300 note, insn));
3301 }
3302
3303 /* We almost certainly don't want to delete prologue or epilogue
3304 instructions. Warn about probable compiler lossage. */
3305 if (insn_is_dead
3306 && reload_completed
3307 && (((HAVE_epilogue || HAVE_prologue)
3308 && prologue_epilogue_contains (insn))
3309 || (HAVE_sibcall_epilogue
3310 && sibcall_epilogue_contains (insn))))
3311 {
3312 if (flags & PROP_KILL_DEAD_CODE)
3313 {
3314 warning ("ICE: would have deleted prologue/epilogue insn");
3315 if (!inhibit_warnings)
3316 debug_rtx (insn);
3317 }
3318 libcall_is_dead = insn_is_dead = 0;
3319 }
3320
3321 /* If an instruction consists of just dead store(s) on final pass,
3322 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
3323 We could really delete it with delete_insn, but that
3324 can cause trouble for first or last insn in a basic block. */
3325 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3326 {
3327 rtx inote;
3328 /* If the insn referred to a label, note that the label is
3329 now less used. */
3330 for (inote = REG_NOTES (insn); inote; inote = XEXP (inote, 1))
3331 {
3332 if (REG_NOTE_KIND (inote) == REG_LABEL)
3333 {
3334 rtx label = XEXP (inote, 0);
3335 rtx next;
3336 LABEL_NUSES (label)--;
3337
3338 /* If this label was attached to an ADDR_VEC, it's
3339 safe to delete the ADDR_VEC. In fact, it's pretty
3340 much mandatory to delete it, because the ADDR_VEC may
3341 be referencing labels that no longer exist. */
3342 if (LABEL_NUSES (label) == 0
3343 && (next = next_nonnote_insn (label)) != NULL
3344 && GET_CODE (next) == JUMP_INSN
3345 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3346 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3347 {
3348 rtx pat = PATTERN (next);
3349 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3350 int len = XVECLEN (pat, diff_vec_p);
3351 int i;
3352 for (i = 0; i < len; i++)
3353 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3354 PUT_CODE (next, NOTE);
3355 NOTE_LINE_NUMBER (next) = NOTE_INSN_DELETED;
3356 NOTE_SOURCE_FILE (next) = 0;
3357
3358 if ((next = next_nonnote_insn (label)) != NULL
3359 && GET_CODE (next) == BARRIER)
3360 {
3361 PUT_CODE (next, NOTE);
3362 NOTE_LINE_NUMBER (next) = NOTE_INSN_DELETED;
3363 NOTE_SOURCE_FILE (next) = 0;
3364 }
3365 }
3366 }
3367 }
3368
3369 PUT_CODE (insn, NOTE);
3370 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3371 NOTE_SOURCE_FILE (insn) = 0;
3372
3373 /* CC0 is now known to be dead. Either this insn used it,
3374 in which case it doesn't anymore, or clobbered it,
3375 so the next insn can't use it. */
3376 cc0_live = 0;
3377
3378 /* If this insn is copying the return value from a library call,
3379 delete the entire library call. */
3380 if (libcall_is_dead)
3381 {
3382 rtx first = XEXP (note, 0);
3383 rtx p = insn;
3384 while (INSN_DELETED_P (first))
3385 first = NEXT_INSN (first);
3386 while (p != first)
3387 {
3388 p = PREV_INSN (p);
3389 PUT_CODE (p, NOTE);
3390 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3391 NOTE_SOURCE_FILE (p) = 0;
3392 }
3393 }
3394 goto flushed;
3395 }
3396
3397 CLEAR_REG_SET (dead);
3398 CLEAR_REG_SET (live);
3399
3400 /* See if this is an increment or decrement that can be
3401 merged into a following memory address. */
3402 #ifdef AUTO_INC_DEC
3403 {
3404 register rtx x = single_set (insn);
3405
3406 /* Does this instruction increment or decrement a register? */
3407 if (!reload_completed
3408 && (flags & PROP_AUTOINC)
3409 && x != 0
3410 && GET_CODE (SET_DEST (x)) == REG
3411 && (GET_CODE (SET_SRC (x)) == PLUS
3412 || GET_CODE (SET_SRC (x)) == MINUS)
3413 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3414 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3415 /* Ok, look for a following memory ref we can combine with.
3416 If one is found, change the memory ref to a PRE_INC
3417 or PRE_DEC, cancel this insn, and return 1.
3418 Return 0 if nothing has been done. */
3419 && try_pre_increment_1 (insn))
3420 goto flushed;
3421 }
3422 #endif /* AUTO_INC_DEC */
3423
3424 /* If this is not the final pass, and this insn is copying the
3425 value of a library call and it's dead, don't scan the
3426 insns that perform the library call, so that the call's
3427 arguments are not marked live. */
3428 if (libcall_is_dead)
3429 {
3430 /* Mark the dest reg as `significant'. */
3431 mark_set_regs (old, dead, PATTERN (insn), NULL_RTX,
3432 significant, flags);
3433
3434 insn = XEXP (note, 0);
3435 prev = PREV_INSN (insn);
3436 }
3437 else if (GET_CODE (PATTERN (insn)) == SET
3438 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3439 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3440 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3441 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3442 /* We have an insn to pop a constant amount off the stack.
3443 (Such insns use PLUS regardless of the direction of the stack,
3444 and any insn to adjust the stack by a constant is always a pop.)
3445 These insns, if not dead stores, have no effect on life. */
3446 ;
3447 else
3448 {
3449 /* Any regs live at the time of a call instruction
3450 must not go in a register clobbered by calls.
3451 Find all regs now live and record this for them. */
3452
3453 if (GET_CODE (insn) == CALL_INSN
3454 && (flags & PROP_REG_INFO))
3455 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3456 {
3457 REG_N_CALLS_CROSSED (i)++;
3458 });
3459
3460 /* LIVE gets the regs used in INSN;
3461 DEAD gets those set by it. Dead insns don't make anything
3462 live. */
3463
3464 mark_set_regs (old, dead, PATTERN (insn),
3465 insn, significant, flags);
3466
3467 /* If an insn doesn't use CC0, it becomes dead since we
3468 assume that every insn clobbers it. So show it dead here;
3469 mark_used_regs will set it live if it is referenced. */
3470 cc0_live = 0;
3471
3472 if (! insn_is_dead)
3473 mark_used_regs (old, live, PATTERN (insn), flags, insn);
3474
3475 /* Sometimes we may have inserted something before INSN (such as
3476 a move) when we make an auto-inc. So ensure we will scan
3477 those insns. */
3478 #ifdef AUTO_INC_DEC
3479 prev = PREV_INSN (insn);
3480 #endif
3481
3482 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3483 {
3484 register int i;
3485
3486 rtx note;
3487
3488 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3489 note;
3490 note = XEXP (note, 1))
3491 if (GET_CODE (XEXP (note, 0)) == USE)
3492 mark_used_regs (old, live, XEXP (XEXP (note, 0), 0),
3493 flags, insn);
3494
3495 /* Each call clobbers all call-clobbered regs that are not
3496 global or fixed. Note that the function-value reg is a
3497 call-clobbered reg, and mark_set_regs has already had
3498 a chance to handle it. */
3499
3500 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3501 if (call_used_regs[i] && ! global_regs[i]
3502 && ! fixed_regs[i])
3503 {
3504 SET_REGNO_REG_SET (dead, i);
3505 if (significant)
3506 SET_REGNO_REG_SET (significant, i);
3507 }
3508
3509 /* The stack ptr is used (honorarily) by a CALL insn. */
3510 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
3511
3512 /* Calls may also reference any of the global registers,
3513 so they are made live. */
3514 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3515 if (global_regs[i])
3516 mark_used_regs (old, live,
3517 gen_rtx_REG (reg_raw_mode[i], i),
3518 flags, insn);
3519
3520 /* Calls also clobber memory. */
3521 free_EXPR_LIST_list (&mem_set_list);
3522 }
3523
3524 /* Update OLD for the registers used or set. */
3525 AND_COMPL_REG_SET (old, dead);
3526 IOR_REG_SET (old, live);
3527
3528 }
3529
3530 /* On the final pass, update counts of the number of insns in
3531 which each reg is live. */
3532 if (flags & PROP_REG_INFO)
3533 EXECUTE_IF_SET_IN_REG_SET (old, 0, i, { REG_LIVE_LENGTH (i)++; });
3534 }
3535 flushed:
3536 if (insn == bb->head)
3537 break;
3538 }
3539
3540 FREE_REG_SET (dead);
3541 FREE_REG_SET (live);
3542 free_EXPR_LIST_list (&mem_set_list);
3543 }
3544 \f
3545 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3546 (SET expressions whose destinations are registers dead after the insn).
3547 NEEDED is the regset that says which regs are alive after the insn.
3548
3549 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3550
3551 If X is the entire body of an insn, NOTES contains the reg notes
3552 pertaining to the insn. */
3553
3554 static int
3555 insn_dead_p (x, needed, call_ok, notes)
3556 rtx x;
3557 regset needed;
3558 int call_ok;
3559 rtx notes ATTRIBUTE_UNUSED;
3560 {
3561 enum rtx_code code = GET_CODE (x);
3562
3563 #ifdef AUTO_INC_DEC
3564 /* If flow is invoked after reload, we must take existing AUTO_INC
3565 expressions into account. */
3566 if (reload_completed)
3567 {
3568 for ( ; notes; notes = XEXP (notes, 1))
3569 {
3570 if (REG_NOTE_KIND (notes) == REG_INC)
3571 {
3572 int regno = REGNO (XEXP (notes, 0));
3573
3574 /* Don't delete insns to set global regs. */
3575 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3576 || REGNO_REG_SET_P (needed, regno))
3577 return 0;
3578 }
3579 }
3580 }
3581 #endif
3582
3583 /* If setting something that's a reg or part of one,
3584 see if that register's altered value will be live. */
3585
3586 if (code == SET)
3587 {
3588 rtx r = SET_DEST (x);
3589
3590 #ifdef HAVE_cc0
3591 if (GET_CODE (r) == CC0)
3592 return ! cc0_live;
3593 #endif
3594
3595 /* A SET that is a subroutine call cannot be dead. */
3596 if (GET_CODE (SET_SRC (x)) == CALL)
3597 {
3598 if (! call_ok)
3599 return 0;
3600 }
3601
3602 /* Don't eliminate loads from volatile memory or volatile asms. */
3603 else if (volatile_refs_p (SET_SRC (x)))
3604 return 0;
3605
3606 if (GET_CODE (r) == MEM)
3607 {
3608 rtx temp;
3609
3610 if (MEM_VOLATILE_P (r))
3611 return 0;
3612
3613 /* Walk the set of memory locations we are currently tracking
3614 and see if one is an identical match to this memory location.
3615 If so, this memory write is dead (remember, we're walking
3616 backwards from the end of the block to the start). */
3617 temp = mem_set_list;
3618 while (temp)
3619 {
3620 if (rtx_equal_p (XEXP (temp, 0), r))
3621 return 1;
3622 temp = XEXP (temp, 1);
3623 }
3624 }
3625 else
3626 {
3627 while (GET_CODE (r) == SUBREG
3628 || GET_CODE (r) == STRICT_LOW_PART
3629 || GET_CODE (r) == ZERO_EXTRACT)
3630 r = XEXP (r, 0);
3631
3632 if (GET_CODE (r) == REG)
3633 {
3634 int regno = REGNO (r);
3635
3636 /* Obvious. */
3637 if (REGNO_REG_SET_P (needed, regno))
3638 return 0;
3639
3640 /* If this is a hard register, verify that subsequent
3641 words are not needed. */
3642 if (regno < FIRST_PSEUDO_REGISTER)
3643 {
3644 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3645
3646 while (--n > 0)
3647 if (REGNO_REG_SET_P (needed, regno+n))
3648 return 0;
3649 }
3650
3651 /* Don't delete insns to set global regs. */
3652 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3653 return 0;
3654
3655 /* Make sure insns to set the stack pointer aren't deleted. */
3656 if (regno == STACK_POINTER_REGNUM)
3657 return 0;
3658
3659 /* Make sure insns to set the frame pointer aren't deleted. */
3660 if (regno == FRAME_POINTER_REGNUM
3661 && (! reload_completed || frame_pointer_needed))
3662 return 0;
3663 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3664 if (regno == HARD_FRAME_POINTER_REGNUM
3665 && (! reload_completed || frame_pointer_needed))
3666 return 0;
3667 #endif
3668
3669 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3670 /* Make sure insns to set arg pointer are never deleted
3671 (if the arg pointer isn't fixed, there will be a USE
3672 for it, so we can treat it normally). */
3673 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3674 return 0;
3675 #endif
3676
3677 /* Otherwise, the set is dead. */
3678 return 1;
3679 }
3680 }
3681 }
3682
3683 /* If performing several activities, insn is dead if each activity
3684 is individually dead. Also, CLOBBERs and USEs can be ignored; a
3685 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
3686 worth keeping. */
3687 else if (code == PARALLEL)
3688 {
3689 int i = XVECLEN (x, 0);
3690
3691 for (i--; i >= 0; i--)
3692 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3693 && GET_CODE (XVECEXP (x, 0, i)) != USE
3694 && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok, NULL_RTX))
3695 return 0;
3696
3697 return 1;
3698 }
3699
3700 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3701 is not necessarily true for hard registers. */
3702 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3703 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3704 && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
3705 return 1;
3706
3707 /* We do not check other CLOBBER or USE here. An insn consisting of just
3708 a CLOBBER or just a USE should not be deleted. */
3709 return 0;
3710 }
3711
3712 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3713 return 1 if the entire library call is dead.
3714 This is true if X copies a register (hard or pseudo)
3715 and if the hard return reg of the call insn is dead.
3716 (The caller should have tested the destination of X already for death.)
3717
3718 If this insn doesn't just copy a register, then we don't
3719 have an ordinary libcall. In that case, cse could not have
3720 managed to substitute the source for the dest later on,
3721 so we can assume the libcall is dead.
3722
3723 NEEDED is the bit vector of pseudoregs live before this insn.
3724 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
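
/* A hypothetical sketch of a typical libcall block (the exact
   patterns are target dependent):

       (insn first (set (reg 100) (reg:SI 4)) ... REG_LIBCALL -> last)
       (call_insn (set (reg:SI 0) (call ...)) ...)
       (insn last (set (reg 101) (reg:SI 0)) ... REG_RETVAL -> first)

   If (reg 101) is dead and the hard return register 0 is dead after
   the final copy, the whole block can be deleted.  */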
3725
3726 static int
3727 libcall_dead_p (x, needed, note, insn)
3728 rtx x;
3729 regset needed;
3730 rtx note;
3731 rtx insn;
3732 {
3733 register RTX_CODE code = GET_CODE (x);
3734
3735 if (code == SET)
3736 {
3737 register rtx r = SET_SRC (x);
3738 if (GET_CODE (r) == REG)
3739 {
3740 rtx call = XEXP (note, 0);
3741 rtx call_pat;
3742 register int i;
3743
3744 /* Find the call insn. */
3745 while (call != insn && GET_CODE (call) != CALL_INSN)
3746 call = NEXT_INSN (call);
3747
3748 /* If there is none, do nothing special,
3749 since ordinary death handling can understand these insns. */
3750 if (call == insn)
3751 return 0;
3752
3753 /* See if the hard reg holding the value is dead.
3754 If this is a PARALLEL, find the call within it. */
3755 call_pat = PATTERN (call);
3756 if (GET_CODE (call_pat) == PARALLEL)
3757 {
3758 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3759 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3760 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3761 break;
3762
3763 /* This may be a library call that is returning a value
3764 via invisible pointer. Do nothing special, since
3765 ordinary death handling can understand these insns. */
3766 if (i < 0)
3767 return 0;
3768
3769 call_pat = XVECEXP (call_pat, 0, i);
3770 }
3771
3772 return insn_dead_p (call_pat, needed, 1, REG_NOTES (call));
3773 }
3774 }
3775 return 1;
3776 }
3777
3778 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3779 live at function entry. Don't count global register variables, variables
3780 in registers that can be used for function arg passing, or variables in
3781 fixed hard registers. */
3782
3783 int
3784 regno_uninitialized (regno)
3785 int regno;
3786 {
3787 if (n_basic_blocks == 0
3788 || (regno < FIRST_PSEUDO_REGISTER
3789 && (global_regs[regno]
3790 || fixed_regs[regno]
3791 || FUNCTION_ARG_REGNO_P (regno))))
3792 return 0;
3793
3794 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3795 }
3796
3797 /* 1 if register REGNO was alive at a place where `setjmp' was called
3798 and was set more than once or is an argument.
3799 Such regs may be clobbered by `longjmp'. */
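
/* For instance (a hypothetical sketch):

       int i = 1;
       if (setjmp (buf) == 0)
         do_something ();
       i = 2;

   If I lives in a register across the `setjmp' and is set again
   afterward, a `longjmp' may restore the register to the value saved
   by `setjmp' and silently lose the second store; such registers must
   therefore be treated as clobbered.  */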
3800
3801 int
3802 regno_clobbered_at_setjmp (regno)
3803 int regno;
3804 {
3805 if (n_basic_blocks == 0)
3806 return 0;
3807
3808 return ((REG_N_SETS (regno) > 1
3809 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3810 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3811 }
3812 \f
3813 /* INSN references memory, possibly using autoincrement addressing modes.
3814 Find any entries on the mem_set_list that need to be invalidated due
3815 to an address change. */
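
/* E.g. if we are tracking a store to (mem (reg 99)) and INSN has a
   REG_INC note for (reg 99), the tracked expression no longer names
   the same address after the increment, so its entry must be removed
   (a sketch; register numbers are arbitrary).  */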
3816 static void
3817 invalidate_mems_from_autoinc (insn)
3818 rtx insn;
3819 {
3820 rtx note;
3821 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3822 {
3823 if (REG_NOTE_KIND (note) == REG_INC)
3824 {
3825 rtx temp = mem_set_list;
3826 rtx prev = NULL_RTX;
3827 rtx next;
3828
3829 while (temp)
3830 {
3831 next = XEXP (temp, 1);
3832 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3833 {
3834 /* Splice temp out of list. */
3835 if (prev)
3836 XEXP (prev, 1) = next;
3837 else
3838 mem_set_list = next;
3839 free_EXPR_LIST_node (temp);
3840 }
3841 else
3842 prev = temp;
3843 temp = next;
3844 }
3845 }
3846 }
3847 }
3848
3849 /* Process the registers that are set within X. Their bits are set to
3850 1 in the regset DEAD, because they are dead prior to this insn.
3851
3852 If INSN is nonzero, it is the insn being processed.
3853
3854 FLAGS is the set of operations to perform. */
3855
3856 static void
3857 mark_set_regs (needed, dead, x, insn, significant, flags)
3858 regset needed;
3859 regset dead;
3860 rtx x;
3861 rtx insn;
3862 regset significant;
3863 int flags;
3864 {
3865 register RTX_CODE code = GET_CODE (x);
3866
3867 if (code == SET || code == CLOBBER)
3868 mark_set_1 (needed, dead, x, insn, significant, flags);
3869 else if (code == PARALLEL)
3870 {
3871 register int i;
3872 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3873 {
3874 code = GET_CODE (XVECEXP (x, 0, i));
3875 if (code == SET || code == CLOBBER)
3876 mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn,
3877 significant, flags);
3878 }
3879 }
3880 }
3881
3882 /* Process a single SET rtx, X. */
3883
3884 static void
3885 mark_set_1 (needed, dead, x, insn, significant, flags)
3886 regset needed;
3887 regset dead;
3888 rtx x;
3889 rtx insn;
3890 regset significant;
3891 int flags;
3892 {
3893 register int regno = -1;
3894 register rtx reg = SET_DEST (x);
3895
3896 /* Some targets place small structures in registers for
3897 return values of functions. We have to detect this
3898 case specially here to get correct flow information. */
3899 if (GET_CODE (reg) == PARALLEL
3900 && GET_MODE (reg) == BLKmode)
3901 {
3902 register int i;
3903
3904 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3905 mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn,
3906 significant, flags);
3907 return;
3908 }
3909
3910 /* Modifying just one hardware register of a multi-reg value
3911 or just a byte field of a register
3912 does not mean the value from before this insn is now dead.
3913 But it does mean liveness of that register at the end of the block
3914 is significant.
3915
3916 Within mark_set_1, however, we treat it as if the register is
3917 indeed modified; mark_used_regs will also treat this
3918 register as being used. Thus, we treat these insns as setting a
3919 new value for the register as a function of its old value. This
3920 causes LOG_LINKS to be made appropriately and this will help combine. */
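
  /* For instance (a sketch),

         (set (strict_low_part (subreg:QI (reg:SI 100) 0)) ...)

     both sets and uses (reg:SI 100), since the bytes outside the
     QImode low part pass through unchanged.  */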
3921
3922 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
3923 || GET_CODE (reg) == SIGN_EXTRACT
3924 || GET_CODE (reg) == STRICT_LOW_PART)
3925 reg = XEXP (reg, 0);
3926
3927 /* If this set is a MEM, then it kills any aliased writes.
3928 If this set is a REG, then it kills any MEMs which use the reg. */
3929 if (flags & PROP_SCAN_DEAD_CODE)
3930 {
3931 if (GET_CODE (reg) == MEM
3932 || GET_CODE (reg) == REG)
3933 {
3934 rtx temp = mem_set_list;
3935 rtx prev = NULL_RTX;
3936 rtx next;
3937
3938 while (temp)
3939 {
3940 next = XEXP (temp, 1);
3941 if ((GET_CODE (reg) == MEM
3942 && output_dependence (XEXP (temp, 0), reg))
3943 || (GET_CODE (reg) == REG
3944 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
3945 {
3946 /* Splice this entry out of the list. */
3947 if (prev)
3948 XEXP (prev, 1) = next;
3949 else
3950 mem_set_list = next;
3951 free_EXPR_LIST_node (temp);
3952 }
3953 else
3954 prev = temp;
3955 temp = next;
3956 }
3957 }
3958
3959 /* If the memory reference had embedded side effects (autoincrement
3960 address modes), then we may need to kill some entries on the
3961 memory set list. */
3962 if (insn && GET_CODE (reg) == MEM)
3963 invalidate_mems_from_autoinc (insn);
3964
3965 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
3966 /* We do not know the size of a BLKmode store, so we do not track
3967 them for redundant store elimination. */
3968 && GET_MODE (reg) != BLKmode
3969 /* There are no REG_INC notes for SP, so we can't assume we'll see
3970 everything that invalidates it. To be safe, don't eliminate any
3971 stores through SP; none of them should be redundant anyway. */
3972 && ! reg_mentioned_p (stack_pointer_rtx, reg))
3973 mem_set_list = alloc_EXPR_LIST (0, reg, mem_set_list);
3974 }
3975
3976 if (GET_CODE (reg) == REG
3977 && (regno = REGNO (reg),
3978 ! (regno == FRAME_POINTER_REGNUM
3979 && (! reload_completed || frame_pointer_needed)))
3980 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3981 && ! (regno == HARD_FRAME_POINTER_REGNUM
3982 && (! reload_completed || frame_pointer_needed))
3983 #endif
3984 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3985 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3986 #endif
3987 && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
3988 /* && regno != STACK_POINTER_REGNUM) -- let's try without this. */
3989 {
3990 int some_needed = REGNO_REG_SET_P (needed, regno);
3991 int some_not_needed = ! some_needed;
3992
3993 /* Mark it as a significant register for this basic block. */
3994 if (significant)
3995 SET_REGNO_REG_SET (significant, regno);
3996
3997 /* Mark it as dead before this insn. */
3998 SET_REGNO_REG_SET (dead, regno);
3999
4000 /* A hard reg in a wide mode may really be multiple registers.
4001 If so, mark all of them just like the first. */
4002 if (regno < FIRST_PSEUDO_REGISTER)
4003 {
4004 int n;
4005
4006 /* Nothing below is needed for the stack pointer; get out asap.
4007 E.g., log links aren't needed, since combine won't use them. */
4008 if (regno == STACK_POINTER_REGNUM)
4009 return;
4010
4011 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4012 while (--n > 0)
4013 {
4014 int regno_n = regno + n;
4015 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
4016 if (significant)
4017 SET_REGNO_REG_SET (significant, regno_n);
4018
4019 SET_REGNO_REG_SET (dead, regno_n);
4020 some_needed |= needed_regno;
4021 some_not_needed |= ! needed_regno;
4022 }
4023 }
4024
4025 /* Additional data to record if this is the final pass. */
4026 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4027 | PROP_DEATH_NOTES | PROP_AUTOINC))
4028 {
4029 register rtx y;
4030 register int blocknum = BLOCK_NUM (insn);
4031
4032 y = NULL_RTX;
4033 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4034 y = reg_next_use[regno];
4035
4036 /* If this is a hard reg, record this function uses the reg. */
4037
4038 if (regno < FIRST_PSEUDO_REGISTER)
4039 {
4040 register int i;
4041 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
4042
4043 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4044 for (i = regno; i < endregno; i++)
4045 {
4046 /* The next use is no longer "next", since a store
4047 intervenes. */
4048 reg_next_use[i] = 0;
4049 }
4050
4051 if (flags & PROP_REG_INFO)
4052 for (i = regno; i < endregno; i++)
4053 {
4054 regs_ever_live[i] = 1;
4055 REG_N_SETS (i)++;
4056 }
4057 }
4058 else
4059 {
4060 /* The next use is no longer "next", since a store
4061 intervenes. */
4062 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4063 reg_next_use[regno] = 0;
4064
4065 /* Keep track of which basic blocks each reg appears in. */
4066
4067 if (flags & PROP_REG_INFO)
4068 {
4069 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4070 REG_BASIC_BLOCK (regno) = blocknum;
4071 else if (REG_BASIC_BLOCK (regno) != blocknum)
4072 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4073
4074 /* Count (weighted) references, stores, etc. This counts a
4075 register twice if it is modified, but that is correct. */
4076 REG_N_SETS (regno)++;
4077 REG_N_REFS (regno) += loop_depth + 1;
4078
4079 /* The insns where a reg is live are normally counted
4080 elsewhere, but we want the count to include the insn
4081 where the reg is set, and the normal counting mechanism
4082 would not count it. */
4083 REG_LIVE_LENGTH (regno)++;
4084 }
4085 }
4086
4087 if (! some_not_needed)
4088 {
4089 if (flags & PROP_LOG_LINKS)
4090 {
4091 /* Make a logical link from the next following insn
4092 that uses this register, back to this insn.
4093 The following insns have already been processed.
4094
4095 We don't build a LOG_LINK for hard registers contained
4096 in ASM_OPERANDs. If these registers get replaced,
4097 we might wind up changing the semantics of the insn,
4098 even if reload can make what appear to be valid
4099 assignments later. */
4100 if (y && (BLOCK_NUM (y) == blocknum)
4101 && (regno >= FIRST_PSEUDO_REGISTER
4102 || asm_noperands (PATTERN (y)) < 0))
4103 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4104 }
4105 }
4106 else if (! some_needed)
4107 {
4108 if (flags & PROP_REG_INFO)
4109 REG_N_DEATHS (REGNO (reg))++;
4110
4111 if (flags & PROP_DEATH_NOTES)
4112 {
4113 /* Note that dead stores have already been deleted
4114 when possible. If we get here, we have found a
4115 dead store that cannot be eliminated (because the
4116 same insn does something useful). Indicate this
4117 by marking the reg being set as dying here. */
4118 REG_NOTES (insn)
4119 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4120 }
4121 }
4122 else
4123 {
4124 if (flags & PROP_DEATH_NOTES)
4125 {
4126 /* This is a case where we have a multi-word hard register
4127 and some, but not all, of the words of the register are
4128 needed in subsequent insns. Write REG_UNUSED notes
4129 for those parts that were not needed. This case should
4130 be rare. */
4131
4132 int i;
4133
4134 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4135 i >= 0; i--)
4136 if (!REGNO_REG_SET_P (needed, regno + i))
4137 REG_NOTES (insn)
4138 = (alloc_EXPR_LIST
4139 (REG_UNUSED,
4140 gen_rtx_REG (reg_raw_mode[regno + i], regno + i),
4141 REG_NOTES (insn)));
4142 }
4143 }
4144 }
4145 }
4146 else if (GET_CODE (reg) == REG)
4147 {
4148 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4149 reg_next_use[regno] = 0;
4150 }
4151
4152 /* If this is the last pass and this is a SCRATCH, show it will be dying
4153 here and count it. */
4154 else if (GET_CODE (reg) == SCRATCH)
4155 {
4156 if (flags & PROP_DEATH_NOTES)
4157 REG_NOTES (insn)
4158 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4159 }
4160 }
4161 \f
4162 #ifdef AUTO_INC_DEC
4163
4164 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
4165 reference. */
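
/* For example (a target-dependent sketch), the sequence

       (set (reg 100) (mem (reg 99)))
       ...
       (set (reg 99) (plus (reg 99) (const_int 4)))

   can become

       (set (reg 100) (mem (post_inc (reg 99))))

   when the MEM is four bytes wide, HAVE_POST_INCREMENT holds, and
   (reg 99) is not used in any conflicting way in between.  */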
4166
4167 static void
4168 find_auto_inc (needed, x, insn)
4169 regset needed;
4170 rtx x;
4171 rtx insn;
4172 {
4173 rtx addr = XEXP (x, 0);
4174 HOST_WIDE_INT offset = 0;
4175 rtx set;
4176
4177 /* Here we detect use of an index register which might be good for
4178 postincrement, postdecrement, preincrement, or predecrement. */
4179
4180 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4181 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4182
4183 if (GET_CODE (addr) == REG)
4184 {
4185 register rtx y;
4186 register int size = GET_MODE_SIZE (GET_MODE (x));
4187 rtx use;
4188 rtx incr;
4189 int regno = REGNO (addr);
4190
4191 /* Is the next use an increment that might make auto-increment? */
4192 if ((incr = reg_next_use[regno]) != 0
4193 && (set = single_set (incr)) != 0
4194 && GET_CODE (set) == SET
4195 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4196 /* Can't add side effects to jumps; if reg is spilled and
4197 reloaded, there's no way to store back the altered value. */
4198 && GET_CODE (insn) != JUMP_INSN
4199 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4200 && XEXP (y, 0) == addr
4201 && GET_CODE (XEXP (y, 1)) == CONST_INT
4202 && ((HAVE_POST_INCREMENT
4203 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4204 || (HAVE_POST_DECREMENT
4205 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4206 || (HAVE_PRE_INCREMENT
4207 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4208 || (HAVE_PRE_DECREMENT
4209 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4210 /* Make sure this reg appears only once in this insn. */
4211 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4212 use != 0 && use != (rtx) 1))
4213 {
4214 rtx q = SET_DEST (set);
4215 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4216 ? (offset ? PRE_INC : POST_INC)
4217 : (offset ? PRE_DEC : POST_DEC));
4218
4219 if (dead_or_set_p (incr, addr))
4220 {
4221 /* This is the simple case. Try to make the auto-inc. If
4222 we can't, we are done. Otherwise, we will do any
4223 needed updates below. */
4224 if (! validate_change (insn, &XEXP (x, 0),
4225 gen_rtx_fmt_e (inc_code, Pmode, addr),
4226 0))
4227 return;
4228 }
4229 else if (GET_CODE (q) == REG
4230 /* PREV_INSN used here to check the semi-open interval
4231 [insn,incr). */
4232 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4233 /* We must also check for sets of q as q may be
4234 a call clobbered hard register and there may
4235 be a call between PREV_INSN (insn) and incr. */
4236 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4237 {
4238 /* We have *p followed sometime later by q = p+size.
4239 Both p and q must be live afterward,
4240 and q is not used between INSN and its assignment.
4241 Change it to q = p, ...*q..., q = q+size.
4242 Then fall into the usual case. */
4243 rtx insns, temp;
4244 basic_block bb;
4245
4246 start_sequence ();
4247 emit_move_insn (q, addr);
4248 insns = get_insns ();
4249 end_sequence ();
4250
4251 bb = BLOCK_FOR_INSN (insn);
4252 for (temp = insns; temp; temp = NEXT_INSN (temp))
4253 set_block_for_insn (temp, bb);
4254
4255 /* If we can't make the auto-inc, or can't make the
4256 replacement into Y, exit. There's no point in making
4257 the change below if we can't do the auto-inc and doing
4258 so is not correct in the pre-inc case. */
4259
4260 validate_change (insn, &XEXP (x, 0),
4261 gen_rtx_fmt_e (inc_code, Pmode, q),
4262 1);
4263 validate_change (incr, &XEXP (y, 0), q, 1);
4264 if (! apply_change_group ())
4265 return;
4266
4267 /* We now know we'll be doing this change, so emit the
4268 new insn(s) and do the updates. */
4269 emit_insns_before (insns, insn);
4270
4271 if (BLOCK_FOR_INSN (insn)->head == insn)
4272 BLOCK_FOR_INSN (insn)->head = insns;
4273
4274 /* INCR will become a NOTE and INSN won't contain a
4275 use of ADDR. If a use of ADDR was just placed in
4276 the insn before INSN, make that the next use.
4277 Otherwise, invalidate it. */
4278 if (GET_CODE (PREV_INSN (insn)) == INSN
4279 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4280 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4281 reg_next_use[regno] = PREV_INSN (insn);
4282 else
4283 reg_next_use[regno] = 0;
4284
4285 addr = q;
4286 regno = REGNO (q);
4287
4288 /* REGNO is now used in INCR which is below INSN, but
4289 it previously wasn't live here. If we don't mark
4290 it as needed, we'll put a REG_DEAD note for it
4291 on this insn, which is incorrect. */
4292 SET_REGNO_REG_SET (needed, regno);
4293
4294 /* If there are any calls between INSN and INCR, show
4295 that REGNO now crosses them. */
4296 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4297 if (GET_CODE (temp) == CALL_INSN)
4298 REG_N_CALLS_CROSSED (regno)++;
4299 }
4300 else
4301 return;
4302
4303 /* If we haven't returned, it means we were able to make the
4304 auto-inc, so update the status. First, record that this insn
4305 has an implicit side effect. */
4306
4307 REG_NOTES (insn)
4308 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4309
4310 /* Modify the old increment-insn to simply copy
4311 the already-incremented value of our register. */
4312 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4313 abort ();
4314
4315 /* If that makes it a no-op (copying the register into itself), delete
4316 it so it won't appear to be a "use" and a "set" of this
4317 register. */
4318 if (SET_DEST (set) == addr)
4319 {
4320 PUT_CODE (incr, NOTE);
4321 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4322 NOTE_SOURCE_FILE (incr) = 0;
4323 }
4324
4325 if (regno >= FIRST_PSEUDO_REGISTER)
4326 {
4327 /* Count an extra reference to the reg. When a reg is
4328 incremented, spilling it is worse, so we want to make
4329 that less likely. */
4330 REG_N_REFS (regno) += loop_depth + 1;
4331
4332 /* Count the increment as a setting of the register,
4333 even though it isn't a SET in rtl. */
4334 REG_N_SETS (regno)++;
4335 }
4336 }
4337 }
4338 }
4339 #endif /* AUTO_INC_DEC */
4340 \f
4341 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
4342 This is done assuming the registers needed from X
4343 are those that have 1-bits in NEEDED.
4344
4345 FLAGS is the set of enabled operations.
4346
4347 INSN is the containing instruction. If INSN is dead, this function is not
4348 called. */
4349
4350 static void
4351 mark_used_regs (needed, live, x, flags, insn)
4352 regset needed;
4353 regset live;
4354 rtx x;
4355 int flags;
4356 rtx insn;
4357 {
4358 register RTX_CODE code;
4359 register int regno;
4360 int i;
4361
4362 retry:
4363 code = GET_CODE (x);
4364 switch (code)
4365 {
4366 case LABEL_REF:
4367 case SYMBOL_REF:
4368 case CONST_INT:
4369 case CONST:
4370 case CONST_DOUBLE:
4371 case PC:
4372 case ADDR_VEC:
4373 case ADDR_DIFF_VEC:
4374 return;
4375
4376 #ifdef HAVE_cc0
4377 case CC0:
4378 cc0_live = 1;
4379 return;
4380 #endif
4381
4382 case CLOBBER:
4383 /* If we are clobbering a MEM, mark any registers inside the address
4384 as being used. */
4385 if (GET_CODE (XEXP (x, 0)) == MEM)
4386 mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), flags, insn);
4387 return;
4388
4389 case MEM:
4390 /* Don't bother watching stores to mems if this is not the
4391 final pass. We'll not be deleting dead stores this round. */
4392 if (flags & PROP_SCAN_DEAD_CODE)
4393 {
4394 /* Invalidate the data for the last MEM stored, but only if MEM is
4395 something that can be stored into. */
4396 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4397 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4398 ; /* needn't clear the memory set list */
4399 else
4400 {
4401 rtx temp = mem_set_list;
4402 rtx prev = NULL_RTX;
4403 rtx next;
4404
4405 while (temp)
4406 {
4407 next = XEXP (temp, 1);
4408 if (anti_dependence (XEXP (temp, 0), x))
4409 {
4410 /* Splice temp out of the list. */
4411 if (prev)
4412 XEXP (prev, 1) = next;
4413 else
4414 mem_set_list = next;
4415 free_EXPR_LIST_node (temp);
4416 }
4417 else
4418 prev = temp;
4419 temp = next;
4420 }
4421 }
4422
4423 /* If the memory reference had embedded side effects (autoincrement
4424 address modes), then we may need to kill some entries on the
4425 memory set list. */
4426 if (insn)
4427 invalidate_mems_from_autoinc (insn);
4428 }
4429
4430 #ifdef AUTO_INC_DEC
4431 if (flags & PROP_AUTOINC)
4432 find_auto_inc (needed, x, insn);
4433 #endif
4434 break;
4435
4436 case SUBREG:
4437 if (GET_CODE (SUBREG_REG (x)) == REG
4438 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
4439 && (GET_MODE_SIZE (GET_MODE (x))
4440 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
4441 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
4442
4443 /* While we're here, optimize this case. */
4444 x = SUBREG_REG (x);
4445
4446 /* In case the SUBREG is not of a register, don't optimize. */
4447 if (GET_CODE (x) != REG)
4448 {
4449 mark_used_regs (needed, live, x, flags, insn);
4450 return;
4451 }
4452
4453 /* ... fall through ... */
4454
4455 case REG:
4456 /* If we see a register being used here (other than being set),
4457 mark it as needed. */
4458
4459 regno = REGNO (x);
4460 {
4461 int some_needed = REGNO_REG_SET_P (needed, regno);
4462 int some_not_needed = ! some_needed;
4463
4464 SET_REGNO_REG_SET (live, regno);
4465
4466 /* A hard reg in a wide mode may really be multiple registers.
4467 If so, mark all of them just like the first. */
4468 if (regno < FIRST_PSEUDO_REGISTER)
4469 {
4470 int n;
4471
4472 /* For stack ptr or fixed arg pointer,
4473 nothing below can be necessary, so waste no more time. */
4474 if (regno == STACK_POINTER_REGNUM
4475 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4476 || (regno == HARD_FRAME_POINTER_REGNUM
4477 && (! reload_completed || frame_pointer_needed))
4478 #endif
4479 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4480 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4481 #endif
4482 || (regno == FRAME_POINTER_REGNUM
4483 && (! reload_completed || frame_pointer_needed)))
4484 {
4485 /* If this is a register we are going to try to eliminate,
4486 don't mark it live here. If we are successful in
4487 eliminating it, it need not be live unless it is used for
4488 pseudos, in which case it will have been set live when
4489 it was allocated to the pseudos. If the register will not
4490 be eliminated, reload will set it live at that point. */
4491
4492 if ((flags & PROP_REG_INFO)
4493 && ! TEST_HARD_REG_BIT (elim_reg_set, regno))
4494 regs_ever_live[regno] = 1;
4495 return;
4496 }
4497 /* No death notes for global register variables;
4498 their values are live after this function exits. */
4499 if (global_regs[regno])
4500 {
4501 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4502 reg_next_use[regno] = insn;
4503 return;
4504 }
4505
4506 n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4507 while (--n > 0)
4508 {
4509 int regno_n = regno + n;
4510 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
4511
4512 SET_REGNO_REG_SET (live, regno_n);
4513 some_needed |= needed_regno;
4514 some_not_needed |= ! needed_regno;
4515 }
4516 }
4517
4518 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4519 {
4520 /* Record where each reg is used, so when the reg
4521 is set we know the next insn that uses it. */
4522
4523 reg_next_use[regno] = insn;
4524 }
4525 if (flags & PROP_REG_INFO)
4526 {
4527 if (regno < FIRST_PSEUDO_REGISTER)
4528 {
4529 /* If a hard reg is being used,
4530 record that this function does use it. */
4531
4532 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
4533 if (i == 0)
4534 i = 1;
4535 do
4536 regs_ever_live[regno + --i] = 1;
4537 while (i > 0);
4538 }
4539 else
4540 {
4541 /* Keep track of which basic block each reg appears in. */
4542
4543 register int blocknum = BLOCK_NUM (insn);
4544
4545 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4546 REG_BASIC_BLOCK (regno) = blocknum;
4547 else if (REG_BASIC_BLOCK (regno) != blocknum)
4548 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4549
4550 /* Count (weighted) number of uses of each reg. */
4551
4552 REG_N_REFS (regno) += loop_depth + 1;
4553 }
4554 }
4555
4556 /* Record and count the insns in which a reg dies.
4557 If it is used in this insn and was dead below the insn
4558 then it dies in this insn. If it was set in this insn,
4559 we do not make a REG_DEAD note; likewise if we already
4560 made such a note. */
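
      /* E.g. for (set (reg 101) (plus (reg 100) (const_int 1)))
         where (reg 100) is not live below this insn and is not set
         by it, a REG_DEAD note for (reg 100) is attached here
         (a sketch; register numbers are arbitrary).  */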
4561
4562 if (flags & PROP_DEATH_NOTES)
4563 {
4564 if (some_not_needed
4565 && ! dead_or_set_p (insn, x)
4566 #if 0
4567 && (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
4568 #endif
4569 )
4570 {
4571 /* Check for the case where the register dying partially
4572 overlaps the register set by this insn. */
4573 if (regno < FIRST_PSEUDO_REGISTER
4574 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
4575 {
4576 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4577 while (--n >= 0)
4578 some_needed |= dead_or_set_regno_p (insn, regno + n);
4579 }
4580
4581 /* If none of the words in X is needed, make a REG_DEAD
4582 note. Otherwise, we must make partial REG_DEAD notes. */
4583 if (! some_needed)
4584 {
4585 REG_NOTES (insn)
4586 = alloc_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
4587 REG_N_DEATHS (regno)++;
4588 }
4589 else
4590 {
4591 int i;
4592
4593 /* Don't make a REG_DEAD note for a part of a register
4594 that is set in the insn. */
4595
4596 for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
4597 i >= 0; i--)
4598 if (!REGNO_REG_SET_P (needed, regno + i)
4599 && ! dead_or_set_regno_p (insn, regno + i))
4600 REG_NOTES (insn)
4601 = (alloc_EXPR_LIST
4602 (REG_DEAD, gen_rtx_REG (reg_raw_mode[regno + i],
4603 regno + i),
4604 REG_NOTES (insn)));
4605 }
4606 }
4607 }
4608 }
4609 return;
4610
4611 case SET:
4612 {
4613 register rtx testreg = SET_DEST (x);
4614 int mark_dest = 0;
4615
4616 /* If storing into MEM, don't show it as being used. But do
4617 show the address as being used. */
4618 if (GET_CODE (testreg) == MEM)
4619 {
4620 #ifdef AUTO_INC_DEC
4621 if (flags & PROP_AUTOINC)
4622 find_auto_inc (needed, testreg, insn);
4623 #endif
4624 mark_used_regs (needed, live, XEXP (testreg, 0), flags, insn);
4625 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4626 return;
4627 }
4628
4629 /* Storing in STRICT_LOW_PART is like storing in a reg
4630 in that this SET might be dead, so ignore it in TESTREG,
4631 but in some other ways it is like using the reg.
4632
4633 Storing in a SUBREG or a bit field is like storing the entire
4634 register in that if the register's value is not used
4635 then this SET is not needed. */
4636 while (GET_CODE (testreg) == STRICT_LOW_PART
4637 || GET_CODE (testreg) == ZERO_EXTRACT
4638 || GET_CODE (testreg) == SIGN_EXTRACT
4639 || GET_CODE (testreg) == SUBREG)
4640 {
4641 if (GET_CODE (testreg) == SUBREG
4642 && GET_CODE (SUBREG_REG (testreg)) == REG
4643 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
4644 && (GET_MODE_SIZE (GET_MODE (testreg))
4645 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
4646 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
4647
4648 /* Modifying a single register in an alternate mode
4649 does not use any of the old value. But these other
4650 ways of storing in a register do use the old value. */
4651 if (GET_CODE (testreg) == SUBREG
4652 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4653 ;
4654 else
4655 mark_dest = 1;
4656
4657 testreg = XEXP (testreg, 0);
4658 }
4659
4660 /* If this is a store into a register,
4661 recursively scan the value being stored. */
4662
4663 if ((GET_CODE (testreg) == PARALLEL
4664 && GET_MODE (testreg) == BLKmode)
4665 || (GET_CODE (testreg) == REG
4666 && (regno = REGNO (testreg), ! (regno == FRAME_POINTER_REGNUM
4667 && (! reload_completed || frame_pointer_needed)))
4668 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4669 && ! (regno == HARD_FRAME_POINTER_REGNUM
4670 && (! reload_completed || frame_pointer_needed))
4671 #endif
4672 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4673 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4674 #endif
4675 ))
4676 /* We used to exclude global_regs here, but that seems wrong.
4677 Storing in them is like storing in mem. */
4678 {
4679 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4680 if (mark_dest)
4681 mark_used_regs (needed, live, SET_DEST (x), flags, insn);
4682 return;
4683 }
4684 }
4685 break;
4686
4687 case ASM_OPERANDS:
4688 case UNSPEC_VOLATILE:
4689 case TRAP_IF:
4690 case ASM_INPUT:
4691 {
4692 /* Traditional and volatile asm instructions must be considered to use
4693 and clobber all hard registers, all pseudo-registers and all of
4694 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4695
4696 Consider for instance a volatile asm that changes the fpu rounding
4697 mode. An insn should not be moved across this even if it only uses
4698 pseudo-regs because it might give an incorrectly rounded result.
4699
4700 ?!? Unfortunately, marking all hard registers as live causes massive
4701 problems for the register allocator and marking all pseudos as live
4702 creates mountains of uninitialized variable warnings.
4703
4704 So for now, just clear the memory set list and mark any regs
4705 we can find in ASM_OPERANDS as used. */
4706 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4707 free_EXPR_LIST_list (&mem_set_list);
4708
4709 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4710 We cannot just fall through here, since then we would be confused
4711 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
4712 a traditional asm, unlike its normal usage. */
4713 if (code == ASM_OPERANDS)
4714 {
4715 int j;
4716
4717 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4718 mark_used_regs (needed, live, ASM_OPERANDS_INPUT (x, j),
4719 flags, insn);
4720 }
4721 break;
4722 }
4723
4724 case PHI:
4725 /* We _do_not_ want to scan operands of phi nodes. Operands of
4726 a phi function are evaluated only when control reaches this
4727 block along a particular edge. Therefore, regs that appear
4728 as arguments to phi should not be added to the global live at
4729 start. */
4730 return;
4731
4732 default:
4733 break;
4734 }
4735
4736 /* Recursively scan the operands of this expression. */
4737
4738 {
4739 register const char *fmt = GET_RTX_FORMAT (code);
4740 register int i;
4741
4742 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4743 {
4744 if (fmt[i] == 'e')
4745 {
4746 /* Tail recursive case: save a function call level. */
4747 if (i == 0)
4748 {
4749 x = XEXP (x, 0);
4750 goto retry;
4751 }
4752 mark_used_regs (needed, live, XEXP (x, i), flags, insn);
4753 }
4754 else if (fmt[i] == 'E')
4755 {
4756 register int j;
4757 for (j = 0; j < XVECLEN (x, i); j++)
4758 mark_used_regs (needed, live, XVECEXP (x, i, j), flags, insn);
4759 }
4760 }
4761 }
4762 }
4763 \f
4764 #ifdef AUTO_INC_DEC
4765
4766 static int
4767 try_pre_increment_1 (insn)
4768 rtx insn;
4769 {
4770 /* Find the next use of this reg. If in same basic block,
4771 make it do pre-increment or pre-decrement if appropriate. */
4772 rtx x = single_set (insn);
4773 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4774 * INTVAL (XEXP (SET_SRC (x), 1)));
4775 int regno = REGNO (SET_DEST (x));
4776 rtx y = reg_next_use[regno];
4777 if (y != 0
4778 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4779 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4780 mode would be better. */
4781 && ! dead_or_set_p (y, SET_DEST (x))
4782 && try_pre_increment (y, SET_DEST (x), amount))
4783 {
4784 /* We have found a suitable auto-increment
4785 and already changed insn Y to do it.
4786 So flush this increment-instruction. */
4787 PUT_CODE (insn, NOTE);
4788 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4789 NOTE_SOURCE_FILE (insn) = 0;
4790 /* Count a reference to this reg for the increment
4791 insn we are deleting. When a reg is incremented,
4792 spilling it is worse, so we want to make that
4793 less likely. */
4794 if (regno >= FIRST_PSEUDO_REGISTER)
4795 {
4796 REG_N_REFS (regno) += loop_depth + 1;
4797 REG_N_SETS (regno)++;
4798 }
4799 return 1;
4800 }
4801 return 0;
4802 }
4803
4804 /* Try to change INSN so that it does pre-increment or pre-decrement
4805 addressing on register REG in order to add AMOUNT to REG.
4806 AMOUNT is negative for pre-decrement.
4807 Returns 1 if the change could be made.
4808 This checks all about the validity of the result of modifying INSN. */
4809
4810 static int
4811 try_pre_increment (insn, reg, amount)
4812 rtx insn, reg;
4813 HOST_WIDE_INT amount;
4814 {
4815 register rtx use;
4816
4817 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4818 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4819 int pre_ok = 0;
4820 /* Nonzero if we can try to make a post-increment or post-decrement.
4821 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4822 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4823 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4824 int post_ok = 0;
4825
4826 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4827 int do_post = 0;
4828
4829 /* From the sign of increment, see which possibilities are conceivable
4830 on this target machine. */
4831 if (HAVE_PRE_INCREMENT && amount > 0)
4832 pre_ok = 1;
4833 if (HAVE_POST_INCREMENT && amount > 0)
4834 post_ok = 1;
4835
4836 if (HAVE_PRE_DECREMENT && amount < 0)
4837 pre_ok = 1;
4838 if (HAVE_POST_DECREMENT && amount < 0)
4839 post_ok = 1;
4840
4841 if (! (pre_ok || post_ok))
4842 return 0;
4843
4844 /* It is not safe to add a side effect to a jump insn
4845 because if the incremented register is spilled and must be reloaded
4846 there would be no way to store the incremented value back in memory. */
4847
4848 if (GET_CODE (insn) == JUMP_INSN)
4849 return 0;
4850
4851 use = 0;
4852 if (pre_ok)
4853 use = find_use_as_address (PATTERN (insn), reg, 0);
4854 if (post_ok && (use == 0 || use == (rtx) 1))
4855 {
4856 use = find_use_as_address (PATTERN (insn), reg, -amount);
4857 do_post = 1;
4858 }
4859
4860 if (use == 0 || use == (rtx) 1)
4861 return 0;
4862
4863 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4864 return 0;
4865
4866 /* See if this combination of instruction and addressing mode exists. */
4867 if (! validate_change (insn, &XEXP (use, 0),
4868 gen_rtx_fmt_e (amount > 0
4869 ? (do_post ? POST_INC : PRE_INC)
4870 : (do_post ? POST_DEC : PRE_DEC),
4871 Pmode, reg), 0))
4872 return 0;
4873
4874 /* Record that this insn now has an implicit side effect on X. */
4875 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4876 return 1;
4877 }
4878
4879 #endif /* AUTO_INC_DEC */
4880 \f
4881 /* Find the place in the rtx X where REG is used as a memory address.
4882 Return the MEM rtx that so uses it.
4883 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4884 (plus REG (const_int PLUSCONST)).
4885
4886 If such an address does not appear, return 0.
4887 If REG appears more than once, or is used other than in such an address,
4888 return (rtx)1. */
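
/* For example (a sketch), searching

       (set (reg 101) (mem (plus (reg 100) (const_int 4))))

   for (reg 100) with PLUSCONST == 4 returns the MEM, while searching

       (set (reg 101) (plus (reg 100) (mem (reg 100))))

   for (reg 100) with PLUSCONST == 0 returns (rtx) 1, since the
   register also appears outside a memory address.  */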
4889
4890 rtx
4891 find_use_as_address (x, reg, plusconst)
4892 register rtx x;
4893 rtx reg;
4894 HOST_WIDE_INT plusconst;
4895 {
4896 enum rtx_code code = GET_CODE (x);
4897 const char *fmt = GET_RTX_FORMAT (code);
4898 register int i;
4899 register rtx value = 0;
4900 register rtx tem;
4901
4902 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4903 return x;
4904
4905 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4906 && XEXP (XEXP (x, 0), 0) == reg
4907 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4908 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4909 return x;
4910
4911 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4912 {
4913 /* If REG occurs inside a MEM used in a bit-field reference,
4914 that is unacceptable. */
4915 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4916 return (rtx) (HOST_WIDE_INT) 1;
4917 }
4918
4919 if (x == reg)
4920 return (rtx) (HOST_WIDE_INT) 1;
4921
4922 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4923 {
4924 if (fmt[i] == 'e')
4925 {
4926 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4927 if (value == 0)
4928 value = tem;
4929 else if (tem != 0)
4930 return (rtx) (HOST_WIDE_INT) 1;
4931 }
4932 else if (fmt[i] == 'E')
4933 {
4934 register int j;
4935 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4936 {
4937 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4938 if (value == 0)
4939 value = tem;
4940 else if (tem != 0)
4941 return (rtx) (HOST_WIDE_INT) 1;
4942 }
4943 }
4944 }
4945
4946 return value;
4947 }
4948 \f
4949 /* Write information about registers and basic blocks into FILE.
4950 This is part of making a debugging dump. */
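
/* dump_regset prints each register number in R, with hard registers
   also getting their target names, e.g. " 0 [ax] 105 106" (a sketch;
   the bracketed names are target dependent).  */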
4951
4952 void
4953 dump_regset (r, outf)
4954 regset r;
4955 FILE *outf;
4956 {
4957 int i;
4958 if (r == NULL)
4959 {
4960 fputs (" (nil)", outf);
4961 return;
4962 }
4963
4964 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
4965 {
4966 fprintf (outf, " %d", i);
4967 if (i < FIRST_PSEUDO_REGISTER)
4968 fprintf (outf, " [%s]",
4969 reg_names[i]);
4970 });
4971 }
4972
4973 void
4974 debug_regset (r)
4975 regset r;
4976 {
4977 dump_regset (r, stderr);
4978 putc ('\n', stderr);
4979 }
4980
4981 void
4982 dump_flow_info (file)
4983 FILE *file;
4984 {
4985 register int i;
4986 static const char * const reg_class_names[] = REG_CLASS_NAMES;
4987
4988 fprintf (file, "%d registers.\n", max_regno);
4989 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4990 if (REG_N_REFS (i))
4991 {
4992 enum reg_class class, altclass;
4993 fprintf (file, "\nRegister %d used %d times across %d insns",
4994 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
4995 if (REG_BASIC_BLOCK (i) >= 0)
4996 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
4997 if (REG_N_SETS (i))
4998 fprintf (file, "; set %d time%s", REG_N_SETS (i),
4999 (REG_N_SETS (i) == 1) ? "" : "s");
5000 if (REG_USERVAR_P (regno_reg_rtx[i]))
5001 fprintf (file, "; user var");
5002 if (REG_N_DEATHS (i) != 1)
5003 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
5004 if (REG_N_CALLS_CROSSED (i) == 1)
5005 fprintf (file, "; crosses 1 call");
5006 else if (REG_N_CALLS_CROSSED (i))
5007 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
5008 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
5009 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
5010 class = reg_preferred_class (i);
5011 altclass = reg_alternate_class (i);
5012 if (class != GENERAL_REGS || altclass != ALL_REGS)
5013 {
5014 if (altclass == ALL_REGS || class == ALL_REGS)
5015 fprintf (file, "; pref %s", reg_class_names[(int) class]);
5016 else if (altclass == NO_REGS)
5017 fprintf (file, "; %s or none", reg_class_names[(int) class]);
5018 else
5019 fprintf (file, "; pref %s, else %s",
5020 reg_class_names[(int) class],
5021 reg_class_names[(int) altclass]);
5022 }
5023 if (REGNO_POINTER_FLAG (i))
5024 fprintf (file, "; pointer");
5025 fprintf (file, ".\n");
5026 }
5027
5028 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
5029 for (i = 0; i < n_basic_blocks; i++)
5030 {
5031 register basic_block bb = BASIC_BLOCK (i);
5032 register edge e;
5033
5034 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d.\n",
5035 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
5036
5037 fprintf (file, "Predecessors: ");
5038 for (e = bb->pred; e ; e = e->pred_next)
5039 dump_edge_info (file, e, 0);
5040
5041 fprintf (file, "\nSuccessors: ");
5042 for (e = bb->succ; e ; e = e->succ_next)
5043 dump_edge_info (file, e, 1);
5044
5045 fprintf (file, "\nRegisters live at start:");
5046 dump_regset (bb->global_live_at_start, file);
5047
5048 fprintf (file, "\nRegisters live at end:");
5049 dump_regset (bb->global_live_at_end, file);
5050
5051 putc ('\n', file);
5052 }
5053
5054 putc ('\n', file);
5055 }
5056
5057 void
5058 debug_flow_info ()
5059 {
5060 dump_flow_info (stderr);
5061 }
5062
5063 static void
5064 dump_edge_info (file, e, do_succ)
5065 FILE *file;
5066 edge e;
5067 int do_succ;
5068 {
5069 basic_block side = (do_succ ? e->dest : e->src);
5070
5071 if (side == ENTRY_BLOCK_PTR)
5072 fputs (" ENTRY", file);
5073 else if (side == EXIT_BLOCK_PTR)
5074 fputs (" EXIT", file);
5075 else
5076 fprintf (file, " %d", side->index);
5077
5078 if (e->flags)
5079 {
5080 static const char * const bitnames[] = {
5081 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5082 };
5083 int comma = 0;
5084 int i, flags = e->flags;
5085
5086 fputc (' ', file);
5087 fputc ('(', file);
5088 for (i = 0; flags; i++)
5089 if (flags & (1 << i))
5090 {
5091 flags &= ~(1 << i);
5092
5093 if (comma)
5094 fputc (',', file);
5095 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5096 fputs (bitnames[i], file);
5097 else
5098 fprintf (file, "%d", i);
5099 comma = 1;
5100 }
5101 fputc (')', file);
5102 }
5103 }
5104
5105 \f
5106 /* Print out one basic block with live information at start and end. */
5107 void
5108 dump_bb (bb, outf)
5109 basic_block bb;
5110 FILE *outf;
5111 {
5112 rtx insn;
5113 rtx last;
5114 edge e;
5115
5116 fprintf (outf, ";; Basic block %d, loop depth %d",
5117 bb->index, bb->loop_depth - 1);
5118 if (bb->eh_beg != -1 || bb->eh_end != -1)
5119 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5120 putc ('\n', outf);
5121
5122 fputs (";; Predecessors: ", outf);
5123 for (e = bb->pred; e ; e = e->pred_next)
5124 dump_edge_info (outf, e, 0);
5125 putc ('\n', outf);
5126
5127 fputs (";; Registers live at start:", outf);
5128 dump_regset (bb->global_live_at_start, outf);
5129 putc ('\n', outf);
5130
5131 for (insn = bb->head, last = NEXT_INSN (bb->end);
5132 insn != last;
5133 insn = NEXT_INSN (insn))
5134 print_rtl_single (outf, insn);
5135
5136 fputs (";; Registers live at end:", outf);
5137 dump_regset (bb->global_live_at_end, outf);
5138 putc ('\n', outf);
5139
5140 fputs (";; Successors: ", outf);
5141 for (e = bb->succ; e; e = e->succ_next)
5142 dump_edge_info (outf, e, 1);
5143 putc ('\n', outf);
5144 }
5145
5146 void
5147 debug_bb (bb)
5148 basic_block bb;
5149 {
5150 dump_bb (bb, stderr);
5151 }
5152
5153 void
5154 debug_bb_n (n)
5155 int n;
5156 {
5157 dump_bb (BASIC_BLOCK (n), stderr);
5158 }
5159
5160 /* Like print_rtl, but also print out live information for the start of each
5161 basic block. */
5162
5163 void
5164 print_rtl_with_bb (outf, rtx_first)
5165 FILE *outf;
5166 rtx rtx_first;
5167 {
5168 register rtx tmp_rtx;
5169
5170 if (rtx_first == 0)
5171 fprintf (outf, "(nil)\n");
5172 else
5173 {
5174 int i;
5175 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5176 int max_uid = get_max_uid ();
5177 basic_block *start = (basic_block *)
5178 xcalloc (max_uid, sizeof (basic_block));
5179 basic_block *end = (basic_block *)
5180 xcalloc (max_uid, sizeof (basic_block));
5181 enum bb_state *in_bb_p = (enum bb_state *)
5182 xcalloc (max_uid, sizeof (enum bb_state));
5183
5184 for (i = n_basic_blocks - 1; i >= 0; i--)
5185 {
5186 basic_block bb = BASIC_BLOCK (i);
5187 rtx x;
5188
5189 start[INSN_UID (bb->head)] = bb;
5190 end[INSN_UID (bb->end)] = bb;
5191 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5192 {
5193 enum bb_state state = IN_MULTIPLE_BB;
5194 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
5195 state = IN_ONE_BB;
5196 in_bb_p[INSN_UID (x)] = state;
5197
5198 if (x == bb->end)
5199 break;
5200 }
5201 }
5202
5203 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5204 {
5205 int did_output;
5206 basic_block bb;
5207
5208 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5209 {
5210 fprintf (outf, ";; Start of basic block %d, registers live:",
5211 bb->index);
5212 dump_regset (bb->global_live_at_start, outf);
5213 putc ('\n', outf);
5214 }
5215
5216 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
5217 && GET_CODE (tmp_rtx) != NOTE
5218 && GET_CODE (tmp_rtx) != BARRIER)
5219 fprintf (outf, ";; Insn is not within a basic block\n");
5220 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
5221 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5222
5223 did_output = print_rtl_single (outf, tmp_rtx);
5224
5225 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5226 fprintf (outf, ";; End of basic block %d\n", bb->index);
5227
5228 if (did_output)
5229 putc ('\n', outf);
5230 }
5231
5232 free (start);
5233 free (end);
5234 free (in_bb_p);
5235 }
5236
5237 if (current_function_epilogue_delay_list != 0)
5238 {
5239 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5240 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5241 tmp_rtx = XEXP (tmp_rtx, 1))
5242 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5243 }
5244 }
5245
5246 /* Compute dominator relationships using new flow graph structures. */
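
/* The dominator sets computed below are the maximal fixed point of

       dom (b) = { b } UNION ( INTERSECT over each pred p of b : dom (p) )

   starting from the optimistic all-ones solution, with the
   intersection taken to be the null set for successors of the entry
   block.  Post dominators are the same computation over the reversed
   graph.  */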
5247 void
5248 compute_flow_dominators (dominators, post_dominators)
5249 sbitmap *dominators;
5250 sbitmap *post_dominators;
5251 {
5252 int bb;
5253 sbitmap *temp_bitmap;
5254 edge e;
5255 basic_block *worklist, *tos;
5256
5257 /* Allocate a worklist array/queue. Entries are only added to the
5258 list if they were not already on the list. So the size is
5259 bounded by the number of basic blocks. */
5260 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block)
5261 * n_basic_blocks);
5262
5263 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5264 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5265
5266 if (dominators)
5267 {
5268 /* The optimistic setting of dominators requires us to put every
5269 block on the work list initially. */
5270 for (bb = 0; bb < n_basic_blocks; bb++)
5271 {
5272 *tos++ = BASIC_BLOCK (bb);
5273 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5274 }
5275
5276 /* We want a maximal solution, so initially assume everything dominates
5277 everything else. */
5278 sbitmap_vector_ones (dominators, n_basic_blocks);
5279
5280 /* Mark successors of the entry block so we can identify them below. */
5281 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5282 e->dest->aux = ENTRY_BLOCK_PTR;
5283
5284 /* Iterate until the worklist is empty. */
5285 while (tos != worklist)
5286 {
5287 /* Take the first entry off the worklist. */
5288 basic_block b = *--tos;
5289 bb = b->index;
5290
5291 /* Compute the intersection of the dominators of all the
5292 predecessor blocks.
5293
5294 If one of the predecessor blocks is the ENTRY block, then the
5295 intersection of the dominators of the predecessor blocks is
5296 defined as the null set. We can identify such blocks by the
5297 special value in the AUX field in the block structure. */
5298 if (b->aux == ENTRY_BLOCK_PTR)
5299 {
5300 /* Do not clear the aux field for blocks which are
5301 successors of the ENTRY block. That way we never
5302 add them to the worklist again.
5303
5304 The intersection of dominators of the preds of this block is
5305 defined as the null set. */
5306 sbitmap_zero (temp_bitmap[bb]);
5307 }
5308 else
5309 {
5310 /* Clear the aux field of this block so it can be added to
5311 the worklist again if necessary. */
5312 b->aux = NULL;
5313 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5314 }
5315
5316 /* Make sure each block always dominates itself. */
5317 SET_BIT (temp_bitmap[bb], bb);
5318
5319 /* If the out state of this block changed, then we need to
5320 add the successors of this block to the worklist if they
5321 are not already on the worklist. */
5322 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5323 {
5324 for (e = b->succ; e; e = e->succ_next)
5325 {
5326 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
5327 {
5328 *tos++ = e->dest;
5329 e->dest->aux = e;
5330 }
5331 }
5332 }
5333 }
5334 }
5335
5336 if (post_dominators)
5337 {
5338 /* The optimistic setting of post dominators requires us to put every
5339 block on the work list initially. */
5340 for (bb = 0; bb < n_basic_blocks; bb++)
5341 {
5342 *tos++ = BASIC_BLOCK (bb);
5343 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5344 }
5345
5346 /* We want a maximal solution, so initially assume everything post
5347 dominates everything else. */
5348 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5349
5350 /* Mark predecessors of the exit block so we can identify them below. */
5351 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5352 e->src->aux = EXIT_BLOCK_PTR;
5353
5354 /* Iterate until the worklist is empty. */
5355 while (tos != worklist)
5356 {
5357 /* Take the first entry off the worklist. */
5358 basic_block b = *--tos;
5359 bb = b->index;
5360
5361 /* Compute the intersection of the post dominators of all the
5362 successor blocks.
5363
5364 If one of the successor blocks is the EXIT block, then the
5365 intersection of the post dominators of the successor blocks is
5366 defined as the null set. We can identify such blocks by the
5367 special value in the AUX field in the block structure. */
5368 if (b->aux == EXIT_BLOCK_PTR)
5369 {
5370 /* Do not clear the aux field for blocks which are
5371 predecessors of the EXIT block. That way we never
5372 add them to the worklist again.
5373
5374 The intersection of post dominators of the succs of this block is
5375 defined as the null set. */
5376 sbitmap_zero (temp_bitmap[bb]);
5377 }
5378 else
5379 {
5380 /* Clear the aux field of this block so it can be added to
5381 the worklist again if necessary. */
5382 b->aux = NULL;
5383 sbitmap_intersection_of_succs (temp_bitmap[bb],
5384 post_dominators, bb);
5385 }
5386
5387 /* Make sure each block always post dominates itself. */
5388 SET_BIT (temp_bitmap[bb], bb);
5389
5390 /* If the out state of this block changed, then we need to
5391 add the predecessors of this block to the worklist if they
5392 are not already on the worklist. */
5393 if (sbitmap_a_and_b (post_dominators[bb],
5394 post_dominators[bb],
5395 temp_bitmap[bb]))
5396 {
5397 for (e = b->pred; e; e = e->pred_next)
5398 {
5399 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
5400 {
5401 *tos++ = e->src;
5402 e->src->aux = e;
5403 }
5404 }
5405 }
5406 }
5407 }
5408 free (temp_bitmap);
5409 }
5410
5411 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
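
/* For example (a sketch), if dom (4) = {0, 2, 4} and dom (2) = {0, 2},
   then tmp (4) starts as {0, 2}; subtracting tmp (2) = {0} leaves {2},
   so idom[4] = 2.  The immediate dominator is the one strict dominator
   that does not dominate any other strict dominator of the block.  */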
5412
5413 void
5414 compute_immediate_dominators (idom, dominators)
5415 int *idom;
5416 sbitmap *dominators;
5417 {
5418 sbitmap *tmp;
5419 int b;
5420
5421 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5422
5423 /* Begin with tmp(n) = dom(n) - { n }. */
5424 for (b = n_basic_blocks; --b >= 0; )
5425 {
5426 sbitmap_copy (tmp[b], dominators[b]);
5427 RESET_BIT (tmp[b], b);
5428 }
5429
5430 /* Subtract out all of our dominator's dominators. */
5431 for (b = n_basic_blocks; --b >= 0; )
5432 {
5433 sbitmap tmp_b = tmp[b];
5434 int s;
5435
5436 for (s = n_basic_blocks; --s >= 0; )
5437 if (TEST_BIT (tmp_b, s))
5438 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
5439 }
5440
5441 /* Find the one bit set in the bitmap and put it in the output array. */
5442 for (b = n_basic_blocks; --b >= 0; )
5443 {
5444 int t;
5445 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
5446 }
5447
5448 sbitmap_vector_free (tmp);
5449 }
5450
5451 /* Count for a single SET rtx, X. */
5452
5453 static void
5454 count_reg_sets_1 (x)
5455 rtx x;
5456 {
5457 register int regno;
5458 register rtx reg = SET_DEST (x);
5459
5460 /* Find the register that's set/clobbered. */
5461 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
5462 || GET_CODE (reg) == SIGN_EXTRACT
5463 || GET_CODE (reg) == STRICT_LOW_PART)
5464 reg = XEXP (reg, 0);
5465
5466 if (GET_CODE (reg) == PARALLEL
5467 && GET_MODE (reg) == BLKmode)
5468 {
5469 register int i;
5470 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5471 count_reg_sets_1 (XVECEXP (reg, 0, i));
5472 return;
5473 }
5474
5475 if (GET_CODE (reg) == REG)
5476 {
5477 regno = REGNO (reg);
5478 if (regno >= FIRST_PSEUDO_REGISTER)
5479 {
5480 /* Count (weighted) references, stores, etc. This counts a
5481 register twice if it is modified, but that is correct. */
5482 REG_N_SETS (regno)++;
5483 REG_N_REFS (regno) += loop_depth + 1;
5484 }
5485 }
5486 }
5487
5488 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
5489 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
5490
5491 static void
5492 count_reg_sets (x)
5493 rtx x;
5494 {
5495 register RTX_CODE code = GET_CODE (x);
5496
5497 if (code == SET || code == CLOBBER)
5498 count_reg_sets_1 (x);
5499 else if (code == PARALLEL)
5500 {
5501 register int i;
5502 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5503 {
5504 code = GET_CODE (XVECEXP (x, 0, i));
5505 if (code == SET || code == CLOBBER)
5506 count_reg_sets_1 (XVECEXP (x, 0, i));
5507 }
5508 }
5509 }
5510
5511 /* Increment REG_N_REFS by the current loop depth for each register reference
5512 found in X. */
5513
5514 static void
5515 count_reg_references (x)
5516 rtx x;
5517 {
5518 register RTX_CODE code;
5519
5520 retry:
5521 code = GET_CODE (x);
5522 switch (code)
5523 {
5524 case LABEL_REF:
5525 case SYMBOL_REF:
5526 case CONST_INT:
5527 case CONST:
5528 case CONST_DOUBLE:
5529 case PC:
5530 case ADDR_VEC:
5531 case ADDR_DIFF_VEC:
5532 case ASM_INPUT:
5533 return;
5534
5535 #ifdef HAVE_cc0
5536 case CC0:
5537 return;
5538 #endif
5539
5540 case CLOBBER:
5541 /* If we are clobbering a MEM, mark any registers inside the address
5542 as being used. */
5543 if (GET_CODE (XEXP (x, 0)) == MEM)
5544 count_reg_references (XEXP (XEXP (x, 0), 0));
5545 return;
5546
5547 case SUBREG:
5548 /* While we're here, optimize this case. */
5549 x = SUBREG_REG (x);
5550
5551 /* In case the SUBREG is not of a register, don't optimize. */
5552 if (GET_CODE (x) != REG)
5553 {
5554 count_reg_references (x);
5555 return;
5556 }
5557
5558 /* ... fall through ... */
5559
5560 case REG:
5561 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
5562 REG_N_REFS (REGNO (x)) += loop_depth + 1;
5563 return;
5564
5565 case SET:
5566 {
5567 register rtx testreg = SET_DEST (x);
5568 int mark_dest = 0;
5569
5570 /* If storing into MEM, don't show it as being used. But do
5571 show the address as being used. */
5572 if (GET_CODE (testreg) == MEM)
5573 {
5574 count_reg_references (XEXP (testreg, 0));
5575 count_reg_references (SET_SRC (x));
5576 return;
5577 }
5578
5579 /* Storing in STRICT_LOW_PART is like storing in a reg
5580 in that this SET might be dead, so ignore it in TESTREG.
5581 but in some other ways it is like using the reg.
5582
5583 Storing in a SUBREG or a bit field is like storing the entire
5584 register in that if the register's value is not used
5585 then this SET is not needed. */
5586 while (GET_CODE (testreg) == STRICT_LOW_PART
5587 || GET_CODE (testreg) == ZERO_EXTRACT
5588 || GET_CODE (testreg) == SIGN_EXTRACT
5589 || GET_CODE (testreg) == SUBREG)
5590 {
5591 /* Modifying a single register in an alternate mode
5592 does not use any of the old value. But these other
5593 ways of storing in a register do use the old value. */
5594 if (GET_CODE (testreg) == SUBREG
5595 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5596 ;
5597 else
5598 mark_dest = 1;
5599
5600 testreg = XEXP (testreg, 0);
5601 }
5602
5603 /* If this is a store into a register,
5604 recursively scan the value being stored. */
5605
5606 if ((GET_CODE (testreg) == PARALLEL
5607 && GET_MODE (testreg) == BLKmode)
5608 || GET_CODE (testreg) == REG)
5609 {
5610 count_reg_references (SET_SRC (x));
5611 if (mark_dest)
5612 count_reg_references (SET_DEST (x));
5613 return;
5614 }
5615 }
5616 break;
5617
5618 default:
5619 break;
5620 }
5621
5622 /* Recursively scan the operands of this expression. */
5623
5624 {
5625 register const char *fmt = GET_RTX_FORMAT (code);
5626 register int i;
5627
5628 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5629 {
5630 if (fmt[i] == 'e')
5631 {
5632 /* Tail recursive case: save a function call level. */
5633 if (i == 0)
5634 {
5635 x = XEXP (x, 0);
5636 goto retry;
5637 }
5638 count_reg_references (XEXP (x, i));
5639 }
5640 else if (fmt[i] == 'E')
5641 {
5642 register int j;
5643 for (j = 0; j < XVECLEN (x, i); j++)
5644 count_reg_references (XVECEXP (x, i, j));
5645 }
5646 }
5647 }
5648 }
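
/* Worked example (illustrative only; the register numbers are made up):
   for

       (set (mem (reg 70)) (plus (reg 71) (const_int 4)))

   the SET case above counts a reference to reg 70 (the address is used
   even though the MEM is the destination) and to reg 71 (part of the
   source), but records no reference for the stored-to MEM itself.  */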
5649
5650 /* Recompute register set/reference counts immediately prior to register
5651 allocation.
5652
5653 This avoids problems with set/reference counts changing to/from values
5654 which have special meanings to the register allocators.
5655
5656 Additionally, the reference counts are the primary component used by the
5657 register allocators to prioritize pseudos for allocation to hard regs.
5658 More accurate reference counts generally lead to better register allocation.
5659
5660 F is the first insn to be scanned.
5661
5662 LOOP_STEP denotes how much loop_depth should be incremented per
5663 loop nesting level in order to increase the ref count more for
5664 references in a loop.
5665
5666 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
5667 possibly other information which is used by the register allocators. */
5668
5669 void
5670 recompute_reg_usage (f, loop_step)
5671 rtx f ATTRIBUTE_UNUSED;
5672 int loop_step ATTRIBUTE_UNUSED;
5673 {
5674 rtx insn;
5675 int i, max_reg;
5676 int index;
5677
5678 /* Clear out the old data. */
5679 max_reg = max_reg_num ();
5680 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
5681 {
5682 REG_N_SETS (i) = 0;
5683 REG_N_REFS (i) = 0;
5684 }
5685
5686 /* Scan each insn in the chain and count how many times each register is
5687 set/used. */
5688 for (index = 0; index < n_basic_blocks; index++)
5689 {
5690 basic_block bb = BASIC_BLOCK (index);
5691 loop_depth = bb->loop_depth;
5692 for (insn = bb->head; insn; insn = NEXT_INSN (insn))
5693 {
5694 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5695 {
5696 rtx links;
5697
5698 /* This call will increment REG_N_SETS for each SET or CLOBBER
5699 of a register in INSN. It will also increment REG_N_REFS
5700 by the loop depth for each set of a register in INSN. */
5701 count_reg_sets (PATTERN (insn));
5702
5703 /* count_reg_sets does not detect autoincrement address modes, so
5704 detect them here by looking at the notes attached to INSN. */
5705 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5706 {
5707 if (REG_NOTE_KIND (links) == REG_INC)
5708 /* Count (weighted) references, stores, etc. This counts a
5709 register twice if it is modified, but that is correct. */
5710 REG_N_SETS (REGNO (XEXP (links, 0)))++;
5711 }
5712
5713 /* This call will increment REG_N_REFS by the current loop depth for
5714 each reference to a register in INSN. */
5715 count_reg_references (PATTERN (insn));
5716
5717 /* count_reg_references will not include counts for arguments to
5718 function calls, so detect them here by examining the
5719 CALL_INSN_FUNCTION_USAGE data. */
5720 if (GET_CODE (insn) == CALL_INSN)
5721 {
5722 rtx note;
5723
5724 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5725 note;
5726 note = XEXP (note, 1))
5727 if (GET_CODE (XEXP (note, 0)) == USE)
5728 count_reg_references (XEXP (XEXP (note, 0), 0));
5729 }
5730 }
5731 if (insn == bb->end)
5732 break;
5733 }
5734 }
5735 }
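
/* A minimal usage sketch (hypothetical, not part of the original file):
   a caller would refresh the counts for the whole function just before
   register allocation.  Note that, as the ATTRIBUTE_UNUSED markers
   above show, this version ignores both arguments and instead walks
   the basic blocks using each block's recorded loop_depth.  */
#if 0
static void
example_recount_regs ()
{
  recompute_reg_usage (get_insns (), 2);
}
#endif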
5736
5737 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5738 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
5739 of the number of registers that died. */
5740
5741 int
5742 count_or_remove_death_notes (blocks, kill)
5743 sbitmap blocks;
5744 int kill;
5745 {
5746 int i, count = 0;
5747
5748 for (i = n_basic_blocks - 1; i >= 0; --i)
5749 {
5750 basic_block bb;
5751 rtx insn;
5752
5753 if (blocks && ! TEST_BIT (blocks, i))
5754 continue;
5755
5756 bb = BASIC_BLOCK (i);
5757
5758 for (insn = bb->head; ; insn = NEXT_INSN (insn))
5759 {
5760 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5761 {
5762 rtx *pprev = &REG_NOTES (insn);
5763 rtx link = *pprev;
5764
5765 while (link)
5766 {
5767 switch (REG_NOTE_KIND (link))
5768 {
5769 case REG_DEAD:
5770 if (GET_CODE (XEXP (link, 0)) == REG)
5771 {
5772 rtx reg = XEXP (link, 0);
5773 int n;
5774
5775 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
5776 n = 1;
5777 else
5778 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
5779 count += n;
5780 }
5781 /* FALLTHRU */
5782
5783 case REG_UNUSED:
5784 if (kill)
5785 {
5786 rtx next = XEXP (link, 1);
5787 free_EXPR_LIST_node (link);
5788 *pprev = link = next;
5789 break;
5790 }
5791 /* FALLTHRU */
5792
5793 default:
5794 pprev = &XEXP (link, 1);
5795 link = *pprev;
5796 break;
5797 }
5798 }
5799 }
5800
5801 if (insn == bb->end)
5802 break;
5803 }
5804 }
5805
5806 return count;
5807 }
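
/* Illustrative sketch (hypothetical helper): with KILL == 0 the call
   only counts registers mentioned in REG_DEAD notes; with KILL == 1 it
   also frees the REG_DEAD and REG_UNUSED notes.  A NULL BLOCKS
   argument means every basic block.  */
#if 0
static int
example_strip_death_notes ()
{
  int died = count_or_remove_death_notes (NULL, 0);
  count_or_remove_death_notes (NULL, 1);
  return died;
}
#endif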
5808
5809 /* Record INSN's block as BB. */
5810
5811 void
5812 set_block_for_insn (insn, bb)
5813 rtx insn;
5814 basic_block bb;
5815 {
5816 size_t uid = INSN_UID (insn);
5817 if (uid >= basic_block_for_insn->num_elements)
5818 {
5819 int new_size;
5820
5821 /* Add one-eighth the size so we don't keep calling xrealloc. */
5822 new_size = uid + (uid + 7) / 8;
5823
5824 VARRAY_GROW (basic_block_for_insn, new_size);
5825 }
5826 VARRAY_BB (basic_block_for_insn, uid) = bb;
5827 }
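
/* Illustration of the growth policy above (the UID is made up): if UID
   1600 overflows the array, it is grown to 1600 + (1600 + 7) / 8
   == 1800 elements, leaving roughly one-eighth slack so that a run of
   newly created insns does not call xrealloc for every single one.  */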
5828
5829 /* Record INSN's block number as BB. */
5830 /* ??? This has got to go. */
5831
5832 void
5833 set_block_num (insn, bb)
5834 rtx insn;
5835 int bb;
5836 {
5837 set_block_for_insn (insn, BASIC_BLOCK (bb));
5838 }
5839 \f
5840 /* Verify CFG consistency. This function checks some CFG invariants and
5841 aborts when something is wrong. The hope is that it will help to
5842 convert many optimization passes to keep the CFG consistent.
5843
5844 Currently it performs the following checks:
5845
5846 - test head/end pointers
5847 - overlapping of basic blocks
5848 - edge list correctness
5849 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5850 - tails of basic blocks (ensure that the boundary is necessary)
5851 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5852 and NOTE_INSN_BASIC_BLOCK
5853 - check that all insns are in the basic blocks
5854 (except the switch handling code, barriers and notes)
5855 - check that all returns are followed by barriers
5856
5857 In the future it can be extended to check a lot of other stuff as well
5858 (reachability of basic blocks, life information, etc.). */
5859
5860 void
5861 verify_flow_info ()
5862 {
5863 const int max_uid = get_max_uid ();
5864 const rtx rtx_first = get_insns ();
5865 basic_block *bb_info;
5866 rtx x;
5867 int i, err = 0;
5868
5869 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
5870
5871 /* The first pass checks head/end pointers and sets the bb_info array
5872 used by later passes. */
5873 for (i = n_basic_blocks - 1; i >= 0; i--)
5874 {
5875 basic_block bb = BASIC_BLOCK (i);
5876
5877 /* Check the head pointer and make sure that it points into the
5878 insn list. */
5879 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5880 if (x == bb->head)
5881 break;
5882 if (!x)
5883 {
5884 error ("Head insn %d for block %d not found in the insn stream.",
5885 INSN_UID (bb->head), bb->index);
5886 err = 1;
5887 }
5888
5889 /* Check the end pointer and make sure that it points into the
5890 insn list. */
5891 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5892 {
5893 if (bb_info[INSN_UID (x)] != NULL)
5894 {
5895 error ("Insn %d is in multiple basic blocks (%d and %d)",
5896 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
5897 err = 1;
5898 }
5899 bb_info[INSN_UID (x)] = bb;
5900
5901 if (x == bb->end)
5902 break;
5903 }
5904 if (!x)
5905 {
5906 error ("End insn %d for block %d not found in the insn stream.",
5907 INSN_UID (bb->end), bb->index);
5908 err = 1;
5909 }
5910 }
5911
5912 /* Now check the basic blocks (boundaries etc.). */
5913 for (i = n_basic_blocks - 1; i >= 0; i--)
5914 {
5915 basic_block bb = BASIC_BLOCK (i);
5916 /* Check correctness of edge lists. */
5917 edge e;
5918
5919 e = bb->succ;
5920 while (e)
5921 {
5922 if (e->src != bb)
5923 {
5924 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
5925 bb->index);
5926 fprintf (stderr, "Predecessor: ");
5927 dump_edge_info (stderr, e, 0);
5928 fprintf (stderr, "\nSuccessor: ");
5929 dump_edge_info (stderr, e, 1);
5930 fflush (stderr);
5931 err = 1;
5932 }
5933 if (e->dest != EXIT_BLOCK_PTR)
5934 {
5935 edge e2 = e->dest->pred;
5936 while (e2 && e2 != e)
5937 e2 = e2->pred_next;
5938 if (!e2)
5939 {
5940 error ("Basic block %i edge lists are corrupted", bb->index);
5941 err = 1;
5942 }
5943 }
5944 e = e->succ_next;
5945 }
5946
5947 e = bb->pred;
5948 while (e)
5949 {
5950 if (e->dest != bb)
5951 {
5952 error ("Basic block %d pred edge is corrupted", bb->index);
5953 fputs ("Predecessor: ", stderr);
5954 dump_edge_info (stderr, e, 0);
5955 fputs ("\nSuccessor: ", stderr);
5956 dump_edge_info (stderr, e, 1);
5957 fputc ('\n', stderr);
5958 err = 1;
5959 }
5960 if (e->src != ENTRY_BLOCK_PTR)
5961 {
5962 edge e2 = e->src->succ;
5963 while (e2 && e2 != e)
5964 e2 = e2->succ_next;
5965 if (!e2)
5966 {
5967 error ("Basic block %i edge lists are corrupted", bb->index);
5968 err = 1;
5969 }
5970 }
5971 e = e->pred_next;
5972 }
5973
5974 /* OK, pointers are correct. Now check the header of the basic
5975 block. It ought to contain an optional CODE_LABEL followed
5976 by the NOTE_INSN_BASIC_BLOCK note. */
5977 x = bb->head;
5978 if (GET_CODE (x) == CODE_LABEL)
5979 {
5980 if (bb->end == x)
5981 {
5982 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
5983 bb->index);
5984 err = 1;
5985 }
5986 x = NEXT_INSN (x);
5987 }
5988 if (GET_CODE (x) != NOTE
5989 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
5990 || NOTE_BASIC_BLOCK (x) != bb)
5991 {
5992 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
5993 bb->index);
5994 err = 1;
5995 }
5996
5997 if (bb->end == x)
5998 {
5999 /* Do checks for empty blocks here. */
6000 }
6001 else
6002 {
6003 x = NEXT_INSN (x);
6004 while (x)
6005 {
6006 if (GET_CODE (x) == NOTE
6007 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
6008 {
6009 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6010 INSN_UID (x), bb->index);
6011 err = 1;
6012 }
6013
6014 if (x == bb->end)
6015 break;
6016
6017 if (GET_CODE (x) == JUMP_INSN
6018 || GET_CODE (x) == CODE_LABEL
6019 || GET_CODE (x) == BARRIER)
6020 {
6021 error ("In basic block %d:", bb->index);
6022 fatal_insn ("Flow control insn inside a basic block", x);
6023 }
6024
6025 x = NEXT_INSN (x);
6026 }
6027 }
6028 }
6029
6030 x = rtx_first;
6031 while (x)
6032 {
6033 if (!bb_info[INSN_UID (x)])
6034 {
6035 switch (GET_CODE (x))
6036 {
6037 case BARRIER:
6038 case NOTE:
6039 break;
6040
6041 case CODE_LABEL:
6042 /* An addr_vec is placed outside any basic block. */
6043 if (NEXT_INSN (x)
6044 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6045 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6046 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6047 {
6048 x = NEXT_INSN (x);
6049 }
6050
6051 /* But in any case, non-deletable labels can appear anywhere. */
6052 break;
6053
6054 default:
6055 fatal_insn ("Insn outside basic block", x);
6056 }
6057 }
6058
6059 if (GET_RTX_CLASS (GET_CODE (x)) == 'i'
6060 && GET_CODE (x) == JUMP_INSN
6061 && returnjump_p (x) && ! condjump_p (x)
6062 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
6063 fatal_insn ("Return not followed by barrier", x);
6064
6065 x = NEXT_INSN (x);
6066 }
6067
6068 if (err)
6069 abort ();
6070
6071 /* Clean up. */
6072 free (bb_info);
6073 }
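
/* Illustrative only: a CFG-transforming pass might call this right
   after its rewrite while debugging; the function reports problems
   through error () and fatal_insn () and aborts if any invariant was
   violated.  */
#if 0
static void
example_after_cfg_change ()
{
  verify_flow_info ();
}
#endif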
6074 \f
6075 /* Functions to access an edge list with a vector representation.
6076 Enough data is kept such that given an index number, the
6077 pred and succ that edge represents can be determined, or
6078 given a pred and a succ, its index number can be returned.
6079 This allows algorithms which consume a lot of memory to
6080 represent the normally full matrix of edge (pred,succ) with a
6081 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6082 wasted space in the client code due to sparse flow graphs. */
6083
6084 /* This function initializes the edge list. Basically the entire
6085 flowgraph is processed, and all edges are assigned a number,
6086 and the data structure is filled in. */
6087 struct edge_list *
6088 create_edge_list ()
6089 {
6090 struct edge_list *elist;
6091 edge e;
6092 int num_edges;
6093 int x;
6094 int block_count;
6095
6096 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6097
6098 num_edges = 0;
6099
6100 /* Determine the number of edges in the flow graph by counting successor
6101 edges on each basic block. */
6102 for (x = 0; x < n_basic_blocks; x++)
6103 {
6104 basic_block bb = BASIC_BLOCK (x);
6105
6106 for (e = bb->succ; e; e = e->succ_next)
6107 num_edges++;
6108 }
6109 /* Don't forget successors of the entry block. */
6110 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6111 num_edges++;
6112
6113 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6114 elist->num_blocks = block_count;
6115 elist->num_edges = num_edges;
6116 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6117
6118 num_edges = 0;
6119
6120 /* Follow successors of the entry block, and register these edges. */
6121 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6122 {
6123 elist->index_to_edge[num_edges] = e;
6124 num_edges++;
6125 }
6126
6127 for (x = 0; x < n_basic_blocks; x++)
6128 {
6129 basic_block bb = BASIC_BLOCK (x);
6130
6131 /* Follow all successors of blocks, and register these edges. */
6132 for (e = bb->succ; e; e = e->succ_next)
6133 {
6134 elist->index_to_edge[num_edges] = e;
6135 num_edges++;
6136 }
6137 }
6138 return elist;
6139 }
6140
6141 /* This function frees the memory associated with an edge list. */
6142 void
6143 free_edge_list (elist)
6144 struct edge_list *elist;
6145 {
6146 if (elist)
6147 {
6148 free (elist->index_to_edge);
6149 free (elist);
6150 }
6151 }
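
/* A usage sketch for the edge list API above (hypothetical helper):
   build the compressed vector, map edges to indices and back, then
   release the memory.  The choice of blocks to query is arbitrary.  */
#if 0
static void
example_edge_list (f)
     FILE *f;
{
  struct edge_list *elist = create_edge_list ();
  int x;

  for (x = 0; x < NUM_EDGES (elist); x++)
    if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
      fprintf (f, "edge %d leaves the entry block\n", x);

  /* EDGE_INDEX yields EDGE_INDEX_NO_EDGE when no such edge exists.  */
  if (n_basic_blocks > 0
      && (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (0))
          != EDGE_INDEX_NO_EDGE))
    fprintf (f, "the entry block falls through to block 0\n");

  free_edge_list (elist);
}
#endif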
6152
6153 /* This function provides debug output showing an edge list. */
6154 void
6155 print_edge_list (f, elist)
6156 FILE *f;
6157 struct edge_list *elist;
6158 {
6159 int x;
6160 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6161 elist->num_blocks - 2, elist->num_edges);
6162
6163 for (x = 0; x < elist->num_edges; x++)
6164 {
6165 fprintf (f, " %-4d - edge(", x);
6166 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6167 fprintf (f, "entry,");
6168 else
6169 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6170
6171 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6172 fprintf (f, "exit)\n");
6173 else
6174 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6175 }
6176 }
6177
6178 /* This function provides an internal consistency check of an edge list,
6179 verifying that all edges are present, and that there are no
6180 extra edges. */
6181 void
6182 verify_edge_list (f, elist)
6183 FILE *f;
6184 struct edge_list *elist;
6185 {
6186 int x, pred, succ, index;
6187 edge e;
6188
6189 for (x = 0; x < n_basic_blocks; x++)
6190 {
6191 basic_block bb = BASIC_BLOCK (x);
6192
6193 for (e = bb->succ; e; e = e->succ_next)
6194 {
6195 pred = e->src->index;
6196 succ = e->dest->index;
6197 index = EDGE_INDEX (elist, e->src, e->dest);
6198 if (index == EDGE_INDEX_NO_EDGE)
6199 {
6200 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6201 continue;
6202 }
6203 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6204 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6205 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6206 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6207 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6208 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6209 }
6210 }
6211 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6212 {
6213 pred = e->src->index;
6214 succ = e->dest->index;
6215 index = EDGE_INDEX (elist, e->src, e->dest);
6216 if (index == EDGE_INDEX_NO_EDGE)
6217 {
6218 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6219 continue;
6220 }
6221 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6222 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6223 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6224 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6225 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6226 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6227 }
6228 /* We've verified that all the edges are in the list, now let's make sure
6229 there are no spurious edges in the list. */
6230
6231 for (pred = 0 ; pred < n_basic_blocks; pred++)
6232 for (succ = 0 ; succ < n_basic_blocks; succ++)
6233 {
6234 basic_block p = BASIC_BLOCK (pred);
6235 basic_block s = BASIC_BLOCK (succ);
6236
6237 int found_edge = 0;
6238
6239 for (e = p->succ; e; e = e->succ_next)
6240 if (e->dest == s)
6241 {
6242 found_edge = 1;
6243 break;
6244 }
6245 for (e = s->pred; e; e = e->pred_next)
6246 if (e->src == p)
6247 {
6248 found_edge = 1;
6249 break;
6250 }
6251 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6252 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6253 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6254 pred, succ);
6255 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6256 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6257 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6258 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6259 BASIC_BLOCK (succ)));
6260 }
6261 for (succ = 0 ; succ < n_basic_blocks; succ++)
6262 {
6263 basic_block p = ENTRY_BLOCK_PTR;
6264 basic_block s = BASIC_BLOCK (succ);
6265
6266 int found_edge = 0;
6267
6268 for (e = p->succ; e; e = e->succ_next)
6269 if (e->dest == s)
6270 {
6271 found_edge = 1;
6272 break;
6273 }
6274 for (e = s->pred; e; e = e->pred_next)
6275 if (e->src == p)
6276 {
6277 found_edge = 1;
6278 break;
6279 }
6280 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6281 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6282 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6283 succ);
6284 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6285 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6286 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6287 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6288 BASIC_BLOCK (succ)));
6289 }
6290 for (pred = 0 ; pred < n_basic_blocks; pred++)
6291 {
6292 basic_block p = BASIC_BLOCK (pred);
6293 basic_block s = EXIT_BLOCK_PTR;
6294
6295 int found_edge = 0;
6296
6297 for (e = p->succ; e; e = e->succ_next)
6298 if (e->dest == s)
6299 {
6300 found_edge = 1;
6301 break;
6302 }
6303 for (e = s->pred; e; e = e->pred_next)
6304 if (e->src == p)
6305 {
6306 found_edge = 1;
6307 break;
6308 }
6309 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6310 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6311 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6312 pred);
6313 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6314 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6315 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6316 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6317 EXIT_BLOCK_PTR));
6318 }
6319 }
6320
6321 /* This routine will determine what, if any, edge there is between
6322 a specified predecessor and successor. */
6323
6324 int
6325 find_edge_index (edge_list, pred, succ)
6326 struct edge_list *edge_list;
6327 basic_block pred, succ;
6328 {
6329 int x;
6330 for (x = 0; x < NUM_EDGES (edge_list); x++)
6331 {
6332 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6333 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6334 return x;
6335 }
6336 return (EDGE_INDEX_NO_EDGE);
6337 }
6338
6339 /* This function will remove an edge from the flow graph. */
6340 void
6341 remove_edge (e)
6342 edge e;
6343 {
6344 edge last_pred = NULL;
6345 edge last_succ = NULL;
6346 edge tmp;
6347 basic_block src, dest;
6348 src = e->src;
6349 dest = e->dest;
6350 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6351 last_succ = tmp;
6352
6353 if (!tmp)
6354 abort ();
6355 if (last_succ)
6356 last_succ->succ_next = e->succ_next;
6357 else
6358 src->succ = e->succ_next;
6359
6360 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6361 last_pred = tmp;
6362
6363 if (!tmp)
6364 abort ();
6365 if (last_pred)
6366 last_pred->pred_next = e->pred_next;
6367 else
6368 dest->pred = e->pred_next;
6369
6370 n_edges--;
6371 free (e);
6372 }
6373
6374 /* This routine will remove any fake successor edges for a basic block.
6375 When the edge is removed, it is also removed from whatever predecessor
6376 list it is in. */
6377 static void
6378 remove_fake_successors (bb)
6379 basic_block bb;
6380 {
6381 edge e;
6382 for (e = bb->succ; e ; )
6383 {
6384 edge tmp = e;
6385 e = e->succ_next;
6386 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6387 remove_edge (tmp);
6388 }
6389 }
6390
6391 /* This routine will remove all fake edges from the flow graph. If
6392 we remove all fake successors, it will automatically remove all
6393 fake predecessors. */
6394 void
6395 remove_fake_edges ()
6396 {
6397 int x;
6398
6399 for (x = 0; x < n_basic_blocks; x++)
6400 remove_fake_successors (BASIC_BLOCK (x));
6401
6402 /* We've handled all successors except the entry block's. */
6403 remove_fake_successors (ENTRY_BLOCK_PTR);
6404 }
6405
6406 /* This function will add a fake edge between any block which has no
6407 successors, and the exit block. Some data flow equations require these
6408 edges to exist. */
6409 void
6410 add_noreturn_fake_exit_edges ()
6411 {
6412 int x;
6413
6414 for (x = 0; x < n_basic_blocks; x++)
6415 if (BASIC_BLOCK (x)->succ == NULL)
6416 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
6417 }
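
/* Illustrative pattern (hypothetical helper): dataflow problems that
   need every block to reach the exit bracket their computation with
   the two calls below; removing the fake successors also unlinks the
   edges from the predecessor lists of their destinations.  */
#if 0
static void
example_dataflow_with_fake_edges ()
{
  add_noreturn_fake_exit_edges ();
  /* ... solve the dataflow equations here ...  */
  remove_fake_edges ();
}
#endif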
6418 \f
6419 /* Dump the list of basic blocks in the bitmap NODES. */
6420 static void
6421 flow_nodes_print (str, nodes, file)
6422 const char *str;
6423 const sbitmap nodes;
6424 FILE *file;
6425 {
6426 int node;
6427
6428 fprintf (file, "%s { ", str);
6429 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6430 fputs ("}\n", file);
6431 }
6432
6433
6434 /* Dump the list of exiting edges in the array EDGES. */
6435 static void
6436 flow_exits_print (str, edges, num_edges, file)
6437 const char *str;
6438 const edge *edges;
6439 int num_edges;
6440 FILE *file;
6441 {
6442 int i;
6443
6444 fprintf (file, "%s { ", str);
6445 for (i = 0; i < num_edges; i++)
6446 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6447 fputs ("}\n", file);
6448 }
6449
6450
6451 /* Dump loop related CFG information. */
6452 static void
6453 flow_loops_cfg_dump (loops, file)
6454 const struct loops *loops;
6455 FILE *file;
6456 {
6457 int i;
6458
6459 if (! loops->num || ! file || ! loops->cfg.dom)
6460 return;
6461
6462 for (i = 0; i < n_basic_blocks; i++)
6463 {
6464 edge succ;
6465
6466 fprintf (file, ";; %d succs { ", i);
6467 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
6468 fprintf (file, "%d ", succ->dest->index);
6469 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
6470 }
6471
6472
6473 /* Dump the DFS node order. */
6474 if (loops->cfg.dfs_order)
6475 {
6476 fputs (";; DFS order: ", file);
6477 for (i = 0; i < n_basic_blocks; i++)
6478 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
6479 fputs ("\n", file);
6480 }
6481 }
6482
6483
6484 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
6485 static int
6486 flow_loop_nested_p (outer, loop)
6487 struct loop *outer;
6488 struct loop *loop;
6489 {
6490 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
6491 }
6492
6493
6494 /* Dump the loop information specified by LOOPS to the stream FILE. */
6495 void
6496 flow_loops_dump (loops, file, verbose)
6497 const struct loops *loops;
6498 FILE *file;
6499 int verbose;
6500 {
6501 int i;
6502 int num_loops;
6503
6504 num_loops = loops->num;
6505 if (! num_loops || ! file)
6506 return;
6507
6508 fprintf (file, ";; %d loops found, %d levels\n",
6509 num_loops, loops->levels);
6510
6511 for (i = 0; i < num_loops; i++)
6512 {
6513 struct loop *loop = &loops->array[i];
6514
6515 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
6516 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
6517 loop->header->index, loop->latch->index,
6518 loop->pre_header ? loop->pre_header->index : -1,
6519 loop->depth, loop->level,
6520 (long) (loop->outer ? (loop->outer - loops->array) : -1));
6521 fprintf (file, ";; %d", loop->num_nodes);
6522 flow_nodes_print (" nodes", loop->nodes, file);
6523 fprintf (file, ";; %d", loop->num_exits);
6524 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
6525
6526 if (loop->shared)
6527 {
6528 int j;
6529
6530 for (j = 0; j < i; j++)
6531 {
6532 struct loop *oloop = &loops->array[j];
6533
6534 if (loop->header == oloop->header)
6535 {
6536 int disjoint;
6537 int smaller;
6538
6539 smaller = loop->num_nodes < oloop->num_nodes;
6540
6541 /* If the smaller of LOOP and OLOOP is not nested inside the
6542 larger (i.e. their union differs from the larger of the two),
6543 then LOOP and OLOOP must be disjoint. */
6544 disjoint = ! flow_loop_nested_p (smaller ? oloop : loop,
6545 smaller ? loop : oloop);
6546 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
6547 loop->header->index, i, j,
6548 disjoint ? "disjoint" : "nested");
6549 }
6550 }
6551 }
6552
6553 if (verbose)
6554 {
6555 /* Print diagnostics to compare our concept of a loop with
6556 what the loop notes say. */
6557 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
6558 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
6559 != NOTE_INSN_LOOP_BEG)
6560 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
6561 INSN_UID (PREV_INSN (loop->first->head)));
6562 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
6563 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
6564 != NOTE_INSN_LOOP_END)
6565 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
6566 INSN_UID (NEXT_INSN (loop->last->end)));
6567 }
6568 }
6569
6570 if (verbose)
6571 flow_loops_cfg_dump (loops, file);
6572 }
6573
6574
6575 /* Free all the memory allocated for LOOPS. */
6576 void
6577 flow_loops_free (loops)
6578 struct loops *loops;
6579 {
6580 if (loops->array)
6581 {
6582 int i;
6583
6584 if (! loops->num)
6585 abort ();
6586
6587 /* Free the loop descriptors. */
6588 for (i = 0; i < loops->num; i++)
6589 {
6590 struct loop *loop = &loops->array[i];
6591
6592 if (loop->nodes)
6593 sbitmap_free (loop->nodes);
6594 if (loop->exits)
6595 free (loop->exits);
6596 }
6597 free (loops->array);
6598 loops->array = NULL;
6599
6600 if (loops->cfg.dom)
6601 sbitmap_vector_free (loops->cfg.dom);
6602 if (loops->cfg.dfs_order)
6603 free (loops->cfg.dfs_order);
6604
6605 sbitmap_free (loops->shared_headers);
6606 }
6607 }
6608
6609
6610 /* Find the exits from the loop using the bitmap of loop nodes NODES
6611 and store them in the EXITS array. Return the number of exits from the
6612 loop. */
6613 static int
6614 flow_loop_exits_find (nodes, exits)
6615 const sbitmap nodes;
6616 edge **exits;
6617 {
6618 edge e;
6619 int node;
6620 int num_exits;
6621
6622 *exits = NULL;
6623
6624 /* Check all nodes within the loop to see if there are any
6625 successors not in the loop. Note that a node may have multiple
6626 exiting edges. */
6627 num_exits = 0;
6628 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6629 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6630 {
6631 basic_block dest = e->dest;
6632
6633 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6634 num_exits++;
6635 }
6636 });
6637
6638 if (! num_exits)
6639 return 0;
6640
6641 *exits = (edge *) xmalloc (num_exits * sizeof (edge));
6642
6643 /* Store all exiting edges into an array. */
6644 num_exits = 0;
6645 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6646 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6647 {
6648 basic_block dest = e->dest;
6649
6650 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6651 (*exits)[num_exits++] = e;
6652 }
6653 });
6654
6655 return num_exits;
6656 }
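
/* The function above is an instance of the usual count-then-fill
   idiom: the first EXECUTE_IF_SET_IN_SBITMAP walk only sizes the
   array and the second stores the same edges into it, so both walks
   must apply exactly the same exit test.  */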
6657
6658
6659 /* Find the nodes contained within the loop with header HEADER and
6660 latch LATCH and store them in NODES. Return the number of nodes within
6661 the loop. */
6662 static int
6663 flow_loop_nodes_find (header, latch, nodes)
6664 basic_block header;
6665 basic_block latch;
6666 sbitmap nodes;
6667 {
6668 basic_block *stack;
6669 int sp;
6670 int num_nodes = 0;
6671
6672 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
6673 sp = 0;
6674
6675 /* Start with only the loop header in the set of loop nodes. */
6676 sbitmap_zero (nodes);
6677 SET_BIT (nodes, header->index);
6678 num_nodes++;
6679 header->loop_depth++;
6680
6681 /* Push the loop latch on to the stack. */
6682 if (! TEST_BIT (nodes, latch->index))
6683 {
6684 SET_BIT (nodes, latch->index);
6685 latch->loop_depth++;
6686 num_nodes++;
6687 stack[sp++] = latch;
6688 }
6689
6690 while (sp)
6691 {
6692 basic_block node;
6693 edge e;
6694
6695 node = stack[--sp];
6696 for (e = node->pred; e; e = e->pred_next)
6697 {
6698 basic_block ancestor = e->src;
6699
6700 /* If the ancestor is not marked as part of the loop, add it to
6701 the set of loop nodes and push it onto the stack. */
6702 if (ancestor != ENTRY_BLOCK_PTR
6703 && ! TEST_BIT (nodes, ancestor->index))
6704 {
6705 SET_BIT (nodes, ancestor->index);
6706 ancestor->loop_depth++;
6707 num_nodes++;
6708 stack[sp++] = ancestor;
6709 }
6710 }
6711 }
6712 free (stack);
6713 return num_nodes;
6714 }
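
/* Worked example (the block numbers are made up): for a loop with
   header 2 and latch 5, the walk starts from 5 and follows
   predecessor edges backwards; if 5's predecessors are 3 and 4, and
   those are reachable only from 2, NODES ends up as {2, 3, 4, 5} and
   4 is returned.  */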
6715
6716
6717 /* Compute the depth first search order of the CFG and store it in the
6718 array DFS_ORDER, marking visited nodes in a local bitmap. Returns the
6719 number of nodes visited. */
6720 static int
6721 flow_depth_first_order_compute (dfs_order)
6722 int *dfs_order;
6723 {
6724 edge e;
6725 edge *stack;
6726 int sp;
6727 int dfsnum = 0;
6728 sbitmap visited;
6729
6730 /* Allocate stack for back-tracking up CFG. */
6731 stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
6732 sp = 0;
6733
6734 /* Allocate bitmap to track nodes that have been visited. */
6735 visited = sbitmap_alloc (n_basic_blocks);
6736
6737 /* None of the nodes in the CFG have been visited yet. */
6738 sbitmap_zero (visited);
6739
6740 /* Start with the first successor edge from the entry block. */
6741 e = ENTRY_BLOCK_PTR->succ;
6742 while (e)
6743 {
6744 basic_block src = e->src;
6745 basic_block dest = e->dest;
6746
6747 /* Mark that we have visited this node. */
6748 if (src != ENTRY_BLOCK_PTR)
6749 SET_BIT (visited, src->index);
6750
6751 /* If this node has not been visited before, push the current
6752 edge on to the stack and proceed with the first successor
6753 edge of this node. */
6754 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6755 && dest->succ)
6756 {
6757 stack[sp++] = e;
6758 e = dest->succ;
6759 }
6760 else
6761 {
6762 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6763 && ! dest->succ)
6764 {
6765 /* DEST has no successors (for example, a non-returning
6766 function is called) so do not push the current edge
6767 but carry on with its next successor. */
6768 dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
6769 SET_BIT (visited, dest->index);
6770 }
6771
6772 while (! e->succ_next && src != ENTRY_BLOCK_PTR)
6773 {
6774 dfs_order[src->index] = n_basic_blocks - ++dfsnum;
6775
6776 /* Pop edge off stack. */
6777 e = stack[--sp];
6778 src = e->src;
6779 }
6780 e = e->succ_next;
6781 }
6782 }
6783 free (stack);
6784 sbitmap_free (visited);
6785
6786 /* The number of nodes visited should not be greater than
6787 n_basic_blocks. */
6788 if (dfsnum > n_basic_blocks)
6789 abort ();
6790
6791 /* If fewer nodes were visited, some nodes in the CFG are unreachable. */
6792 if (dfsnum < n_basic_blocks)
6793 abort ();
6794 return dfsnum;
6795 }
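
/* Worked example (illustrative): for a diamond CFG with edges
   0 -> {1, 2} and {1, 2} -> 3, the code assigns each block its
   reverse completion number: dfs_order[0] == 0 and dfs_order[3] == 3,
   while blocks 1 and 2 receive 1 and 2 in an order that depends on
   how the successor edges happen to be linked.  */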
6796
6797
6798 /* Return the block for the pre-header of the loop with header
6799 HEADER where DOM specifies the dominator information. Return NULL if
6800 there is no pre-header. */
6801 static basic_block
6802 flow_loop_pre_header_find (header, dom)
6803 basic_block header;
6804 const sbitmap *dom;
6805 {
6806 basic_block pre_header;
6807 edge e;
6808
6809 /* If block p is a predecessor of the header and is the only block
6810 that the header does not dominate, then it is the pre-header. */
6811 pre_header = NULL;
6812 for (e = header->pred; e; e = e->pred_next)
6813 {
6814 basic_block node = e->src;
6815
6816 if (node != ENTRY_BLOCK_PTR
6817 && ! TEST_BIT (dom[node->index], header->index))
6818 {
6819 if (pre_header == NULL)
6820 pre_header = node;
6821 else
6822 {
6823 /* There are multiple edges into the header from outside
6824 the loop so there is no pre-header block. */
6825 pre_header = NULL;
6826 break;
6827 }
6828 }
6829 }
6830 return pre_header;
6831 }
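
/* Illustrative example (made-up block numbers): if header block 4 has
   two predecessors, block 3 from outside the loop (4 does not
   dominate 3) and latch block 7 inside it (4 dominates 7), then block
   3 is returned as the pre-header; a second undominated predecessor
   would make the function return NULL instead.  */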
6832
6833
6834 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
6835 previously added. The insertion algorithm assumes that the loops
6836 are added in the order found by a depth first search of the CFG. */
6837 static void
6838 flow_loop_tree_node_add (prevloop, loop)
6839 struct loop *prevloop;
6840 struct loop *loop;
6841 {
6842
6843 if (flow_loop_nested_p (prevloop, loop))
6844 {
6845 prevloop->inner = loop;
6846 loop->outer = prevloop;
6847 return;
6848 }
6849
6850 while (prevloop->outer)
6851 {
6852 if (flow_loop_nested_p (prevloop->outer, loop))
6853 {
6854 prevloop->next = loop;
6855 loop->outer = prevloop->outer;
6856 return;
6857 }
6858 prevloop = prevloop->outer;
6859 }
6860
6861 prevloop->next = loop;
6862 loop->outer = NULL;
6863 }
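
/* Illustrative example: suppose loops are added in DFS order A, B, C,
   where B nests inside A and C is disjoint from both.  The call
   flow_loop_tree_node_add (A, B) makes B the inner child of A; the
   call flow_loop_tree_node_add (B, C) then walks outward from B,
   finds C nested in neither B nor A, and links C as A's next sibling
   with a NULL outer pointer.  */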
6864
6865
6866 /* Build the loop hierarchy tree for LOOPS. */
6867 static void
6868 flow_loops_tree_build (loops)
6869 struct loops *loops;
6870 {
6871 int i;
6872 int num_loops;
6873
6874 num_loops = loops->num;
6875 if (! num_loops)
6876 return;
6877
6878 /* Root the loop hierarchy tree with the first loop found.
6879 Since we used a depth first search this should be the
6880 outermost loop. */
6881 loops->tree = &loops->array[0];
6882 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
6883
6884 /* Add the remaining loops to the tree. */
6885 for (i = 1; i < num_loops; i++)
6886 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
6887 }
6888
6889
6890 /* Helper function to compute loop nesting depth and enclosed loop level
6891 for the natural loop specified by LOOP at the loop depth DEPTH.
6892 Returns the loop level. */
6893 static int
6894 flow_loop_level_compute (loop, depth)
6895 struct loop *loop;
6896 int depth;
6897 {
6898 struct loop *inner;
6899 int level = 1;
6900
6901 if (! loop)
6902 return 0;
6903
6904 /* Traverse loop tree assigning depth and computing level as the
6905 maximum level of all the inner loops of this loop. The loop
6906 level is equivalent to the height of the loop in the loop tree
6907 and corresponds to the number of enclosed loop levels (including
6908 itself). */
6909 for (inner = loop->inner; inner; inner = inner->next)
6910 {
6911 int ilevel;
6912
6913 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
6914
6915 if (ilevel > level)
6916 level = ilevel;
6917 }
6918 loop->level = level;
6919 loop->depth = depth;
6920 return level;
6921 }
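
/* Worked example: for a triple nest L1 { L2 { L3 } }, calling
   flow_loop_level_compute (L1, 1) assigns depths 1, 2, 3 and levels
   3, 2, 1 to L1, L2 and L3 respectively; a loop's level is the height
   of its subtree in the loop tree, counting itself.  */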
6922
6923
6924 /* Compute the loop nesting depth and enclosed loop level for the loop
6925 hierarchy tree specified by LOOPS. Return the maximum enclosed loop
6926 level. */
6927
6928 static int
6929 flow_loops_level_compute (loops)
6930 struct loops *loops;
6931 {
6932 struct loop *loop;
6933 int level;
6934 int levels = 0;
6935
6936 /* Traverse all the outer level loops. */
6937 for (loop = loops->tree; loop; loop = loop->next)
6938 {
6939 level = flow_loop_level_compute (loop, 1);
6940 if (level > levels)
6941 levels = level;
6942 }
6943 return levels;
6944 }
6945
6946
6947 /* Find all the natural loops in the function, save them in the LOOPS
6948 structure, and recalculate loop_depth information in basic block structures.
6949 Return the number of natural loops found. */
6950
6951 int
6952 flow_loops_find (loops)
6953 struct loops *loops;
6954 {
6955 int i;
6956 int b;
6957 int num_loops;
6958 edge e;
6959 sbitmap headers;
6960 sbitmap *dom;
6961 int *dfs_order;
6962
6963 loops->num = 0;
6964 loops->array = NULL;
6965 loops->tree = NULL;
6966 dfs_order = NULL;
6967
6968 /* Taking care of this degenerate case makes the rest of
6969 this code simpler. */
6970 if (n_basic_blocks == 0)
6971 return 0;
6972
6973 /* Compute the dominators. */
6974 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6975 compute_flow_dominators (dom, NULL);
6976
6977 /* Count the number of loop edges (back edges). This should be the
6978 same as the number of natural loops. Also clear the loop_depth
6979 fields; as we work through the loops of a nest we call
6980 flow_loop_nodes_find, which will increment loop_depth for nodes
6981 within the current loop, which happens to enclose inner loops. */
6982
6983 num_loops = 0;
6984 for (b = 0; b < n_basic_blocks; b++)
6985 {
6986 BASIC_BLOCK (b)->loop_depth = 0;
6987 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
6988 {
6989 basic_block latch = e->src;
6990
6991 /* Look for back edges where a predecessor is dominated
6992 by this block. A natural loop has a single entry
6993 node (header) that dominates all the nodes in the
6994 loop. It also has a single back edge to the header
6995 from a latch node. Note that multiple natural loops
6996 may share the same header. */
6997 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
6998 num_loops++;
6999 }
7000 }
7001
7002 if (num_loops)
7003 {
7004 /* Compute depth first search order of the CFG so that outer
7005 natural loops will be found before inner natural loops. */
7006 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
7007 flow_depth_first_order_compute (dfs_order);
7008
7009 /* Allocate loop structures. */
7010 loops->array
7011 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
7012
7013 headers = sbitmap_alloc (n_basic_blocks);
7014 sbitmap_zero (headers);
7015
7016 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
7017 sbitmap_zero (loops->shared_headers);
7018
7019 /* Find and record information about all the natural loops
7020 in the CFG. */
7021 num_loops = 0;
7022 for (b = 0; b < n_basic_blocks; b++)
7023 {
7024 basic_block header;
7025
7026 /* Search the nodes of the CFG in DFS order so that we can find
7027 outer loops first. */
7028 header = BASIC_BLOCK (dfs_order[b]);
7029
7030 /* Look for all the possible latch blocks for this header. */
7031 for (e = header->pred; e; e = e->pred_next)
7032 {
7033 basic_block latch = e->src;
7034
7035 /* Look for back edges where a predecessor is dominated
7036 by this block. A natural loop has a single entry
7037 node (header) that dominates all the nodes in the
7038 loop. It also has a single back edge to the header
7039 from a latch node. Note that multiple natural loops
7040 may share the same header. */
7041 if (latch != ENTRY_BLOCK_PTR
7042 && TEST_BIT (dom[latch->index], header->index))
7043 {
7044 struct loop *loop;
7045
7046 loop = loops->array + num_loops;
7047
7048 loop->header = header;
7049 loop->latch = latch;
7050
7051 /* Keep track of blocks that are loop headers so
7052 that we can tell which loops should be merged. */
7053 if (TEST_BIT (headers, header->index))
7054 SET_BIT (loops->shared_headers, header->index);
7055 SET_BIT (headers, header->index);
7056
7057 /* Find nodes contained within the loop. */
7058 loop->nodes = sbitmap_alloc (n_basic_blocks);
7059 loop->num_nodes
7060 = flow_loop_nodes_find (header, latch, loop->nodes);
7061
7062 /* Compute first and last blocks within the loop.
7063 These are often the same as the loop header and
7064 loop latch respectively, but this is not always
7065 the case. */
7066 loop->first
7067 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
7068 loop->last
7069 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
7070
7071 /* Find edges which exit the loop. Note that a node
7072 may have several exit edges. */
7073 loop->num_exits
7074 = flow_loop_exits_find (loop->nodes, &loop->exits);
7075
7076 /* Look to see if the loop has a pre-header node. */
7077 loop->pre_header
7078 = flow_loop_pre_header_find (header, dom);
7079
7080 num_loops++;
7081 }
7082 }
7083 }
7084
7085 /* Natural loops with shared headers may either be disjoint or
7086 nested. Disjoint loops with shared headers cannot be inner
7087 loops and should be merged. For now just mark loops that share
7088 headers. */
7089 for (i = 0; i < num_loops; i++)
7090 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
7091 loops->array[i].shared = 1;
7092
7093 sbitmap_free (headers);
7094 }
7095
7096 loops->num = num_loops;
7097
7098 /* Save CFG derived information to avoid recomputing it. */
7099 loops->cfg.dom = dom;
7100 loops->cfg.dfs_order = dfs_order;
7101
7102 /* Build the loop hierarchy tree. */
7103 flow_loops_tree_build (loops);
7104
7105 /* Assign the loop nesting depth and enclosed loop level for each
7106 loop. */
7107 loops->levels = flow_loops_level_compute (loops);
7108
7109 return num_loops;
7110 }
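
/* Illustrative sketch (hypothetical helper): the typical life cycle
   of the LOOPS structure built above.  The dump stream and verbosity
   level are arbitrary.  */
#if 0
static void
example_loop_pass (dump)
     FILE *dump;
{
  struct loops loops;

  if (flow_loops_find (&loops) > 0)
    flow_loops_dump (&loops, dump, 1);
  flow_loops_free (&loops);
}
#endif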
7111
7112
7113 /* Return non-zero if edge E enters the header of LOOP from outside of LOOP. */
7114 int
7115 flow_loop_outside_edge_p (loop, e)
7116 const struct loop *loop;
7117 edge e;
7118 {
7119 if (e->dest != loop->header)
7120 abort ();
7121 return (e->src == ENTRY_BLOCK_PTR)
7122 || ! TEST_BIT (loop->nodes, e->src->index);
7123 }
7124
7125
7126 /* Clear LOG_LINKS fields of insns in a chain. */
7127 void
7128 clear_log_links (insns)
7129 rtx insns;
7130 {
7131 rtx i;
7132 for (i = insns; i; i = NEXT_INSN (i))
7133 if (GET_RTX_CLASS (GET_CODE (i)) == 'i')
7134 LOG_LINKS (i) = 0;
7135 }