]> gcc.gnu.org Git - gcc.git/blob - gcc/flow.c
sparc.h (TARGET_ARCH32): Exchange ifdefs so that if compiling libgcc2 the macro depen...
[gcc.git] / gcc / flow.c
1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 88, 92-98, 1999 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
25
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
29
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
33
34 ** find_basic_blocks **
35
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
40
41 find_basic_blocks also finds any unreachable loops and deletes them.
42
43 ** life_analysis **
44
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
48
49 ** live-register info **
50
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
53
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
58
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
65
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial
75 REG_DEAD notes.
76
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
83
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
87
88 ** Other actions of life_analysis **
89
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
92
93 life_analysis deletes insns whose only effect is to store a value
94 that is never used.
95
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
101
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
104
105 life_analysis fills in certain vectors containing information about
106 register usage: reg_n_refs, reg_n_deaths, reg_n_sets, reg_live_length,
107 reg_n_calls_crosses and reg_basic_block.
108
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
111
112 /* TODO:
113
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
117 - log links creation
118 - pre/post modify transformation
119 */
120 \f
121 #include "config.h"
122 #include "system.h"
123 #include "tree.h"
124 #include "rtl.h"
125 #include "tm_p.h"
126 #include "basic-block.h"
127 #include "insn-config.h"
128 #include "regs.h"
129 #include "hard-reg-set.h"
130 #include "flags.h"
131 #include "output.h"
132 #include "function.h"
133 #include "except.h"
134 #include "toplev.h"
135 #include "recog.h"
136 #include "insn-flags.h"
137
138 #include "obstack.h"
139
140 #define obstack_chunk_alloc xmalloc
141 #define obstack_chunk_free free
142
143
144 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
145 the stack pointer does not matter. The value is tested only in
146 functions that have frame pointers.
147 No definition is equivalent to always zero. */
148 #ifndef EXIT_IGNORE_STACK
149 #define EXIT_IGNORE_STACK 0
150 #endif
151
152 #ifndef HAVE_epilogue
153 #define HAVE_epilogue 0
154 #endif
155
156 #ifndef HAVE_prologue
157 #define HAVE_prologue 0
158 #endif
159
160 /* The contents of the current function definition are allocated
161 in this obstack, and all are freed at the end of the function.
162 For top-level functions, this is temporary_obstack.
163 Separate obstacks are made for nested functions. */
164
165 extern struct obstack *function_obstack;
166
167 /* Number of basic blocks in the current function. */
168
169 int n_basic_blocks;
170
171 /* Number of edges in the current function. */
172
173 int n_edges;
174
175 /* The basic block array. */
176
177 varray_type basic_block_info;
178
179 /* The special entry and exit blocks. */
180
181 struct basic_block_def entry_exit_blocks[2] =
182 {
183 {
184 NULL, /* head */
185 NULL, /* end */
186 NULL, /* pred */
187 NULL, /* succ */
188 NULL, /* local_set */
189 NULL, /* global_live_at_start */
190 NULL, /* global_live_at_end */
191 NULL, /* aux */
192 ENTRY_BLOCK, /* index */
193 0, /* loop_depth */
194 -1, -1 /* eh_beg, eh_end */
195 },
196 {
197 NULL, /* head */
198 NULL, /* end */
199 NULL, /* pred */
200 NULL, /* succ */
201 NULL, /* local_set */
202 NULL, /* global_live_at_start */
203 NULL, /* global_live_at_end */
204 NULL, /* aux */
205 EXIT_BLOCK, /* index */
206 0, /* loop_depth */
207 -1, -1 /* eh_beg, eh_end */
208 }
209 };
210
211 /* Nonzero if the second flow pass has completed. */
212 int flow2_completed;
213
214 /* Maximum register number used in this function, plus one. */
215
216 int max_regno;
217
218 /* Indexed by n, giving various register information */
219
220 varray_type reg_n_info;
221
222 /* Size of the reg_n_info table. */
223
224 unsigned int reg_n_max;
225
226 /* Element N is the next insn that uses (hard or pseudo) register number N
227 within the current basic block; or zero, if there is no such insn.
228 This is valid only during the final backward scan in propagate_block. */
229
230 static rtx *reg_next_use;
231
232 /* Size of a regset for the current function,
233 in (1) bytes and (2) elements. */
234
235 int regset_bytes;
236 int regset_size;
237
238 /* Regset of regs live when calls to `setjmp'-like functions happen. */
239 /* ??? Does this exist only for the setjmp-clobbered warning message? */
240
241 regset regs_live_at_setjmp;
242
243 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
244 that have to go in the same hard reg.
245 The first two regs in the list are a pair, and the next two
246 are another pair, etc. */
247 rtx regs_may_share;
248
249 /* Depth within loops of basic block being scanned for lifetime analysis,
250 plus one. This is the weight attached to references to registers. */
251
252 static int loop_depth;
253
254 /* During propagate_block, this is non-zero if the value of CC0 is live. */
255
256 static int cc0_live;
257
258 /* During propagate_block, this contains a list of all the MEMs we are
259 tracking for dead store elimination. */
260
261 static rtx mem_set_list;
262
263 /* Set of registers that may be eliminable. These are handled specially
264 in updating regs_ever_live. */
265
266 static HARD_REG_SET elim_reg_set;
267
268 /* The basic block structure for every insn, indexed by uid. */
269
270 varray_type basic_block_for_insn;
271
272 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
273 /* ??? Should probably be using LABEL_NUSES instead. It would take a
274 bit of surgery to be able to use or co-opt the routines in jump. */
275
276 static rtx label_value_list;
277
278 /* INSN_VOLATILE (insn) is 1 if the insn refers to anything volatile. */
279
280 #define INSN_VOLATILE(INSN) bitmap_bit_p (uid_volatile, INSN_UID (INSN))
281 #define SET_INSN_VOLATILE(INSN) bitmap_set_bit (uid_volatile, INSN_UID (INSN))
282 static bitmap uid_volatile;
283
284 /* Forward declarations */
285 static int count_basic_blocks PROTO((rtx));
286 static rtx find_basic_blocks_1 PROTO((rtx));
287 static void create_basic_block PROTO((int, rtx, rtx, rtx));
288 static void clear_edges PROTO((void));
289 static void make_edges PROTO((rtx));
290 static void make_edge PROTO((sbitmap *, basic_block,
291 basic_block, int));
292 static void make_label_edge PROTO((sbitmap *, basic_block,
293 rtx, int));
294 static void make_eh_edge PROTO((sbitmap *, eh_nesting_info *,
295 basic_block, rtx, int));
296 static void mark_critical_edges PROTO((void));
297 static void move_stray_eh_region_notes PROTO((void));
298 static void record_active_eh_regions PROTO((rtx));
299
300 static void commit_one_edge_insertion PROTO((edge));
301
302 static void delete_unreachable_blocks PROTO((void));
303 static void delete_eh_regions PROTO((void));
304 static int can_delete_note_p PROTO((rtx));
305 static int delete_block PROTO((basic_block));
306 static void expunge_block PROTO((basic_block));
307 static rtx flow_delete_insn PROTO((rtx));
308 static int can_delete_label_p PROTO((rtx));
309 static int merge_blocks_move_predecessor_nojumps PROTO((basic_block,
310 basic_block));
311 static int merge_blocks_move_successor_nojumps PROTO((basic_block,
312 basic_block));
313 static void merge_blocks_nomove PROTO((basic_block, basic_block));
314 static int merge_blocks PROTO((edge,basic_block,basic_block));
315 static void try_merge_blocks PROTO((void));
316 static void tidy_fallthru_edge PROTO((edge,basic_block,basic_block));
317
318 static int verify_wide_reg_1 PROTO((rtx *, void *));
319 static void verify_wide_reg PROTO((int, rtx, rtx));
320 static void verify_local_live_at_start PROTO((regset, basic_block));
321 static int set_noop_p PROTO((rtx));
322 static int noop_move_p PROTO((rtx));
323 static void notice_stack_pointer_modification PROTO ((rtx, rtx, void *));
324 static void record_volatile_insns PROTO((rtx));
325 static void mark_reg PROTO((regset, rtx));
326 static void mark_regs_live_at_end PROTO((regset));
327 static void life_analysis_1 PROTO((rtx, int, int));
328 static void calculate_global_regs_live PROTO((sbitmap, sbitmap, int));
329 static void propagate_block PROTO((regset, rtx, rtx,
330 regset, int, int));
331 static int insn_dead_p PROTO((rtx, regset, int, rtx));
332 static int libcall_dead_p PROTO((rtx, regset, rtx, rtx));
333 static void mark_set_regs PROTO((regset, regset, rtx,
334 rtx, regset, int));
335 static void mark_set_1 PROTO((regset, regset, rtx,
336 rtx, regset, int));
337 #ifdef AUTO_INC_DEC
338 static void find_auto_inc PROTO((regset, rtx, rtx));
339 static int try_pre_increment_1 PROTO((rtx));
340 static int try_pre_increment PROTO((rtx, rtx, HOST_WIDE_INT));
341 #endif
342 static void mark_used_regs PROTO((regset, regset, rtx, int, rtx));
343 void dump_flow_info PROTO((FILE *));
344 void debug_flow_info PROTO((void));
345 static void dump_edge_info PROTO((FILE *, edge, int));
346
347 static void count_reg_sets_1 PROTO ((rtx));
348 static void count_reg_sets PROTO ((rtx));
349 static void count_reg_references PROTO ((rtx));
350 static void invalidate_mems_from_autoinc PROTO ((rtx));
351 static void remove_edge PROTO ((edge));
352 static void remove_fake_successors PROTO ((basic_block));
353
354 /* This function is always defined so it can be called from the
355 debugger, and it is declared extern so we don't get warnings about
356 it being unused. */
357 void verify_flow_info PROTO ((void));
358
359 \f
360 /* Find basic blocks of the current function.
361 F is the first insn of the function and NREGS the number of register
362 numbers in use. */
363
364 void
365 find_basic_blocks (f, nregs, file, do_cleanup)
366 rtx f;
367 int nregs ATTRIBUTE_UNUSED;
368 FILE *file ATTRIBUTE_UNUSED;
369 int do_cleanup;
370 {
371 int max_uid;
372
373 /* Flush out existing data. */
374 if (basic_block_info != NULL)
375 {
376 int i;
377
378 clear_edges ();
379
380 /* Clear bb->aux on all extant basic blocks. We'll use this as a
381 tag for reuse during create_basic_block, just in case some pass
382 copies around basic block notes improperly. */
383 for (i = 0; i < n_basic_blocks; ++i)
384 BASIC_BLOCK (i)->aux = NULL;
385
386 VARRAY_FREE (basic_block_info);
387 }
388
389 n_basic_blocks = count_basic_blocks (f);
390
391 /* Size the basic block table. The actual structures will be allocated
392 by find_basic_blocks_1, since we want to keep the structure pointers
393 stable across calls to find_basic_blocks. */
394 /* ??? This whole issue would be much simpler if we called find_basic_blocks
395 exactly once, and thereafter we don't have a single long chain of
396 instructions at all until close to the end of compilation when we
397 actually lay them out. */
398
399 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
400
401 label_value_list = find_basic_blocks_1 (f);
402
403 /* Record the block to which an insn belongs. */
404 /* ??? This should be done another way, by which (perhaps) a label is
405 tagged directly with the basic block that it starts. It is used for
406 more than that currently, but IMO that is the only valid use. */
407
408 max_uid = get_max_uid ();
409 #ifdef AUTO_INC_DEC
410 /* Leave space for insns life_analysis makes in some cases for auto-inc.
411 These cases are rare, so we don't need too much space. */
412 max_uid += max_uid / 10;
413 #endif
414
415 compute_bb_for_insn (max_uid);
416
417 /* Discover the edges of our cfg. */
418
419 record_active_eh_regions (f);
420 make_edges (label_value_list);
421
422 /* Delete unreachable blocks, then merge blocks when possible. */
423
424 if (do_cleanup)
425 {
426 delete_unreachable_blocks ();
427 move_stray_eh_region_notes ();
428 record_active_eh_regions (f);
429 try_merge_blocks ();
430 }
431
432 /* Mark critical edges. */
433
434 mark_critical_edges ();
435
436 /* Kill the data we won't maintain. */
437 label_value_list = NULL_RTX;
438
439 #ifdef ENABLE_CHECKING
440 verify_flow_info ();
441 #endif
442 }
443
444 /* Count the basic blocks of the function. */
445
446 static int
447 count_basic_blocks (f)
448 rtx f;
449 {
450 register rtx insn;
451 register RTX_CODE prev_code;
452 register int count = 0;
453 int eh_region = 0;
454 int call_had_abnormal_edge = 0;
455 rtx prev_call = NULL_RTX;
456
457 prev_code = JUMP_INSN;
458 for (insn = f; insn; insn = NEXT_INSN (insn))
459 {
460 register RTX_CODE code = GET_CODE (insn);
461
462 if (code == CODE_LABEL
463 || (GET_RTX_CLASS (code) == 'i'
464 && (prev_code == JUMP_INSN
465 || prev_code == BARRIER
466 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
467 {
468 count++;
469 }
470
471 /* Record whether this call created an edge. */
472 if (code == CALL_INSN)
473 {
474 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
475 int region = (note ? XWINT (XEXP (note, 0), 0) : 1);
476 prev_call = insn;
477 call_had_abnormal_edge = 0;
478
479 /* If there is a specified EH region, we have an edge. */
480 if (eh_region && region > 0)
481 call_had_abnormal_edge = 1;
482 else
483 {
484 /* If there is a nonlocal goto label and the specified
485 region number isn't -1, we have an edge. (0 means
486 no throw, but might have a nonlocal goto). */
487 if (nonlocal_goto_handler_labels && region >= 0)
488 call_had_abnormal_edge = 1;
489 }
490 }
491 else if (code != NOTE)
492 prev_call = NULL_RTX;
493
494 if (code != NOTE)
495 prev_code = code;
496 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
497 ++eh_region;
498 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
499 --eh_region;
500
501 }
502
503 /* The rest of the compiler works a bit smoother when we don't have to
504 check for the edge case of do-nothing functions with no basic blocks. */
505 if (count == 0)
506 {
507 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
508 count = 1;
509 }
510
511 return count;
512 }
513
514 /* Find all basic blocks of the function whose first insn is F.
515
516 Collect and return a list of labels whose addresses are taken. This
517 will be used in make_edges for use with computed gotos. */
518
519 static rtx
520 find_basic_blocks_1 (f)
521 rtx f;
522 {
523 register rtx insn, next;
524 int call_has_abnormal_edge = 0;
525 int i = 0;
526 rtx bb_note = NULL_RTX;
527 rtx eh_list = NULL_RTX;
528 rtx label_value_list = NULL_RTX;
529 rtx head = NULL_RTX;
530 rtx end = NULL_RTX;
531
532 /* We process the instructions in a slightly different way than we did
533 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
534 closed out the previous block, so that it gets attached at the proper
535 place. Since this form should be equivalent to the previous,
536 count_basic_blocks continues to use the old form as a check. */
537
538 for (insn = f; insn; insn = next)
539 {
540 enum rtx_code code = GET_CODE (insn);
541
542 next = NEXT_INSN (insn);
543
544 if (code == CALL_INSN)
545 {
546 /* Record whether this call created an edge. */
547 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
548 int region = (note ? XWINT (XEXP (note, 0), 0) : 1);
549 call_has_abnormal_edge = 0;
550
551 /* If there is an EH region, we have an edge. */
552 if (eh_list && region > 0)
553 call_has_abnormal_edge = 1;
554 else
555 {
556 /* If there is a nonlocal goto label and the specified
557 region number isn't -1, we have an edge. (0 means
558 no throw, but might have a nonlocal goto). */
559 if (nonlocal_goto_handler_labels && region >= 0)
560 call_has_abnormal_edge = 1;
561 }
562 }
563
564 switch (code)
565 {
566 case NOTE:
567 {
568 int kind = NOTE_LINE_NUMBER (insn);
569
570 /* Keep a LIFO list of the currently active exception notes. */
571 if (kind == NOTE_INSN_EH_REGION_BEG)
572 eh_list = alloc_INSN_LIST (insn, eh_list);
573 else if (kind == NOTE_INSN_EH_REGION_END)
574 {
575 rtx t = eh_list;
576 eh_list = XEXP (eh_list, 1);
577 free_INSN_LIST_node (t);
578 }
579
580 /* Look for basic block notes with which to keep the
581 basic_block_info pointers stable. Unthread the note now;
582 we'll put it back at the right place in create_basic_block.
583 Or not at all if we've already found a note in this block. */
584 else if (kind == NOTE_INSN_BASIC_BLOCK)
585 {
586 if (bb_note == NULL_RTX)
587 bb_note = insn;
588 next = flow_delete_insn (insn);
589 }
590
591 break;
592 }
593
594 case CODE_LABEL:
595 /* A basic block starts at a label. If we've closed one off due
596 to a barrier or some such, no need to do it again. */
597 if (head != NULL_RTX)
598 {
599 /* While we now have edge lists with which other portions of
600 the compiler might determine a call ending a basic block
601 does not imply an abnormal edge, it will be a bit before
602 everything can be updated. So continue to emit a noop at
603 the end of such a block. */
604 if (GET_CODE (end) == CALL_INSN)
605 {
606 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
607 end = emit_insn_after (nop, end);
608 }
609
610 create_basic_block (i++, head, end, bb_note);
611 bb_note = NULL_RTX;
612 }
613 head = end = insn;
614 break;
615
616 case JUMP_INSN:
617 /* A basic block ends at a jump. */
618 if (head == NULL_RTX)
619 head = insn;
620 else
621 {
622 /* ??? Make a special check for table jumps. The way this
623 happens is truly and amazingly gross. We are about to
624 create a basic block that contains just a code label and
625 an addr*vec jump insn. Worse, an addr_diff_vec creates
626 its own natural loop.
627
628 Prevent this bit of brain damage, pasting things together
629 correctly in make_edges.
630
631 The correct solution involves emitting the table directly
632 on the tablejump instruction as a note, or JUMP_LABEL. */
633
634 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
635 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
636 {
637 head = end = NULL;
638 n_basic_blocks--;
639 break;
640 }
641 }
642 end = insn;
643 goto new_bb_inclusive;
644
645 case BARRIER:
646 /* A basic block ends at a barrier. It may be that an unconditional
647 jump already closed the basic block -- no need to do it again. */
648 if (head == NULL_RTX)
649 break;
650
651 /* While we now have edge lists with which other portions of the
652 compiler might determine a call ending a basic block does not
653 imply an abnormal edge, it will be a bit before everything can
654 be updated. So continue to emit a noop at the end of such a
655 block. */
656 if (GET_CODE (end) == CALL_INSN)
657 {
658 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
659 end = emit_insn_after (nop, end);
660 }
661 goto new_bb_exclusive;
662
663 case CALL_INSN:
664 /* A basic block ends at a call that can either throw or
665 do a non-local goto. */
666 if (call_has_abnormal_edge)
667 {
668 new_bb_inclusive:
669 if (head == NULL_RTX)
670 head = insn;
671 end = insn;
672
673 new_bb_exclusive:
674 create_basic_block (i++, head, end, bb_note);
675 head = end = NULL_RTX;
676 bb_note = NULL_RTX;
677 break;
678 }
679 /* FALLTHRU */
680
681 default:
682 if (GET_RTX_CLASS (code) == 'i')
683 {
684 if (head == NULL_RTX)
685 head = insn;
686 end = insn;
687 }
688 break;
689 }
690
691 if (GET_RTX_CLASS (code) == 'i')
692 {
693 rtx note;
694
695 /* Make a list of all labels referred to other than by jumps
696 (which just don't have the REG_LABEL notes).
697
698 Make a special exception for labels followed by an ADDR*VEC,
699 as this would be a part of the tablejump setup code.
700
701 Make a special exception for the eh_return_stub_label, which
702 we know isn't part of any otherwise visible control flow. */
703
704 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
705 if (REG_NOTE_KIND (note) == REG_LABEL)
706 {
707 rtx lab = XEXP (note, 0), next;
708
709 if (lab == eh_return_stub_label)
710 ;
711 else if ((next = next_nonnote_insn (lab)) != NULL
712 && GET_CODE (next) == JUMP_INSN
713 && (GET_CODE (PATTERN (next)) == ADDR_VEC
714 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
715 ;
716 else
717 label_value_list
718 = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
719 }
720 }
721 }
722
723 if (head != NULL_RTX)
724 create_basic_block (i++, head, end, bb_note);
725
726 if (i != n_basic_blocks)
727 abort ();
728
729 return label_value_list;
730 }
731
732 /* Create a new basic block consisting of the instructions between
733 HEAD and END inclusive. Reuses the note and basic block struct
734 in BB_NOTE, if any. */
735
736 static void
737 create_basic_block (index, head, end, bb_note)
738 int index;
739 rtx head, end, bb_note;
740 {
741 basic_block bb;
742
743 if (bb_note
744 && ! RTX_INTEGRATED_P (bb_note)
745 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
746 && bb->aux == NULL)
747 {
748 /* If we found an existing note, thread it back onto the chain. */
749
750 if (GET_CODE (head) == CODE_LABEL)
751 add_insn_after (bb_note, head);
752 else
753 {
754 add_insn_before (bb_note, head);
755 head = bb_note;
756 }
757 }
758 else
759 {
760 /* Otherwise we must create a note and a basic block structure.
761 Since we allow basic block structs in rtl, give the struct
762 the same lifetime by allocating it off the function obstack
763 rather than using malloc. */
764
765 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
766 memset (bb, 0, sizeof (*bb));
767
768 if (GET_CODE (head) == CODE_LABEL)
769 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
770 else
771 {
772 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
773 head = bb_note;
774 }
775 NOTE_BASIC_BLOCK (bb_note) = bb;
776 }
777
778 /* Always include the bb note in the block. */
779 if (NEXT_INSN (end) == bb_note)
780 end = bb_note;
781
782 bb->head = head;
783 bb->end = end;
784 bb->index = index;
785 BASIC_BLOCK (index) = bb;
786
787 /* Tag the block so that we know it has been used when considering
788 other basic block notes. */
789 bb->aux = bb;
790 }
791 \f
792 /* Records the basic block struct in BB_FOR_INSN, for every instruction
793 indexed by INSN_UID. MAX is the size of the array. */
794
795 void
796 compute_bb_for_insn (max)
797 int max;
798 {
799 int i;
800
801 if (basic_block_for_insn)
802 VARRAY_FREE (basic_block_for_insn);
803 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
804
805 for (i = 0; i < n_basic_blocks; ++i)
806 {
807 basic_block bb = BASIC_BLOCK (i);
808 rtx insn, end;
809
810 end = bb->end;
811 insn = bb->head;
812 while (1)
813 {
814 int uid = INSN_UID (insn);
815 if (uid < max)
816 VARRAY_BB (basic_block_for_insn, uid) = bb;
817 if (insn == end)
818 break;
819 insn = NEXT_INSN (insn);
820 }
821 }
822 }
823
824 /* Free the memory associated with the edge structures. */
825
826 static void
827 clear_edges ()
828 {
829 int i;
830 edge n, e;
831
832 for (i = 0; i < n_basic_blocks; ++i)
833 {
834 basic_block bb = BASIC_BLOCK (i);
835
836 for (e = bb->succ; e ; e = n)
837 {
838 n = e->succ_next;
839 free (e);
840 }
841
842 bb->succ = 0;
843 bb->pred = 0;
844 }
845
846 for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
847 {
848 n = e->succ_next;
849 free (e);
850 }
851
852 ENTRY_BLOCK_PTR->succ = 0;
853 EXIT_BLOCK_PTR->pred = 0;
854
855 n_edges = 0;
856 }
857
858 /* Identify the edges between basic blocks.
859
860 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
861 that are otherwise unreachable may be reachable with a non-local goto.
862
863 BB_EH_END is an array indexed by basic block number in which we record
864 the list of exception regions active at the end of the basic block. */
865
866 static void
867 make_edges (label_value_list)
868 rtx label_value_list;
869 {
870 int i;
871 eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
872 sbitmap *edge_cache = NULL;
873
874 /* Assume no computed jump; revise as we create edges. */
875 current_function_has_computed_jump = 0;
876
877 /* Heavy use of computed goto in machine-generated code can lead to
878 nearly fully-connected CFGs. In that case we spend a significant
879 amount of time searching the edge lists for duplicates. */
880 if (forced_labels || label_value_list)
881 {
882 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
883 sbitmap_vector_zero (edge_cache, n_basic_blocks);
884 }
885
886 /* By nature of the way these get numbered, block 0 is always the entry. */
887 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
888
889 for (i = 0; i < n_basic_blocks; ++i)
890 {
891 basic_block bb = BASIC_BLOCK (i);
892 rtx insn, x;
893 enum rtx_code code;
894 int force_fallthru = 0;
895
896 /* Examine the last instruction of the block, and discover the
897 ways we can leave the block. */
898
899 insn = bb->end;
900 code = GET_CODE (insn);
901
902 /* A branch. */
903 if (code == JUMP_INSN)
904 {
905 rtx tmp;
906
907 /* ??? Recognize a tablejump and do the right thing. */
908 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
909 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
910 && GET_CODE (tmp) == JUMP_INSN
911 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
912 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
913 {
914 rtvec vec;
915 int j;
916
917 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
918 vec = XVEC (PATTERN (tmp), 0);
919 else
920 vec = XVEC (PATTERN (tmp), 1);
921
922 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
923 make_label_edge (edge_cache, bb,
924 XEXP (RTVEC_ELT (vec, j), 0), 0);
925
926 /* Some targets (eg, ARM) emit a conditional jump that also
927 contains the out-of-range target. Scan for these and
928 add an edge if necessary. */
929 if ((tmp = single_set (insn)) != NULL
930 && SET_DEST (tmp) == pc_rtx
931 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
932 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
933 make_label_edge (edge_cache, bb,
934 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
935
936 #ifdef CASE_DROPS_THROUGH
937 /* Silly VAXen. The ADDR_VEC is going to be in the way of
938 us naturally detecting fallthru into the next block. */
939 force_fallthru = 1;
940 #endif
941 }
942
943 /* If this is a computed jump, then mark it as reaching
944 everything on the label_value_list and forced_labels list. */
945 else if (computed_jump_p (insn))
946 {
947 current_function_has_computed_jump = 1;
948
949 for (x = label_value_list; x; x = XEXP (x, 1))
950 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
951
952 for (x = forced_labels; x; x = XEXP (x, 1))
953 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
954 }
955
956 /* Returns create an exit out. */
957 else if (returnjump_p (insn))
958 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
959
960 /* Otherwise, we have a plain conditional or unconditional jump. */
961 else
962 {
963 if (! JUMP_LABEL (insn))
964 abort ();
965 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
966 }
967 }
968
969 /* If this is a CALL_INSN, then mark it as reaching the active EH
970 handler for this CALL_INSN. If we're handling asynchronous
971 exceptions then any insn can reach any of the active handlers.
972
973 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
974
975 if (code == CALL_INSN || asynchronous_exceptions)
976 {
977 /* If there's an EH region active at the end of a block,
978 add the appropriate edges. */
979 if (bb->eh_end >= 0)
980 make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);
981
982 /* If we have asynchronous exceptions, do the same for *all*
983 exception regions active in the block. */
984 if (asynchronous_exceptions
985 && bb->eh_beg != bb->eh_end)
986 {
987 if (bb->eh_beg >= 0)
988 make_eh_edge (edge_cache, eh_nest_info, bb,
989 NULL_RTX, bb->eh_beg);
990
991 for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
992 if (GET_CODE (x) == NOTE
993 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
994 || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
995 {
996 int region = NOTE_EH_HANDLER (x);
997 make_eh_edge (edge_cache, eh_nest_info, bb,
998 NULL_RTX, region);
999 }
1000 }
1001
1002 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1003 {
1004 /* ??? This could be made smarter: in some cases it's possible
1005 to tell that certain calls will not do a nonlocal goto.
1006
1007 For example, if the nested functions that do the nonlocal
1008 gotos do not have their addresses taken, then only calls to
1009 those functions or to other nested functions that use them
1010 could possibly do nonlocal gotos. */
1011 /* We do know that a REG_EH_REGION note with a value less
1012 than 0 is guaranteed not to perform a non-local goto. */
1013 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1014 if (!note || XINT (XEXP (note, 0), 0) >= 0)
1015 for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1016 make_label_edge (edge_cache, bb, XEXP (x, 0),
1017 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1018 }
1019 }
1020
1021 /* We know something about the structure of the function __throw in
1022 libgcc2.c. It is the only function that ever contains eh_stub
1023 labels. It modifies its return address so that the last block
1024 returns to one of the eh_stub labels within it. So we have to
1025 make additional edges in the flow graph. */
1026 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1027 make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
1028
1029 /* Find out if we can drop through to the next block. */
1030 insn = next_nonnote_insn (insn);
1031 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1032 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1033 else if (i + 1 < n_basic_blocks)
1034 {
1035 rtx tmp = BLOCK_HEAD (i + 1);
1036 if (GET_CODE (tmp) == NOTE)
1037 tmp = next_nonnote_insn (tmp);
1038 if (force_fallthru || insn == tmp)
1039 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1040 }
1041 }
1042
1043 free_eh_nesting_info (eh_nest_info);
1044 if (edge_cache)
1045 sbitmap_vector_free (edge_cache);
1046 }
1047
1048 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1049 about the edge that is accumulated between calls. */
1050
1051 static void
1052 make_edge (edge_cache, src, dst, flags)
1053 sbitmap *edge_cache;
1054 basic_block src, dst;
1055 int flags;
1056 {
1057 int use_edge_cache;
1058 edge e;
1059
1060 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1061 many edges to them, and we didn't allocate memory for it. */
1062 use_edge_cache = (edge_cache
1063 && src != ENTRY_BLOCK_PTR
1064 && dst != EXIT_BLOCK_PTR);
1065
1066 /* Make sure we don't add duplicate edges. */
1067 if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
1068 for (e = src->succ; e ; e = e->succ_next)
1069 if (e->dest == dst)
1070 {
1071 e->flags |= flags;
1072 return;
1073 }
1074
1075 e = (edge) xcalloc (1, sizeof (*e));
1076 n_edges++;
1077
1078 e->succ_next = src->succ;
1079 e->pred_next = dst->pred;
1080 e->src = src;
1081 e->dest = dst;
1082 e->flags = flags;
1083
1084 src->succ = e;
1085 dst->pred = e;
1086
1087 if (use_edge_cache)
1088 SET_BIT (edge_cache[src->index], dst->index);
1089 }
1090
1091 /* Create an edge from a basic block to a label. */
1092
1093 static void
1094 make_label_edge (edge_cache, src, label, flags)
1095 sbitmap *edge_cache;
1096 basic_block src;
1097 rtx label;
1098 int flags;
1099 {
1100 if (GET_CODE (label) != CODE_LABEL)
1101 abort ();
1102
1103 /* If the label was never emitted, this insn is junk, but avoid a
1104 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1105 as a result of a syntax error and a diagnostic has already been
1106 printed. */
1107
1108 if (INSN_UID (label) == 0)
1109 return;
1110
1111 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1112 }
1113
1114 /* Create the edges generated by INSN in REGION. */
1115
1116 static void
1117 make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
1118 sbitmap *edge_cache;
1119 eh_nesting_info *eh_nest_info;
1120 basic_block src;
1121 rtx insn;
1122 int region;
1123 {
1124 handler_info **handler_list;
1125 int num, is_call;
1126
1127 is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1128 num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
1129 while (--num >= 0)
1130 {
1131 make_label_edge (edge_cache, src, handler_list[num]->handler_label,
1132 EDGE_ABNORMAL | EDGE_EH | is_call);
1133 }
1134 }
1135
1136 /* EH_REGION notes appearing between basic blocks is ambiguous, and even
1137 dangerous if we intend to move basic blocks around. Move such notes
1138 into the following block. */
1139
1140 static void
1141 move_stray_eh_region_notes ()
1142 {
1143 int i;
1144 basic_block b1, b2;
1145
1146 if (n_basic_blocks < 2)
1147 return;
1148
1149 b2 = BASIC_BLOCK (n_basic_blocks - 1);
1150 for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
1151 {
1152 rtx insn, next, list = NULL_RTX;
1153
1154 b1 = BASIC_BLOCK (i);
1155 for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
1156 {
1157 next = NEXT_INSN (insn);
1158 if (GET_CODE (insn) == NOTE
1159 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1160 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1161 {
1162 /* Unlink from the insn chain. */
1163 NEXT_INSN (PREV_INSN (insn)) = next;
1164 PREV_INSN (next) = PREV_INSN (insn);
1165
1166 /* Queue it. */
1167 NEXT_INSN (insn) = list;
1168 list = insn;
1169 }
1170 }
1171
1172 if (list == NULL_RTX)
1173 continue;
1174
1175 /* Find where to insert these things. */
1176 insn = b2->head;
1177 if (GET_CODE (insn) == CODE_LABEL)
1178 insn = NEXT_INSN (insn);
1179
1180 while (list)
1181 {
1182 next = NEXT_INSN (list);
1183 add_insn_after (list, insn);
1184 list = next;
1185 }
1186 }
1187 }
1188
1189 /* Recompute eh_beg/eh_end for each basic block. */
1190
1191 static void
1192 record_active_eh_regions (f)
1193 rtx f;
1194 {
1195 rtx insn, eh_list = NULL_RTX;
1196 int i = 0;
1197 basic_block bb = BASIC_BLOCK (0);
1198
1199 for (insn = f; insn ; insn = NEXT_INSN (insn))
1200 {
1201 if (bb->head == insn)
1202 bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1203
1204 if (GET_CODE (insn) == NOTE)
1205 {
1206 int kind = NOTE_LINE_NUMBER (insn);
1207 if (kind == NOTE_INSN_EH_REGION_BEG)
1208 eh_list = alloc_INSN_LIST (insn, eh_list);
1209 else if (kind == NOTE_INSN_EH_REGION_END)
1210 {
1211 rtx t = XEXP (eh_list, 1);
1212 free_INSN_LIST_node (eh_list);
1213 eh_list = t;
1214 }
1215 }
1216
1217 if (bb->end == insn)
1218 {
1219 bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1220 i += 1;
1221 if (i == n_basic_blocks)
1222 break;
1223 bb = BASIC_BLOCK (i);
1224 }
1225 }
1226 }
1227
1228 /* Identify critical edges and set the bits appropriately. */
1229
1230 static void
1231 mark_critical_edges ()
1232 {
1233 int i, n = n_basic_blocks;
1234 basic_block bb;
1235
1236 /* We begin with the entry block. This is not terribly important now,
1237 but could be if a front end (Fortran) implemented alternate entry
1238 points. */
1239 bb = ENTRY_BLOCK_PTR;
1240 i = -1;
1241
1242 while (1)
1243 {
1244 edge e;
1245
1246 /* (1) Critical edges must have a source with multiple successors. */
1247 if (bb->succ && bb->succ->succ_next)
1248 {
1249 for (e = bb->succ; e ; e = e->succ_next)
1250 {
1251 /* (2) Critical edges must have a destination with multiple
1252 predecessors. Note that we know there is at least one
1253 predecessor -- the edge we followed to get here. */
1254 if (e->dest->pred->pred_next)
1255 e->flags |= EDGE_CRITICAL;
1256 else
1257 e->flags &= ~EDGE_CRITICAL;
1258 }
1259 }
1260 else
1261 {
1262 for (e = bb->succ; e ; e = e->succ_next)
1263 e->flags &= ~EDGE_CRITICAL;
1264 }
1265
1266 if (++i >= n)
1267 break;
1268 bb = BASIC_BLOCK (i);
1269 }
1270 }
1271 \f
1272 /* Split a (typically critical) edge. Return the new block.
1273 Abort on abnormal edges.
1274
1275 ??? The code generally expects to be called on critical edges.
1276 The case of a block ending in an unconditional jump to a
1277 block with multiple predecessors is not handled optimally. */
1278
1279 basic_block
1280 split_edge (edge_in)
1281 edge edge_in;
1282 {
1283 basic_block old_pred, bb, old_succ;
1284 edge edge_out;
1285 rtx bb_note;
1286 int i, j;
1287
1288 /* Abnormal edges cannot be split. */
1289 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1290 abort ();
1291
1292 old_pred = edge_in->src;
1293 old_succ = edge_in->dest;
1294
1295 /* Remove the existing edge from the destination's pred list. */
1296 {
1297 edge *pp;
1298 for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1299 continue;
1300 *pp = edge_in->pred_next;
1301 edge_in->pred_next = NULL;
1302 }
1303
1304 /* Create the new structures. */
1305 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1306 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1307 n_edges++;
1308
1309 memset (bb, 0, sizeof (*bb));
1310 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1311 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1312
1313 /* ??? This info is likely going to be out of date very soon. */
1314 if (old_succ->global_live_at_start)
1315 {
1316 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1317 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1318 }
1319 else
1320 {
1321 CLEAR_REG_SET (bb->global_live_at_start);
1322 CLEAR_REG_SET (bb->global_live_at_end);
1323 }
1324
1325 /* Wire them up. */
1326 bb->pred = edge_in;
1327 bb->succ = edge_out;
1328
1329 edge_in->dest = bb;
1330 edge_in->flags &= ~EDGE_CRITICAL;
1331
1332 edge_out->pred_next = old_succ->pred;
1333 edge_out->succ_next = NULL;
1334 edge_out->src = bb;
1335 edge_out->dest = old_succ;
1336 edge_out->flags = EDGE_FALLTHRU;
1337 edge_out->probability = REG_BR_PROB_BASE;
1338
1339 old_succ->pred = edge_out;
1340
1341 /* Tricky case -- if there existed a fallthru into the successor
1342 (and we're not it) we must add a new unconditional jump around
1343 the new block we're actually interested in.
1344
1345 Further, if that edge is critical, this means a second new basic
1346 block must be created to hold it. In order to simplify correct
1347 insn placement, do this before we touch the existing basic block
1348 ordering for the block we were really wanting. */
1349 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1350 {
1351 edge e;
1352 for (e = edge_out->pred_next; e ; e = e->pred_next)
1353 if (e->flags & EDGE_FALLTHRU)
1354 break;
1355
1356 if (e)
1357 {
1358 basic_block jump_block;
1359 rtx pos;
1360
1361 if ((e->flags & EDGE_CRITICAL) == 0)
1362 {
1363 /* Non critical -- we can simply add a jump to the end
1364 of the existing predecessor. */
1365 jump_block = e->src;
1366 }
1367 else
1368 {
1369 /* We need a new block to hold the jump. The simplest
1370 way to do the bulk of the work here is to recursively
1371 call ourselves. */
1372 jump_block = split_edge (e);
1373 e = jump_block->succ;
1374 }
1375
1376 /* Now add the jump insn ... */
1377 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1378 jump_block->end);
1379 jump_block->end = pos;
1380 emit_barrier_after (pos);
1381
1382 /* ... let jump know that label is in use, ... */
1383 JUMP_LABEL (pos) = old_succ->head;
1384 ++LABEL_NUSES (old_succ->head);
1385
1386 /* ... and clear fallthru on the outgoing edge. */
1387 e->flags &= ~EDGE_FALLTHRU;
1388
1389 /* Continue splitting the interesting edge. */
1390 }
1391 }
1392
1393 /* Place the new block just in front of the successor. */
1394 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1395 if (old_succ == EXIT_BLOCK_PTR)
1396 j = n_basic_blocks - 1;
1397 else
1398 j = old_succ->index;
1399 for (i = n_basic_blocks - 1; i > j; --i)
1400 {
1401 basic_block tmp = BASIC_BLOCK (i - 1);
1402 BASIC_BLOCK (i) = tmp;
1403 tmp->index = i;
1404 }
1405 BASIC_BLOCK (i) = bb;
1406 bb->index = i;
1407
1408 /* Create the basic block note.
1409
1410 Where we place the note can have a noticable impact on the generated
1411 code. Consider this cfg:
1412
1413
1414 E
1415 |
1416 0
1417 / \
1418 +->1-->2--->E
1419 | |
1420 +--+
1421
1422 If we need to insert an insn on the edge from block 0 to block 1,
1423 we want to ensure the instructions we insert are outside of any
1424 loop notes that physically sit between block 0 and block 1. Otherwise
1425 we confuse the loop optimizer into thinking the loop is a phony. */
1426 if (old_succ != EXIT_BLOCK_PTR
1427 && PREV_INSN (old_succ->head)
1428 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
1429 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
1430 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1431 PREV_INSN (old_succ->head));
1432 else if (old_succ != EXIT_BLOCK_PTR)
1433 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1434 else
1435 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1436 NOTE_BASIC_BLOCK (bb_note) = bb;
1437 bb->head = bb->end = bb_note;
1438
1439 /* Not quite simple -- for non-fallthru edges, we must adjust the
1440 predecessor's jump instruction to target our new block. */
1441 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1442 {
1443 rtx tmp, insn = old_pred->end;
1444 rtx old_label = old_succ->head;
1445 rtx new_label = gen_label_rtx ();
1446
1447 if (GET_CODE (insn) != JUMP_INSN)
1448 abort ();
1449
1450 /* ??? Recognize a tablejump and adjust all matching cases. */
1451 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1452 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1453 && GET_CODE (tmp) == JUMP_INSN
1454 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1455 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1456 {
1457 rtvec vec;
1458 int j;
1459
1460 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1461 vec = XVEC (PATTERN (tmp), 0);
1462 else
1463 vec = XVEC (PATTERN (tmp), 1);
1464
1465 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1466 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1467 {
1468 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1469 --LABEL_NUSES (old_label);
1470 ++LABEL_NUSES (new_label);
1471 }
1472
1473 /* Handle casesi dispatch insns */
1474 if ((tmp = single_set (insn)) != NULL
1475 && SET_DEST (tmp) == pc_rtx
1476 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1477 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1478 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1479 {
1480 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1481 new_label);
1482 --LABEL_NUSES (old_label);
1483 ++LABEL_NUSES (new_label);
1484 }
1485 }
1486 else
1487 {
1488 /* This would have indicated an abnormal edge. */
1489 if (computed_jump_p (insn))
1490 abort ();
1491
1492 /* A return instruction can't be redirected. */
1493 if (returnjump_p (insn))
1494 abort ();
1495
1496 /* If the insn doesn't go where we think, we're confused. */
1497 if (JUMP_LABEL (insn) != old_label)
1498 abort ();
1499
1500 redirect_jump (insn, new_label);
1501 }
1502
1503 emit_label_before (new_label, bb_note);
1504 bb->head = new_label;
1505 }
1506
1507 return bb;
1508 }
1509
1510 /* Queue instructions for insertion on an edge between two basic blocks.
1511 The new instructions and basic blocks (if any) will not appear in the
1512 CFG until commit_edge_insertions is called. */
1513
1514 void
1515 insert_insn_on_edge (pattern, e)
1516 rtx pattern;
1517 edge e;
1518 {
1519 /* We cannot insert instructions on an abnormal critical edge.
1520 It will be easier to find the culprit if we die now. */
1521 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1522 == (EDGE_ABNORMAL|EDGE_CRITICAL))
1523 abort ();
1524
1525 if (e->insns == NULL_RTX)
1526 start_sequence ();
1527 else
1528 push_to_sequence (e->insns);
1529
1530 emit_insn (pattern);
1531
1532 e->insns = get_insns ();
1533 end_sequence();
1534 }
1535
1536 /* Update the CFG for the instructions queued on edge E. */
1537
1538 static void
1539 commit_one_edge_insertion (e)
1540 edge e;
1541 {
1542 rtx before = NULL_RTX, after = NULL_RTX, tmp;
1543 basic_block bb;
1544
1545 /* Figure out where to put these things. If the destination has
1546 one predecessor, insert there. Except for the exit block. */
1547 if (e->dest->pred->pred_next == NULL
1548 && e->dest != EXIT_BLOCK_PTR)
1549 {
1550 bb = e->dest;
1551
1552 /* Get the location correct wrt a code label, and "nice" wrt
1553 a basic block note, and before everything else. */
1554 tmp = bb->head;
1555 if (GET_CODE (tmp) == CODE_LABEL)
1556 tmp = NEXT_INSN (tmp);
1557 if (GET_CODE (tmp) == NOTE
1558 && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
1559 tmp = NEXT_INSN (tmp);
1560 if (tmp == bb->head)
1561 before = tmp;
1562 else
1563 after = PREV_INSN (tmp);
1564 }
1565
1566 /* If the source has one successor and the edge is not abnormal,
1567 insert there. Except for the entry block. */
1568 else if ((e->flags & EDGE_ABNORMAL) == 0
1569 && e->src->succ->succ_next == NULL
1570 && e->src != ENTRY_BLOCK_PTR)
1571 {
1572 bb = e->src;
1573 if (GET_CODE (bb->end) == JUMP_INSN)
1574 {
1575 /* ??? Is it possible to wind up with non-simple jumps? Perhaps
1576 a jump with delay slots already filled? */
1577 if (! simplejump_p (bb->end))
1578 abort ();
1579
1580 before = bb->end;
1581 }
1582 else
1583 {
1584 /* We'd better be fallthru, or we've lost track of what's what. */
1585 if ((e->flags & EDGE_FALLTHRU) == 0)
1586 abort ();
1587
1588 after = bb->end;
1589 }
1590 }
1591
1592 /* Otherwise we must split the edge. */
1593 else
1594 {
1595 bb = split_edge (e);
1596 after = bb->end;
1597 }
1598
1599 /* Now that we've found the spot, do the insertion. */
1600 tmp = e->insns;
1601 e->insns = NULL_RTX;
1602
1603 /* Set the new block number for these insns, if structure is allocated. */
1604 if (basic_block_for_insn)
1605 {
1606 rtx i;
1607 for (i = tmp; i != NULL_RTX; i = NEXT_INSN (i))
1608 set_block_for_insn (i, bb);
1609 }
1610
1611 if (before)
1612 {
1613 emit_insns_before (tmp, before);
1614 if (before == bb->head)
1615 bb->head = tmp;
1616 }
1617 else
1618 {
1619 tmp = emit_insns_after (tmp, after);
1620 if (after == bb->end)
1621 bb->end = tmp;
1622 }
1623 }
1624
1625 /* Update the CFG for all queued instructions. */
1626
1627 void
1628 commit_edge_insertions ()
1629 {
1630 int i;
1631 basic_block bb;
1632
1633 i = -1;
1634 bb = ENTRY_BLOCK_PTR;
1635 while (1)
1636 {
1637 edge e, next;
1638
1639 for (e = bb->succ; e ; e = next)
1640 {
1641 next = e->succ_next;
1642 if (e->insns)
1643 commit_one_edge_insertion (e);
1644 }
1645
1646 if (++i >= n_basic_blocks)
1647 break;
1648 bb = BASIC_BLOCK (i);
1649 }
1650 }
1651 \f
1652 /* Delete all unreachable basic blocks. */
1653
1654 static void
1655 delete_unreachable_blocks ()
1656 {
1657 basic_block *worklist, *tos;
1658 int deleted_handler;
1659 edge e;
1660 int i, n;
1661
1662 n = n_basic_blocks;
1663 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
1664
1665 /* Use basic_block->aux as a marker. Clear them all. */
1666
1667 for (i = 0; i < n; ++i)
1668 BASIC_BLOCK (i)->aux = NULL;
1669
1670 /* Add our starting points to the worklist. Almost always there will
1671 be only one. It isn't inconcievable that we might one day directly
1672 support Fortran alternate entry points. */
1673
1674 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1675 {
1676 *tos++ = e->dest;
1677
1678 /* Mark the block with a handy non-null value. */
1679 e->dest->aux = e;
1680 }
1681
1682 /* Iterate: find everything reachable from what we've already seen. */
1683
1684 while (tos != worklist)
1685 {
1686 basic_block b = *--tos;
1687
1688 for (e = b->succ; e ; e = e->succ_next)
1689 if (!e->dest->aux)
1690 {
1691 *tos++ = e->dest;
1692 e->dest->aux = e;
1693 }
1694 }
1695
1696 /* Delete all unreachable basic blocks. Count down so that we don't
1697 interfere with the block renumbering that happens in delete_block. */
1698
1699 deleted_handler = 0;
1700
1701 for (i = n - 1; i >= 0; --i)
1702 {
1703 basic_block b = BASIC_BLOCK (i);
1704
1705 if (b->aux != NULL)
1706 /* This block was found. Tidy up the mark. */
1707 b->aux = NULL;
1708 else
1709 deleted_handler |= delete_block (b);
1710 }
1711
1712 /* Fix up edges that now fall through, or rather should now fall through
1713 but previously required a jump around now deleted blocks. Simplify
1714 the search by only examining blocks numerically adjacent, since this
1715 is how find_basic_blocks created them. */
1716
1717 for (i = 1; i < n_basic_blocks; ++i)
1718 {
1719 basic_block b = BASIC_BLOCK (i - 1);
1720 basic_block c = BASIC_BLOCK (i);
1721 edge s;
1722
1723 /* We care about simple conditional or unconditional jumps with
1724 a single successor.
1725
1726 If we had a conditional branch to the next instruction when
1727 find_basic_blocks was called, then there will only be one
1728 out edge for the block which ended with the conditional
1729 branch (since we do not create duplicate edges).
1730
1731 Furthermore, the edge will be marked as a fallthru because we
1732 merge the flags for the duplicate edges. So we do not want to
1733 check that the edge is not a FALLTHRU edge. */
1734 if ((s = b->succ) != NULL
1735 && s->succ_next == NULL
1736 && s->dest == c
1737 /* If the jump insn has side effects, we can't tidy the edge. */
1738 && (GET_CODE (b->end) != JUMP_INSN
1739 || onlyjump_p (b->end)))
1740 tidy_fallthru_edge (s, b, c);
1741 }
1742
1743 /* If we deleted an exception handler, we may have EH region begin/end
1744 blocks to remove as well. */
1745 if (deleted_handler)
1746 delete_eh_regions ();
1747
1748 free (worklist);
1749 }
1750
1751 /* Find EH regions for which there is no longer a handler, and delete them. */
1752
1753 static void
1754 delete_eh_regions ()
1755 {
1756 rtx insn;
1757
1758 update_rethrow_references ();
1759
1760 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1761 if (GET_CODE (insn) == NOTE)
1762 {
1763 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1764 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1765 {
1766 int num = NOTE_EH_HANDLER (insn);
1767 /* A NULL handler indicates a region is no longer needed,
1768 as long as it isn't the target of a rethrow. */
1769 if (get_first_handler (num) == NULL && ! rethrow_used (num))
1770 {
1771 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1772 NOTE_SOURCE_FILE (insn) = 0;
1773 }
1774 }
1775 }
1776 }
1777
1778 /* Return true if NOTE is not one of the ones that must be kept paired,
1779 so that we may simply delete them. */
1780
1781 static int
1782 can_delete_note_p (note)
1783 rtx note;
1784 {
1785 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1786 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
1787 }
1788
1789 /* Unlink a chain of insns between START and FINISH, leaving notes
1790 that must be paired. */
1791
1792 void
1793 flow_delete_insn_chain (start, finish)
1794 rtx start, finish;
1795 {
1796 /* Unchain the insns one by one. It would be quicker to delete all
1797 of these with a single unchaining, rather than one at a time, but
1798 we need to keep the NOTE's. */
1799
1800 rtx next;
1801
1802 while (1)
1803 {
1804 next = NEXT_INSN (start);
1805 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1806 ;
1807 else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
1808 ;
1809 else
1810 next = flow_delete_insn (start);
1811
1812 if (start == finish)
1813 break;
1814 start = next;
1815 }
1816 }
1817
1818 /* Delete the insns in a (non-live) block. We physically delete every
1819 non-deleted-note insn, and update the flow graph appropriately.
1820
1821 Return nonzero if we deleted an exception handler. */
1822
1823 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1824 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1825
1826 static int
1827 delete_block (b)
1828 basic_block b;
1829 {
1830 int deleted_handler = 0;
1831 rtx insn, end;
1832
1833 /* If the head of this block is a CODE_LABEL, then it might be the
1834 label for an exception handler which can't be reached.
1835
1836 We need to remove the label from the exception_handler_label list
1837 and remove the associated NOTE_INSN_EH_REGION_BEG and
1838 NOTE_INSN_EH_REGION_END notes. */
1839
1840 insn = b->head;
1841
1842 never_reached_warning (insn);
1843
1844 if (GET_CODE (insn) == CODE_LABEL)
1845 {
1846 rtx x, *prev = &exception_handler_labels;
1847
1848 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1849 {
1850 if (XEXP (x, 0) == insn)
1851 {
1852 /* Found a match, splice this label out of the EH label list. */
1853 *prev = XEXP (x, 1);
1854 XEXP (x, 1) = NULL_RTX;
1855 XEXP (x, 0) = NULL_RTX;
1856
1857 /* Remove the handler from all regions */
1858 remove_handler (insn);
1859 deleted_handler = 1;
1860 break;
1861 }
1862 prev = &XEXP (x, 1);
1863 }
1864
1865 /* This label may be referenced by code solely for its value, or
1866 referenced by static data, or something. We have determined
1867 that it is not reachable, but cannot delete the label itself.
1868 Save code space and continue to delete the balance of the block,
1869 along with properly updating the cfg. */
1870 if (!can_delete_label_p (insn))
1871 {
1872 /* If we've only got one of these, skip the whole deleting
1873 insns thing. */
1874 if (insn == b->end)
1875 goto no_delete_insns;
1876 insn = NEXT_INSN (insn);
1877 }
1878 }
1879
1880 /* Selectively unlink the insn chain. Include any BARRIER that may
1881 follow the basic block. */
1882 end = next_nonnote_insn (b->end);
1883 if (!end || GET_CODE (end) != BARRIER)
1884 end = b->end;
1885 flow_delete_insn_chain (insn, end);
1886
1887 no_delete_insns:
1888
1889 /* Remove the edges into and out of this block. Note that there may
1890 indeed be edges in, if we are removing an unreachable loop. */
1891 {
1892 edge e, next, *q;
1893
1894 for (e = b->pred; e ; e = next)
1895 {
1896 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
1897 continue;
1898 *q = e->succ_next;
1899 next = e->pred_next;
1900 n_edges--;
1901 free (e);
1902 }
1903 for (e = b->succ; e ; e = next)
1904 {
1905 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
1906 continue;
1907 *q = e->pred_next;
1908 next = e->succ_next;
1909 n_edges--;
1910 free (e);
1911 }
1912
1913 b->pred = NULL;
1914 b->succ = NULL;
1915 }
1916
1917 /* Remove the basic block from the array, and compact behind it. */
1918 expunge_block (b);
1919
1920 return deleted_handler;
1921 }
1922
1923 /* Remove block B from the basic block array and compact behind it. */
1924
1925 static void
1926 expunge_block (b)
1927 basic_block b;
1928 {
1929 int i, n = n_basic_blocks;
1930
1931 for (i = b->index; i + 1 < n; ++i)
1932 {
1933 basic_block x = BASIC_BLOCK (i + 1);
1934 BASIC_BLOCK (i) = x;
1935 x->index = i;
1936 }
1937
1938 basic_block_info->num_elements--;
1939 n_basic_blocks--;
1940 }
1941
1942 /* Delete INSN by patching it out. Return the next insn. */
1943
1944 static rtx
1945 flow_delete_insn (insn)
1946 rtx insn;
1947 {
1948 rtx prev = PREV_INSN (insn);
1949 rtx next = NEXT_INSN (insn);
1950
1951 PREV_INSN (insn) = NULL_RTX;
1952 NEXT_INSN (insn) = NULL_RTX;
1953
1954 if (prev)
1955 NEXT_INSN (prev) = next;
1956 if (next)
1957 PREV_INSN (next) = prev;
1958 else
1959 set_last_insn (prev);
1960
1961 if (GET_CODE (insn) == CODE_LABEL)
1962 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
1963
1964 /* If deleting a jump, decrement the use count of the label. Deleting
1965 the label itself should happen in the normal course of block merging. */
1966 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1967 LABEL_NUSES (JUMP_LABEL (insn))--;
1968
1969 return next;
1970 }
1971
1972 /* True if a given label can be deleted. */
1973
1974 static int
1975 can_delete_label_p (label)
1976 rtx label;
1977 {
1978 rtx x;
1979
1980 if (LABEL_PRESERVE_P (label))
1981 return 0;
1982
1983 for (x = forced_labels; x ; x = XEXP (x, 1))
1984 if (label == XEXP (x, 0))
1985 return 0;
1986 for (x = label_value_list; x ; x = XEXP (x, 1))
1987 if (label == XEXP (x, 0))
1988 return 0;
1989 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
1990 if (label == XEXP (x, 0))
1991 return 0;
1992
1993 /* User declared labels must be preserved. */
1994 if (LABEL_NAME (label) != 0)
1995 return 0;
1996
1997 return 1;
1998 }
1999
2000 /* Blocks A and B are to be merged into a single block. A has no incoming
2001 fallthru edge, so it can be moved before B without adding or modifying
2002 any jumps (aside from the jump from A to B). */
2003
2004 static int
2005 merge_blocks_move_predecessor_nojumps (a, b)
2006 basic_block a, b;
2007 {
2008 rtx start, end, barrier;
2009 int index;
2010
2011 start = a->head;
2012 end = a->end;
2013
2014 /* We want to delete the BARRIER after the end of the insns we are
2015 going to move. If we don't find a BARRIER, then do nothing. This
2016 can happen in some cases if we have labels we can not delete.
2017
2018 Similarly, do nothing if we can not delete the label at the start
2019 of the target block. */
2020 barrier = next_nonnote_insn (end);
2021 if (GET_CODE (barrier) != BARRIER
2022 || (GET_CODE (b->head) == CODE_LABEL
2023 && ! can_delete_label_p (b->head)))
2024 return 0;
2025 else
2026 flow_delete_insn (barrier);
2027
2028 /* Move block and loop notes out of the chain so that we do not
2029 disturb their order.
2030
2031 ??? A better solution would be to squeeze out all the non-nested notes
2032 and adjust the block trees appropriately. Even better would be to have
2033 a tighter connection between block trees and rtl so that this is not
2034 necessary. */
2035 start = squeeze_notes (start, end);
2036
2037 /* Scramble the insn chain. */
2038 if (end != PREV_INSN (b->head))
2039 reorder_insns (start, end, PREV_INSN (b->head));
2040
2041 if (rtl_dump_file)
2042 {
2043 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2044 a->index, b->index);
2045 }
2046
2047 /* Swap the records for the two blocks around. Although we are deleting B,
2048 A is now where B was and we want to compact the BB array from where
2049 A used to be. */
2050 BASIC_BLOCK(a->index) = b;
2051 BASIC_BLOCK(b->index) = a;
2052 index = a->index;
2053 a->index = b->index;
2054 b->index = index;
2055
2056 /* Now blocks A and B are contiguous. Merge them. */
2057 merge_blocks_nomove (a, b);
2058
2059 return 1;
2060 }
2061
2062 /* Blocks A and B are to be merged into a single block. B has no outgoing
2063 fallthru edge, so it can be moved after A without adding or modifying
2064 any jumps (aside from the jump from A to B). */
2065
2066 static int
2067 merge_blocks_move_successor_nojumps (a, b)
2068 basic_block a, b;
2069 {
2070 rtx start, end, barrier;
2071
2072 start = b->head;
2073 end = b->end;
2074
2075 /* We want to delete the BARRIER after the end of the insns we are
2076 going to move. If we don't find a BARRIER, then do nothing. This
2077 can happen in some cases if we have labels we can not delete.
2078
2079 Similarly, do nothing if we can not delete the label at the start
2080 of the target block. */
2081 barrier = next_nonnote_insn (end);
2082 if (GET_CODE (barrier) != BARRIER
2083 || (GET_CODE (b->head) == CODE_LABEL
2084 && ! can_delete_label_p (b->head)))
2085 return 0;
2086 else
2087 flow_delete_insn (barrier);
2088
2089 /* Move block and loop notes out of the chain so that we do not
2090 disturb their order.
2091
2092 ??? A better solution would be to squeeze out all the non-nested notes
2093 and adjust the block trees appropriately. Even better would be to have
2094 a tighter connection between block trees and rtl so that this is not
2095 necessary. */
2096 start = squeeze_notes (start, end);
2097
2098 /* Scramble the insn chain. */
2099 reorder_insns (start, end, a->end);
2100
2101 /* Now blocks A and B are contiguous. Merge them. */
2102 merge_blocks_nomove (a, b);
2103
2104 if (rtl_dump_file)
2105 {
2106 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2107 b->index, a->index);
2108 }
2109
2110 return 1;
2111 }
2112
2113 /* Blocks A and B are to be merged into a single block. The insns
2114 are already contiguous, hence `nomove'. */
2115
2116 static void
2117 merge_blocks_nomove (a, b)
2118 basic_block a, b;
2119 {
2120 edge e;
2121 rtx b_head, b_end, a_end;
2122 int b_empty = 0;
2123
2124 /* If there was a CODE_LABEL beginning B, delete it. */
2125 b_head = b->head;
2126 b_end = b->end;
2127 if (GET_CODE (b_head) == CODE_LABEL)
2128 {
2129 /* Detect basic blocks with nothing but a label. This can happen
2130 in particular at the end of a function. */
2131 if (b_head == b_end)
2132 b_empty = 1;
2133 b_head = flow_delete_insn (b_head);
2134 }
2135
2136 /* Delete the basic block note. */
2137 if (GET_CODE (b_head) == NOTE
2138 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2139 {
2140 if (b_head == b_end)
2141 b_empty = 1;
2142 b_head = flow_delete_insn (b_head);
2143 }
2144
2145 /* If there was a jump out of A, delete it. */
2146 a_end = a->end;
2147 if (GET_CODE (a_end) == JUMP_INSN)
2148 {
2149 rtx prev;
2150
2151 prev = prev_nonnote_insn (a_end);
2152 if (!prev)
2153 prev = a->head;
2154
2155 #ifdef HAVE_cc0
2156 /* If this was a conditional jump, we need to also delete
2157 the insn that set cc0. */
2158
2159 if (prev && sets_cc0_p (prev))
2160 {
2161 rtx tmp = prev;
2162 prev = prev_nonnote_insn (prev);
2163 if (!prev)
2164 prev = a->head;
2165 flow_delete_insn (tmp);
2166 }
2167 #endif
2168
2169 /* Note that a->head != a->end, since we should have at least a
2170 bb note plus the jump, so prev != insn. */
2171 flow_delete_insn (a_end);
2172 a_end = prev;
2173 }
2174
2175 /* By definition, there should only be one successor of A, and that is
2176 B. Free that edge struct. */
2177 n_edges--;
2178 free (a->succ);
2179
2180 /* Adjust the edges out of B for the new owner. */
2181 for (e = b->succ; e ; e = e->succ_next)
2182 e->src = a;
2183 a->succ = b->succ;
2184
2185 /* Reassociate the insns of B with A. */
2186 if (!b_empty)
2187 {
2188 BLOCK_FOR_INSN (b_head) = a;
2189 while (b_head != b_end)
2190 {
2191 b_head = NEXT_INSN (b_head);
2192 BLOCK_FOR_INSN (b_head) = a;
2193 }
2194 a_end = b_head;
2195 }
2196 a->end = a_end;
2197
2198 /* Compact the basic block array. */
2199 expunge_block (b);
2200 }
2201
2202 /* Attempt to merge basic blocks that are potentially non-adjacent.
2203 Return true iff the attempt succeeded. */
2204
2205 static int
2206 merge_blocks (e, b, c)
2207 edge e;
2208 basic_block b, c;
2209 {
2210 /* If B has a fallthru edge to C, no need to move anything. */
2211 if (e->flags & EDGE_FALLTHRU)
2212 {
2213 /* If a label still appears somewhere and we cannot delete the label,
2214 then we cannot merge the blocks. The edge was tidied already. */
2215
2216 rtx insn, stop = NEXT_INSN (c->head);
2217 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2218 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2219 return 0;
2220
2221 merge_blocks_nomove (b, c);
2222
2223 if (rtl_dump_file)
2224 {
2225 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2226 b->index, c->index);
2227 }
2228
2229 return 1;
2230 }
2231 else
2232 {
2233 edge tmp_edge;
2234 basic_block d;
2235 int c_has_outgoing_fallthru;
2236 int b_has_incoming_fallthru;
2237
2238 /* We must make sure to not munge nesting of exception regions,
2239 lexical blocks, and loop notes.
2240
2241 The first is taken care of by requiring that the active eh
2242 region at the end of one block always matches the active eh
2243 region at the beginning of the next block.
2244
2245 The later two are taken care of by squeezing out all the notes. */
2246
2247 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2248 executed and we may want to treat blocks which have two out
2249 edges, one normal, one abnormal as only having one edge for
2250 block merging purposes. */
2251
2252 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2253 if (tmp_edge->flags & EDGE_FALLTHRU)
2254 break;
2255 c_has_outgoing_fallthru = (tmp_edge != NULL);
2256
2257 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2258 if (tmp_edge->flags & EDGE_FALLTHRU)
2259 break;
2260 b_has_incoming_fallthru = (tmp_edge != NULL);
2261
2262 /* If B does not have an incoming fallthru, and the exception regions
2263 match, then it can be moved immediately before C without introducing
2264 or modifying jumps.
2265
2266 C can not be the first block, so we do not have to worry about
2267 accessing a non-existent block. */
2268 d = BASIC_BLOCK (c->index - 1);
2269 if (! b_has_incoming_fallthru
2270 && d->eh_end == b->eh_beg
2271 && b->eh_end == c->eh_beg)
2272 return merge_blocks_move_predecessor_nojumps (b, c);
2273
2274 /* Otherwise, we're going to try to move C after B. Make sure the
2275 exception regions match.
2276
2277 If B is the last basic block, then we must not try to access the
2278 block structure for block B + 1. Luckily in that case we do not
2279 need to worry about matching exception regions. */
2280 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2281 if (b->eh_end == c->eh_beg
2282 && (d == NULL || c->eh_end == d->eh_beg))
2283 {
2284 /* If C does not have an outgoing fallthru, then it can be moved
2285 immediately after B without introducing or modifying jumps. */
2286 if (! c_has_outgoing_fallthru)
2287 return merge_blocks_move_successor_nojumps (b, c);
2288
2289 /* Otherwise, we'll need to insert an extra jump, and possibly
2290 a new block to contain it. */
2291 /* ??? Not implemented yet. */
2292 }
2293
2294 return 0;
2295 }
2296 }
2297
2298 /* Top level driver for merge_blocks. */
2299
2300 static void
2301 try_merge_blocks ()
2302 {
2303 int i;
2304
2305 /* Attempt to merge blocks as made possible by edge removal. If a block
2306 has only one successor, and the successor has only one predecessor,
2307 they may be combined. */
2308
2309 for (i = 0; i < n_basic_blocks; )
2310 {
2311 basic_block c, b = BASIC_BLOCK (i);
2312 edge s;
2313
2314 /* A loop because chains of blocks might be combineable. */
2315 while ((s = b->succ) != NULL
2316 && s->succ_next == NULL
2317 && (s->flags & EDGE_EH) == 0
2318 && (c = s->dest) != EXIT_BLOCK_PTR
2319 && c->pred->pred_next == NULL
2320 /* If the jump insn has side effects, we can't kill the edge. */
2321 && (GET_CODE (b->end) != JUMP_INSN
2322 || onlyjump_p (b->end))
2323 && merge_blocks (s, b, c))
2324 continue;
2325
2326 /* Don't get confused by the index shift caused by deleting blocks. */
2327 i = b->index + 1;
2328 }
2329 }
2330
2331 /* The given edge should potentially a fallthru edge. If that is in
2332 fact true, delete the unconditional jump and barriers that are in
2333 the way. */
2334
2335 static void
2336 tidy_fallthru_edge (e, b, c)
2337 edge e;
2338 basic_block b, c;
2339 {
2340 rtx q;
2341
2342 /* ??? In a late-running flow pass, other folks may have deleted basic
2343 blocks by nopping out blocks, leaving multiple BARRIERs between here
2344 and the target label. They ought to be chastized and fixed.
2345
2346 We can also wind up with a sequence of undeletable labels between
2347 one block and the next.
2348
2349 So search through a sequence of barriers, labels, and notes for
2350 the head of block C and assert that we really do fall through. */
2351
2352 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2353 return;
2354
2355 /* Remove what will soon cease being the jump insn from the source block.
2356 If block B consisted only of this single jump, turn it into a deleted
2357 note. */
2358 q = b->end;
2359 if (GET_CODE (q) == JUMP_INSN)
2360 {
2361 #ifdef HAVE_cc0
2362 /* If this was a conditional jump, we need to also delete
2363 the insn that set cc0. */
2364 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2365 q = PREV_INSN (q);
2366 #endif
2367
2368 if (b->head == q)
2369 {
2370 PUT_CODE (q, NOTE);
2371 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2372 NOTE_SOURCE_FILE (q) = 0;
2373 }
2374 else
2375 b->end = q = PREV_INSN (q);
2376 }
2377
2378 /* Selectively unlink the sequence. */
2379 if (q != PREV_INSN (c->head))
2380 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2381
2382 e->flags |= EDGE_FALLTHRU;
2383 }
2384
2385 /* Discover and record the loop depth at the head of each basic block. */
2386
2387 void
2388 calculate_loop_depth (dump)
2389 FILE *dump;
2390 {
2391 struct loops loops;
2392
2393 /* The loop infrastructure does the real job for us. */
2394 flow_loops_find (&loops);
2395 if (dump)
2396 flow_loops_dump (&loops, dump, 0);
2397 flow_loops_free (&loops);
2398 }
2399 \f
2400 /* Perform data flow analysis.
2401 F is the first insn of the function and NREGS the number of register numbers
2402 in use. */
2403
2404 void
2405 life_analysis (f, nregs, file, remove_dead_code)
2406 rtx f;
2407 int nregs;
2408 FILE *file;
2409 int remove_dead_code;
2410 {
2411 #ifdef ELIMINABLE_REGS
2412 register size_t i;
2413 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2414 #endif
2415 int flags;
2416
2417 /* Record which registers will be eliminated. We use this in
2418 mark_used_regs. */
2419
2420 CLEAR_HARD_REG_SET (elim_reg_set);
2421
2422 #ifdef ELIMINABLE_REGS
2423 for (i = 0; i < sizeof eliminables / sizeof eliminables[0]; i++)
2424 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2425 #else
2426 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2427 #endif
2428
2429 /* Allocate a bitmap to be filled in by record_volatile_insns. */
2430 uid_volatile = BITMAP_XMALLOC ();
2431
2432 /* We want alias analysis information for local dead store elimination. */
2433 init_alias_analysis ();
2434
2435 flags = PROP_FINAL;
2436 if (! remove_dead_code)
2437 flags &= ~(PROP_SCAN_DEAD_CODE | PROP_KILL_DEAD_CODE);
2438 life_analysis_1 (f, nregs, flags);
2439
2440 if (! reload_completed)
2441 mark_constant_function ();
2442
2443 end_alias_analysis ();
2444
2445 if (file)
2446 dump_flow_info (file);
2447
2448 BITMAP_XFREE (uid_volatile);
2449 free_basic_block_vars (1);
2450 }
2451
2452 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2453 Search for REGNO. If found, abort if it is not wider than word_mode. */
2454
2455 static int
2456 verify_wide_reg_1 (px, pregno)
2457 rtx *px;
2458 void *pregno;
2459 {
2460 rtx x = *px;
2461 int regno = *(int *) pregno;
2462
2463 if (GET_CODE (x) == REG && REGNO (x) == regno)
2464 {
2465 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2466 abort ();
2467 return 1;
2468 }
2469 return 0;
2470 }
2471
2472 /* A subroutine of verify_local_live_at_start. Search through insns
2473 between HEAD and END looking for register REGNO. */
2474
2475 static void
2476 verify_wide_reg (regno, head, end)
2477 int regno;
2478 rtx head, end;
2479 {
2480 while (1)
2481 {
2482 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2483 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2484 return;
2485 if (head == end)
2486 break;
2487 head = NEXT_INSN (head);
2488 }
2489
2490 /* We didn't find the register at all. Something's way screwy. */
2491 abort ();
2492 }
2493
2494 /* A subroutine of update_life_info. Verify that there are no untoward
2495 changes in live_at_start during a local update. */
2496
2497 static void
2498 verify_local_live_at_start (new_live_at_start, bb)
2499 regset new_live_at_start;
2500 basic_block bb;
2501 {
2502 if (reload_completed)
2503 {
2504 /* After reload, there are no pseudos, nor subregs of multi-word
2505 registers. The regsets should exactly match. */
2506 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2507 abort ();
2508 }
2509 else
2510 {
2511 int i;
2512
2513 /* Find the set of changed registers. */
2514 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2515
2516 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2517 {
2518 /* No registers should die. */
2519 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2520 abort ();
2521 /* Verify that the now-live register is wider than word_mode. */
2522 verify_wide_reg (i, bb->head, bb->end);
2523 });
2524 }
2525 }
2526
2527 /* Updates death notes starting with the basic blocks set in BLOCKS.
2528
2529 If LOCAL_ONLY, such as after splitting or peepholeing, we are only
2530 expecting local modifications to basic blocks. If we find extra
2531 registers live at the beginning of a block, then we either killed
2532 useful data, or we have a broken split that wants data not provided.
2533 If we find registers removed from live_at_start, that means we have
2534 a broken peephole that is killing a register it shouldn't.
2535
2536 ??? This is not true in one situation -- when a pre-reload splitter
2537 generates subregs of a multi-word pseudo, current life analysis will
2538 lose the kill. So we _can_ have a pseudo go live. How irritating.
2539
2540 BLOCK_FOR_INSN is assumed to be correct.
2541
2542 ??? PROP_FLAGS should not contain PROP_LOG_LINKS. Need to set up
2543 reg_next_use for that. Including PROP_REG_INFO does not refresh
2544 regs_ever_live unless the caller resets it to zero. */
2545
2546 void
2547 update_life_info (blocks, extent, prop_flags)
2548 sbitmap blocks;
2549 enum update_life_extent extent;
2550 int prop_flags;
2551 {
2552 regset tmp;
2553 int i;
2554
2555 tmp = ALLOCA_REG_SET ();
2556
2557 /* For a global update, we go through the relaxation process again. */
2558 if (extent != UPDATE_LIFE_LOCAL)
2559 {
2560 calculate_global_regs_live (blocks, blocks,
2561 prop_flags & PROP_SCAN_DEAD_CODE);
2562
2563 /* If asked, remove notes from the blocks we'll update. */
2564 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2565 count_or_remove_death_notes (blocks, 1);
2566 }
2567
2568 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2569 {
2570 basic_block bb = BASIC_BLOCK (i);
2571
2572 COPY_REG_SET (tmp, bb->global_live_at_end);
2573 propagate_block (tmp, bb->head, bb->end, (regset) NULL, i,
2574 prop_flags);
2575
2576 if (extent == UPDATE_LIFE_LOCAL)
2577 verify_local_live_at_start (tmp, bb);
2578 });
2579
2580 FREE_REG_SET (tmp);
2581 }
2582
2583 /* Free the variables allocated by find_basic_blocks.
2584
2585 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2586
2587 void
2588 free_basic_block_vars (keep_head_end_p)
2589 int keep_head_end_p;
2590 {
2591 if (basic_block_for_insn)
2592 {
2593 VARRAY_FREE (basic_block_for_insn);
2594 basic_block_for_insn = NULL;
2595 }
2596
2597 if (! keep_head_end_p)
2598 {
2599 clear_edges ();
2600 VARRAY_FREE (basic_block_info);
2601 n_basic_blocks = 0;
2602
2603 ENTRY_BLOCK_PTR->aux = NULL;
2604 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2605 EXIT_BLOCK_PTR->aux = NULL;
2606 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2607 }
2608 }
2609
2610 /* Return nonzero if the destination of SET equals the source. */
2611 static int
2612 set_noop_p (set)
2613 rtx set;
2614 {
2615 rtx src = SET_SRC (set);
2616 rtx dst = SET_DEST (set);
2617 if (GET_CODE (src) == REG && GET_CODE (dst) == REG
2618 && REGNO (src) == REGNO (dst))
2619 return 1;
2620 if (GET_CODE (src) != SUBREG || GET_CODE (dst) != SUBREG
2621 || SUBREG_WORD (src) != SUBREG_WORD (dst))
2622 return 0;
2623 src = SUBREG_REG (src);
2624 dst = SUBREG_REG (dst);
2625 if (GET_CODE (src) == REG && GET_CODE (dst) == REG
2626 && REGNO (src) == REGNO (dst))
2627 return 1;
2628 return 0;
2629 }
2630
2631 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2632 value to itself. */
2633 static int
2634 noop_move_p (insn)
2635 rtx insn;
2636 {
2637 rtx pat = PATTERN (insn);
2638
2639 /* Insns carrying these notes are useful later on. */
2640 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2641 return 0;
2642
2643 if (GET_CODE (pat) == SET && set_noop_p (pat))
2644 return 1;
2645
2646 if (GET_CODE (pat) == PARALLEL)
2647 {
2648 int i;
2649 /* If nothing but SETs of registers to themselves,
2650 this insn can also be deleted. */
2651 for (i = 0; i < XVECLEN (pat, 0); i++)
2652 {
2653 rtx tem = XVECEXP (pat, 0, i);
2654
2655 if (GET_CODE (tem) == USE
2656 || GET_CODE (tem) == CLOBBER)
2657 continue;
2658
2659 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2660 return 0;
2661 }
2662
2663 return 1;
2664 }
2665 return 0;
2666 }
2667
2668 static void
2669 notice_stack_pointer_modification (x, pat, data)
2670 rtx x;
2671 rtx pat ATTRIBUTE_UNUSED;
2672 void *data ATTRIBUTE_UNUSED;
2673 {
2674 if (x == stack_pointer_rtx
2675 /* The stack pointer is only modified indirectly as the result
2676 of a push until later in flow. See the comments in rtl.texi
2677 regarding Embedded Side-Effects on Addresses. */
2678 || (GET_CODE (x) == MEM
2679 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2680 || GET_CODE (XEXP (x, 0)) == PRE_INC
2681 || GET_CODE (XEXP (x, 0)) == POST_DEC
2682 || GET_CODE (XEXP (x, 0)) == POST_INC)
2683 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2684 current_function_sp_is_unchanging = 0;
2685 }
2686
2687 /* Record which insns refer to any volatile memory
2688 or for any reason can't be deleted just because they are dead stores.
2689 Also, delete any insns that copy a register to itself.
2690 And see if the stack pointer is modified. */
2691 static void
2692 record_volatile_insns (f)
2693 rtx f;
2694 {
2695 rtx insn;
2696 for (insn = f; insn; insn = NEXT_INSN (insn))
2697 {
2698 enum rtx_code code1 = GET_CODE (insn);
2699 if (code1 == CALL_INSN)
2700 SET_INSN_VOLATILE (insn);
2701 else if (code1 == INSN || code1 == JUMP_INSN)
2702 {
2703 if (GET_CODE (PATTERN (insn)) != USE
2704 && volatile_refs_p (PATTERN (insn)))
2705 SET_INSN_VOLATILE (insn);
2706
2707 /* A SET that makes space on the stack cannot be dead.
2708 (Such SETs occur only for allocating variable-size data,
2709 so they will always have a PLUS or MINUS according to the
2710 direction of stack growth.)
2711 Even if this function never uses this stack pointer value,
2712 signal handlers do! */
2713 else if (code1 == INSN && GET_CODE (PATTERN (insn)) == SET
2714 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
2715 #ifdef STACK_GROWS_DOWNWARD
2716 && GET_CODE (SET_SRC (PATTERN (insn))) == MINUS
2717 #else
2718 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
2719 #endif
2720 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx)
2721 SET_INSN_VOLATILE (insn);
2722
2723 /* Delete (in effect) any obvious no-op moves. */
2724 else if (noop_move_p (insn))
2725 {
2726 PUT_CODE (insn, NOTE);
2727 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2728 NOTE_SOURCE_FILE (insn) = 0;
2729 }
2730 }
2731
2732 /* Check if insn modifies the stack pointer. */
2733 if ( current_function_sp_is_unchanging
2734 && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2735 note_stores (PATTERN (insn),
2736 notice_stack_pointer_modification,
2737 NULL);
2738 }
2739 }
2740
2741 /* Mark a register in SET. Hard registers in large modes get all
2742 of their component registers set as well. */
2743 static void
2744 mark_reg (set, reg)
2745 regset set;
2746 rtx reg;
2747 {
2748 int regno = REGNO (reg);
2749
2750 SET_REGNO_REG_SET (set, regno);
2751 if (regno < FIRST_PSEUDO_REGISTER)
2752 {
2753 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2754 while (--n > 0)
2755 SET_REGNO_REG_SET (set, regno + n);
2756 }
2757 }
2758
2759 /* Mark those regs which are needed at the end of the function as live
2760 at the end of the last basic block. */
2761 static void
2762 mark_regs_live_at_end (set)
2763 regset set;
2764 {
2765 tree type;
2766 int i;
2767
2768 /* If exiting needs the right stack value, consider the stack pointer
2769 live at the end of the function. */
2770 if ((HAVE_epilogue && reload_completed)
2771 || ! EXIT_IGNORE_STACK
2772 || (! FRAME_POINTER_REQUIRED
2773 && ! current_function_calls_alloca
2774 && flag_omit_frame_pointer)
2775 || current_function_sp_is_unchanging)
2776 {
2777 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2778 }
2779
2780 /* Mark the frame pointer if needed at the end of the function. If
2781 we end up eliminating it, it will be removed from the live list
2782 of each basic block by reload. */
2783
2784 if (! reload_completed || frame_pointer_needed)
2785 {
2786 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2787 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2788 /* If they are different, also mark the hard frame pointer as live */
2789 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2790 #endif
2791 }
2792
2793 #ifdef PIC_OFFSET_TABLE_REGNUM
2794 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2795 /* Many architectures have a GP register even without flag_pic.
2796 Assume the pic register is not in use, or will be handled by
2797 other means, if it is not fixed. */
2798 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2799 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2800 #endif
2801 #endif
2802
2803 /* Mark all global registers, and all registers used by the epilogue
2804 as being live at the end of the function since they may be
2805 referenced by our caller. */
2806 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2807 if (global_regs[i]
2808 #ifdef EPILOGUE_USES
2809 || EPILOGUE_USES (i)
2810 #endif
2811 )
2812 SET_REGNO_REG_SET (set, i);
2813
2814 /* Mark all call-saved registers that we actaully used. */
2815 if (HAVE_epilogue && reload_completed)
2816 {
2817 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2818 if (! call_used_regs[i] && regs_ever_live[i])
2819 SET_REGNO_REG_SET (set, i);
2820 }
2821
2822 /* Mark function return value. */
2823 /* ??? Only do this after reload. Consider a non-void function that
2824 omits a return statement. Across that edge we'll have the return
2825 register live, and no set for it. Thus the return register will
2826 be live back through the CFG to the entry, and thus we die. A
2827 possible solution is to emit a clobber at exits without returns. */
2828
2829 type = TREE_TYPE (DECL_RESULT (current_function_decl));
2830 if (reload_completed
2831 && type != void_type_node)
2832 {
2833 rtx outgoing;
2834
2835 if (current_function_returns_struct
2836 || current_function_returns_pcc_struct)
2837 type = build_pointer_type (type);
2838
2839 #ifdef FUNCTION_OUTGOING_VALUE
2840 outgoing = FUNCTION_OUTGOING_VALUE (type, current_function_decl);
2841 #else
2842 outgoing = FUNCTION_VALUE (type, current_function_decl);
2843 #endif
2844
2845 if (GET_CODE (outgoing) == REG)
2846 mark_reg (set, outgoing);
2847 else if (GET_CODE (outgoing) == PARALLEL)
2848 {
2849 int len = XVECLEN (outgoing, 0);
2850
2851 /* Check for a NULL entry, used to indicate that the parameter
2852 goes on the stack and in registers. */
2853 i = (XEXP (XVECEXP (outgoing, 0, 0), 0) ? 0 : 1);
2854
2855 for ( ; i < len; ++i)
2856 {
2857 rtx r = XVECEXP (outgoing, 0, i);
2858 if (GET_CODE (r) == REG)
2859 mark_reg (set, r);
2860 }
2861 }
2862 else
2863 abort ();
2864 }
2865 }
2866
2867 /* Determine which registers are live at the start of each
2868 basic block of the function whose first insn is F.
2869 NREGS is the number of registers used in F.
2870 We allocate the vector basic_block_live_at_start
2871 and the regsets that it points to, and fill them with the data.
2872 regset_size and regset_bytes are also set here. */
2873
2874 static void
2875 life_analysis_1 (f, nregs, flags)
2876 rtx f;
2877 int nregs;
2878 int flags;
2879 {
2880 char save_regs_ever_live[FIRST_PSEUDO_REGISTER];
2881 register int i;
2882
2883 max_regno = nregs;
2884
2885 /* Allocate and zero out many data structures
2886 that will record the data from lifetime analysis. */
2887
2888 allocate_reg_life_data ();
2889 allocate_bb_life_data ();
2890
2891 reg_next_use = (rtx *) xcalloc (nregs, sizeof (rtx));
2892
2893 /* Assume that the stack pointer is unchanging if alloca hasn't been used.
2894 This will be cleared by record_volatile_insns if it encounters an insn
2895 which modifies the stack pointer. */
2896 current_function_sp_is_unchanging = !current_function_calls_alloca;
2897 record_volatile_insns (f);
2898
2899 /* Find the set of registers live on function exit. Do this before
2900 zeroing regs_ever_live, as we use that data post-reload. */
2901 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2902
2903 /* The post-reload life analysis have (on a global basis) the same
2904 registers live as was computed by reload itself. elimination
2905 Otherwise offsets and such may be incorrect.
2906
2907 Reload will make some registers as live even though they do not
2908 appear in the rtl. */
2909 if (reload_completed)
2910 memcpy (save_regs_ever_live, regs_ever_live, sizeof (regs_ever_live));
2911 memset (regs_ever_live, 0, sizeof regs_ever_live);
2912
2913 /* Compute register life at block boundaries. It'd be nice to
2914 begin with just the exit and noreturn blocks, but that set
2915 is not immediately handy. */
2916 {
2917 sbitmap blocks;
2918 blocks = sbitmap_alloc (n_basic_blocks);
2919 sbitmap_ones (blocks);
2920 calculate_global_regs_live (blocks, blocks, flags & PROP_SCAN_DEAD_CODE);
2921 sbitmap_free (blocks);
2922 }
2923
2924 /* The only pseudos that are live at the beginning of the function are
2925 those that were not set anywhere in the function. local-alloc doesn't
2926 know how to handle these correctly, so mark them as not local to any
2927 one basic block. */
2928
2929 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2930 FIRST_PSEUDO_REGISTER, i,
2931 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2932
2933 /* Now the life information is accurate. Make one more pass over each
2934 basic block to delete dead stores, create autoincrement addressing
2935 and record how many times each register is used, is set, or dies. */
2936 {
2937 regset tmp;
2938 tmp = ALLOCA_REG_SET ();
2939
2940 for (i = n_basic_blocks - 1; i >= 0; --i)
2941 {
2942 basic_block bb = BASIC_BLOCK (i);
2943
2944 COPY_REG_SET (tmp, bb->global_live_at_end);
2945 propagate_block (tmp, bb->head, bb->end, (regset) NULL, i, flags);
2946 }
2947
2948 FREE_REG_SET (tmp);
2949 }
2950
2951 /* We have a problem with any pseudoreg that lives across the setjmp.
2952 ANSI says that if a user variable does not change in value between
2953 the setjmp and the longjmp, then the longjmp preserves it. This
2954 includes longjmp from a place where the pseudo appears dead.
2955 (In principle, the value still exists if it is in scope.)
2956 If the pseudo goes in a hard reg, some other value may occupy
2957 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2958 Conclusion: such a pseudo must not go in a hard reg. */
2959 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2960 FIRST_PSEUDO_REGISTER, i,
2961 {
2962 if (regno_reg_rtx[i] != 0)
2963 {
2964 REG_LIVE_LENGTH (i) = -1;
2965 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2966 }
2967 });
2968
2969 /* Restore regs_ever_live that was provided by reload. */
2970 if (reload_completed)
2971 memcpy (regs_ever_live, save_regs_ever_live, sizeof (regs_ever_live));
2972
2973 /* Clean up. */
2974 free (reg_next_use);
2975 reg_next_use = NULL;
2976 }
2977
2978 /* Propagate global life info around the graph of basic blocks. Begin
2979 considering blocks with their corresponding bit set in BLOCKS_IN.
2980 BLOCKS_OUT is set for every block that was changed. */
2981
2982 static void
2983 calculate_global_regs_live (blocks_in, blocks_out, flags)
2984 sbitmap blocks_in, blocks_out;
2985 int flags;
2986 {
2987 basic_block *queue, *qhead, *qtail, *qend;
2988 regset tmp, new_live_at_end;
2989 int i;
2990
2991 tmp = ALLOCA_REG_SET ();
2992 new_live_at_end = ALLOCA_REG_SET ();
2993
2994 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
2995 because the `head == tail' style test for an empty queue doesn't
2996 work with a full queue. */
2997 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
2998 qtail = queue;
2999 qhead = qend = queue + n_basic_blocks + 2;
3000
3001 /* Clear out the garbage that might be hanging out in bb->aux. */
3002 for (i = n_basic_blocks - 1; i >= 0; --i)
3003 BASIC_BLOCK (i)->aux = NULL;
3004
3005 /* Queue the blocks set in the initial mask. Do this in reverse block
3006 number order so that we are more likely for the first round to do
3007 useful work. We use AUX non-null to flag that the block is queued. */
3008 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3009 {
3010 basic_block bb = BASIC_BLOCK (i);
3011 *--qhead = bb;
3012 bb->aux = bb;
3013 });
3014
3015 sbitmap_zero (blocks_out);
3016
3017 while (qhead != qtail)
3018 {
3019 int rescan, changed;
3020 basic_block bb;
3021 edge e;
3022
3023 bb = *qhead++;
3024 if (qhead == qend)
3025 qhead = queue;
3026 bb->aux = NULL;
3027
3028 /* Begin by propogating live_at_start from the successor blocks. */
3029 CLEAR_REG_SET (new_live_at_end);
3030 for (e = bb->succ; e ; e = e->succ_next)
3031 {
3032 basic_block sb = e->dest;
3033 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3034 }
3035
3036 if (bb == ENTRY_BLOCK_PTR)
3037 {
3038 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3039 continue;
3040 }
3041
3042 /* On our first pass through this block, we'll go ahead and continue.
3043 Recognize first pass by local_set NULL. On subsequent passes, we
3044 get to skip out early if live_at_end wouldn't have changed. */
3045
3046 if (bb->local_set == NULL)
3047 {
3048 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3049 rescan = 1;
3050 }
3051 else
3052 {
3053 /* If any bits were removed from live_at_end, we'll have to
3054 rescan the block. This wouldn't be necessary if we had
3055 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3056 local_live is really dependant on live_at_end. */
3057 CLEAR_REG_SET (tmp);
3058 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3059 new_live_at_end, BITMAP_AND_COMPL);
3060
3061 if (! rescan)
3062 {
3063 /* Find the set of changed bits. Take this opportunity
3064 to notice that this set is empty and early out. */
3065 CLEAR_REG_SET (tmp);
3066 changed = bitmap_operation (tmp, bb->global_live_at_end,
3067 new_live_at_end, BITMAP_XOR);
3068 if (! changed)
3069 continue;
3070
3071 /* If any of the changed bits overlap with local_set,
3072 we'll have to rescan the block. Detect overlap by
3073 the AND with ~local_set turning off bits. */
3074 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3075 BITMAP_AND_COMPL);
3076 }
3077 }
3078
3079 /* Let our caller know that BB changed enough to require its
3080 death notes updated. */
3081 SET_BIT (blocks_out, bb->index);
3082
3083 if (! rescan)
3084 {
3085 /* Add to live_at_start the set of all registers in
3086 new_live_at_end that aren't in the old live_at_end. */
3087
3088 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3089 BITMAP_AND_COMPL);
3090 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3091
3092 changed = bitmap_operation (bb->global_live_at_start,
3093 bb->global_live_at_start,
3094 tmp, BITMAP_IOR);
3095 if (! changed)
3096 continue;
3097 }
3098 else
3099 {
3100 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3101
3102 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3103 into live_at_start. */
3104 propagate_block (new_live_at_end, bb->head, bb->end,
3105 bb->local_set, bb->index, flags);
3106
3107 /* If live_at start didn't change, no need to go farther. */
3108 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3109 continue;
3110
3111 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3112 }
3113
3114 /* Queue all predecessors of BB so that we may re-examine
3115 their live_at_end. */
3116 for (e = bb->pred; e ; e = e->pred_next)
3117 {
3118 basic_block pb = e->src;
3119 if (pb->aux == NULL)
3120 {
3121 *qtail++ = pb;
3122 if (qtail == qend)
3123 qtail = queue;
3124 pb->aux = pb;
3125 }
3126 }
3127 }
3128
3129 FREE_REG_SET (tmp);
3130 FREE_REG_SET (new_live_at_end);
3131
3132 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3133 {
3134 basic_block bb = BASIC_BLOCK (i);
3135 FREE_REG_SET (bb->local_set);
3136 });
3137
3138 free (queue);
3139 }
3140 \f
3141 /* Subroutines of life analysis. */
3142
3143 /* Allocate the permanent data structures that represent the results
3144 of life analysis. Not static since used also for stupid life analysis. */
3145
3146 void
3147 allocate_bb_life_data ()
3148 {
3149 register int i;
3150
3151 for (i = 0; i < n_basic_blocks; i++)
3152 {
3153 basic_block bb = BASIC_BLOCK (i);
3154
3155 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3156 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3157 }
3158
3159 ENTRY_BLOCK_PTR->global_live_at_end
3160 = OBSTACK_ALLOC_REG_SET (function_obstack);
3161 EXIT_BLOCK_PTR->global_live_at_start
3162 = OBSTACK_ALLOC_REG_SET (function_obstack);
3163
3164 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3165 }
3166
3167 void
3168 allocate_reg_life_data ()
3169 {
3170 int i;
3171
3172 /* Recalculate the register space, in case it has grown. Old style
3173 vector oriented regsets would set regset_{size,bytes} here also. */
3174 allocate_reg_info (max_regno, FALSE, FALSE);
3175
3176 /* Reset all the data we'll collect in propagate_block and its
3177 subroutines. */
3178 for (i = 0; i < max_regno; i++)
3179 {
3180 REG_N_SETS (i) = 0;
3181 REG_N_REFS (i) = 0;
3182 REG_N_DEATHS (i) = 0;
3183 REG_N_CALLS_CROSSED (i) = 0;
3184 REG_LIVE_LENGTH (i) = 0;
3185 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3186 }
3187 }
3188
3189 /* Compute the registers live at the beginning of a basic block
3190 from those live at the end.
3191
3192 When called, OLD contains those live at the end.
3193 On return, it contains those live at the beginning.
3194 FIRST and LAST are the first and last insns of the basic block.
3195
3196 FINAL is nonzero if we are doing the final pass which is not
3197 for computing the life info (since that has already been done)
3198 but for acting on it. On this pass, we delete dead stores,
3199 set up the logical links and dead-variables lists of instructions,
3200 and merge instructions for autoincrement and autodecrement addresses.
3201
3202 SIGNIFICANT is nonzero only the first time for each basic block.
3203 If it is nonzero, it points to a regset in which we store
3204 a 1 for each register that is set within the block.
3205
3206 BNUM is the number of the basic block. */
3207
3208 static void
3209 propagate_block (old, first, last, significant, bnum, flags)
3210 register regset old;
3211 rtx first;
3212 rtx last;
3213 regset significant;
3214 int bnum;
3215 int flags;
3216 {
3217 register rtx insn;
3218 rtx prev;
3219 regset live;
3220 regset dead;
3221
3222 /* Find the loop depth for this block. Ignore loop level changes in the
3223 middle of the basic block -- for register allocation purposes, the
3224 important uses will be in the blocks wholely contained within the loop
3225 not in the loop pre-header or post-trailer. */
3226 loop_depth = BASIC_BLOCK (bnum)->loop_depth;
3227
3228 dead = ALLOCA_REG_SET ();
3229 live = ALLOCA_REG_SET ();
3230
3231 cc0_live = 0;
3232
3233 if (flags & PROP_REG_INFO)
3234 {
3235 register int i;
3236
3237 /* Process the regs live at the end of the block.
3238 Mark them as not local to any one basic block. */
3239 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3240 {
3241 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
3242 });
3243 }
3244
3245 /* Scan the block an insn at a time from end to beginning. */
3246
3247 for (insn = last; ; insn = prev)
3248 {
3249 prev = PREV_INSN (insn);
3250
3251 if (GET_CODE (insn) == NOTE)
3252 {
3253 /* If this is a call to `setjmp' et al,
3254 warn if any non-volatile datum is live. */
3255
3256 if ((flags & PROP_REG_INFO)
3257 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3258 IOR_REG_SET (regs_live_at_setjmp, old);
3259 }
3260
3261 /* Update the life-status of regs for this insn.
3262 First DEAD gets which regs are set in this insn
3263 then LIVE gets which regs are used in this insn.
3264 Then the regs live before the insn
3265 are those live after, with DEAD regs turned off,
3266 and then LIVE regs turned on. */
3267
3268 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3269 {
3270 register int i;
3271 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3272 int insn_is_dead = 0;
3273 int libcall_is_dead = 0;
3274
3275 if (flags & PROP_SCAN_DEAD_CODE)
3276 {
3277 insn_is_dead = (insn_dead_p (PATTERN (insn), old, 0, REG_NOTES (insn))
3278 /* Don't delete something that refers to volatile storage! */
3279 && ! INSN_VOLATILE (insn));
3280 libcall_is_dead = (insn_is_dead && note != 0
3281 && libcall_dead_p (PATTERN (insn), old, note, insn));
3282 }
3283
3284 /* We almost certainly don't want to delete prologue or epilogue
3285 instructions. Warn about probable compiler losage. */
3286 if ((flags & PROP_KILL_DEAD_CODE)
3287 && insn_is_dead
3288 && reload_completed
3289 && (HAVE_epilogue || HAVE_prologue)
3290 && prologue_epilogue_contains (insn))
3291 {
3292 warning ("ICE: would have deleted prologue/epilogue insn");
3293 debug_rtx (insn);
3294 libcall_is_dead = insn_is_dead = 0;
3295 }
3296
3297 /* If an instruction consists of just dead store(s) on final pass,
3298 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
3299 We could really delete it with delete_insn, but that
3300 can cause trouble for first or last insn in a basic block. */
3301 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3302 {
3303 rtx inote;
3304 /* If the insn referred to a label, note that the label is
3305 now less used. */
3306 for (inote = REG_NOTES (insn); inote; inote = XEXP (inote, 1))
3307 {
3308 if (REG_NOTE_KIND (inote) == REG_LABEL)
3309 {
3310 rtx label = XEXP (inote, 0);
3311 rtx next;
3312 LABEL_NUSES (label)--;
3313
3314 /* If this label was attached to an ADDR_VEC, it's
3315 safe to delete the ADDR_VEC. In fact, it's pretty much
3316 mandatory to delete it, because the ADDR_VEC may
3317 be referencing labels that no longer exist. */
3318 if (LABEL_NUSES (label) == 0
3319 && (next = next_nonnote_insn (label)) != NULL
3320 && GET_CODE (next) == JUMP_INSN
3321 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3322 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3323 {
3324 rtx pat = PATTERN (next);
3325 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3326 int len = XVECLEN (pat, diff_vec_p);
3327 int i;
3328 for (i = 0; i < len; i++)
3329 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3330 PUT_CODE (next, NOTE);
3331 NOTE_LINE_NUMBER (next) = NOTE_INSN_DELETED;
3332 NOTE_SOURCE_FILE (next) = 0;
3333 }
3334 }
3335 }
3336
3337 PUT_CODE (insn, NOTE);
3338 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3339 NOTE_SOURCE_FILE (insn) = 0;
3340
3341 /* CC0 is now known to be dead. Either this insn used it,
3342 in which case it doesn't anymore, or clobbered it,
3343 so the next insn can't use it. */
3344 cc0_live = 0;
3345
3346 /* If this insn is copying the return value from a library call,
3347 delete the entire library call. */
3348 if (libcall_is_dead)
3349 {
3350 rtx first = XEXP (note, 0);
3351 rtx p = insn;
3352 while (INSN_DELETED_P (first))
3353 first = NEXT_INSN (first);
3354 while (p != first)
3355 {
3356 p = PREV_INSN (p);
3357 PUT_CODE (p, NOTE);
3358 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3359 NOTE_SOURCE_FILE (p) = 0;
3360 }
3361 }
3362 goto flushed;
3363 }
3364
3365 CLEAR_REG_SET (dead);
3366 CLEAR_REG_SET (live);
3367
3368 /* See if this is an increment or decrement that can be
3369 merged into a following memory address. */
3370 #ifdef AUTO_INC_DEC
3371 {
3372 register rtx x = single_set (insn);
3373
3374 /* Does this instruction increment or decrement a register? */
3375 if (!reload_completed
3376 && (flags & PROP_AUTOINC)
3377 && x != 0
3378 && GET_CODE (SET_DEST (x)) == REG
3379 && (GET_CODE (SET_SRC (x)) == PLUS
3380 || GET_CODE (SET_SRC (x)) == MINUS)
3381 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3382 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3383 /* Ok, look for a following memory ref we can combine with.
3384 If one is found, change the memory ref to a PRE_INC
3385 or PRE_DEC, cancel this insn, and return 1.
3386 Return 0 if nothing has been done. */
3387 && try_pre_increment_1 (insn))
3388 goto flushed;
3389 }
3390 #endif /* AUTO_INC_DEC */
3391
3392 /* If this is not the final pass, and this insn is copying the
3393 value of a library call and it's dead, don't scan the
3394 insns that perform the library call, so that the call's
3395 arguments are not marked live. */
3396 if (libcall_is_dead)
3397 {
3398 /* Mark the dest reg as `significant'. */
3399 mark_set_regs (old, dead, PATTERN (insn), NULL_RTX,
3400 significant, flags);
3401
3402 insn = XEXP (note, 0);
3403 prev = PREV_INSN (insn);
3404 }
3405 else if (GET_CODE (PATTERN (insn)) == SET
3406 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3407 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3408 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3409 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3410 /* We have an insn to pop a constant amount off the stack.
3411 (Such insns use PLUS regardless of the direction of the stack,
3412 and any insn to adjust the stack by a constant is always a pop.)
3413 These insns, if not dead stores, have no effect on life. */
3414 ;
3415 else
3416 {
3417 /* Any regs live at the time of a call instruction
3418 must not go in a register clobbered by calls.
3419 Find all regs now live and record this for them. */
3420
3421 if (GET_CODE (insn) == CALL_INSN
3422 && (flags & PROP_REG_INFO))
3423 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3424 {
3425 REG_N_CALLS_CROSSED (i)++;
3426 });
3427
3428 /* LIVE gets the regs used in INSN;
3429 DEAD gets those set by it. Dead insns don't make anything
3430 live. */
3431
3432 mark_set_regs (old, dead, PATTERN (insn),
3433 insn, significant, flags);
3434
3435 /* If an insn doesn't use CC0, it becomes dead since we
3436 assume that every insn clobbers it. So show it dead here;
3437 mark_used_regs will set it live if it is referenced. */
3438 cc0_live = 0;
3439
3440 if (! insn_is_dead)
3441 mark_used_regs (old, live, PATTERN (insn), flags, insn);
3442
3443 /* Sometimes we may have inserted something before INSN (such as
3444 a move) when we make an auto-inc. So ensure we will scan
3445 those insns. */
3446 #ifdef AUTO_INC_DEC
3447 prev = PREV_INSN (insn);
3448 #endif
3449
3450 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3451 {
3452 register int i;
3453
3454 rtx note;
3455
3456 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3457 note;
3458 note = XEXP (note, 1))
3459 if (GET_CODE (XEXP (note, 0)) == USE)
3460 mark_used_regs (old, live, XEXP (XEXP (note, 0), 0),
3461 flags, insn);
3462
3463 /* Each call clobbers all call-clobbered regs that are not
3464 global or fixed. Note that the function-value reg is a
3465 call-clobbered reg, and mark_set_regs has already had
3466 a chance to handle it. */
3467
3468 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3469 if (call_used_regs[i] && ! global_regs[i]
3470 && ! fixed_regs[i])
3471 {
3472 SET_REGNO_REG_SET (dead, i);
3473 if (significant)
3474 SET_REGNO_REG_SET (significant, i);
3475 }
3476
3477 /* The stack ptr is used (honorarily) by a CALL insn. */
3478 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
3479
3480 /* Calls may also reference any of the global registers,
3481 so they are made live. */
3482 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3483 if (global_regs[i])
3484 mark_used_regs (old, live,
3485 gen_rtx_REG (reg_raw_mode[i], i),
3486 flags, insn);
3487
3488 /* Calls also clobber memory. */
3489 free_EXPR_LIST_list (&mem_set_list);
3490 }
3491
3492 /* Update OLD for the registers used or set. */
3493 AND_COMPL_REG_SET (old, dead);
3494 IOR_REG_SET (old, live);
3495
3496 }
3497
3498 /* On final pass, update counts of how many insns each reg is live
3499 at. */
3500 if (flags & PROP_REG_INFO)
3501 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3502 { REG_LIVE_LENGTH (i)++; });
3503 }
3504 flushed: ;
3505 if (insn == first)
3506 break;
3507 }
3508
3509 FREE_REG_SET (dead);
3510 FREE_REG_SET (live);
3511 free_EXPR_LIST_list (&mem_set_list);
3512 }
3513 \f
3514 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3515 (SET expressions whose destinations are registers dead after the insn).
3516 NEEDED is the regset that says which regs are alive after the insn.
3517
3518 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3519
3520 If X is the entire body of an insn, NOTES contains the reg notes
3521 pertaining to the insn. */
3522
3523 static int
3524 insn_dead_p (x, needed, call_ok, notes)
3525 rtx x;
3526 regset needed;
3527 int call_ok;
3528 rtx notes ATTRIBUTE_UNUSED;
3529 {
3530 enum rtx_code code = GET_CODE (x);
3531
3532 #ifdef AUTO_INC_DEC
3533 /* If flow is invoked after reload, we must take existing AUTO_INC
3534 expresions into account. */
3535 if (reload_completed)
3536 {
3537 for ( ; notes; notes = XEXP (notes, 1))
3538 {
3539 if (REG_NOTE_KIND (notes) == REG_INC)
3540 {
3541 int regno = REGNO (XEXP (notes, 0));
3542
3543 /* Don't delete insns to set global regs. */
3544 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3545 || REGNO_REG_SET_P (needed, regno))
3546 return 0;
3547 }
3548 }
3549 }
3550 #endif
3551
3552 /* If setting something that's a reg or part of one,
3553 see if that register's altered value will be live. */
3554
3555 if (code == SET)
3556 {
3557 rtx r = SET_DEST (x);
3558
3559 /* A SET that is a subroutine call cannot be dead. */
3560 if (! call_ok && GET_CODE (SET_SRC (x)) == CALL)
3561 return 0;
3562
3563 #ifdef HAVE_cc0
3564 if (GET_CODE (r) == CC0)
3565 return ! cc0_live;
3566 #endif
3567
3568 if (GET_CODE (r) == MEM && ! MEM_VOLATILE_P (r))
3569 {
3570 rtx temp;
3571 /* Walk the set of memory locations we are currently tracking
3572 and see if one is an identical match to this memory location.
3573 If so, this memory write is dead (remember, we're walking
3574 backwards from the end of the block to the start. */
3575 temp = mem_set_list;
3576 while (temp)
3577 {
3578 if (rtx_equal_p (XEXP (temp, 0), r))
3579 return 1;
3580 temp = XEXP (temp, 1);
3581 }
3582 }
3583
3584 while (GET_CODE (r) == SUBREG || GET_CODE (r) == STRICT_LOW_PART
3585 || GET_CODE (r) == ZERO_EXTRACT)
3586 r = XEXP (r, 0);
3587
3588 if (GET_CODE (r) == REG)
3589 {
3590 int regno = REGNO (r);
3591
3592 /* Don't delete insns to set global regs. */
3593 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3594 /* Make sure insns to set frame pointer aren't deleted. */
3595 || (regno == FRAME_POINTER_REGNUM
3596 && (! reload_completed || frame_pointer_needed))
3597 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3598 || (regno == HARD_FRAME_POINTER_REGNUM
3599 && (! reload_completed || frame_pointer_needed))
3600 #endif
3601 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3602 /* Make sure insns to set arg pointer are never deleted
3603 (if the arg pointer isn't fixed, there will be a USE for
3604 it, so we can treat it normally). */
3605 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3606 #endif
3607 || REGNO_REG_SET_P (needed, regno))
3608 return 0;
3609
3610 /* If this is a hard register, verify that subsequent words are
3611 not needed. */
3612 if (regno < FIRST_PSEUDO_REGISTER)
3613 {
3614 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3615
3616 while (--n > 0)
3617 if (REGNO_REG_SET_P (needed, regno+n))
3618 return 0;
3619 }
3620
3621 return 1;
3622 }
3623 }
3624
3625 /* If performing several activities,
3626 insn is dead if each activity is individually dead.
3627 Also, CLOBBERs and USEs can be ignored; a CLOBBER or USE
3628 that's inside a PARALLEL doesn't make the insn worth keeping. */
3629 else if (code == PARALLEL)
3630 {
3631 int i = XVECLEN (x, 0);
3632
3633 for (i--; i >= 0; i--)
3634 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3635 && GET_CODE (XVECEXP (x, 0, i)) != USE
3636 && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok, NULL_RTX))
3637 return 0;
3638
3639 return 1;
3640 }
3641
3642 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3643 is not necessarily true for hard registers. */
3644 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3645 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3646 && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
3647 return 1;
3648
3649 /* We do not check other CLOBBER or USE here. An insn consisting of just
3650 a CLOBBER or just a USE should not be deleted. */
3651 return 0;
3652 }
3653
3654 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3655 return 1 if the entire library call is dead.
3656 This is true if X copies a register (hard or pseudo)
3657 and if the hard return reg of the call insn is dead.
3658 (The caller should have tested the destination of X already for death.)
3659
3660 If this insn doesn't just copy a register, then we don't
3661 have an ordinary libcall. In that case, cse could not have
3662 managed to substitute the source for the dest later on,
3663 so we can assume the libcall is dead.
3664
3665 NEEDED is the bit vector of pseudoregs live before this insn.
3666 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
3667
3668 static int
3669 libcall_dead_p (x, needed, note, insn)
3670 rtx x;
3671 regset needed;
3672 rtx note;
3673 rtx insn;
3674 {
3675 register RTX_CODE code = GET_CODE (x);
3676
3677 if (code == SET)
3678 {
3679 register rtx r = SET_SRC (x);
3680 if (GET_CODE (r) == REG)
3681 {
3682 rtx call = XEXP (note, 0);
3683 rtx call_pat;
3684 register int i;
3685
3686 /* Find the call insn. */
3687 while (call != insn && GET_CODE (call) != CALL_INSN)
3688 call = NEXT_INSN (call);
3689
3690 /* If there is none, do nothing special,
3691 since ordinary death handling can understand these insns. */
3692 if (call == insn)
3693 return 0;
3694
3695 /* See if the hard reg holding the value is dead.
3696 If this is a PARALLEL, find the call within it. */
3697 call_pat = PATTERN (call);
3698 if (GET_CODE (call_pat) == PARALLEL)
3699 {
3700 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3701 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3702 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3703 break;
3704
3705 /* This may be a library call that is returning a value
3706 via invisible pointer. Do nothing special, since
3707 ordinary death handling can understand these insns. */
3708 if (i < 0)
3709 return 0;
3710
3711 call_pat = XVECEXP (call_pat, 0, i);
3712 }
3713
3714 return insn_dead_p (call_pat, needed, 1, REG_NOTES (call));
3715 }
3716 }
3717 return 1;
3718 }
3719
3720 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3721 live at function entry. Don't count global register variables, variables
3722 in registers that can be used for function arg passing, or variables in
3723 fixed hard registers. */
3724
3725 int
3726 regno_uninitialized (regno)
3727 int regno;
3728 {
3729 if (n_basic_blocks == 0
3730 || (regno < FIRST_PSEUDO_REGISTER
3731 && (global_regs[regno]
3732 || fixed_regs[regno]
3733 || FUNCTION_ARG_REGNO_P (regno))))
3734 return 0;
3735
3736 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3737 }
3738
3739 /* 1 if register REGNO was alive at a place where `setjmp' was called
3740 and was set more than once or is an argument.
3741 Such regs may be clobbered by `longjmp'. */
3742
3743 int
3744 regno_clobbered_at_setjmp (regno)
3745 int regno;
3746 {
3747 if (n_basic_blocks == 0)
3748 return 0;
3749
3750 return ((REG_N_SETS (regno) > 1
3751 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3752 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3753 }
3754 \f
3755 /* INSN references memory, possibly using autoincrement addressing modes.
3756 Find any entries on the mem_set_list that need to be invalidated due
3757 to an address change. */
3758 static void
3759 invalidate_mems_from_autoinc (insn)
3760 rtx insn;
3761 {
3762 rtx note = REG_NOTES (insn);
3763 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3764 {
3765 if (REG_NOTE_KIND (note) == REG_INC)
3766 {
3767 rtx temp = mem_set_list;
3768 rtx prev = NULL_RTX;
3769 rtx next;
3770
3771 while (temp)
3772 {
3773 next = XEXP (temp, 1);
3774 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3775 {
3776 /* Splice temp out of list. */
3777 if (prev)
3778 XEXP (prev, 1) = next;
3779 else
3780 mem_set_list = next;
3781 free_EXPR_LIST_node (temp);
3782 }
3783 else
3784 prev = temp;
3785 temp = next;
3786 }
3787 }
3788 }
3789 }
3790
3791 /* Process the registers that are set within X. Their bits are set to
3792 1 in the regset DEAD, because they are dead prior to this insn.
3793
3794 If INSN is nonzero, it is the insn being processed.
3795
3796 FLAGS is the set of operations to perform. */
3797
3798 static void
3799 mark_set_regs (needed, dead, x, insn, significant, flags)
3800 regset needed;
3801 regset dead;
3802 rtx x;
3803 rtx insn;
3804 regset significant;
3805 int flags;
3806 {
3807 register RTX_CODE code = GET_CODE (x);
3808
3809 if (code == SET || code == CLOBBER)
3810 mark_set_1 (needed, dead, x, insn, significant, flags);
3811 else if (code == PARALLEL)
3812 {
3813 register int i;
3814 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3815 {
3816 code = GET_CODE (XVECEXP (x, 0, i));
3817 if (code == SET || code == CLOBBER)
3818 mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn,
3819 significant, flags);
3820 }
3821 }
3822 }
3823
3824 /* Process a single SET rtx, X. */
3825
3826 static void
3827 mark_set_1 (needed, dead, x, insn, significant, flags)
3828 regset needed;
3829 regset dead;
3830 rtx x;
3831 rtx insn;
3832 regset significant;
3833 int flags;
3834 {
3835 register int regno = -1;
3836 register rtx reg = SET_DEST (x);
3837
3838 /* Some targets place small structures in registers for
3839 return values of functions. We have to detect this
3840 case specially here to get correct flow information. */
3841 if (GET_CODE (reg) == PARALLEL
3842 && GET_MODE (reg) == BLKmode)
3843 {
3844 register int i;
3845
3846 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3847 mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn,
3848 significant, flags);
3849 return;
3850 }
3851
3852 /* Modifying just one hardware register of a multi-reg value
3853 or just a byte field of a register
3854 does not mean the value from before this insn is now dead.
3855 But it does mean liveness of that register at the end of the block
3856 is significant.
3857
3858 Within mark_set_1, however, we treat it as if the register is
3859 indeed modified. mark_used_regs will, however, also treat this
3860 register as being used. Thus, we treat these insns as setting a
3861 new value for the register as a function of its old value. This
3862 cases LOG_LINKS to be made appropriately and this will help combine. */
3863
3864 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
3865 || GET_CODE (reg) == SIGN_EXTRACT
3866 || GET_CODE (reg) == STRICT_LOW_PART)
3867 reg = XEXP (reg, 0);
3868
3869 /* If this set is a MEM, then it kills any aliased writes.
3870 If this set is a REG, then it kills any MEMs which use the reg. */
3871 if (flags & PROP_SCAN_DEAD_CODE)
3872 {
3873 if (GET_CODE (reg) == MEM
3874 || GET_CODE (reg) == REG)
3875 {
3876 rtx temp = mem_set_list;
3877 rtx prev = NULL_RTX;
3878 rtx next;
3879
3880 while (temp)
3881 {
3882 next = XEXP (temp, 1);
3883 if ((GET_CODE (reg) == MEM
3884 && output_dependence (XEXP (temp, 0), reg))
3885 || (GET_CODE (reg) == REG
3886 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
3887 {
3888 /* Splice this entry out of the list. */
3889 if (prev)
3890 XEXP (prev, 1) = next;
3891 else
3892 mem_set_list = next;
3893 free_EXPR_LIST_node (temp);
3894 }
3895 else
3896 prev = temp;
3897 temp = next;
3898 }
3899 }
3900
3901 /* If the memory reference had embedded side effects (autoincrement
3902 address modes. Then we may need to kill some entries on the
3903 memory set list. */
3904 if (insn && GET_CODE (reg) == MEM)
3905 invalidate_mems_from_autoinc (insn);
3906
3907 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
3908 /* We do not know the size of a BLKmode store, so we do not track
3909 them for redundant store elimination. */
3910 && GET_MODE (reg) != BLKmode
3911 /* There are no REG_INC notes for SP, so we can't assume we'll see
3912 everything that invalidates it. To be safe, don't eliminate any
3913 stores though SP; none of them should be redundant anyway. */
3914 && ! reg_mentioned_p (stack_pointer_rtx, reg))
3915 mem_set_list = alloc_EXPR_LIST (0, reg, mem_set_list);
3916 }
3917
3918 if (GET_CODE (reg) == REG
3919 && (regno = REGNO (reg),
3920 ! (regno == FRAME_POINTER_REGNUM
3921 && (! reload_completed || frame_pointer_needed)))
3922 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3923 && ! (regno == HARD_FRAME_POINTER_REGNUM
3924 && (! reload_completed || frame_pointer_needed))
3925 #endif
3926 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3927 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3928 #endif
3929 && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
3930 /* && regno != STACK_POINTER_REGNUM) -- let's try without this. */
3931 {
3932 int some_needed = REGNO_REG_SET_P (needed, regno);
3933 int some_not_needed = ! some_needed;
3934
3935 /* Mark it as a significant register for this basic block. */
3936 if (significant)
3937 SET_REGNO_REG_SET (significant, regno);
3938
3939 /* Mark it as dead before this insn. */
3940 SET_REGNO_REG_SET (dead, regno);
3941
3942 /* A hard reg in a wide mode may really be multiple registers.
3943 If so, mark all of them just like the first. */
3944 if (regno < FIRST_PSEUDO_REGISTER)
3945 {
3946 int n;
3947
3948 /* Nothing below is needed for the stack pointer; get out asap.
3949 Eg, log links aren't needed, since combine won't use them. */
3950 if (regno == STACK_POINTER_REGNUM)
3951 return;
3952
3953 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3954 while (--n > 0)
3955 {
3956 int regno_n = regno + n;
3957 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
3958 if (significant)
3959 SET_REGNO_REG_SET (significant, regno_n);
3960
3961 SET_REGNO_REG_SET (dead, regno_n);
3962 some_needed |= needed_regno;
3963 some_not_needed |= ! needed_regno;
3964 }
3965 }
3966
3967 /* Additional data to record if this is the final pass. */
3968 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
3969 | PROP_DEATH_NOTES | PROP_AUTOINC))
3970 {
3971 register rtx y;
3972 register int blocknum = BLOCK_NUM (insn);
3973
3974 y = NULL_RTX;
3975 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3976 y = reg_next_use[regno];
3977
3978 /* If this is a hard reg, record this function uses the reg. */
3979
3980 if (regno < FIRST_PSEUDO_REGISTER)
3981 {
3982 register int i;
3983 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
3984
3985 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3986 for (i = regno; i < endregno; i++)
3987 {
3988 /* The next use is no longer "next", since a store
3989 intervenes. */
3990 reg_next_use[i] = 0;
3991 }
3992
3993 if (flags & PROP_REG_INFO)
3994 for (i = regno; i < endregno; i++)
3995 {
3996 regs_ever_live[i] = 1;
3997 REG_N_SETS (i)++;
3998 }
3999 }
4000 else
4001 {
4002 /* The next use is no longer "next", since a store
4003 intervenes. */
4004 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4005 reg_next_use[regno] = 0;
4006
4007 /* Keep track of which basic blocks each reg appears in. */
4008
4009 if (flags & PROP_REG_INFO)
4010 {
4011 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4012 REG_BASIC_BLOCK (regno) = blocknum;
4013 else if (REG_BASIC_BLOCK (regno) != blocknum)
4014 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4015
4016 /* Count (weighted) references, stores, etc. This counts a
4017 register twice if it is modified, but that is correct. */
4018 REG_N_SETS (regno)++;
4019 REG_N_REFS (regno) += loop_depth;
4020
4021 /* The insns where a reg is live are normally counted
4022 elsewhere, but we want the count to include the insn
4023 where the reg is set, and the normal counting mechanism
4024 would not count it. */
4025 REG_LIVE_LENGTH (regno)++;
4026 }
4027 }
4028
4029 if (! some_not_needed)
4030 {
4031 if (flags & PROP_LOG_LINKS)
4032 {
4033 /* Make a logical link from the next following insn
4034 that uses this register, back to this insn.
4035 The following insns have already been processed.
4036
4037 We don't build a LOG_LINK for hard registers containing
4038 in ASM_OPERANDs. If these registers get replaced,
4039 we might wind up changing the semantics of the insn,
4040 even if reload can make what appear to be valid
4041 assignments later. */
4042 if (y && (BLOCK_NUM (y) == blocknum)
4043 && (regno >= FIRST_PSEUDO_REGISTER
4044 || asm_noperands (PATTERN (y)) < 0))
4045 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4046 }
4047 }
4048 else if (! some_needed)
4049 {
4050 if (flags & PROP_REG_INFO)
4051 REG_N_DEATHS (REGNO (reg))++;
4052
4053 if (flags & PROP_DEATH_NOTES)
4054 {
4055 /* Note that dead stores have already been deleted
4056 when possible. If we get here, we have found a
4057 dead store that cannot be eliminated (because the
4058 same insn does something useful). Indicate this
4059 by marking the reg being set as dying here. */
4060 REG_NOTES (insn)
4061 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4062 }
4063 }
4064 else
4065 {
4066 if (flags & PROP_DEATH_NOTES)
4067 {
4068 /* This is a case where we have a multi-word hard register
4069 and some, but not all, of the words of the register are
4070 needed in subsequent insns. Write REG_UNUSED notes
4071 for those parts that were not needed. This case should
4072 be rare. */
4073
4074 int i;
4075
4076 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4077 i >= 0; i--)
4078 if (!REGNO_REG_SET_P (needed, regno + i))
4079 REG_NOTES (insn)
4080 = (alloc_EXPR_LIST
4081 (REG_UNUSED,
4082 gen_rtx_REG (reg_raw_mode[regno + i], regno + i),
4083 REG_NOTES (insn)));
4084 }
4085 }
4086 }
4087 }
4088 else if (GET_CODE (reg) == REG)
4089 {
4090 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4091 reg_next_use[regno] = 0;
4092 }
4093
4094 /* If this is the last pass and this is a SCRATCH, show it will be dying
4095 here and count it. */
4096 else if (GET_CODE (reg) == SCRATCH)
4097 {
4098 if (flags & PROP_DEATH_NOTES)
4099 REG_NOTES (insn)
4100 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4101 }
4102 }
4103 \f
4104 #ifdef AUTO_INC_DEC
4105
4106 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
4107 reference. */
4108
4109 static void
4110 find_auto_inc (needed, x, insn)
4111 regset needed;
4112 rtx x;
4113 rtx insn;
4114 {
4115 rtx addr = XEXP (x, 0);
4116 HOST_WIDE_INT offset = 0;
4117 rtx set;
4118
4119 /* Here we detect use of an index register which might be good for
4120 postincrement, postdecrement, preincrement, or predecrement. */
4121
4122 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4123 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4124
4125 if (GET_CODE (addr) == REG)
4126 {
4127 register rtx y;
4128 register int size = GET_MODE_SIZE (GET_MODE (x));
4129 rtx use;
4130 rtx incr;
4131 int regno = REGNO (addr);
4132
4133 /* Is the next use an increment that might make auto-increment? */
4134 if ((incr = reg_next_use[regno]) != 0
4135 && (set = single_set (incr)) != 0
4136 && GET_CODE (set) == SET
4137 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4138 /* Can't add side effects to jumps; if reg is spilled and
4139 reloaded, there's no way to store back the altered value. */
4140 && GET_CODE (insn) != JUMP_INSN
4141 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4142 && XEXP (y, 0) == addr
4143 && GET_CODE (XEXP (y, 1)) == CONST_INT
4144 && ((HAVE_POST_INCREMENT
4145 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4146 || (HAVE_POST_DECREMENT
4147 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4148 || (HAVE_PRE_INCREMENT
4149 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4150 || (HAVE_PRE_DECREMENT
4151 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4152 /* Make sure this reg appears only once in this insn. */
4153 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4154 use != 0 && use != (rtx) 1))
4155 {
4156 rtx q = SET_DEST (set);
4157 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4158 ? (offset ? PRE_INC : POST_INC)
4159 : (offset ? PRE_DEC : POST_DEC));
4160
4161 if (dead_or_set_p (incr, addr))
4162 {
4163 /* This is the simple case. Try to make the auto-inc. If
4164 we can't, we are done. Otherwise, we will do any
4165 needed updates below. */
4166 if (! validate_change (insn, &XEXP (x, 0),
4167 gen_rtx_fmt_e (inc_code, Pmode, addr),
4168 0))
4169 return;
4170 }
4171 else if (GET_CODE (q) == REG
4172 /* PREV_INSN used here to check the semi-open interval
4173 [insn,incr). */
4174 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4175 /* We must also check for sets of q as q may be
4176 a call clobbered hard register and there may
4177 be a call between PREV_INSN (insn) and incr. */
4178 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4179 {
4180 /* We have *p followed sometime later by q = p+size.
4181 Both p and q must be live afterward,
4182 and q is not used between INSN and its assignment.
4183 Change it to q = p, ...*q..., q = q+size.
4184 Then fall into the usual case. */
4185 rtx insns, temp;
4186 basic_block bb;
4187
4188 start_sequence ();
4189 emit_move_insn (q, addr);
4190 insns = get_insns ();
4191 end_sequence ();
4192
4193 bb = BLOCK_FOR_INSN (insn);
4194 for (temp = insns; temp; temp = NEXT_INSN (temp))
4195 set_block_for_insn (temp, bb);
4196
4197 /* If we can't make the auto-inc, or can't make the
4198 replacement into Y, exit. There's no point in making
4199 the change below if we can't do the auto-inc and doing
4200 so is not correct in the pre-inc case. */
4201
4202 validate_change (insn, &XEXP (x, 0),
4203 gen_rtx_fmt_e (inc_code, Pmode, q),
4204 1);
4205 validate_change (incr, &XEXP (y, 0), q, 1);
4206 if (! apply_change_group ())
4207 return;
4208
4209 /* We now know we'll be doing this change, so emit the
4210 new insn(s) and do the updates. */
4211 emit_insns_before (insns, insn);
4212
4213 if (BLOCK_FOR_INSN (insn)->head == insn)
4214 BLOCK_FOR_INSN (insn)->head = insns;
4215
4216 /* INCR will become a NOTE and INSN won't contain a
4217 use of ADDR. If a use of ADDR was just placed in
4218 the insn before INSN, make that the next use.
4219 Otherwise, invalidate it. */
4220 if (GET_CODE (PREV_INSN (insn)) == INSN
4221 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4222 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4223 reg_next_use[regno] = PREV_INSN (insn);
4224 else
4225 reg_next_use[regno] = 0;
4226
4227 addr = q;
4228 regno = REGNO (q);
4229
4230 /* REGNO is now used in INCR which is below INSN, but
4231 it previously wasn't live here. If we don't mark
4232 it as needed, we'll put a REG_DEAD note for it
4233 on this insn, which is incorrect. */
4234 SET_REGNO_REG_SET (needed, regno);
4235
4236 /* If there are any calls between INSN and INCR, show
4237 that REGNO now crosses them. */
4238 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4239 if (GET_CODE (temp) == CALL_INSN)
4240 REG_N_CALLS_CROSSED (regno)++;
4241 }
4242 else
4243 return;
4244
4245 /* If we haven't returned, it means we were able to make the
4246 auto-inc, so update the status. First, record that this insn
4247 has an implicit side effect. */
4248
4249 REG_NOTES (insn)
4250 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4251
4252 /* Modify the old increment-insn to simply copy
4253 the already-incremented value of our register. */
4254 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4255 abort ();
4256
4257 /* If that makes it a no-op (copying the register into itself) delete
4258 it so it won't appear to be a "use" and a "set" of this
4259 register. */
4260 if (SET_DEST (set) == addr)
4261 {
4262 PUT_CODE (incr, NOTE);
4263 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4264 NOTE_SOURCE_FILE (incr) = 0;
4265 }
4266
4267 if (regno >= FIRST_PSEUDO_REGISTER)
4268 {
4269 /* Count an extra reference to the reg. When a reg is
4270 incremented, spilling it is worse, so we want to make
4271 that less likely. */
4272 REG_N_REFS (regno) += loop_depth;
4273
4274 /* Count the increment as a setting of the register,
4275 even though it isn't a SET in rtl. */
4276 REG_N_SETS (regno)++;
4277 }
4278 }
4279 }
4280 }
4281 #endif /* AUTO_INC_DEC */
4282 \f
4283 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
4284 This is done assuming the registers needed from X
4285 are those that have 1-bits in NEEDED.
4286
4287 FLAGS is the set of enabled operations.
4288
4289 INSN is the containing instruction. If INSN is dead, this function is not
4290 called. */
4291
4292 static void
4293 mark_used_regs (needed, live, x, flags, insn)
4294 regset needed;
4295 regset live;
4296 rtx x;
4297 int flags;
4298 rtx insn;
4299 {
4300 register RTX_CODE code;
4301 register int regno;
4302 int i;
4303
4304 retry:
4305 code = GET_CODE (x);
4306 switch (code)
4307 {
4308 case LABEL_REF:
4309 case SYMBOL_REF:
4310 case CONST_INT:
4311 case CONST:
4312 case CONST_DOUBLE:
4313 case PC:
4314 case ADDR_VEC:
4315 case ADDR_DIFF_VEC:
4316 return;
4317
4318 #ifdef HAVE_cc0
4319 case CC0:
4320 cc0_live = 1;
4321 return;
4322 #endif
4323
4324 case CLOBBER:
4325 /* If we are clobbering a MEM, mark any registers inside the address
4326 as being used. */
4327 if (GET_CODE (XEXP (x, 0)) == MEM)
4328 mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), flags, insn);
4329 return;
4330
4331 case MEM:
4332 /* Don't bother watching stores to mems if this is not the
4333 final pass. We'll not be deleting dead stores this round. */
4334 if (flags & PROP_SCAN_DEAD_CODE)
4335 {
4336 /* Invalidate the data for the last MEM stored, but only if MEM is
4337 something that can be stored into. */
4338 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4339 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4340 ; /* needn't clear the memory set list */
4341 else
4342 {
4343 rtx temp = mem_set_list;
4344 rtx prev = NULL_RTX;
4345 rtx next;
4346
4347 while (temp)
4348 {
4349 next = XEXP (temp, 1);
4350 if (anti_dependence (XEXP (temp, 0), x))
4351 {
4352 /* Splice temp out of the list. */
4353 if (prev)
4354 XEXP (prev, 1) = next;
4355 else
4356 mem_set_list = next;
4357 free_EXPR_LIST_node (temp);
4358 }
4359 else
4360 prev = temp;
4361 temp = next;
4362 }
4363 }
4364
4365 /* If the memory reference had embedded side effects (autoincrement
4366 address modes. Then we may need to kill some entries on the
4367 memory set list. */
4368 if (insn)
4369 invalidate_mems_from_autoinc (insn);
4370 }
4371
4372 #ifdef AUTO_INC_DEC
4373 if (flags & PROP_AUTOINC)
4374 find_auto_inc (needed, x, insn);
4375 #endif
4376 break;
4377
4378 case SUBREG:
4379 if (GET_CODE (SUBREG_REG (x)) == REG
4380 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
4381 && (GET_MODE_SIZE (GET_MODE (x))
4382 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
4383 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
4384
4385 /* While we're here, optimize this case. */
4386 x = SUBREG_REG (x);
4387
4388 /* In case the SUBREG is not of a register, don't optimize */
4389 if (GET_CODE (x) != REG)
4390 {
4391 mark_used_regs (needed, live, x, flags, insn);
4392 return;
4393 }
4394
4395 /* ... fall through ... */
4396
4397 case REG:
4398 /* See a register other than being set
4399 => mark it as needed. */
4400
4401 regno = REGNO (x);
4402 {
4403 int some_needed = REGNO_REG_SET_P (needed, regno);
4404 int some_not_needed = ! some_needed;
4405
4406 SET_REGNO_REG_SET (live, regno);
4407
4408 /* A hard reg in a wide mode may really be multiple registers.
4409 If so, mark all of them just like the first. */
4410 if (regno < FIRST_PSEUDO_REGISTER)
4411 {
4412 int n;
4413
4414 /* For stack ptr or fixed arg pointer,
4415 nothing below can be necessary, so waste no more time. */
4416 if (regno == STACK_POINTER_REGNUM
4417 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4418 || (regno == HARD_FRAME_POINTER_REGNUM
4419 && (! reload_completed || frame_pointer_needed))
4420 #endif
4421 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4422 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4423 #endif
4424 || (regno == FRAME_POINTER_REGNUM
4425 && (! reload_completed || frame_pointer_needed)))
4426 {
4427 /* If this is a register we are going to try to eliminate,
4428 don't mark it live here. If we are successful in
4429 eliminating it, it need not be live unless it is used for
4430 pseudos, in which case it will have been set live when
4431 it was allocated to the pseudos. If the register will not
4432 be eliminated, reload will set it live at that point. */
4433
4434 if (! TEST_HARD_REG_BIT (elim_reg_set, regno))
4435 regs_ever_live[regno] = 1;
4436 return;
4437 }
4438 /* No death notes for global register variables;
4439 their values are live after this function exits. */
4440 if (global_regs[regno])
4441 {
4442 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4443 reg_next_use[regno] = insn;
4444 return;
4445 }
4446
4447 n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4448 while (--n > 0)
4449 {
4450 int regno_n = regno + n;
4451 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
4452
4453 SET_REGNO_REG_SET (live, regno_n);
4454 some_needed |= needed_regno;
4455 some_not_needed |= ! needed_regno;
4456 }
4457 }
4458
4459 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4460 {
4461 /* Record where each reg is used, so when the reg
4462 is set we know the next insn that uses it. */
4463
4464 reg_next_use[regno] = insn;
4465 }
4466 if (flags & PROP_REG_INFO)
4467 {
4468 if (regno < FIRST_PSEUDO_REGISTER)
4469 {
4470 /* If a hard reg is being used,
4471 record that this function does use it. */
4472
4473 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
4474 if (i == 0)
4475 i = 1;
4476 do
4477 regs_ever_live[regno + --i] = 1;
4478 while (i > 0);
4479 }
4480 else
4481 {
4482 /* Keep track of which basic block each reg appears in. */
4483
4484 register int blocknum = BLOCK_NUM (insn);
4485
4486 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4487 REG_BASIC_BLOCK (regno) = blocknum;
4488 else if (REG_BASIC_BLOCK (regno) != blocknum)
4489 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4490
4491 /* Count (weighted) number of uses of each reg. */
4492
4493 REG_N_REFS (regno) += loop_depth;
4494 }
4495 }
4496
4497 /* Record and count the insns in which a reg dies.
4498 If it is used in this insn and was dead below the insn
4499 then it dies in this insn. If it was set in this insn,
4500 we do not make a REG_DEAD note; likewise if we already
4501 made such a note. */
4502
4503 if (flags & PROP_DEATH_NOTES)
4504 {
4505 if (some_not_needed
4506 && ! dead_or_set_p (insn, x)
4507 #if 0
4508 && (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
4509 #endif
4510 )
4511 {
4512 /* Check for the case where the register dying partially
4513 overlaps the register set by this insn. */
4514 if (regno < FIRST_PSEUDO_REGISTER
4515 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
4516 {
4517 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4518 while (--n >= 0)
4519 some_needed |= dead_or_set_regno_p (insn, regno + n);
4520 }
4521
4522 /* If none of the words in X is needed, make a REG_DEAD
4523 note. Otherwise, we must make partial REG_DEAD notes. */
4524 if (! some_needed)
4525 {
4526 REG_NOTES (insn)
4527 = alloc_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
4528 REG_N_DEATHS (regno)++;
4529 }
4530 else
4531 {
4532 int i;
4533
4534 /* Don't make a REG_DEAD note for a part of a register
4535 that is set in the insn. */
4536
4537 for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
4538 i >= 0; i--)
4539 if (!REGNO_REG_SET_P (needed, regno + i)
4540 && ! dead_or_set_regno_p (insn, regno + i))
4541 REG_NOTES (insn)
4542 = (alloc_EXPR_LIST
4543 (REG_DEAD, gen_rtx_REG (reg_raw_mode[regno + i],
4544 regno + i),
4545 REG_NOTES (insn)));
4546 }
4547 }
4548 }
4549 }
4550 return;
4551
4552 case SET:
4553 {
4554 register rtx testreg = SET_DEST (x);
4555 int mark_dest = 0;
4556
4557 /* If storing into MEM, don't show it as being used. But do
4558 show the address as being used. */
4559 if (GET_CODE (testreg) == MEM)
4560 {
4561 #ifdef AUTO_INC_DEC
4562 if (flags & PROP_AUTOINC)
4563 find_auto_inc (needed, testreg, insn);
4564 #endif
4565 mark_used_regs (needed, live, XEXP (testreg, 0), flags, insn);
4566 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4567 return;
4568 }
4569
4570 /* Storing in STRICT_LOW_PART is like storing in a reg
4571 in that this SET might be dead, so ignore it in TESTREG.
4572 but in some other ways it is like using the reg.
4573
4574 Storing in a SUBREG or a bit field is like storing the entire
4575 register in that if the register's value is not used
4576 then this SET is not needed. */
4577 while (GET_CODE (testreg) == STRICT_LOW_PART
4578 || GET_CODE (testreg) == ZERO_EXTRACT
4579 || GET_CODE (testreg) == SIGN_EXTRACT
4580 || GET_CODE (testreg) == SUBREG)
4581 {
4582 if (GET_CODE (testreg) == SUBREG
4583 && GET_CODE (SUBREG_REG (testreg)) == REG
4584 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
4585 && (GET_MODE_SIZE (GET_MODE (testreg))
4586 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
4587 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
4588
4589 /* Modifying a single register in an alternate mode
4590 does not use any of the old value. But these other
4591 ways of storing in a register do use the old value. */
4592 if (GET_CODE (testreg) == SUBREG
4593 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4594 ;
4595 else
4596 mark_dest = 1;
4597
4598 testreg = XEXP (testreg, 0);
4599 }
4600
4601 /* If this is a store into a register,
4602 recursively scan the value being stored. */
4603
4604 if ((GET_CODE (testreg) == PARALLEL
4605 && GET_MODE (testreg) == BLKmode)
4606 || (GET_CODE (testreg) == REG
4607 && (regno = REGNO (testreg), ! (regno == FRAME_POINTER_REGNUM
4608 && (! reload_completed || frame_pointer_needed)))
4609 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4610 && ! (regno == HARD_FRAME_POINTER_REGNUM
4611 && (! reload_completed || frame_pointer_needed))
4612 #endif
4613 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4614 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4615 #endif
4616 ))
4617 /* We used to exclude global_regs here, but that seems wrong.
4618 Storing in them is like storing in mem. */
4619 {
4620 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4621 if (mark_dest)
4622 mark_used_regs (needed, live, SET_DEST (x), flags, insn);
4623 return;
4624 }
4625 }
4626 break;
4627
4628 case RETURN:
4629 /* ??? This info should have been gotten from mark_regs_live_at_end,
4630 as applied to the EXIT block, and propagated along the edge that
4631 connects this block to the EXIT. */
4632 break;
4633
4634 case ASM_OPERANDS:
4635 case UNSPEC_VOLATILE:
4636 case TRAP_IF:
4637 case ASM_INPUT:
4638 {
4639 /* Traditional and volatile asm instructions must be considered to use
4640 and clobber all hard registers, all pseudo-registers and all of
4641 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4642
4643 Consider for instance a volatile asm that changes the fpu rounding
4644 mode. An insn should not be moved across this even if it only uses
4645 pseudo-regs because it might give an incorrectly rounded result.
4646
4647 ?!? Unfortunately, marking all hard registers as live causes massive
4648 problems for the register allocator and marking all pseudos as live
4649 creates mountains of uninitialized variable warnings.
4650
4651 So for now, just clear the memory set list and mark any regs
4652 we can find in ASM_OPERANDS as used. */
4653 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4654 free_EXPR_LIST_list (&mem_set_list);
4655
4656 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4657 We can not just fall through here since then we would be confused
4658 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
4659 traditional asms unlike their normal usage. */
4660 if (code == ASM_OPERANDS)
4661 {
4662 int j;
4663
4664 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4665 mark_used_regs (needed, live, ASM_OPERANDS_INPUT (x, j),
4666 flags, insn);
4667 }
4668 break;
4669 }
4670
4671
4672 default:
4673 break;
4674 }
4675
4676 /* Recursively scan the operands of this expression. */
4677
4678 {
4679 register const char *fmt = GET_RTX_FORMAT (code);
4680 register int i;
4681
4682 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4683 {
4684 if (fmt[i] == 'e')
4685 {
4686 /* Tail recursive case: save a function call level. */
4687 if (i == 0)
4688 {
4689 x = XEXP (x, 0);
4690 goto retry;
4691 }
4692 mark_used_regs (needed, live, XEXP (x, i), flags, insn);
4693 }
4694 else if (fmt[i] == 'E')
4695 {
4696 register int j;
4697 for (j = 0; j < XVECLEN (x, i); j++)
4698 mark_used_regs (needed, live, XVECEXP (x, i, j), flags, insn);
4699 }
4700 }
4701 }
4702 }
4703 \f
4704 #ifdef AUTO_INC_DEC
4705
4706 static int
4707 try_pre_increment_1 (insn)
4708 rtx insn;
4709 {
4710 /* Find the next use of this reg. If in same basic block,
4711 make it do pre-increment or pre-decrement if appropriate. */
4712 rtx x = single_set (insn);
4713 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4714 * INTVAL (XEXP (SET_SRC (x), 1)));
4715 int regno = REGNO (SET_DEST (x));
4716 rtx y = reg_next_use[regno];
4717 if (y != 0
4718 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4719 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4720 mode would be better. */
4721 && ! dead_or_set_p (y, SET_DEST (x))
4722 && try_pre_increment (y, SET_DEST (x), amount))
4723 {
4724 /* We have found a suitable auto-increment
4725 and already changed insn Y to do it.
4726 So flush this increment-instruction. */
4727 PUT_CODE (insn, NOTE);
4728 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4729 NOTE_SOURCE_FILE (insn) = 0;
4730 /* Count a reference to this reg for the increment
4731 insn we are deleting. When a reg is incremented.
4732 spilling it is worse, so we want to make that
4733 less likely. */
4734 if (regno >= FIRST_PSEUDO_REGISTER)
4735 {
4736 REG_N_REFS (regno) += loop_depth;
4737 REG_N_SETS (regno)++;
4738 }
4739 return 1;
4740 }
4741 return 0;
4742 }
4743
4744 /* Try to change INSN so that it does pre-increment or pre-decrement
4745 addressing on register REG in order to add AMOUNT to REG.
4746 AMOUNT is negative for pre-decrement.
4747 Returns 1 if the change could be made.
4748 This checks all about the validity of the result of modifying INSN. */
4749
4750 static int
4751 try_pre_increment (insn, reg, amount)
4752 rtx insn, reg;
4753 HOST_WIDE_INT amount;
4754 {
4755 register rtx use;
4756
4757 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4758 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4759 int pre_ok = 0;
4760 /* Nonzero if we can try to make a post-increment or post-decrement.
4761 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4762 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4763 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4764 int post_ok = 0;
4765
4766 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4767 int do_post = 0;
4768
4769 /* From the sign of increment, see which possibilities are conceivable
4770 on this target machine. */
4771 if (HAVE_PRE_INCREMENT && amount > 0)
4772 pre_ok = 1;
4773 if (HAVE_POST_INCREMENT && amount > 0)
4774 post_ok = 1;
4775
4776 if (HAVE_PRE_DECREMENT && amount < 0)
4777 pre_ok = 1;
4778 if (HAVE_POST_DECREMENT && amount < 0)
4779 post_ok = 1;
4780
4781 if (! (pre_ok || post_ok))
4782 return 0;
4783
4784 /* It is not safe to add a side effect to a jump insn
4785 because if the incremented register is spilled and must be reloaded
4786 there would be no way to store the incremented value back in memory. */
4787
4788 if (GET_CODE (insn) == JUMP_INSN)
4789 return 0;
4790
4791 use = 0;
4792 if (pre_ok)
4793 use = find_use_as_address (PATTERN (insn), reg, 0);
4794 if (post_ok && (use == 0 || use == (rtx) 1))
4795 {
4796 use = find_use_as_address (PATTERN (insn), reg, -amount);
4797 do_post = 1;
4798 }
4799
4800 if (use == 0 || use == (rtx) 1)
4801 return 0;
4802
4803 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4804 return 0;
4805
4806 /* See if this combination of instruction and addressing mode exists. */
4807 if (! validate_change (insn, &XEXP (use, 0),
4808 gen_rtx_fmt_e (amount > 0
4809 ? (do_post ? POST_INC : PRE_INC)
4810 : (do_post ? POST_DEC : PRE_DEC),
4811 Pmode, reg), 0))
4812 return 0;
4813
4814 /* Record that this insn now has an implicit side effect on X. */
4815 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4816 return 1;
4817 }
4818
4819 #endif /* AUTO_INC_DEC */
4820 \f
4821 /* Find the place in the rtx X where REG is used as a memory address.
4822 Return the MEM rtx that so uses it.
4823 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4824 (plus REG (const_int PLUSCONST)).
4825
4826 If such an address does not appear, return 0.
4827 If REG appears more than once, or is used other than in such an address,
4828 return (rtx)1. */
4829
4830 rtx
4831 find_use_as_address (x, reg, plusconst)
4832 register rtx x;
4833 rtx reg;
4834 HOST_WIDE_INT plusconst;
4835 {
4836 enum rtx_code code = GET_CODE (x);
4837 const char *fmt = GET_RTX_FORMAT (code);
4838 register int i;
4839 register rtx value = 0;
4840 register rtx tem;
4841
4842 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4843 return x;
4844
4845 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4846 && XEXP (XEXP (x, 0), 0) == reg
4847 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4848 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4849 return x;
4850
4851 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4852 {
4853 /* If REG occurs inside a MEM used in a bit-field reference,
4854 that is unacceptable. */
4855 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4856 return (rtx) (HOST_WIDE_INT) 1;
4857 }
4858
4859 if (x == reg)
4860 return (rtx) (HOST_WIDE_INT) 1;
4861
4862 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4863 {
4864 if (fmt[i] == 'e')
4865 {
4866 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4867 if (value == 0)
4868 value = tem;
4869 else if (tem != 0)
4870 return (rtx) (HOST_WIDE_INT) 1;
4871 }
4872 if (fmt[i] == 'E')
4873 {
4874 register int j;
4875 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4876 {
4877 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4878 if (value == 0)
4879 value = tem;
4880 else if (tem != 0)
4881 return (rtx) (HOST_WIDE_INT) 1;
4882 }
4883 }
4884 }
4885
4886 return value;
4887 }
4888 \f
4889 /* Write information about registers and basic blocks into FILE.
4890 This is part of making a debugging dump. */
4891
4892 void
4893 dump_flow_info (file)
4894 FILE *file;
4895 {
4896 register int i;
4897 static const char * const reg_class_names[] = REG_CLASS_NAMES;
4898
4899 fprintf (file, "%d registers.\n", max_regno);
4900 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4901 if (REG_N_REFS (i))
4902 {
4903 enum reg_class class, altclass;
4904 fprintf (file, "\nRegister %d used %d times across %d insns",
4905 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
4906 if (REG_BASIC_BLOCK (i) >= 0)
4907 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
4908 if (REG_N_SETS (i))
4909 fprintf (file, "; set %d time%s", REG_N_SETS (i),
4910 (REG_N_SETS (i) == 1) ? "" : "s");
4911 if (REG_USERVAR_P (regno_reg_rtx[i]))
4912 fprintf (file, "; user var");
4913 if (REG_N_DEATHS (i) != 1)
4914 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
4915 if (REG_N_CALLS_CROSSED (i) == 1)
4916 fprintf (file, "; crosses 1 call");
4917 else if (REG_N_CALLS_CROSSED (i))
4918 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
4919 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
4920 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
4921 class = reg_preferred_class (i);
4922 altclass = reg_alternate_class (i);
4923 if (class != GENERAL_REGS || altclass != ALL_REGS)
4924 {
4925 if (altclass == ALL_REGS || class == ALL_REGS)
4926 fprintf (file, "; pref %s", reg_class_names[(int) class]);
4927 else if (altclass == NO_REGS)
4928 fprintf (file, "; %s or none", reg_class_names[(int) class]);
4929 else
4930 fprintf (file, "; pref %s, else %s",
4931 reg_class_names[(int) class],
4932 reg_class_names[(int) altclass]);
4933 }
4934 if (REGNO_POINTER_FLAG (i))
4935 fprintf (file, "; pointer");
4936 fprintf (file, ".\n");
4937 }
4938
4939 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
4940 for (i = 0; i < n_basic_blocks; i++)
4941 {
4942 register basic_block bb = BASIC_BLOCK (i);
4943 register int regno;
4944 register edge e;
4945
4946 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d.\n",
4947 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
4948
4949 fprintf (file, "Predecessors: ");
4950 for (e = bb->pred; e ; e = e->pred_next)
4951 dump_edge_info (file, e, 0);
4952
4953 fprintf (file, "\nSuccessors: ");
4954 for (e = bb->succ; e ; e = e->succ_next)
4955 dump_edge_info (file, e, 1);
4956
4957 fprintf (file, "\nRegisters live at start:");
4958 if (bb->global_live_at_start)
4959 {
4960 for (regno = 0; regno < max_regno; regno++)
4961 if (REGNO_REG_SET_P (bb->global_live_at_start, regno))
4962 fprintf (file, " %d", regno);
4963 }
4964 else
4965 fprintf (file, " n/a");
4966
4967 fprintf (file, "\nRegisters live at end:");
4968 if (bb->global_live_at_end)
4969 {
4970 for (regno = 0; regno < max_regno; regno++)
4971 if (REGNO_REG_SET_P (bb->global_live_at_end, regno))
4972 fprintf (file, " %d", regno);
4973 }
4974 else
4975 fprintf (file, " n/a");
4976
4977 putc('\n', file);
4978 }
4979
4980 putc('\n', file);
4981 }
4982
4983 void
4984 debug_flow_info ()
4985 {
4986 dump_flow_info (stderr);
4987 }
4988
4989 static void
4990 dump_edge_info (file, e, do_succ)
4991 FILE *file;
4992 edge e;
4993 int do_succ;
4994 {
4995 basic_block side = (do_succ ? e->dest : e->src);
4996
4997 if (side == ENTRY_BLOCK_PTR)
4998 fputs (" ENTRY", file);
4999 else if (side == EXIT_BLOCK_PTR)
5000 fputs (" EXIT", file);
5001 else
5002 fprintf (file, " %d", side->index);
5003
5004 if (e->flags)
5005 {
5006 static const char * const bitnames[] = {
5007 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5008 };
5009 int comma = 0;
5010 int i, flags = e->flags;
5011
5012 fputc (' ', file);
5013 fputc ('(', file);
5014 for (i = 0; flags; i++)
5015 if (flags & (1 << i))
5016 {
5017 flags &= ~(1 << i);
5018
5019 if (comma)
5020 fputc (',', file);
5021 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5022 fputs (bitnames[i], file);
5023 else
5024 fprintf (file, "%d", i);
5025 comma = 1;
5026 }
5027 fputc (')', file);
5028 }
5029 }
5030
5031 \f
5032 /* Like print_rtl, but also print out live information for the start of each
5033 basic block. */
5034
5035 void
5036 print_rtl_with_bb (outf, rtx_first)
5037 FILE *outf;
5038 rtx rtx_first;
5039 {
5040 register rtx tmp_rtx;
5041
5042 if (rtx_first == 0)
5043 fprintf (outf, "(nil)\n");
5044 else
5045 {
5046 int i;
5047 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5048 int max_uid = get_max_uid ();
5049 basic_block *start = (basic_block *)
5050 xcalloc (max_uid, sizeof (basic_block));
5051 basic_block *end = (basic_block *)
5052 xcalloc (max_uid, sizeof (basic_block));
5053 enum bb_state *in_bb_p = (enum bb_state *)
5054 xcalloc (max_uid, sizeof (enum bb_state));
5055
5056 for (i = n_basic_blocks - 1; i >= 0; i--)
5057 {
5058 basic_block bb = BASIC_BLOCK (i);
5059 rtx x;
5060
5061 start[INSN_UID (bb->head)] = bb;
5062 end[INSN_UID (bb->end)] = bb;
5063 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5064 {
5065 enum bb_state state = IN_MULTIPLE_BB;
5066 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5067 state = IN_ONE_BB;
5068 in_bb_p[INSN_UID(x)] = state;
5069
5070 if (x == bb->end)
5071 break;
5072 }
5073 }
5074
5075 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5076 {
5077 int did_output;
5078 basic_block bb;
5079
5080 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5081 {
5082 fprintf (outf, ";; Start of basic block %d, registers live:",
5083 bb->index);
5084
5085 EXECUTE_IF_SET_IN_REG_SET (bb->global_live_at_start, 0, i,
5086 {
5087 fprintf (outf, " %d", i);
5088 if (i < FIRST_PSEUDO_REGISTER)
5089 fprintf (outf, " [%s]",
5090 reg_names[i]);
5091 });
5092 putc ('\n', outf);
5093 }
5094
5095 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5096 && GET_CODE (tmp_rtx) != NOTE
5097 && GET_CODE (tmp_rtx) != BARRIER
5098 && ! obey_regdecls)
5099 fprintf (outf, ";; Insn is not within a basic block\n");
5100 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5101 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5102
5103 did_output = print_rtl_single (outf, tmp_rtx);
5104
5105 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5106 fprintf (outf, ";; End of basic block %d\n", bb->index);
5107
5108 if (did_output)
5109 putc ('\n', outf);
5110 }
5111
5112 free (start);
5113 free (end);
5114 free (in_bb_p);
5115 }
5116
5117 if (current_function_epilogue_delay_list != 0)
5118 {
5119 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5120 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5121 tmp_rtx = XEXP (tmp_rtx, 1))
5122 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5123 }
5124 }
5125
5126 /* Compute dominator relationships using new flow graph structures. */
5127 void
5128 compute_flow_dominators (dominators, post_dominators)
5129 sbitmap *dominators;
5130 sbitmap *post_dominators;
5131 {
5132 int bb;
5133 sbitmap *temp_bitmap;
5134 edge e;
5135 basic_block *worklist, *tos;
5136
5137 /* Allocate a worklist array/queue. Entries are only added to the
5138 list if they were not already on the list. So the size is
5139 bounded by the number of basic blocks. */
5140 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block)
5141 * n_basic_blocks);
5142
5143 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5144 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5145
5146 if (dominators)
5147 {
5148 /* The optimistic setting of dominators requires us to put every
5149 block on the work list initially. */
5150 for (bb = 0; bb < n_basic_blocks; bb++)
5151 {
5152 *tos++ = BASIC_BLOCK (bb);
5153 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5154 }
5155
5156 /* We want a maximal solution, so initially assume everything dominates
5157 everything else. */
5158 sbitmap_vector_ones (dominators, n_basic_blocks);
5159
5160 /* Mark successors of the entry block so we can identify them below. */
5161 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5162 e->dest->aux = ENTRY_BLOCK_PTR;
5163
5164 /* Iterate until the worklist is empty. */
5165 while (tos != worklist)
5166 {
5167 /* Take the first entry off the worklist. */
5168 basic_block b = *--tos;
5169 bb = b->index;
5170
5171 /* Compute the intersection of the dominators of all the
5172 predecessor blocks.
5173
5174 If one of the predecessor blocks is the ENTRY block, then the
5175 intersection of the dominators of the predecessor blocks is
5176 defined as the null set. We can identify such blocks by the
5177 special value in the AUX field in the block structure. */
5178 if (b->aux == ENTRY_BLOCK_PTR)
5179 {
5180 /* Do not clear the aux field for blocks which are
5181 successors of the ENTRY block. That way we we never
5182 add them to the worklist again.
5183
5184 The intersect of dominators of the preds of this block is
5185 defined as the null set. */
5186 sbitmap_zero (temp_bitmap[bb]);
5187 }
5188 else
5189 {
5190 /* Clear the aux field of this block so it can be added to
5191 the worklist again if necessary. */
5192 b->aux = NULL;
5193 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5194 }
5195
5196 /* Make sure each block always dominates itself. */
5197 SET_BIT (temp_bitmap[bb], bb);
5198
5199 /* If the out state of this block changed, then we need to
5200 add the successors of this block to the worklist if they
5201 are not already on the worklist. */
5202 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5203 {
5204 for (e = b->succ; e; e = e->succ_next)
5205 {
5206 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
5207 {
5208 *tos++ = e->dest;
5209 e->dest->aux = e;
5210 }
5211 }
5212 }
5213 }
5214 }
5215
5216 if (post_dominators)
5217 {
5218 /* The optimistic setting of dominators requires us to put every
5219 block on the work list initially. */
5220 for (bb = 0; bb < n_basic_blocks; bb++)
5221 {
5222 *tos++ = BASIC_BLOCK (bb);
5223 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5224 }
5225
5226 /* We want a maximal solution, so initially assume everything post
5227 dominates everything else. */
5228 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5229
5230 /* Mark predecessors of the exit block so we can identify them below. */
5231 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5232 e->src->aux = EXIT_BLOCK_PTR;
5233
5234 /* Iterate until the worklist is empty. */
5235 while (tos != worklist)
5236 {
5237 /* Take the first entry off the worklist. */
5238 basic_block b = *--tos;
5239 bb = b->index;
5240
5241 /* Compute the intersection of the post dominators of all the
5242 successor blocks.
5243
5244 If one of the successor blocks is the EXIT block, then the
5245 intersection of the dominators of the successor blocks is
5246 defined as the null set. We can identify such blocks by the
5247 special value in the AUX field in the block structure. */
5248 if (b->aux == EXIT_BLOCK_PTR)
5249 {
5250 /* Do not clear the aux field for blocks which are
5251 predecessors of the EXIT block. That way we we never
5252 add them to the worklist again.
5253
5254 The intersect of dominators of the succs of this block is
5255 defined as the null set. */
5256 sbitmap_zero (temp_bitmap[bb]);
5257 }
5258 else
5259 {
5260 /* Clear the aux field of this block so it can be added to
5261 the worklist again if necessary. */
5262 b->aux = NULL;
5263 sbitmap_intersection_of_succs (temp_bitmap[bb],
5264 post_dominators, bb);
5265 }
5266
5267 /* Make sure each block always post dominates itself. */
5268 SET_BIT (temp_bitmap[bb], bb);
5269
5270 /* If the out state of this block changed, then we need to
5271 add the successors of this block to the worklist if they
5272 are not already on the worklist. */
5273 if (sbitmap_a_and_b (post_dominators[bb],
5274 post_dominators[bb],
5275 temp_bitmap[bb]))
5276 {
5277 for (e = b->pred; e; e = e->pred_next)
5278 {
5279 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
5280 {
5281 *tos++ = e->src;
5282 e->src->aux = e;
5283 }
5284 }
5285 }
5286 }
5287 }
5288 free (temp_bitmap);
5289 }
5290
5291 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
5292
5293 void
5294 compute_immediate_dominators (idom, dominators)
5295 int *idom;
5296 sbitmap *dominators;
5297 {
5298 sbitmap *tmp;
5299 int b;
5300
5301 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5302
5303 /* Begin with tmp(n) = dom(n) - { n }. */
5304 for (b = n_basic_blocks; --b >= 0; )
5305 {
5306 sbitmap_copy (tmp[b], dominators[b]);
5307 RESET_BIT (tmp[b], b);
5308 }
5309
5310 /* Subtract out all of our dominator's dominators. */
5311 for (b = n_basic_blocks; --b >= 0; )
5312 {
5313 sbitmap tmp_b = tmp[b];
5314 int s;
5315
5316 for (s = n_basic_blocks; --s >= 0; )
5317 if (TEST_BIT (tmp_b, s))
5318 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
5319 }
5320
5321 /* Find the one bit set in the bitmap and put it in the output array. */
5322 for (b = n_basic_blocks; --b >= 0; )
5323 {
5324 int t;
5325 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
5326 }
5327
5328 sbitmap_vector_free (tmp);
5329 }
5330
5331 /* Count for a single SET rtx, X. */
5332
5333 static void
5334 count_reg_sets_1 (x)
5335 rtx x;
5336 {
5337 register int regno;
5338 register rtx reg = SET_DEST (x);
5339
5340 /* Find the register that's set/clobbered. */
5341 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
5342 || GET_CODE (reg) == SIGN_EXTRACT
5343 || GET_CODE (reg) == STRICT_LOW_PART)
5344 reg = XEXP (reg, 0);
5345
5346 if (GET_CODE (reg) == PARALLEL
5347 && GET_MODE (reg) == BLKmode)
5348 {
5349 register int i;
5350 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5351 count_reg_sets_1 (XVECEXP (reg, 0, i));
5352 return;
5353 }
5354
5355 if (GET_CODE (reg) == REG)
5356 {
5357 regno = REGNO (reg);
5358 if (regno >= FIRST_PSEUDO_REGISTER)
5359 {
5360 /* Count (weighted) references, stores, etc. This counts a
5361 register twice if it is modified, but that is correct. */
5362 REG_N_SETS (regno)++;
5363
5364 REG_N_REFS (regno) += loop_depth;
5365 }
5366 }
5367 }
5368
5369 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
5370 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
5371
5372 static void
5373 count_reg_sets (x)
5374 rtx x;
5375 {
5376 register RTX_CODE code = GET_CODE (x);
5377
5378 if (code == SET || code == CLOBBER)
5379 count_reg_sets_1 (x);
5380 else if (code == PARALLEL)
5381 {
5382 register int i;
5383 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5384 {
5385 code = GET_CODE (XVECEXP (x, 0, i));
5386 if (code == SET || code == CLOBBER)
5387 count_reg_sets_1 (XVECEXP (x, 0, i));
5388 }
5389 }
5390 }
5391
5392 /* Increment REG_N_REFS by the current loop depth each register reference
5393 found in X. */
5394
5395 static void
5396 count_reg_references (x)
5397 rtx x;
5398 {
5399 register RTX_CODE code;
5400
5401 retry:
5402 code = GET_CODE (x);
5403 switch (code)
5404 {
5405 case LABEL_REF:
5406 case SYMBOL_REF:
5407 case CONST_INT:
5408 case CONST:
5409 case CONST_DOUBLE:
5410 case PC:
5411 case ADDR_VEC:
5412 case ADDR_DIFF_VEC:
5413 case ASM_INPUT:
5414 return;
5415
5416 #ifdef HAVE_cc0
5417 case CC0:
5418 return;
5419 #endif
5420
5421 case CLOBBER:
5422 /* If we are clobbering a MEM, mark any registers inside the address
5423 as being used. */
5424 if (GET_CODE (XEXP (x, 0)) == MEM)
5425 count_reg_references (XEXP (XEXP (x, 0), 0));
5426 return;
5427
5428 case SUBREG:
5429 /* While we're here, optimize this case. */
5430 x = SUBREG_REG (x);
5431
5432 /* In case the SUBREG is not of a register, don't optimize */
5433 if (GET_CODE (x) != REG)
5434 {
5435 count_reg_references (x);
5436 return;
5437 }
5438
5439 /* ... fall through ... */
5440
5441 case REG:
5442 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
5443 REG_N_REFS (REGNO (x)) += loop_depth;
5444 return;
5445
5446 case SET:
5447 {
5448 register rtx testreg = SET_DEST (x);
5449 int mark_dest = 0;
5450
5451 /* If storing into MEM, don't show it as being used. But do
5452 show the address as being used. */
5453 if (GET_CODE (testreg) == MEM)
5454 {
5455 count_reg_references (XEXP (testreg, 0));
5456 count_reg_references (SET_SRC (x));
5457 return;
5458 }
5459
5460 /* Storing in STRICT_LOW_PART is like storing in a reg
5461 in that this SET might be dead, so ignore it in TESTREG.
5462 but in some other ways it is like using the reg.
5463
5464 Storing in a SUBREG or a bit field is like storing the entire
5465 register in that if the register's value is not used
5466 then this SET is not needed. */
5467 while (GET_CODE (testreg) == STRICT_LOW_PART
5468 || GET_CODE (testreg) == ZERO_EXTRACT
5469 || GET_CODE (testreg) == SIGN_EXTRACT
5470 || GET_CODE (testreg) == SUBREG)
5471 {
5472 /* Modifying a single register in an alternate mode
5473 does not use any of the old value. But these other
5474 ways of storing in a register do use the old value. */
5475 if (GET_CODE (testreg) == SUBREG
5476 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5477 ;
5478 else
5479 mark_dest = 1;
5480
5481 testreg = XEXP (testreg, 0);
5482 }
5483
5484 /* If this is a store into a register,
5485 recursively scan the value being stored. */
5486
5487 if ((GET_CODE (testreg) == PARALLEL
5488 && GET_MODE (testreg) == BLKmode)
5489 || GET_CODE (testreg) == REG)
5490 {
5491 count_reg_references (SET_SRC (x));
5492 if (mark_dest)
5493 count_reg_references (SET_DEST (x));
5494 return;
5495 }
5496 }
5497 break;
5498
5499 default:
5500 break;
5501 }
5502
5503 /* Recursively scan the operands of this expression. */
5504
5505 {
5506 register const char *fmt = GET_RTX_FORMAT (code);
5507 register int i;
5508
5509 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5510 {
5511 if (fmt[i] == 'e')
5512 {
5513 /* Tail recursive case: save a function call level. */
5514 if (i == 0)
5515 {
5516 x = XEXP (x, 0);
5517 goto retry;
5518 }
5519 count_reg_references (XEXP (x, i));
5520 }
5521 else if (fmt[i] == 'E')
5522 {
5523 register int j;
5524 for (j = 0; j < XVECLEN (x, i); j++)
5525 count_reg_references (XVECEXP (x, i, j));
5526 }
5527 }
5528 }
5529 }
5530
5531 /* Recompute register set/reference counts immediately prior to register
5532 allocation.
5533
5534 This avoids problems with set/reference counts changing to/from values
5535 which have special meanings to the register allocators.
5536
5537 Additionally, the reference counts are the primary component used by the
5538 register allocators to prioritize pseudos for allocation to hard regs.
5539 More accurate reference counts generally lead to better register allocation.
5540
5541 F is the first insn to be scanned.
5542 LOOP_STEP denotes how much loop_depth should be incremented per
5543 loop nesting level in order to increase the ref count more for references
5544 in a loop.
5545
5546 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
5547 possibly other information which is used by the register allocators. */
5548
5549 void
5550 recompute_reg_usage (f, loop_step)
5551 rtx f;
5552 int loop_step;
5553 {
5554 rtx insn;
5555 int i, max_reg;
5556 int index;
5557
5558 /* Clear out the old data. */
5559 max_reg = max_reg_num ();
5560 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
5561 {
5562 REG_N_SETS (i) = 0;
5563 REG_N_REFS (i) = 0;
5564 }
5565
5566 /* Scan each insn in the chain and count how many times each register is
5567 set/used. */
5568 loop_depth = 1;
5569 for (index = 0; index < n_basic_blocks; index++)
5570 {
5571 basic_block bb = BASIC_BLOCK (index);
5572 loop_depth = bb->loop_depth;
5573 for (insn = bb->head; insn; insn = NEXT_INSN (insn))
5574 {
5575 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5576 {
5577 rtx links;
5578
5579 /* This call will increment REG_N_SETS for each SET or CLOBBER
5580 of a register in INSN. It will also increment REG_N_REFS
5581 by the loop depth for each set of a register in INSN. */
5582 count_reg_sets (PATTERN (insn));
5583
5584 /* count_reg_sets does not detect autoincrement address modes, so
5585 detect them here by looking at the notes attached to INSN. */
5586 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5587 {
5588 if (REG_NOTE_KIND (links) == REG_INC)
5589 /* Count (weighted) references, stores, etc. This counts a
5590 register twice if it is modified, but that is correct. */
5591 REG_N_SETS (REGNO (XEXP (links, 0)))++;
5592 }
5593
5594 /* This call will increment REG_N_REFS by the current loop depth for
5595 each reference to a register in INSN. */
5596 count_reg_references (PATTERN (insn));
5597
5598 /* count_reg_references will not include counts for arguments to
5599 function calls, so detect them here by examining the
5600 CALL_INSN_FUNCTION_USAGE data. */
5601 if (GET_CODE (insn) == CALL_INSN)
5602 {
5603 rtx note;
5604
5605 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5606 note;
5607 note = XEXP (note, 1))
5608 if (GET_CODE (XEXP (note, 0)) == USE)
5609 count_reg_references (XEXP (XEXP (note, 0), 0));
5610 }
5611 }
5612 if (insn == bb->end)
5613 break;
5614 }
5615 }
5616 }
5617
5618 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5619 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
5620 of the number of registers that died. */
5621
5622 int
5623 count_or_remove_death_notes (blocks, kill)
5624 sbitmap blocks;
5625 int kill;
5626 {
5627 int i, count = 0;
5628
5629 for (i = n_basic_blocks - 1; i >= 0; --i)
5630 {
5631 basic_block bb;
5632 rtx insn;
5633
5634 if (blocks && ! TEST_BIT (blocks, i))
5635 continue;
5636
5637 bb = BASIC_BLOCK (i);
5638
5639 for (insn = bb->head; ; insn = NEXT_INSN (insn))
5640 {
5641 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5642 {
5643 rtx *pprev = &REG_NOTES (insn);
5644 rtx link = *pprev;
5645
5646 while (link)
5647 {
5648 switch (REG_NOTE_KIND (link))
5649 {
5650 case REG_DEAD:
5651 if (GET_CODE (XEXP (link, 0)) == REG)
5652 {
5653 rtx reg = XEXP (link, 0);
5654 int n;
5655
5656 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
5657 n = 1;
5658 else
5659 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
5660 count += n;
5661 }
5662 /* FALLTHRU */
5663
5664 case REG_UNUSED:
5665 if (kill)
5666 {
5667 rtx next = XEXP (link, 1);
5668 free_EXPR_LIST_node (link);
5669 *pprev = link = next;
5670 break;
5671 }
5672 /* FALLTHRU */
5673
5674 default:
5675 pprev = &XEXP (link, 1);
5676 link = *pprev;
5677 break;
5678 }
5679 }
5680 }
5681
5682 if (insn == bb->end)
5683 break;
5684 }
5685 }
5686
5687 return count;
5688 }
5689
5690 /* Record INSN's block as BB. */
5691
5692 void
5693 set_block_for_insn (insn, bb)
5694 rtx insn;
5695 basic_block bb;
5696 {
5697 size_t uid = INSN_UID (insn);
5698 if (uid >= basic_block_for_insn->num_elements)
5699 {
5700 int new_size;
5701
5702 /* Add one-eighth the size so we don't keep calling xrealloc. */
5703 new_size = uid + (uid + 7) / 8;
5704
5705 VARRAY_GROW (basic_block_for_insn, new_size);
5706 }
5707 VARRAY_BB (basic_block_for_insn, uid) = bb;
5708 }
5709
5710 /* Record INSN's block number as BB. */
5711 /* ??? This has got to go. */
5712
5713 void
5714 set_block_num (insn, bb)
5715 rtx insn;
5716 int bb;
5717 {
5718 set_block_for_insn (insn, BASIC_BLOCK (bb));
5719 }
5720 \f
5721 /* Verify the CFG consistency. This function check some CFG invariants and
5722 aborts when something is wrong. Hope that this function will help to
5723 convert many optimization passes to preserve CFG consistent.
5724
5725 Currently it does following checks:
5726
5727 - test head/end pointers
5728 - overlapping of basic blocks
5729 - edge list corectness
5730 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5731 - tails of basic blocks (ensure that boundary is necesary)
5732 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5733 and NOTE_INSN_BASIC_BLOCK
5734 - check that all insns are in the basic blocks
5735 (except the switch handling code, barriers and notes)
5736
5737 In future it can be extended check a lot of other stuff as well
5738 (reachability of basic blocks, life information, etc. etc.). */
5739
5740 void
5741 verify_flow_info ()
5742 {
5743 const int max_uid = get_max_uid ();
5744 const rtx rtx_first = get_insns ();
5745 basic_block *bb_info;
5746 rtx x;
5747 int i, err = 0;
5748
5749 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
5750
5751 /* First pass check head/end pointers and set bb_info array used by
5752 later passes. */
5753 for (i = n_basic_blocks - 1; i >= 0; i--)
5754 {
5755 basic_block bb = BASIC_BLOCK (i);
5756
5757 /* Check the head pointer and make sure that it is pointing into
5758 insn list. */
5759 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5760 if (x == bb->head)
5761 break;
5762 if (!x)
5763 {
5764 error ("Head insn %d for block %d not found in the insn stream.",
5765 INSN_UID (bb->head), bb->index);
5766 err = 1;
5767 }
5768
5769 /* Check the end pointer and make sure that it is pointing into
5770 insn list. */
5771 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5772 {
5773 if (bb_info[INSN_UID (x)] != NULL)
5774 {
5775 error ("Insn %d is in multiple basic blocks (%d and %d)",
5776 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
5777 err = 1;
5778 }
5779 bb_info[INSN_UID (x)] = bb;
5780
5781 if (x == bb->end)
5782 break;
5783 }
5784 if (!x)
5785 {
5786 error ("End insn %d for block %d not found in the insn stream.",
5787 INSN_UID (bb->end), bb->index);
5788 err = 1;
5789 }
5790 }
5791
5792 /* Now check the basic blocks (boundaries etc.) */
5793 for (i = n_basic_blocks - 1; i >= 0; i--)
5794 {
5795 basic_block bb = BASIC_BLOCK (i);
5796 /* Check corectness of edge lists */
5797 edge e;
5798
5799 e = bb->succ;
5800 while (e)
5801 {
5802 if (e->src != bb)
5803 {
5804 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
5805 bb->index);
5806 fprintf (stderr, "Predecessor: ");
5807 dump_edge_info (stderr, e, 0);
5808 fprintf (stderr, "\nSuccessor: ");
5809 dump_edge_info (stderr, e, 1);
5810 fflush (stderr);
5811 err = 1;
5812 }
5813 if (e->dest != EXIT_BLOCK_PTR)
5814 {
5815 edge e2 = e->dest->pred;
5816 while (e2 && e2 != e)
5817 e2 = e2->pred_next;
5818 if (!e2)
5819 {
5820 error ("Basic block %i edge lists are corrupted", bb->index);
5821 err = 1;
5822 }
5823 }
5824 e = e->succ_next;
5825 }
5826
5827 e = bb->pred;
5828 while (e)
5829 {
5830 if (e->dest != bb)
5831 {
5832 error ("Basic block %d pred edge is corrupted", bb->index);
5833 fputs ("Predecessor: ", stderr);
5834 dump_edge_info (stderr, e, 0);
5835 fputs ("\nSuccessor: ", stderr);
5836 dump_edge_info (stderr, e, 1);
5837 fputc ('\n', stderr);
5838 err = 1;
5839 }
5840 if (e->src != ENTRY_BLOCK_PTR)
5841 {
5842 edge e2 = e->src->succ;
5843 while (e2 && e2 != e)
5844 e2 = e2->succ_next;
5845 if (!e2)
5846 {
5847 error ("Basic block %i edge lists are corrupted", bb->index);
5848 err = 1;
5849 }
5850 }
5851 e = e->pred_next;
5852 }
5853
5854 /* OK pointers are correct. Now check the header of basic
5855 block. It ought to contain optional CODE_LABEL followed
5856 by NOTE_BASIC_BLOCK. */
5857 x = bb->head;
5858 if (GET_CODE (x) == CODE_LABEL)
5859 {
5860 if (bb->end == x)
5861 {
5862 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
5863 bb->index);
5864 err = 1;
5865 }
5866 x = NEXT_INSN (x);
5867 }
5868 if (GET_CODE (x) != NOTE
5869 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
5870 || NOTE_BASIC_BLOCK (x) != bb)
5871 {
5872 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
5873 bb->index);
5874 err = 1;
5875 }
5876
5877 if (bb->end == x)
5878 {
5879 /* Do checks for empty blocks here */
5880 }
5881 else
5882 {
5883 x = NEXT_INSN (x);
5884 while (x)
5885 {
5886 if (GET_CODE (x) == NOTE
5887 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
5888 {
5889 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
5890 INSN_UID (x), bb->index);
5891 err = 1;
5892 }
5893
5894 if (x == bb->end)
5895 break;
5896
5897 if (GET_CODE (x) == JUMP_INSN
5898 || GET_CODE (x) == CODE_LABEL
5899 || GET_CODE (x) == BARRIER)
5900 {
5901 error ("In basic block %d:", bb->index);
5902 fatal_insn ("Flow control insn inside a basic block", x);
5903 }
5904
5905 x = NEXT_INSN (x);
5906 }
5907 }
5908 }
5909
5910 x = rtx_first;
5911 while (x)
5912 {
5913 if (!bb_info[INSN_UID (x)])
5914 {
5915 switch (GET_CODE (x))
5916 {
5917 case BARRIER:
5918 case NOTE:
5919 break;
5920
5921 case CODE_LABEL:
5922 /* An addr_vec is placed outside any block block. */
5923 if (NEXT_INSN (x)
5924 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
5925 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
5926 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
5927 {
5928 x = NEXT_INSN (x);
5929 }
5930
5931 /* But in any case, non-deletable labels can appear anywhere. */
5932 break;
5933
5934 default:
5935 fatal_insn ("Insn outside basic block", x);
5936 }
5937 }
5938
5939 x = NEXT_INSN (x);
5940 }
5941
5942 if (err)
5943 abort ();
5944
5945 /* Clean up. */
5946 free (bb_info);
5947 }
5948 \f
5949 /* Functions to access an edge list with a vector representation.
5950 Enough data is kept such that given an index number, the
5951 pred and succ that edge reprsents can be determined, or
5952 given a pred and a succ, it's index number can be returned.
5953 This allows algorithms which comsume a lot of memory to
5954 represent the normally full matrix of edge (pred,succ) with a
5955 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
5956 wasted space in the client code due to sparse flow graphs. */
5957
5958 /* This functions initializes the edge list. Basically the entire
5959 flowgraph is processed, and all edges are assigned a number,
5960 and the data structure is filed in. */
5961 struct edge_list *
5962 create_edge_list ()
5963 {
5964 struct edge_list *elist;
5965 edge e;
5966 int num_edges;
5967 int x;
5968 int block_count;
5969
5970 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
5971
5972 num_edges = 0;
5973
5974 /* Determine the number of edges in the flow graph by counting successor
5975 edges on each basic block. */
5976 for (x = 0; x < n_basic_blocks; x++)
5977 {
5978 basic_block bb = BASIC_BLOCK (x);
5979
5980 for (e = bb->succ; e; e = e->succ_next)
5981 num_edges++;
5982 }
5983 /* Don't forget successors of the entry block. */
5984 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5985 num_edges++;
5986
5987 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
5988 elist->num_blocks = block_count;
5989 elist->num_edges = num_edges;
5990 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
5991
5992 num_edges = 0;
5993
5994 /* Follow successors of the entry block, and register these edges. */
5995 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5996 {
5997 elist->index_to_edge[num_edges] = e;
5998 num_edges++;
5999 }
6000
6001 for (x = 0; x < n_basic_blocks; x++)
6002 {
6003 basic_block bb = BASIC_BLOCK (x);
6004
6005 /* Follow all successors of blocks, and register these edges. */
6006 for (e = bb->succ; e; e = e->succ_next)
6007 {
6008 elist->index_to_edge[num_edges] = e;
6009 num_edges++;
6010 }
6011 }
6012 return elist;
6013 }
6014
6015 /* This function free's memory associated with an edge list. */
6016 void
6017 free_edge_list (elist)
6018 struct edge_list *elist;
6019 {
6020 if (elist)
6021 {
6022 free (elist->index_to_edge);
6023 free (elist);
6024 }
6025 }
6026
6027 /* This function provides debug output showing an edge list. */
6028 void
6029 print_edge_list (f, elist)
6030 FILE *f;
6031 struct edge_list *elist;
6032 {
6033 int x;
6034 fprintf(f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6035 elist->num_blocks - 2, elist->num_edges);
6036
6037 for (x = 0; x < elist->num_edges; x++)
6038 {
6039 fprintf (f, " %-4d - edge(", x);
6040 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6041 fprintf (f,"entry,");
6042 else
6043 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6044
6045 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6046 fprintf (f,"exit)\n");
6047 else
6048 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6049 }
6050 }
6051
6052 /* This function provides an internal consistancy check of an edge list,
6053 verifying that all edges are present, and that there are no
6054 extra edges. */
6055 void
6056 verify_edge_list (f, elist)
6057 FILE *f;
6058 struct edge_list *elist;
6059 {
6060 int x, pred, succ, index;
6061 edge e;
6062
6063 for (x = 0; x < n_basic_blocks; x++)
6064 {
6065 basic_block bb = BASIC_BLOCK (x);
6066
6067 for (e = bb->succ; e; e = e->succ_next)
6068 {
6069 pred = e->src->index;
6070 succ = e->dest->index;
6071 index = EDGE_INDEX (elist, e->src, e->dest);
6072 if (index == EDGE_INDEX_NO_EDGE)
6073 {
6074 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6075 continue;
6076 }
6077 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6078 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6079 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6080 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6081 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6082 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6083 }
6084 }
6085 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6086 {
6087 pred = e->src->index;
6088 succ = e->dest->index;
6089 index = EDGE_INDEX (elist, e->src, e->dest);
6090 if (index == EDGE_INDEX_NO_EDGE)
6091 {
6092 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6093 continue;
6094 }
6095 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6096 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6097 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6098 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6099 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6100 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6101 }
6102 /* We've verified that all the edges are in the list, no lets make sure
6103 there are no spurious edges in the list. */
6104
6105 for (pred = 0 ; pred < n_basic_blocks; pred++)
6106 for (succ = 0 ; succ < n_basic_blocks; succ++)
6107 {
6108 basic_block p = BASIC_BLOCK (pred);
6109 basic_block s = BASIC_BLOCK (succ);
6110
6111 int found_edge = 0;
6112
6113 for (e = p->succ; e; e = e->succ_next)
6114 if (e->dest == s)
6115 {
6116 found_edge = 1;
6117 break;
6118 }
6119 for (e = s->pred; e; e = e->pred_next)
6120 if (e->src == p)
6121 {
6122 found_edge = 1;
6123 break;
6124 }
6125 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6126 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6127 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6128 pred, succ);
6129 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6130 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6131 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6132 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6133 BASIC_BLOCK (succ)));
6134 }
6135 for (succ = 0 ; succ < n_basic_blocks; succ++)
6136 {
6137 basic_block p = ENTRY_BLOCK_PTR;
6138 basic_block s = BASIC_BLOCK (succ);
6139
6140 int found_edge = 0;
6141
6142 for (e = p->succ; e; e = e->succ_next)
6143 if (e->dest == s)
6144 {
6145 found_edge = 1;
6146 break;
6147 }
6148 for (e = s->pred; e; e = e->pred_next)
6149 if (e->src == p)
6150 {
6151 found_edge = 1;
6152 break;
6153 }
6154 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6155 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6156 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6157 succ);
6158 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6159 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6160 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6161 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6162 BASIC_BLOCK (succ)));
6163 }
6164 for (pred = 0 ; pred < n_basic_blocks; pred++)
6165 {
6166 basic_block p = BASIC_BLOCK (pred);
6167 basic_block s = EXIT_BLOCK_PTR;
6168
6169 int found_edge = 0;
6170
6171 for (e = p->succ; e; e = e->succ_next)
6172 if (e->dest == s)
6173 {
6174 found_edge = 1;
6175 break;
6176 }
6177 for (e = s->pred; e; e = e->pred_next)
6178 if (e->src == p)
6179 {
6180 found_edge = 1;
6181 break;
6182 }
6183 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6184 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6185 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6186 pred);
6187 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6188 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6189 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6190 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6191 EXIT_BLOCK_PTR));
6192 }
6193 }
6194
6195 /* This routine will determine what, if any, edge there is between
6196 a specified predecessor and successor. */
6197
6198 int
6199 find_edge_index (edge_list, pred, succ)
6200 struct edge_list *edge_list;
6201 basic_block pred, succ;
6202 {
6203 int x;
6204 for (x = 0; x < NUM_EDGES (edge_list); x++)
6205 {
6206 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6207 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6208 return x;
6209 }
6210 return (EDGE_INDEX_NO_EDGE);
6211 }
6212
6213 /* This function will remove an edge from the flow graph. */
6214 static void
6215 remove_edge (e)
6216 edge e;
6217 {
6218 edge last_pred = NULL;
6219 edge last_succ = NULL;
6220 edge tmp;
6221 basic_block src, dest;
6222 src = e->src;
6223 dest = e->dest;
6224 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6225 last_succ = tmp;
6226
6227 if (!tmp)
6228 abort ();
6229 if (last_succ)
6230 last_succ->succ_next = e->succ_next;
6231 else
6232 src->succ = e->succ_next;
6233
6234 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6235 last_pred = tmp;
6236
6237 if (!tmp)
6238 abort ();
6239 if (last_pred)
6240 last_pred->pred_next = e->pred_next;
6241 else
6242 dest->pred = e->pred_next;
6243
6244 n_edges--;
6245 free (e);
6246 }
6247
6248 /* This routine will remove any fake successor edges for a basic block.
6249 When the edge is removed, it is also removed from whatever predecessor
6250 list it is in. */
6251 static void
6252 remove_fake_successors (bb)
6253 basic_block bb;
6254 {
6255 edge e;
6256 for (e = bb->succ; e ; )
6257 {
6258 edge tmp = e;
6259 e = e->succ_next;
6260 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6261 remove_edge (tmp);
6262 }
6263 }
6264
6265 /* This routine will remove all fake edges from the flow graph. If
6266 we remove all fake successors, it will automatically remove all
6267 fake predecessors. */
6268 void
6269 remove_fake_edges ()
6270 {
6271 int x;
6272
6273 for (x = 0; x < n_basic_blocks; x++)
6274 remove_fake_successors (BASIC_BLOCK (x));
6275
6276 /* We've handled all successors except the entry block's. */
6277 remove_fake_successors (ENTRY_BLOCK_PTR);
6278 }
6279
6280 /* This functions will add a fake edge between any block which has no
6281 successors, and the exit block. Some data flow equations require these
6282 edges to exist. */
6283 void
6284 add_noreturn_fake_exit_edges ()
6285 {
6286 int x;
6287
6288 for (x = 0; x < n_basic_blocks; x++)
6289 if (BASIC_BLOCK (x)->succ == NULL)
6290 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
6291 }
6292 \f
6293 /* Dump the list of basic blocks in the bitmap NODES. */
6294 static void
6295 flow_nodes_print (str, nodes, file)
6296 const char *str;
6297 const sbitmap nodes;
6298 FILE *file;
6299 {
6300 int node;
6301
6302 fprintf (file, "%s { ", str);
6303 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6304 fputs ("}\n", file);
6305 }
6306
6307
6308 /* Dump the list of exiting edges in the array EDGES. */
6309 static void
6310 flow_exits_print (str, edges, num_edges, file)
6311 const char *str;
6312 const edge *edges;
6313 int num_edges;
6314 FILE *file;
6315 {
6316 int i;
6317
6318 fprintf (file, "%s { ", str);
6319 for (i = 0; i < num_edges; i++)
6320 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6321 fputs ("}\n", file);
6322 }
6323
6324
6325 /* Dump loop related CFG information. */
6326 static void
6327 flow_loops_cfg_dump (loops, file)
6328 const struct loops *loops;
6329 FILE *file;
6330 {
6331 int i;
6332
6333 if (! loops->num || ! file || ! loops->cfg.dom)
6334 return;
6335
6336 for (i = 0; i < n_basic_blocks; i++)
6337 {
6338 edge succ;
6339
6340 fprintf (file, ";; %d succs { ", i);
6341 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
6342 fprintf (file, "%d ", succ->dest->index);
6343 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
6344 }
6345
6346
6347 /* Dump the DFS node order. */
6348 if (loops->cfg.dfs_order)
6349 {
6350 fputs (";; DFS order: ", file);
6351 for (i = 0; i < n_basic_blocks; i++)
6352 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
6353 fputs ("\n", file);
6354 }
6355 }
6356
6357
6358 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
6359 int
6360 flow_loop_nested_p (outer, loop)
6361 struct loop *outer;
6362 struct loop *loop;
6363 {
6364 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
6365 }
6366
6367
6368 /* Dump the loop information specified by LOOPS to the stream FILE. */
6369 void
6370 flow_loops_dump (loops, file, verbose)
6371 const struct loops *loops;
6372 FILE *file;
6373 int verbose;
6374 {
6375 int i;
6376 int num_loops;
6377
6378 num_loops = loops->num;
6379 if (! num_loops || ! file)
6380 return;
6381
6382 fprintf (file, ";; %d loops found\n", num_loops);
6383
6384 for (i = 0; i < num_loops; i++)
6385 {
6386 struct loop *loop = &loops->array[i];
6387
6388 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
6389 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
6390 loop->header->index, loop->latch->index,
6391 loop->pre_header ? loop->pre_header->index : -1,
6392 loop->depth, loop->level,
6393 (long) (loop->outer ? (loop->outer - loops->array) : -1));
6394 fprintf (file, ";; %d", loop->num_nodes);
6395 flow_nodes_print (" nodes", loop->nodes, file);
6396 fprintf (file, ";; %d", loop->num_exits);
6397 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
6398
6399 if (loop->shared)
6400 {
6401 int j;
6402
6403 for (j = 0; j < i; j++)
6404 {
6405 struct loop *oloop = &loops->array[j];
6406
6407 if (loop->header == oloop->header)
6408 {
6409 int disjoint;
6410 int smaller;
6411
6412 smaller = loop->num_nodes < oloop->num_nodes;
6413
6414 /* If the union of LOOP and OLOOP is different than
6415 the larger of LOOP and OLOOP then LOOP and OLOOP
6416 must be disjoint. */
6417 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
6418 smaller ? oloop : loop);
6419 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
6420 loop->header->index, i, j,
6421 disjoint ? "disjoint" : "nested");
6422 }
6423 }
6424 }
6425
6426 if (verbose)
6427 {
6428 /* Print diagnostics to compare our concept of a loop with
6429 what the loop notes say. */
6430 if (GET_CODE (PREV_INSN (loop->header->head)) != NOTE
6431 || NOTE_LINE_NUMBER (PREV_INSN (loop->header->head))
6432 != NOTE_INSN_LOOP_BEG)
6433 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
6434 INSN_UID (PREV_INSN (loop->header->head)));
6435 if (GET_CODE (NEXT_INSN (loop->latch->end)) != NOTE
6436 || NOTE_LINE_NUMBER (NEXT_INSN (loop->latch->end))
6437 != NOTE_INSN_LOOP_END)
6438 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
6439 INSN_UID (NEXT_INSN (loop->latch->end)));
6440 }
6441 }
6442
6443 if (verbose)
6444 flow_loops_cfg_dump (loops, file);
6445 }
6446
6447
6448 /* Free all the memory allocated for LOOPS. */
6449 void
6450 flow_loops_free (loops)
6451 struct loops *loops;
6452 {
6453 if (loops->array)
6454 {
6455 int i;
6456
6457 if (! loops->num)
6458 abort ();
6459
6460 /* Free the loop descriptors. */
6461 for (i = 0; i < loops->num; i++)
6462 {
6463 struct loop *loop = &loops->array[i];
6464
6465 if (loop->nodes)
6466 sbitmap_free (loop->nodes);
6467 if (loop->exits)
6468 free (loop->exits);
6469 }
6470 free (loops->array);
6471 loops->array = NULL;
6472
6473 if (loops->cfg.dom)
6474 sbitmap_vector_free (loops->cfg.dom);
6475 if (loops->cfg.dfs_order)
6476 free (loops->cfg.dfs_order);
6477
6478 sbitmap_free (loops->shared_headers);
6479 }
6480 }
6481
6482
6483 /* Find the exits from the loop using the bitmap of loop nodes NODES
6484 and store in EXITS array. Return the number of exits from the
6485 loop. */
6486 static int
6487 flow_loop_exits_find (nodes, exits)
6488 const sbitmap nodes;
6489 edge **exits;
6490 {
6491 edge e;
6492 int node;
6493 int num_exits;
6494
6495 *exits = NULL;
6496
6497 /* Check all nodes within the loop to see if there are any
6498 successors not in the loop. Note that a node may have multiple
6499 exiting edges. */
6500 num_exits = 0;
6501 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6502 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6503 {
6504 basic_block dest = e->dest;
6505
6506 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6507 num_exits++;
6508 }
6509 });
6510
6511 if (! num_exits)
6512 return 0;
6513
6514 *exits = (edge *) xmalloc (num_exits * sizeof (edge *));
6515
6516 /* Store all exiting edges into an array. */
6517 num_exits = 0;
6518 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6519 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6520 {
6521 basic_block dest = e->dest;
6522
6523 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6524 (*exits)[num_exits++] = e;
6525 }
6526 });
6527
6528 return num_exits;
6529 }
6530
6531
6532 /* Find the nodes contained within the loop with header HEADER and
6533 latch LATCH and store in NODES. Return the number of nodes within
6534 the loop. */
6535 static int
6536 flow_loop_nodes_find (header, latch, nodes)
6537 basic_block header;
6538 basic_block latch;
6539 sbitmap nodes;
6540 {
6541 basic_block *stack;
6542 int sp;
6543 int num_nodes = 0;
6544
6545 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
6546 sp = 0;
6547
6548 /* Start with only the loop header in the set of loop nodes. */
6549 sbitmap_zero (nodes);
6550 SET_BIT (nodes, header->index);
6551 num_nodes++;
6552 header->loop_depth++;
6553
6554 /* Push the loop latch on to the stack. */
6555 if (! TEST_BIT (nodes, latch->index))
6556 {
6557 SET_BIT (nodes, latch->index);
6558 latch->loop_depth++;
6559 num_nodes++;
6560 stack[sp++] = latch;
6561 }
6562
6563 while (sp)
6564 {
6565 basic_block node;
6566 edge e;
6567
6568 node = stack[--sp];
6569 for (e = node->pred; e; e = e->pred_next)
6570 {
6571 basic_block ancestor = e->src;
6572
6573 /* If each ancestor not marked as part of loop, add to set of
6574 loop nodes and push on to stack. */
6575 if (ancestor != ENTRY_BLOCK_PTR
6576 && ! TEST_BIT (nodes, ancestor->index))
6577 {
6578 SET_BIT (nodes, ancestor->index);
6579 ancestor->loop_depth++;
6580 num_nodes++;
6581 stack[sp++] = ancestor;
6582 }
6583 }
6584 }
6585 free (stack);
6586 return num_nodes;
6587 }
6588
6589
6590 /* Compute the depth first search order and store in the array
6591 DFS_ORDER, marking the nodes visited in VISITED. Returns the
6592 number of nodes visited. */
6593 static int
6594 flow_depth_first_order_compute (dfs_order)
6595 int *dfs_order;
6596 {
6597 edge e;
6598 edge *stack;
6599 int sp;
6600 int dfsnum = 0;
6601 sbitmap visited;
6602
6603 /* Allocate stack for back-tracking up CFG. */
6604 stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
6605 sp = 0;
6606
6607 /* Allocate bitmap to track nodes that have been visited. */
6608 visited = sbitmap_alloc (n_basic_blocks);
6609
6610 /* None of the nodes in the CFG have been visited yet. */
6611 sbitmap_zero (visited);
6612
6613 /* Start with the first successor edge from the entry block. */
6614 e = ENTRY_BLOCK_PTR->succ;
6615 while (e)
6616 {
6617 basic_block src = e->src;
6618 basic_block dest = e->dest;
6619
6620 /* Mark that we have visited this node. */
6621 if (src != ENTRY_BLOCK_PTR)
6622 SET_BIT (visited, src->index);
6623
6624 /* If this node has not been visited before, push the current
6625 edge on to the stack and proceed with the first successor
6626 edge of this node. */
6627 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6628 && dest->succ)
6629 {
6630 stack[sp++] = e;
6631 e = dest->succ;
6632 }
6633 else
6634 {
6635 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6636 && ! dest->succ)
6637 {
6638 /* DEST has no successors (for example, a non-returning
6639 function is called) so do not push the current edge
6640 but carry on with its next successor. */
6641 dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
6642 SET_BIT (visited, dest->index);
6643 }
6644
6645 while (! e->succ_next && src != ENTRY_BLOCK_PTR)
6646 {
6647 dfs_order[src->index] = n_basic_blocks - ++dfsnum;
6648
6649 /* Pop edge off stack. */
6650 e = stack[--sp];
6651 src = e->src;
6652 }
6653 e = e->succ_next;
6654 }
6655 }
6656 free (stack);
6657 sbitmap_free (visited);
6658
6659 /* The number of nodes visited should not be greater than
6660 n_basic_blocks. */
6661 if (dfsnum > n_basic_blocks)
6662 abort ();
6663
6664 /* There are some nodes left in the CFG that are unreachable. */
6665 if (dfsnum < n_basic_blocks)
6666 abort ();
6667 return dfsnum;
6668 }
6669
6670
6671 /* Return the block for the pre-header of the loop with header
6672 HEADER where DOM specifies the dominator information. Return NULL if
6673 there is no pre-header. */
6674 static basic_block
6675 flow_loop_pre_header_find (header, dom)
6676 basic_block header;
6677 const sbitmap *dom;
6678 {
6679 basic_block pre_header;
6680 edge e;
6681
6682 /* If block p is a predecessor of the header and is the only block
6683 that the header does not dominate, then it is the pre-header. */
6684 pre_header = NULL;
6685 for (e = header->pred; e; e = e->pred_next)
6686 {
6687 basic_block node = e->src;
6688
6689 if (node != ENTRY_BLOCK_PTR
6690 && ! TEST_BIT (dom[node->index], header->index))
6691 {
6692 if (pre_header == NULL)
6693 pre_header = node;
6694 else
6695 {
6696 /* There are multiple edges into the header from outside
6697 the loop so there is no pre-header block. */
6698 pre_header = NULL;
6699 break;
6700 }
6701 }
6702 }
6703 return pre_header;
6704 }
6705
6706
6707 /* Add LOOP to the loop hierarchy tree so that it is a sibling or a
6708 descendant of ROOT. */
6709 static void
6710 flow_loop_tree_node_add (root, loop)
6711 struct loop *root;
6712 struct loop *loop;
6713 {
6714 struct loop *outer;
6715
6716 if (! loop)
6717 return;
6718
6719 for (outer = root; outer; outer = outer->next)
6720 {
6721 if (flow_loop_nested_p (outer, loop))
6722 {
6723 if (outer->inner)
6724 {
6725 /* Add LOOP as a sibling or descendent of OUTER->INNER. */
6726 flow_loop_tree_node_add (outer->inner, loop);
6727 }
6728 else
6729 {
6730 /* Add LOOP as child of OUTER. */
6731 outer->inner = loop;
6732 loop->outer = outer;
6733 loop->next = NULL;
6734 }
6735 return;
6736 }
6737 }
6738 /* Add LOOP as a sibling of ROOT. */
6739 loop->next = root->next;
6740 root->next = loop;
6741 loop->outer = root->outer;
6742 }
6743
6744
6745 /* Build the loop hierarchy tree for LOOPS. */
6746 static void
6747 flow_loops_tree_build (loops)
6748 struct loops *loops;
6749 {
6750 int i;
6751 int num_loops;
6752
6753 num_loops = loops->num;
6754 if (! num_loops)
6755 return;
6756
6757 /* Root the loop hierarchy tree with the first loop found.
6758 Since we used a depth first search this should be the
6759 outermost loop. */
6760 loops->tree = &loops->array[0];
6761 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
6762
6763 /* Add the remaining loops to the tree. */
6764 for (i = 1; i < num_loops; i++)
6765 flow_loop_tree_node_add (loops->tree, &loops->array[i]);
6766 }
6767
6768
6769 /* Helper function to compute loop nesting depth and enclosed loop level
6770 for the natural loop specified by LOOP at the loop depth DEPTH.
6771 Returns the loop level. */
6772 static int
6773 flow_loop_level_compute (loop, depth)
6774 struct loop *loop;
6775 int depth;
6776 {
6777 struct loop *inner;
6778 int level = 0;
6779
6780 if (! loop)
6781 return 0;
6782
6783 /* Traverse loop tree assigning depth and computing level as the
6784 maximum level of all the inner loops of this loop. The loop
6785 level is equivalent to the height of the loop in the loop tree
6786 and corresponds to the number of enclosed loop levels. */
6787 for (inner = loop->inner; inner; inner = inner->next)
6788 {
6789 int ilevel;
6790
6791 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
6792
6793 if (ilevel > level)
6794 level = ilevel;
6795 }
6796 loop->level = level;
6797 loop->depth = depth;
6798 return level;
6799 }
6800
6801
6802 /* Compute the loop nesting depth and enclosed loop level for the loop
6803 hierarchy tree specfied by LOOPS. Return the maximum enclosed loop
6804 level. */
6805 static int
6806 flow_loops_level_compute (loops)
6807 struct loops *loops;
6808 {
6809 return flow_loop_level_compute (loops->tree, 0);
6810 }
6811
6812
6813 /* Find all the natural loops in the function and save in LOOPS structure
6814 and recalculate loop_depth information in basic block structures.
6815 Return the number of natural loops found. */
6816 int
6817 flow_loops_find (loops)
6818 struct loops *loops;
6819 {
6820 int i;
6821 int b;
6822 int num_loops;
6823 edge e;
6824 sbitmap headers;
6825 sbitmap *dom;
6826 int *dfs_order;
6827
6828 loops->num = 0;
6829 loops->array = NULL;
6830 loops->tree = NULL;
6831 dfs_order = NULL;
6832
6833 /* Taking care of this degenerate case makes the rest of
6834 this code simpler. */
6835 if (n_basic_blocks == 0)
6836 return 0;
6837
6838 /* Compute the dominators. */
6839 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6840 compute_flow_dominators (dom, NULL);
6841
6842 /* Count the number of loop edges (back edges). This should be the
6843 same as the number of natural loops. Also clear the loop_depth
6844 and as we work from inner->outer in a loop nest we call
6845 find_loop_nodes_find which will increment loop_depth for nodes
6846 within the current loop, which happens to enclose inner loops. */
6847
6848 num_loops = 0;
6849 for (b = 0; b < n_basic_blocks; b++)
6850 {
6851 BASIC_BLOCK (b)->loop_depth = 1;
6852 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
6853 {
6854 basic_block latch = e->src;
6855
6856 /* Look for back edges where a predecessor is dominated
6857 by this block. A natural loop has a single entry
6858 node (header) that dominates all the nodes in the
6859 loop. It also has single back edge to the header
6860 from a latch node. Note that multiple natural loops
6861 may share the same header. */
6862 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
6863 num_loops++;
6864 }
6865 }
6866
6867 if (num_loops)
6868 {
6869 /* Compute depth first search order of the CFG so that outer
6870 natural loops will be found before inner natural loops. */
6871 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
6872 flow_depth_first_order_compute (dfs_order);
6873
6874 /* Allocate loop structures. */
6875 loops->array = (struct loop *)
6876 xcalloc (num_loops, sizeof (struct loop));
6877
6878 headers = sbitmap_alloc (n_basic_blocks);
6879 sbitmap_zero (headers);
6880
6881 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
6882 sbitmap_zero (loops->shared_headers);
6883
6884 /* Find and record information about all the natural loops
6885 in the CFG. */
6886 num_loops = 0;
6887 for (b = 0; b < n_basic_blocks; b++)
6888 {
6889 basic_block header;
6890
6891 /* Search the nodes of the CFG in DFS order that we can find
6892 outer loops first. */
6893 header = BASIC_BLOCK (dfs_order[b]);
6894
6895 /* Look for all the possible latch blocks for this header. */
6896 for (e = header->pred; e; e = e->pred_next)
6897 {
6898 basic_block latch = e->src;
6899
6900 /* Look for back edges where a predecessor is dominated
6901 by this block. A natural loop has a single entry
6902 node (header) that dominates all the nodes in the
6903 loop. It also has single back edge to the header
6904 from a latch node. Note that multiple natural loops
6905 may share the same header. */
6906 if (latch != ENTRY_BLOCK_PTR
6907 && TEST_BIT (dom[latch->index], header->index))
6908 {
6909 struct loop *loop;
6910
6911 loop = loops->array + num_loops;
6912
6913 loop->header = header;
6914 loop->latch = latch;
6915
6916 /* Keep track of blocks that are loop headers so
6917 that we can tell which loops should be merged. */
6918 if (TEST_BIT (headers, header->index))
6919 SET_BIT (loops->shared_headers, header->index);
6920 SET_BIT (headers, header->index);
6921
6922 /* Find nodes contained within the loop. */
6923 loop->nodes = sbitmap_alloc (n_basic_blocks);
6924 loop->num_nodes =
6925 flow_loop_nodes_find (header, latch, loop->nodes);
6926
6927 /* Find edges which exit the loop. Note that a node
6928 may have several exit edges. */
6929 loop->num_exits
6930 = flow_loop_exits_find (loop->nodes, &loop->exits);
6931
6932 /* Look to see if the loop has a pre-header node. */
6933 loop->pre_header
6934 = flow_loop_pre_header_find (header, dom);
6935
6936 num_loops++;
6937 }
6938 }
6939 }
6940
6941 /* Natural loops with shared headers may either be disjoint or
6942 nested. Disjoint loops with shared headers cannot be inner
6943 loops and should be merged. For now just mark loops that share
6944 headers. */
6945 for (i = 0; i < num_loops; i++)
6946 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
6947 loops->array[i].shared = 1;
6948
6949 sbitmap_free (headers);
6950 }
6951
6952 loops->num = num_loops;
6953
6954 /* Save CFG derived information to avoid recomputing it. */
6955 loops->cfg.dom = dom;
6956 loops->cfg.dfs_order = dfs_order;
6957
6958 /* Build the loop hierarchy tree. */
6959 flow_loops_tree_build (loops);
6960
6961 /* Assign the loop nesting depth and enclosed loop level for each
6962 loop. */
6963 flow_loops_level_compute (loops);
6964
6965 return num_loops;
6966 }
6967
6968
6969 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
6970 int
6971 flow_loop_outside_edge_p (loop, e)
6972 const struct loop *loop;
6973 edge e;
6974 {
6975 if (e->dest != loop->header)
6976 abort ();
6977 return (e->src == ENTRY_BLOCK_PTR)
6978 || ! TEST_BIT (loop->nodes, e->src->index);
6979 }
This page took 0.377404 seconds and 5 git commands to generate.