]>
Commit | Line | Data |
---|---|---|
295ae817 JE |
1 | /* Basic block reordering routines for the GNU compiler. |
2 | Copyright (C) 2000 Free Software Foundation, Inc. | |
3 | ||
1322177d | 4 | This file is part of GCC. |
295ae817 | 5 | |
1322177d LB |
6 | GCC is free software; you can redistribute it and/or modify it |
7 | under the terms of the GNU General Public License as published by | |
295ae817 JE |
8 | the Free Software Foundation; either version 2, or (at your option) |
9 | any later version. | |
10 | ||
1322177d LB |
11 | GCC is distributed in the hope that it will be useful, but WITHOUT |
12 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY | |
13 | or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public | |
14 | License for more details. | |
295ae817 JE |
15 | |
16 | You should have received a copy of the GNU General Public License | |
1322177d LB |
17 | along with GCC; see the file COPYING. If not, write to the Free |
18 | Software Foundation, 59 Temple Place - Suite 330, Boston, MA | |
19 | 02111-1307, USA. */ | |
295ae817 JE |
20 | |
21 | /* References: | |
22 | ||
23 | "Profile Guided Code Positioning" | |
24 | Pettis and Hanson; PLDI '90. | |
f008a564 RH |
25 | |
26 | TODO: | |
27 | ||
28 | (1) Consider: | |
29 | ||
30 | if (p) goto A; // predict taken | |
31 | foo (); | |
32 | A: | |
33 | if (q) goto B; // predict taken | |
34 | bar (); | |
35 | B: | |
36 | baz (); | |
37 | return; | |
38 | ||
39 | We'll currently reorder this as | |
40 | ||
41 | if (!p) goto C; | |
42 | A: | |
43 | if (!q) goto D; | |
44 | B: | |
45 | baz (); | |
46 | return; | |
47 | D: | |
48 | bar (); | |
49 | goto B; | |
50 | C: | |
51 | foo (); | |
52 | goto A; | |
53 | ||
54 | A better ordering is | |
55 | ||
56 | if (!p) goto C; | |
57 | if (!q) goto D; | |
58 | B: | |
59 | baz (); | |
60 | return; | |
61 | C: | |
62 | foo (); | |
63 | if (q) goto B; | |
64 | D: | |
65 | bar (); | |
66 | goto B; | |
67 | ||
68 | This requires that we be able to duplicate the jump at A, and | |
69 | adjust the graph traversal such that greedy placement doesn't | |
70 | fix D before C is considered. | |
71 | ||
72 | (2) Coordinate with shorten_branches to minimize the number of | |
73 | long branches. | |
74 | ||
75 | (3) Invent a method by which sufficiently non-predicted code can | |
76 | be moved to either the end of the section or another section | |
77 | entirely. Some sort of NOTE_INSN note would work fine. | |
78 | ||
79 | This completely scroggs all debugging formats, so the user | |
80 | would have to explicitly ask for it. | |
295ae817 JE |
81 | */ |
82 | ||
83 | #include "config.h" | |
84 | #include "system.h" | |
85 | #include "tree.h" | |
86 | #include "rtl.h" | |
87 | #include "tm_p.h" | |
efc9bd41 | 88 | #include "hard-reg-set.h" |
295ae817 JE |
89 | #include "basic-block.h" |
90 | #include "insn-config.h" | |
91 | #include "regs.h" | |
295ae817 JE |
92 | #include "flags.h" |
93 | #include "output.h" | |
94 | #include "function.h" | |
295ae817 JE |
95 | #include "toplev.h" |
96 | #include "recog.h" | |
295ae817 JE |
97 | #include "expr.h" |
98 | #include "obstack.h" | |
99 | ||
100 | ||
f008a564 RH |
101 | #ifndef HAVE_epilogue |
102 | #define HAVE_epilogue 0 | |
103 | #endif | |
104 | ||
105 | ||
295ae817 JE |
106 | /* The contents of the current function definition are allocated |
107 | in this obstack, and all are freed at the end of the function. | |
108 | For top-level functions, this is temporary_obstack. | |
109 | Separate obstacks are made for nested functions. */ | |
110 | ||
1f8f4a0b | 111 | extern struct obstack flow_obstack; |
295ae817 JE |
112 | |
113 | ||
e3fdc58a JE |
/* Structure to hold information about lexical scopes.
   A scope corresponds to a NOTE_INSN_BLOCK_BEG/NOTE_INSN_BLOCK_END
   pair; scopes form a forest mirroring the block structure.  */
typedef struct scope_def
{
  /* Nesting level of the scope.  A level of -1 marks a degenerate
     scope that spans no basic blocks and may safely be ignored or
     deleted (see relate_bbs_with_scopes).  */
  int level;

  /* The NOTE_INSN_BLOCK_BEG that started this scope.  */
  rtx note_beg;

  /* The NOTE_INSN_BLOCK_END that ended this scope.  */
  rtx note_end;

  /* The bb containing note_beg (if any).  */
  basic_block bb_beg;

  /* The bb containing note_end (if any).  */
  basic_block bb_end;

  /* List of basic blocks contained within this scope.  */
  basic_block *bbs;

  /* Number of blocks contained within this scope.  */
  int num_bbs;

  /* The outer scope or NULL if outermost scope.  */
  struct scope_def *outer;

  /* The first inner scope or NULL if innermost scope.  */
  struct scope_def *inner;

  /* The last inner scope or NULL if innermost scope.  */
  struct scope_def *inner_last;

  /* Link to the next (sibling) scope.  */
  struct scope_def *next;
} *scope;
149 | ||
f008a564 | 150 | |
e3fdc58a JE |
/* Structure to hold information about the scope forest.
   Each tree in the forest is rooted at an outermost lexical scope
   of the function.  */
typedef struct
{
  /* Number of trees in forest.  */
  int num_trees;

  /* Array of tree roots, num_trees long.  */
  scope *trees;
} scope_forest_info;
160 | ||
f008a564 RH |
/* Structure to hold information about the blocks during reordering.
   One of these hangs off each basic block's ->aux field; use the RBI
   accessor macro below.  */
typedef struct reorder_block_def
{
  /* Effective first and last insns of the block, including any
     trailing barriers and notes associated with it, as computed by
     record_effective_endpoints.  */
  rtx eff_head;
  rtx eff_end;
  /* Lexical scope containing this block (if any).  */
  scope scope;
  /* Next block in the computed reorder chain, or NULL at the end.  */
  basic_block next;
  /* Nonzero once the block has been placed in the reorder chain.  */
  int visited;
} *reorder_block_def;

/* Access the reorder information hanging off basic block BB->aux.  */
#define RBI(BB) ((reorder_block_def) (BB)->aux)
295ae817 | 172 | |
a7b01a4b RH |
/* Holds the interesting trailing notes for the function: the insns
   following the last basic block's effective end, recorded by
   record_effective_endpoints and respliced onto the end of the
   function by fixup_reorder_chain.  */
static rtx function_tail_eff_head;
175 | ||
295ae817 JE |
176 | |
177 | /* Local function prototypes. */ | |
2a6fa433 | 178 | static rtx skip_insns_after_block PARAMS ((basic_block)); |
f008a564 RH |
179 | static void record_effective_endpoints PARAMS ((void)); |
180 | static void make_reorder_chain PARAMS ((void)); | |
181 | static basic_block make_reorder_chain_1 PARAMS ((basic_block, basic_block)); | |
182 | static rtx label_for_bb PARAMS ((basic_block)); | |
183 | static rtx emit_jump_to_block_after PARAMS ((basic_block, rtx)); | |
295ae817 | 184 | static void fixup_reorder_chain PARAMS ((void)); |
e3fdc58a JE |
185 | static void relate_bbs_with_scopes PARAMS ((scope)); |
186 | static scope make_new_scope PARAMS ((int, rtx)); | |
187 | static void build_scope_forest PARAMS ((scope_forest_info *)); | |
188 | static void remove_scope_notes PARAMS ((void)); | |
402209ff | 189 | static void insert_intra_1 PARAMS ((scope, rtx *, basic_block)); |
e3fdc58a JE |
190 | static void insert_intra_bb_scope_notes PARAMS ((basic_block)); |
191 | static void insert_inter_bb_scope_notes PARAMS ((basic_block, basic_block)); | |
192 | static void rebuild_scope_notes PARAMS ((scope_forest_info *)); | |
193 | static void free_scope_forest_1 PARAMS ((scope)); | |
194 | static void free_scope_forest PARAMS ((scope_forest_info *)); | |
195 | void dump_scope_forest PARAMS ((scope_forest_info *)); | |
196 | static void dump_scope_forest_1 PARAMS ((scope, int)); | |
36244024 KG |
197 | static rtx get_next_bb_note PARAMS ((rtx)); |
198 | static rtx get_prev_bb_note PARAMS ((rtx)); | |
295ae817 | 199 | |
f008a564 RH |
200 | void verify_insn_chain PARAMS ((void)); |
201 | \f | |
2a6fa433 JE |
202 | /* Skip over inter-block insns occurring after BB which are typically |
203 | associated with BB (e.g., barriers). If there are any such insns, | |
204 | we return the last one. Otherwise, we return the end of BB. */ | |
295ae817 JE |
205 | |
206 | static rtx | |
2a6fa433 | 207 | skip_insns_after_block (bb) |
295ae817 | 208 | basic_block bb; |
295ae817 | 209 | { |
6e64a52a | 210 | rtx insn, last_insn, next_head, prev; |
295ae817 | 211 | |
f008a564 RH |
212 | next_head = NULL_RTX; |
213 | if (bb->index + 1 != n_basic_blocks) | |
214 | next_head = BASIC_BLOCK (bb->index + 1)->head; | |
295ae817 | 215 | |
1ed672dd | 216 | for (last_insn = insn = bb->end; (insn = NEXT_INSN (insn)); ) |
2a6fa433 | 217 | { |
f008a564 | 218 | if (insn == next_head) |
2a6fa433 JE |
219 | break; |
220 | ||
f008a564 | 221 | switch (GET_CODE (insn)) |
295ae817 | 222 | { |
f008a564 | 223 | case BARRIER: |
1ed672dd | 224 | last_insn = insn; |
2a6fa433 | 225 | continue; |
295ae817 | 226 | |
f008a564 RH |
227 | case NOTE: |
228 | switch (NOTE_LINE_NUMBER (insn)) | |
229 | { | |
230 | case NOTE_INSN_LOOP_END: | |
231 | case NOTE_INSN_BLOCK_END: | |
1ed672dd JH |
232 | last_insn = insn; |
233 | continue; | |
f008a564 RH |
234 | case NOTE_INSN_DELETED: |
235 | case NOTE_INSN_DELETED_LABEL: | |
236 | continue; | |
237 | ||
238 | default: | |
6e64a52a | 239 | continue; |
f008a564 RH |
240 | break; |
241 | } | |
242 | break; | |
243 | ||
244 | case CODE_LABEL: | |
245 | if (NEXT_INSN (insn) | |
246 | && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN | |
247 | && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC | |
248 | || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC)) | |
249 | { | |
250 | insn = NEXT_INSN (insn); | |
1ed672dd | 251 | last_insn = insn; |
f008a564 RH |
252 | continue; |
253 | } | |
254 | break; | |
255 | ||
256 | default: | |
257 | break; | |
258 | } | |
295ae817 | 259 | |
2a6fa433 | 260 | break; |
295ae817 | 261 | } |
6e64a52a JH |
262 | /* It is possible to hit contradicting sequence. For instance: |
263 | ||
264 | jump_insn | |
265 | NOTE_INSN_LOOP_BEG | |
266 | barrier | |
267 | ||
268 | Where barrier belongs to jump_insn, but the note does not. | |
269 | This can be created by removing the basic block originally | |
270 | following NOTE_INSN_LOOP_BEG. | |
271 | ||
272 | In such case reorder the notes. */ | |
273 | for (insn = last_insn; insn != bb->end; insn = prev) | |
274 | { | |
275 | prev = PREV_INSN (insn); | |
276 | if (GET_CODE (insn) == NOTE) | |
277 | switch (NOTE_LINE_NUMBER (insn)) | |
278 | { | |
279 | case NOTE_INSN_LOOP_END: | |
280 | case NOTE_INSN_BLOCK_END: | |
281 | case NOTE_INSN_DELETED: | |
282 | case NOTE_INSN_DELETED_LABEL: | |
283 | continue; | |
284 | default: | |
285 | reorder_insns (insn, insn, last_insn); | |
286 | } | |
287 | } | |
295ae817 JE |
288 | |
289 | return last_insn; | |
290 | } | |
291 | ||
292 | ||
f008a564 RH |
293 | /* Locate the effective beginning and end of the insn chain for each |
294 | block, as defined by skip_insns_after_block above. */ | |
295ae817 | 295 | |
f008a564 RH |
296 | static void |
297 | record_effective_endpoints () | |
295ae817 | 298 | { |
f008a564 RH |
299 | rtx next_insn = get_insns (); |
300 | int i; | |
301 | ||
302 | for (i = 0; i < n_basic_blocks; ++i) | |
295ae817 | 303 | { |
f008a564 RH |
304 | basic_block bb = BASIC_BLOCK (i); |
305 | rtx end; | |
306 | ||
307 | RBI (bb)->eff_head = next_insn; | |
308 | end = skip_insns_after_block (bb); | |
309 | RBI (bb)->eff_end = end; | |
310 | next_insn = NEXT_INSN (end); | |
295ae817 | 311 | } |
a7b01a4b | 312 | function_tail_eff_head = next_insn; |
295ae817 JE |
313 | } |
314 | ||
315 | ||
f008a564 RH |
/* Compute an ordering for the function's basic blocks.  The ordering
   is recorded as a chain through RBI()->next, starting at block 0.  */

static void
make_reorder_chain ()
{
  basic_block last_block = NULL;
  basic_block prev = NULL;
  int nbb_m1 = n_basic_blocks - 1;
  basic_block next;

  /* If we've not got epilogue in RTL, we must fallthru to the exit.
     Force the last block to be at the end.  */
  /* ??? Some ABIs (e.g. MIPS) require the return insn to be at the
     end of the function for stack unwinding purposes.  */
  if (! HAVE_epilogue)
    {
      /* Reserve the final block now; marking it visited keeps the
	 greedy walk below from placing it anywhere else.  */
      last_block = BASIC_BLOCK (nbb_m1);
      RBI (last_block)->visited = 1;
      nbb_m1 -= 1;
    }

  /* Loop until we've placed every block.  */
  do
    {
      int i;

      next = NULL;

      /* Find the next unplaced block.  */
      /* ??? Get rid of this loop, and track which blocks are not yet
	 placed more directly, so as to avoid the O(N^2) worst case.
	 Perhaps keep a doubly-linked list of all to-be-placed blocks;
	 remove from the list as we place.  The head of that list is
	 what we're looking for here.  */

      for (i = 0; i <= nbb_m1 && !next; ++i)
	{
	  basic_block bb = BASIC_BLOCK (i);
	  if (! RBI (bb)->visited)
	    next = bb;
	}
      if (next)
	prev = make_reorder_chain_1 (next, prev);
    }
  while (next);

  /* Terminate the chain, appending the reserved last block first if
     we held one back above.  */
  if (! HAVE_epilogue)
    {
      RBI (prev)->next = last_block;
      prev = last_block;
    }
  RBI (prev)->next = NULL;
}
295ae817 | 371 | |
f008a564 | 372 | /* A helper function for make_reorder_chain. |
295ae817 | 373 | |
f008a564 RH |
374 | We do not follow EH edges, or non-fallthru edges to noreturn blocks. |
375 | These are assumed to be the error condition and we wish to cluster | |
376 | all of them at the very end of the function for the benefit of cache | |
377 | locality for the rest of the function. | |
295ae817 | 378 | |
f008a564 RH |
379 | ??? We could do slightly better by noticing earlier that some subgraph |
380 | has all paths leading to noreturn functions, but for there to be more | |
381 | than one block in such a subgraph is rare. */ | |
295ae817 | 382 | |
f008a564 RH |
383 | static basic_block |
384 | make_reorder_chain_1 (bb, prev) | |
385 | basic_block bb; | |
386 | basic_block prev; | |
387 | { | |
388 | edge e; | |
389 | basic_block next; | |
390 | rtx note; | |
391 | ||
392 | /* Mark this block visited. */ | |
393 | if (prev) | |
295ae817 | 394 | { |
f008a564 RH |
395 | restart: |
396 | RBI (prev)->next = bb; | |
295ae817 | 397 | |
f008a564 | 398 | if (rtl_dump_file && prev->index + 1 != bb->index) |
402209ff JH |
399 | fprintf (rtl_dump_file, "Reordering block %d after %d\n", |
400 | bb->index, prev->index); | |
295ae817 | 401 | } |
f008a564 | 402 | else |
402209ff JH |
403 | { |
404 | if (bb->index != 0) | |
405 | abort (); | |
406 | } | |
f008a564 RH |
407 | RBI (bb)->visited = 1; |
408 | prev = bb; | |
295ae817 | 409 | |
f008a564 RH |
410 | if (bb->succ == NULL) |
411 | return prev; | |
412 | ||
413 | /* Find the most probable block. */ | |
414 | ||
415 | next = NULL; | |
416 | if (any_condjump_p (bb->end) | |
417 | && (note = find_reg_note (bb->end, REG_BR_PROB, 0)) != NULL) | |
295ae817 | 418 | { |
f008a564 RH |
419 | int taken, probability; |
420 | edge e_taken, e_fall; | |
295ae817 | 421 | |
f008a564 RH |
422 | probability = INTVAL (XEXP (note, 0)); |
423 | taken = probability > REG_BR_PROB_BASE / 2; | |
424 | ||
425 | /* Find the normal taken edge and the normal fallthru edge. | |
b0cc7919 JDA |
426 | |
427 | Note, conditional jumps with other side effects may not | |
428 | be fully optimized. In this case it is possible for | |
429 | the conditional jump to branch to the same location as | |
430 | the fallthru path. | |
431 | ||
432 | We should probably work to improve optimization of that | |
433 | case; however, it seems silly not to also deal with such | |
434 | problems here if they happen to occur. */ | |
f008a564 RH |
435 | |
436 | e_taken = e_fall = NULL; | |
437 | for (e = bb->succ; e ; e = e->succ_next) | |
b0cc7919 JDA |
438 | { |
439 | if (e->flags & EDGE_FALLTHRU) | |
440 | e_fall = e; | |
fd052ec3 | 441 | else if (! (e->flags & EDGE_EH)) |
b0cc7919 JDA |
442 | e_taken = e; |
443 | } | |
f008a564 RH |
444 | |
445 | next = (taken ? e_taken : e_fall)->dest; | |
295ae817 JE |
446 | } |
447 | ||
f008a564 RH |
448 | /* In the absence of a prediction, disturb things as little as possible |
449 | by selecting the old "next" block from the list of successors. If | |
450 | there had been a fallthru edge, that will be the one. */ | |
451 | if (! next) | |
452 | { | |
453 | for (e = bb->succ; e ; e = e->succ_next) | |
454 | if (e->dest->index == bb->index + 1) | |
455 | { | |
456 | if ((e->flags & EDGE_FALLTHRU) | |
457 | || (e->dest->succ | |
458 | && ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))) | |
459 | next = e->dest; | |
460 | break; | |
461 | } | |
462 | } | |
295ae817 | 463 | |
f008a564 RH |
464 | /* Make sure we didn't select a silly next block. */ |
465 | if (! next || next == EXIT_BLOCK_PTR || RBI (next)->visited) | |
466 | next = NULL; | |
295ae817 | 467 | |
f008a564 RH |
468 | /* Recurse on the successors. Unroll the last call, as the normal |
469 | case is exactly one or two edges, and we can tail recurse. */ | |
470 | for (e = bb->succ; e; e = e->succ_next) | |
471 | if (e->dest != EXIT_BLOCK_PTR | |
472 | && ! RBI (e->dest)->visited | |
473 | && e->dest->succ | |
474 | && ! (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))) | |
475 | { | |
476 | if (next) | |
477 | { | |
478 | prev = make_reorder_chain_1 (next, prev); | |
479 | next = RBI (e->dest)->visited ? NULL : e->dest; | |
480 | } | |
481 | else | |
482 | next = e->dest; | |
483 | } | |
484 | if (next) | |
485 | { | |
486 | bb = next; | |
487 | goto restart; | |
488 | } | |
295ae817 | 489 | |
f008a564 | 490 | return prev; |
295ae817 JE |
491 | } |
492 | ||
493 | ||
f008a564 | 494 | /* Locate or create a label for a given basic block. */ |
295ae817 | 495 | |
f008a564 RH |
496 | static rtx |
497 | label_for_bb (bb) | |
295ae817 JE |
498 | basic_block bb; |
499 | { | |
f008a564 | 500 | rtx label = bb->head; |
295ae817 | 501 | |
f008a564 | 502 | if (GET_CODE (label) != CODE_LABEL) |
295ae817 | 503 | { |
f008a564 | 504 | if (rtl_dump_file) |
402209ff JH |
505 | fprintf (rtl_dump_file, "Emitting label for block %d\n", |
506 | bb->index); | |
295ae817 | 507 | |
f008a564 RH |
508 | label = emit_label_before (gen_label_rtx (), label); |
509 | if (bb->head == RBI (bb)->eff_head) | |
510 | RBI (bb)->eff_head = label; | |
511 | bb->head = label; | |
402209ff JH |
512 | if (basic_block_for_insn) |
513 | set_block_for_insn (label, bb); | |
295ae817 JE |
514 | } |
515 | ||
f008a564 RH |
516 | return label; |
517 | } | |
295ae817 | 518 | |
295ae817 | 519 | |
f008a564 | 520 | /* Emit a jump to BB after insn AFTER. */ |
295ae817 | 521 | |
f008a564 RH |
522 | static rtx |
523 | emit_jump_to_block_after (bb, after) | |
524 | basic_block bb; | |
525 | rtx after; | |
526 | { | |
527 | rtx jump; | |
528 | ||
529 | if (bb != EXIT_BLOCK_PTR) | |
295ae817 | 530 | { |
f008a564 RH |
531 | rtx label = label_for_bb (bb); |
532 | jump = emit_jump_insn_after (gen_jump (label), after); | |
533 | JUMP_LABEL (jump) = label; | |
534 | LABEL_NUSES (label) += 1; | |
1ed672dd JH |
535 | if (basic_block_for_insn) |
536 | set_block_for_new_insns (jump, bb); | |
295ae817 | 537 | |
f008a564 | 538 | if (rtl_dump_file) |
402209ff JH |
539 | fprintf (rtl_dump_file, "Emitting jump to block %d\n", |
540 | bb->index); | |
f008a564 RH |
541 | } |
542 | else | |
295ae817 | 543 | { |
48b710dd | 544 | #ifdef HAVE_return |
f008a564 RH |
545 | if (! HAVE_return) |
546 | abort (); | |
547 | jump = emit_jump_insn_after (gen_return (), after); | |
402209ff JH |
548 | if (basic_block_for_insn) |
549 | set_block_for_new_insns (jump, bb); | |
295ae817 | 550 | |
f008a564 RH |
551 | if (rtl_dump_file) |
552 | fprintf (rtl_dump_file, "Emitting return\n"); | |
48b710dd RH |
553 | #else |
554 | abort (); | |
555 | #endif | |
295ae817 | 556 | } |
f008a564 RH |
557 | |
558 | return jump; | |
295ae817 JE |
559 | } |
560 | ||
561 | ||
f008a564 | 562 | /* Given a reorder chain, rearrange the code to match. */ |
295ae817 JE |
563 | |
564 | static void | |
565 | fixup_reorder_chain () | |
566 | { | |
f008a564 | 567 | basic_block bb, last_bb; |
402209ff JH |
568 | int index; |
569 | rtx insn; | |
570 | int old_n_basic_blocks = n_basic_blocks; | |
295ae817 | 571 | |
f008a564 RH |
572 | /* First do the bulk reordering -- rechain the blocks without regard to |
573 | the needed changes to jumps and labels. */ | |
574 | ||
575 | last_bb = BASIC_BLOCK (0); | |
576 | bb = RBI (last_bb)->next; | |
402209ff | 577 | index = 1; |
f008a564 | 578 | while (bb) |
295ae817 | 579 | { |
f008a564 RH |
580 | rtx last_e = RBI (last_bb)->eff_end; |
581 | rtx curr_h = RBI (bb)->eff_head; | |
295ae817 | 582 | |
f008a564 RH |
583 | NEXT_INSN (last_e) = curr_h; |
584 | PREV_INSN (curr_h) = last_e; | |
db525e17 | 585 | |
f008a564 RH |
586 | last_bb = bb; |
587 | bb = RBI (bb)->next; | |
402209ff | 588 | index++; |
f008a564 | 589 | } |
a7b01a4b | 590 | |
402209ff JH |
591 | if (index != n_basic_blocks) |
592 | abort (); | |
a7b01a4b | 593 | |
402209ff | 594 | insn = RBI (last_bb)->eff_end; |
a7b01a4b | 595 | |
402209ff JH |
596 | NEXT_INSN (insn) = function_tail_eff_head; |
597 | if (function_tail_eff_head) | |
598 | PREV_INSN (function_tail_eff_head) = insn; | |
599 | ||
600 | while (NEXT_INSN (insn)) | |
601 | insn = NEXT_INSN (insn); | |
602 | set_last_insn (insn); | |
603 | #ifdef ENABLE_CHECKING | |
604 | verify_insn_chain (); | |
605 | #endif | |
f008a564 RH |
606 | |
607 | /* Now add jumps and labels as needed to match the blocks new | |
608 | outgoing edges. */ | |
609 | ||
610 | for (bb = BASIC_BLOCK (0); bb ; bb = RBI (bb)->next) | |
611 | { | |
612 | edge e_fall, e_taken, e; | |
0d45144b | 613 | rtx jump_insn, barrier_insn, bb_end_insn; |
f008a564 RH |
614 | basic_block nb; |
615 | ||
616 | if (bb->succ == NULL) | |
617 | continue; | |
618 | ||
619 | /* Find the old fallthru edge, and another non-EH edge for | |
620 | a taken jump. */ | |
621 | e_taken = e_fall = NULL; | |
622 | for (e = bb->succ; e ; e = e->succ_next) | |
623 | if (e->flags & EDGE_FALLTHRU) | |
624 | e_fall = e; | |
625 | else if (! (e->flags & EDGE_EH)) | |
626 | e_taken = e; | |
627 | ||
0d45144b RH |
628 | bb_end_insn = bb->end; |
629 | if (GET_CODE (bb_end_insn) == JUMP_INSN) | |
f008a564 | 630 | { |
402209ff | 631 | if (any_condjump_p (bb_end_insn)) |
db525e17 | 632 | { |
f008a564 | 633 | /* If the old fallthru is still next, nothing to do. */ |
402209ff JH |
634 | if (RBI (bb)->next == e_fall->dest |
635 | || (!RBI (bb)->next | |
f008a564 RH |
636 | && e_fall->dest == EXIT_BLOCK_PTR)) |
637 | continue; | |
638 | ||
639 | /* There is one special case: if *neither* block is next, | |
640 | such as happens at the very end of a function, then we'll | |
641 | need to add a new unconditional jump. Choose the taken | |
642 | edge based on known or assumed probability. */ | |
402209ff | 643 | if (RBI (bb)->next != e_taken->dest) |
f008a564 | 644 | { |
0d45144b | 645 | rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0); |
f008a564 RH |
646 | if (note |
647 | && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2 | |
0d45144b RH |
648 | && invert_jump (bb_end_insn, |
649 | label_for_bb (e_fall->dest), 0)) | |
f008a564 RH |
650 | { |
651 | e_fall->flags &= ~EDGE_FALLTHRU; | |
652 | e_taken->flags |= EDGE_FALLTHRU; | |
653 | e = e_fall, e_fall = e_taken, e_taken = e; | |
654 | } | |
655 | } | |
db525e17 | 656 | |
f008a564 RH |
657 | /* Otherwise we can try to invert the jump. This will |
658 | basically never fail, however, keep up the pretense. */ | |
0d45144b RH |
659 | else if (invert_jump (bb_end_insn, |
660 | label_for_bb (e_fall->dest), 0)) | |
295ae817 | 661 | { |
f008a564 RH |
662 | e_fall->flags &= ~EDGE_FALLTHRU; |
663 | e_taken->flags |= EDGE_FALLTHRU; | |
664 | continue; | |
295ae817 JE |
665 | } |
666 | } | |
0d45144b | 667 | else if (returnjump_p (bb_end_insn)) |
f008a564 | 668 | continue; |
e3fdc58a | 669 | else |
f008a564 RH |
670 | { |
671 | /* Otherwise we have some switch or computed jump. In the | |
672 | 99% case, there should not have been a fallthru edge. */ | |
673 | if (! e_fall) | |
674 | continue; | |
675 | #ifdef CASE_DROPS_THROUGH | |
676 | /* Except for VAX. Since we didn't have predication for the | |
677 | tablejump, the fallthru block should not have moved. */ | |
402209ff | 678 | if (RBI (bb)->next == e_fall->dest) |
f008a564 | 679 | continue; |
0d45144b RH |
680 | bb_end_insn = skip_insns_after_block (bb); |
681 | #else | |
f008a564 | 682 | abort (); |
0d45144b | 683 | #endif |
f008a564 | 684 | } |
295ae817 | 685 | } |
f008a564 RH |
686 | else |
687 | { | |
688 | /* No fallthru implies a noreturn function with EH edges, or | |
689 | something similarly bizarre. In any case, we don't need to | |
690 | do anything. */ | |
691 | if (! e_fall) | |
692 | continue; | |
693 | ||
694 | /* If the fallthru block is still next, nothing to do. */ | |
402209ff | 695 | if (RBI (bb)->next == e_fall->dest) |
f008a564 RH |
696 | continue; |
697 | ||
698 | /* We need a new jump insn. If the block has only one outgoing | |
699 | edge, then we can stuff the new jump insn in directly. */ | |
700 | if (bb->succ->succ_next == NULL) | |
701 | { | |
702 | e_fall->flags &= ~EDGE_FALLTHRU; | |
703 | ||
0d45144b | 704 | jump_insn = emit_jump_to_block_after (e_fall->dest, bb_end_insn); |
f008a564 RH |
705 | bb->end = jump_insn; |
706 | barrier_insn = emit_barrier_after (jump_insn); | |
707 | RBI (bb)->eff_end = barrier_insn; | |
708 | continue; | |
709 | } | |
710 | } | |
711 | ||
712 | /* We got here if we need to add a new jump insn in a new block | |
713 | across the edge e_fall. */ | |
714 | ||
0d45144b | 715 | jump_insn = emit_jump_to_block_after (e_fall->dest, bb_end_insn); |
f008a564 RH |
716 | barrier_insn = emit_barrier_after (jump_insn); |
717 | ||
718 | VARRAY_GROW (basic_block_info, ++n_basic_blocks); | |
719 | create_basic_block (n_basic_blocks - 1, jump_insn, jump_insn, NULL); | |
720 | ||
721 | nb = BASIC_BLOCK (n_basic_blocks - 1); | |
f008a564 | 722 | nb->local_set = 0; |
134d3a2e JH |
723 | nb->count = e_fall->count; |
724 | nb->frequency = EDGE_FREQUENCY (e_fall); | |
f008a564 | 725 | |
402209ff JH |
726 | nb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack); |
727 | nb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack); | |
f008a564 RH |
728 | COPY_REG_SET (nb->global_live_at_start, bb->global_live_at_start); |
729 | COPY_REG_SET (nb->global_live_at_end, bb->global_live_at_start); | |
730 | ||
731 | nb->aux = xmalloc (sizeof (struct reorder_block_def)); | |
732 | RBI (nb)->eff_head = nb->head; | |
733 | RBI (nb)->eff_end = barrier_insn; | |
734 | RBI (nb)->scope = RBI (bb)->scope; | |
f008a564 RH |
735 | RBI (nb)->visited = 1; |
736 | RBI (nb)->next = RBI (bb)->next; | |
737 | RBI (bb)->next = nb; | |
738 | ||
739 | /* Link to new block. */ | |
740 | make_edge (NULL, nb, e_fall->dest, 0); | |
741 | redirect_edge_succ (e_fall, nb); | |
134d3a2e JH |
742 | nb->succ->count = e_fall->count; |
743 | nb->succ->probability = REG_BR_PROB_BASE; | |
f008a564 RH |
744 | |
745 | /* Don't process this new block. */ | |
746 | bb = nb; | |
f008a564 RH |
747 | } |
748 | ||
749 | /* Put basic_block_info in the new order. */ | |
402209ff JH |
750 | bb = BASIC_BLOCK (0); |
751 | index = 0; | |
752 | ||
753 | if (rtl_dump_file) | |
754 | fprintf (rtl_dump_file, "Reordered sequence:\n"); | |
755 | while (bb) | |
f008a564 | 756 | { |
402209ff JH |
757 | if (rtl_dump_file) |
758 | fprintf (rtl_dump_file, " %i %sbb %i freq %i\n", index, | |
759 | bb->index >= old_n_basic_blocks ? "compensation " : "", | |
760 | bb->index, | |
761 | bb->frequency); | |
762 | bb->index = index; | |
763 | BASIC_BLOCK (index) = bb; | |
764 | ||
765 | bb = RBI (bb)->next; | |
766 | index++; | |
295ae817 JE |
767 | } |
768 | } | |
769 | ||
770 | ||
db525e17 JE |
771 | /* Perform sanity checks on the insn chain. |
772 | 1. Check that next/prev pointers are consistent in both the forward and | |
773 | reverse direction. | |
774 | 2. Count insns in chain, going both directions, and check if equal. | |
775 | 3. Check that get_last_insn () returns the actual end of chain. */ | |
f008a564 RH |
776 | |
777 | void | |
db525e17 JE |
778 | verify_insn_chain () |
779 | { | |
780 | rtx x, | |
781 | prevx, | |
782 | nextx; | |
783 | int insn_cnt1, | |
784 | insn_cnt2; | |
785 | ||
786 | prevx = NULL; | |
787 | insn_cnt1 = 1; | |
788 | for (x = get_insns (); x; x = NEXT_INSN (x)) | |
789 | { | |
790 | if (PREV_INSN (x) != prevx) | |
791 | { | |
792 | fprintf (stderr, "Forward traversal: insn chain corrupt.\n"); | |
793 | fprintf (stderr, "previous insn:\n"); | |
794 | debug_rtx (prevx); | |
795 | fprintf (stderr, "current insn:\n"); | |
796 | debug_rtx (x); | |
797 | abort (); | |
798 | } | |
799 | ++insn_cnt1; | |
800 | prevx = x; | |
801 | } | |
802 | ||
803 | if (prevx != get_last_insn ()) | |
804 | { | |
805 | fprintf (stderr, "last_insn corrupt.\n"); | |
806 | abort (); | |
807 | } | |
808 | ||
809 | nextx = NULL; | |
810 | insn_cnt2 = 1; | |
811 | for (x = get_last_insn (); x; x = PREV_INSN (x)) | |
812 | { | |
813 | if (NEXT_INSN (x) != nextx) | |
814 | { | |
815 | fprintf (stderr, "Reverse traversal: insn chain corrupt.\n"); | |
816 | fprintf (stderr, "current insn:\n"); | |
817 | debug_rtx (x); | |
818 | fprintf (stderr, "next insn:\n"); | |
819 | debug_rtx (nextx); | |
820 | abort (); | |
821 | } | |
822 | ++insn_cnt2; | |
823 | nextx = x; | |
824 | } | |
825 | ||
826 | if (insn_cnt1 != insn_cnt2) | |
827 | { | |
828 | fprintf (stderr, "insn_cnt1 (%d) not equal to insn_cnt2 (%d).\n", | |
829 | insn_cnt1, insn_cnt2); | |
830 | abort (); | |
831 | } | |
832 | } | |
db525e17 | 833 | |
e3fdc58a JE |
834 | static rtx |
835 | get_next_bb_note (x) | |
836 | rtx x; | |
837 | { | |
838 | while (x) | |
839 | { | |
589ca5cb | 840 | if (NOTE_INSN_BASIC_BLOCK_P (x)) |
e3fdc58a JE |
841 | return x; |
842 | x = NEXT_INSN (x); | |
843 | } | |
844 | return NULL; | |
845 | } | |
846 | ||
847 | ||
848 | static rtx | |
849 | get_prev_bb_note (x) | |
850 | rtx x; | |
851 | { | |
852 | while (x) | |
853 | { | |
589ca5cb | 854 | if (NOTE_INSN_BASIC_BLOCK_P (x)) |
e3fdc58a JE |
855 | return x; |
856 | x = PREV_INSN (x); | |
857 | } | |
858 | return NULL; | |
859 | } | |
860 | ||
861 | ||
862 | /* Determine and record the relationships between basic blocks and | |
863 | scopes in scope tree S. */ | |
864 | ||
/* Determine and record the relationships between basic blocks and
   scopes in scope tree S.  Processes children before S itself, so inner
   scopes claim their blocks first; any block not yet claimed within S's
   span is then recorded as immediately contained in S (S->bbs/S->num_bbs)
   and its RBI ()->scope back-pointer is set to S.  */

static void
relate_bbs_with_scopes (s)
     scope s;
{
  scope p;
  int i, bbi1, bbi2, bbs_spanned;
  rtx bbnote;

  /* Bottom-up: children run first so RBI ()->scope is already set for
     blocks owned by nested scopes when we scan S's span below.  */
  for (p = s->inner; p; p = p->next)
    relate_bbs_with_scopes (p);

  bbi1 = bbi2 = -1;
  bbs_spanned = 0;

  /* If the begin and end notes are both inside the same basic block,
     or if they are both outside of basic blocks, then we know immediately
     how they are related.  Otherwise, we need to poke around to make the
     determination.  */
  if (s->bb_beg != s->bb_end)
    {
      if (s->bb_beg && s->bb_end)
        {
          /* Both notes are in different bbs.  This implies that all the
             basic blocks spanned by the pair of notes are contained in
             this scope.  */
          bbi1 = s->bb_beg->index;
          bbi2 = s->bb_end->index;
          bbs_spanned = 1;
        }
      else if (! s->bb_beg)
        {
          /* First note is outside of a bb.  If the scope spans more than
             one basic block, then they all are contained within this
             scope.  Otherwise, this scope is contained within the basic
             block.  */
          bbnote = get_next_bb_note (s->note_beg);
          if (! bbnote)
            abort ();
          if (NOTE_BASIC_BLOCK (bbnote) == s->bb_end)
            {
              /* Scope fits inside a single bb; normalize bb_beg so that
                 bb_beg == bb_end marks the "contained in a bb" case.  */
              bbs_spanned = 0;
              s->bb_beg = NOTE_BASIC_BLOCK (bbnote);
            }
          else
            {
              /* Scope spans several bbs; clear bb_end so the pair no
                 longer looks like the contained-in-a-bb case.  */
              bbi1 = NOTE_BASIC_BLOCK (bbnote)->index;
              bbi2 = s->bb_end->index;
              s->bb_end = NULL;
              bbs_spanned = 1;
            }
        }
      else /* ! s->bb_end */
        {
          /* Second note is outside of a bb.  If the scope spans more than
             one basic block, then they all are contained within this
             scope.  Otherwise, this scope is contained within the basic
             block.  */
          bbnote = get_prev_bb_note (s->note_end);
          if (! bbnote)
            abort ();
          if (NOTE_BASIC_BLOCK (bbnote) == s->bb_beg)
            {
              /* Mirror of the case above: scope sits inside one bb.  */
              bbs_spanned = 0;
              s->bb_end = NOTE_BASIC_BLOCK (bbnote);
            }
          else
            {
              bbi1 = s->bb_beg->index;
              bbi2 = NOTE_BASIC_BLOCK (bbnote)->index;
              s->bb_beg = NULL;
              bbs_spanned = 1;
            }
        }
    }
  else
    {
      if (s->bb_beg)
        /* Both notes are in the same bb, which implies the block
           contains this scope.  */
        bbs_spanned = 0;
      else
        {
          rtx x1, x2;
          /* Both notes are outside of any bbs.  This implies that all the
             basic blocks spanned by the pair of notes are contained in
             this scope.
             There is a degenerate case to consider.  If the notes do not
             span any basic blocks, then it is an empty scope that can
             safely be deleted or ignored.  Mark these with level = -1.  */

          x1 = get_next_bb_note (s->note_beg);
          x2 = get_prev_bb_note (s->note_end);
          if (! (x1 && x2))
            {
              s->level = -1;
              bbs_spanned = 0;
            }
          else
            {
              bbi1 = NOTE_BASIC_BLOCK (x1)->index;
              bbi2 = NOTE_BASIC_BLOCK (x2)->index;
              bbs_spanned = 1;
            }
        }
    }

  /* If the scope spans one or more basic blocks, we record them.  We
     only record the bbs that are immediately contained within this
     scope.  Note that if a scope is contained within a bb, we can tell
     by checking that bb_beg = bb_end and that they are non-null.  */
  if (bbs_spanned)
    {
      int j = 0;

      /* First pass: count the unclaimed blocks so we can size the
         array exactly.  */
      s->num_bbs = 0;
      for (i = bbi1; i <= bbi2; i++)
        if (! RBI (BASIC_BLOCK (i))->scope)
          s->num_bbs++;

      s->bbs = xmalloc (s->num_bbs * sizeof (basic_block));
      /* Second pass: record each unclaimed block and claim it for S.  */
      for (i = bbi1; i <= bbi2; i++)
        {
          basic_block curr_bb = BASIC_BLOCK (i);
          if (! RBI (curr_bb)->scope)
            {
              s->bbs[j++] = curr_bb;
              RBI (curr_bb)->scope = s;
            }
        }
    }
  else
    s->num_bbs = 0;
}
998 | ||
999 | ||
1000 | /* Allocate and initialize a new scope structure with scope level LEVEL, | |
1001 | and record the NOTE beginning the scope. */ | |
1002 | ||
1003 | static scope | |
1004 | make_new_scope (level, note) | |
1005 | int level; | |
1006 | rtx note; | |
1007 | { | |
1008 | scope new_scope = xcalloc (1, sizeof (struct scope_def)); | |
1009 | new_scope->level = level; | |
1010 | new_scope->note_beg = note; | |
e3fdc58a JE |
1011 | return new_scope; |
1012 | } | |
1013 | ||
1014 | ||
1015 | /* Build a forest representing the scope structure of the function. | |
1016 | Return a pointer to a structure describing the forest. */ | |
1017 | ||
/* Build a forest representing the scope structure of the function,
   filling in FOREST.  Walks the insn chain once, tracking the current
   basic block and reacting to BLOCK_BEG/BLOCK_END notes: a BLOCK_BEG
   either starts a new tree (when no tree is open) or adds a child to
   the currently open scope; a BLOCK_END closes the innermost open
   scope.  Afterwards each tree is related to the basic blocks it
   spans.  */

static void
build_scope_forest (forest)
     scope_forest_info *forest;
{
  rtx x;
  int level, bbi, i;
  basic_block curr_bb;
  scope root, curr_scope = 0;

  forest->num_trees = 0;
  forest->trees = NULL;
  /* level == -1 means no scope is currently open.  */
  level = -1;
  root = NULL;
  curr_bb = NULL;
  bbi = 0;
  for (x = get_insns (); x; x = NEXT_INSN (x))
    {
      /* Track which basic block (if any) insn X lies in; blocks are
         visited in index order, so a single cursor BBI suffices.  */
      if (bbi < n_basic_blocks && x == BASIC_BLOCK (bbi)->head)
        curr_bb = BASIC_BLOCK (bbi);

      if (GET_CODE (x) == NOTE)
        {
          if (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_BEG)
            {
              if (root)
                {
                  /* A tree is already open: attach the new scope as the
                     last child of the current scope and descend.  */
                  scope new_scope;
                  if (! curr_scope)
                    abort();
                  level++;
                  new_scope = make_new_scope (level, x);
                  new_scope->outer = curr_scope;
                  new_scope->next = NULL;
                  if (! curr_scope->inner)
                    {
                      curr_scope->inner = new_scope;
                      curr_scope->inner_last = new_scope;
                    }
                  else
                    {
                      curr_scope->inner_last->next = new_scope;
                      curr_scope->inner_last = new_scope;
                    }
                  curr_scope = curr_scope->inner_last;
                }
              else
                {
                  /* No open tree: this scope roots a new tree in the
                     forest.  */
                  int ntrees = forest->num_trees;
                  level++;
                  curr_scope = make_new_scope (level, x);
                  root = curr_scope;
                  forest->trees = xrealloc (forest->trees,
                                            sizeof (scope) * (ntrees + 1));
                  forest->trees[forest->num_trees++] = root;
                }
              /* Remember the bb (or lack of one) holding the BEG note.  */
              curr_scope->bb_beg = curr_bb;
            }
          else if (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_END)
            {
              /* Close the innermost scope and pop back to its parent;
                 when level drops to -1 the whole tree is finished.  */
              curr_scope->bb_end = curr_bb;
              curr_scope->note_end = x;
              level--;
              curr_scope = curr_scope->outer;
              if (level == -1)
                root = NULL;
            }
        } /* if note */

      /* Leaving the current block: advance the block cursor.  */
      if (curr_bb && curr_bb->end == x)
        {
          curr_bb = NULL;
          bbi++;
        }

    } /* for */

  for (i = 0; i < forest->num_trees; i++)
    relate_bbs_with_scopes (forest->trees[i]);
}
1097 | ||
1098 | ||
1099 | /* Remove all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes from | |
1100 | the insn chain. */ | |
1101 | ||
/* Remove all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes from
   the insn chain.  Basic block boundaries are patched up when a removed
   note happened to be a block's end insn; the notes themselves are
   unlinked but not freed (the scope forest still references them via
   note_beg/note_end).  */

static void
remove_scope_notes ()
{
  rtx x, next;
  basic_block currbb = NULL;

  for (x = get_insns (); x; x = next)
    {
      /* Grab NEXT up front since X may be unlinked below.  */
      next = NEXT_INSN (x);
      if (NOTE_INSN_BASIC_BLOCK_P (x))
        currbb = NOTE_BASIC_BLOCK (x);

      if (GET_CODE (x) == NOTE
          && (NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_BEG
              || NOTE_LINE_NUMBER (x) == NOTE_INSN_BLOCK_END))
        {
          /* Check if the scope note happens to be the end of a bb.  */
          if (currbb && x == currbb->end)
            currbb->end = PREV_INSN (x);
          /* A scope note can never head a bb: the bb-head note comes
             first, so this would indicate corrupted state.  */
          if (currbb && x == currbb->head)
            abort ();

          if (PREV_INSN (x))
            {
              /* Splice X out of the doubly-linked insn chain.  */
              NEXT_INSN (PREV_INSN (x)) = next;
              PREV_INSN (next) = PREV_INSN (x);

              NEXT_INSN (x) = NULL;
              PREV_INSN (x) = NULL;
            }
          else
            /* A scope note cannot be the first insn of the function.  */
            abort ();
        }
    }
}
1137 | ||
1138 | ||
1139 | /* Insert scope note pairs for a contained scope tree S after insn IP. */ | |
f008a564 | 1140 | |
e3fdc58a | 1141 | static void |
402209ff | 1142 | insert_intra_1 (s, ip, bb) |
e3fdc58a JE |
1143 | scope s; |
1144 | rtx *ip; | |
402209ff | 1145 | basic_block bb; |
e3fdc58a JE |
1146 | { |
1147 | scope p; | |
1148 | ||
1149 | if (NOTE_BLOCK (s->note_beg)) | |
1150 | { | |
1151 | *ip = emit_note_after (NOTE_INSN_BLOCK_BEG, *ip); | |
1152 | NOTE_BLOCK (*ip) = NOTE_BLOCK (s->note_beg); | |
402209ff JH |
1153 | if (basic_block_for_insn) |
1154 | set_block_for_insn (*ip, bb); | |
e3fdc58a JE |
1155 | } |
1156 | ||
1157 | for (p = s->inner; p; p = p->next) | |
402209ff | 1158 | insert_intra_1 (p, ip, bb); |
e3fdc58a JE |
1159 | |
1160 | if (NOTE_BLOCK (s->note_beg)) | |
1161 | { | |
1162 | *ip = emit_note_after (NOTE_INSN_BLOCK_END, *ip); | |
1163 | NOTE_BLOCK (*ip) = NOTE_BLOCK (s->note_end); | |
402209ff JH |
1164 | if (basic_block_for_insn) |
1165 | set_block_for_insn (*ip, bb); | |
e3fdc58a JE |
1166 | } |
1167 | } | |
1168 | ||
1169 | ||
1170 | /* Insert NOTE_INSN_BLOCK_END notes and NOTE_INSN_BLOCK_BEG notes for | |
1171 | scopes that are contained within BB. */ | |
1172 | ||
1173 | static void | |
1174 | insert_intra_bb_scope_notes (bb) | |
1175 | basic_block bb; | |
1176 | { | |
f008a564 | 1177 | scope s = RBI (bb)->scope; |
e3fdc58a JE |
1178 | scope p; |
1179 | rtx ip; | |
1180 | ||
1181 | if (! s) | |
1182 | return; | |
1183 | ||
1184 | ip = bb->head; | |
1185 | if (GET_CODE (ip) == CODE_LABEL) | |
1186 | ip = NEXT_INSN (ip); | |
1187 | ||
1188 | for (p = s->inner; p; p = p->next) | |
1189 | { | |
1190 | if (p->bb_beg != NULL && p->bb_beg == p->bb_end && p->bb_beg == bb) | |
402209ff | 1191 | insert_intra_1 (p, &ip, bb); |
e3fdc58a JE |
1192 | } |
1193 | } | |
1194 | ||
1195 | ||
1196 | /* Given two consecutive basic blocks BB1 and BB2 with different scopes, | |
1197 | insert NOTE_INSN_BLOCK_END notes after BB1 and NOTE_INSN_BLOCK_BEG | |
1198 | notes before BB2 such that the notes are correctly balanced. If BB1 or | |
1199 | BB2 is NULL, we are inserting scope notes for the first and last basic | |
1200 | blocks, respectively. */ | |
1201 | ||
/* Given two consecutive basic blocks BB1 and BB2 with different scopes,
   insert NOTE_INSN_BLOCK_END notes after BB1 and NOTE_INSN_BLOCK_BEG
   notes before BB2 such that the notes are correctly balanced.  If BB1 or
   BB2 is NULL, we are inserting scope notes for the first and last basic
   blocks, respectively.  Scopes are closed from BB1's scope up to (but
   not including) the common ancestor, then opened from BB2's scope up to
   the same ancestor.  */

static void
insert_inter_bb_scope_notes (bb1, bb2)
     basic_block bb1;
     basic_block bb2;
{
  rtx ip;
  scope com;

  /* It is possible that a basic block is not contained in any scope.
     In that case, we either open or close a scope but not both.  */
  if (bb1 && bb2)
    {
      scope s1 = RBI (bb1)->scope;
      scope s2 = RBI (bb2)->scope;
      if (! s1 && ! s2)
        return;
      if (! s1)
        bb1 = NULL;
      else if (! s2)
        bb2 = NULL;
    }

  /* Find common ancestor scope.  Walk both chains outward, always
     stepping from the deeper one (levels track nesting depth), until
     they meet.  */
  if (bb1 && bb2)
    {
      scope s1 = RBI (bb1)->scope;
      scope s2 = RBI (bb2)->scope;
      while (s1 != s2)
        {
          /* Running off a root before meeting means the forest is
             inconsistent.  */
          if (! (s1 && s2))
            abort ();
          if (s1->level > s2->level)
            s1 = s1->outer;
          else if (s2->level > s1->level)
            s2 = s2->outer;
          else
            {
              s1 = s1->outer;
              s2 = s2->outer;
            }
        }
      com = s1;
    }
  else
    com = NULL;

  /* Close scopes.  END notes go after BB1's effective end, innermost
     scope first.  Scopes with no block tree on their begin note emit
     nothing.  */
  if (bb1)
    {
      scope s = RBI (bb1)->scope;
      ip = RBI (bb1)->eff_end;
      while (s != com)
        {
          if (NOTE_BLOCK (s->note_beg))
            {
              ip = emit_note_after (NOTE_INSN_BLOCK_END, ip);
              NOTE_BLOCK (ip) = NOTE_BLOCK (s->note_end);
              if (basic_block_for_insn)
                set_block_for_insn (ip, bb1);
            }
          s = s->outer;
        }
    }

  /* Open scopes.  BEG notes go before BB2's head; emitting before a
     fixed point while walking outward yields outermost-first order in
     the insn stream, balancing the END notes above.  */
  if (bb2)
    {
      scope s = RBI (bb2)->scope;
      ip = bb2->head;
      while (s != com)
        {
          if (NOTE_BLOCK (s->note_beg))
            {
              ip = emit_note_before (NOTE_INSN_BLOCK_BEG, ip);
              NOTE_BLOCK (ip) = NOTE_BLOCK (s->note_beg);
              if (basic_block_for_insn)
                set_block_for_insn (ip, bb2);
            }
          s = s->outer;
        }
    }
}
1284 | ||
1285 | ||
1286 | /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based | |
1287 | on the scope forest and the newly reordered basic blocks. */ | |
1288 | ||
1289 | static void | |
1290 | rebuild_scope_notes (forest) | |
1291 | scope_forest_info *forest; | |
1292 | { | |
1293 | int i; | |
1294 | ||
1295 | if (forest->num_trees == 0) | |
1296 | return; | |
1297 | ||
1298 | /* Start by opening the scopes before the first basic block. */ | |
1299 | insert_inter_bb_scope_notes (NULL, BASIC_BLOCK (0)); | |
1300 | ||
1301 | /* Then, open and close scopes as needed between blocks. */ | |
1302 | for (i = 0; i < n_basic_blocks - 1; i++) | |
1303 | { | |
1304 | basic_block bb1 = BASIC_BLOCK (i); | |
1305 | basic_block bb2 = BASIC_BLOCK (i + 1); | |
f008a564 | 1306 | if (RBI (bb1)->scope != RBI (bb2)->scope) |
e3fdc58a JE |
1307 | insert_inter_bb_scope_notes (bb1, bb2); |
1308 | insert_intra_bb_scope_notes (bb1); | |
1309 | } | |
1310 | ||
1311 | /* Finally, close the scopes after the last basic block. */ | |
1312 | insert_inter_bb_scope_notes (BASIC_BLOCK (n_basic_blocks - 1), NULL); | |
1313 | insert_intra_bb_scope_notes (BASIC_BLOCK (n_basic_blocks - 1)); | |
1314 | } | |
1315 | ||
1316 | ||
1317 | /* Free the storage associated with the scope tree at S. */ | |
1318 | ||
1319 | static void | |
1320 | free_scope_forest_1 (s) | |
1321 | scope s; | |
1322 | { | |
1323 | scope p, next; | |
1324 | ||
1325 | for (p = s->inner; p; p = next) | |
1326 | { | |
1327 | next = p->next; | |
1328 | free_scope_forest_1 (p); | |
1329 | } | |
1330 | ||
1331 | if (s->bbs) | |
1332 | free (s->bbs); | |
1333 | free (s); | |
1334 | } | |
1335 | ||
1336 | ||
1337 | /* Free the storage associated with the scope forest. */ | |
1338 | ||
1339 | static void | |
1340 | free_scope_forest (forest) | |
1341 | scope_forest_info *forest; | |
1342 | { | |
1343 | int i; | |
1344 | for (i = 0; i < forest->num_trees; i++) | |
1345 | free_scope_forest_1 (forest->trees[i]); | |
1346 | } | |
1347 | ||
1348 | ||
1349 | /* Visualize the scope forest. */ | |
1350 | ||
1351 | void | |
1352 | dump_scope_forest (forest) | |
1353 | scope_forest_info *forest; | |
1354 | { | |
1355 | if (forest->num_trees == 0) | |
1356 | fprintf (stderr, "\n< Empty scope forest >\n"); | |
1357 | else | |
1358 | { | |
1359 | int i; | |
1360 | fprintf (stderr, "\n< Scope forest >\n"); | |
1361 | for (i = 0; i < forest->num_trees; i++) | |
1362 | dump_scope_forest_1 (forest->trees[i], 0); | |
1363 | } | |
1364 | } | |
1365 | ||
1366 | ||
1367 | /* Recursive portion of dump_scope_forest. */ | |
1368 | ||
1369 | static void | |
1370 | dump_scope_forest_1 (s, indent) | |
1371 | scope s; | |
1372 | int indent; | |
1373 | { | |
1374 | scope p; | |
1375 | int i; | |
1376 | ||
1377 | if (s->bb_beg != NULL && s->bb_beg == s->bb_end | |
f008a564 RH |
1378 | && RBI (s->bb_beg)->scope |
1379 | && RBI (s->bb_beg)->scope->level + 1 == s->level) | |
e3fdc58a JE |
1380 | { |
1381 | fprintf (stderr, "%*s", indent, ""); | |
1382 | fprintf (stderr, "BB%d:\n", s->bb_beg->index); | |
1383 | } | |
1384 | ||
1385 | fprintf (stderr, "%*s", indent, ""); | |
1386 | fprintf (stderr, "{ level %d (block %p)\n", s->level, | |
683eb0e9 | 1387 | (PTR) NOTE_BLOCK (s->note_beg)); |
e3fdc58a JE |
1388 | |
1389 | fprintf (stderr, "%*s%s", indent, "", "bbs:"); | |
1390 | for (i = 0; i < s->num_bbs; i++) | |
1391 | fprintf (stderr, " %d", s->bbs[i]->index); | |
1392 | fprintf (stderr, "\n"); | |
1393 | ||
1394 | for (p = s->inner; p; p = p->next) | |
1395 | dump_scope_forest_1 (p, indent + 2); | |
1396 | ||
1397 | fprintf (stderr, "%*s", indent, ""); | |
1398 | fprintf (stderr, "}\n"); | |
1399 | } | |
1400 | ||
1401 | ||
f008a564 | 1402 | /* Reorder basic blocks. The main entry point to this file. */ |
295ae817 JE |
1403 | |
/* Reorder basic blocks.  The main entry point to this file.
   Pipeline: capture the scope structure, strip scope notes out of the
   insn chain, compute the new block ordering, rewrite the insn chain to
   match it, then re-emit balanced scope notes for the new order.  */

void
reorder_basic_blocks ()
{
  scope_forest_info forest;
  int i;

  /* Nothing to reorder with zero or one block.  */
  if (n_basic_blocks <= 1)
    return;

  /* Hang per-block reorder bookkeeping (reorder_block_def) off each
     block's aux field; the RBI () accesses throughout rely on this.  */
  for (i = 0; i < n_basic_blocks; i++)
    BASIC_BLOCK (i)->aux = xcalloc (1, sizeof (struct reorder_block_def));

  EXIT_BLOCK_PTR->aux = xcalloc (1, sizeof (struct reorder_block_def));

  /* Record the scope structure, then remove the scope notes so they
     don't interfere with moving blocks around.  */
  build_scope_forest (&forest);
  remove_scope_notes ();

  record_effective_endpoints ();
  make_reorder_chain ();

  if (rtl_dump_file)
    dump_flow_info (rtl_dump_file);

  /* Rewrite the insn chain in the chosen order.  */
  fixup_reorder_chain ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  /* Re-emit scope notes, balanced against the new block order.  */
  rebuild_scope_notes (&forest);
  free_scope_forest (&forest);
  reorder_blocks ();

  /* Release the per-block bookkeeping allocated above.  */
  for (i = 0; i < n_basic_blocks; i++)
    free (BASIC_BLOCK (i)->aux);

  free (EXIT_BLOCK_PTR->aux);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}