]>
Commit | Line | Data |
---|---|---|
6de9cd9a DN |
1 | /* Dead code elimination pass for the GNU compiler. |
2 | Copyright (C) 2002, 2003, 2004 Free Software Foundation, Inc. | |
3 | Contributed by Ben Elliston <bje@redhat.com> | |
4 | and Andrew MacLeod <amacleod@redhat.com> | |
5 | Adapted to use control dependence by Steven Bosscher, SUSE Labs. | |
6 | ||
7 | This file is part of GCC. | |
8 | ||
9 | GCC is free software; you can redistribute it and/or modify it | |
10 | under the terms of the GNU General Public License as published by the | |
11 | Free Software Foundation; either version 2, or (at your option) any | |
12 | later version. | |
13 | ||
14 | GCC is distributed in the hope that it will be useful, but WITHOUT | |
15 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
16 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
17 | for more details. | |
18 | ||
19 | You should have received a copy of the GNU General Public License | |
20 | along with GCC; see the file COPYING. If not, write to the Free | |
21 | Software Foundation, 59 Temple Place - Suite 330, Boston, MA | |
22 | 02111-1307, USA. */ | |
23 | ||
24 | /* Dead code elimination. | |
25 | ||
26 | References: | |
27 | ||
28 | Building an Optimizing Compiler, | |
29 | Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9. | |
30 | ||
31 | Advanced Compiler Design and Implementation, | |
32 | Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10. | |
33 | ||
34 | Dead-code elimination is the removal of statements which have no | |
35 | impact on the program's output. "Dead statements" have no impact | |
36 | on the program's output, while "necessary statements" may have | |
37 | impact on the output. | |
38 | ||
39 | The algorithm consists of three phases: | |
40 | 1. Marking as necessary all statements known to be necessary, | |
41 | e.g. most function calls, writing a value to memory, etc; | |
42 | 2. Propagating necessary statements, e.g., the statements | |
43 | giving values to operands in necessary statements; and | |
44 | 3. Removing dead statements. */ | |
45 | ||
46 | #include "config.h" | |
47 | #include "system.h" | |
48 | #include "coretypes.h" | |
49 | #include "tm.h" | |
50 | #include "errors.h" | |
51 | #include "ggc.h" | |
52 | ||
53 | /* These RTL headers are needed for basic-block.h. */ | |
54 | #include "rtl.h" | |
55 | #include "tm_p.h" | |
56 | #include "hard-reg-set.h" | |
57 | #include "basic-block.h" | |
58 | ||
59 | #include "tree.h" | |
60 | #include "diagnostic.h" | |
61 | #include "tree-flow.h" | |
eadf906f | 62 | #include "tree-gimple.h" |
6de9cd9a DN |
63 | #include "tree-dump.h" |
64 | #include "tree-pass.h" | |
65 | #include "timevar.h" | |
66 | #include "flags.h" | |
67 | \f | |
/* Statistics gathered during the current run of the pass, reported by
   print_stats when dumping is enabled.  */
static struct stmt_stats
{
  int total;		/* Statements examined.  */
  int total_phis;	/* PHI nodes examined.  */
  int removed;		/* Statements deleted.  */
  int removed_phis;	/* PHI nodes deleted.  */
} stats;

/* Worklist of statements that have been marked necessary but whose
   operands have not yet been processed by propagate_necessity.  */
static varray_type worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
bitmap *control_dependence_map;

/* Execute CODE for each edge (given number EDGE_NUMBER within the CODE)
   for which the block with index N is control dependent.  */
#define EXECUTE_IF_CONTROL_DEPENDENT(N, EDGE_NUMBER, CODE)	      \
  EXECUTE_IF_SET_IN_BITMAP (control_dependence_map[N], 0, EDGE_NUMBER, CODE)

/* Local function prototypes.  */
static inline void set_control_dependence_map_bit (basic_block, int);
static inline void clear_control_dependence_bitmap (basic_block);
static void find_all_control_dependences (struct edge_list *);
static void find_control_dependence (struct edge_list *, int);
static inline basic_block find_pdom (basic_block);

static inline void mark_stmt_necessary (tree, bool);
static inline void mark_operand_necessary (tree);

static bool need_to_preserve_store (tree);
static void mark_stmt_if_obviously_necessary (tree, bool);
static void find_obviously_necessary_stmts (struct edge_list *);

static void mark_control_dependent_edges_necessary (basic_block, struct edge_list *);
static void propagate_necessity (struct edge_list *);

static void eliminate_unnecessary_stmts (void);
static void remove_dead_phis (basic_block);
static void remove_dead_stmt (block_stmt_iterator *, basic_block);

static void print_stats (void);
static void tree_dce_init (bool);
static void tree_dce_done (bool);
124 | \f | |
125 | /* Indicate block BB is control dependent on an edge with index EDGE_INDEX. */ | |
126 | static inline void | |
127 | set_control_dependence_map_bit (basic_block bb, int edge_index) | |
128 | { | |
129 | if (bb == ENTRY_BLOCK_PTR) | |
130 | return; | |
131 | if (bb == EXIT_BLOCK_PTR) | |
132 | abort (); | |
133 | bitmap_set_bit (control_dependence_map[bb->index], edge_index); | |
134 | } | |
135 | ||
136 | /* Clear all control dependences for block BB. */ | |
137 | static inline | |
138 | void clear_control_dependence_bitmap (basic_block bb) | |
139 | { | |
140 | bitmap_clear (control_dependence_map[bb->index]); | |
141 | } | |
142 | ||
/* Record all blocks' control dependences on all edges in the edge
   list EL, ala Morgan, Section 3.6.  */

static void
find_all_control_dependences (struct edge_list *el)
{
  int edge_index;
  int num_edges = NUM_EDGES (el);

  for (edge_index = 0; edge_index < num_edges; edge_index++)
    find_control_dependence (el, edge_index);
}
154 | ||
155 | /* Determine all blocks' control dependences on the given edge with edge_list | |
156 | EL index EDGE_INDEX, ala Morgan, Section 3.6. */ | |
157 | ||
158 | static void | |
159 | find_control_dependence (struct edge_list *el, int edge_index) | |
160 | { | |
161 | basic_block current_block; | |
162 | basic_block ending_block; | |
163 | ||
164 | #ifdef ENABLE_CHECKING | |
165 | if (INDEX_EDGE_PRED_BB (el, edge_index) == EXIT_BLOCK_PTR) | |
166 | abort (); | |
167 | #endif | |
168 | ||
169 | if (INDEX_EDGE_PRED_BB (el, edge_index) == ENTRY_BLOCK_PTR) | |
170 | ending_block = ENTRY_BLOCK_PTR->next_bb; | |
171 | else | |
172 | ending_block = find_pdom (INDEX_EDGE_PRED_BB (el, edge_index)); | |
173 | ||
174 | for (current_block = INDEX_EDGE_SUCC_BB (el, edge_index); | |
175 | current_block != ending_block && current_block != EXIT_BLOCK_PTR; | |
176 | current_block = find_pdom (current_block)) | |
177 | { | |
178 | edge e = INDEX_EDGE (el, edge_index); | |
179 | ||
180 | /* For abnormal edges, we don't make current_block control | |
181 | dependent because instructions that throw are always necessary | |
182 | anyway. */ | |
183 | if (e->flags & EDGE_ABNORMAL) | |
184 | continue; | |
185 | ||
186 | set_control_dependence_map_bit (current_block, edge_index); | |
187 | } | |
188 | } | |
189 | ||
190 | /* Find the immediate postdominator PDOM of the specified basic block BLOCK. | |
191 | This function is necessary because some blocks have negative numbers. */ | |
192 | ||
193 | static inline basic_block | |
194 | find_pdom (basic_block block) | |
195 | { | |
196 | if (block == ENTRY_BLOCK_PTR) | |
197 | abort (); | |
198 | else if (block == EXIT_BLOCK_PTR) | |
199 | return EXIT_BLOCK_PTR; | |
200 | else | |
201 | { | |
202 | basic_block bb = get_immediate_dominator (CDI_POST_DOMINATORS, block); | |
203 | if (! bb) | |
204 | return EXIT_BLOCK_PTR; | |
205 | return bb; | |
206 | } | |
207 | } | |
208 | \f | |
/* A statement is flagged as necessary by (ab)using the asm_written_flag
   bit on its tree node, which is otherwise unused at this point.  The
   macro argument is parenthesized so that any expression, not just a
   primary expression, may be passed; the result remains an lvalue so
   NECESSARY (stmt) = 1 still works.  */
#define NECESSARY(stmt)		((stmt)->common.asm_written_flag)
210 | ||
211 | /* If STMT is not already marked necessary, mark it, and add it to the | |
212 | worklist if ADD_TO_WORKLIST is true. */ | |
213 | static inline void | |
214 | mark_stmt_necessary (tree stmt, bool add_to_worklist) | |
215 | { | |
216 | #ifdef ENABLE_CHECKING | |
217 | if (stmt == NULL | |
218 | || stmt == error_mark_node | |
219 | || (stmt && DECL_P (stmt))) | |
220 | abort (); | |
221 | #endif | |
222 | ||
223 | if (NECESSARY (stmt)) | |
224 | return; | |
225 | ||
226 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
227 | { | |
228 | fprintf (dump_file, "Marking useful stmt: "); | |
229 | print_generic_stmt (dump_file, stmt, TDF_SLIM); | |
230 | fprintf (dump_file, "\n"); | |
231 | } | |
232 | ||
233 | NECESSARY (stmt) = 1; | |
234 | if (add_to_worklist) | |
235 | VARRAY_PUSH_TREE (worklist, stmt); | |
236 | } | |
237 | ||
238 | /* Mark the statement defining operand OP as necessary. */ | |
239 | ||
240 | static inline void | |
241 | mark_operand_necessary (tree op) | |
242 | { | |
243 | tree stmt; | |
244 | int ver; | |
245 | ||
246 | #ifdef ENABLE_CHECKING | |
247 | if (op == NULL) | |
248 | abort (); | |
249 | #endif | |
250 | ||
251 | ver = SSA_NAME_VERSION (op); | |
252 | if (TEST_BIT (processed, ver)) | |
253 | return; | |
254 | SET_BIT (processed, ver); | |
255 | ||
256 | stmt = SSA_NAME_DEF_STMT (op); | |
257 | #ifdef ENABLE_CHECKING | |
258 | if (stmt == NULL) | |
259 | abort (); | |
260 | #endif | |
261 | ||
262 | if (NECESSARY (stmt) | |
263 | || IS_EMPTY_STMT (stmt)) | |
264 | return; | |
265 | ||
266 | NECESSARY (stmt) = 1; | |
267 | VARRAY_PUSH_TREE (worklist, stmt); | |
268 | } | |
269 | \f | |
270 | /* Return true if a store to a variable needs to be preserved. */ | |
271 | ||
272 | static inline bool | |
273 | need_to_preserve_store (tree ssa_name) | |
274 | { | |
275 | return (needs_to_live_in_memory (SSA_NAME_VAR (ssa_name))); | |
276 | } | |
277 | \f | |
278 | ||
/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (tree stmt, bool aggressive)
{
  def_optype defs;
  vdef_optype vdefs;
  stmt_ann_t ann;
  size_t i;

  /* Statements that are implicitly live.  Most function calls, asm and return
     statements are required.  Labels and BIND_EXPR nodes are kept because
     they are control flow, and we have no way of knowing whether they can be
     removed.  DCE can eliminate all the other statements in a block, and CFG
     can then remove the block and labels.  */
  switch (TREE_CODE (stmt))
    {
    case BIND_EXPR:
    case LABEL_EXPR:
    case CASE_LABEL_EXPR:
      /* Kept, but never makes anything else necessary, so it is not
	 added to the worklist.  */
      mark_stmt_necessary (stmt, false);
      return;

    case ASM_EXPR:
    case RESX_EXPR:
    case RETURN_EXPR:
      mark_stmt_necessary (stmt, true);
      return;

    case CALL_EXPR:
      /* Most, but not all function calls are required.  Function calls that
	 produce no result and have no side effects (i.e. const and pure
	 functions) are unnecessary.  */
      if (TREE_SIDE_EFFECTS (stmt))
	mark_stmt_necessary (stmt, true);
      return;

    case MODIFY_EXPR:
      /* An assignment from a side-effecting call is as necessary as the
	 call itself.  */
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR
	  && TREE_SIDE_EFFECTS (TREE_OPERAND (stmt, 1)))
	{
	  mark_stmt_necessary (stmt, true);
	  return;
	}

      /* These values are mildly magic bits of the EH runtime.  We can't
	 see the entire lifetime of these values until landing pads are
	 generated.  */
      if (TREE_CODE (TREE_OPERAND (stmt, 0)) == EXC_PTR_EXPR
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) == FILTER_EXPR)
	{
	  mark_stmt_necessary (stmt, true);
	  return;
	}
      /* Other assignments fall through to the operand checks below.  */
      break;

    case GOTO_EXPR:
      /* Computed gotos must stay; simple gotos are pure control flow the
	 CFG cleanup can deal with.  */
      if (! simple_goto_p (stmt))
	mark_stmt_necessary (stmt, true);
      return;

    case COND_EXPR:
      if (GOTO_DESTINATION (COND_EXPR_THEN (stmt))
	  == GOTO_DESTINATION (COND_EXPR_ELSE (stmt)))
	{
	  /* A COND_EXPR is obviously dead if the target labels are the same.
	     We cannot kill the statement at this point, so to prevent the
	     statement from being marked necessary, we replace the condition
	     with a constant.  The stmt is killed later on in cfg_cleanup.  */
	  COND_EXPR_COND (stmt) = integer_zero_node;
	  modify_stmt (stmt);
	  return;
	}
      /* Fall through.  */

    case SWITCH_EXPR:
      /* In conservative mode every control statement is kept; in
	 aggressive mode control dependence decides later.  */
      if (! aggressive)
	mark_stmt_necessary (stmt, true);
      break;

    default:
      break;
    }

  ann = stmt_ann (stmt);
  /* If the statement has volatile operands, it needs to be preserved.  Same
     for statements that can alter control flow in unpredictable ways.  */
  if (ann->has_volatile_ops
      || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  get_stmt_operands (stmt);

  /* Any real definition whose variable must live in memory makes the
     statement necessary.  */
  defs = DEF_OPS (ann);
  for (i = 0; i < NUM_DEFS (defs); i++)
    {
      tree def = DEF_OP (defs, i);
      if (need_to_preserve_store (def))
	{
	  mark_stmt_necessary (stmt, true);
	  return;
	}
    }

  /* Likewise for virtual definitions.  */
  vdefs = VDEF_OPS (ann);
  for (i = 0; i < NUM_VDEFS (vdefs); i++)
    {
      tree vdef = VDEF_RESULT (vdefs, i);
      if (need_to_preserve_store (vdef))
	{
	  mark_stmt_necessary (stmt, true);
	  return;
	}
    }

  return;
}
403 | \f | |
404 | /* Find obviously necessary statements. These are things like most function | |
405 | calls, and stores to file level variables. | |
406 | ||
407 | If EL is NULL, control statements are conservatively marked as | |
408 | necessary. Otherwise it contains the list of edges used by control | |
409 | dependence analysis. */ | |
410 | ||
411 | static void | |
412 | find_obviously_necessary_stmts (struct edge_list *el) | |
413 | { | |
414 | basic_block bb; | |
415 | block_stmt_iterator i; | |
416 | edge e; | |
417 | ||
418 | FOR_EACH_BB (bb) | |
419 | { | |
420 | tree phi; | |
421 | ||
422 | /* Check any PHI nodes in the block. */ | |
423 | for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi)) | |
424 | { | |
425 | NECESSARY (phi) = 0; | |
426 | ||
427 | /* PHIs for virtual variables do not directly affect code | |
428 | generation and need not be considered inherently necessary | |
429 | regardless of the bits set in their decl. | |
430 | ||
431 | Thus, we only need to mark PHIs for real variables which | |
432 | need their result preserved as being inherently necessary. */ | |
433 | if (is_gimple_reg (PHI_RESULT (phi)) | |
434 | && need_to_preserve_store (PHI_RESULT (phi))) | |
435 | mark_stmt_necessary (phi, true); | |
436 | } | |
437 | ||
438 | /* Check all statements in the block. */ | |
439 | for (i = bsi_start (bb); ! bsi_end_p (i); bsi_next (&i)) | |
440 | { | |
441 | tree stmt = bsi_stmt (i); | |
442 | NECESSARY (stmt) = 0; | |
443 | mark_stmt_if_obviously_necessary (stmt, el != NULL); | |
444 | } | |
445 | ||
446 | /* Mark this basic block as `not visited'. A block will be marked | |
447 | visited when the edges that it is control dependent on have been | |
448 | marked. */ | |
449 | bb->flags &= ~BB_VISITED; | |
450 | } | |
451 | ||
452 | if (el) | |
453 | { | |
454 | /* Prevent the loops from being removed. We must keep the infinite loops, | |
455 | and we currently do not have a means to recognize the finite ones. */ | |
456 | FOR_EACH_BB (bb) | |
457 | { | |
458 | for (e = bb->succ; e; e = e->succ_next) | |
459 | if (e->flags & EDGE_DFS_BACK) | |
460 | mark_control_dependent_edges_necessary (e->dest, el); | |
461 | } | |
462 | } | |
463 | } | |
464 | \f | |
/* Make corresponding control dependent edges necessary.  We only
   have to do this once for each basic block, so we clear the bitmap
   after we're done.  */

static void
mark_control_dependent_edges_necessary (basic_block bb, struct edge_list *el)
{
  int edge_number;

  EXECUTE_IF_CONTROL_DEPENDENT (bb->index, edge_number,
    {
      tree t;
      basic_block cd_bb = INDEX_EDGE_PRED_BB (el, edge_number);

      /* Skip predecessor blocks whose last statement was already marked;
	 the `continue' here continues the loop inside the iteration
	 macro's expansion.  */
      if (TEST_BIT (last_stmt_necessary, cd_bb->index))
	continue;
      SET_BIT (last_stmt_necessary, cd_bb->index);

      /* The branch at the end of the controlling block is what BB's
	 execution depends on.  */
      t = last_stmt (cd_bb);
      if (is_ctrl_stmt (t))
	mark_stmt_necessary (t, true);
    });
}
487 | \f | |
/* Propagate necessity using the operands of necessary statements.  Process
   the uses on each statement in the worklist, and add all feeding statements
   which contribute to the calculation of this value to the worklist.

   In conservative mode, EL is NULL.  */

static void
propagate_necessity (struct edge_list *el)
{
  tree i;
  bool aggressive = (el ? true : false);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nProcessing worklist:\n");

  /* Fixpoint loop: mark_operand_necessary and
     mark_control_dependent_edges_necessary push newly-necessary
     statements back onto the worklist.  */
  while (VARRAY_ACTIVE_SIZE (worklist) > 0)
    {
      /* Take `i' from worklist.  */
      i = VARRAY_TOP_TREE (worklist);
      VARRAY_POP (worklist);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "processing: ");
	  print_generic_stmt (dump_file, i, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}

      if (aggressive)
	{
	  /* Mark the last statements of the basic blocks that the block
	     containing `i' is control dependent on, but only if we haven't
	     already done so (BB_VISITED serves as the "done" flag).  */
	  basic_block bb = bb_for_stmt (i);
	  if (! (bb->flags & BB_VISITED))
	    {
	      bb->flags |= BB_VISITED;
	      mark_control_dependent_edges_necessary (bb, el);
	    }
	}

      if (TREE_CODE (i) == PHI_NODE)
	{
	  /* PHI nodes are somewhat special in that each PHI alternative has
	     data and control dependencies.  All the statements feeding the
	     PHI node's arguments are always necessary.  In aggressive mode,
	     we also consider the control dependent edges leading to the
	     predecessor block associated with each PHI alternative as
	     necessary.  */
	  int k;
	  for (k = 0; k < PHI_NUM_ARGS (i); k++)
	    {
	      tree arg = PHI_ARG_DEF (i, k);
	      if (TREE_CODE (arg) == SSA_NAME)
		mark_operand_necessary (arg);
	    }

	  if (aggressive)
	    {
	      for (k = 0; k < PHI_NUM_ARGS (i); k++)
		{
		  basic_block arg_bb = PHI_ARG_EDGE (i, k)->src;
		  if (! (arg_bb->flags & BB_VISITED))
		    {
		      arg_bb->flags |= BB_VISITED;
		      mark_control_dependent_edges_necessary (arg_bb, el);
		    }
		}
	    }
	}
      else
	{
	  /* Propagate through the operands.  Examine all the USE, VUSE and
	     VDEF operands in this statement.  Mark all the statements which
	     feed this statement's uses as necessary.  */
	  vuse_optype vuses;
	  vdef_optype vdefs;
	  use_optype uses;
	  stmt_ann_t ann;
	  size_t k;

	  get_stmt_operands (i);
	  ann = stmt_ann (i);

	  uses = USE_OPS (ann);
	  for (k = 0; k < NUM_USES (uses); k++)
	    mark_operand_necessary (USE_OP (uses, k));

	  vuses = VUSE_OPS (ann);
	  for (k = 0; k < NUM_VUSES (vuses); k++)
	    mark_operand_necessary (VUSE_OP (vuses, k));

	  /* The operands of VDEF expressions are also needed as they
	     represent potential definitions that may reach this
	     statement (VDEF operands allow us to follow def-def links).  */
	  vdefs = VDEF_OPS (ann);
	  for (k = 0; k < NUM_VDEFS (vdefs); k++)
	    mark_operand_necessary (VDEF_OP (vdefs, k));
	}
    }
}
589 | \f | |
590 | /* Eliminate unnecessary statements. Any instruction not marked as necessary | |
591 | contributes nothing to the program, and can be deleted. */ | |
592 | ||
593 | static void | |
594 | eliminate_unnecessary_stmts (void) | |
595 | { | |
596 | basic_block bb; | |
597 | block_stmt_iterator i; | |
598 | ||
599 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
600 | fprintf (dump_file, "\nEliminating unnecessary statements:\n"); | |
601 | ||
602 | clear_special_calls (); | |
603 | FOR_EACH_BB (bb) | |
604 | { | |
605 | /* Remove dead PHI nodes. */ | |
606 | remove_dead_phis (bb); | |
607 | ||
608 | /* Remove dead statements. */ | |
609 | for (i = bsi_start (bb); ! bsi_end_p (i) ; ) | |
610 | { | |
611 | tree t = bsi_stmt (i); | |
612 | ||
613 | stats.total++; | |
614 | ||
615 | /* If `i' is not necessary then remove it. */ | |
616 | if (! NECESSARY (t)) | |
617 | remove_dead_stmt (&i, bb); | |
618 | else | |
619 | { | |
620 | if (TREE_CODE (t) == CALL_EXPR) | |
621 | notice_special_calls (t); | |
622 | else if (TREE_CODE (t) == MODIFY_EXPR | |
623 | && TREE_CODE (TREE_OPERAND (t, 1)) == CALL_EXPR) | |
624 | notice_special_calls (TREE_OPERAND (t, 1)); | |
625 | bsi_next (&i); | |
626 | } | |
627 | } | |
628 | } | |
629 | } | |
630 | \f | |
631 | /* Remove dead PHI nodes from block BB. */ | |
632 | ||
633 | static void | |
634 | remove_dead_phis (basic_block bb) | |
635 | { | |
636 | tree prev, phi; | |
637 | ||
638 | prev = NULL_TREE; | |
639 | phi = phi_nodes (bb); | |
640 | while (phi) | |
641 | { | |
642 | stats.total_phis++; | |
643 | ||
644 | if (! NECESSARY (phi)) | |
645 | { | |
646 | tree next = TREE_CHAIN (phi); | |
647 | ||
648 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
649 | { | |
650 | fprintf (dump_file, "Deleting : "); | |
651 | print_generic_stmt (dump_file, phi, TDF_SLIM); | |
652 | fprintf (dump_file, "\n"); | |
653 | } | |
654 | ||
655 | remove_phi_node (phi, prev, bb); | |
656 | stats.removed_phis++; | |
657 | phi = next; | |
658 | } | |
659 | else | |
660 | { | |
661 | prev = phi; | |
662 | phi = TREE_CHAIN (phi); | |
663 | } | |
664 | } | |
665 | } | |
666 | \f | |
/* Remove dead statement pointed by iterator I.  Receives the basic block BB
   containing I so that we don't have to look it up.  Advances *I past the
   removed statement.  */

static void
remove_dead_stmt (block_stmt_iterator *i, basic_block bb)
{
  tree t = bsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_generic_stmt (dump_file, t, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we also change
     the flow graph so that the current block will simply fall-thru to its
     immediate post-dominator.  The blocks we are circumventing will be
     removed by cleanup_cfg if this change in the flow graph makes them
     unreachable.  */
  if (is_ctrl_stmt (t))
    {
      basic_block post_dom_bb;
      edge e;
#ifdef ENABLE_CHECKING
      /* The post dominance info has to be up-to-date.  */
      if (dom_computed[CDI_POST_DOMINATORS] != DOM_OK)
	abort ();
#endif
      /* Get the immediate post dominator of bb.  */
      post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, bb);
      /* Some blocks don't have an immediate post dominator.  This can happen
	 for example with infinite loops.  Removing an infinite loop is an
	 inappropriate transformation anyway...  Leave the branch in place.  */
      if (! post_dom_bb)
	{
	  bsi_next (i);
	  return;
	}

      /* Redirect the first edge out of BB to reach POST_DOM_BB.  Any
	 pending PHI argument on that edge is dropped along with it.  */
      redirect_edge_and_branch (bb->succ, post_dom_bb);
      PENDING_STMT (bb->succ) = NULL;

      /* The edge is no longer associated with a conditional, so it does
	 not have TRUE/FALSE flags.  */
      bb->succ->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);

      /* If the edge reaches any block other than the exit, then it is a
	 fallthru edge; if it reaches the exit, then it is not a fallthru
	 edge.  */
      if (post_dom_bb != EXIT_BLOCK_PTR)
	bb->succ->flags |= EDGE_FALLTHRU;
      else
	bb->succ->flags &= ~EDGE_FALLTHRU;

      /* Remove all the remaining outgoing edges; only the redirected
	 first edge survives.  */
      for (e = bb->succ->succ_next; e != NULL;)
	{
	  edge tmp = e;
	  e = e->succ_next;
	  remove_edge (tmp);
	}
    }

  bsi_remove (i);
}
737 | \f | |
738 | /* Print out removed statement statistics. */ | |
739 | ||
740 | static void | |
741 | print_stats (void) | |
742 | { | |
743 | if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS))) | |
744 | { | |
745 | float percg; | |
746 | ||
747 | percg = ((float) stats.removed / (float) stats.total) * 100; | |
748 | fprintf (dump_file, "Removed %d of %d statements (%d%%)\n", | |
749 | stats.removed, stats.total, (int) percg); | |
750 | ||
751 | if (stats.total_phis == 0) | |
752 | percg = 0; | |
753 | else | |
754 | percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100; | |
755 | ||
756 | fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n", | |
757 | stats.removed_phis, stats.total_phis, (int) percg); | |
758 | } | |
759 | } | |
760 | \f | |
761 | /* Initialization for this pass. Set up the used data structures. */ | |
762 | ||
763 | static void | |
764 | tree_dce_init (bool aggressive) | |
765 | { | |
766 | memset ((void *) &stats, 0, sizeof (stats)); | |
767 | ||
768 | if (aggressive) | |
769 | { | |
770 | int i; | |
771 | ||
772 | control_dependence_map | |
773 | = xmalloc (last_basic_block * sizeof (bitmap)); | |
774 | for (i = 0; i < last_basic_block; ++i) | |
775 | control_dependence_map[i] = BITMAP_XMALLOC (); | |
776 | ||
777 | last_stmt_necessary = sbitmap_alloc (last_basic_block); | |
778 | sbitmap_zero (last_stmt_necessary); | |
779 | } | |
780 | ||
781 | processed = sbitmap_alloc (highest_ssa_version + 1); | |
782 | sbitmap_zero (processed); | |
783 | ||
784 | VARRAY_TREE_INIT (worklist, 64, "work list"); | |
785 | } | |
786 | ||
787 | /* Cleanup after this pass. */ | |
788 | ||
789 | static void | |
790 | tree_dce_done (bool aggressive) | |
791 | { | |
792 | if (aggressive) | |
793 | { | |
794 | int i; | |
795 | ||
796 | for (i = 0; i < last_basic_block; ++i) | |
797 | BITMAP_XFREE (control_dependence_map[i]); | |
798 | free (control_dependence_map); | |
799 | ||
800 | sbitmap_free (last_stmt_necessary); | |
801 | } | |
802 | ||
803 | sbitmap_free (processed); | |
804 | } | |
805 | \f | |
/* Main routine to eliminate dead code.

   AGGRESSIVE controls the aggressiveness of the algorithm.
   In conservative mode, we ignore control dependence and simply declare
   all but the most trivially dead branches necessary.  This mode is fast.
   In aggressive mode, control dependences are taken into account, which
   results in more dead code elimination, but at the cost of some time.

   FIXME: Aggressive mode before PRE doesn't work currently because
	  the dominance info is not invalidated after DCE1.  This is
	  not an issue right now because we only run aggressive DCE
	  as the last tree SSA pass, but keep this in mind when you
	  start experimenting with pass ordering.  */

static void
perform_tree_ssa_dce (bool aggressive)
{
  struct edge_list *el = NULL;

  tree_dce_init (aggressive);

  if (aggressive)
    {
      /* Compute control dependence.  */
      timevar_push (TV_CONTROL_DEPENDENCES);
      calculate_dominance_info (CDI_POST_DOMINATORS);
      el = create_edge_list ();
      find_all_control_dependences (el);
      timevar_pop (TV_CONTROL_DEPENDENCES);

      /* Back edges identify loops whose control statements must be
	 kept (see find_obviously_necessary_stmts).  */
      mark_dfs_back_edges ();
    }

  /* Phase 1: seed the worklist with inherently necessary statements.  */
  find_obviously_necessary_stmts (el);

  /* Phase 2: propagate necessity to feeding statements.  */
  propagate_necessity (el);

  /* Phase 3: delete everything still unmarked.  */
  eliminate_unnecessary_stmts ();

  if (aggressive)
    free_dominance_info (CDI_POST_DOMINATORS);

  cleanup_tree_cfg ();

  /* Debugging dumps.  */
  if (dump_file)
    {
      dump_function_to_file (current_function_decl, dump_file, dump_flags);
      print_stats ();
    }

  tree_dce_done (aggressive);

  /* EL is NULL in conservative mode; presumably free_edge_list accepts
     NULL — TODO(review): confirm against cfganal.c.  */
  free_edge_list (el);
}
861 | ||
/* Pass entry points.  */

/* Conservative DCE: no control dependence analysis.  */
static void
tree_ssa_dce (void)
{
  perform_tree_ssa_dce (/*aggressive=*/false);
}
868 | ||
/* Control-dependence-based DCE; only aggressive at -O2 and above.  */
static void
tree_ssa_cd_dce (void)
{
  perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
}
874 | ||
/* Gate shared by both DCE passes: run only when -ftree-dce is enabled.  */
static bool
gate_dce (void)
{
  return flag_tree_dce != 0;
}
880 | ||
/* Pass descriptor for conservative DCE.  */
struct tree_opt_pass pass_dce =
{
  "dce",				/* name */
  gate_dce,				/* gate */
  tree_ssa_dce,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_DCE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_ggc_collect | TODO_verify_ssa	/* todo_flags_finish */
};
896 | ||
/* Pass descriptor for aggressive (control-dependence) DCE; additionally
   verifies flow info since it rewires the CFG.  */
struct tree_opt_pass pass_cd_dce =
{
  "cddce",				/* name */
  gate_dce,				/* gate */
  tree_ssa_cd_dce,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CD_DCE,			/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_ggc_collect | TODO_verify_ssa | TODO_verify_flow
					/* todo_flags_finish */
};
913 |