]>
Commit | Line | Data |
---|---|---|
6de9cd9a DN |
1 | /* SSA Dominator optimizations for trees |
2 | Copyright (C) 2001, 2002, 2003, 2004 Free Software Foundation, Inc. | |
3 | Contributed by Diego Novillo <dnovillo@redhat.com> | |
4 | ||
5 | This file is part of GCC. | |
6 | ||
7 | GCC is free software; you can redistribute it and/or modify | |
8 | it under the terms of the GNU General Public License as published by | |
9 | the Free Software Foundation; either version 2, or (at your option) | |
10 | any later version. | |
11 | ||
12 | GCC is distributed in the hope that it will be useful, | |
13 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
15 | GNU General Public License for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
18 | along with GCC; see the file COPYING. If not, write to | |
19 | the Free Software Foundation, 59 Temple Place - Suite 330, | |
20 | Boston, MA 02111-1307, USA. */ | |
21 | ||
22 | #include "config.h" | |
23 | #include "system.h" | |
24 | #include "coretypes.h" | |
25 | #include "tm.h" | |
26 | #include "tree.h" | |
27 | #include "flags.h" | |
28 | #include "rtl.h" | |
29 | #include "tm_p.h" | |
30 | #include "ggc.h" | |
31 | #include "basic-block.h" | |
32 | #include "output.h" | |
33 | #include "errors.h" | |
34 | #include "expr.h" | |
35 | #include "function.h" | |
36 | #include "diagnostic.h" | |
37 | #include "timevar.h" | |
38 | #include "tree-dump.h" | |
39 | #include "tree-flow.h" | |
40 | #include "domwalk.h" | |
41 | #include "real.h" | |
42 | #include "tree-pass.h" | |
6de9cd9a DN |
43 | #include "langhooks.h" |
44 | ||
/* This file implements optimizations on the dominator tree.  */

/* Hash table with expressions made available during the renaming process.
   When an assignment of the form X_i = EXPR is found, the statement is
   stored in this table.  If the same expression EXPR is later found on the
   RHS of another statement, it is replaced with X_i (thus performing
   global redundancy elimination).  Similarly as we pass through conditionals
   we record the conditional itself as having either a true or false value
   in this table.  */
static htab_t avail_exprs;

/* Structure for entries in the expression hash table.

   This requires more memory for the hash table entries, but allows us
   to avoid creating silly tree nodes and annotations for conditionals,
   eliminates 2 global hash tables and two block local varrays.

   It also allows us to reduce the number of hash table lookups we
   have to perform in lookup_avail_expr and finally it allows us to
   significantly reduce the number of calls into the hashing routine
   itself.  */
struct expr_hash_elt
{
  /* The value (lhs) of this expression.  */
  tree lhs;

  /* The expression (rhs) we want to record.  */
  tree rhs;

  /* The annotation if this element corresponds to a statement.  */
  stmt_ann_t ann;

  /* The hash value for RHS/ann.  Cached so it need only be computed
     once per entry.  */
  hashval_t hash;
};

/* Table of constant values and copies indexed by SSA name.  When the
   renaming pass finds an assignment of a constant (X_i = C) or a copy
   assignment from another SSA variable (X_i = Y_j), it creates a mapping
   between X_i and the RHS in this table.  This mapping is used later on,
   when renaming uses of X_i.  If an assignment to X_i is found in this
   table, instead of using X_i, we use the RHS of the statement stored in
   this table (thus performing very simplistic copy and constant
   propagation).  */
static varray_type const_and_copies;

/* Bitmap of SSA_NAMEs known to have a nonzero value, even if we do not
   know their exact value.  */
static bitmap nonzero_vars;

/* Track whether or not we have changed the control flow graph.  */
static bool cfg_altered;

/* Statistics for dominator optimizations.  */
struct opt_stats_d
{
  /* Number of statements walked by the optimizer.  */
  long num_stmts;

  /* Number of expressions considered for redundancy elimination.  */
  long num_exprs_considered;

  /* Number of redundant expressions actually eliminated.  */
  long num_re;
};

/* Value range propagation record.  Each time we encounter a conditional
   of the form SSA_NAME COND CONST we create a new vrp_element to record
   how the condition affects the possible values SSA_NAME may have.

   Each record contains the condition tested (COND), and the range of
   values the variable may legitimately have if COND is true.  Note the
   range of values may be a smaller range than COND specifies if we have
   recorded other ranges for this variable.  Each record also contains the
   block in which the range was recorded for invalidation purposes.

   Note that the current known range is computed lazily.  This allows us
   to avoid the overhead of computing ranges which are never queried.

   When we encounter a conditional, we look for records which constrain
   the SSA_NAME used in the condition.  In some cases those records allow
   us to determine the condition's result at compile time.  In other cases
   they may allow us to simplify the condition.

   We also use value ranges to do things like transform signed div/mod
   operations into unsigned div/mod or to simplify ABS_EXPRs.

   Simple experiments have shown these optimizations to not be all that
   useful on switch statements (much to my surprise).  So switch statement
   optimizations are not performed.

   Note carefully we do not propagate information through each statement
   in the block.  ie, if we know variable X has a value defined of
   [0, 25] and we encounter Y = X + 1, we do not track a value range
   for Y (which would be [1, 26] if we cared).  Similarly we do not
   constrain values as we encounter narrowing typecasts, etc.  */

struct vrp_element
{
  /* The highest and lowest values the variable in COND may contain when
     COND is true.  Note this may not necessarily be the same values
     tested by COND if the same variable was used in earlier conditionals.

     Note this is computed lazily and thus can be NULL indicating that
     the values have not been computed yet.  */
  tree low;
  tree high;

  /* The actual conditional we recorded.  This is needed since we compute
     ranges lazily.  */
  tree cond;

  /* The basic block where this record was created.  We use this to determine
     when to remove records.  */
  basic_block bb;
};

/* Accumulated statistics, dumped when TDF_STATS is requested.  */
static struct opt_stats_d opt_stats;

/* This virtual array holds pairs of edges which describe a scheduled
   edge redirection from jump threading.

   The first entry in each pair is the edge we are going to redirect.

   The second entry in each pair is the edge leading to our final
   destination block.  By providing this as an edge rather than the
   final target block itself we can correctly handle redirections
   when the target block had PHIs which required edge insertions/splitting
   to remove the PHIs.  */
static GTY(()) varray_type redirection_edges;

/* A virtual array holding value range records for the variable identified
   by the index, SSA_VERSION.  */
static varray_type vrp_data;

/* Datastructure for block local data used during the dominator walk.
   We maintain a stack of these as we recursively walk down the
   dominator tree.  */

struct dom_walk_block_data
{
  /* Array of all the expressions entered into the global expression
     hash table by this block.  During finalization we use this array to
     know what expressions to remove from the global expression hash
     table.  */
  varray_type avail_exprs;

  /* Array of dest, src pairs that need to be restored into the global
     const/copies table during finalization.  */
  varray_type const_and_copies;

  /* Similarly for the nonzero state of variables that needs to be
     restored during finalization.  */
  varray_type nonzero_vars;

  /* Array of statements we need to rescan during finalization for newly
     exposed variables.  */
  varray_type stmts_to_rescan;

  /* Array of variables which have their values constrained by operations
     in this basic block.  We use this during finalization to know
     which variables need their VRP data updated.  */
  varray_type vrp_variables;

  /* Array of tree pairs used to restore the global currdefs to its
     original state after completing optimization of a block and its
     dominator children.  */
  varray_type block_defs;
};

/* Pair of trees used as the return value of get_eq_expr_value,
   naming a source value and the destination it is equivalent to.  */
struct eq_expr_value
{
  tree src;
  tree dst;
};
215 | ||
/* Local functions.  Forward declarations for the dominator-walk
   callbacks and helpers defined below.  */
static void optimize_stmt (struct dom_walk_data *,
			   basic_block bb,
			   block_stmt_iterator);
static inline tree get_value_for (tree, varray_type table);
static inline void set_value_for (tree, tree, varray_type table);
static tree lookup_avail_expr (tree, varray_type *, bool);
static struct eq_expr_value get_eq_expr_value (tree, int, varray_type *,
					       basic_block, varray_type *);
static hashval_t avail_expr_hash (const void *);
static hashval_t real_avail_expr_hash (const void *);
static int avail_expr_eq (const void *, const void *);
static void htab_statistics (FILE *, htab_t);
static void record_cond (tree, tree, varray_type *);
static void record_dominating_conditions (tree, varray_type *);
static void record_const_or_copy (tree, tree, varray_type *);
static void record_equality (tree, tree, varray_type *);
static tree update_rhs_and_lookup_avail_expr (tree, tree, varray_type *,
					      stmt_ann_t, bool);
static tree simplify_rhs_and_lookup_avail_expr (struct dom_walk_data *,
						tree, stmt_ann_t, int);
static tree simplify_cond_and_lookup_avail_expr (tree, varray_type *,
						 stmt_ann_t, int);
static tree simplify_switch_and_lookup_avail_expr (tree, varray_type *,
						   stmt_ann_t, int);
static tree find_equivalent_equality_comparison (tree);
static void record_range (tree, basic_block, varray_type *);
static bool extract_range_from_cond (tree, tree *, tree *, int *);
static void record_equivalences_from_phis (struct dom_walk_data *, basic_block);
static void record_equivalences_from_incoming_edge (struct dom_walk_data *,
						    basic_block);
static bool eliminate_redundant_computations (struct dom_walk_data *,
					      tree, stmt_ann_t);
static void record_equivalences_from_stmt (tree, varray_type *, varray_type *,
					   int, stmt_ann_t);
static void thread_across_edge (struct dom_walk_data *, edge);
static void dom_opt_finalize_block (struct dom_walk_data *, basic_block);
static void dom_opt_initialize_block_local_data (struct dom_walk_data *,
						 basic_block, bool);
static void dom_opt_initialize_block (struct dom_walk_data *, basic_block);
static void cprop_into_phis (struct dom_walk_data *, basic_block);
static void remove_local_expressions_from_table (varray_type locals,
						 unsigned limit,
						 htab_t table);
static void restore_vars_to_original_value (varray_type locals,
					    unsigned limit,
					    varray_type table);
static void restore_currdefs_to_original_value (varray_type locals,
						unsigned limit);
static void register_definitions_for_stmt (stmt_ann_t, varray_type *);
static void redirect_edges_and_update_ssa_graph (varray_type);
268 | /* Local version of fold that doesn't introduce cruft. */ | |
269 | ||
270 | static tree | |
271 | local_fold (tree t) | |
272 | { | |
273 | t = fold (t); | |
274 | ||
275 | /* Strip away useless type conversions. Both the NON_LVALUE_EXPR that | |
276 | may have been added by fold, and "useless" type conversions that might | |
277 | now be apparent due to propagation. */ | |
278 | STRIP_MAIN_TYPE_NOPS (t); | |
279 | STRIP_USELESS_TYPE_CONVERSION (t); | |
280 | ||
281 | return t; | |
282 | } | |
283 | ||
284 | /* Return the value associated with variable VAR in TABLE. */ | |
285 | ||
286 | static inline tree | |
287 | get_value_for (tree var, varray_type table) | |
288 | { | |
289 | return VARRAY_TREE (table, SSA_NAME_VERSION (var)); | |
290 | } | |
291 | ||
292 | /* Associate VALUE to variable VAR in TABLE. */ | |
293 | ||
294 | static inline void | |
295 | set_value_for (tree var, tree value, varray_type table) | |
296 | { | |
297 | VARRAY_TREE (table, SSA_NAME_VERSION (var)) = value; | |
298 | } | |
299 | ||
/* REDIRECTION_EDGES contains edge pairs where we want to revector the
   destination of the first edge to the destination of the second edge.

   These redirections may significantly change the SSA graph since we
   allow redirection through blocks with PHI nodes and blocks with
   real instructions in some cases.

   This routine will perform the requested redirections and incrementally
   update the SSA graph.

   Note in some cases requested redirections may be ignored as they can
   not be safely implemented.  */

static void
redirect_edges_and_update_ssa_graph (varray_type redirection_edges)
{
  basic_block tgt, bb;
  tree phi;
  unsigned int i;
  size_t old_num_referenced_vars = num_referenced_vars;
  bitmap virtuals_to_rename = BITMAP_XMALLOC ();

  /* First note any variables which we are going to have to take
     out of SSA form as well as any virtuals which need updating.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (redirection_edges); i += 2)
    {
      block_stmt_iterator bsi;
      edge e;
      basic_block tgt;
      tree phi;

      e = VARRAY_EDGE (redirection_edges, i);
      tgt = VARRAY_EDGE (redirection_edges, i + 1)->dest;

      /* All variables referenced in PHI nodes we bypass must be
	 renamed.  */
      for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
	{
	  tree result = SSA_NAME_VAR (PHI_RESULT (phi));

	  /* Real operands go through the normal out-of-SSA machinery;
	     virtual operands are collected separately so their (dead)
	     PHIs can be removed before re-renaming.  */
	  if (is_gimple_reg (PHI_RESULT (phi)))
	    bitmap_set_bit (vars_to_rename, var_ann (result)->uid);
	  else
	    bitmap_set_bit (virtuals_to_rename, var_ann (result)->uid);
	}

      /* Any variables set by statements at the start of the block we
	 are bypassing must also be taken out of SSA form.  */
      for (bsi = bsi_start (e->dest); ! bsi_end_p (bsi); bsi_next (&bsi))
	{
	  unsigned int j;
	  def_optype defs;
	  v_may_def_optype v_may_defs;
	  v_must_def_optype v_must_defs;
	  tree stmt = bsi_stmt (bsi);
	  stmt_ann_t ann = stmt_ann (stmt);

	  /* The COND_EXPR terminates the run of bypassed statements.  */
	  if (TREE_CODE (stmt) == COND_EXPR)
	    break;

	  get_stmt_operands (stmt);

	  /* Mark every real definition for renaming.  */
	  defs = DEF_OPS (ann);
	  for (j = 0; j < NUM_DEFS (defs); j++)
	    {
	      tree op = SSA_NAME_VAR (DEF_OP (defs, j));
	      bitmap_set_bit (vars_to_rename, var_ann (op)->uid);
	    }

	  /* Likewise for V_MAY_DEF results.  */
	  v_may_defs = STMT_V_MAY_DEF_OPS (stmt);
	  for (j = 0; j < NUM_V_MAY_DEFS (v_may_defs); j++)
	    {
	      tree op = V_MAY_DEF_RESULT (v_may_defs, j);
	      bitmap_set_bit (vars_to_rename, var_ann (op)->uid);
	    }

	  /* Likewise for V_MUST_DEF operands.  */
	  v_must_defs = STMT_V_MUST_DEF_OPS (stmt);
	  for (j = 0; j < NUM_V_MUST_DEFS (v_must_defs); j++)
	    {
	      tree op = V_MUST_DEF_OP (v_must_defs, j);
	      bitmap_set_bit (vars_to_rename, var_ann (op)->uid);
	    }
	}

      /* Finally, any variables in PHI nodes at our final destination
         must also be taken out of SSA form.  */
      for (phi = phi_nodes (tgt); phi; phi = PHI_CHAIN (phi))
	{
	  tree result = SSA_NAME_VAR (PHI_RESULT (phi));

	  if (is_gimple_reg (PHI_RESULT (phi)))
	    bitmap_set_bit (vars_to_rename, var_ann (result)->uid);
	  else
	    bitmap_set_bit (virtuals_to_rename, var_ann (result)->uid);
	}
    }

  /* Take those selected variables out of SSA form.  This must be
     done before we start redirecting edges.  */
  if (bitmap_first_set_bit (vars_to_rename) >= 0)
    rewrite_vars_out_of_ssa (vars_to_rename);

  /* The out of SSA translation above may split the edge from
     E->src to E->dest.  This could potentially cause us to lose
     an assignment leading to invalid warnings about uninitialized
     variables or incorrect code.

     Luckily, we can detect this by looking at the last statement
     in E->dest.  If it is not a COND_EXPR or SWITCH_EXPR, then
     the edge was split and instead of E, we want E->dest->succ.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (redirection_edges); i += 2)
    {
      edge e = VARRAY_EDGE (redirection_edges, i);
      tree last = last_stmt (e->dest);

      if (last
	  && TREE_CODE (last) != COND_EXPR
	  && TREE_CODE (last) != SWITCH_EXPR)
	{
	  e = e->dest->succ;

#ifdef ENABLE_CHECKING
	  /* There should only be a single successor if the
	     original edge was split.  */
	  if (e->succ_next)
	    abort ();
#endif
	  /* Replace the edge in REDIRECTION_EDGES for the
	     loop below.  */
	  VARRAY_EDGE (redirection_edges, i) = e;
	}
    }

  /* If we created any new variables as part of the out-of-ssa
     translation, then any jump threads must be invalidated if they
     bypass a block in which we skipped instructions.

     This is necessary as instructions which appeared to be NOPS
     may be necessary after the out-of-ssa translation.  */
  if (num_referenced_vars != old_num_referenced_vars)
    {
      for (i = 0; i < VARRAY_ACTIVE_SIZE (redirection_edges); i += 2)
	{
	  block_stmt_iterator bsi;
	  edge e;

	  e = VARRAY_EDGE (redirection_edges, i);
	  for (bsi = bsi_start (e->dest); ! bsi_end_p (bsi); bsi_next (&bsi))
	    {
	      tree stmt = bsi_stmt (bsi);

	      if (IS_EMPTY_STMT (stmt)
		  || TREE_CODE (stmt) == LABEL_EXPR)
		continue;

	      if (TREE_CODE (stmt) == COND_EXPR)
		break;

	      /* Invalidate the jump thread.  A NULL entry is skipped by
		 the redirection loop below.  */
	      VARRAY_EDGE (redirection_edges, i) = NULL;
	      VARRAY_EDGE (redirection_edges, i + 1) = NULL;
	      break;
	    }
	}
    }

  /* Now redirect the edges.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (redirection_edges); i += 2)
    {
      basic_block src;
      edge e;

      e = VARRAY_EDGE (redirection_edges, i);
      /* Skip threads invalidated above.  */
      if (!e)
	continue;

      tgt = VARRAY_EDGE (redirection_edges, i + 1)->dest;


      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " Threaded jump %d --> %d to %d\n",
		 e->src->index, e->dest->index, tgt->index);

      src = e->src;

      e = redirect_edge_and_branch (e, tgt);
      PENDING_STMT (e) = NULL_TREE;

      /* Updating the dominance information would be nontrivial.  */
      free_dominance_info (CDI_DOMINATORS);

      /* redirect_edge_and_branch may create a new block; note it in
	 the dump if so.  */
      if ((dump_file && (dump_flags & TDF_DETAILS))
	  && e->src != src)
	fprintf (dump_file, " basic block %d created\n",
		 e->src->index);

      cfg_altered = true;
    }

  VARRAY_CLEAR (redirection_edges);

  /* Variables created by the out-of-SSA translation must also be
     re-renamed; clear their out-of-ssa tags so renaming works.  */
  for (i = old_num_referenced_vars; i < num_referenced_vars; i++)
    {
      bitmap_set_bit (vars_to_rename, i);
      var_ann (referenced_var (i))->out_of_ssa_tag = 0;
    }

  bitmap_a_or_b (vars_to_rename, vars_to_rename, virtuals_to_rename);

  /* We must remove any PHIs for virtual variables that we are going to
     re-rename.  Hopefully we'll be able to simply update these incrementally
     soon.  */
  FOR_EACH_BB (bb)
    {
      tree next;

      for (phi = phi_nodes (bb); phi; phi = next)
	{
	  tree result = PHI_RESULT (phi);

	  /* Grab the chain pointer first; removal invalidates PHI.  */
	  next = PHI_CHAIN (phi);

	  if (bitmap_bit_p (virtuals_to_rename,
			    var_ann (SSA_NAME_VAR (result))->uid))
	    remove_phi_node (phi, NULL, bb);
	}
    }
  BITMAP_XFREE (virtuals_to_rename);
}
529 | ||
/* Jump threading, redundancy elimination and const/copy propagation. 

   Optimize function FNDECL based on a walk through the dominator tree.

   This pass may expose new symbols that need to be renamed into SSA.  For
   every new symbol exposed, its corresponding bit will be set in
   VARS_TO_RENAME.

   PHASE indicates which dump file from the DUMP_FILES array to use when
   dumping debugging information.  */

static void
tree_ssa_dominator_optimize (void)
{
  basic_block bb;
  struct dom_walk_data walk_data;
  unsigned int i;

  /* Clear any stale current-definition state left by earlier passes.  */
  for (i = 0; i < num_referenced_vars; i++)
    var_ann (referenced_var (i))->current_def = NULL;

  /* Mark loop edges so we avoid threading across loop boundaries.
     This may result in transforming natural loop into irreducible
     region.  */
  mark_dfs_back_edges ();

  /* Create our hash tables.  */
  avail_exprs = htab_create (1024, real_avail_expr_hash, avail_expr_eq, free);
  VARRAY_TREE_INIT (const_and_copies, num_ssa_names, "const_and_copies");
  nonzero_vars = BITMAP_XMALLOC ();
  VARRAY_EDGE_INIT (redirection_edges, 20, "redirection_edges");
  VARRAY_GENERIC_PTR_INIT (vrp_data, num_ssa_names, "vrp_data");

  /* Setup callbacks for the generic dominator tree walker.  */
  walk_data.walk_stmts_backward = false;
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.initialize_block_local_data = dom_opt_initialize_block_local_data;
  walk_data.before_dom_children_before_stmts = dom_opt_initialize_block;
  walk_data.before_dom_children_walk_stmts = optimize_stmt;
  walk_data.before_dom_children_after_stmts = cprop_into_phis;
  walk_data.after_dom_children_before_stmts = NULL;
  walk_data.after_dom_children_walk_stmts = NULL;
  walk_data.after_dom_children_after_stmts = dom_opt_finalize_block;
  /* Right now we only attach a dummy COND_EXPR to the global data pointer.
     When we attach more stuff we'll need to fill this out with a real
     structure.  */
  walk_data.global_data = NULL;
  walk_data.block_local_data_size = sizeof (struct dom_walk_block_data);

  /* Now initialize the dominator walker.  */
  init_walk_dominator_tree (&walk_data);

  /* Reset block_forwardable in each block's annotation.  We use that
     attribute when threading through COND_EXPRs.  */
  FOR_EACH_BB (bb)
    bb_ann (bb)->forwardable = 1;

  calculate_dominance_info (CDI_DOMINATORS);

  /* If we prove certain blocks are unreachable, then we want to
     repeat the dominator optimization process as PHI nodes may
     have turned into copies which allows better propagation of
     values.  So we repeat until we do not identify any new unreachable
     blocks.  */
  do
    {
      /* Optimize the dominator tree.  */
      cfg_altered = false;

      /* Recursively walk the dominator tree optimizing statements.  */
      walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);

      /* Wipe the hash tables.  */

      /* Apply any jump threads queued during the walk.  */
      if (VARRAY_ACTIVE_SIZE (redirection_edges) > 0)
	redirect_edges_and_update_ssa_graph (redirection_edges);

      /* We may have made some basic blocks unreachable, remove them.  */
      cfg_altered |= delete_unreachable_blocks ();

      /* If the CFG was altered, then recompute the dominator tree.  This
	 is not strictly needed if we only removed unreachable blocks, but
	 may produce better results.  If we threaded jumps, then rebuilding
	 the dominator tree is strictly necessary.  */
      if (cfg_altered)
	{
	  cleanup_tree_cfg ();
	  calculate_dominance_info (CDI_DOMINATORS);
	}

      /* If we are going to iterate (CFG_ALTERED is true), then we must
	 perform any queued renaming before the next iteration.  */
      if (cfg_altered
	  && bitmap_first_set_bit (vars_to_rename) >= 0)
	{
	  rewrite_into_ssa ();
	  bitmap_clear (vars_to_rename);

	  /* The into SSA translation may have created new SSA_NAMES which
	     affect the size of CONST_AND_COPIES and VRP_DATA.  */
	  VARRAY_GROW (const_and_copies, num_ssa_names);
	  VARRAY_GROW (vrp_data, num_ssa_names);
	}

      /* Reinitialize the various tables.  */
      bitmap_clear (nonzero_vars);
      htab_empty (avail_exprs);
      VARRAY_CLEAR (const_and_copies);
      VARRAY_CLEAR (vrp_data);

      for (i = 0; i < num_referenced_vars; i++)
	var_ann (referenced_var (i))->current_def = NULL;
    }
  while (cfg_altered);

  /* Remove any unreachable blocks left behind and linearize the CFG.  */
  cleanup_tree_cfg ();

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & TDF_STATS))
    dump_dominator_optimization_stats (dump_file);

  /* We emptied the hash table earlier, now delete it completely.  */
  htab_delete (avail_exprs);

  /* It is not necessary to clear CURRDEFS, REDIRECTION_EDGES, VRP_DATA,
     CONST_AND_COPIES, and NONZERO_VARS as they all get cleared at the bottom
     of the do-while loop above.  */

  /* And finalize the dominator walker.  */
  fini_walk_dominator_tree (&walk_data);

  /* Free nonzero_vars.  */
  BITMAP_XFREE (nonzero_vars);
}
665 | ||
666 | static bool | |
667 | gate_dominator (void) | |
668 | { | |
669 | return flag_tree_dom != 0; | |
670 | } | |
671 | ||
/* Pass descriptor for the tree SSA dominator optimization pass.  */

struct tree_opt_pass pass_dominator = 
{
  "dom",				/* name */
  gate_dominator,			/* gate */
  tree_ssa_dominator_optimize,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SSA_DOMINATOR_OPTS,		/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_rename_vars
    | TODO_verify_ssa			/* todo_flags_finish */
};
688 | ||
689 | ||
690 | /* We are exiting BB, see if the target block begins with a conditional | |
691 | jump which has a known value when reached via BB. */ | |
692 | ||
693 | static void | |
694 | thread_across_edge (struct dom_walk_data *walk_data, edge e) | |
695 | { | |
696 | struct dom_walk_block_data *bd | |
697 | = VARRAY_TOP_GENERIC_PTR (walk_data->block_data_stack); | |
698 | block_stmt_iterator bsi; | |
699 | tree stmt = NULL; | |
700 | tree phi; | |
701 | ||
702 | /* Each PHI creates a temporary equivalence, record them. */ | |
17192884 | 703 | for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi)) |
6de9cd9a | 704 | { |
d00ad49b | 705 | tree src = PHI_ARG_DEF_FROM_EDGE (phi, e); |
6de9cd9a DN |
706 | tree dst = PHI_RESULT (phi); |
707 | record_const_or_copy (dst, src, &bd->const_and_copies); | |
708 | register_new_def (dst, &bd->block_defs); | |
709 | } | |
710 | ||
711 | for (bsi = bsi_start (e->dest); ! bsi_end_p (bsi); bsi_next (&bsi)) | |
712 | { | |
713 | tree lhs, cached_lhs; | |
714 | ||
715 | stmt = bsi_stmt (bsi); | |
716 | ||
717 | /* Ignore empty statements and labels. */ | |
718 | if (IS_EMPTY_STMT (stmt) || TREE_CODE (stmt) == LABEL_EXPR) | |
719 | continue; | |
720 | ||
721 | /* If this is not a MODIFY_EXPR which sets an SSA_NAME to a new | |
722 | value, then stop our search here. Ideally when we stop a | |
723 | search we stop on a COND_EXPR or SWITCH_EXPR. */ | |
724 | if (TREE_CODE (stmt) != MODIFY_EXPR | |
725 | || TREE_CODE (TREE_OPERAND (stmt, 0)) != SSA_NAME) | |
726 | break; | |
727 | ||
728 | /* At this point we have a statement which assigns an RHS to an | |
729 | SSA_VAR on the LHS. We want to prove that the RHS is already | |
730 | available and that its value is held in the current definition | |
731 | of the LHS -- meaning that this assignment is a NOP when | |
732 | reached via edge E. */ | |
733 | if (TREE_CODE (TREE_OPERAND (stmt, 1)) == SSA_NAME) | |
734 | cached_lhs = TREE_OPERAND (stmt, 1); | |
735 | else | |
736 | cached_lhs = lookup_avail_expr (stmt, NULL, false); | |
737 | ||
738 | lhs = TREE_OPERAND (stmt, 0); | |
739 | ||
740 | /* This can happen if we thread around to the start of a loop. */ | |
741 | if (lhs == cached_lhs) | |
742 | break; | |
743 | ||
744 | /* If we did not find RHS in the hash table, then try again after | |
745 | temporarily const/copy propagating the operands. */ | |
746 | if (!cached_lhs) | |
747 | { | |
748 | /* Copy the operands. */ | |
749 | stmt_ann_t ann = stmt_ann (stmt); | |
750 | use_optype uses = USE_OPS (ann); | |
751 | vuse_optype vuses = VUSE_OPS (ann); | |
752 | tree *uses_copy = xcalloc (NUM_USES (uses), sizeof (tree)); | |
753 | tree *vuses_copy = xcalloc (NUM_VUSES (vuses), sizeof (tree)); | |
754 | unsigned int i; | |
755 | ||
756 | /* Make a copy of the uses into USES_COPY, then cprop into | |
757 | the use operands. */ | |
758 | for (i = 0; i < NUM_USES (uses); i++) | |
759 | { | |
760 | tree tmp = NULL; | |
761 | ||
762 | uses_copy[i] = USE_OP (uses, i); | |
763 | if (TREE_CODE (USE_OP (uses, i)) == SSA_NAME) | |
764 | tmp = get_value_for (USE_OP (uses, i), const_and_copies); | |
765 | if (tmp) | |
d00ad49b | 766 | SET_USE_OP (uses, i, tmp); |
6de9cd9a DN |
767 | } |
768 | ||
769 | /* Similarly for virtual uses. */ | |
770 | for (i = 0; i < NUM_VUSES (vuses); i++) | |
771 | { | |
772 | tree tmp = NULL; | |
773 | ||
774 | vuses_copy[i] = VUSE_OP (vuses, i); | |
775 | if (TREE_CODE (VUSE_OP (vuses, i)) == SSA_NAME) | |
776 | tmp = get_value_for (VUSE_OP (vuses, i), const_and_copies); | |
777 | if (tmp) | |
d00ad49b | 778 | SET_VUSE_OP (vuses, i, tmp); |
6de9cd9a DN |
779 | } |
780 | ||
781 | /* Try to lookup the new expression. */ | |
782 | cached_lhs = lookup_avail_expr (stmt, NULL, false); | |
783 | ||
784 | /* Restore the statement's original uses/defs. */ | |
785 | for (i = 0; i < NUM_USES (uses); i++) | |
d00ad49b | 786 | SET_USE_OP (uses, i, uses_copy[i]); |
6de9cd9a DN |
787 | |
788 | for (i = 0; i < NUM_VUSES (vuses); i++) | |
d00ad49b | 789 | SET_VUSE_OP (vuses, i, vuses_copy[i]); |
6de9cd9a DN |
790 | |
791 | free (uses_copy); | |
792 | free (vuses_copy); | |
793 | ||
794 | /* If we still did not find the expression in the hash table, | |
795 | then we can not ignore this statement. */ | |
796 | if (! cached_lhs) | |
797 | break; | |
798 | } | |
799 | ||
800 | /* If the expression in the hash table was not assigned to an | |
801 | SSA_NAME, then we can not ignore this statement. */ | |
802 | if (TREE_CODE (cached_lhs) != SSA_NAME) | |
803 | break; | |
804 | ||
805 | /* If we have different underlying variables, then we can not | |
806 | ignore this statement. */ | |
807 | if (SSA_NAME_VAR (cached_lhs) != SSA_NAME_VAR (lhs)) | |
808 | break; | |
809 | ||
810 | /* If CACHED_LHS does not represent the current value of the underlying | |
811 | variable in CACHED_LHS/LHS, then we can not ignore this statement. */ | |
812 | if (var_ann (SSA_NAME_VAR (lhs))->current_def != cached_lhs) | |
813 | break; | |
814 | ||
815 | /* If we got here, then we can ignore this statement and continue | |
816 | walking through the statements in the block looking for a threadable | |
817 | COND_EXPR. | |
818 | ||
819 | We want to record an equivalence lhs = cache_lhs so that if | |
820 | the result of this statement is used later we can copy propagate | |
821 | suitably. */ | |
822 | record_const_or_copy (lhs, cached_lhs, &bd->const_and_copies); | |
823 | register_new_def (lhs, &bd->block_defs); | |
824 | } | |
825 | ||
826 | /* If we stopped at a COND_EXPR or SWITCH_EXPR, then see if we know which | |
827 | arm will be taken. */ | |
828 | if (stmt | |
829 | && (TREE_CODE (stmt) == COND_EXPR | |
830 | || TREE_CODE (stmt) == SWITCH_EXPR)) | |
831 | { | |
832 | tree cond, cached_lhs; | |
833 | edge e1; | |
834 | ||
835 | /* Do not forward entry edges into the loop. In the case loop | |
836 | has multiple entry edges we may end up in constructing irreducible | |
837 | region. | |
838 | ??? We may consider forwarding the edges in the case all incoming | |
839 | edges forward to the same destination block. */ | |
840 | if (!e->flags & EDGE_DFS_BACK) | |
841 | { | |
842 | for (e1 = e->dest->pred; e; e = e->pred_next) | |
843 | if (e1->flags & EDGE_DFS_BACK) | |
844 | break; | |
845 | if (e1) | |
846 | return; | |
847 | } | |
848 | ||
849 | /* Now temporarily cprop the operands and try to find the resulting | |
850 | expression in the hash tables. */ | |
851 | if (TREE_CODE (stmt) == COND_EXPR) | |
852 | cond = COND_EXPR_COND (stmt); | |
853 | else | |
854 | cond = SWITCH_COND (stmt); | |
855 | ||
856 | if (TREE_CODE_CLASS (TREE_CODE (cond)) == '<') | |
857 | { | |
858 | tree dummy_cond, op0, op1; | |
859 | enum tree_code cond_code; | |
860 | ||
861 | op0 = TREE_OPERAND (cond, 0); | |
862 | op1 = TREE_OPERAND (cond, 1); | |
863 | cond_code = TREE_CODE (cond); | |
864 | ||
865 | /* Get the current value of both operands. */ | |
866 | if (TREE_CODE (op0) == SSA_NAME) | |
867 | { | |
868 | tree tmp = get_value_for (op0, const_and_copies); | |
869 | if (tmp) | |
870 | op0 = tmp; | |
871 | } | |
872 | ||
873 | if (TREE_CODE (op1) == SSA_NAME) | |
874 | { | |
875 | tree tmp = get_value_for (op1, const_and_copies); | |
876 | if (tmp) | |
877 | op1 = tmp; | |
878 | } | |
879 | ||
880 | /* Stuff the operator and operands into our dummy conditional | |
881 | expression, creating the dummy conditional if necessary. */ | |
882 | dummy_cond = walk_data->global_data; | |
883 | if (! dummy_cond) | |
884 | { | |
885 | dummy_cond = build (cond_code, boolean_type_node, op0, op1); | |
886 | dummy_cond = build (COND_EXPR, void_type_node, | |
887 | dummy_cond, NULL, NULL); | |
888 | walk_data->global_data = dummy_cond; | |
889 | } | |
890 | else | |
891 | { | |
892 | TREE_SET_CODE (TREE_OPERAND (dummy_cond, 0), cond_code); | |
893 | TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 0) = op0; | |
894 | TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 1) = op1; | |
895 | } | |
896 | ||
897 | /* If the conditional folds to an invariant, then we are done, | |
898 | otherwise look it up in the hash tables. */ | |
899 | cached_lhs = local_fold (COND_EXPR_COND (dummy_cond)); | |
900 | if (! is_gimple_min_invariant (cached_lhs)) | |
901 | cached_lhs = lookup_avail_expr (dummy_cond, NULL, false); | |
902 | if (!cached_lhs || ! is_gimple_min_invariant (cached_lhs)) | |
903 | { | |
904 | stmt_ann_t ann = get_stmt_ann (dummy_cond); | |
905 | cached_lhs = simplify_cond_and_lookup_avail_expr (dummy_cond, | |
906 | NULL, | |
907 | ann, | |
908 | false); | |
909 | } | |
910 | } | |
911 | /* We can have conditionals which just test the state of a | |
912 | variable rather than use a relational operator. These are | |
913 | simpler to handle. */ | |
914 | else if (TREE_CODE (cond) == SSA_NAME) | |
915 | { | |
916 | cached_lhs = cond; | |
917 | cached_lhs = get_value_for (cached_lhs, const_and_copies); | |
918 | if (cached_lhs && ! is_gimple_min_invariant (cached_lhs)) | |
919 | cached_lhs = 0; | |
920 | } | |
921 | else | |
922 | cached_lhs = lookup_avail_expr (stmt, NULL, false); | |
923 | ||
924 | if (cached_lhs) | |
925 | { | |
926 | edge taken_edge = find_taken_edge (e->dest, cached_lhs); | |
927 | basic_block dest = (taken_edge ? taken_edge->dest : NULL); | |
928 | ||
8a78744f | 929 | if (dest == e->dest) |
6de9cd9a DN |
930 | return; |
931 | ||
932 | /* If we have a known destination for the conditional, then | |
933 | we can perform this optimization, which saves at least one | |
934 | conditional jump each time it applies since we get to | |
935 | bypass the conditional at our original destination. | |
936 | ||
937 | Note that we can either thread through a block with PHIs | |
938 | or to a block with PHIs, but not both. At this time the | |
939 | bookkeeping to keep the CFG & SSA up-to-date has proven | |
940 | difficult. */ | |
941 | if (dest) | |
942 | { | |
943 | int saved_forwardable = bb_ann (e->src)->forwardable; | |
944 | edge tmp_edge; | |
945 | ||
946 | bb_ann (e->src)->forwardable = 0; | |
947 | tmp_edge = tree_block_forwards_to (dest); | |
948 | taken_edge = (tmp_edge ? tmp_edge : taken_edge); | |
949 | bb_ann (e->src)->forwardable = saved_forwardable; | |
950 | VARRAY_PUSH_EDGE (redirection_edges, e); | |
951 | VARRAY_PUSH_EDGE (redirection_edges, taken_edge); | |
952 | } | |
953 | } | |
954 | } | |
955 | } | |
956 | ||
957 | ||
958 | /* Initialize the local stacks. | |
959 | ||
960 | AVAIL_EXPRS stores all the expressions made available in this block. | |
961 | ||
962 | CONST_AND_COPIES stores var/value pairs to restore at the end of this | |
963 | block. | |
964 | ||
965 | NONZERO_VARS stores the vars which have a nonzero value made in this | |
966 | block. | |
967 | ||
968 | STMTS_TO_RESCAN is a list of statements we will rescan for operands. | |
969 | ||
970 | VRP_VARIABLES is the list of variables which have had their values | |
971 | constrained by an operation in this block. | |
972 | ||
973 | These stacks are cleared in the finalization routine run for each | |
974 | block. */ | |
975 | ||
976 | static void | |
977 | dom_opt_initialize_block_local_data (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED, | |
978 | basic_block bb ATTRIBUTE_UNUSED, | |
979 | bool recycled ATTRIBUTE_UNUSED) | |
980 | { | |
981 | #ifdef ENABLE_CHECKING | |
982 | struct dom_walk_block_data *bd | |
983 | = (struct dom_walk_block_data *)VARRAY_TOP_GENERIC_PTR (walk_data->block_data_stack); | |
984 | ||
985 | /* We get cleared memory from the allocator, so if the memory is not | |
986 | cleared, then we are re-using a previously allocated entry. In | |
987 | that case, we can also re-use the underlying virtual arrays. Just | |
988 | make sure we clear them before using them! */ | |
989 | if (recycled) | |
990 | { | |
991 | if (bd->avail_exprs && VARRAY_ACTIVE_SIZE (bd->avail_exprs) > 0) | |
992 | abort (); | |
993 | if (bd->const_and_copies && VARRAY_ACTIVE_SIZE (bd->const_and_copies) > 0) | |
994 | abort (); | |
995 | if (bd->nonzero_vars && VARRAY_ACTIVE_SIZE (bd->nonzero_vars) > 0) | |
996 | abort (); | |
997 | if (bd->stmts_to_rescan && VARRAY_ACTIVE_SIZE (bd->stmts_to_rescan) > 0) | |
998 | abort (); | |
999 | if (bd->vrp_variables && VARRAY_ACTIVE_SIZE (bd->vrp_variables) > 0) | |
1000 | abort (); | |
1001 | if (bd->block_defs && VARRAY_ACTIVE_SIZE (bd->block_defs) > 0) | |
1002 | abort (); | |
1003 | } | |
1004 | #endif | |
1005 | } | |
1006 | ||
1007 | /* Initialize local stacks for this optimizer and record equivalences | |
1008 | upon entry to BB. Equivalences can come from the edge traversed to | |
1009 | reach BB or they may come from PHI nodes at the start of BB. */ | |
1010 | ||
1011 | static void | |
1012 | dom_opt_initialize_block (struct dom_walk_data *walk_data, basic_block bb) | |
1013 | { | |
1014 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
1015 | fprintf (dump_file, "\n\nOptimizing block #%d\n\n", bb->index); | |
1016 | ||
1017 | record_equivalences_from_incoming_edge (walk_data, bb); | |
1018 | ||
1019 | /* PHI nodes can create equivalences too. */ | |
1020 | record_equivalences_from_phis (walk_data, bb); | |
1021 | } | |
1022 | ||
1023 | /* Given an expression EXPR (a relational expression or a statement), | |
1024 | initialize the hash table element pointed to by ELEMENT. */ | |
1025 | ||
1026 | static void | |
1027 | initialize_hash_element (tree expr, tree lhs, struct expr_hash_elt *element) | |
1028 | { | |
1029 | /* Hash table elements may be based on conditional expressions or statements. | |
1030 | ||
1031 | For the former case, we have no annotation and we want to hash the | |
1032 | conditional expression. In the latter case we have an annotation and | |
1033 | we want to record the expression the statement evaluates. */ | |
1034 | if (TREE_CODE_CLASS (TREE_CODE (expr)) == '<' | |
1035 | || TREE_CODE (expr) == TRUTH_NOT_EXPR) | |
1036 | { | |
1037 | element->ann = NULL; | |
1038 | element->rhs = expr; | |
1039 | } | |
1040 | else if (TREE_CODE (expr) == COND_EXPR) | |
1041 | { | |
1042 | element->ann = stmt_ann (expr); | |
1043 | element->rhs = COND_EXPR_COND (expr); | |
1044 | } | |
1045 | else if (TREE_CODE (expr) == SWITCH_EXPR) | |
1046 | { | |
1047 | element->ann = stmt_ann (expr); | |
1048 | element->rhs = SWITCH_COND (expr); | |
1049 | } | |
1050 | else if (TREE_CODE (expr) == RETURN_EXPR && TREE_OPERAND (expr, 0)) | |
1051 | { | |
1052 | element->ann = stmt_ann (expr); | |
1053 | element->rhs = TREE_OPERAND (TREE_OPERAND (expr, 0), 1); | |
1054 | } | |
1055 | else | |
1056 | { | |
1057 | element->ann = stmt_ann (expr); | |
1058 | element->rhs = TREE_OPERAND (expr, 1); | |
1059 | } | |
1060 | ||
1061 | element->lhs = lhs; | |
1062 | element->hash = avail_expr_hash (element); | |
1063 | } | |
1064 | ||
1065 | /* Remove all the expressions in LOCALS from TABLE, stopping when there are | |
1066 | LIMIT entries left in LOCALs. */ | |
1067 | ||
1068 | static void | |
1069 | remove_local_expressions_from_table (varray_type locals, | |
1070 | unsigned limit, | |
1071 | htab_t table) | |
1072 | { | |
1073 | if (! locals) | |
1074 | return; | |
1075 | ||
1076 | /* Remove all the expressions made available in this block. */ | |
1077 | while (VARRAY_ACTIVE_SIZE (locals) > limit) | |
1078 | { | |
1079 | struct expr_hash_elt element; | |
1080 | tree expr = VARRAY_TOP_TREE (locals); | |
1081 | VARRAY_POP (locals); | |
1082 | ||
1083 | initialize_hash_element (expr, NULL, &element); | |
1084 | htab_remove_elt_with_hash (table, &element, element.hash); | |
1085 | } | |
1086 | } | |
1087 | ||
1088 | /* Use the SSA_NAMES in LOCALS to restore TABLE to its original | |
1ea7e6ad | 1089 | state, stopping when there are LIMIT entries left in LOCALs. */ |
6de9cd9a DN |
1090 | |
1091 | static void | |
1092 | restore_nonzero_vars_to_original_value (varray_type locals, | |
1093 | unsigned limit, | |
1094 | bitmap table) | |
1095 | { | |
1096 | if (!locals) | |
1097 | return; | |
1098 | ||
1099 | while (VARRAY_ACTIVE_SIZE (locals) > limit) | |
1100 | { | |
1101 | tree name = VARRAY_TOP_TREE (locals); | |
1102 | VARRAY_POP (locals); | |
1103 | bitmap_clear_bit (table, SSA_NAME_VERSION (name)); | |
1104 | } | |
1105 | } | |
1106 | ||
1107 | /* Use the source/dest pairs in LOCALS to restore TABLE to its original | |
1ea7e6ad | 1108 | state, stopping when there are LIMIT entries left in LOCALs. */ |
6de9cd9a DN |
1109 | |
1110 | static void | |
1111 | restore_vars_to_original_value (varray_type locals, | |
1112 | unsigned limit, | |
1113 | varray_type table) | |
1114 | { | |
1115 | if (! locals) | |
1116 | return; | |
1117 | ||
1118 | while (VARRAY_ACTIVE_SIZE (locals) > limit) | |
1119 | { | |
1120 | tree prev_value, dest; | |
1121 | ||
1122 | prev_value = VARRAY_TOP_TREE (locals); | |
1123 | VARRAY_POP (locals); | |
1124 | dest = VARRAY_TOP_TREE (locals); | |
1125 | VARRAY_POP (locals); | |
1126 | ||
1127 | set_value_for (dest, prev_value, table); | |
1128 | } | |
1129 | } | |
1130 | ||
1131 | /* Similar to restore_vars_to_original_value, except that it restores | |
1132 | CURRDEFS to its original value. */ | |
1133 | static void | |
1134 | restore_currdefs_to_original_value (varray_type locals, unsigned limit) | |
1135 | { | |
1136 | if (!locals) | |
1137 | return; | |
1138 | ||
1139 | /* Restore CURRDEFS to its original state. */ | |
1140 | while (VARRAY_ACTIVE_SIZE (locals) > limit) | |
1141 | { | |
1142 | tree tmp = VARRAY_TOP_TREE (locals); | |
1143 | tree saved_def, var; | |
1144 | ||
1145 | VARRAY_POP (locals); | |
1146 | ||
1147 | /* If we recorded an SSA_NAME, then make the SSA_NAME the current | |
1148 | definition of its underlying variable. If we recorded anything | |
1149 | else, it must have been an _DECL node and its current reaching | |
1150 | definition must have been NULL. */ | |
1151 | if (TREE_CODE (tmp) == SSA_NAME) | |
1152 | { | |
1153 | saved_def = tmp; | |
1154 | var = SSA_NAME_VAR (saved_def); | |
1155 | } | |
1156 | else | |
1157 | { | |
1158 | saved_def = NULL; | |
1159 | var = tmp; | |
1160 | } | |
1161 | ||
1162 | var_ann (var)->current_def = saved_def; | |
1163 | } | |
1164 | } | |
1165 | ||
1166 | /* We have finished processing the dominator children of BB, perform | |
1167 | any finalization actions in preparation for leaving this node in | |
1168 | the dominator tree. */ | |
1169 | ||
1170 | static void | |
1171 | dom_opt_finalize_block (struct dom_walk_data *walk_data, basic_block bb) | |
1172 | { | |
1173 | struct dom_walk_block_data *bd | |
1174 | = VARRAY_TOP_GENERIC_PTR (walk_data->block_data_stack); | |
1175 | tree last; | |
1176 | ||
1177 | /* If we are at a leaf node in the dominator graph, see if we can thread | |
1178 | the edge from BB through its successor. | |
1179 | ||
1180 | Do this before we remove entries from our equivalence tables. */ | |
1181 | if (bb->succ | |
1182 | && ! bb->succ->succ_next | |
1183 | && (bb->succ->flags & EDGE_ABNORMAL) == 0 | |
1184 | && (get_immediate_dominator (CDI_DOMINATORS, bb->succ->dest) != bb | |
1185 | || phi_nodes (bb->succ->dest))) | |
1186 | ||
1187 | { | |
1188 | thread_across_edge (walk_data, bb->succ); | |
1189 | } | |
1190 | else if ((last = last_stmt (bb)) | |
1191 | && TREE_CODE (last) == COND_EXPR | |
1192 | && (TREE_CODE_CLASS (TREE_CODE (COND_EXPR_COND (last))) == '<' | |
1193 | || TREE_CODE (COND_EXPR_COND (last)) == SSA_NAME) | |
1194 | && bb->succ | |
1195 | && (bb->succ->flags & EDGE_ABNORMAL) == 0 | |
1196 | && bb->succ->succ_next | |
1197 | && (bb->succ->succ_next->flags & EDGE_ABNORMAL) == 0 | |
1198 | && ! bb->succ->succ_next->succ_next) | |
1199 | { | |
1200 | edge true_edge, false_edge; | |
1201 | tree cond, inverted = NULL; | |
1202 | enum tree_code cond_code; | |
1203 | ||
1204 | extract_true_false_edges_from_block (bb, &true_edge, &false_edge); | |
1205 | ||
1206 | cond = COND_EXPR_COND (last); | |
1207 | cond_code = TREE_CODE (cond); | |
1208 | ||
1209 | if (TREE_CODE_CLASS (cond_code) == '<') | |
1210 | inverted = invert_truthvalue (cond); | |
1211 | ||
1212 | /* If the THEN arm is the end of a dominator tree or has PHI nodes, | |
1213 | then try to thread through its edge. */ | |
1214 | if (get_immediate_dominator (CDI_DOMINATORS, true_edge->dest) != bb | |
1215 | || phi_nodes (true_edge->dest)) | |
1216 | { | |
1217 | unsigned avail_expr_limit; | |
1218 | unsigned const_and_copies_limit; | |
1219 | unsigned currdefs_limit; | |
1220 | ||
1221 | avail_expr_limit | |
1222 | = bd->avail_exprs ? VARRAY_ACTIVE_SIZE (bd->avail_exprs) : 0; | |
1223 | const_and_copies_limit | |
1224 | = bd->const_and_copies ? VARRAY_ACTIVE_SIZE (bd->const_and_copies) | |
1225 | : 0; | |
1226 | currdefs_limit | |
1227 | = bd->block_defs ? VARRAY_ACTIVE_SIZE (bd->block_defs) : 0; | |
1228 | ||
1229 | /* Record any equivalences created by following this edge. */ | |
1230 | if (TREE_CODE_CLASS (cond_code) == '<') | |
1231 | { | |
1232 | record_cond (cond, boolean_true_node, &bd->avail_exprs); | |
d2d8936f | 1233 | record_dominating_conditions (cond, &bd->avail_exprs); |
6de9cd9a DN |
1234 | record_cond (inverted, boolean_false_node, &bd->avail_exprs); |
1235 | } | |
1236 | else if (cond_code == SSA_NAME) | |
1237 | record_const_or_copy (cond, boolean_true_node, | |
1238 | &bd->const_and_copies); | |
1239 | ||
1240 | /* Now thread the edge. */ | |
1241 | thread_across_edge (walk_data, true_edge); | |
1242 | ||
1243 | /* And restore the various tables to their state before | |
1244 | we threaded this edge. */ | |
1245 | remove_local_expressions_from_table (bd->avail_exprs, | |
1246 | avail_expr_limit, | |
1247 | avail_exprs); | |
1248 | restore_vars_to_original_value (bd->const_and_copies, | |
1249 | const_and_copies_limit, | |
1250 | const_and_copies); | |
1251 | restore_currdefs_to_original_value (bd->block_defs, currdefs_limit); | |
1252 | } | |
1253 | ||
1254 | /* Similarly for the ELSE arm. */ | |
1255 | if (get_immediate_dominator (CDI_DOMINATORS, false_edge->dest) != bb | |
1256 | || phi_nodes (false_edge->dest)) | |
1257 | { | |
1258 | /* Record any equivalences created by following this edge. */ | |
1259 | if (TREE_CODE_CLASS (cond_code) == '<') | |
1260 | { | |
1261 | record_cond (cond, boolean_false_node, &bd->avail_exprs); | |
1262 | record_cond (inverted, boolean_true_node, &bd->avail_exprs); | |
d2d8936f | 1263 | record_dominating_conditions (inverted, &bd->avail_exprs); |
6de9cd9a DN |
1264 | } |
1265 | else if (cond_code == SSA_NAME) | |
1266 | record_const_or_copy (cond, boolean_false_node, | |
1267 | &bd->const_and_copies); | |
1268 | ||
1269 | thread_across_edge (walk_data, false_edge); | |
1270 | ||
1271 | /* No need to remove local expressions from our tables | |
1272 | or restore vars to their original value as that will | |
1273 | be done immediately below. */ | |
1274 | } | |
1275 | } | |
1276 | ||
1277 | remove_local_expressions_from_table (bd->avail_exprs, 0, avail_exprs); | |
1278 | restore_nonzero_vars_to_original_value (bd->nonzero_vars, 0, nonzero_vars); | |
1279 | restore_vars_to_original_value (bd->const_and_copies, 0, const_and_copies); | |
1280 | restore_currdefs_to_original_value (bd->block_defs, 0); | |
1281 | ||
1282 | /* Remove VRP records associated with this basic block. They are no | |
1283 | longer valid. | |
1284 | ||
1285 | To be efficient, we note which variables have had their values | |
1286 | constrained in this block. So walk over each variable in the | |
1287 | VRP_VARIABLEs array. */ | |
1288 | while (bd->vrp_variables && VARRAY_ACTIVE_SIZE (bd->vrp_variables) > 0) | |
1289 | { | |
1290 | tree var = VARRAY_TOP_TREE (bd->vrp_variables); | |
1291 | ||
1292 | /* Each variable has a stack of value range records. We want to | |
1293 | invalidate those associated with our basic block. So we walk | |
1294 | the array backwards popping off records associated with our | |
1295 | block. Once we hit a record not associated with our block | |
1296 | we are done. */ | |
1297 | varray_type var_vrp_records = VARRAY_GENERIC_PTR (vrp_data, | |
1298 | SSA_NAME_VERSION (var)); | |
1299 | ||
1300 | while (VARRAY_ACTIVE_SIZE (var_vrp_records) > 0) | |
1301 | { | |
1302 | struct vrp_element *element | |
1303 | = (struct vrp_element *)VARRAY_TOP_GENERIC_PTR (var_vrp_records); | |
1304 | ||
1305 | if (element->bb != bb) | |
1306 | break; | |
1307 | ||
1308 | VARRAY_POP (var_vrp_records); | |
1309 | } | |
1310 | ||
1311 | VARRAY_POP (bd->vrp_variables); | |
1312 | } | |
1313 | ||
1314 | /* Re-scan operands in all statements that may have had new symbols | |
1315 | exposed. */ | |
1316 | while (bd->stmts_to_rescan && VARRAY_ACTIVE_SIZE (bd->stmts_to_rescan) > 0) | |
1317 | { | |
1318 | tree stmt = VARRAY_TOP_TREE (bd->stmts_to_rescan); | |
1319 | VARRAY_POP (bd->stmts_to_rescan); | |
1320 | mark_new_vars_to_rename (stmt, vars_to_rename); | |
1321 | } | |
1322 | } | |
1323 | ||
1324 | /* PHI nodes can create equivalences too. | |
1325 | ||
1326 | Ignoring any alternatives which are the same as the result, if | |
1327 | all the alternatives are equal, then the PHI node creates an | |
dd747311 JL |
1328 | equivalence. |
1329 | ||
1330 | Additionally, if all the PHI alternatives are known to have a nonzero | |
1331 | value, then the result of this PHI is known to have a nonzero value, | |
1332 | even if we do not know its exact value. */ | |
1333 | ||
6de9cd9a DN |
1334 | static void |
1335 | record_equivalences_from_phis (struct dom_walk_data *walk_data, basic_block bb) | |
1336 | { | |
1337 | struct dom_walk_block_data *bd | |
1338 | = VARRAY_TOP_GENERIC_PTR (walk_data->block_data_stack); | |
1339 | tree phi; | |
1340 | ||
17192884 | 1341 | for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi)) |
6de9cd9a DN |
1342 | { |
1343 | tree lhs = PHI_RESULT (phi); | |
1344 | tree rhs = NULL; | |
1345 | int i; | |
1346 | ||
1347 | for (i = 0; i < PHI_NUM_ARGS (phi); i++) | |
1348 | { | |
1349 | tree t = PHI_ARG_DEF (phi, i); | |
1350 | ||
1351 | if (TREE_CODE (t) == SSA_NAME || is_gimple_min_invariant (t)) | |
1352 | { | |
1353 | /* Ignore alternatives which are the same as our LHS. */ | |
1354 | if (operand_equal_p (lhs, t, 0)) | |
1355 | continue; | |
1356 | ||
1357 | /* If we have not processed an alternative yet, then set | |
1358 | RHS to this alternative. */ | |
1359 | if (rhs == NULL) | |
1360 | rhs = t; | |
1361 | /* If we have processed an alternative (stored in RHS), then | |
1362 | see if it is equal to this one. If it isn't, then stop | |
1363 | the search. */ | |
1364 | else if (! operand_equal_p (rhs, t, 0)) | |
1365 | break; | |
1366 | } | |
1367 | else | |
1368 | break; | |
1369 | } | |
1370 | ||
1371 | /* If we had no interesting alternatives, then all the RHS alternatives | |
1372 | must have been the same as LHS. */ | |
1373 | if (!rhs) | |
1374 | rhs = lhs; | |
1375 | ||
1376 | /* If we managed to iterate through each PHI alternative without | |
1377 | breaking out of the loop, then we have a PHI which may create | |
1378 | a useful equivalence. We do not need to record unwind data for | |
1379 | this, since this is a true assignment and not an equivalence | |
1ea7e6ad | 1380 | inferred from a comparison. All uses of this ssa name are dominated |
6de9cd9a DN |
1381 | by this assignment, so unwinding just costs time and space. */ |
1382 | if (i == PHI_NUM_ARGS (phi) | |
1383 | && may_propagate_copy (lhs, rhs)) | |
1384 | set_value_for (lhs, rhs, const_and_copies); | |
1385 | ||
dd747311 JL |
1386 | /* Now see if we know anything about the nonzero property for the |
1387 | result of this PHI. */ | |
1388 | for (i = 0; i < PHI_NUM_ARGS (phi); i++) | |
1389 | { | |
1390 | if (!PHI_ARG_NONZERO (phi, i)) | |
1391 | break; | |
1392 | } | |
1393 | ||
1394 | if (i == PHI_NUM_ARGS (phi)) | |
1395 | bitmap_set_bit (nonzero_vars, SSA_NAME_VERSION (PHI_RESULT (phi))); | |
1396 | ||
6de9cd9a DN |
1397 | register_new_def (lhs, &bd->block_defs); |
1398 | } | |
1399 | } | |
1400 | ||
1401 | /* Record any equivalences created by the incoming edge to BB. If BB | |
1402 | has more than one incoming edge, then no equivalence is created. */ | |
1403 | ||
1404 | static void | |
1405 | record_equivalences_from_incoming_edge (struct dom_walk_data *walk_data, | |
1406 | basic_block bb) | |
1407 | { | |
1408 | int edge_flags; | |
1409 | basic_block parent; | |
1410 | struct eq_expr_value eq_expr_value; | |
1411 | tree parent_block_last_stmt = NULL; | |
1412 | struct dom_walk_block_data *bd | |
1413 | = VARRAY_TOP_GENERIC_PTR (walk_data->block_data_stack); | |
1414 | ||
1415 | /* If our parent block ended with a control statment, then we may be | |
1416 | able to record some equivalences based on which outgoing edge from | |
1417 | the parent was followed. */ | |
1418 | parent = get_immediate_dominator (CDI_DOMINATORS, bb); | |
1419 | if (parent) | |
1420 | { | |
1421 | parent_block_last_stmt = last_stmt (parent); | |
1422 | if (parent_block_last_stmt && !is_ctrl_stmt (parent_block_last_stmt)) | |
1423 | parent_block_last_stmt = NULL; | |
1424 | } | |
1425 | ||
1426 | eq_expr_value.src = NULL; | |
1427 | eq_expr_value.dst = NULL; | |
1428 | ||
1429 | /* If we have a single predecessor, then extract EDGE_FLAGS from | |
1430 | our single incoming edge. Otherwise clear EDGE_FLAGS and | |
1431 | PARENT_BLOCK_LAST_STMT since they're not needed. */ | |
1432 | if (bb->pred | |
1433 | && ! bb->pred->pred_next | |
1434 | && parent_block_last_stmt | |
1435 | && bb_for_stmt (parent_block_last_stmt) == bb->pred->src) | |
1436 | { | |
1437 | edge_flags = bb->pred->flags; | |
1438 | } | |
1439 | else | |
1440 | { | |
1441 | edge_flags = 0; | |
1442 | parent_block_last_stmt = NULL; | |
1443 | } | |
1444 | ||
1445 | /* If our parent block ended in a COND_EXPR, add any equivalences | |
1446 | created by the COND_EXPR to the hash table and initialize | |
1447 | EQ_EXPR_VALUE appropriately. | |
1448 | ||
1449 | EQ_EXPR_VALUE is an assignment expression created when BB's immediate | |
1450 | dominator ends in a COND_EXPR statement whose predicate is of the form | |
1451 | 'VAR == VALUE', where VALUE may be another variable or a constant. | |
1452 | This is used to propagate VALUE on the THEN_CLAUSE of that | |
1453 | conditional. This assignment is inserted in CONST_AND_COPIES so that | |
1454 | the copy and constant propagator can find more propagation | |
1455 | opportunities. */ | |
1456 | if (parent_block_last_stmt | |
1457 | && bb->pred->pred_next == NULL | |
1458 | && TREE_CODE (parent_block_last_stmt) == COND_EXPR | |
1459 | && (edge_flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))) | |
1460 | eq_expr_value = get_eq_expr_value (parent_block_last_stmt, | |
1461 | (edge_flags & EDGE_TRUE_VALUE) != 0, | |
1462 | &bd->avail_exprs, | |
1463 | bb, | |
1464 | &bd->vrp_variables); | |
1c052514 SB |
1465 | /* Similarly when the parent block ended in a SWITCH_EXPR. |
1466 | We can only know the value of the switch's condition if the dominator | |
1467 | parent is also the only predecessor of this block. */ | |
6de9cd9a DN |
1468 | else if (parent_block_last_stmt |
1469 | && bb->pred->pred_next == NULL | |
1c052514 | 1470 | && bb->pred->src == parent |
6de9cd9a DN |
1471 | && TREE_CODE (parent_block_last_stmt) == SWITCH_EXPR) |
1472 | { | |
1473 | tree switch_cond = SWITCH_COND (parent_block_last_stmt); | |
1474 | ||
1475 | /* If the switch's condition is an SSA variable, then we may | |
1476 | know its value at each of the case labels. */ | |
1477 | if (TREE_CODE (switch_cond) == SSA_NAME) | |
1478 | { | |
1479 | tree switch_vec = SWITCH_LABELS (parent_block_last_stmt); | |
1480 | size_t i, n = TREE_VEC_LENGTH (switch_vec); | |
1481 | int case_count = 0; | |
1482 | tree match_case = NULL_TREE; | |
1483 | ||
1484 | /* Search the case labels for those whose destination is | |
1485 | the current basic block. */ | |
1486 | for (i = 0; i < n; ++i) | |
1487 | { | |
1488 | tree elt = TREE_VEC_ELT (switch_vec, i); | |
1489 | if (label_to_block (CASE_LABEL (elt)) == bb) | |
1490 | { | |
1c052514 | 1491 | if (++case_count > 1 || CASE_HIGH (elt)) |
6de9cd9a DN |
1492 | break; |
1493 | match_case = elt; | |
1494 | } | |
1495 | } | |
1496 | ||
1497 | /* If we encountered precisely one CASE_LABEL_EXPR and it | |
1498 | was not the default case, or a case range, then we know | |
1499 | the exact value of SWITCH_COND which caused us to get to | |
1500 | this block. Record that equivalence in EQ_EXPR_VALUE. */ | |
1501 | if (case_count == 1 | |
1c052514 | 1502 | && match_case |
6de9cd9a DN |
1503 | && CASE_LOW (match_case) |
1504 | && !CASE_HIGH (match_case)) | |
1505 | { | |
1506 | eq_expr_value.dst = switch_cond; | |
1507 | eq_expr_value.src = CASE_LOW (match_case); | |
1508 | } | |
1509 | } | |
1510 | } | |
1511 | ||
1512 | /* If EQ_EXPR_VALUE (VAR == VALUE) is given, register the VALUE as a | |
1513 | new value for VAR, so that occurrences of VAR can be replaced with | |
1514 | VALUE while re-writing the THEN arm of a COND_EXPR. */ | |
1515 | if (eq_expr_value.src && eq_expr_value.dst) | |
1516 | record_equality (eq_expr_value.dst, eq_expr_value.src, | |
1517 | &bd->const_and_copies); | |
1518 | } | |
1519 | ||
1520 | /* Dump SSA statistics on FILE. */ | |
1521 | ||
1522 | void | |
1523 | dump_dominator_optimization_stats (FILE *file) | |
1524 | { | |
1525 | long n_exprs; | |
1526 | ||
1527 | fprintf (file, "Total number of statements: %6ld\n\n", | |
1528 | opt_stats.num_stmts); | |
1529 | fprintf (file, "Exprs considered for dominator optimizations: %6ld\n", | |
1530 | opt_stats.num_exprs_considered); | |
1531 | ||
1532 | n_exprs = opt_stats.num_exprs_considered; | |
1533 | if (n_exprs == 0) | |
1534 | n_exprs = 1; | |
1535 | ||
1536 | fprintf (file, " Redundant expressions eliminated: %6ld (%.0f%%)\n", | |
1537 | opt_stats.num_re, PERCENT (opt_stats.num_re, | |
1538 | n_exprs)); | |
1539 | ||
1540 | fprintf (file, "\nHash table statistics:\n"); | |
1541 | ||
1542 | fprintf (file, " avail_exprs: "); | |
1543 | htab_statistics (file, avail_exprs); | |
1544 | } | |
1545 | ||
1546 | ||
1547 | /* Dump SSA statistics on stderr. */ | |
1548 | ||
/* Convenience wrapper: dump SSA statistics on stderr.  */

void
debug_dominator_optimization_stats (void)
{
  dump_dominator_optimization_stats (stderr);
}
1554 | ||
1555 | ||
1556 | /* Dump statistics for the hash table HTAB. */ | |
1557 | ||
1558 | static void | |
1559 | htab_statistics (FILE *file, htab_t htab) | |
1560 | { | |
1561 | fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n", | |
1562 | (long) htab_size (htab), | |
1563 | (long) htab_elements (htab), | |
1564 | htab_collisions (htab)); | |
1565 | } | |
1566 | ||
1567 | /* Record the fact that VAR has a nonzero value, though we may not know | |
1568 | its exact value. Note that if VAR is already known to have a nonzero | |
1569 | value, then we do nothing. */ | |
1570 | ||
1571 | static void | |
1572 | record_var_is_nonzero (tree var, varray_type *block_nonzero_vars_p) | |
1573 | { | |
1574 | int indx = SSA_NAME_VERSION (var); | |
1575 | ||
1576 | if (bitmap_bit_p (nonzero_vars, indx)) | |
1577 | return; | |
1578 | ||
1579 | /* Mark it in the global table. */ | |
1580 | bitmap_set_bit (nonzero_vars, indx); | |
1581 | ||
1582 | /* Record this SSA_NAME so that we can reset the global table | |
1583 | when we leave this block. */ | |
1584 | if (! *block_nonzero_vars_p) | |
1585 | VARRAY_TREE_INIT (*block_nonzero_vars_p, 2, "block_nonzero_vars"); | |
1586 | VARRAY_PUSH_TREE (*block_nonzero_vars_p, var); | |
1587 | } | |
1588 | ||
1589 | /* Enter a statement into the true/false expression hash table indicating | |
1590 | that the condition COND has the value VALUE. */ | |
1591 | ||
1592 | static void | |
1593 | record_cond (tree cond, tree value, varray_type *block_avail_exprs_p) | |
1594 | { | |
1595 | struct expr_hash_elt *element = xmalloc (sizeof (struct expr_hash_elt)); | |
1596 | void **slot; | |
1597 | ||
1598 | initialize_hash_element (cond, value, element); | |
1599 | ||
1600 | slot = htab_find_slot_with_hash (avail_exprs, (void *)element, | |
1601 | element->hash, true); | |
1602 | if (*slot == NULL) | |
1603 | { | |
1604 | *slot = (void *) element; | |
1605 | if (! *block_avail_exprs_p) | |
1606 | VARRAY_TREE_INIT (*block_avail_exprs_p, 20, "block_avail_exprs"); | |
1607 | VARRAY_PUSH_TREE (*block_avail_exprs_p, cond); | |
1608 | } | |
1609 | else | |
1610 | free (element); | |
1611 | } | |
1612 | ||
d2d8936f JL |
1613 | /* COND is a condition which is known to be true. Record variants of |
1614 | COND which must also be true. | |
1615 | ||
1616 | For example, if a < b is true, then a <= b must also be true. */ | |
1617 | ||
1618 | static void | |
1619 | record_dominating_conditions (tree cond, varray_type *block_avail_exprs_p) | |
1620 | { | |
1621 | switch (TREE_CODE (cond)) | |
1622 | { | |
1623 | case LT_EXPR: | |
1624 | record_cond (build2 (LE_EXPR, boolean_type_node, | |
1625 | TREE_OPERAND (cond, 0), | |
1626 | TREE_OPERAND (cond, 1)), | |
1627 | boolean_true_node, | |
1628 | block_avail_exprs_p); | |
1629 | record_cond (build2 (ORDERED_EXPR, boolean_type_node, | |
1630 | TREE_OPERAND (cond, 0), | |
1631 | TREE_OPERAND (cond, 1)), | |
1632 | boolean_true_node, | |
1633 | block_avail_exprs_p); | |
1634 | record_cond (build2 (NE_EXPR, boolean_type_node, | |
1635 | TREE_OPERAND (cond, 0), | |
1636 | TREE_OPERAND (cond, 1)), | |
1637 | boolean_true_node, | |
1638 | block_avail_exprs_p); | |
1639 | record_cond (build2 (LTGT_EXPR, boolean_type_node, | |
1640 | TREE_OPERAND (cond, 0), | |
1641 | TREE_OPERAND (cond, 1)), | |
1642 | boolean_true_node, | |
1643 | block_avail_exprs_p); | |
1644 | break; | |
1645 | ||
1646 | case GT_EXPR: | |
1647 | record_cond (build2 (GE_EXPR, boolean_type_node, | |
1648 | TREE_OPERAND (cond, 0), | |
1649 | TREE_OPERAND (cond, 1)), | |
1650 | boolean_true_node, | |
1651 | block_avail_exprs_p); | |
1652 | record_cond (build2 (ORDERED_EXPR, boolean_type_node, | |
1653 | TREE_OPERAND (cond, 0), | |
1654 | TREE_OPERAND (cond, 1)), | |
1655 | boolean_true_node, | |
1656 | block_avail_exprs_p); | |
1657 | record_cond (build2 (NE_EXPR, boolean_type_node, | |
1658 | TREE_OPERAND (cond, 0), | |
1659 | TREE_OPERAND (cond, 1)), | |
1660 | boolean_true_node, | |
1661 | block_avail_exprs_p); | |
1662 | record_cond (build2 (LTGT_EXPR, boolean_type_node, | |
1663 | TREE_OPERAND (cond, 0), | |
1664 | TREE_OPERAND (cond, 1)), | |
1665 | boolean_true_node, | |
1666 | block_avail_exprs_p); | |
1667 | break; | |
1668 | ||
1669 | case GE_EXPR: | |
1670 | case LE_EXPR: | |
1671 | record_cond (build2 (ORDERED_EXPR, boolean_type_node, | |
1672 | TREE_OPERAND (cond, 0), | |
1673 | TREE_OPERAND (cond, 1)), | |
1674 | boolean_true_node, | |
1675 | block_avail_exprs_p); | |
1676 | break; | |
1677 | ||
1678 | case EQ_EXPR: | |
1679 | record_cond (build2 (ORDERED_EXPR, boolean_type_node, | |
1680 | TREE_OPERAND (cond, 0), | |
1681 | TREE_OPERAND (cond, 1)), | |
1682 | boolean_true_node, | |
1683 | block_avail_exprs_p); | |
1684 | record_cond (build2 (LE_EXPR, boolean_type_node, | |
1685 | TREE_OPERAND (cond, 0), | |
1686 | TREE_OPERAND (cond, 1)), | |
1687 | boolean_true_node, | |
1688 | block_avail_exprs_p); | |
1689 | record_cond (build2 (GE_EXPR, boolean_type_node, | |
1690 | TREE_OPERAND (cond, 0), | |
1691 | TREE_OPERAND (cond, 1)), | |
1692 | boolean_true_node, | |
1693 | block_avail_exprs_p); | |
1694 | break; | |
1695 | ||
1696 | case UNORDERED_EXPR: | |
1697 | record_cond (build2 (NE_EXPR, boolean_type_node, | |
1698 | TREE_OPERAND (cond, 0), | |
1699 | TREE_OPERAND (cond, 1)), | |
1700 | boolean_true_node, | |
1701 | block_avail_exprs_p); | |
1702 | record_cond (build2 (UNLE_EXPR, boolean_type_node, | |
1703 | TREE_OPERAND (cond, 0), | |
1704 | TREE_OPERAND (cond, 1)), | |
1705 | boolean_true_node, | |
1706 | block_avail_exprs_p); | |
1707 | record_cond (build2 (UNGE_EXPR, boolean_type_node, | |
1708 | TREE_OPERAND (cond, 0), | |
1709 | TREE_OPERAND (cond, 1)), | |
1710 | boolean_true_node, | |
1711 | block_avail_exprs_p); | |
1712 | record_cond (build2 (UNEQ_EXPR, boolean_type_node, | |
1713 | TREE_OPERAND (cond, 0), | |
1714 | TREE_OPERAND (cond, 1)), | |
1715 | boolean_true_node, | |
1716 | block_avail_exprs_p); | |
1717 | record_cond (build2 (UNLT_EXPR, boolean_type_node, | |
1718 | TREE_OPERAND (cond, 0), | |
1719 | TREE_OPERAND (cond, 1)), | |
1720 | boolean_true_node, | |
1721 | block_avail_exprs_p); | |
1722 | record_cond (build2 (UNGT_EXPR, boolean_type_node, | |
1723 | TREE_OPERAND (cond, 0), | |
1724 | TREE_OPERAND (cond, 1)), | |
1725 | boolean_true_node, | |
1726 | block_avail_exprs_p); | |
1727 | break; | |
1728 | ||
1729 | case UNLT_EXPR: | |
1730 | record_cond (build2 (UNLE_EXPR, boolean_type_node, | |
1731 | TREE_OPERAND (cond, 0), | |
1732 | TREE_OPERAND (cond, 1)), | |
1733 | boolean_true_node, | |
1734 | block_avail_exprs_p); | |
1735 | record_cond (build2 (NE_EXPR, boolean_type_node, | |
1736 | TREE_OPERAND (cond, 0), | |
1737 | TREE_OPERAND (cond, 1)), | |
1738 | boolean_true_node, | |
1739 | block_avail_exprs_p); | |
1740 | break; | |
1741 | ||
1742 | case UNGT_EXPR: | |
1743 | record_cond (build2 (UNGE_EXPR, boolean_type_node, | |
1744 | TREE_OPERAND (cond, 0), | |
1745 | TREE_OPERAND (cond, 1)), | |
1746 | boolean_true_node, | |
1747 | block_avail_exprs_p); | |
1748 | record_cond (build2 (NE_EXPR, boolean_type_node, | |
1749 | TREE_OPERAND (cond, 0), | |
1750 | TREE_OPERAND (cond, 1)), | |
1751 | boolean_true_node, | |
1752 | block_avail_exprs_p); | |
1753 | break; | |
1754 | ||
1755 | case UNEQ_EXPR: | |
1756 | record_cond (build2 (UNLE_EXPR, boolean_type_node, | |
1757 | TREE_OPERAND (cond, 0), | |
1758 | TREE_OPERAND (cond, 1)), | |
1759 | boolean_true_node, | |
1760 | block_avail_exprs_p); | |
1761 | record_cond (build2 (UNGE_EXPR, boolean_type_node, | |
1762 | TREE_OPERAND (cond, 0), | |
1763 | TREE_OPERAND (cond, 1)), | |
1764 | boolean_true_node, | |
1765 | block_avail_exprs_p); | |
1766 | break; | |
1767 | ||
1768 | case LTGT_EXPR: | |
1769 | record_cond (build2 (NE_EXPR, boolean_type_node, | |
1770 | TREE_OPERAND (cond, 0), | |
1771 | TREE_OPERAND (cond, 1)), | |
1772 | boolean_true_node, | |
1773 | block_avail_exprs_p); | |
1774 | record_cond (build2 (ORDERED_EXPR, boolean_type_node, | |
1775 | TREE_OPERAND (cond, 0), | |
1776 | TREE_OPERAND (cond, 1)), | |
1777 | boolean_true_node, | |
1778 | block_avail_exprs_p); | |
1779 | ||
1780 | default: | |
1781 | break; | |
1782 | } | |
1783 | } | |
1784 | ||
6de9cd9a DN |
1785 | /* A helper function for record_const_or_copy and record_equality. |
1786 | Do the work of recording the value and undo info. */ | |
1787 | ||
1788 | static void | |
1789 | record_const_or_copy_1 (tree x, tree y, tree prev_x, | |
1790 | varray_type *block_const_and_copies_p) | |
1791 | { | |
1792 | set_value_for (x, y, const_and_copies); | |
1793 | ||
1794 | if (!*block_const_and_copies_p) | |
1795 | VARRAY_TREE_INIT (*block_const_and_copies_p, 2, "block_const_and_copies"); | |
1796 | VARRAY_PUSH_TREE (*block_const_and_copies_p, x); | |
1797 | VARRAY_PUSH_TREE (*block_const_and_copies_p, prev_x); | |
1798 | } | |
1799 | ||
1800 | /* Record that X is equal to Y in const_and_copies. Record undo | |
1801 | information in the block-local varray. */ | |
1802 | ||
1803 | static void | |
1804 | record_const_or_copy (tree x, tree y, varray_type *block_const_and_copies_p) | |
1805 | { | |
1806 | tree prev_x = get_value_for (x, const_and_copies); | |
1807 | ||
1808 | if (TREE_CODE (y) == SSA_NAME) | |
1809 | { | |
1810 | tree tmp = get_value_for (y, const_and_copies); | |
1811 | if (tmp) | |
1812 | y = tmp; | |
1813 | } | |
1814 | ||
1815 | record_const_or_copy_1 (x, y, prev_x, block_const_and_copies_p); | |
1816 | } | |
1817 | ||
1818 | /* Similarly, but assume that X and Y are the two operands of an EQ_EXPR. | |
1819 | This constrains the cases in which we may treat this as assignment. */ | |
1820 | ||
1821 | static void | |
1822 | record_equality (tree x, tree y, varray_type *block_const_and_copies_p) | |
1823 | { | |
1824 | tree prev_x = NULL, prev_y = NULL; | |
1825 | ||
1826 | if (TREE_CODE (x) == SSA_NAME) | |
1827 | prev_x = get_value_for (x, const_and_copies); | |
1828 | if (TREE_CODE (y) == SSA_NAME) | |
1829 | prev_y = get_value_for (y, const_and_copies); | |
1830 | ||
1831 | /* If one of the previous values is invariant, then use that. | |
1832 | Otherwise it doesn't matter which value we choose, just so | |
1833 | long as we canonicalize on one value. */ | |
1834 | if (TREE_INVARIANT (y)) | |
1835 | ; | |
1836 | else if (TREE_INVARIANT (x)) | |
1837 | prev_x = x, x = y, y = prev_x, prev_x = prev_y; | |
1838 | else if (prev_x && TREE_INVARIANT (prev_x)) | |
1839 | x = y, y = prev_x, prev_x = prev_y; | |
1840 | else if (prev_y) | |
1841 | y = prev_y; | |
1842 | ||
1843 | /* After the swapping, we must have one SSA_NAME. */ | |
1844 | if (TREE_CODE (x) != SSA_NAME) | |
1845 | return; | |
1846 | ||
1847 | /* For IEEE, -0.0 == 0.0, so we don't necessarily know the sign of a | |
1848 | variable compared against zero. If we're honoring signed zeros, | |
1849 | then we cannot record this value unless we know that the value is | |
1ea7e6ad | 1850 | nonzero. */ |
6de9cd9a DN |
1851 | if (HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (x))) |
1852 | && (TREE_CODE (y) != REAL_CST | |
1853 | || REAL_VALUES_EQUAL (dconst0, TREE_REAL_CST (y)))) | |
1854 | return; | |
1855 | ||
1856 | record_const_or_copy_1 (x, y, prev_x, block_const_and_copies_p); | |
1857 | } | |
1858 | ||
/* STMT is a MODIFY_EXPR for which we were unable to find RHS in the
   hash tables.  Try to simplify the RHS using whatever equivalences
   we may have recorded.

   If we are able to simplify the RHS, then lookup the simplified form in
   the hash table and return the result.  Otherwise return NULL.

   WALK_DATA carries the dominator-walk state (including a cached
   scratch COND_EXPR in WALK_DATA->GLOBAL_DATA, see below).  ANN is
   STMT's annotation; helpers mark it modified when they rewrite the
   RHS.  INSERT is passed through to the hash-table lookup and controls
   whether a missing expression is entered into the table.  */

static tree
simplify_rhs_and_lookup_avail_expr (struct dom_walk_data *walk_data,
				    tree stmt,
				    stmt_ann_t ann,
				    int insert)
{
  tree rhs = TREE_OPERAND (stmt, 1);
  enum tree_code rhs_code = TREE_CODE (rhs);
  tree result = NULL;
  /* Block-local data for the block being walked; holds the undo
     array for avail_exprs entries made in this block.  */
  struct dom_walk_block_data *bd
    = VARRAY_TOP_GENERIC_PTR (walk_data->block_data_stack);

  /* If we have lhs = ~x, look and see if we earlier had x = ~y.
     In which case we can change this statement to be lhs = y.
     Which can then be copy propagated. 

     Similarly for negation.  */
  if ((rhs_code == BIT_NOT_EXPR || rhs_code == NEGATE_EXPR)
      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
    {
      /* Get the definition statement for our RHS.  */
      tree rhs_def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));

      /* See if the RHS_DEF_STMT has the same form as our statement.  */
      if (TREE_CODE (rhs_def_stmt) == MODIFY_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs_def_stmt, 1)) == rhs_code)
	{
	  tree rhs_def_operand;

	  rhs_def_operand = TREE_OPERAND (TREE_OPERAND (rhs_def_stmt, 1), 0);

	  /* Verify that RHS_DEF_OPERAND is a suitable SSA variable.  */
	  if (TREE_CODE (rhs_def_operand) == SSA_NAME
	      && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
	    result = update_rhs_and_lookup_avail_expr (stmt,
						       rhs_def_operand,
						       &bd->avail_exprs,
						       ann,
						       insert);
	}
    }

  /* If we have z = (x OP C1), see if we earlier had x = y OP C2.
     If OP is associative, create and fold (y OP C2) OP C1 which
     should result in (y OP C3), use that as the RHS for the
     assignment.  Add minus to this, as we handle it specially below.  */
  if ((associative_tree_code (rhs_code) || rhs_code == MINUS_EXPR)
      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
      && is_gimple_min_invariant (TREE_OPERAND (rhs, 1)))
    {
      tree rhs_def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));

      /* See if the RHS_DEF_STMT has the same form as our statement.  */
      if (TREE_CODE (rhs_def_stmt) == MODIFY_EXPR)
	{
	  tree rhs_def_rhs = TREE_OPERAND (rhs_def_stmt, 1);
	  enum tree_code rhs_def_code = TREE_CODE (rhs_def_rhs);

	  /* Only same codes or a PLUS/MINUS mix can be combined.  */
	  if (rhs_code == rhs_def_code
	      || (rhs_code == PLUS_EXPR && rhs_def_code == MINUS_EXPR)
	      || (rhs_code == MINUS_EXPR && rhs_def_code == PLUS_EXPR))
	    {
	      tree def_stmt_op0 = TREE_OPERAND (rhs_def_rhs, 0);
	      tree def_stmt_op1 = TREE_OPERAND (rhs_def_rhs, 1);

	      if (TREE_CODE (def_stmt_op0) == SSA_NAME
		  && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def_stmt_op0)
		  && is_gimple_min_invariant (def_stmt_op1))
		{
		  tree outer_const = TREE_OPERAND (rhs, 1);
		  tree type = TREE_TYPE (TREE_OPERAND (stmt, 0));
		  tree t;

		  /* Ho hum.  So fold will only operate on the outermost
		     thingy that we give it, so we have to build the new
		     expression in two pieces.  This requires that we handle
		     combinations of plus and minus.  */
		  if (rhs_def_code != rhs_code)
		    {
		      /* Mixed PLUS/MINUS: fold the two constants with
			 the sign adjusted, then combine with PLUS.  */
		      if (rhs_def_code == MINUS_EXPR)
			t = build (MINUS_EXPR, type, outer_const, def_stmt_op1);
		      else
			t = build (MINUS_EXPR, type, def_stmt_op1, outer_const);
		      rhs_code = PLUS_EXPR;
		    }
		  else if (rhs_def_code == MINUS_EXPR)
		    t = build (PLUS_EXPR, type, def_stmt_op1, outer_const);
		  else
		    t = build (rhs_def_code, type, def_stmt_op1, outer_const);
		  /* First fold the combined constants, then fold the
		     full replacement expression.  */
		  t = local_fold (t);
		  t = build (rhs_code, type, def_stmt_op0, t);
		  t = local_fold (t);

		  /* If the result is a suitable looking gimple expression,
		     then use it instead of the original for STMT.  */
		  if (TREE_CODE (t) == SSA_NAME
		      || (TREE_CODE_CLASS (TREE_CODE (t)) == '1'
			  && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME)
		      || ((TREE_CODE_CLASS (TREE_CODE (t)) == '2'
			   || TREE_CODE_CLASS (TREE_CODE (t)) == '<')
			  && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
			  && is_gimple_val (TREE_OPERAND (t, 1))))
		    result = update_rhs_and_lookup_avail_expr
		      (stmt, t, &bd->avail_exprs, ann, insert);
		}
	    }
	}
    }

  /* Transform TRUNC_DIV_EXPR and TRUNC_MOD_EXPR into RSHIFT_EXPR
     and BIT_AND_EXPR respectively if the first operand is greater
     than zero and the second operand is an exact power of two.  */
  if ((rhs_code == TRUNC_DIV_EXPR || rhs_code == TRUNC_MOD_EXPR)
      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (rhs, 0)))
      && integer_pow2p (TREE_OPERAND (rhs, 1)))
    {
      tree val;
      tree op = TREE_OPERAND (rhs, 0);

      /* An unsigned operand is trivially >= 0.  */
      if (TYPE_UNSIGNED (TREE_TYPE (op)))
	{
	  val = integer_one_node;
	}
      else
	{
	  /* For a signed operand, query the recorded conditions for
	     "op > 0" using a scratch COND_EXPR cached in
	     WALK_DATA->GLOBAL_DATA so we need not allocate a new tree
	     for every query; if the cache exists we mutate it in
	     place.  */
	  tree dummy_cond = walk_data->global_data;

	  if (! dummy_cond)
	    {
	      dummy_cond = build (GT_EXPR, boolean_type_node,
				  op, integer_zero_node);
	      dummy_cond = build (COND_EXPR, void_type_node,
				  dummy_cond, NULL, NULL);
	      walk_data->global_data = dummy_cond;
	    }
	  else
	    {
	      TREE_SET_CODE (TREE_OPERAND (dummy_cond, 0), GT_EXPR);
	      TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 0) = op;
	      TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 1)
		= integer_zero_node;
	    }
	  val = simplify_cond_and_lookup_avail_expr (dummy_cond,
						     &bd->avail_exprs,
						     NULL, false);
	}

      /* VAL == 1 means we proved op > 0, so the division/modulus can
	 be replaced with a shift/mask.  */
      if (val && integer_onep (val))
	{
	  tree t;
	  tree op0 = TREE_OPERAND (rhs, 0);
	  tree op1 = TREE_OPERAND (rhs, 1);

	  if (rhs_code == TRUNC_DIV_EXPR)
	    t = build (RSHIFT_EXPR, TREE_TYPE (op0), op0,
		       build_int_2 (tree_log2 (op1), 0));
	  else
	    t = build (BIT_AND_EXPR, TREE_TYPE (op0), op0,
		       local_fold (build (MINUS_EXPR, TREE_TYPE (op1),
					  op1, integer_one_node)));

	  result = update_rhs_and_lookup_avail_expr (stmt, t,
						     &bd->avail_exprs,
						     ann, insert);
	}
    }

  /* Transform ABS (X) into X or -X as appropriate.  */
  if (rhs_code == ABS_EXPR
      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (rhs, 0))))
    {
      tree val;
      tree op = TREE_OPERAND (rhs, 0);
      tree type = TREE_TYPE (op);

      /* Unsigned values are never negative; ABS is the identity
	 (VAL == 0 selects "t = op" below).  */
      if (TYPE_UNSIGNED (type))
	{
	  val = integer_zero_node;
	}
      else
	{
	  /* Ask the recorded conditions whether "op <= 0" holds,
	     reusing the cached scratch COND_EXPR (see above).  */
	  tree dummy_cond = walk_data->global_data;

	  if (! dummy_cond)
	    {
	      dummy_cond = build (LE_EXPR, boolean_type_node,
				  op, integer_zero_node);
	      dummy_cond = build (COND_EXPR, void_type_node,
				  dummy_cond, NULL, NULL);
	      walk_data->global_data = dummy_cond;
	    }
	  else
	    {
	      TREE_SET_CODE (TREE_OPERAND (dummy_cond, 0), LE_EXPR);
	      TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 0) = op;
	      TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 1)
		= fold_convert (type, integer_zero_node);
	    }
	  val = simplify_cond_and_lookup_avail_expr (dummy_cond,
						     &bd->avail_exprs,
						     NULL, false);

	  if (!val)
	    {
	      /* "op <= 0" had no known value; try the complementary
		 question "op >= 0" instead.  */
	      TREE_SET_CODE (TREE_OPERAND (dummy_cond, 0), GE_EXPR);
	      TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 0) = op;
	      TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 1)
		= fold_convert (type, integer_zero_node);

	      val = simplify_cond_and_lookup_avail_expr (dummy_cond,
							 &bd->avail_exprs,
							 NULL, false);

	      /* Invert the answer: "op >= 0" true means ABS is the
		 identity (want VAL == 0), and vice versa, so VAL keeps
		 the meaning established for the "op <= 0" query.  */
	      if (val)
		{
		  if (integer_zerop (val))
		    val = integer_one_node;
		  else if (integer_onep (val))
		    val = integer_zero_node;
		}
	    }
	}

      /* VAL == 1: op <= 0, so ABS (op) == -op.
	 VAL == 0: op > 0 (or unsigned), so ABS (op) == op.  */
      if (val
	  && (integer_onep (val) || integer_zerop (val)))
	{
	  tree t;

	  if (integer_onep (val))
	    t = build1 (NEGATE_EXPR, TREE_TYPE (op), op);
	  else
	    t = op;

	  result = update_rhs_and_lookup_avail_expr (stmt, t,
						     &bd->avail_exprs,
						     ann, insert);
	}
    }

  /* Optimize *"foo" into 'f'.  This is done here rather than
     in fold to avoid problems with stuff like &*"foo".  */
  if (TREE_CODE (rhs) == INDIRECT_REF || TREE_CODE (rhs) == ARRAY_REF)
    {
      tree t = fold_read_from_constant_string (rhs);

      if (t)
	result = update_rhs_and_lookup_avail_expr (stmt, t,
						   &bd->avail_exprs,
						   ann, insert);
    }

  return result;
}
2119 | ||
2120 | /* COND is a condition of the form: | |
2121 | ||
2122 | x == const or x != const | |
2123 | ||
2124 | Look back to x's defining statement and see if x is defined as | |
2125 | ||
2126 | x = (type) y; | |
2127 | ||
2128 | If const is unchanged if we convert it to type, then we can build | |
2129 | the equivalent expression: | |
2130 | ||
2131 | ||
2132 | y == const or y != const | |
2133 | ||
2134 | Which may allow further optimizations. | |
2135 | ||
2136 | Return the equivalent comparison or NULL if no such equivalent comparison | |
2137 | was found. */ | |
2138 | ||
2139 | static tree | |
2140 | find_equivalent_equality_comparison (tree cond) | |
2141 | { | |
2142 | tree op0 = TREE_OPERAND (cond, 0); | |
2143 | tree op1 = TREE_OPERAND (cond, 1); | |
2144 | tree def_stmt = SSA_NAME_DEF_STMT (op0); | |
2145 | ||
2146 | /* OP0 might have been a parameter, so first make sure it | |
2147 | was defined by a MODIFY_EXPR. */ | |
2148 | if (def_stmt && TREE_CODE (def_stmt) == MODIFY_EXPR) | |
2149 | { | |
2150 | tree def_rhs = TREE_OPERAND (def_stmt, 1); | |
2151 | ||
2152 | /* Now make sure the RHS of the MODIFY_EXPR is a typecast. */ | |
2153 | if ((TREE_CODE (def_rhs) == NOP_EXPR | |
2154 | || TREE_CODE (def_rhs) == CONVERT_EXPR) | |
2155 | && TREE_CODE (TREE_OPERAND (def_rhs, 0)) == SSA_NAME) | |
2156 | { | |
2157 | tree def_rhs_inner = TREE_OPERAND (def_rhs, 0); | |
2158 | tree def_rhs_inner_type = TREE_TYPE (def_rhs_inner); | |
2159 | tree new; | |
2160 | ||
2161 | if (TYPE_PRECISION (def_rhs_inner_type) | |
2162 | > TYPE_PRECISION (TREE_TYPE (def_rhs))) | |
2163 | return NULL; | |
2164 | ||
2165 | /* What we want to prove is that if we convert OP1 to | |
2166 | the type of the object inside the NOP_EXPR that the | |
2167 | result is still equivalent to SRC. | |
2168 | ||
2169 | If that is true, the build and return new equivalent | |
2170 | condition which uses the source of the typecast and the | |
2171 | new constant (which has only changed its type). */ | |
2172 | new = build1 (TREE_CODE (def_rhs), def_rhs_inner_type, op1); | |
2173 | new = local_fold (new); | |
2174 | if (is_gimple_val (new) && tree_int_cst_equal (new, op1)) | |
2175 | return build (TREE_CODE (cond), TREE_TYPE (cond), | |
2176 | def_rhs_inner, new); | |
2177 | } | |
2178 | } | |
2179 | return NULL; | |
2180 | } | |
2181 | ||
/* STMT is a COND_EXPR for which we could not trivially determine its
   result.  This routine attempts to find equivalent forms of the
   condition which we may be able to optimize better.  It also 
   uses simple value range propagation to optimize conditionals.

   Returns boolean_true_node/boolean_false_node when the condition's
   value can be determined, a previously recorded value from the
   avail_exprs table, or 0 when nothing is known.  ANN is STMT's
   annotation (marked modified when the condition is rewritten);
   INSERT is forwarded to lookup_avail_expr.  */

static tree
simplify_cond_and_lookup_avail_expr (tree stmt,
				     varray_type *block_avail_exprs_p,
				     stmt_ann_t ann,
				     int insert)
{
  tree cond = COND_EXPR_COND (stmt);

  if (TREE_CODE_CLASS (TREE_CODE (cond)) == '<')
    {
      tree op0 = TREE_OPERAND (cond, 0);
      tree op1 = TREE_OPERAND (cond, 1);

      if (TREE_CODE (op0) == SSA_NAME && is_gimple_min_invariant (op1))
	{
	  int limit;
	  tree low, high, cond_low, cond_high;
	  int lowequal, highequal, swapped, no_overlap, subset, cond_inverted;
	  varray_type vrp_records;
	  struct vrp_element *element;

	  /* First see if we have test of an SSA_NAME against a constant
	     where the SSA_NAME is defined by an earlier typecast which
	     is irrelevant when performing tests against the given
	     constant.  */
	  if (TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
	    {
	      tree new_cond = find_equivalent_equality_comparison (cond);

	      if (new_cond)
		{
		  /* Update the statement to use the new equivalent
		     condition.  */
		  COND_EXPR_COND (stmt) = new_cond;
		  ann->modified = 1;

		  /* Lookup the condition and return its known value if it
		     exists.  */
		  new_cond = lookup_avail_expr (stmt, block_avail_exprs_p,
						insert);
		  if (new_cond)
		    return new_cond;

		  /* The operands have changed, so update op0 and op1.
		     NOTE(review): COND still refers to the condition
		     that was just replaced, so these reads pick up the
		     OLD operands, not those of NEW_COND — presumably
		     the intent was to use the new condition; confirm
		     against the pass's history before relying on the
		     VRP results below.  */
		  op0 = TREE_OPERAND (cond, 0);
		  op1 = TREE_OPERAND (cond, 1);
		}
	    }

	  /* Consult the value range records for this variable (if they exist)
	     to see if we can eliminate or simplify this conditional. 

	     Note two tests are necessary to determine no records exist.
	     First we have to see if the virtual array exists, if it 
	     exists, then we have to check its active size. 

	     Also note the vast majority of conditionals are not testing
	     a variable which has had its range constrained by an earlier
	     conditional.  So this filter avoids a lot of unnecessary work.  */
	  vrp_records = VARRAY_GENERIC_PTR (vrp_data, SSA_NAME_VERSION (op0));
	  if (vrp_records == NULL)
	    return NULL;

	  limit = VARRAY_ACTIVE_SIZE (vrp_records);

	  /* If we have no value range records for this variable, or we are
	     unable to extract a range for this condition, then there is
	     nothing to do.  */
	  if (limit == 0
	      || ! extract_range_from_cond (cond, &cond_high,
					    &cond_low, &cond_inverted))
	    return NULL;

	  /* We really want to avoid unnecessary computations of range
	     info.  So all ranges are computed lazily; this avoids a
	     lot of unnecessary work.  ie, we record the conditional,
	     but do not process how it constrains the variable's
	     potential values until we know that processing the condition
	     could be helpful.

	     However, we do not want to have to walk a potentially long
	     list of ranges, nor do we want to compute a variable's
	     range more than once for a given path.

	     Luckily, each time we encounter a conditional that can not
	     be otherwise optimized we will end up here and we will
	     compute the necessary range information for the variable
	     used in this condition.

	     Thus you can conclude that there will never be more than one
	     conditional associated with a variable which has not been
	     processed.  So we never need to merge more than one new
	     conditional into the current range. 

	     These properties also help us avoid unnecessary work.  */
	   element
	     = (struct vrp_element *)VARRAY_GENERIC_PTR (vrp_records, limit - 1);

	  if (element->high && element->low)
	    {
	      /* The last element has been processed, so there is no range
		 merging to do, we can simply use the high/low values
		 recorded in the last element.  */
	      low = element->low;
	      high = element->high;
	    }
	  else
	    {
	      tree tmp_high, tmp_low;
	      int dummy;

	      /* The last element has not been processed.  Process it now.  */
	      extract_range_from_cond (element->cond, &tmp_high,
				       &tmp_low, &dummy);
	  
	      /* If this is the only element, then no merging is necessary, 
		 the high/low values from extract_range_from_cond are all
		 we need.  */
	      if (limit == 1)
		{
		  low = tmp_low;
		  high = tmp_high;
		}
	      else
		{
		  /* Get the high/low value from the previous element.  */
		  struct vrp_element *prev
		    = (struct vrp_element *)VARRAY_GENERIC_PTR (vrp_records,
								limit - 2);
		  low = prev->low;
		  high = prev->high;

		  /* Merge in this element's range with the range from the
		     previous element.

		     The low value for the merged range is the maximum of
		     the previous low value and the low value of this record.

		     Similarly the high value for the merged range is the
		     minimum of the previous high value and the high value of
		     this record.  */
		  low = (tree_int_cst_compare (low, tmp_low) == 1
			 ? low : tmp_low);
		  high = (tree_int_cst_compare (high, tmp_high) == -1
			  ? high : tmp_high);
		}

	      /* And record the computed range.  */
	      element->low = low;
	      element->high = high;

	    }

	  /* After we have constrained this variable's potential values,
	     we try to determine the result of the given conditional.

	     To simplify later tests, first determine if the current
	     low value is the same low value as the conditional.
	     Similarly for the current high value and the high value
	     for the conditional.  */
	  lowequal = tree_int_cst_equal (low, cond_low);
	  highequal = tree_int_cst_equal (high, cond_high);

	  /* The known range is exactly the condition's range: the
	     condition is true (or false when inverted).  */
	  if (lowequal && highequal)
	    return (cond_inverted ? boolean_false_node : boolean_true_node);

	  /* To simplify the overlap/subset tests below we may want
	     to swap the two ranges so that the larger of the two
	     ranges occurs "first".  */
	  swapped = 0;
	  if (tree_int_cst_compare (low, cond_low) == 1
	      || (lowequal 
		  && tree_int_cst_compare (cond_high, high) == 1))
	    {
	      tree temp;

	      swapped = 1;
	      temp = low;
	      low = cond_low;
	      cond_low = temp;
	      temp = high;
	      high = cond_high;
	      cond_high = temp;
	    }

	  /* Now determine if there is no overlap in the ranges
	     or if the second range is a subset of the first range.  */
	  no_overlap = tree_int_cst_lt (high, cond_low);
	  subset = tree_int_cst_compare (cond_high, high) != 1;

	  /* If there was no overlap in the ranges, then this conditional
	     always has a false value (unless we had to invert this
	     conditional, in which case it always has a true value).  */
	  if (no_overlap)
	    return (cond_inverted ? boolean_true_node : boolean_false_node);

	  /* If the current range is a subset of the condition's range,
	     then this conditional always has a true value (unless we
	     had to invert this conditional, in which case it always
	     has a false value).  */
	  if (subset && swapped)
	    return (cond_inverted ? boolean_false_node : boolean_true_node);

	  /* We were unable to determine the result of the conditional.
	     However, we may be able to simplify the conditional.  First
	     merge the ranges in the same manner as range merging above.  */
	  low = tree_int_cst_compare (low, cond_low) == 1 ? low : cond_low;
	  high = tree_int_cst_compare (high, cond_high) == -1 ? high : cond_high;
	  
	  /* If the range has converged to a single point, then turn this
	     into an equality comparison.  */
	  if (TREE_CODE (cond) != EQ_EXPR
	      && TREE_CODE (cond) != NE_EXPR
	      && tree_int_cst_equal (low, high))
	    {
	      TREE_SET_CODE (cond, EQ_EXPR);
	      TREE_OPERAND (cond, 1) = high;
	    }
	}
    }
  return 0;
}
2409 | ||
2410 | /* STMT is a SWITCH_EXPR for which we could not trivially determine its | |
2411 | result. This routine attempts to find equivalent forms of the | |
2412 | condition which we may be able to optimize better. */ | |
2413 | ||
2414 | static tree | |
2415 | simplify_switch_and_lookup_avail_expr (tree stmt, | |
2416 | varray_type *block_avail_exprs_p, | |
2417 | stmt_ann_t ann, | |
2418 | int insert) | |
2419 | { | |
2420 | tree cond = SWITCH_COND (stmt); | |
2421 | tree def, to, ti; | |
2422 | ||
2423 | /* The optimization that we really care about is removing unnecessary | |
2424 | casts. That will let us do much better in propagating the inferred | |
2425 | constant at the switch target. */ | |
2426 | if (TREE_CODE (cond) == SSA_NAME) | |
2427 | { | |
2428 | def = SSA_NAME_DEF_STMT (cond); | |
2429 | if (TREE_CODE (def) == MODIFY_EXPR) | |
2430 | { | |
2431 | def = TREE_OPERAND (def, 1); | |
2432 | if (TREE_CODE (def) == NOP_EXPR) | |
2433 | { | |
2434 | def = TREE_OPERAND (def, 0); | |
2435 | to = TREE_TYPE (cond); | |
2436 | ti = TREE_TYPE (def); | |
2437 | ||
2438 | /* If we have an extension that preserves sign, then we | |
2439 | can copy the source value into the switch. */ | |
2440 | if (TYPE_UNSIGNED (to) == TYPE_UNSIGNED (ti) | |
2441 | && TYPE_PRECISION (to) >= TYPE_PRECISION (ti) | |
2442 | && is_gimple_val (def)) | |
2443 | { | |
2444 | SWITCH_COND (stmt) = def; | |
2445 | ann->modified = 1; | |
2446 | ||
2447 | return lookup_avail_expr (stmt, block_avail_exprs_p, insert); | |
2448 | } | |
2449 | } | |
2450 | } | |
2451 | } | |
2452 | ||
2453 | return 0; | |
2454 | } | |
2455 | ||
/* Propagate known constants/copies into PHI nodes of BB's successor
   blocks.  WALK_DATA is unused; it exists only so this function matches
   the callback signature expected by the dominator walker.  */

static void
cprop_into_phis (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
		 basic_block bb)
{
  /* All the real work is done by the shared helper, consulting the
     global const/copy table and nonzero-variable bitmap.  */
  cprop_into_successor_phis (bb, const_and_copies, nonzero_vars);
}
2465 | ||
/* Search for redundant computations in STMT.  If any are found, then
   replace them with the variable holding the result of the computation.

   If safe, record this expression into the available expression hash
   table.

   WALK_DATA carries the dominator-walk state (including the per-block
   available-expression stack) and ANN is STMT's annotation.

   Returns true when the value propagated into STMT was an ADDR_EXPR or
   a pointer-typed invariant; the caller uses this to note that new
   symbols may have been exposed and the statement needs rescanning.  */

static bool
eliminate_redundant_computations (struct dom_walk_data *walk_data,
				  tree stmt, stmt_ann_t ann)
{
  v_may_def_optype v_may_defs = V_MAY_DEF_OPS (ann);
  tree *expr_p, def = NULL_TREE;
  bool insert = true;
  tree cached_lhs;
  bool retval = false;
  struct dom_walk_block_data *bd
    = VARRAY_TOP_GENERIC_PTR (walk_data->block_data_stack);

  if (TREE_CODE (stmt) == MODIFY_EXPR)
    def = TREE_OPERAND (stmt, 0);

  /* Certain expressions on the RHS can be optimized away, but can not
     themselves be entered into the hash tables: aliased stores, defs
     that are not SSA_NAMEs, defs appearing in abnormal PHIs, and
     statements with V_MAY_DEFs.  */
  if (ann->makes_aliased_stores
      || ! def
      || TREE_CODE (def) != SSA_NAME
      || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)
      || NUM_V_MAY_DEFS (v_may_defs) != 0)
    insert = false;

  /* Check if the expression has been computed before.  */
  cached_lhs = lookup_avail_expr (stmt, &bd->avail_exprs, insert);

  /* If this is an assignment and the RHS was not in the hash table,
     then try to simplify the RHS and lookup the new RHS in the
     hash table.  */
  if (! cached_lhs && TREE_CODE (stmt) == MODIFY_EXPR)
    cached_lhs = simplify_rhs_and_lookup_avail_expr (walk_data,
						     stmt,
						     ann,
						     insert);
  /* Similarly if this is a COND_EXPR and we did not find its
     expression in the hash table, simplify the condition and
     try again.  */
  else if (! cached_lhs && TREE_CODE (stmt) == COND_EXPR)
    cached_lhs = simplify_cond_and_lookup_avail_expr (stmt,
						      &bd->avail_exprs,
						      ann,
						      insert);
  /* Similarly for a SWITCH_EXPR.  */
  else if (!cached_lhs && TREE_CODE (stmt) == SWITCH_EXPR)
    cached_lhs = simplify_switch_and_lookup_avail_expr (stmt,
							&bd->avail_exprs,
							ann,
							insert);

  opt_stats.num_exprs_considered++;

  /* Get a pointer to the expression we are trying to optimize.  */
  if (TREE_CODE (stmt) == COND_EXPR)
    expr_p = &COND_EXPR_COND (stmt);
  else if (TREE_CODE (stmt) == SWITCH_EXPR)
    expr_p = &SWITCH_COND (stmt);
  else if (TREE_CODE (stmt) == RETURN_EXPR && TREE_OPERAND (stmt, 0))
    expr_p = &TREE_OPERAND (TREE_OPERAND (stmt, 0), 1);
  else
    expr_p = &TREE_OPERAND (stmt, 1);

  /* It is safe to ignore types here since we have already done
     type checking in the hashing and equality routines.  In fact
     type checking here merely gets in the way of constant
     propagation.  Also, make sure that it is safe to propagate
     CACHED_LHS into *EXPR_P.  */
  if (cached_lhs
      && (TREE_CODE (cached_lhs) != SSA_NAME
	  || may_propagate_copy (cached_lhs, *expr_p)))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, " Replaced redundant expr '");
	  print_generic_expr (dump_file, *expr_p, dump_flags);
	  fprintf (dump_file, "' with '");
	  print_generic_expr (dump_file, cached_lhs, dump_flags);
	  fprintf (dump_file, "'\n");
	}

      opt_stats.num_re++;

#if defined ENABLE_CHECKING
      /* The cached value must be either an SSA_NAME or a minimal
	 invariant, as guaranteed by the lookup routines.  */
      if (TREE_CODE (cached_lhs) != SSA_NAME
	  && !is_gimple_min_invariant (cached_lhs))
	abort ();
#endif

      /* Propagating an ADDR_EXPR or a pointer-typed invariant may
	 expose new symbols; report that through the return value.  */
      if (TREE_CODE (cached_lhs) == ADDR_EXPR
	  || (POINTER_TYPE_P (TREE_TYPE (*expr_p))
	      && is_gimple_min_invariant (cached_lhs)))
	retval = true;

      propagate_tree_value (expr_p, cached_lhs);
      ann->modified = 1;
    }
  return retval;
}
2570 | ||
/* STMT, a MODIFY_EXPR, may create certain equivalences, in either
   the available expressions table or the const_and_copies table.
   Detect and record those equivalences.

   BLOCK_AVAIL_EXPRS_P and BLOCK_NONZERO_VARS_P are the per-block unwind
   stacks for the available-expression table and the nonzero-variable
   set.  MAY_OPTIMIZE_P is nonzero when it is safe to register LHS = RHS
   in the const/copy table.  ANN is STMT's annotation.  */

static void
record_equivalences_from_stmt (tree stmt,
			       varray_type *block_avail_exprs_p,
			       varray_type *block_nonzero_vars_p,
			       int may_optimize_p,
			       stmt_ann_t ann)
{
  tree lhs = TREE_OPERAND (stmt, 0);
  enum tree_code lhs_code = TREE_CODE (lhs);
  int i;

  if (lhs_code == SSA_NAME)
    {
      tree rhs = TREE_OPERAND (stmt, 1);

      /* Strip away any useless type conversions.  */
      STRIP_USELESS_TYPE_CONVERSION (rhs);

      /* If the RHS of the assignment is a constant or another variable that
	 may be propagated, register it in the CONST_AND_COPIES table.  We
	 do not need to record unwind data for this, since this is a true
	 assignment and not an equivalence inferred from a comparison.  All
	 uses of this ssa name are dominated by this assignment, so unwinding
	 just costs time and space.  */
      if (may_optimize_p
	  && (TREE_CODE (rhs) == SSA_NAME
	      || is_gimple_min_invariant (rhs)))
	set_value_for (lhs, rhs, const_and_copies);

      /* alloca never returns zero and the address of a non-weak symbol
	 is never zero.  NOP_EXPRs and CONVERT_EXPRs can be completely
	 stripped as they do not affect this equivalence.  */
      while (TREE_CODE (rhs) == NOP_EXPR
	     || TREE_CODE (rhs) == CONVERT_EXPR)
	rhs = TREE_OPERAND (rhs, 0);

      if (alloca_call_p (rhs)
	  || (TREE_CODE (rhs) == ADDR_EXPR
	      && DECL_P (TREE_OPERAND (rhs, 0))
	      && ! DECL_WEAK (TREE_OPERAND (rhs, 0))))
	record_var_is_nonzero (lhs, block_nonzero_vars_p);

      /* IOR of any value with a nonzero value will result in a nonzero
	 value.  Even if we do not know the exact result recording that
	 the result is nonzero is worth the effort.  */
      if (TREE_CODE (rhs) == BIT_IOR_EXPR
	  && integer_nonzerop (TREE_OPERAND (rhs, 1)))
	record_var_is_nonzero (lhs, block_nonzero_vars_p);
    }

  /* Look at both sides for pointer dereferences.  If we find one, then
     the pointer must be nonnull and we can enter that equivalence into
     the hash tables.  Only valid when -fdelete-null-pointer-checks
     allows us to assume dereferenced pointers are nonnull.  */
  if (flag_delete_null_pointer_checks)
    for (i = 0; i < 2; i++)
      {
	tree t = TREE_OPERAND (stmt, i);

	/* Strip away any COMPONENT_REFs.  */
	while (TREE_CODE (t) == COMPONENT_REF)
	  t = TREE_OPERAND (t, 0);

	/* Now see if this is a pointer dereference.  */
	if (TREE_CODE (t) == INDIRECT_REF)
	  {
	    tree op = TREE_OPERAND (t, 0);

	    /* If the pointer is a SSA variable, then enter new
	       equivalences into the hash table.  */
	    while (TREE_CODE (op) == SSA_NAME)
	      {
		tree def = SSA_NAME_DEF_STMT (op);

		record_var_is_nonzero (op, block_nonzero_vars_p);

		/* And walk up the USE-DEF chains noting other SSA_NAMEs
		   which are known to have a nonzero value.  Only
		   NOP_EXPR copies are followed; anything else ends
		   the walk.  */
		if (def
		    && TREE_CODE (def) == MODIFY_EXPR
		    && TREE_CODE (TREE_OPERAND (def, 1)) == NOP_EXPR)
		  op = TREE_OPERAND (TREE_OPERAND (def, 1), 0);
		else
		  break;
	      }
	  }
      }

  /* A memory store, even an aliased store, creates a useful
     equivalence.  By exchanging the LHS and RHS, creating suitable
     vops and recording the result in the available expression table,
     we may be able to expose more redundant loads.  */
  if (!ann->has_volatile_ops
      && (TREE_CODE (TREE_OPERAND (stmt, 1)) == SSA_NAME
	  || is_gimple_min_invariant (TREE_OPERAND (stmt, 1)))
      && !is_gimple_reg (lhs))
    {
      tree rhs = TREE_OPERAND (stmt, 1);
      tree new;
      size_t j;

      /* FIXME: If the LHS of the assignment is a bitfield and the RHS
	 is a constant, we need to adjust the constant to fit into the
	 type of the LHS.  If the LHS is a bitfield and the RHS is not
	 a constant, then we can not record any equivalences for this
	 statement since we would need to represent the widening or
	 narrowing of RHS.  This fixes gcc.c-torture/execute/921016-1.c
	 and should not be necessary if GCC represented bitfields
	 properly.  */
      if (lhs_code == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (lhs, 1)))
	{
	  if (TREE_CONSTANT (rhs))
	    rhs = widen_bitfield (rhs, TREE_OPERAND (lhs, 1), lhs);
	  else
	    rhs = NULL;

	  /* If the value overflowed, then we can not use this equivalence.  */
	  if (rhs && ! is_gimple_min_invariant (rhs))
	    rhs = NULL;
	}

      if (rhs)
	{
	  v_may_def_optype v_may_defs = V_MAY_DEF_OPS (ann);
	  v_must_def_optype v_must_defs = V_MUST_DEF_OPS (ann);

	  /* Build a new statement with the RHS and LHS exchanged.  */
	  new = build (MODIFY_EXPR, TREE_TYPE (stmt), rhs, lhs);

	  /* Get an annotation and set up the real operands.  */
	  get_stmt_ann (new);
	  get_stmt_operands (new);

	  /* Clear out the virtual operands on the new statement, we are
	     going to set them explicitly below.  */
	  remove_vuses (new);
	  remove_v_may_defs (new);
	  remove_v_must_defs (new);

	  start_ssa_stmt_operands (new);
	  /* For each VDEF on the original statement, we want to create a
	     VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
	     statement.  */
	  for (j = 0; j < NUM_V_MAY_DEFS (v_may_defs); j++)
	    {
	      tree op = V_MAY_DEF_RESULT (v_may_defs, j);
	      add_vuse (op, new);
	    }

	  for (j = 0; j < NUM_V_MUST_DEFS (v_must_defs); j++)
	    {
	      tree op = V_MUST_DEF_OP (v_must_defs, j);
	      add_vuse (op, new);
	    }

	  finalize_ssa_stmt_operands (new);

	  /* Finally enter the statement into the available expression
	     table.  */
	  lookup_avail_expr (new, block_avail_exprs_p, true);
	}
    }
}
2738 | ||
/* Optimize the statement pointed by iterator SI.

   We try to perform some simplistic global redundancy elimination and
   constant propagation:

   1- To detect global redundancy, we keep track of expressions that have
      been computed in this block and its dominators.  If we find that the
      same expression is computed more than once, we eliminate repeated
      computations by using the target of the first one.

   2- Constant values and copy assignments.  This is used to do very
      simplistic constant and copy propagation.  When a constant or copy
      assignment is found, we map the value on the RHS of the assignment to
      the variable in the LHS in the CONST_AND_COPIES table.

   WALK_DATA carries the dominator-walk state.  BB is unused (the block
   is recovered from the statement when needed).  */

static void
optimize_stmt (struct dom_walk_data *walk_data,
	       basic_block bb ATTRIBUTE_UNUSED,
	       block_stmt_iterator si)
{
  stmt_ann_t ann;
  tree stmt;
  bool may_optimize_p;
  bool may_have_exposed_new_symbols = false;
  struct dom_walk_block_data *bd
    = VARRAY_TOP_GENERIC_PTR (walk_data->block_data_stack);

  stmt = bsi_stmt (si);

  get_stmt_operands (stmt);
  ann = stmt_ann (stmt);
  opt_stats.num_stmts++;
  may_have_exposed_new_symbols = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Optimizing statement ");
      print_generic_stmt (dump_file, stmt, TDF_SLIM);
    }

  /* Const/copy propagate into USES, VUSES and the RHS of V_MAY_DEFs.  */
  may_have_exposed_new_symbols = cprop_into_stmt (stmt, const_and_copies);

  /* If the statement has been modified with constant replacements,
     fold its RHS before checking for redundant computations.  */
  if (ann->modified)
    {
      /* Try to fold the statement making sure that STMT is kept
	 up to date.  Folding may replace the statement object, so
	 refresh both STMT and ANN afterwards.  */
      if (fold_stmt (bsi_stmt_ptr (si)))
	{
	  stmt = bsi_stmt (si);
	  ann = stmt_ann (stmt);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "  Folded to: ");
	      print_generic_stmt (dump_file, stmt, TDF_SLIM);
	    }
	}

      /* Constant/copy propagation above may change the set of
	 virtual operands associated with this statement.  Folding
	 may remove the need for some virtual operands.

	 Indicate we will need to rescan and rewrite the statement.  */
      may_have_exposed_new_symbols = true;
    }

  /* Check for redundant computations.  Do this optimization only
     for assignments that have no volatile ops and conditionals.  */
  may_optimize_p = (!ann->has_volatile_ops
		    && ((TREE_CODE (stmt) == RETURN_EXPR
			 && TREE_OPERAND (stmt, 0)
			 && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR
			 && ! (TREE_SIDE_EFFECTS
			       (TREE_OPERAND (TREE_OPERAND (stmt, 0), 1))))
			|| (TREE_CODE (stmt) == MODIFY_EXPR
			    && ! TREE_SIDE_EFFECTS (TREE_OPERAND (stmt, 1)))
			|| TREE_CODE (stmt) == COND_EXPR
			|| TREE_CODE (stmt) == SWITCH_EXPR));

  if (may_optimize_p)
    may_have_exposed_new_symbols
      |= eliminate_redundant_computations (walk_data, stmt, ann);

  /* Record any additional equivalences created by this statement.  */
  if (TREE_CODE (stmt) == MODIFY_EXPR)
    record_equivalences_from_stmt (stmt,
				   &bd->avail_exprs,
				   &bd->nonzero_vars,
				   may_optimize_p,
				   ann);

  register_definitions_for_stmt (ann, &bd->block_defs);

  /* If STMT is a COND_EXPR and it was modified, then we may know
     where it goes.  If that is the case, then mark the CFG as altered.

     This will cause us to later call remove_unreachable_blocks and
     cleanup_tree_cfg when it is safe to do so.  It is not safe to
     clean things up here since removal of edges and such can trigger
     the removal of PHI nodes, which in turn can release SSA_NAMEs to
     the manager.

     That's all fine and good, except that once SSA_NAMEs are released
     to the manager, we must not call create_ssa_name until all references
     to released SSA_NAMEs have been eliminated.

     All references to the deleted SSA_NAMEs can not be eliminated until
     we remove unreachable blocks.

     We can not remove unreachable blocks until after we have completed
     any queued jump threading.

     We can not complete any queued jump threads until we have taken
     appropriate variables out of SSA form.  Taking variables out of
     SSA form can call create_ssa_name and thus we lose.

     Ultimately I suspect we're going to need to change the interface
     into the SSA_NAME manager.  */

  if (ann->modified)
    {
      tree val = NULL;

      if (TREE_CODE (stmt) == COND_EXPR)
	val = COND_EXPR_COND (stmt);
      else if (TREE_CODE (stmt) == SWITCH_EXPR)
	val = SWITCH_COND (stmt);

      /* A constant condition with a known outgoing edge means parts
	 of the CFG have become unreachable.  */
      if (val && TREE_CODE (val) == INTEGER_CST
	  && find_taken_edge (bb_for_stmt (stmt), val))
	cfg_altered = true;
    }

  if (may_have_exposed_new_symbols)
    {
      /* Queue the statement so its operands are rescanned once it is
	 safe to do so.  */
      if (! bd->stmts_to_rescan)
	VARRAY_TREE_INIT (bd->stmts_to_rescan, 20, "stmts_to_rescan");
      VARRAY_PUSH_TREE (bd->stmts_to_rescan, bsi_stmt (si));
    }
}
2882 | ||
/* Replace the RHS of STMT with NEW_RHS.  If RHS can be found in the
   available expression hashtable, then return the LHS from the hash
   table.

   If INSERT is true, then we also update the available expression
   hash table to account for the changes made to STMT.

   BLOCK_AVAIL_EXPRS_P is the per-block stack of inserted expressions
   and ANN is STMT's annotation, which is marked modified.  */

static tree
update_rhs_and_lookup_avail_expr (tree stmt, tree new_rhs,
				  varray_type *block_avail_exprs_p,
				  stmt_ann_t ann,
				  bool insert)
{
  tree cached_lhs = NULL;

  /* Remove the old entry from the hash table.  */
  if (insert)
    {
      struct expr_hash_elt element;

      initialize_hash_element (stmt, NULL, &element);
      htab_remove_elt_with_hash (avail_exprs, &element, element.hash);
    }

  /* Now update the RHS of the assignment.  */
  TREE_OPERAND (stmt, 1) = new_rhs;

  /* Now lookup the updated statement in the hash table.  */
  cached_lhs = lookup_avail_expr (stmt, block_avail_exprs_p, insert);

  /* We have now called lookup_avail_expr twice with two different
     versions of this same statement, once in optimize_stmt, once here.

     We know the call in optimize_stmt did not find an existing entry
     in the hash table, so a new entry was created.  At the same time
     this statement was pushed onto the BLOCK_AVAIL_EXPRS varray.

     If this call failed to find an existing entry on the hash table,
     then the new version of this statement was entered into the
     hash table.  And this statement was pushed onto BLOCK_AVAIL_EXPR
     for the second time.  So there are two copies on BLOCK_AVAIL_EXPRs

     If this call succeeded, we still have one copy of this statement
     on the BLOCK_AVAIL_EXPRs varray.

     For both cases, we need to pop the most recent entry off the
     BLOCK_AVAIL_EXPRs varray.  For the case where we never found this
     statement in the hash tables, that will leave precisely one
     copy of this statement on BLOCK_AVAIL_EXPRs.  For the case where
     we found a copy of this statement in the second hash table lookup
     we want _no_ copies of this statement in BLOCK_AVAIL_EXPRs.  */
  if (insert)
    VARRAY_POP (*block_avail_exprs_p);

  /* And make sure we record the fact that we modified this
     statement.  */
  ann->modified = 1;

  return cached_lhs;
}
2943 | ||
2944 | /* Search for an existing instance of STMT in the AVAIL_EXPRS table. If | |
2945 | found, return its LHS. Otherwise insert STMT in the table and return | |
2946 | NULL_TREE. | |
2947 | ||
2948 | Also, when an expression is first inserted in the AVAIL_EXPRS table, it | |
2949 | is also added to the stack pointed by BLOCK_AVAIL_EXPRS_P, so that they | |
2950 | can be removed when we finish processing this block and its children. | |
2951 | ||
2952 | NOTE: This function assumes that STMT is a MODIFY_EXPR node that | |
2953 | contains no CALL_EXPR on its RHS and makes no volatile nor | |
2954 | aliased references. */ | |
2955 | ||
2956 | static tree | |
2957 | lookup_avail_expr (tree stmt, varray_type *block_avail_exprs_p, bool insert) | |
2958 | { | |
2959 | void **slot; | |
2960 | tree lhs; | |
2961 | tree temp; | |
2962 | struct expr_hash_elt *element = xcalloc (sizeof (struct expr_hash_elt), 1); | |
2963 | ||
2964 | lhs = TREE_CODE (stmt) == MODIFY_EXPR ? TREE_OPERAND (stmt, 0) : NULL; | |
2965 | ||
2966 | initialize_hash_element (stmt, lhs, element); | |
2967 | ||
2968 | /* Don't bother remembering constant assignments and copy operations. | |
2969 | Constants and copy operations are handled by the constant/copy propagator | |
2970 | in optimize_stmt. */ | |
2971 | if (TREE_CODE (element->rhs) == SSA_NAME | |
2972 | || is_gimple_min_invariant (element->rhs)) | |
2973 | { | |
2974 | free (element); | |
2975 | return NULL_TREE; | |
2976 | } | |
2977 | ||
2978 | /* If this is an equality test against zero, see if we have recorded a | |
2979 | nonzero value for the variable in question. */ | |
2980 | if ((TREE_CODE (element->rhs) == EQ_EXPR | |
2981 | || TREE_CODE (element->rhs) == NE_EXPR) | |
2982 | && TREE_CODE (TREE_OPERAND (element->rhs, 0)) == SSA_NAME | |
2983 | && integer_zerop (TREE_OPERAND (element->rhs, 1))) | |
2984 | { | |
2985 | int indx = SSA_NAME_VERSION (TREE_OPERAND (element->rhs, 0)); | |
2986 | ||
2987 | if (bitmap_bit_p (nonzero_vars, indx)) | |
2988 | { | |
2989 | tree t = element->rhs; | |
2990 | free (element); | |
2991 | ||
2992 | if (TREE_CODE (t) == EQ_EXPR) | |
2993 | return boolean_false_node; | |
2994 | else | |
2995 | return boolean_true_node; | |
2996 | } | |
2997 | } | |
2998 | ||
2999 | /* Finally try to find the expression in the main expression hash table. */ | |
3000 | slot = htab_find_slot_with_hash (avail_exprs, element, element->hash, | |
3001 | (insert ? INSERT : NO_INSERT)); | |
3002 | if (slot == NULL) | |
3003 | { | |
3004 | free (element); | |
3005 | return NULL_TREE; | |
3006 | } | |
3007 | ||
3008 | if (*slot == NULL) | |
3009 | { | |
3010 | *slot = (void *) element; | |
3011 | if (! *block_avail_exprs_p) | |
3012 | VARRAY_TREE_INIT (*block_avail_exprs_p, 20, "block_avail_exprs"); | |
3013 | VARRAY_PUSH_TREE (*block_avail_exprs_p, stmt ? stmt : element->rhs); | |
3014 | return NULL_TREE; | |
3015 | } | |
3016 | ||
3017 | /* Extract the LHS of the assignment so that it can be used as the current | |
3018 | definition of another variable. */ | |
3019 | lhs = ((struct expr_hash_elt *)*slot)->lhs; | |
3020 | ||
3021 | /* See if the LHS appears in the CONST_AND_COPIES table. If it does, then | |
3022 | use the value from the const_and_copies table. */ | |
3023 | if (TREE_CODE (lhs) == SSA_NAME) | |
3024 | { | |
3025 | temp = get_value_for (lhs, const_and_copies); | |
3026 | if (temp) | |
3027 | lhs = temp; | |
3028 | } | |
3029 | ||
3030 | free (element); | |
3031 | return lhs; | |
3032 | } | |
3033 | ||
3034 | /* Given a condition COND, record into HI_P, LO_P and INVERTED_P the | |
3035 | range of values that result in the conditional having a true value. | |
3036 | ||
3037 | Return true if we are successful in extracting a range from COND and | |
3038 | false if we are unsuccessful. */ | |
3039 | ||
3040 | static bool | |
3041 | extract_range_from_cond (tree cond, tree *hi_p, tree *lo_p, int *inverted_p) | |
3042 | { | |
3043 | tree op1 = TREE_OPERAND (cond, 1); | |
3044 | tree high, low, type; | |
3045 | int inverted; | |
3046 | ||
3047 | /* Experiments have shown that it's rarely, if ever useful to | |
3048 | record ranges for enumerations. Presumably this is due to | |
3049 | the fact that they're rarely used directly. They are typically | |
3050 | cast into an integer type and used that way. */ | |
3051 | if (TREE_CODE (TREE_TYPE (op1)) != INTEGER_TYPE) | |
3052 | return 0; | |
3053 | ||
3054 | type = TREE_TYPE (op1); | |
3055 | ||
3056 | switch (TREE_CODE (cond)) | |
3057 | { | |
3058 | case EQ_EXPR: | |
3059 | high = low = op1; | |
3060 | inverted = 0; | |
3061 | break; | |
3062 | ||
3063 | case NE_EXPR: | |
3064 | high = low = op1; | |
3065 | inverted = 1; | |
3066 | break; | |
3067 | ||
3068 | case GE_EXPR: | |
3069 | low = op1; | |
3070 | high = TYPE_MAX_VALUE (type); | |
3071 | inverted = 0; | |
3072 | break; | |
3073 | ||
3074 | case GT_EXPR: | |
3075 | low = int_const_binop (PLUS_EXPR, op1, integer_one_node, 1); | |
3076 | high = TYPE_MAX_VALUE (type); | |
3077 | inverted = 0; | |
3078 | break; | |
3079 | ||
3080 | case LE_EXPR: | |
3081 | high = op1; | |
3082 | low = TYPE_MIN_VALUE (type); | |
3083 | inverted = 0; | |
3084 | break; | |
3085 | ||
3086 | case LT_EXPR: | |
3087 | high = int_const_binop (MINUS_EXPR, op1, integer_one_node, 1); | |
3088 | low = TYPE_MIN_VALUE (type); | |
3089 | inverted = 0; | |
3090 | break; | |
3091 | ||
3092 | default: | |
3093 | return 0; | |
3094 | } | |
3095 | ||
3096 | *hi_p = high; | |
3097 | *lo_p = low; | |
3098 | *inverted_p = inverted; | |
3099 | return 1; | |
3100 | } | |
3101 | ||
3102 | /* Record a range created by COND for basic block BB. */ | |
3103 | ||
3104 | static void | |
3105 | record_range (tree cond, basic_block bb, varray_type *vrp_variables_p) | |
3106 | { | |
3107 | /* We explicitly ignore NE_EXPRs. They rarely allow for meaningful | |
3108 | range optimizations and significantly complicate the implementation. */ | |
3109 | if (TREE_CODE_CLASS (TREE_CODE (cond)) == '<' | |
3110 | && TREE_CODE (cond) != NE_EXPR | |
3111 | && TREE_CODE (TREE_TYPE (TREE_OPERAND (cond, 1))) == INTEGER_TYPE) | |
3112 | { | |
3113 | struct vrp_element *element = ggc_alloc (sizeof (struct vrp_element)); | |
3114 | int ssa_version = SSA_NAME_VERSION (TREE_OPERAND (cond, 0)); | |
3115 | ||
3116 | varray_type *vrp_records_p | |
3117 | = (varray_type *)&VARRAY_GENERIC_PTR (vrp_data, ssa_version); | |
3118 | ||
3119 | element->low = NULL; | |
3120 | element->high = NULL; | |
3121 | element->cond = cond; | |
3122 | element->bb = bb; | |
3123 | ||
3124 | if (*vrp_records_p == NULL) | |
3125 | { | |
3126 | VARRAY_GENERIC_PTR_INIT (*vrp_records_p, 2, "vrp records"); | |
3127 | VARRAY_GENERIC_PTR (vrp_data, ssa_version) = *vrp_records_p; | |
3128 | } | |
3129 | ||
3130 | VARRAY_PUSH_GENERIC_PTR (*vrp_records_p, element); | |
3131 | if (! *vrp_variables_p) | |
3132 | VARRAY_TREE_INIT (*vrp_variables_p, 2, "vrp_variables"); | |
3133 | VARRAY_PUSH_TREE (*vrp_variables_p, TREE_OPERAND (cond, 0)); | |
3134 | } | |
3135 | } | |
3136 | ||
/* Given a conditional statement IF_STMT, return the assignment 'X = Y'
   known to be true depending on which arm of IF_STMT is taken.

   Not all conditional statements will result in a useful assignment.
   Return NULL_TREE in that case.

   Also enter into the available expression table statements of
   the form:

     TRUE ARM		FALSE ARM
     1 = cond		1 = cond'
     0 = cond'		0 = cond

   This allows us to lookup the condition in a dominated block and
   get back a constant indicating if the condition is true.

   TRUE_ARM is nonzero when analyzing the arm taken when the condition
   is true.  BLOCK_AVAIL_EXPRS_P is the per-block available-expression
   stack, BB the block the equivalence holds in, and VRP_VARIABLES_P
   the per-block stack of variables with recorded ranges.  */

static struct eq_expr_value
get_eq_expr_value (tree if_stmt,
		   int true_arm,
		   varray_type *block_avail_exprs_p,
		   basic_block bb,
		   varray_type *vrp_variables_p)
{
  tree cond;
  struct eq_expr_value retval;

  cond = COND_EXPR_COND (if_stmt);
  retval.src = NULL;
  retval.dst = NULL;

  /* If the conditional is a single variable 'X', return 'X = 1' for
     the true arm and 'X = 0' on the false arm.  */
  if (TREE_CODE (cond) == SSA_NAME)
    {
      retval.dst = cond;
      retval.src = (true_arm ? integer_one_node : integer_zero_node);
      return retval;
    }

  /* If we have a comparison expression, then record its result into
     the available expression table.  */
  if (TREE_CODE_CLASS (TREE_CODE (cond)) == '<')
    {
      tree op0 = TREE_OPERAND (cond, 0);
      tree op1 = TREE_OPERAND (cond, 1);

      /* Special case comparing booleans against a constant as we know
	 the value of OP0 on both arms of the branch.  ie, we can record
	 an equivalence for OP0 rather than COND.  */
      if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
	  && TREE_CODE (op0) == SSA_NAME
	  && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
	  && is_gimple_min_invariant (op1))
	{
	  /* On the arm where the comparison holds, OP0 equals OP1;
	     on the other arm it equals the opposite boolean value.  */
	  if ((TREE_CODE (cond) == EQ_EXPR && true_arm)
	      || (TREE_CODE (cond) == NE_EXPR && ! true_arm))
	    {
	      retval.src = op1;
	    }
	  else
	    {
	      if (integer_zerop (op1))
		retval.src = boolean_true_node;
	      else
		retval.src = boolean_false_node;
	    }
	  retval.dst = op0;
	  return retval;
	}

      if (TREE_CODE (op0) == SSA_NAME
	  && (is_gimple_min_invariant (op1) || TREE_CODE (op1) == SSA_NAME))
	{
	  tree inverted = invert_truthvalue (cond);

	  /* When we find an available expression in the hash table, we replace
	     the expression with the LHS of the statement in the hash table.

	     So, we want to build statements such as "1 = <condition>" on the
	     true arm and "0 = <condition>" on the false arm.  That way if we
	     find the expression in the table, we will replace it with its
	     known constant value.  Also insert inversions of the result and
	     condition into the hash table.  */
	  if (true_arm)
	    {
	      record_cond (cond, boolean_true_node, block_avail_exprs_p);
	      record_dominating_conditions (cond, block_avail_exprs_p);
	      record_cond (inverted, boolean_false_node, block_avail_exprs_p);

	      if (TREE_CONSTANT (op1))
		record_range (cond, bb, vrp_variables_p);

	      /* If the conditional is of the form 'X == Y', return 'X = Y'
		 for the true arm.  */
	      if (TREE_CODE (cond) == EQ_EXPR)
		{
		  retval.dst = op0;
		  retval.src = op1;
		  return retval;
		}
	    }
	  else
	    {
	      /* On the false arm the inverted condition is the one
		 known to be true.  */
	      record_cond (inverted, boolean_true_node, block_avail_exprs_p);
	      record_dominating_conditions (inverted, block_avail_exprs_p);
	      record_cond (cond, boolean_false_node, block_avail_exprs_p);

	      if (TREE_CONSTANT (op1))
		record_range (inverted, bb, vrp_variables_p);

	      /* If the conditional is of the form 'X != Y', return 'X = Y'
		 for the false arm.  */
	      if (TREE_CODE (cond) == NE_EXPR)
		{
		  retval.dst = op0;
		  retval.src = op1;
		  return retval;
		}
	    }
	}
    }

  return retval;
}
3262 | ||
3263 | /* Hashing and equality functions for AVAIL_EXPRS. The table stores | |
3264 | MODIFY_EXPR statements. We compute a value number for expressions using | |
3265 | the code of the expression and the SSA numbers of its operands. */ | |
3266 | ||
3267 | static hashval_t | |
3268 | avail_expr_hash (const void *p) | |
3269 | { | |
3270 | stmt_ann_t ann = ((struct expr_hash_elt *)p)->ann; | |
3271 | tree rhs = ((struct expr_hash_elt *)p)->rhs; | |
3272 | hashval_t val = 0; | |
3273 | size_t i; | |
3274 | vuse_optype vuses; | |
3275 | ||
3276 | /* iterative_hash_expr knows how to deal with any expression and | |
3277 | deals with commutative operators as well, so just use it instead | |
3278 | of duplicating such complexities here. */ | |
3279 | val = iterative_hash_expr (rhs, val); | |
3280 | ||
3281 | /* If the hash table entry is not associated with a statement, then we | |
3282 | can just hash the expression and not worry about virtual operands | |
3283 | and such. */ | |
3284 | if (!ann) | |
3285 | return val; | |
3286 | ||
3287 | /* Add the SSA version numbers of every vuse operand. This is important | |
3288 | because compound variables like arrays are not renamed in the | |
3289 | operands. Rather, the rename is done on the virtual variable | |
3290 | representing all the elements of the array. */ | |
3291 | vuses = VUSE_OPS (ann); | |
3292 | for (i = 0; i < NUM_VUSES (vuses); i++) | |
3293 | val = iterative_hash_expr (VUSE_OP (vuses, i), val); | |
3294 | ||
3295 | return val; | |
3296 | } | |
3297 | ||
940db2c8 RH |
3298 | static hashval_t |
3299 | real_avail_expr_hash (const void *p) | |
3300 | { | |
3301 | return ((const struct expr_hash_elt *)p)->hash; | |
3302 | } | |
6de9cd9a DN |
3303 | |
3304 | static int | |
3305 | avail_expr_eq (const void *p1, const void *p2) | |
3306 | { | |
3307 | stmt_ann_t ann1 = ((struct expr_hash_elt *)p1)->ann; | |
3308 | tree rhs1 = ((struct expr_hash_elt *)p1)->rhs; | |
3309 | stmt_ann_t ann2 = ((struct expr_hash_elt *)p2)->ann; | |
3310 | tree rhs2 = ((struct expr_hash_elt *)p2)->rhs; | |
3311 | ||
3312 | /* If they are the same physical expression, return true. */ | |
3313 | if (rhs1 == rhs2 && ann1 == ann2) | |
3314 | return true; | |
3315 | ||
3316 | /* If their codes are not equal, then quit now. */ | |
3317 | if (TREE_CODE (rhs1) != TREE_CODE (rhs2)) | |
3318 | return false; | |
3319 | ||
3320 | /* In case of a collision, both RHS have to be identical and have the | |
3321 | same VUSE operands. */ | |
3322 | if ((TREE_TYPE (rhs1) == TREE_TYPE (rhs2) | |
3323 | || lang_hooks.types_compatible_p (TREE_TYPE (rhs1), TREE_TYPE (rhs2))) | |
3324 | && operand_equal_p (rhs1, rhs2, OEP_PURE_SAME)) | |
3325 | { | |
3326 | vuse_optype ops1 = NULL; | |
3327 | vuse_optype ops2 = NULL; | |
3328 | size_t num_ops1 = 0; | |
3329 | size_t num_ops2 = 0; | |
3330 | size_t i; | |
3331 | ||
3332 | if (ann1) | |
3333 | { | |
3334 | ops1 = VUSE_OPS (ann1); | |
3335 | num_ops1 = NUM_VUSES (ops1); | |
3336 | } | |
3337 | ||
3338 | if (ann2) | |
3339 | { | |
3340 | ops2 = VUSE_OPS (ann2); | |
3341 | num_ops2 = NUM_VUSES (ops2); | |
3342 | } | |
3343 | ||
3344 | /* If the number of virtual uses is different, then we consider | |
3345 | them not equal. */ | |
3346 | if (num_ops1 != num_ops2) | |
3347 | return false; | |
3348 | ||
3349 | for (i = 0; i < num_ops1; i++) | |
3350 | if (VUSE_OP (ops1, i) != VUSE_OP (ops2, i)) | |
3351 | return false; | |
3352 | ||
3353 | #ifdef ENABLE_CHECKING | |
3354 | if (((struct expr_hash_elt *)p1)->hash | |
3355 | != ((struct expr_hash_elt *)p2)->hash) | |
3356 | abort (); | |
3357 | #endif | |
3358 | return true; | |
3359 | } | |
3360 | ||
3361 | return false; | |
3362 | } | |
3363 | ||
3364 | /* Given STMT and a pointer to the block local defintions BLOCK_DEFS_P, | |
3365 | register register all objects set by this statement into BLOCK_DEFS_P | |
3366 | and CURRDEFS. */ | |
3367 | ||
3368 | static void | |
3369 | register_definitions_for_stmt (stmt_ann_t ann, varray_type *block_defs_p) | |
3370 | { | |
3371 | def_optype defs; | |
a32b97a2 BB |
3372 | v_may_def_optype v_may_defs; |
3373 | v_must_def_optype v_must_defs; | |
6de9cd9a DN |
3374 | unsigned int i; |
3375 | ||
3376 | defs = DEF_OPS (ann); | |
3377 | for (i = 0; i < NUM_DEFS (defs); i++) | |
3378 | { | |
3379 | tree def = DEF_OP (defs, i); | |
3380 | ||
3381 | /* FIXME: We shouldn't be registering new defs if the variable | |
3382 | doesn't need to be renamed. */ | |
3383 | register_new_def (def, block_defs_p); | |
3384 | } | |
3385 | ||
3386 | /* Register new virtual definitions made by the statement. */ | |
a32b97a2 BB |
3387 | v_may_defs = V_MAY_DEF_OPS (ann); |
3388 | for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++) | |
3389 | { | |
3390 | /* FIXME: We shouldn't be registering new defs if the variable | |
3391 | doesn't need to be renamed. */ | |
3392 | register_new_def (V_MAY_DEF_RESULT (v_may_defs, i), block_defs_p); | |
3393 | } | |
3394 | ||
3395 | /* Register new virtual mustdefs made by the statement. */ | |
3396 | v_must_defs = V_MUST_DEF_OPS (ann); | |
3397 | for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++) | |
6de9cd9a DN |
3398 | { |
3399 | /* FIXME: We shouldn't be registering new defs if the variable | |
3400 | doesn't need to be renamed. */ | |
a32b97a2 | 3401 | register_new_def (V_MUST_DEF_OP (v_must_defs, i), block_defs_p); |
6de9cd9a DN |
3402 | } |
3403 | } | |
3404 |