1 | /* Conditional constant propagation pass for the GNU compiler. |
2 | Copyright (C) 2000, 2001, 2002, 2003 Free Software Foundation, Inc. | |
3 | Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org> | |
4 | Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com> | |
5 | ||
6 | This file is part of GCC. | |
7 | ||
8 | GCC is free software; you can redistribute it and/or modify it | |
9 | under the terms of the GNU General Public License as published by the | |
10 | Free Software Foundation; either version 2, or (at your option) any | |
11 | later version. | |
12 | ||
13 | GCC is distributed in the hope that it will be useful, but WITHOUT | |
14 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
15 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
16 | for more details. | |
17 | ||
18 | You should have received a copy of the GNU General Public License | |
19 | along with GCC; see the file COPYING. If not, write to the Free | |
20 | Software Foundation, 59 Temple Place - Suite 330, Boston, MA | |
21 | 02111-1307, USA. */ | |
22 | ||
23 | /* Conditional constant propagation. | |
24 | ||
25 | References: | |
26 | ||
27 | Constant propagation with conditional branches, | |
28 | Wegman and Zadeck, ACM TOPLAS 13(2):181-210. | |
29 | ||
30 | Building an Optimizing Compiler, | |
31 | Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9. | |
32 | ||
33 | Advanced Compiler Design and Implementation, | |
34 | Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */ | |
35 | ||
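/* As a rough illustration of what this pass does (the example is made up
   for exposition, it is not taken from any test case): given GIMPLE like

       x_1 = 4;
       if (x_1 > 1)
         y_2 = 2;
       else
         y_3 = 3;
       y_4 = PHI <y_2, y_3>;
       return y_4;

   the pass discovers that x_1 is the constant 4, folds the predicate to
   true, never marks the else edge executable, so the PHI node only meets
   the value 2 arriving on the executable edge, and the function body is
   reduced to 'return 2'.  */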
36 | #include "config.h" | |
37 | #include "system.h" | |
38 | #include "coretypes.h" | |
39 | #include "tm.h" | |
40 | #include "errors.h" | |
41 | #include "ggc.h" | |
42 | #include "tree.h" | |
43 | #include "langhooks.h" | |
44 | ||
45 | /* These RTL headers are needed for basic-block.h. */ | |
46 | #include "rtl.h" | |
47 | #include "tm_p.h" | |
48 | #include "hard-reg-set.h" | |
49 | #include "basic-block.h" | |
50 | ||
51 | #include "diagnostic.h" | |
52 | #include "tree-inline.h" | |
53 | #include "tree-flow.h" | |
54 | #include "tree-gimple.h"
55 | #include "tree-dump.h" |
56 | #include "tree-pass.h" | |
57 | #include "timevar.h" | |
58 | #include "expr.h" | |
59 | #include "flags.h" | |
60 | ||
61 | ||
62 | /* Possible lattice values. */ | |
63 | typedef enum | |
64 | { | |
65 | UNINITIALIZED = 0, | |
66 | UNDEFINED, | |
67 | CONSTANT, | |
68 | VARYING | |
69 | } latticevalue; | |
70 | ||
71 | /* Use the TREE_VISITED bitflag to mark statements and PHI nodes that have | |
72 | been deemed VARYING and shouldn't be simulated again. */ | |
73 | #define DONT_SIMULATE_AGAIN(T) TREE_VISITED (T) | |
74 | ||
75 | /* Main structure for CCP. Contains the lattice value and, if it's a | |
76 | constant, the constant value. */ | |
77 | typedef struct | |
78 | { | |
79 | latticevalue lattice_val; | |
80 | tree const_val; | |
81 | } value; | |
82 | ||
83 | /* A bitmap to keep track of executable blocks in the CFG. */ | |
84 | static sbitmap executable_blocks; | |
85 | ||
86 | /* Array of control flow edges on the worklist. */ | |
87 | static GTY(()) varray_type cfg_blocks = NULL; | |
88 | ||
89 | static unsigned int cfg_blocks_num = 0; | |
90 | static int cfg_blocks_tail; | |
91 | static int cfg_blocks_head; | |
92 | ||
93 | static sbitmap bb_in_list; | |
94 | ||
95 | /* This is used to track the current value of each variable. */ | |
96 | static value *value_vector; | |
97 | ||
98 | /* Worklist of SSA edges which will need reexamination as their definition | |
99 | has changed. SSA edges are def-use edges in the SSA web. For each | |
100 | edge, we store the definition statement or PHI node D. The destination | |
101 | nodes that need to be visited are accessed using immediate_uses |
102 | (D). */ | |
103 | static GTY(()) varray_type ssa_edges; |
104 | ||
105 | /* Identical to SSA_EDGES. For performance reasons, the list of SSA |
106 | edges is split into two. One contains all SSA edges who need to be | |
107 | reexamined because their lattice value changed to varying (this | |
108 | worklist), and the other contains all other SSA edges to be | |
109 | reexamined (ssa_edges). | |
110 | ||
111 | Since most values in the program are varying, the ideal situation | |
112 | is to move them to that lattice value as quickly as possible. | |
113 | Thus, it doesn't make sense to process any other type of lattice | |
114 | value until all varying values are propagated fully, which is one | |
115 | thing using the varying worklist achieves. In addition, if you | |
116 | don't use a separate worklist for varying edges, you end up with | |
117 | situations where lattice values move from | |
118 | undefined->constant->varying instead of undefined->varying. | |
119 | */ | |
120 | static GTY(()) varray_type varying_ssa_edges; | |
121 | ||
122 | ||
123 | static void initialize (void); |
124 | static void finalize (void); | |
125 | static void visit_phi_node (tree); | |
126 | static tree ccp_fold (tree); | |
127 | static value cp_lattice_meet (value, value); | |
128 | static void visit_stmt (tree); | |
129 | static void visit_cond_stmt (tree); | |
130 | static void visit_assignment (tree); | |
131 | static void add_var_to_ssa_edges_worklist (tree, value);
132 | static void add_outgoing_control_edges (basic_block); |
133 | static void add_control_edge (edge); | |
134 | static void def_to_varying (tree); | |
135 | static void set_lattice_value (tree, value); | |
136 | static void simulate_block (basic_block); | |
137 | static void simulate_stmt (tree); | |
138 | static void substitute_and_fold (void); | |
139 | static value evaluate_stmt (tree); | |
140 | static void dump_lattice_value (FILE *, const char *, value); | |
141 | static bool replace_uses_in (tree, bool *); | |
142 | static latticevalue likely_value (tree); | |
143 | static tree get_rhs (tree); | |
144 | static void set_rhs (tree *, tree); | |
145 | static value *get_value (tree); | |
146 | static value get_default_value (tree); | |
147 | static tree ccp_fold_builtin (tree, tree); | |
148 | static bool get_strlen (tree, tree *, bitmap); | |
149 | static inline bool cfg_blocks_empty_p (void); | |
150 | static void cfg_blocks_add (basic_block); | |
151 | static basic_block cfg_blocks_get (void); | |
152 | static bool need_imm_uses_for (tree var); | |
153 | ||
154 | /* Process an SSA edge worklist. WORKLIST is the SSA edge worklist to |
155 | drain. This pops statements off the given WORKLIST and processes | |
156 | them until there are no more statements on WORKLIST. */ | |
157 | ||
158 | static void | |
159 | process_ssa_edge_worklist (varray_type *worklist) | |
160 | { | |
161 | /* Drain the entire worklist. */ | |
162 | while (VARRAY_ACTIVE_SIZE (*worklist) > 0) | |
163 | { | |
164 | /* Pull the statement to simulate off the worklist. */ | |
165 | tree stmt = VARRAY_TOP_TREE (*worklist); | |
166 | stmt_ann_t ann = stmt_ann (stmt); | |
167 | VARRAY_POP (*worklist); | |
168 | ||
169 | /* visit_stmt can "cancel" reevaluation of some statements. | |
170 | If it does, then in_ccp_worklist will be zero. */ | |
171 | if (ann->in_ccp_worklist) | |
172 | { | |
173 | ann->in_ccp_worklist = 0; | |
174 | simulate_stmt (stmt); | |
175 | } | |
176 | } | |
177 | } | |
178 | ||
179 | /* Main entry point for SSA Conditional Constant Propagation. | |
180 | ||
181 | On exit, VARS_TO_RENAME will contain the symbols that have been exposed | |
182 | by the propagation of ADDR_EXPR expressions into pointer dereferences | |
183 | and need to be renamed into SSA. */ | |
188 | ||
189 | static void | |
190 | tree_ssa_ccp (void) | |
191 | { | |
192 | initialize (); | |
193 | ||
194 | /* Iterate until the worklists are empty. */ | |
195 | while (!cfg_blocks_empty_p () |
196 | || VARRAY_ACTIVE_SIZE (ssa_edges) > 0 | |
197 | || VARRAY_ACTIVE_SIZE (varying_ssa_edges) > 0) | |
198 | { |
199 | if (!cfg_blocks_empty_p ()) | |
200 | { | |
201 | /* Pull the next block to simulate off the worklist. */ | |
202 | basic_block dest_block = cfg_blocks_get (); | |
203 | simulate_block (dest_block); | |
204 | } | |
205 | ||
206 | /* In order to move things to varying as quickly as |
207 | possible, process the VARYING_SSA_EDGES worklist first. */ | |
208 | process_ssa_edge_worklist (&varying_ssa_edges); | |
209 | ||
210 | /* Now process the SSA_EDGES worklist. */ | |
211 | process_ssa_edge_worklist (&ssa_edges); | |
212 | } |
213 | ||
214 | /* Now perform substitutions based on the known constant values. */ | |
215 | substitute_and_fold (); | |
216 | ||
217 | /* Now cleanup any unreachable code. */ | |
218 | cleanup_tree_cfg (); | |
219 | ||
220 | /* Free allocated memory. */ | |
221 | finalize (); | |
222 | ||
223 | /* Debugging dumps. */ | |
224 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
225 | { | |
226 | dump_referenced_vars (dump_file); | |
227 | fprintf (dump_file, "\n\n"); | |
228 | } | |
229 | } | |
230 | ||
231 | static bool | |
232 | gate_ccp (void) | |
233 | { | |
234 | return flag_tree_ccp != 0; | |
235 | } | |
236 | ||
237 | struct tree_opt_pass pass_ccp = | |
238 | { | |
239 | "ccp", /* name */ | |
240 | gate_ccp, /* gate */ | |
241 | tree_ssa_ccp, /* execute */ | |
242 | NULL, /* sub */ | |
243 | NULL, /* next */ | |
244 | 0, /* static_pass_number */ | |
245 | TV_TREE_CCP, /* tv_id */ | |
246 | PROP_cfg | PROP_ssa, /* properties_required */ | |
247 | 0, /* properties_provided */ | |
248 | 0, /* properties_destroyed */ | |
249 | 0, /* todo_flags_start */ | |
250 | TODO_dump_func | TODO_rename_vars | |
251 | | TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */ | |
252 | }; | |
253 | ||
254 | ||
255 | /* Get the constant value associated with variable VAR. */ | |
256 | ||
257 | static value * | |
258 | get_value (tree var) | |
259 | { | |
260 | value *val; | |
261 | ||
262 | #if defined ENABLE_CHECKING | |
263 | if (TREE_CODE (var) != SSA_NAME) | |
264 | abort (); | |
265 | #endif | |
266 | ||
267 | val = &value_vector[SSA_NAME_VERSION (var)]; | |
268 | if (val->lattice_val == UNINITIALIZED) | |
269 | *val = get_default_value (var); | |
270 | ||
271 | return val; | |
272 | } | |
273 | ||
274 | ||
275 | /* Simulate the execution of BLOCK. Evaluate the statement associated | |
276 | with each variable reference inside the block. */ | |
277 | ||
278 | static void | |
279 | simulate_block (basic_block block) | |
280 | { | |
281 | tree phi; | |
282 | ||
283 | /* There is nothing to do for the exit block. */ | |
284 | if (block == EXIT_BLOCK_PTR) | |
285 | return; | |
286 | ||
287 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
288 | fprintf (dump_file, "\nSimulating block %d\n", block->index); | |
289 | ||
290 | /* Always simulate PHI nodes, even if we have simulated this block | |
291 | before. */ | |
292 | for (phi = phi_nodes (block); phi; phi = TREE_CHAIN (phi)) | |
293 | visit_phi_node (phi); | |
294 | ||
295 | /* If this is the first time we've simulated this block, then we | |
296 | must simulate each of its statements. */ | |
297 | if (!TEST_BIT (executable_blocks, block->index)) | |
298 | { | |
299 | block_stmt_iterator j; | |
300 | unsigned int normal_edge_count; | |
301 | edge e, normal_edge; | |
302 | ||
303 | /* Note that we have simulated this block. */ | |
304 | SET_BIT (executable_blocks, block->index); | |
305 | ||
306 | for (j = bsi_start (block); !bsi_end_p (j); bsi_next (&j)) | |
307 | visit_stmt (bsi_stmt (j)); | |
308 | ||
309 | /* We can not predict when abnormal edges will be executed, so | |
310 | once a block is considered executable, we consider any | |
311 | outgoing abnormal edges as executable. | |
312 | ||
313 | At the same time, if this block has only one successor that is | |
314 | reached by non-abnormal edges, then add that successor to the | |
315 | worklist. */ | |
316 | normal_edge_count = 0; | |
317 | normal_edge = NULL; | |
318 | for (e = block->succ; e; e = e->succ_next) | |
319 | { | |
320 | if (e->flags & EDGE_ABNORMAL) | |
321 | { | |
322 | add_control_edge (e); | |
323 | } | |
324 | else | |
325 | { | |
326 | normal_edge_count++; | |
327 | normal_edge = e; | |
328 | } | |
329 | } | |
330 | ||
331 | if (normal_edge_count == 1) | |
332 | add_control_edge (normal_edge); | |
333 | } | |
334 | } | |
335 | ||
336 | ||
337 | /* Simulate statement USE_STMT, which has been reached by following the | |
338 | def-use edges of a definition whose lattice value has changed. */ | |
339 | ||
340 | static void | |
341 | simulate_stmt (tree use_stmt) | |
342 | { | |
343 | basic_block use_bb = bb_for_stmt (use_stmt); | |
344 | ||
345 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
346 | { | |
347 | fprintf (dump_file, "\nSimulating statement (from ssa_edges): "); | |
348 | print_generic_stmt (dump_file, use_stmt, dump_flags); | |
349 | } | |
350 | ||
351 | if (TREE_CODE (use_stmt) == PHI_NODE) | |
352 | { | |
353 | /* PHI nodes are always visited, regardless of whether or not the | |
354 | destination block is executable. */ | |
355 | visit_phi_node (use_stmt); | |
356 | } | |
357 | else if (TEST_BIT (executable_blocks, use_bb->index)) | |
358 | { | |
359 | /* Otherwise, visit the statement containing the use reached by | |
360 | DEF, only if the destination block is marked executable. */ | |
361 | visit_stmt (use_stmt); | |
362 | } | |
363 | } | |
364 | ||
365 | ||
366 | /* Perform final substitution and folding. After this pass the program | |
367 | should still be in SSA form. */ | |
368 | ||
369 | static void | |
370 | substitute_and_fold (void) | |
371 | { | |
372 | basic_block bb; | |
373 | ||
374 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
375 | fprintf (dump_file, | |
376 | "\nSubstituing constants and folding statements\n\n"); | |
377 | ||
378 | /* Substitute constants in every statement of every basic block. */ | |
379 | FOR_EACH_BB (bb) | |
380 | { | |
381 | block_stmt_iterator i; | |
382 | tree phi; | |
383 | ||
384 | /* Propagate our known constants into PHI nodes. */ | |
385 | for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi)) | |
386 | { | |
387 | int i; | |
388 | ||
389 | for (i = 0; i < PHI_NUM_ARGS (phi); i++) | |
390 | { | |
391 | value *new_val; | |
392 | tree *orig_p = &PHI_ARG_DEF (phi, i); | |
393 | ||
394 | if (! SSA_VAR_P (*orig_p)) | |
395 | break; | |
396 | ||
397 | new_val = get_value (*orig_p); | |
398 | if (new_val->lattice_val == CONSTANT | |
399 | && may_propagate_copy (*orig_p, new_val->const_val)) | |
400 | *orig_p = new_val->const_val; | |
401 | } | |
402 | } | |
403 | ||
404 | for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i)) | |
405 | { | |
406 | bool replaced_address; | |
407 | tree stmt = bsi_stmt (i); | |
408 | ||
409 | /* Skip statements that have been folded already. */ | |
410 | if (stmt_modified_p (stmt) || !is_exec_stmt (stmt)) | |
411 | continue; | |
412 | ||
413 | /* Replace the statement with its folded version and mark it | |
414 | folded. */ | |
415 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
416 | { | |
417 | fprintf (dump_file, "Line %d: replaced ", get_lineno (stmt)); | |
418 | print_generic_stmt (dump_file, stmt, TDF_SLIM); | |
419 | } | |
420 | ||
421 | if (replace_uses_in (stmt, &replaced_address)) | |
422 | { | |
423 | bool changed = fold_stmt (bsi_stmt_ptr (i)); | |
424 | stmt = bsi_stmt(i); | |
425 | modify_stmt (stmt); | |
426 | /* If we folded a builtin function, we'll likely | |
427 | need to rename VDEFs. */ | |
428 | if (replaced_address || changed) | |
429 | mark_new_vars_to_rename (stmt, vars_to_rename); | |
430 | } | |
431 | ||
432 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
433 | { | |
434 | fprintf (dump_file, " with "); | |
435 | print_generic_stmt (dump_file, stmt, TDF_SLIM); | |
436 | fprintf (dump_file, "\n"); | |
437 | } | |
438 | } | |
439 | } | |
440 | } | |
441 | ||
442 | ||
443 | /* Loop through the PHI_NODE's parameters for BLOCK and compare their | |
444 | lattice values to determine PHI_NODE's lattice value. The value of a | |
445 | PHI node is determined by calling cp_lattice_meet() with all the arguments | |
446 | of the PHI node that are incoming via executable edges. */ | |
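/* For instance (illustrative values only): for y_4 = PHI <2, y_3>, where
   the edge carrying the constant 2 is executable but the edge carrying
   y_3 has not been marked executable yet, only the first argument takes
   part in the meet and the PHI result is CONSTANT 2.  If the second edge
   later becomes executable and y_3 turns out to be CONSTANT 3, the meet
   of 2 and 3 lowers the result to VARYING.  */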
447 | ||
448 | static void | |
449 | visit_phi_node (tree phi) | |
450 | { | |
451 | bool short_circuit = 0; | |
452 | value phi_val, *curr_val; | |
453 | int i; | |
454 | ||
455 | /* If the PHI node has already been deemed to be VARYING, don't simulate | |
456 | it again. */ | |
457 | if (DONT_SIMULATE_AGAIN (phi)) | |
458 | return; | |
459 | ||
460 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
461 | { | |
462 | fprintf (dump_file, "\nVisiting PHI node: "); | |
463 | print_generic_expr (dump_file, phi, dump_flags); | |
464 | } | |
465 | ||
466 | curr_val = get_value (PHI_RESULT (phi)); | |
467 | switch (curr_val->lattice_val) | |
468 | { | |
469 | case VARYING: | |
470 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
471 | fprintf (dump_file, "\n Shortcircuit. Default of VARYING."); | |
472 | short_circuit = 1; | |
473 | break; | |
474 | ||
475 | case CONSTANT: | |
476 | phi_val = *curr_val; | |
477 | break; | |
478 | ||
479 | case UNDEFINED: | |
480 | case UNINITIALIZED: | |
481 | phi_val.lattice_val = UNDEFINED; | |
482 | phi_val.const_val = NULL_TREE; | |
483 | break; | |
484 | ||
485 | default: | |
486 | abort (); | |
487 | } | |
488 | ||
489 | /* If the variable is volatile or the variable is never referenced in a | |
490 | real operand, then consider the PHI node VARYING. */ | |
491 | if (short_circuit || TREE_THIS_VOLATILE (SSA_NAME_VAR (PHI_RESULT (phi)))) | |
492 | { | |
493 | phi_val.lattice_val = VARYING; | |
494 | phi_val.const_val = NULL; | |
495 | } | |
496 | else | |
497 | for (i = 0; i < PHI_NUM_ARGS (phi); i++) | |
498 | { | |
499 | /* Compute the meet operator over all the PHI arguments. */
500 | edge e = PHI_ARG_EDGE (phi, i); |
501 | ||
502 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
503 | { | |
504 | fprintf (dump_file, | |
505 | "\n Argument #%d (%d -> %d %sexecutable)\n", | |
506 | i, e->src->index, e->dest->index, | |
507 | (e->flags & EDGE_EXECUTABLE) ? "" : "not "); | |
508 | } | |
509 | ||
510 | /* If the incoming edge is executable, compute the meet operator for | |
511 | the existing value of the PHI node and the current PHI argument. */ | |
512 | if (e->flags & EDGE_EXECUTABLE) | |
513 | { | |
514 | tree rdef = PHI_ARG_DEF (phi, i); | |
515 | value *rdef_val, val; | |
516 | ||
517 | if (is_gimple_min_invariant (rdef)) | |
518 | { | |
519 | val.lattice_val = CONSTANT; | |
520 | val.const_val = rdef; | |
521 | rdef_val = &val; | |
522 | } | |
523 | else | |
524 | rdef_val = get_value (rdef); | |
525 | ||
526 | phi_val = cp_lattice_meet (phi_val, *rdef_val); | |
527 | ||
528 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
529 | { | |
530 | fprintf (dump_file, "\t"); | |
531 | print_generic_expr (dump_file, rdef, dump_flags); | |
532 | dump_lattice_value (dump_file, "\tValue: ", *rdef_val); | |
533 | fprintf (dump_file, "\n"); | |
534 | } | |
535 | ||
536 | if (phi_val.lattice_val == VARYING) | |
537 | break; | |
538 | } | |
539 | } | |
540 | ||
541 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
542 | { | |
543 | dump_lattice_value (dump_file, "\n PHI node value: ", phi_val); | |
544 | fprintf (dump_file, "\n\n"); | |
545 | } | |
546 | ||
547 | set_lattice_value (PHI_RESULT (phi), phi_val); | |
548 | if (phi_val.lattice_val == VARYING) | |
549 | DONT_SIMULATE_AGAIN (phi) = 1; | |
550 | } | |
551 | ||
552 | ||
553 | /* Compute the meet operator between VAL1 and VAL2: | |
554 | ||
555 | any M UNDEFINED = any | |
556 | any M VARYING = VARYING | |
557 | Ci M Cj = Ci if (i == j) | |
558 | Ci M Cj = VARYING if (i != j) */ | |
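/* Worked example (values chosen only to fix ideas): the meet of
   CONSTANT 5 with UNDEFINED is CONSTANT 5; the meet of CONSTANT 5 with
   CONSTANT 5 is still CONSTANT 5; the meet of CONSTANT 5 with either
   CONSTANT 7 or VARYING is VARYING.  */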
559 | static value | |
560 | cp_lattice_meet (value val1, value val2) | |
561 | { | |
562 | value result; | |
563 | ||
564 | /* any M UNDEFINED = any. */ | |
565 | if (val1.lattice_val == UNDEFINED) | |
566 | return val2; | |
567 | else if (val2.lattice_val == UNDEFINED) | |
568 | return val1; | |
569 | ||
570 | /* any M VARYING = VARYING. */ | |
571 | if (val1.lattice_val == VARYING || val2.lattice_val == VARYING) | |
572 | { | |
573 | result.lattice_val = VARYING; | |
574 | result.const_val = NULL_TREE; | |
575 | return result; | |
576 | } | |
577 | ||
578 | /* Ci M Cj = Ci if (i == j) | |
579 | Ci M Cj = VARYING if (i != j) */ | |
580 | if (simple_cst_equal (val1.const_val, val2.const_val) == 1) | |
581 | { | |
582 | result.lattice_val = CONSTANT; | |
583 | result.const_val = val1.const_val; | |
584 | } | |
585 | else | |
586 | { | |
587 | result.lattice_val = VARYING; | |
588 | result.const_val = NULL_TREE; | |
589 | } | |
590 | ||
591 | return result; | |
592 | } | |
593 | ||
594 | ||
595 | /* Evaluate statement STMT. If the statement produces an output value and | |
596 | its evaluation changes the lattice value of its output, do the following: | |
597 | ||
598 | - If the statement is an assignment, add all the SSA edges starting at | |
599 | this definition. | |
600 | ||
601 | - If the statement is a conditional branch: | |
602 | . If the statement evaluates to non-constant, add all edges to | |
603 | worklist. | |
604 | . If the statement is constant, add the edge executed as the | |
605 | result of the branch. */ | |
606 | ||
607 | static void | |
608 | visit_stmt (tree stmt) | |
609 | { | |
610 | size_t i; | |
611 | stmt_ann_t ann; | |
612 | def_optype defs; | |
613 | vdef_optype vdefs; | |
614 | ||
615 | /* If the statement has already been deemed to be VARYING, don't simulate | |
616 | it again. */ | |
617 | if (DONT_SIMULATE_AGAIN (stmt)) | |
618 | return; | |
619 | ||
620 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
621 | { | |
622 | fprintf (dump_file, "\nVisiting statement: "); | |
623 | print_generic_stmt (dump_file, stmt, TDF_SLIM); | |
624 | fprintf (dump_file, "\n"); | |
625 | } | |
626 | ||
627 | ann = stmt_ann (stmt); | |
628 | ||
629 | /* If this statement is already in the worklist then "cancel" it. The | |
630 | reevaluation implied by the worklist entry will produce the same | |
631 | value we generate here and thus reevaluating it again from the | |
632 | worklist is pointless. */ | |
633 | if (ann->in_ccp_worklist) | |
634 | ann->in_ccp_worklist = 0; | |
635 | ||
636 | /* Now examine the statement. If the statement is an assignment that | |
637 | produces a single output value, evaluate its RHS to see if the lattice | |
638 | value of its output has changed. */ | |
639 | if (TREE_CODE (stmt) == MODIFY_EXPR | |
640 | && TREE_CODE (TREE_OPERAND (stmt, 0)) == SSA_NAME) | |
641 | visit_assignment (stmt); | |
642 | ||
643 | /* Definitions made by statements other than assignments to SSA_NAMEs | |
644 | represent unknown modifications to their outputs. Mark them VARYING. */ | |
645 | else if (NUM_DEFS (defs = DEF_OPS (ann)) != 0) | |
646 | { | |
647 | DONT_SIMULATE_AGAIN (stmt) = 1; | |
648 | for (i = 0; i < NUM_DEFS (defs); i++) | |
649 | { | |
650 | tree def = DEF_OP (defs, i); | |
651 | def_to_varying (def); | |
652 | } | |
653 | } | |
654 | ||
655 | /* If STMT is a conditional branch, see if we can determine which branch | |
656 | will be taken. */ | |
657 | else if (TREE_CODE (stmt) == COND_EXPR || TREE_CODE (stmt) == SWITCH_EXPR) | |
658 | visit_cond_stmt (stmt); | |
659 | ||
660 | /* Any other kind of statement is not interesting for constant | |
661 | propagation and, therefore, not worth simulating. */ | |
662 | else | |
663 | { | |
664 | DONT_SIMULATE_AGAIN (stmt) = 1; | |
665 | ||
666 | /* If STMT is a computed goto, then mark all the output edges | |
667 | executable. */ | |
668 | if (computed_goto_p (stmt)) | |
669 | add_outgoing_control_edges (bb_for_stmt (stmt)); | |
670 | } | |
671 | ||
672 | /* Mark all VDEF operands VARYING. */ | |
673 | vdefs = VDEF_OPS (ann); | |
674 | for (i = 0; i < NUM_VDEFS (vdefs); i++) | |
675 | def_to_varying (VDEF_RESULT (vdefs, i)); | |
676 | } | |
677 | ||
678 | ||
679 | /* Visit the assignment statement STMT. Set the value of its LHS to the | |
680 | value computed by the RHS. */ | |
681 | ||
682 | static void | |
683 | visit_assignment (tree stmt) | |
684 | { | |
685 | value val; | |
686 | tree lhs, rhs; | |
687 | ||
688 | lhs = TREE_OPERAND (stmt, 0); | |
689 | rhs = TREE_OPERAND (stmt, 1); | |
690 | ||
691 | if (TREE_THIS_VOLATILE (SSA_NAME_VAR (lhs))) | |
692 | { | |
693 | /* Volatile variables are always VARYING. */ | |
694 | val.lattice_val = VARYING; | |
695 | val.const_val = NULL_TREE; | |
696 | } | |
697 | else if (TREE_CODE (rhs) == SSA_NAME) | |
698 | { | |
699 | /* For a simple copy operation, we copy the lattice values. */ | |
700 | value *nval = get_value (rhs); | |
701 | val = *nval; | |
702 | } | |
703 | else | |
704 | { | |
705 | /* Evaluate the statement. */ | |
706 | val = evaluate_stmt (stmt); | |
707 | } | |
708 | ||
709 | /* FIXME: Hack. If this was a definition of a bitfield, we need to widen | |
710 | the constant value into the type of the destination variable. This | |
711 | should not be necessary if GCC represented bitfields properly. */ | |
712 | { | |
713 | tree lhs = TREE_OPERAND (stmt, 0); | |
714 | if (val.lattice_val == CONSTANT | |
715 | && TREE_CODE (lhs) == COMPONENT_REF | |
716 | && DECL_BIT_FIELD (TREE_OPERAND (lhs, 1))) | |
717 | { | |
718 | tree w = widen_bitfield (val.const_val, TREE_OPERAND (lhs, 1), lhs); | |
719 | ||
720 | if (w && is_gimple_min_invariant (w)) | |
721 | val.const_val = w; | |
722 | else | |
723 | { | |
724 | val.lattice_val = VARYING; | |
725 | val.const_val = NULL; | |
726 | } | |
727 | } | |
728 | } | |
729 | ||
730 | /* Set the lattice value of the statement's output. */ | |
731 | set_lattice_value (lhs, val); | |
732 | if (val.lattice_val == VARYING) | |
733 | DONT_SIMULATE_AGAIN (stmt) = 1; | |
734 | } | |
735 | ||
736 | ||
737 | /* Visit the conditional statement STMT. If it evaluates to a constant value, | |
738 | mark outgoing edges appropriately. */ | |
739 | ||
740 | static void | |
741 | visit_cond_stmt (tree stmt) | |
742 | { | |
743 | edge e; | |
744 | value val; | |
745 | basic_block block; | |
746 | ||
747 | block = bb_for_stmt (stmt); | |
748 | val = evaluate_stmt (stmt); | |
749 | ||
750 | /* Find which edge out of the conditional block will be taken and add it | |
751 | to the worklist. If no single edge can be determined statically, add | |
752 | all outgoing edges from BLOCK. */ | |
753 | e = find_taken_edge (block, val.const_val); | |
754 | if (e) | |
755 | add_control_edge (e); | |
756 | else | |
757 | { | |
758 | DONT_SIMULATE_AGAIN (stmt) = 1; | |
759 | add_outgoing_control_edges (block); | |
760 | } | |
761 | } | |
762 | ||
763 | ||
764 | /* Add all the edges coming out of BB to the control flow worklist. */ | |
765 | ||
766 | static void | |
767 | add_outgoing_control_edges (basic_block bb) | |
768 | { | |
769 | edge e; | |
770 | ||
771 | for (e = bb->succ; e; e = e->succ_next) | |
772 | add_control_edge (e); | |
773 | } | |
774 | ||
775 | ||
776 | /* Add edge E to the control flow worklist. */ | |
777 | ||
778 | static void | |
779 | add_control_edge (edge e) | |
780 | { | |
781 | basic_block bb = e->dest; | |
782 | if (bb == EXIT_BLOCK_PTR) | |
783 | return; | |
784 | ||
785 | /* If the edge had already been executed, skip it. */ | |
786 | if (e->flags & EDGE_EXECUTABLE) | |
787 | return; | |
788 | ||
789 | e->flags |= EDGE_EXECUTABLE; | |
790 | ||
791 | /* If the block is already in the list, we're done. */ | |
792 | if (TEST_BIT (bb_in_list, bb->index)) | |
793 | return; | |
794 | ||
795 | cfg_blocks_add (bb); | |
796 | ||
797 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
798 | fprintf (dump_file, "Adding Destination of edge (%d -> %d) to worklist\n\n", | |
799 | e->src->index, e->dest->index); | |
800 | } | |
801 | ||
802 | ||
803 | /* CCP specific front-end to the non-destructive constant folding routines. | |
804 | ||
805 | Attempt to simplify the RHS of STMT knowing that one or more | |
806 | operands are constants. | |
807 | ||
808 | If simplification is possible, return the simplified RHS, | |
809 | otherwise return the original RHS. */ | |
810 | ||
811 | static tree | |
812 | ccp_fold (tree stmt) | |
813 | { | |
814 | tree rhs = get_rhs (stmt); | |
815 | enum tree_code code = TREE_CODE (rhs); | |
816 | int kind = TREE_CODE_CLASS (code); | |
817 | tree retval = NULL_TREE; | |
818 | ||
819 | /* If the RHS is just a variable, then that variable must now have | |
820 | a constant value that we can return directly. */ | |
821 | if (TREE_CODE (rhs) == SSA_NAME) | |
822 | return get_value (rhs)->const_val; | |
823 | ||
824 | /* Unary operators. Note that we know the single operand must | |
825 | be a constant. So this should almost always return a | |
826 | simplified RHS. */ | |
827 | if (kind == '1') | |
828 | { | |
829 | /* Handle unary operators which can appear in GIMPLE form. */ | |
830 | tree op0 = TREE_OPERAND (rhs, 0); | |
831 | ||
832 | /* Simplify the operand down to a constant. */ | |
833 | if (TREE_CODE (op0) == SSA_NAME) | |
834 | { | |
835 | value *val = get_value (op0); | |
836 | if (val->lattice_val == CONSTANT) | |
837 | op0 = get_value (op0)->const_val; | |
838 | } | |
839 | ||
840 | retval = nondestructive_fold_unary_to_constant (code, | |
841 | TREE_TYPE (rhs), | |
842 | op0); | |
843 | ||
844 | /* If we folded, but did not create an invariant, then we can not | |
845 | use this expression. */ | |
846 | if (retval && ! is_gimple_min_invariant (retval)) | |
847 | return NULL; | |
848 | ||
849 | /* If we could not fold the expression, but the arguments are all | |
850 | constants and gimple values, then build and return the new | |
851 | expression. | |
852 | ||
853 | In some cases the new expression is still something we can | |
854 | use as a replacement for an argument. This happens with | |
855 | NOP conversions of types for example. | |
856 | ||
857 | In other cases the new expression can not be used as a | |
858 | replacement for an argument (as it would create non-gimple | |
859 | code). But the new expression can still be used to derive | |
860 | other constants. */ | |
861 | if (! retval && is_gimple_min_invariant (op0)) | |
862 | return build1 (code, TREE_TYPE (rhs), op0); | |
863 | } | |
864 | ||
865 | /* Binary and comparison operators. We know one or both of the | |
866 | operands are constants. */ | |
867 | else if (kind == '2' | |
868 | || kind == '<' | |
869 | || code == TRUTH_AND_EXPR | |
870 | || code == TRUTH_OR_EXPR | |
871 | || code == TRUTH_XOR_EXPR) | |
872 | { | |
873 | /* Handle binary and comparison operators that can appear in | |
874 | GIMPLE form. */ | |
875 | tree op0 = TREE_OPERAND (rhs, 0); | |
876 | tree op1 = TREE_OPERAND (rhs, 1); | |
877 | ||
878 | /* Simplify the operands down to constants when appropriate. */ | |
879 | if (TREE_CODE (op0) == SSA_NAME) | |
880 | { | |
881 | value *val = get_value (op0); | |
882 | if (val->lattice_val == CONSTANT) | |
883 | op0 = val->const_val; | |
884 | } | |
885 | ||
886 | if (TREE_CODE (op1) == SSA_NAME) | |
887 | { | |
888 | value *val = get_value (op1); | |
889 | if (val->lattice_val == CONSTANT) | |
890 | op1 = val->const_val; | |
891 | } | |
892 | ||
893 | retval = nondestructive_fold_binary_to_constant (code, | |
894 | TREE_TYPE (rhs), | |
895 | op0, op1); | |
896 | ||
897 | /* If we folded, but did not create an invariant, then we can not | |
898 | use this expression. */ | |
899 | if (retval && ! is_gimple_min_invariant (retval)) | |
900 | return NULL; | |
901 | ||
902 | /* If we could not fold the expression, but the arguments are all | |
903 | constants and gimple values, then build and return the new | |
904 | expression. | |
905 | ||
906 | In some cases the new expression is still something we can | |
907 | use as a replacement for an argument. This happens with | |
908 | NOP conversions of types for example. | |
909 | ||
910 | In other cases the new expression can not be used as a | |
911 | replacement for an argument (as it would create non-gimple | |
912 | code). But the new expression can still be used to derive | |
913 | other constants. */ | |
914 | if (! retval | |
915 | && is_gimple_min_invariant (op0) | |
916 | && is_gimple_min_invariant (op1)) | |
917 | return build (code, TREE_TYPE (rhs), op0, op1); | |
918 | } | |
919 | ||
920 | /* We may be able to fold away calls to builtin functions if their | |
921 | arguments are constants. */
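/* For instance (illustrative call): strlen (p_1), where p_1 has been
   found to be the address of the string constant "hello", can be folded
   to the constant 5 once the constant operands are substituted in.  */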
922 | else if (code == CALL_EXPR |
923 | && TREE_CODE (TREE_OPERAND (rhs, 0)) == ADDR_EXPR | |
924 | && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) | |
925 | == FUNCTION_DECL) | |
926 | && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0))) | |
927 | { | |
928 | use_optype uses = STMT_USE_OPS (stmt); | |
929 | if (NUM_USES (uses) != 0) | |
930 | { | |
931 | tree *orig; | |
932 | size_t i; | |
933 | ||
934 | /* Preserve the original values of every operand. */ | |
935 | orig = xmalloc (sizeof (tree) * NUM_USES (uses)); | |
936 | for (i = 0; i < NUM_USES (uses); i++) | |
937 | orig[i] = USE_OP (uses, i); | |
938 | ||
939 | /* Substitute operands with their values and try to fold. */ | |
940 | replace_uses_in (stmt, NULL); | |
941 | retval = fold_builtin (rhs); | |
942 | ||
943 | /* Restore operands to their original form. */ | |
944 | for (i = 0; i < NUM_USES (uses); i++) | |
945 | *(USE_OP_PTR (uses, i)) = orig[i]; | |
946 | free (orig); | |
947 | } | |
948 | } | |
949 | else | |
950 | return rhs; | |
951 | ||
952 | /* If we got a simplified form, see if we need to convert its type. */ | |
953 | if (retval) | |
954 | { | |
955 | if (TREE_TYPE (retval) != TREE_TYPE (rhs)) | |
956 | retval = fold_convert (TREE_TYPE (rhs), retval);
957 | |
958 | if (TREE_TYPE (retval) == TREE_TYPE (rhs)) | |
959 | return retval; | |
960 | } | |
961 | ||
962 | /* No simplification was possible. */ | |
963 | return rhs; | |
964 | } | |
965 | ||
966 | ||
967 | /* Evaluate statement STMT. */ | |
968 | ||
969 | static value | |
970 | evaluate_stmt (tree stmt) | |
971 | { | |
972 | value val; | |
973 | tree simplified; | |
974 | latticevalue likelyvalue = likely_value (stmt); | |
975 | ||
976 | /* If the statement is likely to have a CONSTANT result, then try | |
977 | to fold the statement to determine the constant value. */ | |
978 | if (likelyvalue == CONSTANT) | |
979 | simplified = ccp_fold (stmt); | |
980 | /* If the statement is likely to have a VARYING result, then do not | |
981 | bother folding the statement. */ | |
982 | else if (likelyvalue == VARYING) | |
983 | simplified = get_rhs (stmt); | |
984 | /* Otherwise the statement is likely to have an UNDEFINED value and | |
985 | there will be nothing to do. */ | |
986 | else | |
987 | simplified = NULL_TREE; | |
988 | ||
989 | if (simplified && is_gimple_min_invariant (simplified)) | |
990 | { | |
991 | /* The statement produced a constant value. */ | |
992 | val.lattice_val = CONSTANT; | |
993 | val.const_val = simplified; | |
994 | } | |
995 | else | |
996 | { | |
997 | /* The statement produced a nonconstant value. If the statement | |
998 | had undefined operands, then the result of the statement should | |
999 | be undefined. Else the result of the statement is VARYING. */ | |
1000 | val.lattice_val = (likelyvalue == UNDEFINED ? UNDEFINED : VARYING); | |
1001 | val.const_val = NULL_TREE; | |
1002 | } | |
1003 | ||
1004 | return val; | |
1005 | } | |
1006 | ||
1007 | ||
1008 | /* Debugging dumps. */ | |
1009 | ||
1010 | static void | |
1011 | dump_lattice_value (FILE *outf, const char *prefix, value val) | |
1012 | { | |
1013 | switch (val.lattice_val) | |
1014 | { | |
1015 | case UNDEFINED: | |
1016 | fprintf (outf, "%sUNDEFINED", prefix); | |
1017 | break; | |
1018 | case VARYING: | |
1019 | fprintf (outf, "%sVARYING", prefix); | |
1020 | break; | |
1021 | case CONSTANT: | |
1022 | fprintf (outf, "%sCONSTANT ", prefix); | |
1023 | print_generic_expr (outf, val.const_val, dump_flags); | |
1024 | break; | |
1025 | default: | |
1026 | abort (); | |
1027 | } | |
1028 | } | |
1029 | ||
1030 | /* Given a constant value VAL for bitfield FIELD, and a destination | |
1031 | variable VAR, return VAL appropriately widened to fit into VAR. If | |
1032 | FIELD or VAR is wider than HOST_WIDE_INT, NULL is returned. */ | |
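/* Illustrative example (numbers picked arbitrarily): for a 4-bit signed
   bitfield holding the constant 0xE (binary 1110) assigned to a 32-bit
   variable, the sign bit of the field is set, so the upper 28 bits are
   ORed in and the widened value is -2.  For an unsigned 4-bit field the
   value is instead masked down to its low 4 bits, giving 14.  */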
1033 | ||
1034 | tree | |
1035 | widen_bitfield (tree val, tree field, tree var) | |
1036 | { | |
1037 | unsigned var_size, field_size; | |
1038 | tree wide_val; | |
1039 | unsigned HOST_WIDE_INT mask; | |
1040 | unsigned i; | |
1041 | ||
1042 | var_size = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE ((var)))); | |
1043 | field_size = TREE_INT_CST_LOW (DECL_SIZE (field)); | |
1044 | ||
1045 | /* Give up if either the bitfield or the variable are too wide. */ | |
1046 | if (field_size > HOST_BITS_PER_WIDE_INT || var_size > HOST_BITS_PER_WIDE_INT) | |
1047 | return NULL; | |
1048 | ||
1049 | #if defined ENABLE_CHECKING | |
1050 | if (var_size < field_size) | |
1051 | abort (); | |
1052 | #endif | |
1053 | ||
1054 | /* If VAL is not an integer constant, then give up. */ | |
1055 | if (TREE_CODE (val) != INTEGER_CST) | |
1056 | return NULL; | |
1057 | ||
1058 | /* If the sign bit of the value is not set, or the field's type is | |
1059 | unsigned, then just mask off the high order bits of the value. */ | |
1060 | if ((TREE_INT_CST_LOW (val) & ((unsigned HOST_WIDE_INT) 1 << (field_size - 1))) == 0 | |
1061 | || DECL_UNSIGNED (field)) | |
1062 | { | |
1063 | /* Zero extension. Build a mask with the lower 'field_size' bits | |
1064 | set and a BIT_AND_EXPR node to clear the high order bits of | |
1065 | the value. */ | |
1066 | for (i = 0, mask = 0; i < field_size; i++) | |
1067 | mask |= (unsigned HOST_WIDE_INT) 1 << i; | |
1068 | ||
1069 | wide_val = build (BIT_AND_EXPR, TREE_TYPE (var), val, | |
1070 | build_int_2 (mask, 0)); | |
1071 | } | |
1072 | else | |
1073 | { | |
1074 | /* Sign extension. Create a mask with the upper 'field_size' | |
1075 | bits set and a BIT_IOR_EXPR to set the high order bits of the | |
1076 | value. */ | |
1077 | for (i = 0, mask = 0; i < (var_size - field_size); i++) | |
1078 | mask |= (unsigned HOST_WIDE_INT) 1 << (var_size - i - 1); | |
1079 | ||
1080 | wide_val = build (BIT_IOR_EXPR, TREE_TYPE (var), val, | |
1081 | build_int_2 (mask, 0)); | |
1082 | } | |
1083 | ||
1084 | return fold (wide_val); | |
1085 | } | |
1086 | ||
1087 | ||
1088 | /* Function indicating whether we ought to include information for 'var' | |
1089 | when calculating immediate uses. */ | |
1090 | ||
1091 | static bool | |
1092 | need_imm_uses_for (tree var) | |
1093 | { | |
1094 | return get_value (var)->lattice_val != VARYING; | |
1095 | } | |
1096 | ||
1097 | ||
1098 | /* Initialize local data structures and worklists for CCP. */ | |
1099 | ||
1100 | static void | |
1101 | initialize (void) | |
1102 | { | |
1103 | edge e; | |
1104 | basic_block bb; | |
1105 | sbitmap virtual_var; | |
1106 | ||
1107 | /* Worklists of SSA edges. */
1108 | VARRAY_TREE_INIT (ssa_edges, 20, "ssa_edges");
1109 | VARRAY_TREE_INIT (varying_ssa_edges, 20, "varying_ssa_edges");
1110 | |
1111 | executable_blocks = sbitmap_alloc (last_basic_block); | |
1112 | sbitmap_zero (executable_blocks); | |
1113 | ||
1114 | bb_in_list = sbitmap_alloc (last_basic_block); | |
1115 | sbitmap_zero (bb_in_list); | |
1116 | ||
1117 | value_vector = (value *) xmalloc (highest_ssa_version * sizeof (value)); | |
1118 | memset (value_vector, 0, highest_ssa_version * sizeof (value)); | |
1119 | ||
1120 | /* 1 if ssa variable is used in a virtual variable context. */ | |
1121 | virtual_var = sbitmap_alloc (highest_ssa_version); | |
1122 | sbitmap_zero (virtual_var); | |
1123 | ||
1124 | /* Initialize default values and simulation flags for PHI nodes, statements | |
1125 | and edges. */ | |
1126 | FOR_EACH_BB (bb) | |
1127 | { | |
1128 | block_stmt_iterator i; | |
1129 | tree stmt; | |
1130 | stmt_ann_t ann; | |
1131 | def_optype defs; | |
1132 | vdef_optype vdefs; | |
1133 | size_t x; | |
1134 | int vary; | |
1135 | ||
1136 | /* Get the default value for each definition. */ | |
1137 | for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i)) | |
1138 | { | |
1139 | vary = 0; | |
1140 | stmt = bsi_stmt (i); | |
1141 | get_stmt_operands (stmt); | |
1142 | ann = stmt_ann (stmt); | |
1143 | defs = DEF_OPS (ann); | |
1144 | for (x = 0; x < NUM_DEFS (defs); x++) | |
1145 | { | |
1146 | tree def = DEF_OP (defs, x); | |
1147 | if (get_value (def)->lattice_val == VARYING) | |
1148 | vary = 1; | |
1149 | } | |
1150 | DONT_SIMULATE_AGAIN (stmt) = vary; | |
1151 | ||
1152 | /* Mark all VDEF operands VARYING. */ | |
1153 | vdefs = VDEF_OPS (ann); | |
1154 | for (x = 0; x < NUM_VDEFS (vdefs); x++) | |
1155 | { | |
1156 | tree res = VDEF_RESULT (vdefs, x); | |
1157 | get_value (res)->lattice_val = VARYING; | |
1158 | SET_BIT (virtual_var, SSA_NAME_VERSION (res)); | |
1159 | } | |
1160 | } | |
1161 | ||
1162 | for (e = bb->succ; e; e = e->succ_next) | |
1163 | e->flags &= ~EDGE_EXECUTABLE; | |
1164 | } | |
1165 | ||
1166 | /* Now process PHI nodes. */ | |
1167 | FOR_EACH_BB (bb) | |
1168 | { | |
1169 | tree phi, var; | |
1170 | int x; | |
1171 | for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi)) | |
1172 | { | |
1173 | value *val; | |
1174 | val = get_value (PHI_RESULT (phi)); | |
1175 | if (val->lattice_val != VARYING) | |
1176 | { | |
1177 | for (x = 0; x < PHI_NUM_ARGS (phi); x++) | |
1178 | { | |
1179 | var = PHI_ARG_DEF (phi, x); | |
1180 | /* If one argument is virtual, the result is virtual, and | |
1181 | therefore varying. */ | |
1182 | if (TREE_CODE (var) == SSA_NAME) | |
1183 | { | |
1184 | if (TEST_BIT (virtual_var, SSA_NAME_VERSION (var))) | |
1185 | { | |
1186 | val->lattice_val = VARYING; | |
1187 | SET_BIT (virtual_var, | |
1188 | SSA_NAME_VERSION (PHI_RESULT (phi))); | |
1189 | break; | |
1190 | } | |
1191 | } | |
1192 | } | |
1193 | } | |
1194 | DONT_SIMULATE_AGAIN (phi) = ((val->lattice_val == VARYING) ? 1 : 0); | |
1195 | } | |
1196 | } | |
1197 | ||
1198 | sbitmap_free (virtual_var); | |
1199 | /* Compute immediate uses for variables we care about. */ | |
1200 | compute_immediate_uses (TDFA_USE_OPS, need_imm_uses_for); | |
1201 | ||
1202 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
1203 | dump_immediate_uses (dump_file); | |
1204 | ||
1205 | VARRAY_BB_INIT (cfg_blocks, 20, "cfg_blocks"); | |
1206 | ||
1207 | /* Seed the algorithm by adding the successors of the entry block to the | |
1208 | edge worklist. */ | |
1209 | for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next) | |
1210 | { | |
1211 | if (e->dest != EXIT_BLOCK_PTR) | |
1212 | { | |
1213 | e->flags |= EDGE_EXECUTABLE; | |
1214 | cfg_blocks_add (e->dest); | |
1215 | } | |
1216 | } | |
1217 | } | |
1218 | ||
1219 | ||
1220 | /* Free allocated storage. */ | |
1221 | ||
1222 | static void | |
1223 | finalize (void) | |
1224 | { | |
1225 | ssa_edges = NULL; | |
1226 | varying_ssa_edges = NULL;
1227 | cfg_blocks = NULL; |
1228 | free (value_vector); | |
1229 | sbitmap_free (bb_in_list); | |
1230 | sbitmap_free (executable_blocks); | |
1231 | free_df (); | |
1232 | } | |
1233 | ||
1234 | /* Return true if the block worklist is empty. */ | |
1235 | ||
1236 | static inline bool | |
1237 | cfg_blocks_empty_p (void) | |
1238 | { | |
1239 | return (cfg_blocks_num == 0); | |
1240 | } | |
1241 | ||
1242 | /* Add a basic block to the worklist. */ | |
1243 | ||
1244 | static void | |
1245 | cfg_blocks_add (basic_block bb) | |
1246 | { | |
1247 | if (bb == ENTRY_BLOCK_PTR || bb == EXIT_BLOCK_PTR) | |
1248 | return; | |
1249 | ||
1250 | if (TEST_BIT (bb_in_list, bb->index)) | |
1251 | return; | |
1252 | ||
1253 | if (cfg_blocks_empty_p ()) | |
1254 | { | |
1255 | cfg_blocks_tail = cfg_blocks_head = 0; | |
1256 | cfg_blocks_num = 1; | |
1257 | } | |
1258 | else | |
1259 | { | |
1260 | cfg_blocks_num++; | |
1261 | if (cfg_blocks_num > VARRAY_SIZE (cfg_blocks)) | |
1262 | { | |
1263 | /* We have to grow the array now. Adjust the queue to occupy the | |
1264 | full space of the original array. */ | |
1265 | cfg_blocks_tail = VARRAY_SIZE (cfg_blocks); | |
1266 | cfg_blocks_head = 0; | |
1267 | VARRAY_GROW (cfg_blocks, 2 * VARRAY_SIZE (cfg_blocks)); | |
1268 | } | |
1269 | else | |
1270 | cfg_blocks_tail = (cfg_blocks_tail + 1) % VARRAY_SIZE (cfg_blocks); | |
1271 | } | |
1272 | VARRAY_BB (cfg_blocks, cfg_blocks_tail) = bb; | |
1273 | SET_BIT (bb_in_list, bb->index); | |
1274 | } | |
1275 | ||
1276 | /* Remove a block from the worklist. */ | |
1277 | ||
1278 | static basic_block | |
1279 | cfg_blocks_get (void) | |
1280 | { | |
1281 | basic_block bb; | |
1282 | ||
1283 | bb = VARRAY_BB (cfg_blocks, cfg_blocks_head); | |
1284 | ||
1285 | #ifdef ENABLE_CHECKING | |
1286 | if (cfg_blocks_empty_p () || !bb) | |
1287 | abort (); | |
1288 | #endif | |
1289 | ||
1290 | cfg_blocks_head = (cfg_blocks_head + 1) % VARRAY_SIZE (cfg_blocks); | |
1291 | --cfg_blocks_num; | |
1292 | RESET_BIT (bb_in_list, bb->index); | |
1293 | ||
1294 | return bb; | |
1295 | } | |
1296 | ||
1297 | /* We have just defined a new value for VAR. Add all immediate uses | |
1298 | of VAR to the ssa_edges or varying_ssa_edges worklist. */
1299 | static void
1300 | add_var_to_ssa_edges_worklist (tree var, value val)
1301 | { |
1302 | tree stmt = SSA_NAME_DEF_STMT (var); | |
1303 | dataflow_t df = get_immediate_uses (stmt); | |
1304 | int num_uses = num_immediate_uses (df); | |
1305 | int i; | |
1306 | ||
1307 | for (i = 0; i < num_uses; i++) | |
1308 | { | |
1309 | tree use = immediate_use (df, i); | |
1310 | ||
1311 | if (!DONT_SIMULATE_AGAIN (use)) | |
1312 | { | |
1313 | stmt_ann_t ann = stmt_ann (use); | |
1314 | if (ann->in_ccp_worklist == 0) | |
1315 | { | |
1316 | ann->in_ccp_worklist = 1; | |
1317 | if (val.lattice_val == VARYING) |
1318 | VARRAY_PUSH_TREE (varying_ssa_edges, use); | |
1319 | else | |
1320 | VARRAY_PUSH_TREE (ssa_edges, use); | |
1321 | } |
1322 | } | |
1323 | } | |
1324 | } | |
1325 | ||
1326 | /* Set the lattice value for the variable VAR to VARYING. */ | |
1327 | ||
1328 | static void | |
1329 | def_to_varying (tree var) | |
1330 | { | |
1331 | value val; | |
1332 | val.lattice_val = VARYING; | |
1333 | val.const_val = NULL_TREE; | |
1334 | set_lattice_value (var, val); | |
1335 | } | |
1336 | ||
1337 | /* Set the lattice value for variable VAR to VAL. */ | |
1338 | ||
1339 | static void | |
1340 | set_lattice_value (tree var, value val) | |
1341 | { | |
1342 | value *old = get_value (var); | |
1343 | ||
1344 | #ifdef ENABLE_CHECKING | |
1345 | if (val.lattice_val == UNDEFINED) | |
1346 | { | |
1347 | /* CONSTANT->UNDEFINED is never a valid state transition. */ | |
1348 | if (old->lattice_val == CONSTANT) | |
1349 | abort (); | |
1350 | ||
1351 | /* VARYING->UNDEFINED is generally not a valid state transition, | |
1352 | except for values which are initialized to VARYING. */ | |
1353 | if (old->lattice_val == VARYING | |
1354 | && get_default_value (var).lattice_val != VARYING) | |
1355 | abort (); | |
1356 | } | |
1357 | else if (val.lattice_val == CONSTANT) | |
1358 | { | |
1359 | /* VARYING -> CONSTANT is an invalid state transition, except | |
1360 | for objects which start off in a VARYING state. */ | |
1361 | if (old->lattice_val == VARYING | |
1362 | && get_default_value (var).lattice_val != VARYING) | |
1363 | abort (); | |
1364 | } | |
1365 | #endif | |
1366 | ||
1367 | /* If the constant for VAR has changed, then this VAR is really varying. */ | |
1368 | if (old->lattice_val == CONSTANT && val.lattice_val == CONSTANT | |
1369 | && !simple_cst_equal (old->const_val, val.const_val)) | |
1370 | { | |
1371 | val.lattice_val = VARYING; | |
1372 | val.const_val = NULL_TREE; | |
1373 | } | |
1374 | ||
1375 | if (old->lattice_val != val.lattice_val) | |
1376 | { | |
1377 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
1378 | { | |
1379 | dump_lattice_value (dump_file, | |
1380 | "Lattice value changed to ", val); | |
1381 | fprintf (dump_file, ". Adding definition to SSA edges.\n"); | |
1382 | } | |
1383 | ||
1384 | add_var_to_ssa_edges_worklist (var, val);
1385 | *old = val; |
1386 | } | |
1387 | } | |
1388 | ||
1389 | /* Replace USE references in statement STMT with their immediate reaching | |
1390 | definition. Return true if at least one reference was replaced. If | |
1391 | REPLACED_ADDRESSES_P is given, it will be set to true if an address | |
1392 | constant was replaced. */ | |
1393 | ||
1394 | static bool | |
1395 | replace_uses_in (tree stmt, bool *replaced_addresses_p) | |
1396 | { | |
1397 | bool replaced = false; | |
1398 | use_optype uses; | |
1399 | size_t i; | |
1400 | ||
1401 | if (replaced_addresses_p) | |
1402 | *replaced_addresses_p = false; | |
1403 | ||
1404 | get_stmt_operands (stmt); | |
1405 | ||
1406 | uses = STMT_USE_OPS (stmt); | |
1407 | for (i = 0; i < NUM_USES (uses); i++) | |
1408 | { | |
1409 | tree *use = USE_OP_PTR (uses, i); | |
1410 | value *val = get_value (*use); | |
1411 | ||
1412 | if (val->lattice_val == CONSTANT) | |
1413 | { | |
1414 | *use = val->const_val; | |
1415 | replaced = true; | |
1416 | if (POINTER_TYPE_P (TREE_TYPE (*use)) && replaced_addresses_p) | |
1417 | *replaced_addresses_p = true; | |
1418 | } | |
1419 | } | |
1420 | ||
1421 | return replaced; | |
1422 | } | |
1423 | ||
1424 | /* Return the likely latticevalue for STMT. | |
1425 | ||
1426 | If STMT has no operands, then return CONSTANT. | |
1427 | ||
1428 | Else if any operands of STMT are undefined, then return UNDEFINED. | |
1429 | ||
1430 | Else if any operands of STMT are constants, then return CONSTANT. | |
1431 | ||
1432 | Else return VARYING. */ | |
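/* For example (hypothetical operands): 'x_2 = x_1 + 4' is likely CONSTANT
   when x_1 is known to be CONSTANT, likely UNDEFINED while x_1 is still
   UNDEFINED, and VARYING once x_1 is VARYING; a statement with no USE
   operands, such as 'x_2 = 10', is likely CONSTANT.  */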
1433 | ||
1434 | static latticevalue | |
1435 | likely_value (tree stmt) | |
1436 | { | |
1437 | use_optype uses; | |
1438 | size_t i; | |
1439 | int found_constant = 0; | |
1440 | stmt_ann_t ann; | |
1441 | ||
1442 | /* If the statement makes aliased loads or has volatile operands, it | |
1443 | won't fold to a constant value. */ | |
1444 | ann = stmt_ann (stmt); | |
1445 | if (ann->makes_aliased_loads || ann->has_volatile_ops) | |
1446 | return VARYING; | |
1447 | ||
1448 | /* A CALL_EXPR is assumed to be varying. This may be overly conservative | |
1449 | in the presence of const and pure calls. */ | |
1450 | if (get_call_expr_in (stmt) != NULL_TREE) | |
1451 | return VARYING; | |
1452 | ||
1453 | get_stmt_operands (stmt); | |
1454 | ||
1455 | uses = USE_OPS (ann); | |
1456 | for (i = 0; i < NUM_USES (uses); i++) | |
1457 | { | |
1458 | tree use = USE_OP (uses, i); | |
1459 | value *val = get_value (use); | |
1460 | ||
1461 | if (val->lattice_val == UNDEFINED) | |
1462 | return UNDEFINED; | |
1463 | ||
1464 | if (val->lattice_val == CONSTANT) | |
1465 | found_constant = 1; | |
1466 | } | |
1467 | ||
1468 | return ((found_constant || !uses) ? CONSTANT : VARYING); | |
1469 | } | |
1470 | ||
1471 | /* A subroutine of fold_stmt_r. Attempts to fold *(A+O) to A[X]. | |
1472 | BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE | |
1473 | is the desired result type. */
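/* A concrete case (made-up declarations): for 'int a[10]' accessed as
   *(&a + 12) with a 4-byte int, the offset divides evenly by the element
   size, giving index 3, so the expression is rewritten as a[3].  If the
   array's domain has a nonzero minimum index, the index is re-biased by
   it as well.  */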
1474 | |
1475 | static tree | |
1476 | maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type) | |
1477 | { | |
1478 | unsigned HOST_WIDE_INT lquo, lrem; | |
1479 | HOST_WIDE_INT hquo, hrem; | |
1480 | tree elt_size, min_idx, idx; | |
1481 | tree array_type, elt_type; | |
1482 | ||
1483 | /* Ignore stupid user tricks of indexing non-array variables. */ | |
1484 | array_type = TREE_TYPE (base); | |
1485 | if (TREE_CODE (array_type) != ARRAY_TYPE) | |
1486 | return NULL_TREE; | |
1487 | elt_type = TREE_TYPE (array_type); | |
1488 | if (!lang_hooks.types_compatible_p (orig_type, elt_type)) | |
1489 | return NULL_TREE; | |
1490 | ||
1491 | /* Whee. Ignore indexing of variable sized types. */ | |
1492 | elt_size = TYPE_SIZE_UNIT (elt_type); | |
1493 | if (TREE_CODE (elt_size) != INTEGER_CST) | |
1494 | return NULL_TREE; | |
1495 | ||
1496 | /* If the division isn't exact, then don't do anything. Equally | |
1497 | invalid as the above indexing of non-array variables. */ | |
1498 | if (div_and_round_double (TRUNC_DIV_EXPR, 1, | |
1499 | TREE_INT_CST_LOW (offset), | |
1500 | TREE_INT_CST_HIGH (offset), | |
1501 | TREE_INT_CST_LOW (elt_size), | |
1502 | TREE_INT_CST_HIGH (elt_size), | |
1503 | &lquo, &hquo, &lrem, &hrem) | |
1504 | || lrem || hrem) | |
1505 | return NULL_TREE; | |
1506 | idx = build_int_2_wide (lquo, hquo); | |
1507 | ||
1508 | /* Re-bias the index by the min index of the array type. */ | |
1509 | min_idx = TYPE_DOMAIN (TREE_TYPE (base)); | |
1510 | if (min_idx) | |
1511 | { | |
1512 | min_idx = TYPE_MIN_VALUE (min_idx); | |
1513 | if (min_idx) | |
1514 | { | |
1515 | idx = convert (TREE_TYPE (min_idx), idx); | |
1516 | if (!integer_zerop (min_idx)) | |
1517 | idx = int_const_binop (PLUS_EXPR, idx, min_idx, 1); | |
1518 | } | |
1519 | } | |
1520 | ||
1521 | return build (ARRAY_REF, orig_type, base, idx); | |
1522 | } | |
1523 | ||
1524 | /* A subroutine of fold_stmt_r. Attempts to fold *(S+O) to S.X. | |
1525 | BASE is a record type. OFFSET is a byte displacement. ORIG_TYPE | |
1526 | is the desired result type. */ | |
1527 | /* ??? This doesn't handle class inheritance. */ | |
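/* For example (hypothetical layout): with 'struct S { int a; int b; }'
   and a 4-byte int, dereferencing S's address plus offset 4 with type int
   matches field 'b' exactly and is rewritten as the COMPONENT_REF s.b;
   an offset landing in the middle of a scalar field, or one whose type
   does not match, is left alone.  */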
1528 | ||
1529 | static tree | |
1530 | maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset, | |
1531 | tree orig_type, bool base_is_ptr) | |
1532 | { | |
1533 | tree f, t, field_type, tail_array_field; | |
1534 | ||
1535 | if (TREE_CODE (record_type) != RECORD_TYPE | |
1536 | && TREE_CODE (record_type) != UNION_TYPE | |
1537 | && TREE_CODE (record_type) != QUAL_UNION_TYPE) | |
1538 | return NULL_TREE; | |
1539 | ||
1540 | /* Short-circuit silly cases. */ | |
1541 | if (lang_hooks.types_compatible_p (record_type, orig_type)) | |
1542 | return NULL_TREE; | |
1543 | ||
1544 | tail_array_field = NULL_TREE; | |
1545 | for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f)) | |
1546 | { | |
1547 | int cmp; | |
1548 | ||
1549 | if (TREE_CODE (f) != FIELD_DECL) | |
1550 | continue; | |
1551 | if (DECL_BIT_FIELD (f)) | |
1552 | continue; | |
1553 | if (TREE_CODE (DECL_FIELD_OFFSET (f)) != INTEGER_CST) | |
1554 | continue; | |
1555 | ||
1556 | /* ??? Java creates "interesting" fields for representing base classes. | |
1557 | They have no name, and have no context. With no context, we get into | |
1558 | trouble with nonoverlapping_component_refs_p. Skip them. */ | |
1559 | if (!DECL_FIELD_CONTEXT (f)) | |
1560 | continue; | |
1561 | ||
1562 | /* The previous array field isn't at the end. */ | |
1563 | tail_array_field = NULL_TREE; | |
1564 | ||
1565 | /* Check to see if this offset overlaps with the field. */ | |
1566 | cmp = tree_int_cst_compare (DECL_FIELD_OFFSET (f), offset); | |
1567 | if (cmp > 0) | |
1568 | continue; | |
1569 | ||
1570 | field_type = TREE_TYPE (f); | |
1571 | if (cmp < 0) | |
1572 | { | |
1573 | /* Don't care about offsets into the middle of scalars. */ | |
1574 | if (!AGGREGATE_TYPE_P (field_type)) | |
1575 | continue; | |
1576 | ||
1577 | /* Check for array at the end of the struct. This is often | |
1578 | used for flexible array members. We should be able to | |
1579 | turn this into an array access anyway. */ | |
1580 | if (TREE_CODE (field_type) == ARRAY_TYPE) | |
1581 | tail_array_field = f; | |
1582 | ||
1583 | /* Check the end of the field against the offset. */ | |
1584 | if (!DECL_SIZE_UNIT (f) | |
1585 | || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST) | |
1586 | continue; | |
1587 | t = int_const_binop (MINUS_EXPR, offset, DECL_FIELD_OFFSET (f), 1); | |
1588 | if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f))) | |
1589 | continue; | |
1590 | ||
1591 | /* If we matched, then set offset to the displacement into | |
1592 | this field. */ | |
1593 | offset = t; | |
1594 | } | |
1595 | ||
1596 | /* Here we exactly match the offset being checked. If the types match, | |
1597 | then we can return that field. */ | |
1598 | else if (lang_hooks.types_compatible_p (orig_type, field_type)) | |
1599 | { | |
1600 | if (base_is_ptr) | |
1601 | base = build1 (INDIRECT_REF, record_type, base); | |
1602 | t = build (COMPONENT_REF, field_type, base, f); | |
1603 | return t; | |
1604 | } | |
1605 | ||
1606 | /* Don't care about type-punning of scalars. */ | |
1607 | else if (!AGGREGATE_TYPE_P (field_type)) | |
1608 | return NULL_TREE; | |
1609 | ||
1610 | goto found; | |
1611 | } | |
1612 | ||
1613 | if (!tail_array_field) | |
1614 | return NULL_TREE; | |
1615 | ||
1616 | f = tail_array_field; | |
1617 | field_type = TREE_TYPE (f); | |
1618 | ||
1619 | found: | |
1620 | /* If we get here, we've got an aggregate field, and a possibly | |
1ea7e6ad | 1621 | nonzero offset into it. Recurse and hope for a valid match. */ |
1622 | if (base_is_ptr) |
1623 | base = build1 (INDIRECT_REF, record_type, base); | |
1624 | base = build (COMPONENT_REF, field_type, base, f); | |
1625 | ||
1626 | t = maybe_fold_offset_to_array_ref (base, offset, orig_type); | |
1627 | if (t) | |
1628 | return t; | |
1629 | return maybe_fold_offset_to_component_ref (field_type, base, offset, | |
1630 | orig_type, false); | |
1631 | } | |
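As a rough source-level illustration of the fold above (a sketch, not GCC internals; the struct and function names are invented for the example), a dereference of "base plus constant byte offset" whose offset lands exactly on a field is rewritten as an ordinary field access:

#include <assert.h>
#include <stddef.h>

struct pair { int a; int b; };

static int
read_b_via_offset (struct pair *p)
{
  /* Before folding: *(S+O), a dereference of base plus byte offset.  */
  return *(int *) ((char *) p + offsetof (struct pair, b));
}

static int
read_b_direct (struct pair *p)
{
  /* After folding: the equivalent COMPONENT_REF, i.e. p->b.  */
  return p->b;
}

int
main (void)
{
  struct pair x = { 1, 2 };
  assert (read_b_via_offset (&x) == read_b_direct (&x));
  return 0;
}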
1632 | ||
1633 | /* A subroutine of fold_stmt_r. Attempt to simplify *(BASE+OFFSET). | |
1634 | Return the simplified expression, or NULL if nothing could be done. */ | |
1635 | ||
1636 | static tree | |
1637 | maybe_fold_stmt_indirect (tree expr, tree base, tree offset) | |
1638 | { | |
1639 | tree t; | |
1640 | ||
1641 | /* We may well have constructed a double-nested PLUS_EXPR via multiple | |
1642 | substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that | |
1643 | are sometimes added. */ | |
1644 | base = fold (base); | |
1645 | STRIP_NOPS (base); | |
1646 | TREE_OPERAND (expr, 0) = base; | |
1647 | ||
1648 | /* One possibility is that the address reduces to a string constant. */ | |
1649 | t = fold_read_from_constant_string (expr); | |
1650 | if (t) | |
1651 | return t; | |
1652 | ||
1653 | /* Add in any offset from a PLUS_EXPR. */ | |
1654 | if (TREE_CODE (base) == PLUS_EXPR) | |
1655 | { | |
1656 | tree offset2; | |
1657 | ||
1658 | offset2 = TREE_OPERAND (base, 1); | |
1659 | if (TREE_CODE (offset2) != INTEGER_CST) | |
1660 | return NULL_TREE; | |
1661 | base = TREE_OPERAND (base, 0); | |
1662 | ||
1663 | offset = int_const_binop (PLUS_EXPR, offset, offset2, 1); | |
1664 | } | |
1665 | ||
1666 | if (TREE_CODE (base) == ADDR_EXPR) | |
1667 | { | |
1668 | /* Strip the ADDR_EXPR. */ | |
1669 | base = TREE_OPERAND (base, 0); | |
1670 | ||
1671 | /* Try folding *(&B+O) to B[X]. */ | |
1672 | t = maybe_fold_offset_to_array_ref (base, offset, TREE_TYPE (expr)); | |
1673 | if (t) | |
1674 | return t; | |
1675 | ||
1676 | /* Try folding *(&B+O) to B.X. */ | |
1677 | t = maybe_fold_offset_to_component_ref (TREE_TYPE (base), base, offset, | |
1678 | TREE_TYPE (expr), false); | |
1679 | if (t) | |
1680 | return t; | |
1681 | ||
1682 | /* Fold *&B to B. */ | |
1683 | if (integer_zerop (offset)) | |
1684 | return base; | |
1685 | } | |
1686 | else | |
1687 | { | |
1688 | /* We can get here for out-of-range string constant accesses, | |
1689 | such as "_"[3]. Bail out of the entire substitution search | |
1690 | and arrange for the entire statement to be replaced by a | |
1691 | call to __builtin_trap. In all likelihood this will all be | |
1692 | constant-folded away, but in the meantime we can't leave with | |
1693 | something that get_expr_operands can't understand. */ | |
1694 | ||
1695 | t = base; | |
1696 | STRIP_NOPS (t); | |
1697 | if (TREE_CODE (t) == ADDR_EXPR | |
1698 | && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST) | |
1699 | { | |
1700 | /* FIXME: Except that this causes problems elsewhere with dead | |
1701 | code not being deleted, and we abort in the rtl expanders | |
1702 | because we failed to remove some ssa_name. In the meantime, | |
1703 | just return zero. */ | |
1704 | /* FIXME2: This condition should be signaled by | |
1705 | fold_read_from_constant_string directly, rather than | |
1706 | re-checking for it here. */ | |
1707 | return integer_zero_node; | |
1708 | } | |
1709 | ||
1710 | /* Try folding *(B+O) to B->X. Still an improvement. */ | |
1711 | if (POINTER_TYPE_P (TREE_TYPE (base))) | |
1712 | { | |
1713 | t = maybe_fold_offset_to_component_ref (TREE_TYPE (TREE_TYPE (base)), | |
1714 | base, offset, | |
1715 | TREE_TYPE (expr), true); | |
1716 | if (t) | |
1717 | return t; | |
1718 | } | |
1719 | } | |
1720 | ||
1721 | /* Otherwise we had an offset that we could not simplify. */ | |
1722 | return NULL_TREE; | |
1723 | } | |
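A comparable sketch for maybe_fold_stmt_indirect (illustrative only; the array name is invented): dereferencing the address of an array plus a constant byte offset becomes an ordinary element access, and the zero-offset case *&B collapses back to B:

#include <assert.h>

static int values[4] = { 10, 20, 30, 40 };

int
main (void)
{
  /* Before folding: *(&B+O), with O a multiple of the element size.  */
  int via_offset = *(int *) ((char *) &values + 2 * sizeof (int));

  /* After folding: the equivalent ARRAY_REF, values[2].  */
  int via_index = values[2];
  assert (via_offset == via_index);

  /* The zero-offset case, *&B, simply folds back to B.  */
  assert (*&values[1] == values[1]);
  return 0;
}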
1724 | ||
1725 | /* A subroutine of fold_stmt_r. EXPR is a PLUS_EXPR. | |
1726 | ||
1727 | A quaint feature extant in our address arithmetic is that there | |
1728 | can be hidden type changes here. The type of the result need | |
1729 | not be the same as the type of the input pointer. | |
1730 | ||
1731 | What we're after here is an expression of the form | |
1732 | (T *)(&array + const) | |
1733 | where the cast doesn't actually exist, but is implicit in the | |
1734 | type of the PLUS_EXPR. We'd like to turn this into | |
1735 | &array[x] | |
1736 | which may be able to propagate further. */ | |
1737 | ||
1738 | static tree | |
1739 | maybe_fold_stmt_addition (tree expr) | |
1740 | { | |
1741 | tree op0 = TREE_OPERAND (expr, 0); | |
1742 | tree op1 = TREE_OPERAND (expr, 1); | |
1743 | tree ptr_type = TREE_TYPE (expr); | |
1744 | tree ptd_type; | |
1745 | tree t; | |
1746 | bool subtract = (TREE_CODE (expr) == MINUS_EXPR); | |
1747 | ||
1748 | /* We're only interested in pointer arithmetic. */ | |
1749 | if (!POINTER_TYPE_P (ptr_type)) | |
1750 | return NULL_TREE; | |
1751 | /* Canonicalize the integral operand to op1. */ | |
1752 | if (INTEGRAL_TYPE_P (TREE_TYPE (op0))) | |
1753 | { | |
1754 | if (subtract) | |
1755 | return NULL_TREE; | |
1756 | t = op0, op0 = op1, op1 = t; | |
1757 | } | |
1758 | /* It had better be a constant. */ | |
1759 | if (TREE_CODE (op1) != INTEGER_CST) | |
1760 | return NULL_TREE; | |
1761 | /* The first operand should be an ADDR_EXPR. */ | |
1762 | if (TREE_CODE (op0) != ADDR_EXPR) | |
1763 | return NULL_TREE; | |
1764 | op0 = TREE_OPERAND (op0, 0); | |
1765 | ||
1766 | /* If the first operand is an ARRAY_REF, expand it so that we can fold | |
1767 | the offset into it. */ | |
1768 | while (TREE_CODE (op0) == ARRAY_REF) | |
1769 | { | |
1770 | tree array_obj = TREE_OPERAND (op0, 0); | |
1771 | tree array_idx = TREE_OPERAND (op0, 1); | |
1772 | tree elt_type = TREE_TYPE (op0); | |
1773 | tree elt_size = TYPE_SIZE_UNIT (elt_type); | |
1774 | tree min_idx; | |
1775 | ||
1776 | if (TREE_CODE (array_idx) != INTEGER_CST) | |
1777 | break; | |
1778 | if (TREE_CODE (elt_size) != INTEGER_CST) | |
1779 | break; | |
1780 | ||
1781 | /* Un-bias the index by the min index of the array type. */ | |
1782 | min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj)); | |
1783 | if (min_idx) | |
1784 | { | |
1785 | min_idx = TYPE_MIN_VALUE (min_idx); | |
1786 | if (min_idx) | |
1787 | { | |
1788 | array_idx = convert (TREE_TYPE (min_idx), array_idx); | |
1789 | if (!integer_zerop (min_idx)) | |
1790 | array_idx = int_const_binop (MINUS_EXPR, array_idx, | |
1791 | min_idx, 0); | |
1792 | } | |
1793 | } | |
1794 | ||
1795 | /* Convert the index to a byte offset. */ | |
1796 | array_idx = convert (sizetype, array_idx); | |
1797 | array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0); | |
1798 | ||
1799 | /* Update the operands for the next round, or for folding. */ | |
1800 | /* If we're manipulating unsigned types, then folding into negative | |
1801 | values can produce incorrect results, particularly if the type | |
1802 | is smaller than the width of the pointer. */ | |
1803 | if (subtract | |
1804 | && TYPE_UNSIGNED (TREE_TYPE (op1)) | |
1805 | && tree_int_cst_lt (array_idx, op1)) | |
1806 | return NULL; | |
1807 | op1 = int_const_binop (subtract ? MINUS_EXPR : PLUS_EXPR, | |
1808 | array_idx, op1, 0); | |
1809 | subtract = false; | |
1810 | op0 = array_obj; | |
1811 | } | |
1812 | ||
1813 | /* If we weren't able to fold the subtraction into another array reference, | |
1814 | canonicalize the integer for passing to the array and component ref | |
1815 | simplification functions. */ | |
1816 | if (subtract) | |
1817 | { | |
1818 | if (TYPE_UNSIGNED (TREE_TYPE (op1))) | |
1819 | return NULL; | |
1820 | op1 = fold (build1 (NEGATE_EXPR, TREE_TYPE (op1), op1)); | |
1821 | /* ??? In theory fold should always produce another integer. */ | |
1822 | if (TREE_CODE (op1) != INTEGER_CST) | |
1823 | return NULL; | |
1824 | } | |
1825 | ||
1826 | ptd_type = TREE_TYPE (ptr_type); | |
1827 | ||
1828 | /* At which point we can try some of the same things as for indirects. */ | |
1829 | t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type); | |
1830 | if (!t) | |
1831 | t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1, | |
1832 | ptd_type, false); | |
1833 | if (t) | |
1834 | t = build1 (ADDR_EXPR, ptr_type, t); | |
1835 | ||
1836 | return t; | |
1837 | } | |
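The effect of maybe_fold_stmt_addition can be pictured at the source level as follows (a hedged sketch with invented names, not the internal tree form): a cast of "&array plus constant" becomes the address of an element, the byte offset being divided by the element size to recover the index:

#include <assert.h>

static short buf[8];

int
main (void)
{
  /* Before folding: (T *)(&array + const), the element type change
     being implicit in the cast.  */
  short *p = (short *) ((char *) &buf + 3 * sizeof (short));

  /* After folding: byte offset / element size gives index 3, so the
     expression becomes &array[3].  */
  short *q = &buf[3];

  assert (p == q);
  return 0;
}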
1838 | ||
1839 | /* Subroutine of fold_stmt called via walk_tree. We perform several | |
1840 | simplifications of EXPR_P, mostly having to do with pointer arithmetic. */ | |
1841 | ||
1842 | static tree | |
1843 | fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data) | |
1844 | { | |
1845 | bool *changed_p = data; | |
1846 | tree expr = *expr_p, t; | |
1847 | ||
1848 | /* ??? It'd be nice if walk_tree had a pre-order option. */ | |
1849 | switch (TREE_CODE (expr)) | |
1850 | { | |
1851 | case INDIRECT_REF: | |
1852 | t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL); | |
1853 | if (t) | |
1854 | return t; | |
1855 | *walk_subtrees = 0; | |
1856 | ||
1857 | t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0), | |
1858 | integer_zero_node); | |
1859 | break; | |
1860 | ||
1861 | /* ??? Could handle ARRAY_REF here, as a variant of INDIRECT_REF. | |
1862 | We'd only want to bother decomposing an existing ARRAY_REF if | |
1863 | the base array is found to have another offset contained within. | |
1864 | Otherwise we'd be wasting time. */ | |
1865 | ||
1866 | case ADDR_EXPR: | |
1867 | t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL); | |
1868 | if (t) | |
1869 | return t; | |
1870 | *walk_subtrees = 0; | |
1871 | ||
1872 | /* Set TREE_INVARIANT properly so that the value is considered | |
1873 | constant and gets propagated as expected. */ | |
1874 | if (*changed_p) | |
1875 | recompute_tree_invarant_for_addr_expr (expr); | |
1876 | return NULL_TREE; | |
1877 | ||
1878 | case PLUS_EXPR: | |
1879 | case MINUS_EXPR: | |
1880 | t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL); | |
1881 | if (t) | |
1882 | return t; | |
1883 | t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL); | |
1884 | if (t) | |
1885 | return t; | |
1886 | *walk_subtrees = 0; | |
1887 | ||
1888 | t = maybe_fold_stmt_addition (expr); | |
1889 | break; | |
1890 | ||
1891 | case COMPONENT_REF: | |
1892 | t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL); | |
1893 | if (t) | |
1894 | return t; | |
1895 | *walk_subtrees = 0; | |
1896 | ||
1897 | /* Make sure the FIELD_DECL is actually a field in the type on | |
1898 | the lhs. In cases with IMA it is possible that it came | |
1899 | from another, equivalent type at this point. We have | |
1900 | already checked the equivalence in this case. | |
1901 | Match on type plus offset, to allow for unnamed fields. | |
1902 | We won't necessarily get the corresponding field for | |
1903 | unions; this is believed to be harmless. */ | |
1904 | ||
1905 | if ((current_file_decl && TREE_CHAIN (current_file_decl)) | |
1906 | && (DECL_FIELD_CONTEXT (TREE_OPERAND (expr, 1)) != | |
1907 | TREE_TYPE (TREE_OPERAND (expr, 0)))) | |
1908 | { | |
1909 | tree f; | |
1910 | tree orig_field = TREE_OPERAND (expr, 1); | |
1911 | tree orig_type = TREE_TYPE (orig_field); | |
1912 | for (f = TYPE_FIELDS (TREE_TYPE (TREE_OPERAND (expr, 0))); | |
1913 | f; f = TREE_CHAIN (f)) | |
1914 | { | |
1915 | if (lang_hooks.types_compatible_p (TREE_TYPE (f), orig_type) | |
1916 | && tree_int_cst_compare (DECL_FIELD_BIT_OFFSET (f), | |
1917 | DECL_FIELD_BIT_OFFSET (orig_field)) | |
1918 | == 0 | |
1919 | && tree_int_cst_compare (DECL_FIELD_OFFSET (f), | |
1920 | DECL_FIELD_OFFSET (orig_field)) | |
1921 | == 0) | |
1922 | { | |
1923 | TREE_OPERAND (expr, 1) = f; | |
1924 | break; | |
1925 | } | |
1926 | } | |
9cf737f8 | 1927 | /* Fall through is an error; it will be detected in tree-sra. */ |
1928 | } |
1929 | break; | |
1930 | ||
1931 | default: | |
1932 | return NULL_TREE; | |
1933 | } | |
1934 | ||
1935 | if (t) | |
1936 | { | |
1937 | *expr_p = t; | |
1938 | *changed_p = true; | |
1939 | } | |
1940 | ||
1941 | return NULL_TREE; | |
1942 | } | |
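fold_stmt_r follows walk_tree's callback convention: returning a non-null tree aborts the whole walk, and clearing *walk_subtrees prevents recursion into operands that were already handled. The stand-alone toy walker below (not GCC's walk_tree; every name is invented) illustrates just that protocol:

#include <assert.h>
#include <stddef.h>

struct node { int value; struct node *left, *right; };

typedef struct node *(*walk_fn) (struct node *, int *, void *);

static struct node *
walk (struct node *n, walk_fn fn, void *data)
{
  int walk_subtrees = 1;
  struct node *result;

  if (!n)
    return NULL;

  /* A non-null return value stops the entire walk.  */
  result = fn (n, &walk_subtrees, data);
  if (result)
    return result;

  /* The callback may clear WALK_SUBTREES to skip the children.  */
  if (!walk_subtrees)
    return NULL;

  result = walk (n->left, fn, data);
  return result ? result : walk (n->right, fn, data);
}

static struct node *
find_negative (struct node *n, int *walk_subtrees, void *data)
{
  (void) walk_subtrees;
  (void) data;
  return n->value < 0 ? n : NULL;
}

int
main (void)
{
  struct node leaf = { -7, NULL, NULL };
  struct node root = { 1, &leaf, NULL };
  assert (walk (&root, find_negative, NULL) == &leaf);
  return 0;
}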
1943 | ||
1944 | /* Fold the statement pointed to by STMT_P. In some cases, this function may | |
1945 | replace the whole statement with a new one. Returns true iff folding | |
1946 | makes any changes. */ | |
1947 | ||
1948 | bool | |
1949 | fold_stmt (tree *stmt_p) | |
1950 | { | |
1951 | tree rhs, result, stmt; | |
1952 | bool changed = false; | |
1953 | ||
1954 | stmt = *stmt_p; | |
1955 | ||
1956 | /* If we replaced constants and the statement makes pointer dereferences, | |
1957 | then we may need to fold instances of *&VAR into VAR, etc. */ | |
1958 | if (walk_tree (stmt_p, fold_stmt_r, &changed, NULL)) | |
1959 | { | |
1960 | *stmt_p | |
1961 | = build_function_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], | |
1962 | NULL); | |
1963 | return true; | |
1964 | } | |
1965 | ||
1966 | rhs = get_rhs (stmt); | |
1967 | if (!rhs) | |
1968 | return changed; | |
1969 | result = NULL_TREE; | |
1970 | ||
1971 | /* Check for builtins that CCP can handle using information not | |
1972 | available in the generic fold routines. */ | |
1973 | if (TREE_CODE (rhs) == CALL_EXPR) | |
1974 | { | |
1975 | tree callee = get_callee_fndecl (rhs); | |
1976 | if (callee && DECL_BUILT_IN (callee)) | |
1977 | result = ccp_fold_builtin (stmt, rhs); | |
1978 | } | |
1979 | ||
1980 | /* If we couldn't fold the RHS, hand over to the generic fold routines. */ | |
1981 | if (result == NULL_TREE) | |
1982 | result = fold (rhs); | |
1983 | ||
1984 | /* Strip away useless type conversions: both the NON_LVALUE_EXPR that | |
1985 | may have been added by fold, and "useless" type conversions that might | |
1986 | now be apparent due to propagation. */ | |
1987 | STRIP_MAIN_TYPE_NOPS (result); | |
1988 | STRIP_USELESS_TYPE_CONVERSION (result); | |
1989 | ||
1990 | if (result != rhs) | |
1991 | { | |
1992 | changed = true; | |
1993 | set_rhs (stmt_p, result); | |
1994 | } | |
1995 | ||
1996 | return changed; | |
1997 | } | |
1998 | ||
1999 | /* Get the main expression from statement STMT. */ | |
2000 | ||
2001 | static tree | |
2002 | get_rhs (tree stmt) | |
2003 | { | |
2004 | enum tree_code code = TREE_CODE (stmt); | |
2005 | ||
2006 | if (code == MODIFY_EXPR) | |
2007 | return TREE_OPERAND (stmt, 1); | |
2008 | if (code == COND_EXPR) | |
2009 | return COND_EXPR_COND (stmt); | |
2010 | else if (code == SWITCH_EXPR) | |
2011 | return SWITCH_COND (stmt); | |
2012 | else if (code == RETURN_EXPR) | |
2013 | { | |
2014 | if (!TREE_OPERAND (stmt, 0)) | |
2015 | return NULL_TREE; | |
2016 | if (TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR) | |
2017 | return TREE_OPERAND (TREE_OPERAND (stmt, 0), 1); | |
2018 | else | |
2019 | return TREE_OPERAND (stmt, 0); | |
2020 | } | |
2021 | else if (code == GOTO_EXPR) | |
2022 | return GOTO_DESTINATION (stmt); | |
2023 | else if (code == LABEL_EXPR) | |
2024 | return LABEL_EXPR_LABEL (stmt); | |
2025 | else | |
2026 | return stmt; | |
2027 | } | |
2028 | ||
2029 | ||
2030 | /* Set the main expression of *STMT_P to EXPR. */ | |
2031 | ||
2032 | static void | |
2033 | set_rhs (tree *stmt_p, tree expr) | |
2034 | { | |
2035 | tree stmt = *stmt_p; | |
2036 | enum tree_code code = TREE_CODE (stmt); | |
2037 | ||
2038 | if (code == MODIFY_EXPR) | |
2039 | TREE_OPERAND (stmt, 1) = expr; | |
2040 | else if (code == COND_EXPR) | |
2041 | COND_EXPR_COND (stmt) = expr; | |
2042 | else if (code == SWITCH_EXPR) | |
2043 | SWITCH_COND (stmt) = expr; | |
2044 | else if (code == RETURN_EXPR) | |
2045 | { | |
2046 | if (TREE_OPERAND (stmt, 0) | |
2047 | && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR) | |
2048 | TREE_OPERAND (TREE_OPERAND (stmt, 0), 1) = expr; | |
2049 | else | |
2050 | TREE_OPERAND (stmt, 0) = expr; | |
2051 | } | |
2052 | else if (code == GOTO_EXPR) | |
2053 | GOTO_DESTINATION (stmt) = expr; | |
2054 | else if (code == LABEL_EXPR) | |
2055 | LABEL_EXPR_LABEL (stmt) = expr; | |
2056 | else | |
2057 | { | |
2058 | /* Replace the whole statement with EXPR. If EXPR has no side | |
2059 | effects, then replace *STMT_P with an empty statement. */ | |
2060 | stmt_ann_t ann = stmt_ann (stmt); | |
2061 | *stmt_p = TREE_SIDE_EFFECTS (expr) ? expr : build_empty_stmt (); | |
2062 | (*stmt_p)->common.ann = (tree_ann) ann; | |
2063 | ||
2064 | if (TREE_SIDE_EFFECTS (expr)) | |
2065 | { | |
2066 | def_optype defs; | |
2067 | vdef_optype vdefs; | |
2068 | size_t i; | |
2069 | ||
2070 | /* Fix all the SSA_NAMEs created by *STMT_P to point to its new | |
2071 | replacement. */ | |
2072 | defs = DEF_OPS (ann); | |
2073 | for (i = 0; i < NUM_DEFS (defs); i++) | |
2074 | { | |
2075 | tree var = DEF_OP (defs, i); | |
2076 | if (TREE_CODE (var) == SSA_NAME) | |
2077 | SSA_NAME_DEF_STMT (var) = *stmt_p; | |
2078 | } | |
2079 | ||
2080 | vdefs = VDEF_OPS (ann); | |
2081 | for (i = 0; i < NUM_VDEFS (vdefs); i++) | |
2082 | { | |
2083 | tree var = VDEF_RESULT (vdefs, i); | |
2084 | if (TREE_CODE (var) == SSA_NAME) | |
2085 | SSA_NAME_DEF_STMT (var) = *stmt_p; | |
2086 | } | |
2087 | } | |
2088 | } | |
2089 | } | |
2090 | ||
2091 | ||
2092 | /* Return a default value for variable VAR using the following rules: | |
2093 | ||
2094 | 1- Global and static variables are considered VARYING, unless they are | |
2095 | declared const. | |
2096 | ||
2097 | 2- Function arguments are considered VARYING. | |
2098 | ||
2099 | 3- Any other value is considered UNDEFINED. This is useful when | |
2100 | considering PHI nodes. PHI arguments that are undefined do not | |
2101 | change the constant value of the PHI node, which allows for more | |
2102 | constants to be propagated. */ | |
2103 | ||
2104 | static value | |
2105 | get_default_value (tree var) | |
2106 | { | |
2107 | value val; | |
2108 | tree sym; | |
2109 | ||
2110 | if (TREE_CODE (var) == SSA_NAME) | |
2111 | sym = SSA_NAME_VAR (var); | |
2112 | else | |
2113 | { | |
2114 | #ifdef ENABLE_CHECKING | |
2115 | if (!DECL_P (var)) | |
2116 | abort (); | |
2117 | #endif | |
2118 | sym = var; | |
2119 | } | |
2120 | ||
2121 | val.lattice_val = UNDEFINED; | |
2122 | val.const_val = NULL_TREE; | |
2123 | ||
2124 | if (TREE_CODE (sym) == PARM_DECL || TREE_THIS_VOLATILE (sym)) | |
2125 | { | |
2126 | /* Function arguments and volatile variables are considered VARYING. */ | |
2127 | val.lattice_val = VARYING; | |
2128 | } | |
2129 | else if (decl_function_context (sym) != current_function_decl | |
2130 | || TREE_STATIC (sym)) | |
2131 | { | |
2132 | /* Globals and static variables are considered VARYING, unless they | |
2133 | are declared 'const'. */ | |
2134 | val.lattice_val = VARYING; | |
2135 | ||
2136 | if (TREE_READONLY (sym) | |
2137 | && DECL_INITIAL (sym) | |
2138 | && is_gimple_min_invariant (DECL_INITIAL (sym))) | |
2139 | { | |
2140 | val.lattice_val = CONSTANT; | |
2141 | val.const_val = DECL_INITIAL (sym); | |
2142 | } | |
2143 | } | |
2144 | else | |
2145 | { | |
2146 | enum tree_code code; | |
2147 | tree stmt = SSA_NAME_DEF_STMT (var); | |
2148 | ||
2149 | if (!IS_EMPTY_STMT (stmt)) | |
2150 | { | |
2151 | code = TREE_CODE (stmt); | |
2152 | if (code != MODIFY_EXPR && code != PHI_NODE) | |
2153 | val.lattice_val = VARYING; | |
2154 | } | |
2155 | } | |
2156 | ||
2157 | return val; | |
2158 | } | |
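To make the rules above concrete, here is how they would map onto a toy translation unit (an illustrative sketch; the declarations are invented and this is not how the pass itself is exercised):

static const int limit = 42;   /* Read-only with a constant initializer:
                                  default lattice value CONSTANT (42).  */
int counter;                   /* Global, not const: VARYING.  */

int
f (int arg)                    /* Function argument (PARM_DECL): VARYING.  */
{
  int local = counter + arg;   /* Defined by a MODIFY_EXPR: starts as
                                  UNDEFINED and is refined when its
                                  defining statement is simulated.  */
  return local + limit;
}

int
main (void)
{
  return f (1) == 43 ? 0 : 1;
}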
2159 | ||
2160 | ||
2161 | /* Fold builtin call FN in statement STMT. If it cannot be folded into a | |
2162 | constant, return NULL_TREE. Otherwise, return its constant value. */ | |
2163 | ||
2164 | static tree | |
2165 | ccp_fold_builtin (tree stmt, tree fn) | |
2166 | { | |
2167 | tree result, strlen_val[2]; | |
2168 | tree arglist = TREE_OPERAND (fn, 1), a; | |
2169 | tree callee = get_callee_fndecl (fn); | |
2170 | bitmap visited; | |
2171 | int strlen_arg, i; | |
2172 | ||
2173 | /* Ignore MD builtins. */ | |
2174 | if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD) | |
2175 | return NULL_TREE; | |
2176 | ||
2177 | /* First try the generic builtin folder. If that succeeds, return the | |
2178 | result directly. */ | |
2179 | result = fold_builtin (fn); | |
2180 | if (result) | |
2181 | return result; | |
2182 | ||
2183 | /* If the builtin could not be folded, and it has no argument list, | |
2184 | we're done. */ | |
2185 | if (!arglist) | |
2186 | return NULL_TREE; | |
2187 | ||
2188 | /* Limit the work only for builtins we know how to simplify. */ | |
2189 | switch (DECL_FUNCTION_CODE (callee)) | |
2190 | { | |
2191 | case BUILT_IN_STRLEN: | |
2192 | case BUILT_IN_FPUTS: | |
2193 | case BUILT_IN_FPUTS_UNLOCKED: | |
2194 | strlen_arg = 1; | |
2195 | break; | |
2196 | case BUILT_IN_STRCPY: | |
2197 | case BUILT_IN_STRNCPY: | |
2198 | strlen_arg = 2; | |
2199 | break; | |
2200 | default: | |
2201 | return NULL_TREE; | |
2202 | } | |
2203 | ||
2204 | /* Try to use the dataflow information gathered by the CCP process. */ | |
2205 | visited = BITMAP_XMALLOC (); | |
2206 | ||
2207 | memset (strlen_val, 0, sizeof (strlen_val)); | |
2208 | for (i = 0, a = arglist; | |
2209 | strlen_arg; | |
2210 | i++, strlen_arg >>= 1, a = TREE_CHAIN (a)) | |
2211 | if (strlen_arg & 1) | |
2212 | { | |
2213 | bitmap_clear (visited); | |
2214 | if (!get_strlen (TREE_VALUE (a), &strlen_val[i], visited)) | |
2215 | strlen_val[i] = NULL_TREE; | |
2216 | } | |
2217 | ||
2218 | BITMAP_XFREE (visited); | |
2219 | ||
2220 | /* FIXME. All this code looks dangerous in the sense that it might | |
2221 | create non-gimple expressions. */ | |
2222 | switch (DECL_FUNCTION_CODE (callee)) | |
2223 | { | |
2224 | case BUILT_IN_STRLEN: | |
2225 | /* Convert from the internal "sizetype" type to "size_t". */ | |
2226 | if (strlen_val[0] | |
2227 | && size_type_node) | |
2228 | { | |
2229 | tree new = convert (size_type_node, strlen_val[0]); | |
2230 | ||
2231 | /* If the result is not a valid gimple value, or not a cast | |
2232 | of a valid gimple value, then we cannot use the result. */ | |
2233 | if (is_gimple_val (new) | |
2234 | || (is_gimple_cast (new) | |
2235 | && is_gimple_val (TREE_OPERAND (new, 0)))) | |
2236 | return new; | |
2237 | else | |
2238 | return NULL_TREE; | |
2239 | } | |
2240 | return strlen_val[0]; | |
2241 | case BUILT_IN_STRCPY: | |
2242 | if (!strlen_val[1] || !is_gimple_val (strlen_val[1])) | |
2243 | return NULL_TREE; | |
2244 | return simplify_builtin_strcpy (arglist, strlen_val[1]); | |
2245 | case BUILT_IN_STRNCPY: | |
2246 | if (!strlen_val[1] || !is_gimple_val (strlen_val[1])) | |
2247 | return NULL_TREE; | |
2248 | return simplify_builtin_strncpy (arglist, strlen_val[1]); | |
2249 | case BUILT_IN_FPUTS: | |
2250 | return simplify_builtin_fputs (arglist, | |
2251 | TREE_CODE (stmt) != MODIFY_EXPR, 0, | |
2252 | strlen_val[0]); | |
2253 | case BUILT_IN_FPUTS_UNLOCKED: | |
2254 | return simplify_builtin_fputs (arglist, | |
2255 | TREE_CODE (stmt) != MODIFY_EXPR, 1, | |
2256 | strlen_val[0]); | |
2257 | ||
2258 | default: | |
2259 | abort (); | |
2260 | } | |
2261 | ||
2262 | return NULL_TREE; | |
2263 | } | |
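Concretely, the kinds of calls this handles look like the following (an illustrative, self-contained example rather than compiler code): a strlen of a string constant becomes the constant itself, and a strcpy whose source length is known can be simplified to a fixed-size copy:

#include <assert.h>
#include <string.h>

int
main (void)
{
  char buf[16];

  /* strlen of a string constant has a known value, so the call can be
     folded to the constant 5.  */
  assert (strlen ("hello") == 5);

  /* With the source length known, strcpy can be simplified to copying
     a fixed number of bytes (the length plus the terminating NUL).  */
  strcpy (buf, "hello");
  assert (memcmp (buf, "hello", 6) == 0);
  return 0;
}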
2264 | ||
2265 | ||
2266 | /* Return the string length of ARG in LENGTH. If ARG is an SSA name variable, | |
2267 | follow its use-def chains. If *LENGTH is not NULL and its value is not | |
2268 | equal to the length we determine, or if we are unable to determine the | |
2269 | length, return false. VISITED is a bitmap of visited variables. */ | |
2270 | ||
2271 | static bool | |
2272 | get_strlen (tree arg, tree *length, bitmap visited) | |
2273 | { | |
2274 | tree var, def_stmt, val; | |
2275 | ||
2276 | if (TREE_CODE (arg) != SSA_NAME) | |
2277 | { | |
2278 | val = c_strlen (arg, 1); | |
2279 | if (!val) | |
2280 | return false; | |
2281 | ||
2282 | if (*length && simple_cst_equal (val, *length) != 1) | |
2283 | return false; | |
2284 | ||
2285 | *length = val; | |
2286 | return true; | |
2287 | } | |
2288 | ||
2289 | /* If we were already here, break the infinite cycle. */ | |
2290 | if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg))) | |
2291 | return true; | |
2292 | bitmap_set_bit (visited, SSA_NAME_VERSION (arg)); | |
2293 | ||
2294 | var = arg; | |
2295 | def_stmt = SSA_NAME_DEF_STMT (var); | |
2296 | ||
2297 | switch (TREE_CODE (def_stmt)) | |
2298 | { | |
2299 | case MODIFY_EXPR: | |
2300 | { | |
2301 | tree len, rhs; | |
2302 | ||
2303 | /* The RHS of the statement defining VAR must either have a | |
2304 | constant length or come from another SSA_NAME with a constant | |
2305 | length. */ | |
2306 | rhs = TREE_OPERAND (def_stmt, 1); | |
2307 | STRIP_NOPS (rhs); | |
2308 | if (TREE_CODE (rhs) == SSA_NAME) | |
2309 | return get_strlen (rhs, length, visited); | |
2310 | ||
2311 | /* See if the RHS is a constant length. */ | |
2312 | len = c_strlen (rhs, 1); | |
2313 | if (len) | |
2314 | { | |
2315 | if (*length && simple_cst_equal (len, *length) != 1) | |
2316 | return false; | |
2317 | ||
2318 | *length = len; | |
2319 | return true; | |
2320 | } | |
2321 | ||
2322 | break; | |
2323 | } | |
2324 | ||
2325 | case PHI_NODE: | |
2326 | { | |
2327 | /* All the arguments of the PHI node must have the same constant | |
2328 | length. */ | |
2329 | int i; | |
2330 | ||
2331 | for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++) | |
2332 | { | |
2333 | tree arg = PHI_ARG_DEF (def_stmt, i); | |
2334 | ||
2335 | /* If this PHI has itself as an argument, we cannot | |
2336 | determine the string length of this argument. However, | |
2337 | if we can find a constant string length for the other | |
2338 | PHI args then we can still be sure that this is a | |
2339 | constant string length. So be optimistic and just | |
2340 | continue with the next argument. */ | |
2341 | if (arg == PHI_RESULT (def_stmt)) | |
2342 | continue; | |
2343 | ||
2344 | if (!get_strlen (arg, length, visited)) | |
2345 | return false; | |
2346 | } | |
2347 | ||
2348 | return true; | |
2349 | } | |
2350 | ||
2351 | default: | |
2352 | break; | |
2353 | } | |
2354 | ||
2355 | ||
2356 | return false; | |
2357 | } | |
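The PHI case is what lets a string length survive control flow. In the sketch below (illustrative only; names invented), both branches produce a literal of length 3, so strlen (s) can be treated as the constant 3 even though s itself is not constant:

#include <assert.h>
#include <string.h>

static size_t
tag_length (int flag)
{
  /* The PHI node merging the two assignments to S has a single
     constant string length, 3.  */
  const char *s = flag ? "foo" : "bar";
  return strlen (s);
}

int
main (void)
{
  assert (tag_length (0) == 3 && tag_length (1) == 3);
  return 0;
}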
2358 | ||
2359 | \f | |
2360 | /* A simple pass that attempts to fold all builtin functions. This pass | |
2361 | is run after we've propagated as many constants as we can. */ | |
2362 | ||
2363 | static void | |
2364 | execute_fold_all_builtins (void) | |
2365 | { | |
2366 | basic_block bb; | |
2367 | FOR_EACH_BB (bb) | |
2368 | { | |
2369 | block_stmt_iterator i; | |
2370 | for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i)) | |
2371 | { | |
2372 | tree *stmtp = bsi_stmt_ptr (i); | |
2373 | tree call = get_rhs (*stmtp); | |
2374 | tree callee, result; | |
2375 | ||
2376 | if (!call || TREE_CODE (call) != CALL_EXPR) | |
2377 | continue; | |
2378 | callee = get_callee_fndecl (call); | |
2379 | if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL) | |
2380 | continue; | |
2381 | ||
2382 | result = ccp_fold_builtin (*stmtp, call); | |
2383 | if (!result) | |
2384 | switch (DECL_FUNCTION_CODE (callee)) | |
2385 | { | |
2386 | case BUILT_IN_CONSTANT_P: | |
2387 | /* Resolve __builtin_constant_p. If it hasn't been | |
2388 | folded to integer_one_node by now, it's fairly | |
2389 | certain that the value simply isn't constant. */ | |
2390 | result = integer_zero_node; | |
2391 | break; | |
2392 | ||
2393 | default: | |
2394 | continue; | |
2395 | } | |
2396 | ||
2397 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2398 | { | |
2399 | fprintf (dump_file, "Simplified\n "); | |
2400 | print_generic_stmt (dump_file, *stmtp, dump_flags); | |
2401 | } | |
2402 | ||
2403 | set_rhs (stmtp, result); | |
2404 | modify_stmt (*stmtp); | |
2405 | ||
2406 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2407 | { | |
2408 | fprintf (dump_file, "to\n "); | |
2409 | print_generic_stmt (dump_file, *stmtp, dump_flags); | |
2410 | fprintf (dump_file, "\n"); | |
2411 | } | |
2412 | } | |
2413 | } | |
2414 | } | |
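The __builtin_constant_p handling above can be seen from the source side in a small example like this one (illustrative only; GCC-specific builtin): if propagation has not proven the argument constant by the time this late pass runs, the call is resolved to 0 and the guarded branch becomes dead code; if it has, the generic folder already produced 1.

static int
source (void)
{
  return 5;
}

int
main (void)
{
  int x = source ();

  /* Resolved to 0 here unless earlier passes proved X constant, in
     which case the call was already folded to 1.  */
  if (__builtin_constant_p (x))
    return 1;
  return 0;
}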
2415 | ||
2416 | struct tree_opt_pass pass_fold_builtins = | |
2417 | { | |
2418 | "fab", /* name */ | |
2419 | NULL, /* gate */ | |
2420 | execute_fold_all_builtins, /* execute */ | |
2421 | NULL, /* sub */ | |
2422 | NULL, /* next */ | |
2423 | 0, /* static_pass_number */ | |
2424 | 0, /* tv_id */ | |
2425 | PROP_cfg | PROP_ssa, /* properties_required */ | |
2426 | 0, /* properties_provided */ | |
2427 | 0, /* properties_destroyed */ | |
2428 | 0, /* todo_flags_start */ | |
2429 | TODO_dump_func | TODO_verify_ssa /* todo_flags_finish */ | |
2430 | }; | |
2431 | ||
2432 | ||
2433 | #include "gt-tree-ssa-ccp.h" |