/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* Conditional constant propagation.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "errors.h"
#include "ggc.h"
#include "tree.h"
#include "langhooks.h"

/* These RTL headers are needed for basic-block.h.  */
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"

#include "diagnostic.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "tree-gimple.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "expr.h"
#include "flags.h"


/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED = 0,
  UNDEFINED,
  CONSTANT,
  VARYING
} latticevalue;

/* Use the TREE_VISITED bitflag to mark statements and PHI nodes that have
   been deemed VARYING and shouldn't be simulated again.  */
#define DONT_SIMULATE_AGAIN(T)  TREE_VISITED (T)

/* Main structure for CCP.  Contains the lattice value and, if it's a
   constant, the constant value.  */
typedef struct
{
  latticevalue lattice_val;
  tree const_val;
} value;

/* A bitmap to keep track of executable blocks in the CFG.  */
static sbitmap executable_blocks;

/* Array of control flow edges on the worklist.  */
static GTY(()) varray_type cfg_blocks = NULL;

static unsigned int cfg_blocks_num = 0;
static int cfg_blocks_tail;
static int cfg_blocks_head;

static sbitmap bb_in_list;

/* This is used to track the current value of each variable.  */
static value *value_vector;

/* Worklist of SSA edges which will need reexamination as their definition
   has changed.  SSA edges are def-use edges in the SSA web.  For each
   edge, we store the definition statement or PHI node D.  The destination
   nodes that need to be visited are accessed using immediate_uses (D).  */
static GTY(()) varray_type ssa_edges;

/* Identical to SSA_EDGES.  For performance reasons, the list of SSA
   edges is split into two.  One contains all SSA edges that need to be
   reexamined because their lattice value changed to varying (this
   worklist), and the other contains all other SSA edges to be
   reexamined (ssa_edges).

   Since most values in the program are varying, the ideal situation
   is to move them to that lattice value as quickly as possible.
   Thus, it doesn't make sense to process any other type of lattice
   value until all varying values are propagated fully, which is one
   thing using the varying worklist achieves.  In addition, if you
   don't use a separate worklist for varying edges, you end up with
   situations where lattice values move from
   undefined->constant->varying instead of undefined->varying.  */
static GTY(()) varray_type varying_ssa_edges;
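
/* Editorial illustration (not in the original sources): for a PHI
   node such as

     x_2 = PHI <1, y_1>

   where y_1 is VARYING and both incoming edges are executable,
   draining VARYING_SSA_EDGES first drives x_2 directly from
   UNDEFINED to VARYING.  Visiting the constant argument first would
   move x_2 to CONSTANT 1 and require a second visit to lower it to
   VARYING.  */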


static void initialize (void);
static void finalize (void);
static void visit_phi_node (tree);
static tree ccp_fold (tree);
static value cp_lattice_meet (value, value);
static void visit_stmt (tree);
static void visit_cond_stmt (tree);
static void visit_assignment (tree);
static void add_var_to_ssa_edges_worklist (tree, value);
static void add_outgoing_control_edges (basic_block);
static void add_control_edge (edge);
static void def_to_varying (tree);
static void set_lattice_value (tree, value);
static void simulate_block (basic_block);
static void simulate_stmt (tree);
static void substitute_and_fold (void);
static value evaluate_stmt (tree);
static void dump_lattice_value (FILE *, const char *, value);
static bool replace_uses_in (tree, bool *);
static latticevalue likely_value (tree);
static tree get_rhs (tree);
static bool set_rhs (tree *, tree);
static value *get_value (tree);
static value get_default_value (tree);
static tree ccp_fold_builtin (tree, tree);
static bool get_strlen (tree, tree *, bitmap);
static inline bool cfg_blocks_empty_p (void);
static void cfg_blocks_add (basic_block);
static basic_block cfg_blocks_get (void);
static bool need_imm_uses_for (tree var);

/* Process an SSA edge worklist.  WORKLIST is the SSA edge worklist to
   drain.  This pops statements off the given WORKLIST and processes
   them until there are no more statements on WORKLIST.  */

static void
process_ssa_edge_worklist (varray_type *worklist)
{
  /* Drain the entire worklist.  */
  while (VARRAY_ACTIVE_SIZE (*worklist) > 0)
    {
      /* Pull the statement to simulate off the worklist.  */
      tree stmt = VARRAY_TOP_TREE (*worklist);
      stmt_ann_t ann = stmt_ann (stmt);
      VARRAY_POP (*worklist);

      /* visit_stmt can "cancel" reevaluation of some statements.
         If it does, then in_ccp_worklist will be zero.  */
      if (ann->in_ccp_worklist)
        {
          ann->in_ccp_worklist = 0;
          simulate_stmt (stmt);
        }
    }
}

/* Main entry point for SSA Conditional Constant Propagation.

   On exit, VARS_TO_RENAME will contain the symbols that have been exposed by
   the propagation of ADDR_EXPR expressions into pointer dereferences and need
   to be renamed into SSA.  */

static void
tree_ssa_ccp (void)
{
  initialize ();

  /* Iterate until the worklists are empty.  */
  while (!cfg_blocks_empty_p ()
         || VARRAY_ACTIVE_SIZE (ssa_edges) > 0
         || VARRAY_ACTIVE_SIZE (varying_ssa_edges) > 0)
    {
      if (!cfg_blocks_empty_p ())
        {
          /* Pull the next block to simulate off the worklist.  */
          basic_block dest_block = cfg_blocks_get ();
          simulate_block (dest_block);
        }

      /* In order to move things to varying as quickly as possible,
         process the VARYING_SSA_EDGES worklist first.  */
      process_ssa_edge_worklist (&varying_ssa_edges);

      /* Now process the SSA_EDGES worklist.  */
      process_ssa_edge_worklist (&ssa_edges);
    }

  /* Now perform substitutions based on the known constant values.  */
  substitute_and_fold ();

  /* Now clean up any unreachable code.  */
  cleanup_tree_cfg ();

  /* Free allocated memory.  */
  finalize ();

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_referenced_vars (dump_file);
      fprintf (dump_file, "\n\n");
    }
}

static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}

struct tree_opt_pass pass_ccp =
{
  "ccp",                                /* name */
  gate_ccp,                             /* gate */
  tree_ssa_ccp,                         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_CCP,                          /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_rename_vars
    | TODO_ggc_collect | TODO_verify_ssa
    | TODO_verify_stmts                 /* todo_flags_finish */
};


/* Get the constant value associated with variable VAR.  */

static value *
get_value (tree var)
{
  value *val;

#if defined ENABLE_CHECKING
  if (TREE_CODE (var) != SSA_NAME)
    abort ();
#endif

  val = &value_vector[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  return val;
}


/* Simulate the execution of BLOCK.  Evaluate the statement associated
   with each variable reference inside the block.  */

static void
simulate_block (basic_block block)
{
  tree phi;

  /* There is nothing to do for the exit block.  */
  if (block == EXIT_BLOCK_PTR)
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSimulating block %d\n", block->index);

  /* Always simulate PHI nodes, even if we have simulated this block
     before.  */
  for (phi = phi_nodes (block); phi; phi = PHI_CHAIN (phi))
    visit_phi_node (phi);

  /* If this is the first time we've simulated this block, then we
     must simulate each of its statements.  */
  if (!TEST_BIT (executable_blocks, block->index))
    {
      block_stmt_iterator j;
      unsigned int normal_edge_count;
      edge e, normal_edge;

      /* Note that we have simulated this block.  */
      SET_BIT (executable_blocks, block->index);

      for (j = bsi_start (block); !bsi_end_p (j); bsi_next (&j))
        visit_stmt (bsi_stmt (j));

      /* We cannot predict when abnormal edges will be executed, so
         once a block is considered executable, we consider any
         outgoing abnormal edges as executable.

         At the same time, if this block has only one successor that is
         reached by non-abnormal edges, then add that successor to the
         worklist.  */
      normal_edge_count = 0;
      normal_edge = NULL;
      for (e = block->succ; e; e = e->succ_next)
        {
          if (e->flags & EDGE_ABNORMAL)
            {
              add_control_edge (e);
            }
          else
            {
              normal_edge_count++;
              normal_edge = e;
            }
        }

      if (normal_edge_count == 1)
        add_control_edge (normal_edge);
    }
}


/* Simulate statement USE_STMT, which was reached by following the SSA
   def-use edges of a definition whose lattice value changed.  */

static void
simulate_stmt (tree use_stmt)
{
  basic_block use_bb = bb_for_stmt (use_stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nSimulating statement (from ssa_edges): ");
      print_generic_stmt (dump_file, use_stmt, dump_flags);
    }

  if (TREE_CODE (use_stmt) == PHI_NODE)
    {
      /* PHI nodes are always visited, regardless of whether or not the
         destination block is executable.  */
      visit_phi_node (use_stmt);
    }
  else if (TEST_BIT (executable_blocks, use_bb->index))
    {
      /* Otherwise, visit the statement containing the use reached by
         DEF, only if the destination block is marked executable.  */
      visit_stmt (use_stmt);
    }
}


/* Perform final substitution and folding.  After this pass the program
   should still be in SSA form.  */

static void
substitute_and_fold (void)
{
  basic_block bb;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file,
             "\nSubstituting constants and folding statements\n\n");

  /* Substitute constants in every statement of every basic block.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      tree phi;

      /* Propagate our known constants into PHI nodes.  */
      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
        {
          int i;

          for (i = 0; i < PHI_NUM_ARGS (phi); i++)
            {
              value *new_val;
              use_operand_p orig_p = PHI_ARG_DEF_PTR (phi, i);
              tree orig = USE_FROM_PTR (orig_p);

              if (! SSA_VAR_P (orig))
                break;

              new_val = get_value (orig);
              if (new_val->lattice_val == CONSTANT
                  && may_propagate_copy (orig, new_val->const_val))
                SET_USE (orig_p, new_val->const_val);
            }
        }

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          bool replaced_address;
          tree stmt = bsi_stmt (i);

          /* Skip statements that have been folded already.  */
          if (stmt_modified_p (stmt) || !is_exec_stmt (stmt))
            continue;

          /* Replace the statement with its folded version and mark it
             folded.  */
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Line %d: replaced ", get_lineno (stmt));
              print_generic_stmt (dump_file, stmt, TDF_SLIM);
            }

          if (replace_uses_in (stmt, &replaced_address))
            {
              bool changed = fold_stmt (bsi_stmt_ptr (i));
              stmt = bsi_stmt (i);
              modify_stmt (stmt);
              /* If we folded a builtin function, we'll likely
                 need to rename VDEFs.  */
              if (replaced_address || changed)
                {
                  mark_new_vars_to_rename (stmt, vars_to_rename);
                  if (maybe_clean_eh_stmt (stmt))
                    tree_purge_dead_eh_edges (bb);
                }
            }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, " with ");
              print_generic_stmt (dump_file, stmt, TDF_SLIM);
              fprintf (dump_file, "\n");
            }
        }
    }
}


/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling cp_lattice_meet () with all the
   arguments of the PHI node that are incoming via executable edges.  */

static void
visit_phi_node (tree phi)
{
  bool short_circuit = 0;
  value phi_val, *curr_val;
  int i;

  /* If the PHI node has already been deemed to be VARYING, don't simulate
     it again.  */
  if (DONT_SIMULATE_AGAIN (phi))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_generic_expr (dump_file, phi, dump_flags);
    }

  curr_val = get_value (PHI_RESULT (phi));
  switch (curr_val->lattice_val)
    {
    case VARYING:
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "\n   Shortcircuit.  Default of VARYING.");
      short_circuit = 1;
      break;

    case CONSTANT:
      phi_val = *curr_val;
      break;

    case UNDEFINED:
    case UNINITIALIZED:
      phi_val.lattice_val = UNDEFINED;
      phi_val.const_val = NULL_TREE;
      break;

    default:
      abort ();
    }

  /* If the variable is volatile or the variable is never referenced in a
     real operand, then consider the PHI node VARYING.  */
  if (short_circuit || TREE_THIS_VOLATILE (SSA_NAME_VAR (PHI_RESULT (phi))))
    {
      phi_val.lattice_val = VARYING;
      phi_val.const_val = NULL;
    }
  else
    for (i = 0; i < PHI_NUM_ARGS (phi); i++)
      {
        /* Compute the meet operator over all the PHI arguments.  */
        edge e = PHI_ARG_EDGE (phi, i);

        if (dump_file && (dump_flags & TDF_DETAILS))
          {
            fprintf (dump_file,
                     "\n    Argument #%d (%d -> %d %sexecutable)\n",
                     i, e->src->index, e->dest->index,
                     (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
          }

        /* If the incoming edge is executable, compute the meet operator for
           the existing value of the PHI node and the current PHI argument.  */
        if (e->flags & EDGE_EXECUTABLE)
          {
            tree rdef = PHI_ARG_DEF (phi, i);
            value *rdef_val, val;

            if (is_gimple_min_invariant (rdef))
              {
                val.lattice_val = CONSTANT;
                val.const_val = rdef;
                rdef_val = &val;
              }
            else
              rdef_val = get_value (rdef);

            phi_val = cp_lattice_meet (phi_val, *rdef_val);

            if (dump_file && (dump_flags & TDF_DETAILS))
              {
                fprintf (dump_file, "\t");
                print_generic_expr (dump_file, rdef, dump_flags);
                dump_lattice_value (dump_file, "\tValue: ", *rdef_val);
                fprintf (dump_file, "\n");
              }

            if (phi_val.lattice_val == VARYING)
              break;
          }
      }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", phi_val);
      fprintf (dump_file, "\n\n");
    }

  set_lattice_value (PHI_RESULT (phi), phi_val);
  if (phi_val.lattice_val == VARYING)
    DONT_SIMULATE_AGAIN (phi) = 1;
}


/* Compute the meet operator between VAL1 and VAL2:

     any M UNDEFINED = any
     any M VARYING   = VARYING
     Ci  M Cj        = Ci       if (i == j)
     Ci  M Cj        = VARYING  if (i != j)  */

static value
cp_lattice_meet (value val1, value val2)
{
  value result;

  /* any M UNDEFINED = any.  */
  if (val1.lattice_val == UNDEFINED)
    return val2;
  else if (val2.lattice_val == UNDEFINED)
    return val1;

  /* any M VARYING = VARYING.  */
  if (val1.lattice_val == VARYING || val2.lattice_val == VARYING)
    {
      result.lattice_val = VARYING;
      result.const_val = NULL_TREE;
      return result;
    }

  /* Ci M Cj = Ci       if (i == j)
     Ci M Cj = VARYING  if (i != j)  */
  if (simple_cst_equal (val1.const_val, val2.const_val) == 1)
    {
      result.lattice_val = CONSTANT;
      result.const_val = val1.const_val;
    }
  else
    {
      result.lattice_val = VARYING;
      result.const_val = NULL_TREE;
    }

  return result;
}
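
/* Editorial illustration (not in the original sources) of the meet
   rules above, for integer constants:

     UNDEFINED M 4 = CONSTANT 4
     4         M 4 = CONSTANT 4
     4         M 5 = VARYING
     4 M VARYING   = VARYING

   This is why a PHI node whose executable arguments carry different
   constants is driven to VARYING.  */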


/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, do the following:

   - If the statement is an assignment, add all the SSA edges starting at
     this definition.

   - If the statement is a conditional branch:
       . If the statement evaluates to non-constant, add all edges to
         worklist.
       . If the statement is constant, add the edge executed as the
         result of the branch.  */

static void
visit_stmt (tree stmt)
{
  size_t i;
  stmt_ann_t ann;
  def_optype defs;
  v_may_def_optype v_may_defs;
  v_must_def_optype v_must_defs;

  /* If the statement has already been deemed to be VARYING, don't simulate
     it again.  */
  if (DONT_SIMULATE_AGAIN (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement: ");
      print_generic_stmt (dump_file, stmt, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  ann = stmt_ann (stmt);

  /* If this statement is already in the worklist then "cancel" it.  The
     reevaluation implied by the worklist entry will produce the same
     value we generate here and thus reevaluating it again from the
     worklist is pointless.  */
  if (ann->in_ccp_worklist)
    ann->in_ccp_worklist = 0;

  /* Now examine the statement.  If the statement is an assignment that
     produces a single output value, evaluate its RHS to see if the lattice
     value of its output has changed.  */
  if (TREE_CODE (stmt) == MODIFY_EXPR
      && TREE_CODE (TREE_OPERAND (stmt, 0)) == SSA_NAME)
    visit_assignment (stmt);

  /* Definitions made by statements other than assignments to SSA_NAMEs
     represent unknown modifications to their outputs.  Mark them VARYING.  */
  else if (NUM_DEFS (defs = DEF_OPS (ann)) != 0)
    {
      DONT_SIMULATE_AGAIN (stmt) = 1;
      for (i = 0; i < NUM_DEFS (defs); i++)
        {
          tree def = DEF_OP (defs, i);
          def_to_varying (def);
        }
    }

  /* If STMT is a conditional branch, see if we can determine which branch
     will be taken.  */
  else if (TREE_CODE (stmt) == COND_EXPR || TREE_CODE (stmt) == SWITCH_EXPR)
    visit_cond_stmt (stmt);

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  else
    {
      DONT_SIMULATE_AGAIN (stmt) = 1;

      /* If STMT is a computed goto, then mark all the output edges
         executable.  */
      if (computed_goto_p (stmt))
        add_outgoing_control_edges (bb_for_stmt (stmt));
    }

  /* Mark all V_MAY_DEF operands VARYING.  */
  v_may_defs = V_MAY_DEF_OPS (ann);
  for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
    def_to_varying (V_MAY_DEF_RESULT (v_may_defs, i));

  /* Mark all V_MUST_DEF operands VARYING.  */
  v_must_defs = V_MUST_DEF_OPS (ann);
  for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
    def_to_varying (V_MUST_DEF_OP (v_must_defs, i));
}


/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS.  */

static void
visit_assignment (tree stmt)
{
  value val;
  tree lhs, rhs;

  lhs = TREE_OPERAND (stmt, 0);
  rhs = TREE_OPERAND (stmt, 1);

  if (TREE_THIS_VOLATILE (SSA_NAME_VAR (lhs)))
    {
      /* Volatile variables are always VARYING.  */
      val.lattice_val = VARYING;
      val.const_val = NULL_TREE;
    }
  else if (TREE_CODE (rhs) == SSA_NAME)
    {
      /* For a simple copy operation, we copy the lattice values.  */
      value *nval = get_value (rhs);
      val = *nval;
    }
  else
    {
      /* Evaluate the statement.  */
      val = evaluate_stmt (stmt);
    }

  /* FIXME: Hack.  If this was a definition of a bitfield, we need to widen
     the constant value into the type of the destination variable.  This
     should not be necessary if GCC represented bitfields properly.  */
  {
    tree lhs = TREE_OPERAND (stmt, 0);
    if (val.lattice_val == CONSTANT
        && TREE_CODE (lhs) == COMPONENT_REF
        && DECL_BIT_FIELD (TREE_OPERAND (lhs, 1)))
      {
        tree w = widen_bitfield (val.const_val, TREE_OPERAND (lhs, 1), lhs);

        if (w && is_gimple_min_invariant (w))
          val.const_val = w;
        else
          {
            val.lattice_val = VARYING;
            val.const_val = NULL;
          }
      }
  }

  /* Set the lattice value of the statement's output.  */
  set_lattice_value (lhs, val);
  if (val.lattice_val == VARYING)
    DONT_SIMULATE_AGAIN (stmt) = 1;
}


/* Visit the conditional statement STMT.  If it evaluates to a constant value,
   mark outgoing edges appropriately.  */

static void
visit_cond_stmt (tree stmt)
{
  edge e;
  value val;
  basic_block block;

  block = bb_for_stmt (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically, add
     all outgoing edges from BLOCK.  */
  e = find_taken_edge (block, val.const_val);
  if (e)
    add_control_edge (e);
  else
    {
      DONT_SIMULATE_AGAIN (stmt) = 1;
      add_outgoing_control_edges (block);
    }
}


/* Add all the edges coming out of BB to the control flow worklist.  */

static void
add_outgoing_control_edges (basic_block bb)
{
  edge e;

  for (e = bb->succ; e; e = e->succ_next)
    add_control_edge (e);
}


/* Add edge E to the control flow worklist.  */

static void
add_control_edge (edge e)
{
  basic_block bb = e->dest;
  if (bb == EXIT_BLOCK_PTR)
    return;

  /* If the edge had already been executed, skip it.  */
  if (e->flags & EDGE_EXECUTABLE)
    return;

  e->flags |= EDGE_EXECUTABLE;

  /* If the block is already in the list, we're done.  */
  if (TEST_BIT (bb_in_list, bb->index))
    return;

  cfg_blocks_add (bb);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Adding destination of edge (%d -> %d) to worklist\n\n",
             e->src->index, e->dest->index);
}


/* CCP specific front-end to the non-destructive constant folding routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS.  */

static tree
ccp_fold (tree stmt)
{
  tree rhs = get_rhs (stmt);
  enum tree_code code = TREE_CODE (rhs);
  int kind = TREE_CODE_CLASS (code);
  tree retval = NULL_TREE;

  /* If the RHS is just a variable, then that variable must now have
     a constant value that we can return directly.  */
  if (TREE_CODE (rhs) == SSA_NAME)
    return get_value (rhs)->const_val;

  /* Unary operators.  Note that we know the single operand must
     be a constant.  So this should almost always return a
     simplified RHS.  */
  if (kind == '1')
    {
      /* Handle unary operators which can appear in GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);

      /* Simplify the operand down to a constant.  */
      if (TREE_CODE (op0) == SSA_NAME)
        {
          value *val = get_value (op0);
          if (val->lattice_val == CONSTANT)
            op0 = val->const_val;
        }

      retval = nondestructive_fold_unary_to_constant (code,
                                                      TREE_TYPE (rhs),
                                                      op0);

      /* If we folded, but did not create an invariant, then we cannot
         use this expression.  */
      if (retval && ! is_gimple_min_invariant (retval))
        return NULL;

      /* If we could not fold the expression, but the arguments are all
         constants and gimple values, then build and return the new
         expression.

         In some cases the new expression is still something we can
         use as a replacement for an argument.  This happens with
         NOP conversions of types for example.

         In other cases the new expression cannot be used as a
         replacement for an argument (as it would create non-gimple
         code).  But the new expression can still be used to derive
         other constants.  */
      if (! retval && is_gimple_min_invariant (op0))
        return build1 (code, TREE_TYPE (rhs), op0);
    }

  /* Binary and comparison operators.  We know one or both of the
     operands are constants.  */
  else if (kind == '2'
           || kind == '<'
           || code == TRUTH_AND_EXPR
           || code == TRUTH_OR_EXPR
           || code == TRUTH_XOR_EXPR)
    {
      /* Handle binary and comparison operators that can appear in
         GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);
      tree op1 = TREE_OPERAND (rhs, 1);

      /* Simplify the operands down to constants when appropriate.  */
      if (TREE_CODE (op0) == SSA_NAME)
        {
          value *val = get_value (op0);
          if (val->lattice_val == CONSTANT)
            op0 = val->const_val;
        }

      if (TREE_CODE (op1) == SSA_NAME)
        {
          value *val = get_value (op1);
          if (val->lattice_val == CONSTANT)
            op1 = val->const_val;
        }

      retval = nondestructive_fold_binary_to_constant (code,
                                                       TREE_TYPE (rhs),
                                                       op0, op1);

      /* If we folded, but did not create an invariant, then we cannot
         use this expression.  */
      if (retval && ! is_gimple_min_invariant (retval))
        return NULL;

      /* If we could not fold the expression, but the arguments are all
         constants and gimple values, then build and return the new
         expression.

         In some cases the new expression is still something we can
         use as a replacement for an argument.  This happens with
         NOP conversions of types for example.

         In other cases the new expression cannot be used as a
         replacement for an argument (as it would create non-gimple
         code).  But the new expression can still be used to derive
         other constants.  */
      if (! retval
          && is_gimple_min_invariant (op0)
          && is_gimple_min_invariant (op1))
        return build (code, TREE_TYPE (rhs), op0, op1);
    }

  /* We may be able to fold away calls to builtin functions if their
     arguments are constants.  */
  else if (code == CALL_EXPR
           && TREE_CODE (TREE_OPERAND (rhs, 0)) == ADDR_EXPR
           && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0))
               == FUNCTION_DECL)
           && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)))
    {
      use_optype uses = STMT_USE_OPS (stmt);
      if (NUM_USES (uses) != 0)
        {
          tree *orig;
          size_t i;

          /* Preserve the original values of every operand.  */
          orig = xmalloc (sizeof (tree) * NUM_USES (uses));
          for (i = 0; i < NUM_USES (uses); i++)
            orig[i] = USE_OP (uses, i);

          /* Substitute operands with their values and try to fold.  */
          replace_uses_in (stmt, NULL);
          retval = fold_builtin (rhs, false);

          /* Restore operands to their original form.  */
          for (i = 0; i < NUM_USES (uses); i++)
            SET_USE_OP (uses, i, orig[i]);
          free (orig);
        }
    }
  else
    return rhs;

  /* If we got a simplified form, see if we need to convert its type.  */
  if (retval)
    return fold_convert (TREE_TYPE (rhs), retval);

  /* No simplification was possible.  */
  return rhs;
}


/* Evaluate statement STMT.  */

static value
evaluate_stmt (tree stmt)
{
  value val;
  tree simplified;
  latticevalue likelyvalue = likely_value (stmt);

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    simplified = get_rhs (stmt);
  /* Otherwise the statement is likely to have an UNDEFINED value and
     there will be nothing to do.  */
  else
    simplified = NULL_TREE;

  if (simplified && is_gimple_min_invariant (simplified))
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.const_val = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
         had undefined operands, then the result of the statement should
         be undefined.  Else the result of the statement is VARYING.  */
      val.lattice_val = (likelyvalue == UNDEFINED ? UNDEFINED : VARYING);
      val.const_val = NULL_TREE;
    }

  return val;
}


/* Debugging dumps.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, value val)
{
  switch (val.lattice_val)
    {
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.const_val, dump_flags);
      break;
    default:
      abort ();
    }
}

/* Given a constant value VAL for bitfield FIELD, and a destination
   variable VAR, return VAL appropriately widened to fit into VAR.  If
   FIELD is wider than HOST_WIDE_INT, NULL is returned.  */

tree
widen_bitfield (tree val, tree field, tree var)
{
  unsigned HOST_WIDE_INT var_size, field_size;
  tree wide_val;
  unsigned HOST_WIDE_INT mask;
  unsigned int i;

  /* We can only do this if the size of the type and field and VAL are
     all constants representable in HOST_WIDE_INT.  */
  if (!host_integerp (TYPE_SIZE (TREE_TYPE (var)), 1)
      || !host_integerp (DECL_SIZE (field), 1)
      || !host_integerp (val, 0))
    return NULL_TREE;

  var_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1);
  field_size = tree_low_cst (DECL_SIZE (field), 1);

  /* Give up if either the bitfield or the variable are too wide.  */
  if (field_size > HOST_BITS_PER_WIDE_INT || var_size > HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;

#if defined ENABLE_CHECKING
  if (var_size < field_size)
    abort ();
#endif

  /* If the sign bit of the value is not set or the field's type is unsigned,
     just mask off the high order bits of the value.  */
  if (DECL_UNSIGNED (field)
      || !(tree_low_cst (val, 0) & (((HOST_WIDE_INT) 1) << (field_size - 1))))
    {
      /* Zero extension.  Build a mask with the lower 'field_size' bits
         set and a BIT_AND_EXPR node to clear the high order bits of
         the value.  */
      for (i = 0, mask = 0; i < field_size; i++)
        mask |= ((HOST_WIDE_INT) 1) << i;

      wide_val = build (BIT_AND_EXPR, TREE_TYPE (var), val,
                        fold_convert (TREE_TYPE (var), build_int_2 (mask, 0)));
    }
  else
    {
      /* Sign extension.  Create a mask with the upper
         'var_size - field_size' bits set and a BIT_IOR_EXPR to set the
         high order bits of the value.  */
      for (i = 0, mask = 0; i < (var_size - field_size); i++)
        mask |= ((HOST_WIDE_INT) 1) << (var_size - i - 1);

      wide_val = build (BIT_IOR_EXPR, TREE_TYPE (var), val,
                        fold_convert (TREE_TYPE (var), build_int_2 (mask, 0)));
    }

  return fold (wide_val);
}
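
/* Editorial worked example (not in the original sources): for a
   3-bit signed bitfield stored in a 32-bit variable, FIELD_SIZE is 3
   and VAR_SIZE is 32.  The stored value 5 (binary 101) has its sign
   bit set, so the sign-extension arm ORs in the mask 0xfffffff8 (the
   upper 29 bits), yielding -3 when read back in 32 bits.  The stored
   value 2 (binary 010) takes the zero-extension arm with mask 0x7.  */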


/* Function indicating whether we ought to include information for VAR
   when calculating immediate uses.  */

static bool
need_imm_uses_for (tree var)
{
  return get_value (var)->lattice_val != VARYING;
}


/* Initialize local data structures and worklists for CCP.  */

static void
initialize (void)
{
  edge e;
  basic_block bb;
  sbitmap virtual_var;

  /* Worklists of SSA edges.  */
  VARRAY_TREE_INIT (ssa_edges, 20, "ssa_edges");
  VARRAY_TREE_INIT (varying_ssa_edges, 20, "varying_ssa_edges");

  executable_blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (executable_blocks);

  bb_in_list = sbitmap_alloc (last_basic_block);
  sbitmap_zero (bb_in_list);

  value_vector = (value *) xmalloc (num_ssa_names * sizeof (value));
  memset (value_vector, 0, num_ssa_names * sizeof (value));

  /* 1 if ssa variable is used in a virtual variable context.  */
  virtual_var = sbitmap_alloc (num_ssa_names);
  sbitmap_zero (virtual_var);

  /* Initialize default values and simulation flags for PHI nodes, statements
     and edges.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      tree stmt;
      stmt_ann_t ann;
      def_optype defs;
      v_may_def_optype v_may_defs;
      v_must_def_optype v_must_defs;
      size_t x;
      int vary;

      /* Get the default value for each definition.  */
      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          vary = 0;
          stmt = bsi_stmt (i);
          get_stmt_operands (stmt);
          ann = stmt_ann (stmt);
          defs = DEF_OPS (ann);
          for (x = 0; x < NUM_DEFS (defs); x++)
            {
              tree def = DEF_OP (defs, x);
              if (get_value (def)->lattice_val == VARYING)
                vary = 1;
            }
          DONT_SIMULATE_AGAIN (stmt) = vary;

          /* Mark all V_MAY_DEF operands VARYING.  */
          v_may_defs = V_MAY_DEF_OPS (ann);
          for (x = 0; x < NUM_V_MAY_DEFS (v_may_defs); x++)
            {
              tree res = V_MAY_DEF_RESULT (v_may_defs, x);
              get_value (res)->lattice_val = VARYING;
              SET_BIT (virtual_var, SSA_NAME_VERSION (res));
            }

          /* Mark all V_MUST_DEF operands VARYING.  */
          v_must_defs = V_MUST_DEF_OPS (ann);
          for (x = 0; x < NUM_V_MUST_DEFS (v_must_defs); x++)
            {
              tree v_must_def = V_MUST_DEF_OP (v_must_defs, x);
              get_value (v_must_def)->lattice_val = VARYING;
              SET_BIT (virtual_var, SSA_NAME_VERSION (v_must_def));
            }
        }

      for (e = bb->succ; e; e = e->succ_next)
        e->flags &= ~EDGE_EXECUTABLE;
    }

  /* Now process PHI nodes.  */
  FOR_EACH_BB (bb)
    {
      tree phi, var;
      int x;

      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
        {
          value *val;
          val = get_value (PHI_RESULT (phi));
          if (val->lattice_val != VARYING)
            {
              for (x = 0; x < PHI_NUM_ARGS (phi); x++)
                {
                  var = PHI_ARG_DEF (phi, x);
                  /* If one argument is virtual, the result is virtual, and
                     therefore varying.  */
                  if (TREE_CODE (var) == SSA_NAME)
                    {
                      if (TEST_BIT (virtual_var, SSA_NAME_VERSION (var)))
                        {
                          val->lattice_val = VARYING;
                          SET_BIT (virtual_var,
                                   SSA_NAME_VERSION (PHI_RESULT (phi)));
                          break;
                        }
                    }
                }
            }
          DONT_SIMULATE_AGAIN (phi) = ((val->lattice_val == VARYING) ? 1 : 0);
        }
    }

  sbitmap_free (virtual_var);

  /* Compute immediate uses for variables we care about.  */
  compute_immediate_uses (TDFA_USE_OPS, need_imm_uses_for);

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_immediate_uses (dump_file);

  VARRAY_BB_INIT (cfg_blocks, 20, "cfg_blocks");

  /* Seed the algorithm by adding the successors of the entry block to the
     edge worklist.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      if (e->dest != EXIT_BLOCK_PTR)
        {
          e->flags |= EDGE_EXECUTABLE;
          cfg_blocks_add (e->dest);
        }
    }
}


/* Free allocated storage.  */

static void
finalize (void)
{
  ssa_edges = NULL;
  varying_ssa_edges = NULL;
  cfg_blocks = NULL;
  free (value_vector);
  sbitmap_free (bb_in_list);
  sbitmap_free (executable_blocks);
  free_df ();
}

/* Return true if the basic block worklist is empty.  */

static inline bool
cfg_blocks_empty_p (void)
{
  return (cfg_blocks_num == 0);
}

/* Add a basic block to the worklist.  */

static void
cfg_blocks_add (basic_block bb)
{
  if (bb == ENTRY_BLOCK_PTR || bb == EXIT_BLOCK_PTR)
    return;

  if (TEST_BIT (bb_in_list, bb->index))
    return;

  if (cfg_blocks_empty_p ())
    {
      cfg_blocks_tail = cfg_blocks_head = 0;
      cfg_blocks_num = 1;
    }
  else
    {
      cfg_blocks_num++;
      if (cfg_blocks_num > VARRAY_SIZE (cfg_blocks))
        {
          /* We have to grow the array now.  Adjust the queue to occupy the
             full space of the original array.  */
          cfg_blocks_tail = VARRAY_SIZE (cfg_blocks);
          cfg_blocks_head = 0;
          VARRAY_GROW (cfg_blocks, 2 * VARRAY_SIZE (cfg_blocks));
        }
      else
        cfg_blocks_tail = (cfg_blocks_tail + 1) % VARRAY_SIZE (cfg_blocks);
    }

  VARRAY_BB (cfg_blocks, cfg_blocks_tail) = bb;
  SET_BIT (bb_in_list, bb->index);
}
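
/* Editorial note (not in the original sources): CFG_BLOCKS is used as
   a circular queue, and the array is grown only when it is completely
   full, so after the grow above the pending blocks occupy indices
   0 .. old size - 1.  If the queue had wrapped, their relative order
   is rotated, which is harmless here because the propagation engine
   does not depend on strict FIFO order.  */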

/* Remove a block from the worklist.  */

static basic_block
cfg_blocks_get (void)
{
  basic_block bb;

  bb = VARRAY_BB (cfg_blocks, cfg_blocks_head);

#ifdef ENABLE_CHECKING
  if (cfg_blocks_empty_p () || !bb)
    abort ();
#endif

  cfg_blocks_head = (cfg_blocks_head + 1) % VARRAY_SIZE (cfg_blocks);
  --cfg_blocks_num;
  RESET_BIT (bb_in_list, bb->index);

  return bb;
}

/* We have just defined a new value for VAR.  Add all immediate uses
   of VAR to the SSA_EDGES or VARYING_SSA_EDGES worklist.  */

static void
add_var_to_ssa_edges_worklist (tree var, value val)
{
  tree stmt = SSA_NAME_DEF_STMT (var);
  dataflow_t df = get_immediate_uses (stmt);
  int num_uses = num_immediate_uses (df);
  int i;

  for (i = 0; i < num_uses; i++)
    {
      tree use = immediate_use (df, i);

      if (!DONT_SIMULATE_AGAIN (use))
        {
          stmt_ann_t ann = stmt_ann (use);
          if (ann->in_ccp_worklist == 0)
            {
              ann->in_ccp_worklist = 1;
              if (val.lattice_val == VARYING)
                VARRAY_PUSH_TREE (varying_ssa_edges, use);
              else
                VARRAY_PUSH_TREE (ssa_edges, use);
            }
        }
    }
}

/* Set the lattice value for the variable VAR to VARYING.  */

static void
def_to_varying (tree var)
{
  value val;
  val.lattice_val = VARYING;
  val.const_val = NULL_TREE;
  set_lattice_value (var, val);
}

/* Set the lattice value for variable VAR to VAL.  */

static void
set_lattice_value (tree var, value val)
{
  value *old = get_value (var);

#ifdef ENABLE_CHECKING
  if (val.lattice_val == UNDEFINED)
    {
      /* CONSTANT->UNDEFINED is never a valid state transition.  */
      if (old->lattice_val == CONSTANT)
        abort ();

      /* VARYING->UNDEFINED is generally not a valid state transition,
         except for values which are initialized to VARYING.  */
      if (old->lattice_val == VARYING
          && get_default_value (var).lattice_val != VARYING)
        abort ();
    }
  else if (val.lattice_val == CONSTANT)
    {
      /* VARYING -> CONSTANT is an invalid state transition, except
         for objects which start off in a VARYING state.  */
      if (old->lattice_val == VARYING
          && get_default_value (var).lattice_val != VARYING)
        abort ();
    }
#endif

  /* If the constant for VAR has changed, then this VAR is really varying.  */
  if (old->lattice_val == CONSTANT && val.lattice_val == CONSTANT
      && !simple_cst_equal (old->const_val, val.const_val))
    {
      val.lattice_val = VARYING;
      val.const_val = NULL_TREE;
    }

  if (old->lattice_val != val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          dump_lattice_value (dump_file,
                              "Lattice value changed to ", val);
          fprintf (dump_file, ".  Adding definition to SSA edges.\n");
        }

      add_var_to_ssa_edges_worklist (var, val);
      *old = val;
    }
}

/* Replace USE references in statement STMT with their immediate reaching
   definition.  Return true if at least one reference was replaced.  If
   REPLACED_ADDRESSES_P is given, it will be set to true if an address
   constant was replaced.  */

static bool
replace_uses_in (tree stmt, bool *replaced_addresses_p)
{
  bool replaced = false;
  use_optype uses;
  size_t i;

  if (replaced_addresses_p)
    *replaced_addresses_p = false;

  get_stmt_operands (stmt);

  uses = STMT_USE_OPS (stmt);
  for (i = 0; i < NUM_USES (uses); i++)
    {
      use_operand_p use = USE_OP_PTR (uses, i);
      value *val = get_value (USE_FROM_PTR (use));

      if (val->lattice_val == CONSTANT)
        {
          SET_USE (use, val->const_val);
          replaced = true;
          if (POINTER_TYPE_P (TREE_TYPE (USE_FROM_PTR (use)))
              && replaced_addresses_p)
            *replaced_addresses_p = true;
        }
    }

  return replaced;
}

/* Return the likely latticevalue for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if any operands of STMT are undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static latticevalue
likely_value (tree stmt)
{
  use_optype uses;
  size_t i;
  int found_constant = 0;
  stmt_ann_t ann;

  /* If the statement makes aliased loads or has volatile operands, it
     won't fold to a constant value.  */
  ann = stmt_ann (stmt);
  if (ann->makes_aliased_loads || ann->has_volatile_ops)
    return VARYING;

  /* A CALL_EXPR is assumed to be varying.  This may be overly conservative
     in the presence of const and pure calls.  */
  if (get_call_expr_in (stmt) != NULL_TREE)
    return VARYING;

  get_stmt_operands (stmt);

  uses = USE_OPS (ann);
  for (i = 0; i < NUM_USES (uses); i++)
    {
      tree use = USE_OP (uses, i);
      value *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
        return UNDEFINED;

      if (val->lattice_val == CONSTANT)
        found_constant = 1;
    }

  return ((found_constant || !uses) ? CONSTANT : VARYING);
}
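
/* Editorial illustration (not in the original sources): a statement
   such as `x_1 = 5 * 2' has no USE operands, so it is likely
   CONSTANT.  `x_2 = y_1 + 1' is likely CONSTANT once y_1 is known
   CONSTANT, UNDEFINED while y_1 is still UNDEFINED, and VARYING
   otherwise.  */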

/* A subroutine of fold_stmt_r.  Attempts to fold *(A+O) to A[X].
   BASE is an expression of array type.  OFFSET is a byte displacement.
   ORIG_TYPE is the desired result type.  */

static tree
maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type)
{
  tree min_idx, idx, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of the element type) from that
     ARRAY_REF.  We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
          || TREE_CODE (elt_offset) != INTEGER_CST)
        return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!lang_hooks.types_compatible_p (orig_type, elt_type))
    return NULL_TREE;

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
        elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = integer_zero_node;
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;

      if (TREE_CODE (elt_size) != INTEGER_CST
          || div_and_round_double (TRUNC_DIV_EXPR, 1,
                                   TREE_INT_CST_LOW (offset),
                                   TREE_INT_CST_HIGH (offset),
                                   TREE_INT_CST_LOW (elt_size),
                                   TREE_INT_CST_HIGH (elt_size),
                                   &lquo, &hquo, &lrem, &hrem)
          || lrem || hrem)
        return NULL_TREE;

      idx = build_int_2_wide (lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = integer_zero_node;
  if (TYPE_DOMAIN (array_type))
    {
      if (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
        min_idx = TYPE_MIN_VALUE (TYPE_DOMAIN (array_type));
      else
        min_idx = fold_convert (TYPE_DOMAIN (array_type), min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
        return NULL_TREE;

      idx = fold_convert (TYPE_DOMAIN (array_type), idx);
      elt_offset = fold_convert (TYPE_DOMAIN (array_type), elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  return build (ARRAY_REF, orig_type, base, idx, min_idx,
                size_int (tree_low_cst (elt_size, 1)
                          / (TYPE_ALIGN (elt_type) / BITS_PER_UNIT)));
}
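
/* Editorial worked example (not in the original sources, and assuming
   4-byte int): for `int a[10]', the expression *(&a[2] + 8) reaches
   this routine with BASE == a[2] and OFFSET == 8.  The inner
   ARRAY_REF contributes ELT_OFFSET = 2, the byte offset divides
   exactly by the element size to give IDX = 2, and the result is the
   equivalent reference a[4].  */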

/* A subroutine of fold_stmt_r.  Attempts to fold *(S+O) to S.X.
   RECORD_TYPE is the type of the record S, BASE is the record itself
   (or a pointer to it, if BASE_IS_PTR is true), OFFSET is a byte
   displacement, and ORIG_TYPE is the desired result type.  */
/* ??? This doesn't handle class inheritance.  */

static tree
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
                                    tree orig_type, bool base_is_ptr)
{
  tree f, t, field_type, tail_array_field;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (lang_hooks.types_compatible_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
        continue;
      if (DECL_BIT_FIELD (f))
        continue;
      if (TREE_CODE (DECL_FIELD_OFFSET (f)) != INTEGER_CST)
        continue;

      /* ??? Java creates "interesting" fields for representing base classes.
         They have no name, and have no context.  With no context, we get into
         trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
        continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (DECL_FIELD_OFFSET (f), offset);
      if (cmp > 0)
        continue;

      field_type = TREE_TYPE (f);
      if (cmp < 0)
        {
          /* Don't care about offsets into the middle of scalars.  */
          if (!AGGREGATE_TYPE_P (field_type))
            continue;

          /* Check for array at the end of the struct.  This is often
             used for flexible array members.  We should be able to
             turn this into an array access anyway.  */
          if (TREE_CODE (field_type) == ARRAY_TYPE)
            tail_array_field = f;

          /* Check the end of the field against the offset.  */
          if (!DECL_SIZE_UNIT (f)
              || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
            continue;
          t = int_const_binop (MINUS_EXPR, offset, DECL_FIELD_OFFSET (f), 1);
          if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
            continue;

          /* If we matched, then set offset to the displacement into
             this field.  */
          offset = t;
        }

      /* Here we exactly match the offset being checked.  If the types match,
         then we can return that field.  */
      else if (lang_hooks.types_compatible_p (orig_type, field_type))
        {
          if (base_is_ptr)
            base = build1 (INDIRECT_REF, record_type, base);
          t = build (COMPONENT_REF, field_type, base, f, NULL_TREE);
          return t;
        }

      /* Don't care about type-punning of scalars.  */
      else if (!AGGREGATE_TYPE_P (field_type))
        return NULL_TREE;

      goto found;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);

 found:
  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into it.  Recurse and hope for a valid match.  */
  if (base_is_ptr)
    base = build1 (INDIRECT_REF, record_type, base);
  base = build (COMPONENT_REF, field_type, base, f, NULL_TREE);

  t = maybe_fold_offset_to_array_ref (base, offset, orig_type);
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (field_type, base, offset,
                                             orig_type, false);
}
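
/* Editorial worked example (not in the original sources): given

     struct pair { int x; int y; } s;

   a dereference of the form *(int *)((char *)&s + O), where O is the
   byte displacement of field `y', is intended to fold to the
   component reference s.y; an offset landing inside an aggregate
   field instead recurses into that field.  */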

/* A subroutine of fold_stmt_r.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */

static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a PLUS_EXPR.  */
  if (TREE_CODE (base) == PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
        return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = int_const_binop (PLUS_EXPR, offset, offset2, 1);
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Try folding *(&B+O) to B[X].  */
      t = maybe_fold_offset_to_array_ref (base, offset, TREE_TYPE (expr));
      if (t)
        return t;

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_component_ref (TREE_TYPE (base), base, offset,
                                              TREE_TYPE (expr), false);
      if (t)
        return t;

      /* Fold *&B to B.  We can only do this if EXPR is the same type
         as BASE.  We can't do this if EXPR is the element type of an array
         and BASE is the array.  */
      if (integer_zerop (offset)
          && lang_hooks.types_compatible_p (TREE_TYPE (base),
                                            TREE_TYPE (expr)))
        return base;
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
         such as "_"[3].  Bail out of the entire substitution search
         and arrange for the entire statement to be replaced by a
         call to __builtin_trap.  In all likelihood this will all be
         constant-folded away, but in the meantime we can't leave with
         something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
        {
          /* FIXME: Except that this causes problems elsewhere with dead
             code not being deleted, and we abort in the rtl expanders
             because we failed to remove some ssa_name.  In the meantime,
             just return zero.  */
          /* FIXME2: This condition should be signaled by
             fold_read_from_constant_string directly, rather than
             re-checking for it here.  */
          return integer_zero_node;
        }

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
        {
          t = maybe_fold_offset_to_component_ref (TREE_TYPE (TREE_TYPE (base)),
                                                  base, offset,
                                                  TREE_TYPE (expr), true);
          if (t)
            return t;
        }
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}
1789
1790/* A subroutine of fold_stmt_r. EXPR is a PLUS_EXPR.
1791
1792 A quaint feature extant in our address arithmetic is that there
1793 can be hidden type changes here. The type of the result need
1794 not be the same as the type of the input pointer.
1795
1796 What we're after here is an expression of the form
1797 (T *)(&array + const)
1798 where the cast doesn't actually exist, but is implicit in the
1799 type of the PLUS_EXPR. We'd like to turn this into
1800 &array[x]
1801 which may be able to propagate further. */
1802
1803static tree
1804maybe_fold_stmt_addition (tree expr)
1805{
1806 tree op0 = TREE_OPERAND (expr, 0);
1807 tree op1 = TREE_OPERAND (expr, 1);
1808 tree ptr_type = TREE_TYPE (expr);
1809 tree ptd_type;
1810 tree t;
1811 bool subtract = (TREE_CODE (expr) == MINUS_EXPR);
1812
1813 /* We're only interested in pointer arithmetic. */
1814 if (!POINTER_TYPE_P (ptr_type))
1815 return NULL_TREE;
1816 /* Canonicalize the integral operand to op1. */
1817 if (INTEGRAL_TYPE_P (TREE_TYPE (op0)))
1818 {
1819 if (subtract)
1820 return NULL_TREE;
1821 t = op0, op0 = op1, op1 = t;
1822 }
1823 /* It had better be a constant. */
1824 if (TREE_CODE (op1) != INTEGER_CST)
1825 return NULL_TREE;
1826 /* The first operand should be an ADDR_EXPR. */
1827 if (TREE_CODE (op0) != ADDR_EXPR)
1828 return NULL_TREE;
1829 op0 = TREE_OPERAND (op0, 0);
1830
1831 /* If the first operand is an ARRAY_REF, expand it so that we can fold
1832 the offset into it. */
1833 while (TREE_CODE (op0) == ARRAY_REF)
1834 {
1835 tree array_obj = TREE_OPERAND (op0, 0);
1836 tree array_idx = TREE_OPERAND (op0, 1);
1837 tree elt_type = TREE_TYPE (op0);
1838 tree elt_size = TYPE_SIZE_UNIT (elt_type);
1839 tree min_idx;
1840
1841 if (TREE_CODE (array_idx) != INTEGER_CST)
1842 break;
1843 if (TREE_CODE (elt_size) != INTEGER_CST)
1844 break;
1845
1846 /* Un-bias the index by the min index of the array type. */
1847 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
1848 if (min_idx)
1849 {
1850 min_idx = TYPE_MIN_VALUE (min_idx);
1851 if (min_idx)
1852 {
44de5aeb
RK
1853 if (TREE_CODE (min_idx) != INTEGER_CST)
1854 break;
1855
6de9cd9a
DN
1856 array_idx = convert (TREE_TYPE (min_idx), array_idx);
1857 if (!integer_zerop (min_idx))
1858 array_idx = int_const_binop (MINUS_EXPR, array_idx,
1859 min_idx, 0);
1860 }
1861 }
1862
1863 /* Convert the index to a byte offset. */
1864 array_idx = convert (sizetype, array_idx);
1865 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
1866
1867 /* Update the operands for the next round, or for folding. */
 1868	 /* If we're manipulating unsigned types, then folding into negative
 1869	    values can produce incorrect results, particularly if the type
 1870	    is smaller than the width of the pointer.  */
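      /* A sketch of the hazard: with a 16-bit unsigned OP1, folding
	 4 - 8 would wrap to 65532 rather than yield -4, so when the
	 subtraction would go negative we give up below.  */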
1871 if (subtract
1872 && TYPE_UNSIGNED (TREE_TYPE (op1))
1873 && tree_int_cst_lt (array_idx, op1))
1874 return NULL;
1875 op1 = int_const_binop (subtract ? MINUS_EXPR : PLUS_EXPR,
1876 array_idx, op1, 0);
1877 subtract = false;
1878 op0 = array_obj;
1879 }
1880
1881 /* If we weren't able to fold the subtraction into another array reference,
1882 canonicalize the integer for passing to the array and component ref
1883 simplification functions. */
1884 if (subtract)
1885 {
1886 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
1887 return NULL;
1888 op1 = fold (build1 (NEGATE_EXPR, TREE_TYPE (op1), op1));
1889 /* ??? In theory fold should always produce another integer. */
1890 if (TREE_CODE (op1) != INTEGER_CST)
1891 return NULL;
1892 }
1893
1894 ptd_type = TREE_TYPE (ptr_type);
1895
1896 /* At which point we can try some of the same things as for indirects. */
1897 t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type);
1898 if (!t)
1899 t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
1900 ptd_type, false);
1901 if (t)
1902 t = build1 (ADDR_EXPR, ptr_type, t);
1903
1904 return t;
1905}
1906
1907/* Subroutine of fold_stmt called via walk_tree. We perform several
1908 simplifications of EXPR_P, mostly having to do with pointer arithmetic. */
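
/* For example (a sketch): after propagation, *(&a + 8) can become
   a[2] for 4-byte elements via maybe_fold_stmt_indirect and
   maybe_fold_stmt_addition, and a COMPONENT_REF whose FIELD_DECL
   belongs to an incompatible record variant gets a compatible field
   substituted.  */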
1909
1910static tree
1911fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
1912{
1913 bool *changed_p = data;
1914 tree expr = *expr_p, t;
1915
1916 /* ??? It'd be nice if walk_tree had a pre-order option. */
1917 switch (TREE_CODE (expr))
1918 {
1919 case INDIRECT_REF:
1920 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
1921 if (t)
1922 return t;
1923 *walk_subtrees = 0;
1924
1925 t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
1926 integer_zero_node);
1927 break;
1928
1929 /* ??? Could handle ARRAY_REF here, as a variant of INDIRECT_REF.
1930 We'd only want to bother decomposing an existing ARRAY_REF if
1931 the base array is found to have another offset contained within.
1932 Otherwise we'd be wasting time. */
1933
1934 case ADDR_EXPR:
1935 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
1936 if (t)
1937 return t;
1938 *walk_subtrees = 0;
1939
 1940	 /* Set TREE_INVARIANT so that the value is correctly considered
 1941	    constant, and so gets propagated as expected.  */
1942 if (*changed_p)
1943 recompute_tree_invarant_for_addr_expr (expr);
1944 return NULL_TREE;
1945
1946 case PLUS_EXPR:
1947 case MINUS_EXPR:
1948 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
1949 if (t)
1950 return t;
1951 t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
1952 if (t)
1953 return t;
1954 *walk_subtrees = 0;
1955
1956 t = maybe_fold_stmt_addition (expr);
1957 break;
1958
1959 case COMPONENT_REF:
1960 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
1961 if (t)
1962 return t;
1963 *walk_subtrees = 0;
1964
1965 /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
1966 We've already checked that the records are compatible, so we should
1967 come up with a set of compatible fields. */
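      /* A hypothetical way this arises: two translation units each
	 declare "struct S { int f; }", cross-unit optimization mixes
	 their distinct FIELD_DECLs, and we remap to the field of the
	 main variant seen on the lhs.  */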
1968 {
1969 tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
1970 tree expr_field = TREE_OPERAND (expr, 1);
1971
1972 if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
1973 {
1974 expr_field = find_compatible_field (expr_record, expr_field);
1975 TREE_OPERAND (expr, 1) = expr_field;
1976 }
1977 }
1978 break;
1979
1980 default:
1981 return NULL_TREE;
1982 }
1983
1984 if (t)
1985 {
1986 *expr_p = t;
1987 *changed_p = true;
1988 }
1989
1990 return NULL_TREE;
1991}
1992
 1993/* Fold the statement pointed to by STMT_P.  In some cases, this function may
1994 replace the whole statement with a new one. Returns true iff folding
1995 makes any changes. */
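
/* A typical use (a sketch in the style of execute_fold_all_builtins
   below), refreshing the operand caches after a change:

     if (fold_stmt (bsi_stmt_ptr (i)))
       modify_stmt (bsi_stmt (i));  */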
1996
1997bool
1998fold_stmt (tree *stmt_p)
1999{
2000 tree rhs, result, stmt;
2001 bool changed = false;
2002
2003 stmt = *stmt_p;
2004
 2005	 /* If we replaced constants and the statement performs pointer dereferences,
 2006	    then we may need to fold instances of *&VAR into VAR, etc.  */
2007 if (walk_tree (stmt_p, fold_stmt_r, &changed, NULL))
2008 {
2009 *stmt_p
2010 = build_function_call_expr (implicit_built_in_decls[BUILT_IN_TRAP],
2011 NULL);
2012 return true;
2013 }
2014
2015 rhs = get_rhs (stmt);
2016 if (!rhs)
2017 return changed;
2018 result = NULL_TREE;
2019
2020 if (TREE_CODE (rhs) == CALL_EXPR)
2021 {
2022 tree callee;
2023
2024 /* Check for builtins that CCP can handle using information not
2025 available in the generic fold routines. */
2026 callee = get_callee_fndecl (rhs);
2027 if (callee && DECL_BUILT_IN (callee))
2028 result = ccp_fold_builtin (stmt, rhs);
2029 else
2030 {
2031 /* Check for resolvable OBJ_TYPE_REF. The only sorts we can resolve
2032 here are when we've propagated the address of a decl into the
2033 object slot. */
2034 /* ??? Should perhaps do this in fold proper. However, doing it
2035 there requires that we create a new CALL_EXPR, and that requires
2036 copying EH region info to the new node. Easier to just do it
2037 here where we can just smash the call operand. */
2038 callee = TREE_OPERAND (rhs, 0);
2039 if (TREE_CODE (callee) == OBJ_TYPE_REF
2040 && lang_hooks.fold_obj_type_ref
2041 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
2042 && DECL_P (TREE_OPERAND (OBJ_TYPE_REF_OBJECT (callee), 0)))
2043 {
2044 tree t;
2045
2046 /* ??? Caution: Broken ADDR_EXPR semantics means that
2047 looking at the type of the operand of the addr_expr
2048 can yield an array type. See silly exception in
2049 check_pointer_types_r. */
2050
2051 t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
2052 t = lang_hooks.fold_obj_type_ref (callee, t);
2053 if (t)
2054 {
2055 TREE_OPERAND (rhs, 0) = t;
2056 changed = true;
2057 }
2058 }
2059 }
2060 }
2061
2062 /* If we couldn't fold the RHS, hand over to the generic fold routines. */
2063 if (result == NULL_TREE)
2064 result = fold (rhs);
2065
2066 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR that
2067 may have been added by fold, and "useless" type conversions that might
2068 now be apparent due to propagation. */
2069 STRIP_USELESS_TYPE_CONVERSION (result);
2070
2071 if (result != rhs)
06a9b53f 2072 changed |= set_rhs (stmt_p, result);
2073
2074 return changed;
2075}
2076
2077/* Get the main expression from statement STMT. */
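
/* E.g. for "a = b + c" this returns the expression "b + c"; for
   "if (x < y)" it returns the condition "x < y"; statements with no
   distinguished subexpression are returned unchanged.  */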
2078
2079static tree
2080get_rhs (tree stmt)
2081{
2082 enum tree_code code = TREE_CODE (stmt);
2083
cd709752 2084 switch (code)
6de9cd9a 2085 {
2086 case RETURN_EXPR:
2087 stmt = TREE_OPERAND (stmt, 0);
2088 if (stmt)
2089 return get_rhs (stmt);
6de9cd9a 2090 else
2091 return NULL;
2092
2093 case MODIFY_EXPR:
2094 return TREE_OPERAND (stmt, 1);
2095
2096 case COND_EXPR:
2097 return COND_EXPR_COND (stmt);
2098 case SWITCH_EXPR:
2099 return SWITCH_COND (stmt);
2100 case GOTO_EXPR:
2101 return GOTO_DESTINATION (stmt);
2102 case LABEL_EXPR:
2103 return LABEL_EXPR_LABEL (stmt);
2104
2105 default:
2106 return stmt;
6de9cd9a 2107 }
2108}
2109
2110
2111/* Set the main expression of *STMT_P to EXPR. */
2112
06a9b53f 2113static bool
2114set_rhs (tree *stmt_p, tree expr)
2115{
cd709752 2116 tree stmt = *stmt_p, op;
06a9b53f 2117 enum tree_code code = TREE_CODE (expr);
cd709752 2118 stmt_ann_t ann;
6de9cd9a 2119
2120 /* Verify the constant folded result is valid gimple. */
2121 if (TREE_CODE_CLASS (code) == '2')
2122 {
2123 if (!is_gimple_val (TREE_OPERAND (expr, 0))
2124 || !is_gimple_val (TREE_OPERAND (expr, 1)))
2125 return false;
2126 }
2127 else if (TREE_CODE_CLASS (code) == '1')
2128 {
2129 if (!is_gimple_val (TREE_OPERAND (expr, 0)))
2130 return false;
2131 }
2132
cd709752 2133 switch (TREE_CODE (stmt))
6de9cd9a 2134 {
2135 case RETURN_EXPR:
2136 op = TREE_OPERAND (stmt, 0);
2137 if (TREE_CODE (op) != MODIFY_EXPR)
2138 {
2139 TREE_OPERAND (stmt, 0) = expr;
2140 break;
2141 }
2142 stmt = op;
2143 /* FALLTHRU */
2144
2145 case MODIFY_EXPR:
2146 TREE_OPERAND (stmt, 1) = expr;
2147 break;
2148
2149 case COND_EXPR:
2150 COND_EXPR_COND (stmt) = expr;
2151 break;
2152 case SWITCH_EXPR:
2153 SWITCH_COND (stmt) = expr;
2154 break;
2155 case GOTO_EXPR:
2156 GOTO_DESTINATION (stmt) = expr;
2157 break;
2158 case LABEL_EXPR:
2159 LABEL_EXPR_LABEL (stmt) = expr;
2160 break;
2161
2162 default:
2163 /* Replace the whole statement with EXPR. If EXPR has no side
2164 effects, then replace *STMT_P with an empty statement. */
cd709752 2165 ann = stmt_ann (stmt);
6de9cd9a 2166 *stmt_p = TREE_SIDE_EFFECTS (expr) ? expr : build_empty_stmt ();
06d72ee6 2167 (*stmt_p)->common.ann = (tree_ann_t) ann;
2168
2169 if (TREE_SIDE_EFFECTS (expr))
2170 {
2171 def_optype defs;
2172 v_may_def_optype v_may_defs;
2173 v_must_def_optype v_must_defs;
2174 size_t i;
2175
2176 /* Fix all the SSA_NAMEs created by *STMT_P to point to its new
2177 replacement. */
2178 defs = DEF_OPS (ann);
2179 for (i = 0; i < NUM_DEFS (defs); i++)
2180 {
2181 tree var = DEF_OP (defs, i);
2182 if (TREE_CODE (var) == SSA_NAME)
2183 SSA_NAME_DEF_STMT (var) = *stmt_p;
2184 }
2185
2186 v_may_defs = V_MAY_DEF_OPS (ann);
2187 for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
2188 {
2189 tree var = V_MAY_DEF_RESULT (v_may_defs, i);
2190 if (TREE_CODE (var) == SSA_NAME)
2191 SSA_NAME_DEF_STMT (var) = *stmt_p;
2192 }
2193
2194 v_must_defs = V_MUST_DEF_OPS (ann);
2195 for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
6de9cd9a 2196 {
a32b97a2 2197 tree var = V_MUST_DEF_OP (v_must_defs, i);
2198 if (TREE_CODE (var) == SSA_NAME)
2199 SSA_NAME_DEF_STMT (var) = *stmt_p;
2200 }
2201 }
cd709752 2202 break;
6de9cd9a 2203 }
2204
2205 return true;
2206}
2207
2208
2209/* Return a default value for variable VAR using the following rules:
2210
2211 1- Global and static variables are considered VARYING, unless they are
2212 declared const.
2213
2214 2- Function arguments are considered VARYING.
2215
2216 3- Any other value is considered UNDEFINED. This is useful when
2217 considering PHI nodes. PHI arguments that are undefined do not
2218 change the constant value of the PHI node, which allows for more
2219 constants to be propagated. */
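
/* For example (a sketch): an SSA name for "static const int ten = 10;"
   starts out CONSTANT with value 10 under rule 1, while an SSA name
   for a PARM_DECL starts out VARYING under rule 2.  */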
2220
2221static value
2222get_default_value (tree var)
2223{
2224 value val;
2225 tree sym;
2226
2227 if (TREE_CODE (var) == SSA_NAME)
2228 sym = SSA_NAME_VAR (var);
2229 else
2230 {
2231#ifdef ENABLE_CHECKING
2232 if (!DECL_P (var))
2233 abort ();
2234#endif
2235 sym = var;
2236 }
2237
2238 val.lattice_val = UNDEFINED;
2239 val.const_val = NULL_TREE;
2240
2241 if (TREE_CODE (sym) == PARM_DECL || TREE_THIS_VOLATILE (sym))
2242 {
2243 /* Function arguments and volatile variables are considered VARYING. */
2244 val.lattice_val = VARYING;
2245 }
2246 else if (decl_function_context (sym) != current_function_decl
2247 || TREE_STATIC (sym))
2248 {
2249 /* Globals and static variables are considered VARYING, unless they
2250 are declared 'const'. */
2251 val.lattice_val = VARYING;
2252
2253 if (TREE_READONLY (sym)
2254 && DECL_INITIAL (sym)
2255 && is_gimple_min_invariant (DECL_INITIAL (sym)))
2256 {
2257 val.lattice_val = CONSTANT;
2258 val.const_val = DECL_INITIAL (sym);
2259 }
2260 }
2261 else
2262 {
2263 enum tree_code code;
2264 tree stmt = SSA_NAME_DEF_STMT (var);
2265
2266 if (!IS_EMPTY_STMT (stmt))
2267 {
2268 code = TREE_CODE (stmt);
2269 if (code != MODIFY_EXPR && code != PHI_NODE)
2270 val.lattice_val = VARYING;
2271 }
2272 }
2273
2274 return val;
2275}
2276
2277
 2278/* Fold builtin call FN in statement STMT.  If it cannot be simplified,
 2279   return NULL_TREE.  Otherwise, return the simplified form, usually a
 2280   constant.  */
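
/* E.g. once CCP has propagated the string "hello" into the argument
   of __builtin_strlen, the call folds to the constant 5; a strcpy
   whose source has known length can be turned into a simpler call
   (a sketch of typical outcomes, not an exhaustive list).  */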
2280
2281static tree
2282ccp_fold_builtin (tree stmt, tree fn)
2283{
2284 tree result, strlen_val[2];
a32e70c3 2285 tree callee, arglist, a;
6de9cd9a 2286 int strlen_arg, i;
2287 bitmap visited;
2288 bool ignore;
6de9cd9a 2289
a32e70c3 2290 ignore = TREE_CODE (stmt) != MODIFY_EXPR;
2291
2292 /* First try the generic builtin folder. If that succeeds, return the
2293 result directly. */
a32e70c3 2294 result = fold_builtin (fn, ignore);
6de9cd9a 2295 if (result)
2296 {
2297 if (ignore)
2298 STRIP_NOPS (result);
6de9cd9a 2299 return result;
2300 }
2301
2302 /* Ignore MD builtins. */
2303 callee = get_callee_fndecl (fn);
2304 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
2305 return NULL_TREE;
2306
2307 /* If the builtin could not be folded, and it has no argument list,
2308 we're done. */
a32e70c3 2309 arglist = TREE_OPERAND (fn, 1);
2310 if (!arglist)
2311 return NULL_TREE;
2312
2313 /* Limit the work only for builtins we know how to simplify. */
2314 switch (DECL_FUNCTION_CODE (callee))
2315 {
2316 case BUILT_IN_STRLEN:
2317 case BUILT_IN_FPUTS:
2318 case BUILT_IN_FPUTS_UNLOCKED:
2319 strlen_arg = 1;
2320 break;
2321 case BUILT_IN_STRCPY:
2322 case BUILT_IN_STRNCPY:
2323 strlen_arg = 2;
2324 break;
2325 default:
2326 return NULL_TREE;
2327 }
2328
2329 /* Try to use the dataflow information gathered by the CCP process. */
2330 visited = BITMAP_XMALLOC ();
2331
2332 memset (strlen_val, 0, sizeof (strlen_val));
2333 for (i = 0, a = arglist;
2334 strlen_arg;
2335 i++, strlen_arg >>= 1, a = TREE_CHAIN (a))
2336 if (strlen_arg & 1)
2337 {
2338 bitmap_clear (visited);
2339 if (!get_strlen (TREE_VALUE (a), &strlen_val[i], visited))
2340 strlen_val[i] = NULL_TREE;
2341 }
2342
2343 BITMAP_XFREE (visited);
2344
a32e70c3 2345 result = NULL_TREE;
2346 switch (DECL_FUNCTION_CODE (callee))
2347 {
2348 case BUILT_IN_STRLEN:
a32e70c3 2349 if (strlen_val[0])
6de9cd9a 2350 {
a32e70c3 2351 tree new = fold_convert (TREE_TYPE (fn), strlen_val[0]);
2352
2353 /* If the result is not a valid gimple value, or not a cast
 2354	     of a valid gimple value, then we cannot use the result.  */
2355 if (is_gimple_val (new)
2356 || (is_gimple_cast (new)
2357 && is_gimple_val (TREE_OPERAND (new, 0))))
2358 return new;
6de9cd9a 2359 }
2360 break;
2361
6de9cd9a 2362 case BUILT_IN_STRCPY:
2363 if (strlen_val[1] && is_gimple_val (strlen_val[1]))
2364 result = fold_builtin_strcpy (fn, strlen_val[1]);
2365 break;
2366
6de9cd9a 2367 case BUILT_IN_STRNCPY:
2368 if (strlen_val[1] && is_gimple_val (strlen_val[1]))
2369 result = fold_builtin_strncpy (fn, strlen_val[1]);
2370 break;
2371
6de9cd9a 2372 case BUILT_IN_FPUTS:
2373 result = fold_builtin_fputs (arglist,
2374 TREE_CODE (stmt) != MODIFY_EXPR, 0,
2375 strlen_val[0]);
2376 break;
2377
6de9cd9a 2378 case BUILT_IN_FPUTS_UNLOCKED:
2379 result = fold_builtin_fputs (arglist,
2380 TREE_CODE (stmt) != MODIFY_EXPR, 1,
2381 strlen_val[0]);
2382 break;
2383
2384 default:
2385 abort ();
2386 }
2387
a32e70c3 2388 if (result && ignore)
2389 {
2390 /* STRIP_NOPS isn't strong enough -- it'll stop when we change modes,
2391 but given that we're ignoring the result, we don't care what type
2392 is being returned by the transformed function. */
2393 while (TREE_CODE (result) == NOP_EXPR
2394 || TREE_CODE (result) == CONVERT_EXPR
2395 || TREE_CODE (result) == NON_LVALUE_EXPR)
2396 result = TREE_OPERAND (result, 0);
2397 }
2398
a32e70c3 2399 return result;
2400}
2401
2402
 2403/* Return the string length of ARG in *LENGTH.  If ARG is an SSA name
 2404   variable, follow its use-def chains.  If *LENGTH is nonzero and does not
 2405   equal the length we determine, or if we are unable to determine the
 2406   length, return false.  VISITED is a bitmap of visited variables.  */
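
/* Sketch: given

     s_3 = "bar";
     s_2 = PHI <"foo", s_3>;

   both reaching strings have length 3, so *LENGTH is set to 3 and we
   return true; had one been "quux", the lengths would disagree and we
   would return false.  */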
2407
2408static bool
2409get_strlen (tree arg, tree *length, bitmap visited)
2410{
2411 tree var, def_stmt, val;
2412
2413 if (TREE_CODE (arg) != SSA_NAME)
2414 {
2415 val = c_strlen (arg, 1);
2416 if (!val)
2417 return false;
2418
2419 if (*length && simple_cst_equal (val, *length) != 1)
2420 return false;
2421
2422 *length = val;
2423 return true;
2424 }
2425
2426 /* If we were already here, break the infinite cycle. */
2427 if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
2428 return true;
2429 bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
2430
2431 var = arg;
2432 def_stmt = SSA_NAME_DEF_STMT (var);
2433
2434 switch (TREE_CODE (def_stmt))
2435 {
2436 case MODIFY_EXPR:
2437 {
2438 tree len, rhs;
2439
2440 /* The RHS of the statement defining VAR must either have a
2441 constant length or come from another SSA_NAME with a constant
2442 length. */
2443 rhs = TREE_OPERAND (def_stmt, 1);
2444 STRIP_NOPS (rhs);
2445 if (TREE_CODE (rhs) == SSA_NAME)
2446 return get_strlen (rhs, length, visited);
2447
2448 /* See if the RHS is a constant length. */
2449 len = c_strlen (rhs, 1);
2450 if (len)
2451 {
2452 if (*length && simple_cst_equal (len, *length) != 1)
2453 return false;
2454
2455 *length = len;
2456 return true;
2457 }
2458
2459 break;
2460 }
2461
2462 case PHI_NODE:
2463 {
2464 /* All the arguments of the PHI node must have the same constant
2465 length. */
2466 int i;
2467
2468 for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
2469 {
2470 tree arg = PHI_ARG_DEF (def_stmt, i);
2471
2472 /* If this PHI has itself as an argument, we cannot
2473 determine the string length of this argument. However,
2474 if we can find a constant string length for the other
2475 PHI args then we can still be sure that this is a
2476 constant string length. So be optimistic and just
2477 continue with the next argument. */
2478 if (arg == PHI_RESULT (def_stmt))
2479 continue;
2480
2481 if (!get_strlen (arg, length, visited))
2482 return false;
2483 }
2484
2485 return true;
2486 }
2487
2488 default:
2489 break;
2490 }
2491
2492
2493 return false;
2494}
2495
2496\f
2497/* A simple pass that attempts to fold all builtin functions. This pass
2498 is run after we've propagated as many constants as we can. */
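
/* E.g. a surviving __builtin_constant_p (x) that CCP could not fold
   to 1 is resolved to 0 here, and string builtins whose arguments
   became constant are folded to their values.  */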
2499
2500static void
2501execute_fold_all_builtins (void)
2502{
2503 basic_block bb;
2504 FOR_EACH_BB (bb)
2505 {
2506 block_stmt_iterator i;
2507 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
2508 {
2509 tree *stmtp = bsi_stmt_ptr (i);
2510 tree call = get_rhs (*stmtp);
2511 tree callee, result;
2512
2513 if (!call || TREE_CODE (call) != CALL_EXPR)
2514 continue;
2515 callee = get_callee_fndecl (call);
2516 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
2517 continue;
2518
2519 result = ccp_fold_builtin (*stmtp, call);
2520 if (!result)
2521 switch (DECL_FUNCTION_CODE (callee))
2522 {
2523 case BUILT_IN_CONSTANT_P:
2524 /* Resolve __builtin_constant_p. If it hasn't been
2525 folded to integer_one_node by now, it's fairly
2526 certain that the value simply isn't constant. */
2527 result = integer_zero_node;
2528 break;
2529
2530 default:
2531 continue;
2532 }
2533
2534 if (dump_file && (dump_flags & TDF_DETAILS))
2535 {
2536 fprintf (dump_file, "Simplified\n ");
2537 print_generic_stmt (dump_file, *stmtp, dump_flags);
2538 }
2539
2540 if (set_rhs (stmtp, result))
2541 modify_stmt (*stmtp);
2542
2543 if (dump_file && (dump_flags & TDF_DETAILS))
2544 {
2545 fprintf (dump_file, "to\n ");
2546 print_generic_stmt (dump_file, *stmtp, dump_flags);
2547 fprintf (dump_file, "\n");
2548 }
2549 }
2550 }
2551}
2552
2553struct tree_opt_pass pass_fold_builtins =
2554{
2555 "fab", /* name */
2556 NULL, /* gate */
2557 execute_fold_all_builtins, /* execute */
2558 NULL, /* sub */
2559 NULL, /* next */
2560 0, /* static_pass_number */
2561 0, /* tv_id */
2562 PROP_cfg | PROP_ssa, /* properties_required */
2563 0, /* properties_provided */
2564 0, /* properties_destroyed */
2565 0, /* todo_flags_start */
2566 TODO_dump_func | TODO_verify_ssa /* todo_flags_finish */
2567};
2568
2569
2570#include "gt-tree-ssa-ccp.h"