/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* Conditional constant propagation.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
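
/* A small illustration (not part of the original sources) of why
   tracking edge executability matters.  Given the GIMPLE-like code

       x_1 = 1;
       if (x_1 == 1)
         y_2 = 2;
       else
         y_3 = x_1 + 7;
       y_4 = PHI <y_2, y_3>;

   the else edge is never executable, so the meet over the PHI
   arguments only sees y_2 and y_4 becomes the constant 2.  A
   propagator that ignored the branches would meet 2 with 8 and
   produce VARYING.  */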

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "errors.h"
#include "ggc.h"
#include "tree.h"
#include "langhooks.h"

/* These RTL headers are needed for basic-block.h.  */
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"

#include "diagnostic.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "tree-simple.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "expr.h"
#include "flags.h"


/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED = 0,
  UNDEFINED,
  CONSTANT,
  VARYING
} latticevalue;

/* Use the TREE_VISITED bitflag to mark statements and PHI nodes that have
   been deemed VARYING and shouldn't be simulated again.  */
#define DONT_SIMULATE_AGAIN(T) TREE_VISITED (T)

/* Main structure for CCP.  Contains the lattice value and, if it's a
   constant, the constant value.  */
typedef struct
{
  latticevalue lattice_val;
  tree const_val;
} value;

/* A bitmap to keep track of executable blocks in the CFG.  */
static sbitmap executable_blocks;

/* Array of control flow edges on the worklist.  */
static GTY(()) varray_type cfg_blocks = NULL;

static unsigned int cfg_blocks_num = 0;
static int cfg_blocks_tail;
static int cfg_blocks_head;

static sbitmap bb_in_list;

/* This is used to track the current value of each variable.  */
static value *value_vector;

/* Worklist of SSA edges which will need reexamination as their definition
   has changed.  SSA edges are def-use edges in the SSA web.  For each
   edge, we store the definition statement or PHI node D.  The destination
   nodes that need to be visited are accessed using immediate_uses (D).  */
static GTY(()) varray_type ssa_edges;

static void initialize (void);
static void finalize (void);
static void visit_phi_node (tree);
static tree ccp_fold (tree);
static value cp_lattice_meet (value, value);
static void visit_stmt (tree);
static void visit_cond_stmt (tree);
static void visit_assignment (tree);
static void add_var_to_ssa_edges_worklist (tree);
static void add_outgoing_control_edges (basic_block);
static void add_control_edge (edge);
static void def_to_varying (tree);
static void set_lattice_value (tree, value);
static void simulate_block (basic_block);
static void simulate_stmt (tree);
static void substitute_and_fold (void);
static value evaluate_stmt (tree);
static void dump_lattice_value (FILE *, const char *, value);
static bool replace_uses_in (tree, bool *);
static latticevalue likely_value (tree);
static tree get_rhs (tree);
static void set_rhs (tree *, tree);
static value *get_value (tree);
static value get_default_value (tree);
static tree ccp_fold_builtin (tree, tree);
static bool get_strlen (tree, tree *, bitmap);
static inline bool cfg_blocks_empty_p (void);
static void cfg_blocks_add (basic_block);
static basic_block cfg_blocks_get (void);
static bool need_imm_uses_for (tree var);

/* Main entry point for SSA Conditional Constant Propagation.

   On exit, VARS_TO_RENAME will contain the symbols that have been exposed by
   the propagation of ADDR_EXPR expressions into pointer dereferences and need
   to be renamed into SSA.  */

static void
tree_ssa_ccp (void)
{
  initialize ();

  /* Iterate until the worklists are empty.  */
  while (!cfg_blocks_empty_p () || VARRAY_ACTIVE_SIZE (ssa_edges) > 0)
    {
      if (!cfg_blocks_empty_p ())
        {
          /* Pull the next block to simulate off the worklist.  */
          basic_block dest_block = cfg_blocks_get ();
          simulate_block (dest_block);
        }

      /* The SSA_EDGES worklist can get rather large.  Go ahead and
         drain the entire worklist each iteration through this loop.  */
      while (VARRAY_ACTIVE_SIZE (ssa_edges) > 0)
        {
          /* Pull the statement to simulate off the worklist.  */
          tree stmt = VARRAY_TOP_TREE (ssa_edges);
          stmt_ann_t ann = stmt_ann (stmt);
          VARRAY_POP (ssa_edges);

          /* visit_stmt can "cancel" reevaluation of some statements.
             If it does, then in_ccp_worklist will be zero.  */
          if (ann->in_ccp_worklist)
            {
              ann->in_ccp_worklist = 0;
              simulate_stmt (stmt);
            }
        }
    }

  /* Now perform substitutions based on the known constant values.  */
  substitute_and_fold ();

  /* Now cleanup any unreachable code.  */
  cleanup_tree_cfg ();

  /* Free allocated memory.  */
  finalize ();

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_referenced_vars (dump_file);
      fprintf (dump_file, "\n\n");
    }
}

static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}

struct tree_opt_pass pass_ccp =
{
  "ccp",                                /* name */
  gate_ccp,                             /* gate */
  tree_ssa_ccp,                         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_CCP,                          /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_rename_vars
    | TODO_ggc_collect | TODO_verify_ssa  /* todo_flags_finish */
};


/* Get the constant value associated with variable VAR.  */

static value *
get_value (tree var)
{
  value *val;

#if defined ENABLE_CHECKING
  if (TREE_CODE (var) != SSA_NAME)
    abort ();
#endif

  val = &value_vector[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  return val;
}


/* Simulate the execution of BLOCK.  Evaluate the statement associated
   with each variable reference inside the block.  */

static void
simulate_block (basic_block block)
{
  tree phi;

  /* There is nothing to do for the exit block.  */
  if (block == EXIT_BLOCK_PTR)
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSimulating block %d\n", block->index);

  /* Always simulate PHI nodes, even if we have simulated this block
     before.  */
  for (phi = phi_nodes (block); phi; phi = TREE_CHAIN (phi))
    visit_phi_node (phi);

  /* If this is the first time we've simulated this block, then we
     must simulate each of its statements.  */
  if (!TEST_BIT (executable_blocks, block->index))
    {
      block_stmt_iterator j;
      unsigned int normal_edge_count;
      edge e, normal_edge;

      /* Note that we have simulated this block.  */
      SET_BIT (executable_blocks, block->index);

      for (j = bsi_start (block); !bsi_end_p (j); bsi_next (&j))
        visit_stmt (bsi_stmt (j));

      /* We can not predict when abnormal edges will be executed, so
         once a block is considered executable, we consider any
         outgoing abnormal edges as executable.

         At the same time, if this block has only one successor that is
         reached by non-abnormal edges, then add that successor to the
         worklist.  */
      normal_edge_count = 0;
      normal_edge = NULL;
      for (e = block->succ; e; e = e->succ_next)
        {
          if (e->flags & EDGE_ABNORMAL)
            {
              add_control_edge (e);
            }
          else
            {
              normal_edge_count++;
              normal_edge = e;
            }
        }

      if (normal_edge_count == 1)
        add_control_edge (normal_edge);
    }
}


/* Simulate statement USE_STMT, which was reached by following a def-use
   (SSA) edge from a definition whose lattice value has changed.  */

static void
simulate_stmt (tree use_stmt)
{
  basic_block use_bb = bb_for_stmt (use_stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nSimulating statement (from ssa_edges): ");
      print_generic_stmt (dump_file, use_stmt, dump_flags);
    }

  if (TREE_CODE (use_stmt) == PHI_NODE)
    {
      /* PHI nodes are always visited, regardless of whether or not the
         destination block is executable.  */
      visit_phi_node (use_stmt);
    }
  else if (TEST_BIT (executable_blocks, use_bb->index))
    {
      /* Otherwise, visit the statement containing the use reached by
         DEF, only if the destination block is marked executable.  */
      visit_stmt (use_stmt);
    }
}


/* Perform final substitution and folding.  After this pass the program
   should still be in SSA form.  */

static void
substitute_and_fold (void)
{
  basic_block bb;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file,
             "\nSubstituting constants and folding statements\n\n");

  /* Substitute constants in every statement of every basic block.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      tree phi;

      /* Propagate our known constants into PHI nodes.  */
      for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
        {
          int i;

          for (i = 0; i < PHI_NUM_ARGS (phi); i++)
            {
              value *new_val;
              tree *orig_p = &PHI_ARG_DEF (phi, i);

              if (! SSA_VAR_P (*orig_p))
                break;

              new_val = get_value (*orig_p);
              if (new_val->lattice_val == CONSTANT
                  && may_propagate_copy (*orig_p, new_val->const_val))
                *orig_p = new_val->const_val;
            }
        }

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          bool replaced_address;
          tree stmt = bsi_stmt (i);

          /* Skip statements that have been folded already.  */
          if (stmt_modified_p (stmt) || !is_exec_stmt (stmt))
            continue;

          /* Replace the statement with its folded version and mark it
             folded.  */
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Line %d: replaced ", get_lineno (stmt));
              print_generic_stmt (dump_file, stmt, TDF_SLIM);
            }

          if (replace_uses_in (stmt, &replaced_address))
            {
              bool changed = fold_stmt (bsi_stmt_ptr (i));
              stmt = bsi_stmt (i);
              modify_stmt (stmt);
              /* If we folded a builtin function, we'll likely
                 need to rename VDEFs.  */
              if (replaced_address || changed)
                mark_new_vars_to_rename (stmt, vars_to_rename);
            }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, " with ");
              print_generic_stmt (dump_file, stmt, TDF_SLIM);
              fprintf (dump_file, "\n");
            }
        }
    }
}


/* Loop through the PHI node's parameters and compare their lattice values
   to determine the PHI node's lattice value.  The value of a PHI node is
   determined by calling cp_lattice_meet () with all the arguments of the
   PHI node that are incoming via executable edges.  */

static void
visit_phi_node (tree phi)
{
  bool short_circuit = 0;
  value phi_val, *curr_val;
  int i;

  /* If the PHI node has already been deemed to be VARYING, don't simulate
     it again.  */
  if (DONT_SIMULATE_AGAIN (phi))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_generic_expr (dump_file, phi, dump_flags);
    }

  curr_val = get_value (PHI_RESULT (phi));
  switch (curr_val->lattice_val)
    {
    case VARYING:
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "\n Shortcircuit. Default of VARYING.");
      short_circuit = 1;
      break;

    case CONSTANT:
      phi_val = *curr_val;
      break;

    case UNDEFINED:
    case UNINITIALIZED:
      phi_val.lattice_val = UNDEFINED;
      phi_val.const_val = NULL_TREE;
      break;

    default:
      abort ();
    }

  /* If the variable is volatile or the variable is never referenced in a
     real operand, then consider the PHI node VARYING.  */
  if (short_circuit || TREE_THIS_VOLATILE (SSA_NAME_VAR (PHI_RESULT (phi))))
    {
      phi_val.lattice_val = VARYING;
      phi_val.const_val = NULL;
    }
  else
    for (i = 0; i < PHI_NUM_ARGS (phi); i++)
      {
        /* Compute the meet operator over all the PHI arguments.  */
        edge e = PHI_ARG_EDGE (phi, i);

        if (dump_file && (dump_flags & TDF_DETAILS))
          {
            fprintf (dump_file,
                     "\n Argument #%d (%d -> %d %sexecutable)\n",
                     i, e->src->index, e->dest->index,
                     (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
          }

        /* If the incoming edge is executable, compute the meet operator for
           the existing value of the PHI node and the current PHI argument.  */
        if (e->flags & EDGE_EXECUTABLE)
          {
            tree rdef = PHI_ARG_DEF (phi, i);
            value *rdef_val, val;

            if (is_gimple_min_invariant (rdef))
              {
                val.lattice_val = CONSTANT;
                val.const_val = rdef;
                rdef_val = &val;
              }
            else
              rdef_val = get_value (rdef);

            phi_val = cp_lattice_meet (phi_val, *rdef_val);

            if (dump_file && (dump_flags & TDF_DETAILS))
              {
                fprintf (dump_file, "\t");
                print_generic_expr (dump_file, rdef, dump_flags);
                dump_lattice_value (dump_file, "\tValue: ", *rdef_val);
                fprintf (dump_file, "\n");
              }

            if (phi_val.lattice_val == VARYING)
              break;
          }
      }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n PHI node value: ", phi_val);
      fprintf (dump_file, "\n\n");
    }

  set_lattice_value (PHI_RESULT (phi), phi_val);
  if (phi_val.lattice_val == VARYING)
    DONT_SIMULATE_AGAIN (phi) = 1;
}


/* Compute the meet operator between VAL1 and VAL2:

   any M UNDEFINED = any
   any M VARYING   = VARYING
   Ci  M Cj        = Ci       if (i == j)
   Ci  M Cj        = VARYING  if (i != j)  */
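
/* For illustration (not part of the original sources), some concrete
   meets over integer constants:

     UNDEFINED  M CONSTANT 4 = CONSTANT 4
     CONSTANT 4 M CONSTANT 4 = CONSTANT 4
     CONSTANT 4 M CONSTANT 5 = VARYING
     CONSTANT 4 M VARYING    = VARYING  */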
static value
cp_lattice_meet (value val1, value val2)
{
  value result;

  /* any M UNDEFINED = any.  */
  if (val1.lattice_val == UNDEFINED)
    return val2;
  else if (val2.lattice_val == UNDEFINED)
    return val1;

  /* any M VARYING = VARYING.  */
  if (val1.lattice_val == VARYING || val2.lattice_val == VARYING)
    {
      result.lattice_val = VARYING;
      result.const_val = NULL_TREE;
      return result;
    }

  /* Ci M Cj = Ci if (i == j)
     Ci M Cj = VARYING if (i != j)  */
  if (simple_cst_equal (val1.const_val, val2.const_val) == 1)
    {
      result.lattice_val = CONSTANT;
      result.const_val = val1.const_val;
    }
  else
    {
      result.lattice_val = VARYING;
      result.const_val = NULL_TREE;
    }

  return result;
}


/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, do the following:

   - If the statement is an assignment, add all the SSA edges starting at
     this definition.

   - If the statement is a conditional branch:
       . If the statement evaluates to non-constant, add all edges to
         worklist.
       . If the statement is constant, add the edge executed as the
         result of the branch.  */
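
/* For example (illustrative, not from the original sources): when
   simulating "if (x_1 == 3)" with x_1 known to be CONSTANT 3, the
   condition folds to a constant, so only the taken edge is added to
   the worklist and the dead arm is never simulated.  */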

static void
visit_stmt (tree stmt)
{
  size_t i;
  stmt_ann_t ann;
  def_optype defs;
  vdef_optype vdefs;

  /* If the statement has already been deemed to be VARYING, don't simulate
     it again.  */
  if (DONT_SIMULATE_AGAIN (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement: ");
      print_generic_stmt (dump_file, stmt, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  ann = stmt_ann (stmt);

  /* If this statement is already in the worklist then "cancel" it.  The
     reevaluation implied by the worklist entry will produce the same
     value we generate here and thus reevaluating it again from the
     worklist is pointless.  */
  if (ann->in_ccp_worklist)
    ann->in_ccp_worklist = 0;

  /* Now examine the statement.  If the statement is an assignment that
     produces a single output value, evaluate its RHS to see if the lattice
     value of its output has changed.  */
  if (TREE_CODE (stmt) == MODIFY_EXPR
      && TREE_CODE (TREE_OPERAND (stmt, 0)) == SSA_NAME)
    visit_assignment (stmt);

  /* Definitions made by statements other than assignments to SSA_NAMEs
     represent unknown modifications to their outputs.  Mark them VARYING.  */
  else if (NUM_DEFS (defs = DEF_OPS (ann)) != 0)
    {
      DONT_SIMULATE_AGAIN (stmt) = 1;
      for (i = 0; i < NUM_DEFS (defs); i++)
        {
          tree def = DEF_OP (defs, i);
          def_to_varying (def);
        }
    }

  /* If STMT is a conditional branch, see if we can determine which branch
     will be taken.  */
  else if (TREE_CODE (stmt) == COND_EXPR || TREE_CODE (stmt) == SWITCH_EXPR)
    visit_cond_stmt (stmt);

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  else
    {
      DONT_SIMULATE_AGAIN (stmt) = 1;

      /* If STMT is a computed goto, then mark all the output edges
         executable.  */
      if (computed_goto_p (stmt))
        add_outgoing_control_edges (bb_for_stmt (stmt));
    }

  /* Mark all VDEF operands VARYING.  */
  vdefs = VDEF_OPS (ann);
  for (i = 0; i < NUM_VDEFS (vdefs); i++)
    def_to_varying (VDEF_RESULT (vdefs, i));
}


/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS.  */

static void
visit_assignment (tree stmt)
{
  value val;
  tree lhs, rhs;

  lhs = TREE_OPERAND (stmt, 0);
  rhs = TREE_OPERAND (stmt, 1);

  if (TREE_THIS_VOLATILE (SSA_NAME_VAR (lhs)))
    {
      /* Volatile variables are always VARYING.  */
      val.lattice_val = VARYING;
      val.const_val = NULL_TREE;
    }
  else if (TREE_CODE (rhs) == SSA_NAME)
    {
      /* For a simple copy operation, we copy the lattice values.  */
      value *nval = get_value (rhs);
      val = *nval;
    }
  else
    {
      /* Evaluate the statement.  */
      val = evaluate_stmt (stmt);
    }

  /* FIXME: Hack.  If this was a definition of a bitfield, we need to widen
     the constant value into the type of the destination variable.  This
     should not be necessary if GCC represented bitfields properly.  */
  {
    tree lhs = TREE_OPERAND (stmt, 0);
    if (val.lattice_val == CONSTANT
        && TREE_CODE (lhs) == COMPONENT_REF
        && DECL_BIT_FIELD (TREE_OPERAND (lhs, 1)))
      {
        tree w = widen_bitfield (val.const_val, TREE_OPERAND (lhs, 1), lhs);

        if (w && is_gimple_min_invariant (w))
          val.const_val = w;
        else
          {
            val.lattice_val = VARYING;
            val.const_val = NULL;
          }
      }
  }

  /* Set the lattice value of the statement's output.  */
  set_lattice_value (lhs, val);
  if (val.lattice_val == VARYING)
    DONT_SIMULATE_AGAIN (stmt) = 1;
}


/* Visit the conditional statement STMT.  If it evaluates to a constant value,
   mark outgoing edges appropriately.  */

static void
visit_cond_stmt (tree stmt)
{
  edge e;
  value val;
  basic_block block;

  block = bb_for_stmt (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically, add
     all outgoing edges from BLOCK.  */
  e = find_taken_edge (block, val.const_val);
  if (e)
    add_control_edge (e);
  else
    {
      DONT_SIMULATE_AGAIN (stmt) = 1;
      add_outgoing_control_edges (block);
    }
}


/* Add all the edges coming out of BB to the control flow worklist.  */

static void
add_outgoing_control_edges (basic_block bb)
{
  edge e;

  for (e = bb->succ; e; e = e->succ_next)
    add_control_edge (e);
}


/* Add edge E to the control flow worklist.  */

static void
add_control_edge (edge e)
{
  basic_block bb = e->dest;
  if (bb == EXIT_BLOCK_PTR)
    return;

  /* If the edge had already been executed, skip it.  */
  if (e->flags & EDGE_EXECUTABLE)
    return;

  e->flags |= EDGE_EXECUTABLE;

  /* If the block is already in the list, we're done.  */
  if (TEST_BIT (bb_in_list, bb->index))
    return;

  cfg_blocks_add (bb);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Adding Destination of edge (%d -> %d) to worklist\n\n",
             e->src->index, e->dest->index);
}


/* CCP specific front-end to the non-destructive constant folding routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS.  */

static tree
ccp_fold (tree stmt)
{
  tree rhs = get_rhs (stmt);
  enum tree_code code = TREE_CODE (rhs);
  int kind = TREE_CODE_CLASS (code);
  tree retval = NULL_TREE;

  /* If the RHS is just a variable, then that variable must now have
     a constant value that we can return directly.  */
  if (TREE_CODE (rhs) == SSA_NAME)
    return get_value (rhs)->const_val;

  /* Unary operators.  Note that we know the single operand must
     be a constant.  So this should almost always return a
     simplified RHS.  */
  if (kind == '1')
    {
      /* Handle unary operators which can appear in GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);

      /* Simplify the operand down to a constant.  */
      if (TREE_CODE (op0) == SSA_NAME)
        {
          value *val = get_value (op0);
          if (val->lattice_val == CONSTANT)
            op0 = get_value (op0)->const_val;
        }

      retval = nondestructive_fold_unary_to_constant (code,
                                                      TREE_TYPE (rhs),
                                                      op0);

      /* If we folded, but did not create an invariant, then we can not
         use this expression.  */
      if (retval && ! is_gimple_min_invariant (retval))
        return NULL;

      /* If we could not fold the expression, but the arguments are all
         constants and gimple values, then build and return the new
         expression.

         In some cases the new expression is still something we can
         use as a replacement for an argument.  This happens with
         NOP conversions of types for example.

         In other cases the new expression can not be used as a
         replacement for an argument (as it would create non-gimple
         code).  But the new expression can still be used to derive
         other constants.  */
      if (! retval && is_gimple_min_invariant (op0))
        return build1 (code, TREE_TYPE (rhs), op0);
    }

  /* Binary and comparison operators.  We know one or both of the
     operands are constants.  */
  else if (kind == '2'
           || kind == '<'
           || code == TRUTH_AND_EXPR
           || code == TRUTH_OR_EXPR
           || code == TRUTH_XOR_EXPR)
    {
      /* Handle binary and comparison operators that can appear in
         GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);
      tree op1 = TREE_OPERAND (rhs, 1);

      /* Simplify the operands down to constants when appropriate.  */
      if (TREE_CODE (op0) == SSA_NAME)
        {
          value *val = get_value (op0);
          if (val->lattice_val == CONSTANT)
            op0 = val->const_val;
        }

      if (TREE_CODE (op1) == SSA_NAME)
        {
          value *val = get_value (op1);
          if (val->lattice_val == CONSTANT)
            op1 = val->const_val;
        }

      retval = nondestructive_fold_binary_to_constant (code,
                                                       TREE_TYPE (rhs),
                                                       op0, op1);

      /* If we folded, but did not create an invariant, then we can not
         use this expression.  */
      if (retval && ! is_gimple_min_invariant (retval))
        return NULL;

      /* If we could not fold the expression, but the arguments are all
         constants and gimple values, then build and return the new
         expression.

         In some cases the new expression is still something we can
         use as a replacement for an argument.  This happens with
         NOP conversions of types for example.

         In other cases the new expression can not be used as a
         replacement for an argument (as it would create non-gimple
         code).  But the new expression can still be used to derive
         other constants.  */
      if (! retval
          && is_gimple_min_invariant (op0)
          && is_gimple_min_invariant (op1))
        return build (code, TREE_TYPE (rhs), op0, op1);
    }

  /* We may be able to fold away calls to builtin functions if their
     arguments are constants.  */
  else if (code == CALL_EXPR
           && TREE_CODE (TREE_OPERAND (rhs, 0)) == ADDR_EXPR
           && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0))
               == FUNCTION_DECL)
           && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)))
    {
      use_optype uses = STMT_USE_OPS (stmt);
      if (NUM_USES (uses) != 0)
        {
          tree *orig;
          size_t i;

          /* Preserve the original values of every operand.  */
          orig = xmalloc (sizeof (tree) * NUM_USES (uses));
          for (i = 0; i < NUM_USES (uses); i++)
            orig[i] = USE_OP (uses, i);

          /* Substitute operands with their values and try to fold.  */
          replace_uses_in (stmt, NULL);
          retval = fold_builtin (rhs);

          /* Restore operands to their original form.  */
          for (i = 0; i < NUM_USES (uses); i++)
            *(USE_OP_PTR (uses, i)) = orig[i];
          free (orig);
        }
    }
  else
    return rhs;

  /* If we got a simplified form, see if we need to convert its type.  */
  if (retval)
    {
      if (TREE_TYPE (retval) != TREE_TYPE (rhs))
        retval = convert (TREE_TYPE (rhs), retval);

      if (TREE_TYPE (retval) == TREE_TYPE (rhs))
        return retval;
    }

  /* No simplification was possible.  */
  return rhs;
}


/* Evaluate statement STMT.  */

static value
evaluate_stmt (tree stmt)
{
  value val;
  tree simplified;
  latticevalue likelyvalue = likely_value (stmt);

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    simplified = get_rhs (stmt);
  /* Otherwise the statement is likely to have an UNDEFINED value and
     there will be nothing to do.  */
  else
    simplified = NULL_TREE;

  if (simplified && is_gimple_min_invariant (simplified))
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.const_val = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
         had undefined operands, then the result of the statement should
         be undefined.  Else the result of the statement is VARYING.  */
      val.lattice_val = (likelyvalue == UNDEFINED ? UNDEFINED : VARYING);
      val.const_val = NULL_TREE;
    }

  return val;
}


/* Debugging dumps.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, value val)
{
  switch (val.lattice_val)
    {
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.const_val, dump_flags);
      break;
    default:
      abort ();
    }
}

/* Given a constant value VAL for bitfield FIELD, and a destination
   variable VAR, return VAL appropriately widened to fit into VAR.  If
   FIELD is wider than HOST_WIDE_INT, NULL is returned.  */
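
/* For example (illustrative, not from the original sources): with an
   8-bit VAR and a 4-bit signed FIELD, widening the constant 0xe
   (binary 1110) takes the sign-extension path below: the mask covers
   the upper four bits (0xf0) and the BIT_IOR_EXPR yields 0xfe, i.e. -2.
   For an unsigned FIELD the zero-extension path masks with 0x0f
   instead, yielding 0x0e.  */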

tree
widen_bitfield (tree val, tree field, tree var)
{
  unsigned var_size, field_size;
  tree wide_val;
  unsigned HOST_WIDE_INT mask;
  unsigned i;

  var_size = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (var)));
  field_size = TREE_INT_CST_LOW (DECL_SIZE (field));

  /* Give up if either the bitfield or the variable are too wide.  */
  if (field_size > HOST_BITS_PER_WIDE_INT || var_size > HOST_BITS_PER_WIDE_INT)
    return NULL;

#if defined ENABLE_CHECKING
  if (var_size < field_size)
    abort ();
#endif

  /* If VAL is not an integer constant, then give up.  */
  if (TREE_CODE (val) != INTEGER_CST)
    return NULL;

  /* If the sign bit of the value is not set, or the field's type is
     unsigned, then just mask off the high order bits of the value.
     Note the shifts are done in unsigned HOST_WIDE_INT so they remain
     well defined when the widths approach HOST_BITS_PER_WIDE_INT.  */
  if ((TREE_INT_CST_LOW (val)
       & ((unsigned HOST_WIDE_INT) 1 << (field_size - 1))) == 0
      || DECL_UNSIGNED (field))
    {
      /* Zero extension.  Build a mask with the lower 'field_size' bits
         set and a BIT_AND_EXPR node to clear the high order bits of
         the value.  */
      for (i = 0, mask = 0; i < field_size; i++)
        mask |= (unsigned HOST_WIDE_INT) 1 << i;

      wide_val = build (BIT_AND_EXPR, TREE_TYPE (var), val,
                        build_int_2 (mask, 0));
    }
  else
    {
      /* Sign extension.  Create a mask with the upper
         'var_size - field_size' bits set and a BIT_IOR_EXPR to set the
         high order bits of the value.  */
      for (i = 0, mask = 0; i < (var_size - field_size); i++)
        mask |= (unsigned HOST_WIDE_INT) 1 << (var_size - i - 1);

      wide_val = build (BIT_IOR_EXPR, TREE_TYPE (var), val,
                        build_int_2 (mask, 0));
    }

  return fold (wide_val);
}


/* Function indicating whether we ought to include information for VAR
   when calculating immediate uses.  */

static bool
need_imm_uses_for (tree var)
{
  return get_value (var)->lattice_val != VARYING;
}


/* Initialize local data structures and worklists for CCP.  */

static void
initialize (void)
{
  edge e;
  basic_block bb;
  sbitmap virtual_var;

  /* Worklist of SSA edges.  */
  VARRAY_TREE_INIT (ssa_edges, 20, "ssa_edges");

  executable_blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (executable_blocks);

  bb_in_list = sbitmap_alloc (last_basic_block);
  sbitmap_zero (bb_in_list);

  value_vector = (value *) xmalloc (highest_ssa_version * sizeof (value));
  memset (value_vector, 0, highest_ssa_version * sizeof (value));

  /* 1 if ssa variable is used in a virtual variable context.  */
  virtual_var = sbitmap_alloc (highest_ssa_version);
  sbitmap_zero (virtual_var);

  /* Initialize default values and simulation flags for PHI nodes, statements
     and edges.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      tree stmt;
      stmt_ann_t ann;
      def_optype defs;
      vdef_optype vdefs;
      size_t x;
      int vary;

      /* Get the default value for each definition.  */
      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          vary = 0;
          stmt = bsi_stmt (i);
          get_stmt_operands (stmt);
          ann = stmt_ann (stmt);
          defs = DEF_OPS (ann);
          for (x = 0; x < NUM_DEFS (defs); x++)
            {
              tree def = DEF_OP (defs, x);
              if (get_value (def)->lattice_val == VARYING)
                vary = 1;
            }
          DONT_SIMULATE_AGAIN (stmt) = vary;

          /* Mark all VDEF operands VARYING.  */
          vdefs = VDEF_OPS (ann);
          for (x = 0; x < NUM_VDEFS (vdefs); x++)
            {
              tree res = VDEF_RESULT (vdefs, x);
              get_value (res)->lattice_val = VARYING;
              SET_BIT (virtual_var, SSA_NAME_VERSION (res));
            }
        }

      for (e = bb->succ; e; e = e->succ_next)
        e->flags &= ~EDGE_EXECUTABLE;
    }

  /* Now process PHI nodes.  */
  FOR_EACH_BB (bb)
    {
      tree phi, var;
      int x;
      for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
        {
          value *val;
          val = get_value (PHI_RESULT (phi));
          if (val->lattice_val != VARYING)
            {
              for (x = 0; x < PHI_NUM_ARGS (phi); x++)
                {
                  var = PHI_ARG_DEF (phi, x);
                  /* If one argument is virtual, the result is virtual, and
                     therefore varying.  */
                  if (TREE_CODE (var) == SSA_NAME)
                    {
                      if (TEST_BIT (virtual_var, SSA_NAME_VERSION (var)))
                        {
                          val->lattice_val = VARYING;
                          SET_BIT (virtual_var,
                                   SSA_NAME_VERSION (PHI_RESULT (phi)));
                          break;
                        }
                    }
                }
            }
          DONT_SIMULATE_AGAIN (phi) = ((val->lattice_val == VARYING) ? 1 : 0);
        }
    }

  sbitmap_free (virtual_var);
  /* Compute immediate uses for variables we care about.  */
  compute_immediate_uses (TDFA_USE_OPS, need_imm_uses_for);

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_immediate_uses (dump_file);

  VARRAY_BB_INIT (cfg_blocks, 20, "cfg_blocks");

  /* Seed the algorithm by adding the successors of the entry block to the
     edge worklist.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      if (e->dest != EXIT_BLOCK_PTR)
        {
          e->flags |= EDGE_EXECUTABLE;
          cfg_blocks_add (e->dest);
        }
    }
}


/* Free allocated storage.  */

static void
finalize (void)
{
  ssa_edges = NULL;
  cfg_blocks = NULL;
  free (value_vector);
  sbitmap_free (bb_in_list);
  sbitmap_free (executable_blocks);
  free_df ();
}

/* Return true if the block worklist is empty.  */

static inline bool
cfg_blocks_empty_p (void)
{
  return (cfg_blocks_num == 0);
}

/* Add a basic block to the worklist.  */

static void
cfg_blocks_add (basic_block bb)
{
  if (bb == ENTRY_BLOCK_PTR || bb == EXIT_BLOCK_PTR)
    return;

  if (TEST_BIT (bb_in_list, bb->index))
    return;

  if (cfg_blocks_empty_p ())
    {
      cfg_blocks_tail = cfg_blocks_head = 0;
      cfg_blocks_num = 1;
    }
  else
    {
      cfg_blocks_num++;
      if (cfg_blocks_num > VARRAY_SIZE (cfg_blocks))
        {
          /* We have to grow the array now.  Adjust the queue to occupy the
             full space of the original array.  */
          cfg_blocks_tail = VARRAY_SIZE (cfg_blocks);
          cfg_blocks_head = 0;
          VARRAY_GROW (cfg_blocks, 2 * VARRAY_SIZE (cfg_blocks));
        }
      else
        cfg_blocks_tail = (cfg_blocks_tail + 1) % VARRAY_SIZE (cfg_blocks);
    }
  VARRAY_BB (cfg_blocks, cfg_blocks_tail) = bb;
  SET_BIT (bb_in_list, bb->index);
}

/* Remove a block from the worklist.  */

static basic_block
cfg_blocks_get (void)
{
  basic_block bb;

  bb = VARRAY_BB (cfg_blocks, cfg_blocks_head);

#ifdef ENABLE_CHECKING
  if (cfg_blocks_empty_p () || !bb)
    abort ();
#endif

  cfg_blocks_head = (cfg_blocks_head + 1) % VARRAY_SIZE (cfg_blocks);
  --cfg_blocks_num;
  RESET_BIT (bb_in_list, bb->index);

  return bb;
}

/* We have just defined a new value for VAR.  Add all immediate uses
   of VAR to the ssa_edges worklist.  */

static void
add_var_to_ssa_edges_worklist (tree var)
{
  tree stmt = SSA_NAME_DEF_STMT (var);
  dataflow_t df = get_immediate_uses (stmt);
  int num_uses = num_immediate_uses (df);
  int i;

  for (i = 0; i < num_uses; i++)
    {
      tree use = immediate_use (df, i);

      if (!DONT_SIMULATE_AGAIN (use))
        {
          stmt_ann_t ann = stmt_ann (use);
          if (ann->in_ccp_worklist == 0)
            {
              ann->in_ccp_worklist = 1;
              VARRAY_PUSH_TREE (ssa_edges, use);
            }
        }
    }
}

/* Set the lattice value for the variable VAR to VARYING.  */

static void
def_to_varying (tree var)
{
  value val;
  val.lattice_val = VARYING;
  val.const_val = NULL_TREE;
  set_lattice_value (var, val);
}

/* Set the lattice value for variable VAR to VAL.  */

static void
set_lattice_value (tree var, value val)
{
  value *old = get_value (var);

#ifdef ENABLE_CHECKING
  if (val.lattice_val == UNDEFINED)
    {
      /* CONSTANT->UNDEFINED is never a valid state transition.  */
      if (old->lattice_val == CONSTANT)
        abort ();

      /* VARYING->UNDEFINED is generally not a valid state transition,
         except for values which are initialized to VARYING.  */
      if (old->lattice_val == VARYING
          && get_default_value (var).lattice_val != VARYING)
        abort ();
    }
  else if (val.lattice_val == CONSTANT)
    {
      /* VARYING -> CONSTANT is an invalid state transition, except
         for objects which start off in a VARYING state.  */
      if (old->lattice_val == VARYING
          && get_default_value (var).lattice_val != VARYING)
        abort ();
    }
#endif

  /* If the constant for VAR has changed, then this VAR is really varying.  */
  if (old->lattice_val == CONSTANT
      && val.lattice_val == CONSTANT
      && !simple_cst_equal (old->const_val, val.const_val))
    {
      val.lattice_val = VARYING;
      val.const_val = NULL_TREE;
    }

  if (old->lattice_val != val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          dump_lattice_value (dump_file, "Lattice value changed to ", val);
          fprintf (dump_file, ".  Adding definition to SSA edges.\n");
        }

      add_var_to_ssa_edges_worklist (var);
      *old = val;
    }
}

/* Replace USE references in statement STMT with their immediate reaching
   definition.  Return true if at least one reference was replaced.  If
   REPLACED_ADDRESSES_P is given, it will be set to true if an address
   constant was replaced.  */

static bool
replace_uses_in (tree stmt, bool *replaced_addresses_p)
{
  bool replaced = false;
  use_optype uses;
  size_t i;

  if (replaced_addresses_p)
    *replaced_addresses_p = false;

  get_stmt_operands (stmt);

  uses = STMT_USE_OPS (stmt);
  for (i = 0; i < NUM_USES (uses); i++)
    {
      tree *use = USE_OP_PTR (uses, i);
      value *val = get_value (*use);

      if (val->lattice_val == CONSTANT)
        {
          *use = val->const_val;
          replaced = true;
          if (POINTER_TYPE_P (TREE_TYPE (*use)) && replaced_addresses_p)
            *replaced_addresses_p = true;
        }
    }

  return replaced;
}

/* Return the likely latticevalue for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if any operands of STMT are undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */
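
/* For example (illustrative, not from the original sources): for
   a_5 = b_6 + 3, the result is UNDEFINED if b_6 is UNDEFINED,
   CONSTANT if b_6 is a known constant, and VARYING otherwise.  */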

static latticevalue
likely_value (tree stmt)
{
  use_optype uses;
  size_t i;
  int found_constant = 0;
  stmt_ann_t ann;

  /* If the statement makes aliased loads or has volatile operands, it
     won't fold to a constant value.  */
  ann = stmt_ann (stmt);
  if (ann->makes_aliased_loads || ann->has_volatile_ops)
    return VARYING;

  /* A CALL_EXPR is assumed to be varying.  This may be overly conservative
     in the presence of const and pure calls.  */
  if (get_call_expr_in (stmt) != NULL_TREE)
    return VARYING;

  get_stmt_operands (stmt);

  uses = USE_OPS (ann);
  for (i = 0; i < NUM_USES (uses); i++)
    {
      tree use = USE_OP (uses, i);
      value *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
        return UNDEFINED;

      if (val->lattice_val == CONSTANT)
        found_constant = 1;
    }

  return ((found_constant || !uses) ? CONSTANT : VARYING);
}

/* A subroutine of fold_stmt_r.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */
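
/* For example (illustrative, not from the original sources): with
   int a[10] and 4-byte ints, folding *(&a + 8) reaches this routine
   with BASE a, OFFSET 8 and ORIG_TYPE int; the offset divides exactly
   by the element size, so the result is the ARRAY_REF a[2].  */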

static tree
maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type)
{
  unsigned HOST_WIDE_INT lquo, lrem;
  HOST_WIDE_INT hquo, hrem;
  tree elt_size, min_idx, idx;
  tree array_type, elt_type;

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!lang_hooks.types_compatible_p (orig_type, elt_type))
    return NULL_TREE;

  /* Whee.  Ignore indexing of variable sized types.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (TREE_CODE (elt_size) != INTEGER_CST)
    return NULL_TREE;

  /* If the division isn't exact, then don't do anything.  Equally
     invalid as the above indexing of non-array variables.  */
  if (div_and_round_double (TRUNC_DIV_EXPR, 1,
                            TREE_INT_CST_LOW (offset),
                            TREE_INT_CST_HIGH (offset),
                            TREE_INT_CST_LOW (elt_size),
                            TREE_INT_CST_HIGH (elt_size),
                            &lquo, &hquo, &lrem, &hrem)
      || lrem || hrem)
    return NULL_TREE;
  idx = build_int_2_wide (lquo, hquo);

  /* Re-bias the index by the min index of the array type.  */
  min_idx = TYPE_DOMAIN (TREE_TYPE (base));
  if (min_idx)
    {
      min_idx = TYPE_MIN_VALUE (min_idx);
      if (min_idx)
        {
          idx = convert (TREE_TYPE (min_idx), idx);
          if (!integer_zerop (min_idx))
            idx = int_const_binop (PLUS_EXPR, idx, min_idx, 1);
        }
    }

  return build (ARRAY_REF, orig_type, base, idx);
}

/* A subroutine of fold_stmt_r.  Attempts to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */
/* ??? This doesn't handle class inheritance.  */
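
/* For example (illustrative, not from the original sources): given
   struct { int x; int y; } s, folding *(&s + 4) walks the FIELD_DECLs
   of the struct, finds that the 4-byte offset lands exactly on the
   int-typed field y, and returns the COMPONENT_REF s.y.  */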

static tree
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
                                    tree orig_type, bool base_is_ptr)
{
  tree f, t, field_type, tail_array_field;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (lang_hooks.types_compatible_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
        continue;
      if (DECL_BIT_FIELD (f))
        continue;
      if (TREE_CODE (DECL_FIELD_OFFSET (f)) != INTEGER_CST)
        continue;

      /* ??? Java creates "interesting" fields for representing base classes.
         They have no name, and have no context.  With no context, we get into
         trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
        continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (DECL_FIELD_OFFSET (f), offset);
      if (cmp > 0)
        continue;

      field_type = TREE_TYPE (f);
      if (cmp < 0)
        {
          /* Don't care about offsets into the middle of scalars.  */
          if (!AGGREGATE_TYPE_P (field_type))
            continue;

          /* Check for an array at the end of the struct.  This is often
             used for flexible array members.  We should be able to
             turn this into an array access anyway.  */
          if (TREE_CODE (field_type) == ARRAY_TYPE)
            tail_array_field = f;

          /* Check the end of the field against the offset.  */
          if (!DECL_SIZE_UNIT (f)
              || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
            continue;
          t = int_const_binop (MINUS_EXPR, offset, DECL_FIELD_OFFSET (f), 1);
          if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
            continue;

          /* If we matched, then set offset to the displacement into
             this field.  */
          offset = t;
        }

      /* Here we exactly match the offset being checked.  If the types match,
         then we can return that field.  */
      else if (lang_hooks.types_compatible_p (orig_type, field_type))
        {
          if (base_is_ptr)
            base = build1 (INDIRECT_REF, record_type, base);
          t = build (COMPONENT_REF, field_type, base, f);
          return t;
        }

      /* Don't care about type-punning of scalars.  */
      else if (!AGGREGATE_TYPE_P (field_type))
        return NULL_TREE;

      goto found;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);

 found:
  /* If we get here, we've got an aggregate field, and a possibly
     non-zero offset into them.  Recurse and hope for a valid match.  */
  if (base_is_ptr)
    base = build1 (INDIRECT_REF, record_type, base);
  base = build (COMPONENT_REF, field_type, base, f);

  t = maybe_fold_offset_to_array_ref (base, offset, orig_type);
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (field_type, base, offset,
                                             orig_type, false);
}

/* A subroutine of fold_stmt_r.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */

static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a PLUS_EXPR.  */
  if (TREE_CODE (base) == PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
        return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = int_const_binop (PLUS_EXPR, offset, offset2, 1);
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Try folding *(&B+O) to B[X].  */
      t = maybe_fold_offset_to_array_ref (base, offset, TREE_TYPE (expr));
      if (t)
        return t;

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_component_ref (TREE_TYPE (base), base, offset,
                                              TREE_TYPE (expr), false);
      if (t)
        return t;

      /* Fold *&B to B.  */
      if (integer_zerop (offset))
        return base;
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
         such as "_"[3].  Bail out of the entire substitution search
         and arrange for the entire statement to be replaced by a
         call to __builtin_trap.  In all likelihood this will all be
         constant-folded away, but in the meantime we can't leave with
         something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
        {
          /* FIXME: Except that this causes problems elsewhere with dead
             code not being deleted, and we abort in the rtl expanders
             because we failed to remove some ssa_name.  In the meantime,
             just return zero.  */
          /* FIXME2: This condition should be signaled by
             fold_read_from_constant_string directly, rather than
             re-checking for it here.  */
          return integer_zero_node;
        }

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
        {
          t = maybe_fold_offset_to_component_ref (TREE_TYPE (TREE_TYPE (base)),
                                                  base, offset,
                                                  TREE_TYPE (expr), true);
          if (t)
            return t;
        }
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}

/* A subroutine of fold_stmt_r.  EXPR is a PLUS_EXPR.

   A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
        (T *)(&array + const)
   where the cast doesn't actually exist, but is implicit in the
   type of the PLUS_EXPR.  We'd like to turn this into
        &array[x]
   which may be able to propagate further.  */
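
/* For example (illustrative, not from the original sources): with
   int array[10] and 4-byte ints, a PLUS_EXPR adding the byte
   constant 8 to &array folds to &array[2], since 8 bytes is exactly
   two elements.  */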

static tree
maybe_fold_stmt_addition (tree expr)
{
  tree op0 = TREE_OPERAND (expr, 0);
  tree op1 = TREE_OPERAND (expr, 1);
  tree ptr_type = TREE_TYPE (expr);
  tree ptd_type;
  tree t;
  bool subtract = (TREE_CODE (expr) == MINUS_EXPR);

  /* We're only interested in pointer arithmetic.  */
  if (!POINTER_TYPE_P (ptr_type))
    return NULL_TREE;
  /* Canonicalize the integral operand to op1.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (op0)))
    {
      if (subtract)
        return NULL_TREE;
      t = op0, op0 = op1, op1 = t;
    }
  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    return NULL_TREE;
  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
        break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
        break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
        {
          min_idx = TYPE_MIN_VALUE (min_idx);
          if (min_idx)
            {
              array_idx = convert (TREE_TYPE (min_idx), array_idx);
              if (!integer_zerop (min_idx))
                array_idx = int_const_binop (MINUS_EXPR, array_idx,
                                             min_idx, 0);
            }
        }

      /* Convert the index to a byte offset.  */
      array_idx = convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      /* If we're manipulating unsigned types, then folding into negative
         values can produce incorrect results.  Particularly if the type
         is smaller than the width of the pointer.  */
      if (subtract
          && TYPE_UNSIGNED (TREE_TYPE (op1))
          && tree_int_cst_lt (array_idx, op1))
        return NULL;
      op1 = int_const_binop (subtract ? MINUS_EXPR : PLUS_EXPR,
                             array_idx, op1, 0);
      subtract = false;
      op0 = array_obj;
    }

  /* If we weren't able to fold the subtraction into another array reference,
     canonicalize the integer for passing to the array and component ref
     simplification functions.  */
  if (subtract)
    {
      if (TYPE_UNSIGNED (TREE_TYPE (op1)))
        return NULL;
      op1 = fold (build1 (NEGATE_EXPR, TREE_TYPE (op1), op1));
      /* ??? In theory fold should always produce another integer.  */
      if (TREE_CODE (op1) != INTEGER_CST)
        return NULL;
    }

  ptd_type = TREE_TYPE (ptr_type);

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type);
  if (!t)
    t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
                                            ptd_type, false);
  if (t)
    t = build1 (ADDR_EXPR, ptr_type, t);

  return t;
}

/* Subroutine of fold_stmt called via walk_tree.  We perform several
   simplifications of EXPR_P, mostly having to do with pointer arithmetic.  */

static tree
fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
{
  bool *changed_p = data;
  tree expr = *expr_p, t;

  /* ??? It'd be nice if walk_tree had a pre-order option.  */
  switch (TREE_CODE (expr))
    {
    case INDIRECT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
                                    integer_zero_node);
      break;

      /* ??? Could handle ARRAY_REF here, as a variant of INDIRECT_REF.
         We'd only want to bother decomposing an existing ARRAY_REF if
         the base array is found to have another offset contained within.
         Otherwise we'd be wasting time.  */

    case ADDR_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      /* Set TREE_INVARIANT properly so that the value is properly
         considered constant, and so gets propagated as expected.  */
      if (*changed_p)
        recompute_tree_invarant_for_addr_expr (expr);
      return NULL_TREE;

    case PLUS_EXPR:
    case MINUS_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_addition (expr);
      break;

    case COMPONENT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      /* Make sure the FIELD_DECL is actually a field in the type on
         the lhs.  In cases with IMA it is possible that it came
         from another, equivalent type at this point.  We have
         already checked the equivalence in this case.
         Match on type plus offset, to allow for unnamed fields.
         We won't necessarily get the corresponding field for
         unions; this is believed to be harmless.  */

      if ((current_file_decl && TREE_CHAIN (current_file_decl))
          && (DECL_FIELD_CONTEXT (TREE_OPERAND (expr, 1))
              != TREE_TYPE (TREE_OPERAND (expr, 0))))
        {
          tree f;
          tree orig_field = TREE_OPERAND (expr, 1);
          tree orig_type = TREE_TYPE (orig_field);
          for (f = TYPE_FIELDS (TREE_TYPE (TREE_OPERAND (expr, 0)));
               f; f = TREE_CHAIN (f))
            {
              if (lang_hooks.types_compatible_p (TREE_TYPE (f), orig_type)
                  && tree_int_cst_compare (DECL_FIELD_BIT_OFFSET (f),
                                           DECL_FIELD_BIT_OFFSET (orig_field))
                     == 0
                  && tree_int_cst_compare (DECL_FIELD_OFFSET (f),
                                           DECL_FIELD_OFFSET (orig_field))
                     == 0)
                {
                  TREE_OPERAND (expr, 1) = f;
                  break;
                }
            }
          /* Fall through is an error; it will be detected in tree-sra.  */
        }
      break;

    default:
      return NULL_TREE;
    }

  if (t)
    {
      *expr_p = t;
      *changed_p = true;
    }

  return NULL_TREE;
}

/* Fold the statement pointed to by STMT_P.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.  */

bool
fold_stmt (tree *stmt_p)
{
  tree rhs, result, stmt;
  bool changed = false;

  stmt = *stmt_p;

  /* If we replaced constants and the statement makes pointer dereferences,
     then we may need to fold instances of *&VAR into VAR, etc.  */
  if (walk_tree (stmt_p, fold_stmt_r, &changed, NULL))
    {
      *stmt_p
        = build_function_call_expr (implicit_built_in_decls[BUILT_IN_TRAP],
                                    NULL);
      return true;
    }

  rhs = get_rhs (stmt);
  if (!rhs)
    return changed;
  result = NULL_TREE;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (TREE_CODE (rhs) == CALL_EXPR)
    {
      tree callee = get_callee_fndecl (rhs);
      if (callee && DECL_BUILT_IN (callee))
        result = ccp_fold_builtin (stmt, rhs);
    }

  /* If we couldn't fold the RHS, hand over to the generic fold routines.  */
  if (result == NULL_TREE)
    result = fold (rhs);

  /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR that
     may have been added by fold, and "useless" type conversions that might
     now be apparent due to propagation.  */
  STRIP_MAIN_TYPE_NOPS (result);
  STRIP_USELESS_TYPE_CONVERSION (result);

  if (result != rhs)
    {
      changed = true;
      set_rhs (stmt_p, result);
    }

  return changed;
}

1959/* Get the main expression from statement STMT. */
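/* For example, for the assignment 'a = b + 1' this returns 'b + 1'; for
   'if (x > 0) ...' it returns the predicate 'x > 0'; a statement with no
   embedded expression is returned as-is.  */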
1960
1961static tree
1962get_rhs (tree stmt)
1963{
1964 enum tree_code code = TREE_CODE (stmt);
1965
1966 if (code == MODIFY_EXPR)
1967 return TREE_OPERAND (stmt, 1);
  else if (code == COND_EXPR)
1969 return COND_EXPR_COND (stmt);
1970 else if (code == SWITCH_EXPR)
1971 return SWITCH_COND (stmt);
1972 else if (code == RETURN_EXPR)
1973 {
1974 if (!TREE_OPERAND (stmt, 0))
1975 return NULL_TREE;
1976 if (TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
1977 return TREE_OPERAND (TREE_OPERAND (stmt, 0), 1);
1978 else
1979 return TREE_OPERAND (stmt, 0);
1980 }
1981 else if (code == GOTO_EXPR)
1982 return GOTO_DESTINATION (stmt);
1983 else if (code == LABEL_EXPR)
1984 return LABEL_EXPR_LABEL (stmt);
1985 else
1986 return stmt;
1987}
1988
1989
1990/* Set the main expression of *STMT_P to EXPR. */
1991
1992static void
1993set_rhs (tree *stmt_p, tree expr)
1994{
1995 tree stmt = *stmt_p;
1996 enum tree_code code = TREE_CODE (stmt);
1997
1998 if (code == MODIFY_EXPR)
1999 TREE_OPERAND (stmt, 1) = expr;
2000 else if (code == COND_EXPR)
2001 COND_EXPR_COND (stmt) = expr;
2002 else if (code == SWITCH_EXPR)
2003 SWITCH_COND (stmt) = expr;
2004 else if (code == RETURN_EXPR)
2005 {
2006 if (TREE_OPERAND (stmt, 0)
2007 && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
2008 TREE_OPERAND (TREE_OPERAND (stmt, 0), 1) = expr;
2009 else
2010 TREE_OPERAND (stmt, 0) = expr;
2011 }
2012 else if (code == GOTO_EXPR)
2013 GOTO_DESTINATION (stmt) = expr;
2014 else if (code == LABEL_EXPR)
2015 LABEL_EXPR_LABEL (stmt) = expr;
2016 else
2017 {
2018 /* Replace the whole statement with EXPR. If EXPR has no side
2019 effects, then replace *STMT_P with an empty statement. */
2020 stmt_ann_t ann = stmt_ann (stmt);
2021 *stmt_p = TREE_SIDE_EFFECTS (expr) ? expr : build_empty_stmt ();
2022 (*stmt_p)->common.ann = (tree_ann) ann;
2023
2024 if (TREE_SIDE_EFFECTS (expr))
2025 {
2026 def_optype defs;
2027 vdef_optype vdefs;
2028 size_t i;
2029
          /* Fix all the SSA_NAMEs defined by *STMT_P to point to its new
             replacement.  */
2032 defs = DEF_OPS (ann);
2033 for (i = 0; i < NUM_DEFS (defs); i++)
2034 {
2035 tree var = DEF_OP (defs, i);
2036 if (TREE_CODE (var) == SSA_NAME)
2037 SSA_NAME_DEF_STMT (var) = *stmt_p;
2038 }
2039
2040 vdefs = VDEF_OPS (ann);
2041 for (i = 0; i < NUM_VDEFS (vdefs); i++)
2042 {
2043 tree var = VDEF_RESULT (vdefs, i);
2044 if (TREE_CODE (var) == SSA_NAME)
2045 SSA_NAME_DEF_STMT (var) = *stmt_p;
2046 }
2047 }
2048 }
2049}
2050
2051
2052/* Return a default value for variable VAR using the following rules:
2053
2054 1- Global and static variables are considered VARYING, unless they are
2055 declared const.
2056
   2- Function arguments and volatile variables are considered VARYING.
2058
2059 3- Any other value is considered UNDEFINED. This is useful when
2060 considering PHI nodes. PHI arguments that are undefined do not
2061 change the constant value of the PHI node, which allows for more
2062 constants to be propagated. */
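/* A few illustrative cases (the declarations are hypothetical):

     static const int ten = 10;    ten is CONSTANT with value 10 (rule 1)
     int foo (int n) { ... }       n is VARYING (rule 2)

   whereas a local SSA name defined by an ordinary assignment or PHI
   starts out UNDEFINED (rule 3) until its defining statement is
   simulated.  */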
2063
2064static value
2065get_default_value (tree var)
2066{
2067 value val;
2068 tree sym;
2069
2070 if (TREE_CODE (var) == SSA_NAME)
2071 sym = SSA_NAME_VAR (var);
2072 else
2073 {
2074#ifdef ENABLE_CHECKING
2075 if (!DECL_P (var))
2076 abort ();
2077#endif
2078 sym = var;
2079 }
2080
2081 val.lattice_val = UNDEFINED;
2082 val.const_val = NULL_TREE;
2083
2084 if (TREE_CODE (sym) == PARM_DECL || TREE_THIS_VOLATILE (sym))
2085 {
2086 /* Function arguments and volatile variables are considered VARYING. */
2087 val.lattice_val = VARYING;
2088 }
2089 else if (decl_function_context (sym) != current_function_decl
2090 || TREE_STATIC (sym))
2091 {
2092 /* Globals and static variables are considered VARYING, unless they
2093 are declared 'const'. */
2094 val.lattice_val = VARYING;
2095
2096 if (TREE_READONLY (sym)
2097 && DECL_INITIAL (sym)
2098 && is_gimple_min_invariant (DECL_INITIAL (sym)))
2099 {
2100 val.lattice_val = CONSTANT;
2101 val.const_val = DECL_INITIAL (sym);
2102 }
2103 }
2104 else
2105 {
2106 enum tree_code code;
2107 tree stmt = SSA_NAME_DEF_STMT (var);
2108
2109 if (!IS_EMPTY_STMT (stmt))
2110 {
2111 code = TREE_CODE (stmt);
2112 if (code != MODIFY_EXPR && code != PHI_NODE)
2113 val.lattice_val = VARYING;
2114 }
2115 }
2116
2117 return val;
2118}
2119
2120
2121/* Fold builtin call FN in statement STMT. If it cannot be folded into a
2122 constant, return NULL_TREE. Otherwise, return its constant value. */
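/* For example (with an illustrative SSA name), a statement such as

     a_1 = strlen ("hello");

   is folded here, turning the right-hand side into the constant 5.  */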
2123
2124static tree
2125ccp_fold_builtin (tree stmt, tree fn)
2126{
2127 tree result, strlen_val[2];
2128 tree arglist = TREE_OPERAND (fn, 1), a;
2129 tree callee = get_callee_fndecl (fn);
2130 bitmap visited;
2131 int strlen_arg, i;
2132
2133 /* Ignore MD builtins. */
2134 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
2135 return NULL_TREE;
2136
2137 /* First try the generic builtin folder. If that succeeds, return the
2138 result directly. */
2139 result = fold_builtin (fn);
2140 if (result)
2141 return result;
2142
2143 /* If the builtin could not be folded, and it has no argument list,
2144 we're done. */
2145 if (!arglist)
2146 return NULL_TREE;
2147
2148 /* Limit the work only for builtins we know how to simplify. */
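  /* STRLEN_ARG is used as a bit mask below: bit N set means that we try
     to compute the string length of argument N.  For strcpy and strncpy
     it is the source string (the second argument) whose length is of
     interest.  */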
2149 switch (DECL_FUNCTION_CODE (callee))
2150 {
2151 case BUILT_IN_STRLEN:
2152 case BUILT_IN_FPUTS:
2153 case BUILT_IN_FPUTS_UNLOCKED:
2154 strlen_arg = 1;
2155 break;
2156 case BUILT_IN_STRCPY:
2157 case BUILT_IN_STRNCPY:
2158 strlen_arg = 2;
2159 break;
2160 default:
2161 return NULL_TREE;
2162 }
2163
2164 /* Try to use the dataflow information gathered by the CCP process. */
2165 visited = BITMAP_XMALLOC ();
2166
2167 memset (strlen_val, 0, sizeof (strlen_val));
2168 for (i = 0, a = arglist;
2169 strlen_arg;
2170 i++, strlen_arg >>= 1, a = TREE_CHAIN (a))
2171 if (strlen_arg & 1)
2172 {
2173 bitmap_clear (visited);
2174 if (!get_strlen (TREE_VALUE (a), &strlen_val[i], visited))
2175 strlen_val[i] = NULL_TREE;
2176 }
2177
2178 BITMAP_XFREE (visited);
2179
2180 /* FIXME. All this code looks dangerous in the sense that it might
2181 create non-gimple expressions. */
2182 switch (DECL_FUNCTION_CODE (callee))
2183 {
2184 case BUILT_IN_STRLEN:
2185 /* Convert from the internal "sizetype" type to "size_t". */
2186 if (strlen_val[0]
2187 && size_type_node)
2188 {
2189 tree new = convert (size_type_node, strlen_val[0]);
2190
          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we cannot use the result.  */
2193 if (is_gimple_val (new)
2194 || (is_gimple_cast (new)
2195 && is_gimple_val (TREE_OPERAND (new, 0))))
2196 return new;
2197 else
2198 return NULL_TREE;
2199 }
2200 return strlen_val[0];
    case BUILT_IN_STRCPY:
      if (strlen_val[1]
          && is_gimple_val (strlen_val[1]))
        return simplify_builtin_strcpy (arglist, strlen_val[1]);
      break;
    case BUILT_IN_STRNCPY:
      if (strlen_val[1]
          && is_gimple_val (strlen_val[1]))
        return simplify_builtin_strncpy (arglist, strlen_val[1]);
      break;
2209 case BUILT_IN_FPUTS:
2210 return simplify_builtin_fputs (arglist,
2211 TREE_CODE (stmt) != MODIFY_EXPR, 0,
2212 strlen_val[0]);
2213 case BUILT_IN_FPUTS_UNLOCKED:
2214 return simplify_builtin_fputs (arglist,
2215 TREE_CODE (stmt) != MODIFY_EXPR, 1,
2216 strlen_val[0]);
2217
2218 default:
2219 abort ();
2220 }
2221
2222 return NULL_TREE;
2223}
2224
2225
/* Compute the string length of ARG and store it in *LENGTH.  If ARG is an
   SSA name variable, follow its use-def chains.  If *LENGTH is already set
   and its value is not equal to the length we determine, or if we are
   unable to determine the length, return false.  VISITED is a bitmap of
   visited variables.  */
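/* For example (SSA names illustrative), given

     s_2 = PHI <"foo", s_1>
     n_3 = strlen (s_2)

   a length of 3 is reported for s_2 only if s_1 can also be shown to
   have length 3.  */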
2230
2231static bool
2232get_strlen (tree arg, tree *length, bitmap visited)
2233{
2234 tree var, def_stmt, val;
2235
2236 if (TREE_CODE (arg) != SSA_NAME)
2237 {
2238 val = c_strlen (arg, 1);
2239 if (!val)
2240 return false;
2241
2242 if (*length && simple_cst_equal (val, *length) != 1)
2243 return false;
2244
2245 *length = val;
2246 return true;
2247 }
2248
  /* If we have already visited ARG, we have found a cycle in the use-def
     chain; stop here to avoid infinite recursion.  */
2250 if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
2251 return true;
2252 bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
2253
2254 var = arg;
2255 def_stmt = SSA_NAME_DEF_STMT (var);
2256
2257 switch (TREE_CODE (def_stmt))
2258 {
2259 case MODIFY_EXPR:
2260 {
2261 tree len, rhs;
2262
2263 /* The RHS of the statement defining VAR must either have a
2264 constant length or come from another SSA_NAME with a constant
2265 length. */
2266 rhs = TREE_OPERAND (def_stmt, 1);
2267 STRIP_NOPS (rhs);
2268 if (TREE_CODE (rhs) == SSA_NAME)
2269 return get_strlen (rhs, length, visited);
2270
2271 /* See if the RHS is a constant length. */
2272 len = c_strlen (rhs, 1);
2273 if (len)
2274 {
2275 if (*length && simple_cst_equal (len, *length) != 1)
2276 return false;
2277
2278 *length = len;
2279 return true;
2280 }
2281
2282 break;
2283 }
2284
2285 case PHI_NODE:
2286 {
2287 /* All the arguments of the PHI node must have the same constant
2288 length. */
2289 int i;
2290
2291 for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
2292 {
2293 tree arg = PHI_ARG_DEF (def_stmt, i);
2294
2295 /* If this PHI has itself as an argument, we cannot
2296 determine the string length of this argument. However,
2297 if we can find a constant string length for the other
2298 PHI args then we can still be sure that this is a
2299 constant string length. So be optimistic and just
2300 continue with the next argument. */
2301 if (arg == PHI_RESULT (def_stmt))
2302 continue;
2303
2304 if (!get_strlen (arg, length, visited))
2305 return false;
2306 }
2307
2308 return true;
2309 }
2310
2311 default:
2312 break;
2313 }
2314
2316 return false;
2317}
2318
2319\f
2320/* A simple pass that attempts to fold all builtin functions. This pass
2321 is run after we've propagated as many constants as we can. */
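/* For instance, a call to __builtin_constant_p that survived constant
   propagation, as in 'x_1 = __builtin_constant_p (y_2)' (names
   illustrative), is resolved to 0 by this pass, since by now the
   argument evidently is not a compile-time constant.  */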
2322
2323static void
2324execute_fold_all_builtins (void)
2325{
2326 basic_block bb;
2327 FOR_EACH_BB (bb)
2328 {
2329 block_stmt_iterator i;
2330 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
2331 {
2332 tree *stmtp = bsi_stmt_ptr (i);
2333 tree call = get_rhs (*stmtp);
2334 tree callee, result;
2335
2336 if (!call || TREE_CODE (call) != CALL_EXPR)
2337 continue;
2338 callee = get_callee_fndecl (call);
2339 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
2340 continue;
2341
2342 result = ccp_fold_builtin (*stmtp, call);
2343 if (!result)
2344 switch (DECL_FUNCTION_CODE (callee))
2345 {
2346 case BUILT_IN_CONSTANT_P:
2347 /* Resolve __builtin_constant_p. If it hasn't been
2348 folded to integer_one_node by now, it's fairly
2349 certain that the value simply isn't constant. */
2350 result = integer_zero_node;
2351 break;
2352
2353 default:
2354 continue;
2355 }
2356
2357 if (dump_file && (dump_flags & TDF_DETAILS))
2358 {
2359 fprintf (dump_file, "Simplified\n ");
2360 print_generic_stmt (dump_file, *stmtp, dump_flags);
2361 }
2362
2363 set_rhs (stmtp, result);
2364 modify_stmt (*stmtp);
2365
2366 if (dump_file && (dump_flags & TDF_DETAILS))
2367 {
2368 fprintf (dump_file, "to\n ");
2369 print_generic_stmt (dump_file, *stmtp, dump_flags);
2370 fprintf (dump_file, "\n");
2371 }
2372 }
2373 }
2374}
2375
2376struct tree_opt_pass pass_fold_builtins =
2377{
2378 "fab", /* name */
2379 NULL, /* gate */
2380 execute_fold_all_builtins, /* execute */
2381 NULL, /* sub */
2382 NULL, /* next */
2383 0, /* static_pass_number */
2384 0, /* tv_id */
2385 PROP_cfg | PROP_ssa, /* properties_required */
2386 0, /* properties_provided */
2387 0, /* properties_destroyed */
2388 0, /* todo_flags_start */
2389 TODO_dump_func | TODO_verify_ssa /* todo_flags_finish */
2390};
2391
2392
2393#include "gt-tree-ssa-ccp.h"