1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010, 2011 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
33 #include "gimple-fold.h"
35 /* Return true when DECL can be referenced from current unit.
36 We can get declarations that are not possible to reference for
39 1) When analyzing C++ virtual tables.
40 C++ virtual tables do have known constructors even
41 when they are keyed to other compilation unit.
42 Those tables can contain pointers to methods and vars
43 in other units. Those methods have both STATIC and EXTERNAL
45 2) In WHOPR mode devirtualization might lead to reference
46 to method that was partitioned elsehwere.
47 In this case we have static VAR_DECL or FUNCTION_DECL
48 that has no corresponding callgraph/varpool node
50 3) COMDAT functions referred by external vtables that
51 we devirtualize only during final copmilation stage.
52 At this time we already decided that we will not output
53 the function body and thus we can't reference the symbol
/* NOTE(review): the embedded original line numbers are not contiguous
   (62 -> 64, 87 -> 89, ...), so braces and the early-return statements of
   this predicate appear to have been lost in extraction.  Verify against
   upstream gimple-fold.c before relying on the control flow shown here.  */
57 can_refer_decl_in_current_unit_p (tree decl
)
59 struct varpool_node
*vnode
;
60 struct cgraph_node
*node
;
/* Non-static, non-external decls belong to the current unit; presumably
   referencing them is always fine -- the return is missing here.  */
62 if (!TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
))
64 /* External flag is set, so we deal with C++ reference
65 to static object from other file. */
66 if (DECL_EXTERNAL (decl
) && TREE_STATIC (decl
)
67 && TREE_CODE (decl
) == VAR_DECL
)
69 /* Just be sure it is not big in frontend setting
70 flags incorrectly. Those variables should never
72 gcc_checking_assert (!(vnode
= varpool_get_node (decl
))
73 || !vnode
->finalized
);
76 /* When function is public, we always can introduce new reference.
77 Exception are the COMDAT functions where introducing a direct
78 reference imply need to include function body in the curren tunit. */
79 if (TREE_PUBLIC (decl
) && !DECL_COMDAT (decl
))
81 /* We are not at ltrans stage; so don't worry about WHOPR.
82 Also when still gimplifying all referred comdat functions will be
84 ??? as observed in PR20991 for already optimized out comdat virtual functions
85 we may not neccesarily give up because the copy will be output elsewhere when
86 corresponding vtable is output. */
87 if (!flag_ltrans
&& (!DECL_COMDAT (decl
) || !cgraph_function_flags_ready
))
89 /* If we already output the function body, we are safe. */
90 if (TREE_ASM_WRITTEN (decl
))
92 if (TREE_CODE (decl
) == FUNCTION_DECL
)
94 node
= cgraph_get_node (decl
);
95 /* Check that we still have function body and that we didn't took
96 the decision to eliminate offline copy of the function yet.
97 The second is important when devirtualization happens during final
98 compilation stage when making a new reference no longer makes callee
100 if (!node
|| !node
->analyzed
|| node
->global
.inlined_to
)
103 else if (TREE_CODE (decl
) == VAR_DECL
)
105 vnode
= varpool_get_node (decl
);
/* A variable with no varpool node or one that was never finalized cannot
   be referenced; the corresponding return statement is missing here.  */
106 if (!vnode
|| !vnode
->finalized
)
76 /* When function is public, we always can introduce new reference.
77 Exception are the COMDAT functions where introducing a direct
78 reference imply need to include function body in the curren tunit. */
79 if (TREE_PUBLIC (decl
) && !DECL_COMDAT (decl
))
81 /* We are not at ltrans stage; so don't worry about WHOPR.
82 Also when still gimplifying all referred comdat functions will be
84 ??? as observed in PR20991 for already optimized out comdat virtual functions
85 we may not neccesarily give up because the copy will be output elsewhere when
86 corresponding vtable is output. */
87 if (!flag_ltrans
&& (!DECL_COMDAT (decl
) || !cgraph_function_flags_ready
))
89 /* If we already output the function body, we are safe. */
90 if (TREE_ASM_WRITTEN (decl
))
92 if (TREE_CODE (decl
) == FUNCTION_DECL
)
94 node
= cgraph_get_node (decl
);
95 /* Check that we still have function body and that we didn't took
96 the decision to eliminate offline copy of the function yet.
97 The second is important when devirtualization happens during final
98 compilation stage when making a new reference no longer makes callee
100 if (!node
|| !node
->analyzed
|| node
->global
.inlined_to
)
103 else if (TREE_CODE (decl
) == VAR_DECL
)
105 vnode
= varpool_get_node (decl
);
106 if (!vnode
|| !vnode
->finalized
)
112 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
113 acceptable form for is_gimple_min_invariant. */
/* NOTE(review): embedded line numbers skip (113 -> 116, 144 -> 149), so the
   return type, surrounding braces and final return were dropped by the
   extraction; compare with upstream gimple-fold.c.  */
116 canonicalize_constructor_val (tree cval
)
/* Fold POINTER_PLUS_EXPR of an invariant pointer and a constant offset into
   an ADDR_EXPR of a MEM_REF, the canonical GIMPLE form for such addresses.  */
119 if (TREE_CODE (cval
) == POINTER_PLUS_EXPR
120 && TREE_CODE (TREE_OPERAND (cval
, 1)) == INTEGER_CST
)
122 tree ptr
= TREE_OPERAND (cval
, 0);
123 if (is_gimple_min_invariant (ptr
))
124 cval
= build1_loc (EXPR_LOCATION (cval
),
125 ADDR_EXPR
, TREE_TYPE (ptr
),
126 fold_build2 (MEM_REF
, TREE_TYPE (TREE_TYPE (ptr
)),
128 fold_convert (ptr_type_node
,
129 TREE_OPERAND (cval
, 1))));
131 if (TREE_CODE (cval
) == ADDR_EXPR
)
133 tree base
= get_base_address (TREE_OPERAND (cval
, 0));
/* Taking the address of a var/function that cannot be referenced from
   this unit invalidates the constant; the condition's first arm and the
   bail-out statement are missing from this extraction.  */
136 && (TREE_CODE (base
) == VAR_DECL
137 || TREE_CODE (base
) == FUNCTION_DECL
)
138 && !can_refer_decl_in_current_unit_p (base
))
140 if (cfun
&& base
&& TREE_CODE (base
) == VAR_DECL
)
141 add_referenced_var (base
);
142 /* Fixup types in global initializers. */
143 if (TREE_TYPE (TREE_TYPE (cval
)) != TREE_TYPE (TREE_OPERAND (cval
, 0)))
144 cval
= build_fold_addr_expr (TREE_OPERAND (cval
, 0));
149 /* If SYM is a constant variable with known value, return the value.
150 NULL_TREE is returned otherwise. */
/* NOTE(review): lines are missing here too (161 -> 166, 172 -> 180);
   the NULL_TREE fall-through returns are not visible in this extraction.  */
153 get_symbol_constant_value (tree sym
)
155 if (const_value_known_p (sym
))
157 tree val
= DECL_INITIAL (sym
);
/* Canonicalize the initializer so is_gimple_min_invariant can accept it.  */
160 val
= canonicalize_constructor_val (val
);
161 if (val
&& is_gimple_min_invariant (val
))
166 /* Variables declared 'const' without an initializer
167 have zero as the initializer if they may not be
168 overridden at link or run time. */
170 && (INTEGRAL_TYPE_P (TREE_TYPE (sym
))
171 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym
))))
172 return build_zero_cst (TREE_TYPE (sym
));
180 /* Subroutine of fold_stmt. We perform several simplifications of the
181 memory reference tree EXPR and make sure to re-gimplify them properly
182 after propagation of constant addresses. IS_LHS is true if the
183 reference is supposed to be an lvalue. */
/* NOTE(review): this extraction lost the declaration of T (presumably
   `tree *t = &expr;`), the `result` declaration, and several returns --
   embedded numbering skips 186 -> 191, 206 -> 208, etc.  Verify against
   upstream gimple-fold.c.  */
186 maybe_fold_reference (tree expr
, bool is_lhs
)
/* Constant-fold wrappers whose single operand is already a constant.  */
191 if ((TREE_CODE (expr
) == VIEW_CONVERT_EXPR
192 || TREE_CODE (expr
) == REALPART_EXPR
193 || TREE_CODE (expr
) == IMAGPART_EXPR
)
194 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
195 return fold_unary_loc (EXPR_LOCATION (expr
),
198 TREE_OPERAND (expr
, 0));
199 else if (TREE_CODE (expr
) == BIT_FIELD_REF
200 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
201 return fold_ternary_loc (EXPR_LOCATION (expr
),
204 TREE_OPERAND (expr
, 0),
205 TREE_OPERAND (expr
, 1),
206 TREE_OPERAND (expr
, 2));
/* Walk down to the base of the handled-component chain.  */
208 while (handled_component_p (*t
))
209 t
= &TREE_OPERAND (*t
, 0);
211 /* Canonicalize MEM_REFs invariant address operand. Do this first
212 to avoid feeding non-canonical MEM_REFs elsewhere. */
213 if (TREE_CODE (*t
) == MEM_REF
214 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t
, 0)))
/* Preserve volatility across the re-fold; fold_binary does not carry it.  */
216 bool volatile_p
= TREE_THIS_VOLATILE (*t
);
217 tree tem
= fold_binary (MEM_REF
, TREE_TYPE (*t
),
218 TREE_OPERAND (*t
, 0),
219 TREE_OPERAND (*t
, 1));
222 TREE_THIS_VOLATILE (tem
) = volatile_p
;
/* Re-run on the updated expression to catch follow-on opportunities.  */
224 tem
= maybe_fold_reference (expr
, is_lhs
);
232 && (result
= fold_const_aggregate_ref (expr
))
233 && is_gimple_min_invariant (result
))
236 /* Fold back MEM_REFs to reference trees. */
237 if (TREE_CODE (*t
) == MEM_REF
238 && TREE_CODE (TREE_OPERAND (*t
, 0)) == ADDR_EXPR
239 && integer_zerop (TREE_OPERAND (*t
, 1))
240 && (TREE_THIS_VOLATILE (*t
)
241 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t
, 0), 0)))
242 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t
, 1)))
243 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t
))
244 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t
, 1)))))
245 /* We have to look out here to not drop a required conversion
246 from the rhs to the lhs if is_lhs, but we don't have the
247 rhs here to verify that. Thus require strict type
249 && types_compatible_p (TREE_TYPE (*t
),
250 TREE_TYPE (TREE_OPERAND
251 (TREE_OPERAND (*t
, 0), 0))))
/* *&decl collapses to just the decl.  */
254 *t
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
255 tem
= maybe_fold_reference (expr
, is_lhs
);
260 else if (TREE_CODE (*t
) == TARGET_MEM_REF
)
262 tree tem
= maybe_fold_tmr (*t
);
266 tem
= maybe_fold_reference (expr
, is_lhs
);
277 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
278 replacement rhs for the statement or NULL_TREE if no simplification
279 could be made. It is assumed that the operands have been previously
/* NOTE(review): the extraction dropped many lines (embedded numbering jumps
   e.g. 279 -> 283, 348 -> 355), including the switch braces, several `return
   result;` statements and `break`s.  The visible structure is a switch over
   get_gimple_rhs_class with one case per RHS class; verify the full bodies
   against upstream gimple-fold.c.  */
283 fold_gimple_assign (gimple_stmt_iterator
*si
)
285 gimple stmt
= gsi_stmt (*si
);
286 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
287 location_t loc
= gimple_location (stmt
);
289 tree result
= NULL_TREE
;
291 switch (get_gimple_rhs_class (subcode
))
293 case GIMPLE_SINGLE_RHS
:
295 tree rhs
= gimple_assign_rhs1 (stmt
);
/* A plain memory reference on the RHS: try reference folding.  */
297 if (REFERENCE_CLASS_P (rhs
))
298 return maybe_fold_reference (rhs
, false);
300 else if (TREE_CODE (rhs
) == ADDR_EXPR
)
302 tree ref
= TREE_OPERAND (rhs
, 0);
303 tree tem
= maybe_fold_reference (ref
, true);
/* &MEM[p, 0] folds back to p (converted to the RHS type).  */
305 && TREE_CODE (tem
) == MEM_REF
306 && integer_zerop (TREE_OPERAND (tem
, 1)))
307 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (tem
, 0));
309 result
= fold_convert (TREE_TYPE (rhs
),
310 build_fold_addr_expr_loc (loc
, tem
));
311 else if (TREE_CODE (ref
) == MEM_REF
312 && integer_zerop (TREE_OPERAND (ref
, 1)))
313 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (ref
, 0));
316 else if (TREE_CODE (rhs
) == CONSTRUCTOR
317 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
318 && (CONSTRUCTOR_NELTS (rhs
)
319 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
321 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
/* Only all-constant elements qualify; a non-constant element aborts the
   fold (the bail-out statement is missing from this extraction).  */
325 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
326 if (TREE_CODE (val
) != INTEGER_CST
327 && TREE_CODE (val
) != REAL_CST
328 && TREE_CODE (val
) != FIXED_CST
)
331 return build_vector_from_ctor (TREE_TYPE (rhs
),
332 CONSTRUCTOR_ELTS (rhs
));
335 else if (DECL_P (rhs
))
336 return unshare_expr (get_symbol_constant_value (rhs
));
338 /* If we couldn't fold the RHS, hand over to the generic
340 if (result
== NULL_TREE
)
343 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
344 that may have been added by fold, and "useless" type
345 conversions that might now be apparent due to propagation. */
346 STRIP_USELESS_TYPE_CONVERSION (result
);
348 if (result
!= rhs
&& valid_gimple_rhs_p (result
))
355 case GIMPLE_UNARY_RHS
:
357 tree rhs
= gimple_assign_rhs1 (stmt
);
359 result
= fold_unary_loc (loc
, subcode
, gimple_expr_type (stmt
), rhs
);
362 /* If the operation was a conversion do _not_ mark a
363 resulting constant with TREE_OVERFLOW if the original
364 constant was not. These conversions have implementation
365 defined behavior and retaining the TREE_OVERFLOW flag
366 here would confuse later passes such as VRP. */
367 if (CONVERT_EXPR_CODE_P (subcode
)
368 && TREE_CODE (result
) == INTEGER_CST
369 && TREE_CODE (rhs
) == INTEGER_CST
)
370 TREE_OVERFLOW (result
) = TREE_OVERFLOW (rhs
);
372 STRIP_USELESS_TYPE_CONVERSION (result
);
373 if (valid_gimple_rhs_p (result
))
379 case GIMPLE_BINARY_RHS
:
380 /* Try to canonicalize for boolean-typed X the comparisons
381 X == 0, X == 1, X != 0, and X != 1. */
382 if (gimple_assign_rhs_code (stmt
) == EQ_EXPR
383 || gimple_assign_rhs_code (stmt
) == NE_EXPR
)
385 tree lhs
= gimple_assign_lhs (stmt
);
386 tree op1
= gimple_assign_rhs1 (stmt
);
387 tree op2
= gimple_assign_rhs2 (stmt
);
388 tree type
= TREE_TYPE (op1
);
390 /* Check whether the comparison operands are of the same boolean
391 type as the result type is.
392 Check that second operand is an integer-constant with value
394 if (TREE_CODE (op2
) == INTEGER_CST
395 && (integer_zerop (op2
) || integer_onep (op2
))
396 && useless_type_conversion_p (TREE_TYPE (lhs
), type
))
398 enum tree_code cmp_code
= gimple_assign_rhs_code (stmt
);
399 bool is_logical_not
= false;
401 /* X == 0 and X != 1 is a logical-not.of X
402 X == 1 and X != 0 is X */
403 if ((cmp_code
== EQ_EXPR
&& integer_zerop (op2
))
404 || (cmp_code
== NE_EXPR
&& integer_onep (op2
)))
405 is_logical_not
= true;
/* X == 1 / X != 0 simplifies to X itself; the assignment for that
   branch is missing from this extraction.  */
407 if (is_logical_not
== false)
409 /* Only for one-bit precision typed X the transformation
410 !X -> ~X is valied. */
411 else if (TYPE_PRECISION (type
) == 1)
412 result
= build1_loc (gimple_location (stmt
), BIT_NOT_EXPR
,
414 /* Otherwise we use !X -> X ^ 1. */
416 result
= build2_loc (gimple_location (stmt
), BIT_XOR_EXPR
,
417 type
, op1
, build_int_cst (type
, 1));
423 result
= fold_binary_loc (loc
, subcode
,
424 TREE_TYPE (gimple_assign_lhs (stmt
)),
425 gimple_assign_rhs1 (stmt
),
426 gimple_assign_rhs2 (stmt
));
430 STRIP_USELESS_TYPE_CONVERSION (result
);
431 if (valid_gimple_rhs_p (result
))
436 case GIMPLE_TERNARY_RHS
:
437 /* Try to fold a conditional expression. */
438 if (gimple_assign_rhs_code (stmt
) == COND_EXPR
)
440 tree op0
= gimple_assign_rhs1 (stmt
);
443 location_t cond_loc
= gimple_location (stmt
);
445 if (COMPARISON_CLASS_P (op0
))
/* Defer overflow warnings so we only warn if the fold is kept.  */
447 fold_defer_overflow_warnings ();
448 tem
= fold_binary_loc (cond_loc
,
449 TREE_CODE (op0
), TREE_TYPE (op0
),
450 TREE_OPERAND (op0
, 0),
451 TREE_OPERAND (op0
, 1));
452 /* This is actually a conditional expression, not a GIMPLE
453 conditional statement, however, the valid_gimple_rhs_p
454 test still applies. */
455 set
= (tem
&& is_gimple_condexpr (tem
)
&& valid_gimple_rhs_p (tem
));
457 fold_undefer_overflow_warnings (set
, stmt
, 0);
459 else if (is_gimple_min_invariant (op0
))
468 result
= fold_build3_loc (cond_loc
, COND_EXPR
,
469 TREE_TYPE (gimple_assign_lhs (stmt
)), tem
,
470 gimple_assign_rhs2 (stmt
),
471 gimple_assign_rhs3 (stmt
));
475 result
= fold_ternary_loc (loc
, subcode
,
476 TREE_TYPE (gimple_assign_lhs (stmt
)),
477 gimple_assign_rhs1 (stmt
),
478 gimple_assign_rhs2 (stmt
),
479 gimple_assign_rhs3 (stmt
));
483 STRIP_USELESS_TYPE_CONVERSION (result
);
484 if (valid_gimple_rhs_p (result
))
/* Invalid RHS class: presumably gcc_unreachable -- body missing here.  */
489 case GIMPLE_INVALID_RHS
:
496 /* Attempt to fold a conditional statement. Return true if any changes were
497 made. We only attempt to fold the condition expression, and do not perform
498 any transformation that would require alteration of the cfg. It is
499 assumed that the operands have been previously folded. */
/* NOTE(review): the return type, braces and the `return true;`/`return
   false;` statements were dropped by the extraction (numbering jumps
   502 -> 504, 508 -> 512).  */
502 fold_gimple_cond (gimple stmt
)
504 tree result
= fold_binary_loc (gimple_location (stmt
),
505 gimple_cond_code (stmt
),
507 gimple_cond_lhs (stmt
),
508 gimple_cond_rhs (stmt
));
512 STRIP_USELESS_TYPE_CONVERSION (result
);
/* Only install the folded form when it is still a valid GIMPLE cond.  */
513 if (is_gimple_condexpr (result
) && valid_gimple_rhs_p (result
))
515 gimple_cond_set_condition_from_tree (stmt
, result
);
523 /* Convert EXPR into a GIMPLE value suitable for substitution on the
524 RHS of an assignment. Insert the necessary statements before
525 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
526 is replaced. If the call is expected to produces a result, then it
527 is replaced by an assignment of the new RHS to the result variable.
528 If the result is to be ignored, then the call is replaced by a
529 GIMPLE_NOP. A proper VDEF chain is retained by making the first
530 VUSE and the last VDEF of the whole sequence be the same as the replaced
531 statement and using new SSA names for stores in between. */
/* NOTE(review): declarations of `lhs', `reaching_vuse', `last' and many
   braces/returns are missing (numbering jumps 534 -> 537, 541 -> 543,
   etc.); verify against upstream gimple-fold.c.  */
534 gimplify_and_update_call_from_tree (gimple_stmt_iterator
*si_p
, tree expr
)
537 tree tmp
= NULL_TREE
; /* Silence warning. */
538 gimple stmt
, new_stmt
;
539 gimple_stmt_iterator i
;
540 gimple_seq stmts
= gimple_seq_alloc();
541 struct gimplify_ctx gctx
;
/* Tracks the last store in the gimplified sequence so its VDEF can be
   wired up to preserve the virtual operand chain.  */
543 gimple laststore
= NULL
;
546 stmt
= gsi_stmt (*si_p
);
548 gcc_assert (is_gimple_call (stmt
));
550 lhs
= gimple_call_lhs (stmt
);
551 reaching_vuse
= gimple_vuse (stmt
);
553 push_gimplify_context (&gctx
);
554 gctx
.into_ssa
= gimple_in_ssa_p (cfun
);
556 if (lhs
== NULL_TREE
)
558 gimplify_and_add (expr
, &stmts
);
559 /* We can end up with folding a memcpy of an empty class assignment
560 which gets optimized away by C++ gimplification. */
561 if (gimple_seq_empty_p (stmts
))
563 pop_gimplify_context (NULL
);
564 if (gimple_in_ssa_p (cfun
))
566 unlink_stmt_vdef (stmt
);
569 gsi_remove (si_p
, true);
574 tmp
= get_initialized_tmp_var (expr
, &stmts
, NULL
);
576 pop_gimplify_context (NULL
);
578 if (gimple_has_location (stmt
))
579 annotate_all_with_location (stmts
, gimple_location (stmt
));
581 /* The replacement can expose previously unreferenced variables. */
582 for (i
= gsi_start (stmts
); !gsi_end_p (i
); gsi_next (&i
))
586 gsi_insert_before (si_p
, last
, GSI_NEW_STMT
);
589 new_stmt
= gsi_stmt (i
);
590 if (gimple_in_ssa_p (cfun
))
592 find_new_referenced_vars (new_stmt
);
593 mark_symbols_for_renaming (new_stmt
);
595 /* If the new statement has a VUSE, update it with exact SSA name we
596 know will reach this one. */
597 if (gimple_vuse (new_stmt
))
599 /* If we've also seen a previous store create a new VDEF for
600 the latter one, and make that the new reaching VUSE. */
603 reaching_vuse
= make_ssa_name (gimple_vop (cfun
), laststore
);
604 gimple_set_vdef (laststore
, reaching_vuse
);
605 update_stmt (laststore
);
608 gimple_set_vuse (new_stmt
, reaching_vuse
);
609 gimple_set_modified (new_stmt
, true);
/* Remember the statement if it is a store (non-register LHS).  */
611 if (gimple_assign_single_p (new_stmt
)
612 && !is_gimple_reg (gimple_assign_lhs (new_stmt
)))
614 laststore
= new_stmt
;
619 if (lhs
== NULL_TREE
)
621 /* If we replace a call without LHS that has a VDEF and our new
622 sequence ends with a store we must make that store have the same
623 vdef in order not to break the sequencing. This can happen
624 for instance when folding memcpy calls into assignments. */
625 if (gimple_vdef (stmt
) && laststore
)
627 gimple_set_vdef (laststore
, gimple_vdef (stmt
));
628 if (TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
629 SSA_NAME_DEF_STMT (gimple_vdef (stmt
)) = laststore
;
630 update_stmt (laststore
);
632 else if (gimple_in_ssa_p (cfun
))
634 unlink_stmt_vdef (stmt
);
643 gsi_insert_before (si_p
, last
, GSI_NEW_STMT
);
/* When the result is a register the last store takes over the call's
   VDEF; otherwise a fresh VDEF name is threaded through below.  */
646 if (laststore
&& is_gimple_reg (lhs
))
648 gimple_set_vdef (laststore
, gimple_vdef (stmt
));
649 update_stmt (laststore
);
650 if (TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
651 SSA_NAME_DEF_STMT (gimple_vdef (stmt
)) = laststore
;
656 reaching_vuse
= make_ssa_name (gimple_vop (cfun
), laststore
);
657 gimple_set_vdef (laststore
, reaching_vuse
);
658 update_stmt (laststore
);
661 new_stmt
= gimple_build_assign (lhs
, tmp
);
662 if (!is_gimple_reg (tmp
))
663 gimple_set_vuse (new_stmt
, reaching_vuse
);
664 if (!is_gimple_reg (lhs
))
666 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
667 if (TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
668 SSA_NAME_DEF_STMT (gimple_vdef (stmt
)) = new_stmt
;
670 else if (reaching_vuse
== gimple_vuse (stmt
))
671 unlink_stmt_vdef (stmt
);
674 gimple_set_location (new_stmt
, gimple_location (stmt
));
675 gsi_replace (si_p
, new_stmt
, false);
678 /* Return the string length, maximum string length or maximum value of
680 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
681 is not NULL and, for TYPE == 0, its value is not equal to the length
682 we determine or if we are unable to determine the length or value,
683 return false. VISITED is a bitmap of visited variables.
684 TYPE is 0 if string length should be returned, 1 for maximum string
685 length and 2 for maximum value ARG can have. */
/* NOTE(review): the return type (presumably bool), the `var'/`val'/
   `def_stmt' declarations, several returns and the switch braces were
   dropped (numbering jumps 688 -> 693, 707 -> 713, ...).  */
688 get_maxval_strlen (tree arg
, tree
*length
, bitmap visited
, int type
)
/* Non-SSA arguments: handle COND_EXPR, &array[0], and constants directly.  */
693 if (TREE_CODE (arg
) != SSA_NAME
)
695 if (TREE_CODE (arg
) == COND_EXPR
)
696 return get_maxval_strlen (COND_EXPR_THEN (arg
), length
, visited
, type
)
697 && get_maxval_strlen (COND_EXPR_ELSE (arg
), length
, visited
, type
);
698 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
699 else if (TREE_CODE (arg
) == ADDR_EXPR
700 && TREE_CODE (TREE_OPERAND (arg
, 0)) == ARRAY_REF
701 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg
, 0), 1)))
703 tree aop0
= TREE_OPERAND (TREE_OPERAND (arg
, 0), 0);
704 if (TREE_CODE (aop0
) == INDIRECT_REF
705 && TREE_CODE (TREE_OPERAND (aop0
, 0)) == SSA_NAME
)
706 return get_maxval_strlen (TREE_OPERAND (aop0
, 0),
707 length
, visited
, type
);
/* TYPE == 2: ARG itself is the value; it must be a non-negative
   integer constant (the surrounding branch is missing here).  */
713 if (TREE_CODE (val
) != INTEGER_CST
714 || tree_int_cst_sgn (val
) < 0)
718 val
= c_strlen (arg
, 1);
/* Merge the new length with an already-recorded one: for maxima keep
   the larger value, for exact length require equality.  */
726 if (TREE_CODE (*length
) != INTEGER_CST
727 || TREE_CODE (val
) != INTEGER_CST
)
730 if (tree_int_cst_lt (*length
, val
))
734 else if (simple_cst_equal (val
, *length
) != 1)
742 /* If we were already here, break the infinite cycle. */
743 if (!bitmap_set_bit (visited
, SSA_NAME_VERSION (arg
)))
747 def_stmt
= SSA_NAME_DEF_STMT (var
);
749 switch (gimple_code (def_stmt
))
752 /* The RHS of the statement defining VAR must either have a
753 constant length or come from another SSA_NAME with a constant
755 if (gimple_assign_single_p (def_stmt
)
756 || gimple_assign_unary_nop_p (def_stmt
))
758 tree rhs
= gimple_assign_rhs1 (def_stmt
);
759 return get_maxval_strlen (rhs
, length
, visited
, type
);
765 /* All the arguments of the PHI node must have the same constant
769 for (i
= 0; i
< gimple_phi_num_args (def_stmt
); i
++)
771 tree arg
= gimple_phi_arg (def_stmt
, i
)->def
;
773 /* If this PHI has itself as an argument, we cannot
774 determine the string length of this argument. However,
775 if we can find a constant string length for the other
776 PHI args then we can still be sure that this is a
777 constant string length. So be optimistic and just
778 continue with the next argument. */
779 if (arg
== gimple_phi_result (def_stmt
))
782 if (!get_maxval_strlen (arg
, length
, visited
, type
))
794 /* Fold builtin call in statement STMT. Returns a simplified tree.
795 We may return a non-constant expression, including another call
796 to a different function and with different arguments, e.g.,
797 substituting memcpy for strcpy when the string length is known.
798 Note that some builtins expand into inline code that may not
799 be valid in GIMPLE. Callers must take care. */
/* NOTE(review): local declarations (`callee', `result', `val[]', `a',
   `visited', `ignore', `nargs', `type', `arg_idx', `new_val'), the
   arg_idx/type setup inside the first switch, `break's and several
   `return's were lost in extraction (numbering jumps 802 -> 810,
   865 -> 873, ...); verify against upstream gimple-fold.c.  */
802 gimple_fold_builtin (gimple stmt
)
810 location_t loc
= gimple_location (stmt
);
812 gcc_assert (is_gimple_call (stmt
));
/* A call with no LHS means the result value is ignored.  */
814 ignore
= (gimple_call_lhs (stmt
) == NULL
);
816 /* First try the generic builtin folder. If that succeeds, return the
818 result
= fold_call_stmt (stmt
, ignore
);
826 /* Ignore MD builtins. */
827 callee
= gimple_call_fndecl (stmt
);
828 if (DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_MD
)
831 /* If the builtin could not be folded, and it has no argument list,
833 nargs
= gimple_call_num_args (stmt
);
837 /* Limit the work only for builtins we know how to simplify. */
/* This first switch presumably sets arg_idx (which argument to run
   get_maxval_strlen on) and type (0/1/2) per builtin; those assignments
   are missing from this extraction.  */
838 switch (DECL_FUNCTION_CODE (callee
))
840 case BUILT_IN_STRLEN
:
842 case BUILT_IN_FPUTS_UNLOCKED
:
846 case BUILT_IN_STRCPY
:
847 case BUILT_IN_STRNCPY
:
851 case BUILT_IN_MEMCPY_CHK
:
852 case BUILT_IN_MEMPCPY_CHK
:
853 case BUILT_IN_MEMMOVE_CHK
:
854 case BUILT_IN_MEMSET_CHK
:
855 case BUILT_IN_STRNCPY_CHK
:
859 case BUILT_IN_STRCPY_CHK
:
860 case BUILT_IN_STPCPY_CHK
:
864 case BUILT_IN_SNPRINTF_CHK
:
865 case BUILT_IN_VSNPRINTF_CHK
:
873 if (arg_idx
>= nargs
)
876 /* Try to use the dataflow information gathered by the CCP process. */
877 visited
= BITMAP_ALLOC (NULL
);
878 bitmap_clear (visited
);
880 memset (val
, 0, sizeof (val
));
881 a
= gimple_call_arg (stmt
, arg_idx
);
882 if (!get_maxval_strlen (a
, &val
[arg_idx
], visited
, type
))
883 val
[arg_idx
] = NULL_TREE
;
885 BITMAP_FREE (visited
);
/* Second switch: apply the builtin-specific folder using the length /
   value information computed above.  */
888 switch (DECL_FUNCTION_CODE (callee
))
890 case BUILT_IN_STRLEN
:
891 if (val
[0] && nargs
== 1)
894 fold_convert (TREE_TYPE (gimple_call_lhs (stmt
)), val
[0]);
896 /* If the result is not a valid gimple value, or not a cast
897 of a valid gimple value, then we cannot use the result. */
898 if (is_gimple_val (new_val
)
899 || (CONVERT_EXPR_P (new_val
)
900 && is_gimple_val (TREE_OPERAND (new_val
, 0))))
905 case BUILT_IN_STRCPY
:
906 if (val
[1] && is_gimple_val (val
[1]) && nargs
== 2)
907 result
= fold_builtin_strcpy (loc
, callee
,
908 gimple_call_arg (stmt
, 0),
909 gimple_call_arg (stmt
, 1),
913 case BUILT_IN_STRNCPY
:
914 if (val
[1] && is_gimple_val (val
[1]) && nargs
== 3)
915 result
= fold_builtin_strncpy (loc
, callee
,
916 gimple_call_arg (stmt
, 0),
917 gimple_call_arg (stmt
, 1),
918 gimple_call_arg (stmt
, 2),
/* The BUILT_IN_FPUTS case label is missing here (numbering jumps to
   924); this call folds fputs with the known string length val[0].  */
924 result
= fold_builtin_fputs (loc
, gimple_call_arg (stmt
, 0),
925 gimple_call_arg (stmt
, 1),
926 ignore
, false, val
[0]);
929 case BUILT_IN_FPUTS_UNLOCKED
:
931 result
= fold_builtin_fputs (loc
, gimple_call_arg (stmt
, 0),
932 gimple_call_arg (stmt
, 1),
933 ignore
, true, val
[0]);
936 case BUILT_IN_MEMCPY_CHK
:
937 case BUILT_IN_MEMPCPY_CHK
:
938 case BUILT_IN_MEMMOVE_CHK
:
939 case BUILT_IN_MEMSET_CHK
:
940 if (val
[2] && is_gimple_val (val
[2]) && nargs
== 4)
941 result
= fold_builtin_memory_chk (loc
, callee
,
942 gimple_call_arg (stmt
, 0),
943 gimple_call_arg (stmt
, 1),
944 gimple_call_arg (stmt
, 2),
945 gimple_call_arg (stmt
, 3),
947 DECL_FUNCTION_CODE (callee
));
950 case BUILT_IN_STRCPY_CHK
:
951 case BUILT_IN_STPCPY_CHK
:
952 if (val
[1] && is_gimple_val (val
[1]) && nargs
== 3)
953 result
= fold_builtin_stxcpy_chk (loc
, callee
,
954 gimple_call_arg (stmt
, 0),
955 gimple_call_arg (stmt
, 1),
956 gimple_call_arg (stmt
, 2),
958 DECL_FUNCTION_CODE (callee
));
961 case BUILT_IN_STRNCPY_CHK
:
962 if (val
[2] && is_gimple_val (val
[2]) && nargs
== 4)
963 result
= fold_builtin_strncpy_chk (loc
, gimple_call_arg (stmt
, 0),
964 gimple_call_arg (stmt
, 1),
965 gimple_call_arg (stmt
, 2),
966 gimple_call_arg (stmt
, 3),
970 case BUILT_IN_SNPRINTF_CHK
:
971 case BUILT_IN_VSNPRINTF_CHK
:
972 if (val
[1] && is_gimple_val (val
[1]))
973 result
= gimple_fold_builtin_snprintf_chk (stmt
, val
[1],
974 DECL_FUNCTION_CODE (callee
));
/* Drop an unused result so callers do not see a dead value.  */
981 if (result
&& ignore
)
982 result
= fold_ignored_result (result
);
986 /* Generate code adjusting the first parameter of a call statement determined
/* NOTE(review): the rest of this comment, the return type and the local
   declarations (`parm', `tmp', `new_stmt') are missing (numbering jumps
   986 -> 990, 992 -> 996).  The function inserts `tmp = parm + delta'
   before the call and replaces argument 0 with tmp -- presumably a thunk
   `this'-pointer adjustment; verify upstream.  */
990 gimple_adjust_this_by_delta (gimple_stmt_iterator
*gsi
, tree delta
)
992 gimple call_stmt
= gsi_stmt (*gsi
);
/* POINTER_PLUS_EXPR requires the offset in the pointer-offset type.  */
996 delta
= convert_to_ptrofftype (delta
);
997 gcc_assert (gimple_call_num_args (call_stmt
) >= 1);
998 parm
= gimple_call_arg (call_stmt
, 0);
999 gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm
)));
1000 tmp
= create_tmp_var (TREE_TYPE (parm
), NULL
);
1001 add_referenced_var (tmp
);
1003 tmp
= make_ssa_name (tmp
, NULL
);
1004 new_stmt
= gimple_build_assign_with_ops (POINTER_PLUS_EXPR
, tmp
, parm
, delta
);
1005 SSA_NAME_DEF_STMT (tmp
) = new_stmt
;
1006 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1007 gimple_call_set_arg (call_stmt
, 0, tmp
);
1010 /* Return a binfo to be used for devirtualization of calls based on an object
1011 represented by a declaration (i.e. a global or automatically allocated one)
1012 or NULL if it cannot be found or is not safe. CST is expected to be an
1013 ADDR_EXPR of such object or the function will return NULL. Currently it is
1014 safe to use such binfo only if it has no base binfo (i.e. no ancestors). */
/* NOTE(review): the NULL_TREE returns, the sub-object search loop header
   and the `fld' declaration were dropped (numbering jumps 1014 -> 1017,
   1035 -> 1038, 1042 -> 1045); verify against upstream gimple-fold.c.  */
1017 gimple_extract_devirt_binfo_from_cst (tree cst
)
1019 HOST_WIDE_INT offset
, size
, max_size
;
1020 tree base
, type
, expected_type
, binfo
;
1021 bool last_artificial
= false;
/* Devirtualization must be enabled and CST must be &object-of-RECORD.  */
1023 if (!flag_devirtualize
1024 || TREE_CODE (cst
) != ADDR_EXPR
1025 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst
))) != RECORD_TYPE
)
1028 cst
= TREE_OPERAND (cst
, 0);
1029 expected_type
= TREE_TYPE (cst
);
1030 base
= get_ref_base_and_extent (cst
, &offset
, &size
, &max_size
);
1031 type
= TREE_TYPE (base
);
1035 || TREE_CODE (type
) != RECORD_TYPE
)
1038 /* Find the sub-object the constant actually refers to and mark whether it is
1039 an artificial one (as opposed to a user-defined one). */
1042 HOST_WIDE_INT pos
, size
;
/* Descend until TYPE matches the expected type at the given offset.  */
1045 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (expected_type
))
1050 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
1052 if (TREE_CODE (fld
) != FIELD_DECL
)
1055 pos
= int_bit_position (fld
);
1056 size
= tree_low_cst (DECL_SIZE (fld
), 1);
1057 if (pos
<= offset
&& (pos
+ size
) > offset
)
1060 if (!fld
|| TREE_CODE (TREE_TYPE (fld
)) != RECORD_TYPE
)
1063 last_artificial
= DECL_ARTIFICIAL (fld
);
1064 type
= TREE_TYPE (fld
);
1067 /* Artifical sub-objects are ancestors, we do not want to use them for
1068 devirtualization, at least not here. */
1069 if (last_artificial
)
/* Only a binfo with no base binfos (no ancestors) is safe to use.  */
1071 binfo
= TYPE_BINFO (type
);
1072 if (!binfo
|| BINFO_N_BASE_BINFOS (binfo
) > 0)
1078 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1079 The statement may be replaced by another statement, e.g., if the call
1080 simplifies to a constant value. Return true if any changes were made.
1081 It is assumed that the operands have been previously folded. */
/* NOTE(review): the `changed' flag, braces and the return statements were
   lost (numbering jumps 1086 -> 1089, 1099 -> 1104); verify upstream.  */
1084 gimple_fold_call (gimple_stmt_iterator
*gsi
, bool inplace
)
1086 gimple stmt
= gsi_stmt (*gsi
);
1089 /* Check for builtins that CCP can handle using information not
1090 available in the generic fold routines. */
1091 callee
= gimple_call_fndecl (stmt
);
1092 if (!inplace
&& callee
&& DECL_BUILT_IN (callee
))
1094 tree result
= gimple_fold_builtin (stmt
);
/* Prefer the cheap in-place update; fall back to full gimplification
   of the replacement expression when that is not possible.  */
1098 if (!update_call_from_tree (gsi
, result
))
1099 gimplify_and_update_call_from_tree (gsi
, result
);
1104 /* Check for virtual calls that became direct calls. */
1105 callee
= gimple_call_fn (stmt
);
1106 if (callee
&& TREE_CODE (callee
) == OBJ_TYPE_REF
)
1108 tree binfo
, fndecl
, obj
;
1109 HOST_WIDE_INT token
;
1111 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee
)) != NULL_TREE
)
1113 gimple_call_set_fn (stmt
, OBJ_TYPE_REF_EXPR (callee
));
/* Otherwise try devirtualizing through the static type of the object.  */
1117 obj
= OBJ_TYPE_REF_OBJECT (callee
);
1118 binfo
= gimple_extract_devirt_binfo_from_cst (obj
);
1121 token
= TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee
));
1122 fndecl
= gimple_get_virt_method_for_binfo (token
, binfo
);
1125 gimple_call_set_fndecl (stmt
, fndecl
);
1132 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1133 distinguishes both cases. */
/* NOTE(review): the switch/case labels for GIMPLE_ASSIGN/COND/CALL/ASM/
   DEBUG, several `changed = true;' assignments, loop-variable declarations
   and the final return were dropped (numbering jumps 1145 -> 1147,
   1177 -> 1184, ...); verify against upstream gimple-fold.c.  */
1136 fold_stmt_1 (gimple_stmt_iterator
*gsi
, bool inplace
)
1138 bool changed
= false;
1139 gimple stmt
= gsi_stmt (*gsi
);
/* Remember the following statement so we can detect whether folding
   removed STMT entirely (see the check near the end).  */
1141 gimple_stmt_iterator gsinext
= *gsi
;
1144 gsi_next (&gsinext
);
1145 next_stmt
= gsi_end_p (gsinext
) ? NULL
: gsi_stmt (gsinext
);
1147 /* Fold the main computation performed by the statement. */
1148 switch (gimple_code (stmt
))
1152 unsigned old_num_ops
= gimple_num_ops (stmt
);
1153 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
1154 tree lhs
= gimple_assign_lhs (stmt
);
1156 /* First canonicalize operand order. This avoids building new
1157 trees if this is the only thing fold would later do. */
1158 if ((commutative_tree_code (subcode
)
1159 || commutative_ternary_tree_code (subcode
))
1160 && tree_swap_operands_p (gimple_assign_rhs1 (stmt
),
1161 gimple_assign_rhs2 (stmt
), false))
1163 tree tem
= gimple_assign_rhs1 (stmt
);
1164 gimple_assign_set_rhs1 (stmt
, gimple_assign_rhs2 (stmt
));
1165 gimple_assign_set_rhs2 (stmt
, tem
);
1168 new_rhs
= fold_gimple_assign (gsi
);
/* Keep lhs/rhs types compatible after folding.  */
1170 && !useless_type_conversion_p (TREE_TYPE (lhs
),
1171 TREE_TYPE (new_rhs
)))
1172 new_rhs
= fold_convert (TREE_TYPE (lhs
), new_rhs
);
/* In-place mode may only shrink the statement, never grow it.  */
1175 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs
)) < old_num_ops
))
1177 gimple_assign_set_rhs_from_tree (gsi
, new_rhs
);
1184 changed
|= fold_gimple_cond (stmt
);
1188 /* Fold *& in call arguments. */
1189 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
1190 if (REFERENCE_CLASS_P (gimple_call_arg (stmt
, i
)))
1192 tree tmp
= maybe_fold_reference (gimple_call_arg (stmt
, i
), false);
1195 gimple_call_set_arg (stmt
, i
, tmp
);
1199 changed
|= gimple_fold_call (gsi
, inplace
);
1203 /* Fold *& in asm operands. */
1204 for (i
= 0; i
< gimple_asm_noutputs (stmt
); ++i
)
1206 tree link
= gimple_asm_output_op (stmt
, i
);
1207 tree op
= TREE_VALUE (link
);
1208 if (REFERENCE_CLASS_P (op
)
1209 && (op
= maybe_fold_reference (op
, true)) != NULL_TREE
)
1211 TREE_VALUE (link
) = op
;
1215 for (i
= 0; i
< gimple_asm_ninputs (stmt
); ++i
)
1217 tree link
= gimple_asm_input_op (stmt
, i
);
1218 tree op
= TREE_VALUE (link
);
1219 if (REFERENCE_CLASS_P (op
)
1220 && (op
= maybe_fold_reference (op
, false)) != NULL_TREE
)
1222 TREE_VALUE (link
) = op
;
/* Debug binds: fold *& in the bound value without affecting codegen.  */
1229 if (gimple_debug_bind_p (stmt
))
1231 tree val
= gimple_debug_bind_get_value (stmt
);
1233 && REFERENCE_CLASS_P (val
))
1235 tree tem
= maybe_fold_reference (val
, false);
1238 gimple_debug_bind_set_value (stmt
, tem
);
1248 /* If stmt folds into nothing and it was the last stmt in a bb,
1249 don't call gsi_stmt. */
1250 if (gsi_end_p (*gsi
))
1252 gcc_assert (next_stmt
== NULL
);
1256 stmt
= gsi_stmt (*gsi
);
1258 /* Fold *& on the lhs. Don't do this if stmt folded into nothing,
1259 as we'd changing the next stmt. */
1260 if (gimple_has_lhs (stmt
) && stmt
!= next_stmt
)
1262 tree lhs
= gimple_get_lhs (stmt
);
1263 if (lhs
&& REFERENCE_CLASS_P (lhs
))
1265 tree new_lhs
= maybe_fold_reference (lhs
, true);
1268 gimple_set_lhs (stmt
, new_lhs
);
1277 /* Fold the statement pointed to by GSI. In some cases, this function may
1278 replace the whole statement with a new one. Returns true iff folding
1280 The statement pointed to by GSI should be in valid gimple form but may
1281 be in unfolded state as resulting from for example constant propagation
1282 which can produce *&x = 0. */
1285 fold_stmt (gimple_stmt_iterator
*gsi
)
1287 return fold_stmt_1 (gsi
, false);
1290 /* Perform the minimal folding on statement *GSI. Only operations like
1291 *&x created by constant propagation are handled. The statement cannot
1292 be replaced with a new one. Return true if the statement was
1293 changed, false otherwise.
1294 The statement *GSI should be in valid gimple form but may
1295 be in unfolded state as resulting from for example constant propagation
1296 which can produce *&x = 0. */
1299 fold_stmt_inplace (gimple_stmt_iterator
*gsi
)
1301 gimple stmt
= gsi_stmt (*gsi
);
1302 bool changed
= fold_stmt_1 (gsi
, true);
1303 gcc_assert (gsi_stmt (*gsi
) == stmt
);
1307 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1308 if EXPR is null or we don't know how.
1309 If non-null, the result always has boolean type. */
1312 canonicalize_bool (tree expr
, bool invert
)
1318 if (integer_nonzerop (expr
))
1319 return boolean_false_node
;
1320 else if (integer_zerop (expr
))
1321 return boolean_true_node
;
1322 else if (TREE_CODE (expr
) == SSA_NAME
)
1323 return fold_build2 (EQ_EXPR
, boolean_type_node
, expr
,
1324 build_int_cst (TREE_TYPE (expr
), 0));
1325 else if (TREE_CODE_CLASS (TREE_CODE (expr
)) == tcc_comparison
)
1326 return fold_build2 (invert_tree_comparison (TREE_CODE (expr
), false),
1328 TREE_OPERAND (expr
, 0),
1329 TREE_OPERAND (expr
, 1));
1335 if (TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
1337 if (integer_nonzerop (expr
))
1338 return boolean_true_node
;
1339 else if (integer_zerop (expr
))
1340 return boolean_false_node
;
1341 else if (TREE_CODE (expr
) == SSA_NAME
)
1342 return fold_build2 (NE_EXPR
, boolean_type_node
, expr
,
1343 build_int_cst (TREE_TYPE (expr
), 0));
1344 else if (TREE_CODE_CLASS (TREE_CODE (expr
)) == tcc_comparison
)
1345 return fold_build2 (TREE_CODE (expr
),
1347 TREE_OPERAND (expr
, 0),
1348 TREE_OPERAND (expr
, 1));
1354 /* Check to see if a boolean expression EXPR is logically equivalent to the
1355 comparison (OP1 CODE OP2). Check for various identities involving
1359 same_bool_comparison_p (const_tree expr
, enum tree_code code
,
1360 const_tree op1
, const_tree op2
)
1364 /* The obvious case. */
1365 if (TREE_CODE (expr
) == code
1366 && operand_equal_p (TREE_OPERAND (expr
, 0), op1
, 0)
1367 && operand_equal_p (TREE_OPERAND (expr
, 1), op2
, 0))
1370 /* Check for comparing (name, name != 0) and the case where expr
1371 is an SSA_NAME with a definition matching the comparison. */
1372 if (TREE_CODE (expr
) == SSA_NAME
1373 && TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
1375 if (operand_equal_p (expr
, op1
, 0))
1376 return ((code
== NE_EXPR
&& integer_zerop (op2
))
1377 || (code
== EQ_EXPR
&& integer_nonzerop (op2
)));
1378 s
= SSA_NAME_DEF_STMT (expr
);
1379 if (is_gimple_assign (s
)
1380 && gimple_assign_rhs_code (s
) == code
1381 && operand_equal_p (gimple_assign_rhs1 (s
), op1
, 0)
1382 && operand_equal_p (gimple_assign_rhs2 (s
), op2
, 0))
1386 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1387 of name is a comparison, recurse. */
1388 if (TREE_CODE (op1
) == SSA_NAME
1389 && TREE_CODE (TREE_TYPE (op1
)) == BOOLEAN_TYPE
)
1391 s
= SSA_NAME_DEF_STMT (op1
);
1392 if (is_gimple_assign (s
)
1393 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
)
1395 enum tree_code c
= gimple_assign_rhs_code (s
);
1396 if ((c
== NE_EXPR
&& integer_zerop (op2
))
1397 || (c
== EQ_EXPR
&& integer_nonzerop (op2
)))
1398 return same_bool_comparison_p (expr
, c
,
1399 gimple_assign_rhs1 (s
),
1400 gimple_assign_rhs2 (s
));
1401 if ((c
== EQ_EXPR
&& integer_zerop (op2
))
1402 || (c
== NE_EXPR
&& integer_nonzerop (op2
)))
1403 return same_bool_comparison_p (expr
,
1404 invert_tree_comparison (c
, false),
1405 gimple_assign_rhs1 (s
),
1406 gimple_assign_rhs2 (s
));
1412 /* Check to see if two boolean expressions OP1 and OP2 are logically
1416 same_bool_result_p (const_tree op1
, const_tree op2
)
1418 /* Simple cases first. */
1419 if (operand_equal_p (op1
, op2
, 0))
1422 /* Check the cases where at least one of the operands is a comparison.
1423 These are a bit smarter than operand_equal_p in that they apply some
1424 identifies on SSA_NAMEs. */
1425 if (TREE_CODE_CLASS (TREE_CODE (op2
)) == tcc_comparison
1426 && same_bool_comparison_p (op1
, TREE_CODE (op2
),
1427 TREE_OPERAND (op2
, 0),
1428 TREE_OPERAND (op2
, 1)))
1430 if (TREE_CODE_CLASS (TREE_CODE (op1
)) == tcc_comparison
1431 && same_bool_comparison_p (op2
, TREE_CODE (op1
),
1432 TREE_OPERAND (op1
, 0),
1433 TREE_OPERAND (op1
, 1)))
1440 /* Forward declarations for some mutually recursive functions. */
1443 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
1444 enum tree_code code2
, tree op2a
, tree op2b
);
1446 and_var_with_comparison (tree var
, bool invert
,
1447 enum tree_code code2
, tree op2a
, tree op2b
);
1449 and_var_with_comparison_1 (gimple stmt
,
1450 enum tree_code code2
, tree op2a
, tree op2b
);
1452 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
1453 enum tree_code code2
, tree op2a
, tree op2b
);
1455 or_var_with_comparison (tree var
, bool invert
,
1456 enum tree_code code2
, tree op2a
, tree op2b
);
1458 or_var_with_comparison_1 (gimple stmt
,
1459 enum tree_code code2
, tree op2a
, tree op2b
);
1461 /* Helper function for and_comparisons_1: try to simplify the AND of the
1462 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1463 If INVERT is true, invert the value of the VAR before doing the AND.
1464 Return NULL_EXPR if we can't simplify this to a single expression. */
1467 and_var_with_comparison (tree var
, bool invert
,
1468 enum tree_code code2
, tree op2a
, tree op2b
)
1471 gimple stmt
= SSA_NAME_DEF_STMT (var
);
1473 /* We can only deal with variables whose definitions are assignments. */
1474 if (!is_gimple_assign (stmt
))
1477 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1478 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1479 Then we only have to consider the simpler non-inverted cases. */
1481 t
= or_var_with_comparison_1 (stmt
,
1482 invert_tree_comparison (code2
, false),
1485 t
= and_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
1486 return canonicalize_bool (t
, invert
);
1489 /* Try to simplify the AND of the ssa variable defined by the assignment
1490 STMT with the comparison specified by (OP2A CODE2 OP2B).
1491 Return NULL_EXPR if we can't simplify this to a single expression. */
1494 and_var_with_comparison_1 (gimple stmt
,
1495 enum tree_code code2
, tree op2a
, tree op2b
)
1497 tree var
= gimple_assign_lhs (stmt
);
1498 tree true_test_var
= NULL_TREE
;
1499 tree false_test_var
= NULL_TREE
;
1500 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
1502 /* Check for identities like (var AND (var == 0)) => false. */
1503 if (TREE_CODE (op2a
) == SSA_NAME
1504 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
1506 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
1507 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
1509 true_test_var
= op2a
;
1510 if (var
== true_test_var
)
1513 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
1514 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
1516 false_test_var
= op2a
;
1517 if (var
== false_test_var
)
1518 return boolean_false_node
;
1522 /* If the definition is a comparison, recurse on it. */
1523 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
1525 tree t
= and_comparisons_1 (innercode
,
1526 gimple_assign_rhs1 (stmt
),
1527 gimple_assign_rhs2 (stmt
),
1535 /* If the definition is an AND or OR expression, we may be able to
1536 simplify by reassociating. */
1537 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
1538 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
1540 tree inner1
= gimple_assign_rhs1 (stmt
);
1541 tree inner2
= gimple_assign_rhs2 (stmt
);
1544 tree partial
= NULL_TREE
;
1545 bool is_and
= (innercode
== BIT_AND_EXPR
);
1547 /* Check for boolean identities that don't require recursive examination
1549 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1550 inner1 AND (inner1 OR inner2) => inner1
1551 !inner1 AND (inner1 AND inner2) => false
1552 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1553 Likewise for similar cases involving inner2. */
1554 if (inner1
== true_test_var
)
1555 return (is_and
? var
: inner1
);
1556 else if (inner2
== true_test_var
)
1557 return (is_and
? var
: inner2
);
1558 else if (inner1
== false_test_var
)
1560 ? boolean_false_node
1561 : and_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
1562 else if (inner2
== false_test_var
)
1564 ? boolean_false_node
1565 : and_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
1567 /* Next, redistribute/reassociate the AND across the inner tests.
1568 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1569 if (TREE_CODE (inner1
) == SSA_NAME
1570 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
1571 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
1572 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
1573 gimple_assign_rhs1 (s
),
1574 gimple_assign_rhs2 (s
),
1575 code2
, op2a
, op2b
)))
1577 /* Handle the AND case, where we are reassociating:
1578 (inner1 AND inner2) AND (op2a code2 op2b)
1580 If the partial result t is a constant, we win. Otherwise
1581 continue on to try reassociating with the other inner test. */
1584 if (integer_onep (t
))
1586 else if (integer_zerop (t
))
1587 return boolean_false_node
;
1590 /* Handle the OR case, where we are redistributing:
1591 (inner1 OR inner2) AND (op2a code2 op2b)
1592 => (t OR (inner2 AND (op2a code2 op2b))) */
1593 else if (integer_onep (t
))
1594 return boolean_true_node
;
1596 /* Save partial result for later. */
1600 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1601 if (TREE_CODE (inner2
) == SSA_NAME
1602 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
1603 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
1604 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
1605 gimple_assign_rhs1 (s
),
1606 gimple_assign_rhs2 (s
),
1607 code2
, op2a
, op2b
)))
1609 /* Handle the AND case, where we are reassociating:
1610 (inner1 AND inner2) AND (op2a code2 op2b)
1611 => (inner1 AND t) */
1614 if (integer_onep (t
))
1616 else if (integer_zerop (t
))
1617 return boolean_false_node
;
1618 /* If both are the same, we can apply the identity
1620 else if (partial
&& same_bool_result_p (t
, partial
))
1624 /* Handle the OR case. where we are redistributing:
1625 (inner1 OR inner2) AND (op2a code2 op2b)
1626 => (t OR (inner1 AND (op2a code2 op2b)))
1627 => (t OR partial) */
1630 if (integer_onep (t
))
1631 return boolean_true_node
;
1634 /* We already got a simplification for the other
1635 operand to the redistributed OR expression. The
1636 interesting case is when at least one is false.
1637 Or, if both are the same, we can apply the identity
1639 if (integer_zerop (partial
))
1641 else if (integer_zerop (t
))
1643 else if (same_bool_result_p (t
, partial
))
1652 /* Try to simplify the AND of two comparisons defined by
1653 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1654 If this can be done without constructing an intermediate value,
1655 return the resulting tree; otherwise NULL_TREE is returned.
1656 This function is deliberately asymmetric as it recurses on SSA_DEFs
1657 in the first comparison but not the second. */
1660 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
1661 enum tree_code code2
, tree op2a
, tree op2b
)
1663 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1664 if (operand_equal_p (op1a
, op2a
, 0)
1665 && operand_equal_p (op1b
, op2b
, 0))
1667 /* Result will be either NULL_TREE, or a combined comparison. */
1668 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
1669 TRUTH_ANDIF_EXPR
, code1
, code2
,
1670 boolean_type_node
, op1a
, op1b
);
1675 /* Likewise the swapped case of the above. */
1676 if (operand_equal_p (op1a
, op2b
, 0)
1677 && operand_equal_p (op1b
, op2a
, 0))
1679 /* Result will be either NULL_TREE, or a combined comparison. */
1680 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
1681 TRUTH_ANDIF_EXPR
, code1
,
1682 swap_tree_comparison (code2
),
1683 boolean_type_node
, op1a
, op1b
);
1688 /* If both comparisons are of the same value against constants, we might
1689 be able to merge them. */
1690 if (operand_equal_p (op1a
, op2a
, 0)
1691 && TREE_CODE (op1b
) == INTEGER_CST
1692 && TREE_CODE (op2b
) == INTEGER_CST
)
1694 int cmp
= tree_int_cst_compare (op1b
, op2b
);
1696 /* If we have (op1a == op1b), we should either be able to
1697 return that or FALSE, depending on whether the constant op1b
1698 also satisfies the other comparison against op2b. */
1699 if (code1
== EQ_EXPR
)
1705 case EQ_EXPR
: val
= (cmp
== 0); break;
1706 case NE_EXPR
: val
= (cmp
!= 0); break;
1707 case LT_EXPR
: val
= (cmp
< 0); break;
1708 case GT_EXPR
: val
= (cmp
> 0); break;
1709 case LE_EXPR
: val
= (cmp
<= 0); break;
1710 case GE_EXPR
: val
= (cmp
>= 0); break;
1711 default: done
= false;
1716 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
1718 return boolean_false_node
;
1721 /* Likewise if the second comparison is an == comparison. */
1722 else if (code2
== EQ_EXPR
)
1728 case EQ_EXPR
: val
= (cmp
== 0); break;
1729 case NE_EXPR
: val
= (cmp
!= 0); break;
1730 case LT_EXPR
: val
= (cmp
> 0); break;
1731 case GT_EXPR
: val
= (cmp
< 0); break;
1732 case LE_EXPR
: val
= (cmp
>= 0); break;
1733 case GE_EXPR
: val
= (cmp
<= 0); break;
1734 default: done
= false;
1739 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
1741 return boolean_false_node
;
1745 /* Same business with inequality tests. */
1746 else if (code1
== NE_EXPR
)
1751 case EQ_EXPR
: val
= (cmp
!= 0); break;
1752 case NE_EXPR
: val
= (cmp
== 0); break;
1753 case LT_EXPR
: val
= (cmp
>= 0); break;
1754 case GT_EXPR
: val
= (cmp
<= 0); break;
1755 case LE_EXPR
: val
= (cmp
> 0); break;
1756 case GE_EXPR
: val
= (cmp
< 0); break;
1761 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
1763 else if (code2
== NE_EXPR
)
1768 case EQ_EXPR
: val
= (cmp
== 0); break;
1769 case NE_EXPR
: val
= (cmp
!= 0); break;
1770 case LT_EXPR
: val
= (cmp
<= 0); break;
1771 case GT_EXPR
: val
= (cmp
>= 0); break;
1772 case LE_EXPR
: val
= (cmp
< 0); break;
1773 case GE_EXPR
: val
= (cmp
> 0); break;
1778 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
1781 /* Chose the more restrictive of two < or <= comparisons. */
1782 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
1783 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
1785 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
1786 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
1788 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
1791 /* Likewise chose the more restrictive of two > or >= comparisons. */
1792 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
1793 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
1795 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
1796 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
1798 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
1801 /* Check for singleton ranges. */
1803 && ((code1
== LE_EXPR
&& code2
== GE_EXPR
)
1804 || (code1
== GE_EXPR
&& code2
== LE_EXPR
)))
1805 return fold_build2 (EQ_EXPR
, boolean_type_node
, op1a
, op2b
);
1807 /* Check for disjoint ranges. */
1809 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
1810 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
1811 return boolean_false_node
;
1813 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
1814 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
1815 return boolean_false_node
;
1818 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1819 NAME's definition is a truth value. See if there are any simplifications
1820 that can be done against the NAME's definition. */
1821 if (TREE_CODE (op1a
) == SSA_NAME
1822 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
1823 && (integer_zerop (op1b
) || integer_onep (op1b
)))
1825 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
1826 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
1827 gimple stmt
= SSA_NAME_DEF_STMT (op1a
);
1828 switch (gimple_code (stmt
))
1831 /* Try to simplify by copy-propagating the definition. */
1832 return and_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
1835 /* If every argument to the PHI produces the same result when
1836 ANDed with the second comparison, we win.
1837 Do not do this unless the type is bool since we need a bool
1838 result here anyway. */
1839 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
1841 tree result
= NULL_TREE
;
1843 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
1845 tree arg
= gimple_phi_arg_def (stmt
, i
);
1847 /* If this PHI has itself as an argument, ignore it.
1848 If all the other args produce the same result,
1850 if (arg
== gimple_phi_result (stmt
))
1852 else if (TREE_CODE (arg
) == INTEGER_CST
)
1854 if (invert
? integer_nonzerop (arg
) : integer_zerop (arg
))
1857 result
= boolean_false_node
;
1858 else if (!integer_zerop (result
))
1862 result
= fold_build2 (code2
, boolean_type_node
,
1864 else if (!same_bool_comparison_p (result
,
1868 else if (TREE_CODE (arg
) == SSA_NAME
1869 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
1872 gimple def_stmt
= SSA_NAME_DEF_STMT (arg
);
1873 /* In simple cases we can look through PHI nodes,
1874 but we have to be careful with loops.
1876 if (! dom_info_available_p (CDI_DOMINATORS
)
1877 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
1878 || dominated_by_p (CDI_DOMINATORS
,
1879 gimple_bb (def_stmt
),
1882 temp
= and_var_with_comparison (arg
, invert
, code2
,
1888 else if (!same_bool_result_p (result
, temp
))
1904 /* Try to simplify the AND of two comparisons, specified by
1905 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
1906 If this can be simplified to a single expression (without requiring
1907 introducing more SSA variables to hold intermediate values),
1908 return the resulting tree. Otherwise return NULL_TREE.
1909 If the result expression is non-null, it has boolean type. */
1912 maybe_fold_and_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
1913 enum tree_code code2
, tree op2a
, tree op2b
)
1915 tree t
= and_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
1919 return and_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
1922 /* Helper function for or_comparisons_1: try to simplify the OR of the
1923 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1924 If INVERT is true, invert the value of VAR before doing the OR.
1925 Return NULL_EXPR if we can't simplify this to a single expression. */
1928 or_var_with_comparison (tree var
, bool invert
,
1929 enum tree_code code2
, tree op2a
, tree op2b
)
1932 gimple stmt
= SSA_NAME_DEF_STMT (var
);
1934 /* We can only deal with variables whose definitions are assignments. */
1935 if (!is_gimple_assign (stmt
))
1938 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1939 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1940 Then we only have to consider the simpler non-inverted cases. */
1942 t
= and_var_with_comparison_1 (stmt
,
1943 invert_tree_comparison (code2
, false),
1946 t
= or_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
1947 return canonicalize_bool (t
, invert
);
1950 /* Try to simplify the OR of the ssa variable defined by the assignment
1951 STMT with the comparison specified by (OP2A CODE2 OP2B).
1952 Return NULL_EXPR if we can't simplify this to a single expression. */
1955 or_var_with_comparison_1 (gimple stmt
,
1956 enum tree_code code2
, tree op2a
, tree op2b
)
1958 tree var
= gimple_assign_lhs (stmt
);
1959 tree true_test_var
= NULL_TREE
;
1960 tree false_test_var
= NULL_TREE
;
1961 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
1963 /* Check for identities like (var OR (var != 0)) => true . */
1964 if (TREE_CODE (op2a
) == SSA_NAME
1965 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
1967 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
1968 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
1970 true_test_var
= op2a
;
1971 if (var
== true_test_var
)
1974 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
1975 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
1977 false_test_var
= op2a
;
1978 if (var
== false_test_var
)
1979 return boolean_true_node
;
1983 /* If the definition is a comparison, recurse on it. */
1984 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
1986 tree t
= or_comparisons_1 (innercode
,
1987 gimple_assign_rhs1 (stmt
),
1988 gimple_assign_rhs2 (stmt
),
1996 /* If the definition is an AND or OR expression, we may be able to
1997 simplify by reassociating. */
1998 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
1999 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
2001 tree inner1
= gimple_assign_rhs1 (stmt
);
2002 tree inner2
= gimple_assign_rhs2 (stmt
);
2005 tree partial
= NULL_TREE
;
2006 bool is_or
= (innercode
== BIT_IOR_EXPR
);
2008 /* Check for boolean identities that don't require recursive examination
2010 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2011 inner1 OR (inner1 AND inner2) => inner1
2012 !inner1 OR (inner1 OR inner2) => true
2013 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2015 if (inner1
== true_test_var
)
2016 return (is_or
? var
: inner1
);
2017 else if (inner2
== true_test_var
)
2018 return (is_or
? var
: inner2
);
2019 else if (inner1
== false_test_var
)
2022 : or_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
2023 else if (inner2
== false_test_var
)
2026 : or_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
2028 /* Next, redistribute/reassociate the OR across the inner tests.
2029 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2030 if (TREE_CODE (inner1
) == SSA_NAME
2031 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
2032 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
2033 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
2034 gimple_assign_rhs1 (s
),
2035 gimple_assign_rhs2 (s
),
2036 code2
, op2a
, op2b
)))
2038 /* Handle the OR case, where we are reassociating:
2039 (inner1 OR inner2) OR (op2a code2 op2b)
2041 If the partial result t is a constant, we win. Otherwise
2042 continue on to try reassociating with the other inner test. */
2045 if (integer_onep (t
))
2046 return boolean_true_node
;
2047 else if (integer_zerop (t
))
2051 /* Handle the AND case, where we are redistributing:
2052 (inner1 AND inner2) OR (op2a code2 op2b)
2053 => (t AND (inner2 OR (op2a code op2b))) */
2054 else if (integer_zerop (t
))
2055 return boolean_false_node
;
2057 /* Save partial result for later. */
2061 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2062 if (TREE_CODE (inner2
) == SSA_NAME
2063 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
2064 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
2065 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
2066 gimple_assign_rhs1 (s
),
2067 gimple_assign_rhs2 (s
),
2068 code2
, op2a
, op2b
)))
2070 /* Handle the OR case, where we are reassociating:
2071 (inner1 OR inner2) OR (op2a code2 op2b)
2073 => (t OR partial) */
2076 if (integer_zerop (t
))
2078 else if (integer_onep (t
))
2079 return boolean_true_node
;
2080 /* If both are the same, we can apply the identity
2082 else if (partial
&& same_bool_result_p (t
, partial
))
2086 /* Handle the AND case, where we are redistributing:
2087 (inner1 AND inner2) OR (op2a code2 op2b)
2088 => (t AND (inner1 OR (op2a code2 op2b)))
2089 => (t AND partial) */
2092 if (integer_zerop (t
))
2093 return boolean_false_node
;
2096 /* We already got a simplification for the other
2097 operand to the redistributed AND expression. The
2098 interesting case is when at least one is true.
2099 Or, if both are the same, we can apply the identity
2101 if (integer_onep (partial
))
2103 else if (integer_onep (t
))
2105 else if (same_bool_result_p (t
, partial
))
2114 /* Try to simplify the OR of two comparisons defined by
2115 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2116 If this can be done without constructing an intermediate value,
2117 return the resulting tree; otherwise NULL_TREE is returned.
2118 This function is deliberately asymmetric as it recurses on SSA_DEFs
2119 in the first comparison but not the second. */
2122 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
2123 enum tree_code code2
, tree op2a
, tree op2b
)
2125 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2126 if (operand_equal_p (op1a
, op2a
, 0)
2127 && operand_equal_p (op1b
, op2b
, 0))
2129 /* Result will be either NULL_TREE, or a combined comparison. */
2130 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
2131 TRUTH_ORIF_EXPR
, code1
, code2
,
2132 boolean_type_node
, op1a
, op1b
);
2137 /* Likewise the swapped case of the above. */
2138 if (operand_equal_p (op1a
, op2b
, 0)
2139 && operand_equal_p (op1b
, op2a
, 0))
2141 /* Result will be either NULL_TREE, or a combined comparison. */
2142 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
2143 TRUTH_ORIF_EXPR
, code1
,
2144 swap_tree_comparison (code2
),
2145 boolean_type_node
, op1a
, op1b
);
2150 /* If both comparisons are of the same value against constants, we might
2151 be able to merge them. */
2152 if (operand_equal_p (op1a
, op2a
, 0)
2153 && TREE_CODE (op1b
) == INTEGER_CST
2154 && TREE_CODE (op2b
) == INTEGER_CST
)
2156 int cmp
= tree_int_cst_compare (op1b
, op2b
);
2158 /* If we have (op1a != op1b), we should either be able to
2159 return that or TRUE, depending on whether the constant op1b
2160 also satisfies the other comparison against op2b. */
2161 if (code1
== NE_EXPR
)
2167 case EQ_EXPR
: val
= (cmp
== 0); break;
2168 case NE_EXPR
: val
= (cmp
!= 0); break;
2169 case LT_EXPR
: val
= (cmp
< 0); break;
2170 case GT_EXPR
: val
= (cmp
> 0); break;
2171 case LE_EXPR
: val
= (cmp
<= 0); break;
2172 case GE_EXPR
: val
= (cmp
>= 0); break;
2173 default: done
= false;
2178 return boolean_true_node
;
2180 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2183 /* Likewise if the second comparison is a != comparison. */
2184 else if (code2
== NE_EXPR
)
2190 case EQ_EXPR
: val
= (cmp
== 0); break;
2191 case NE_EXPR
: val
= (cmp
!= 0); break;
2192 case LT_EXPR
: val
= (cmp
> 0); break;
2193 case GT_EXPR
: val
= (cmp
< 0); break;
2194 case LE_EXPR
: val
= (cmp
>= 0); break;
2195 case GE_EXPR
: val
= (cmp
<= 0); break;
2196 default: done
= false;
2201 return boolean_true_node
;
2203 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2207 /* See if an equality test is redundant with the other comparison. */
2208 else if (code1
== EQ_EXPR
)
2213 case EQ_EXPR
: val
= (cmp
== 0); break;
2214 case NE_EXPR
: val
= (cmp
!= 0); break;
2215 case LT_EXPR
: val
= (cmp
< 0); break;
2216 case GT_EXPR
: val
= (cmp
> 0); break;
2217 case LE_EXPR
: val
= (cmp
<= 0); break;
2218 case GE_EXPR
: val
= (cmp
>= 0); break;
2223 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2225 else if (code2
== EQ_EXPR
)
2230 case EQ_EXPR
: val
= (cmp
== 0); break;
2231 case NE_EXPR
: val
= (cmp
!= 0); break;
2232 case LT_EXPR
: val
= (cmp
> 0); break;
2233 case GT_EXPR
: val
= (cmp
< 0); break;
2234 case LE_EXPR
: val
= (cmp
>= 0); break;
2235 case GE_EXPR
: val
= (cmp
<= 0); break;
2240 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2243 /* Chose the less restrictive of two < or <= comparisons. */
2244 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
2245 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
2247 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
2248 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2250 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2253 /* Likewise chose the less restrictive of two > or >= comparisons. */
2254 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
2255 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
2257 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
2258 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
2260 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
2263 /* Check for singleton ranges. */
2265 && ((code1
== LT_EXPR
&& code2
== GT_EXPR
)
2266 || (code1
== GT_EXPR
&& code2
== LT_EXPR
)))
2267 return fold_build2 (NE_EXPR
, boolean_type_node
, op1a
, op2b
);
2269 /* Check for less/greater pairs that don't restrict the range at all. */
2271 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
2272 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
2273 return boolean_true_node
;
2275 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
2276 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
2277 return boolean_true_node
;
2280 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2281 NAME's definition is a truth value. See if there are any simplifications
2282 that can be done against the NAME's definition. */
2283 if (TREE_CODE (op1a
) == SSA_NAME
2284 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
2285 && (integer_zerop (op1b
) || integer_onep (op1b
)))
2287 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
2288 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
2289 gimple stmt
= SSA_NAME_DEF_STMT (op1a
);
2290 switch (gimple_code (stmt
))
2293 /* Try to simplify by copy-propagating the definition. */
2294 return or_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
2297 /* If every argument to the PHI produces the same result when
2298 ORed with the second comparison, we win.
2299 Do not do this unless the type is bool since we need a bool
2300 result here anyway. */
2301 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
2303 tree result
= NULL_TREE
;
2305 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
2307 tree arg
= gimple_phi_arg_def (stmt
, i
);
2309 /* If this PHI has itself as an argument, ignore it.
2310 If all the other args produce the same result,
2312 if (arg
== gimple_phi_result (stmt
))
2314 else if (TREE_CODE (arg
) == INTEGER_CST
)
2316 if (invert
? integer_zerop (arg
) : integer_nonzerop (arg
))
2319 result
= boolean_true_node
;
2320 else if (!integer_onep (result
))
2324 result
= fold_build2 (code2
, boolean_type_node
,
2326 else if (!same_bool_comparison_p (result
,
2330 else if (TREE_CODE (arg
) == SSA_NAME
2331 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
2334 gimple def_stmt
= SSA_NAME_DEF_STMT (arg
);
2335 /* In simple cases we can look through PHI nodes,
2336 but we have to be careful with loops.
2338 if (! dom_info_available_p (CDI_DOMINATORS
)
2339 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
2340 || dominated_by_p (CDI_DOMINATORS
,
2341 gimple_bb (def_stmt
),
2344 temp
= or_var_with_comparison (arg
, invert
, code2
,
2350 else if (!same_bool_result_p (result
, temp
))
2366 /* Try to simplify the OR of two comparisons, specified by
2367 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2368 If this can be simplified to a single expression (without requiring
2369 introducing more SSA variables to hold intermediate values),
2370 return the resulting tree. Otherwise return NULL_TREE.
2371 If the result expression is non-null, it has boolean type. */
2374 maybe_fold_or_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
2375 enum tree_code code2
, tree op2a
, tree op2b
)
2377 tree t
= or_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
2381 return or_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
2385 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2387 Either NULL_TREE, a simplified but non-constant or a constant
2390 ??? This should go into a gimple-fold-inline.h file to be eventually
2391 privatized with the single valueize function used in the various TUs
2392 to avoid the indirect function call overhead. */
/* NOTE(review): this fragment appears to have lines elided by extraction
   (non-contiguous original line numbers); braces and some statements are
   missing.  Comments below annotate only what the visible code shows.  */
2395 gimple_fold_stmt_to_constant_1 (gimple stmt
, tree (*valueize
) (tree
))
2397 location_t loc
= gimple_location (stmt
);
/* Dispatch on the statement kind; visible cases cover assignments
   (by RHS class) and calls.  */
2398 switch (gimple_code (stmt
))
2402 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
2404 switch (get_gimple_rhs_class (subcode
))
2406 case GIMPLE_SINGLE_RHS
:
2408 tree rhs
= gimple_assign_rhs1 (stmt
);
2409 enum tree_code_class kind
= TREE_CODE_CLASS (subcode
);
2411 if (TREE_CODE (rhs
) == SSA_NAME
)
2413 /* If the RHS is an SSA_NAME, return its known constant value,
2415 return (*valueize
) (rhs
);
2417 /* Handle propagating invariant addresses into address
2419 else if (TREE_CODE (rhs
) == ADDR_EXPR
2420 && !is_gimple_min_invariant (rhs
))
2422 HOST_WIDE_INT offset
;
2424 base
= get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs
, 0),
2428 && (CONSTANT_CLASS_P (base
)
2429 || decl_address_invariant_p (base
)))
2430 return build_invariant_address (TREE_TYPE (rhs
),
/* Vector CONSTRUCTOR with one valueized constant per lane can be
   turned into a VECTOR_CST via build_vector below.  */
2433 else if (TREE_CODE (rhs
) == CONSTRUCTOR
2434 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
2435 && (CONSTRUCTOR_NELTS (rhs
)
2436 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
2442 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
2444 val
= (*valueize
) (val
);
2445 if (TREE_CODE (val
) == INTEGER_CST
2446 || TREE_CODE (val
) == REAL_CST
2447 || TREE_CODE (val
) == FIXED_CST
)
2448 list
= tree_cons (NULL_TREE
, val
, list
);
2453 return build_vector (TREE_TYPE (rhs
), nreverse (list
));
/* References: valueize the inner SSA name of VIEW_CONVERT_EXPR /
   REALPART / IMAGPART / BIT_FIELD_REF / MEM_REF wrappers, then fold.  */
2456 if (kind
== tcc_reference
)
2458 if ((TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
2459 || TREE_CODE (rhs
) == REALPART_EXPR
2460 || TREE_CODE (rhs
) == IMAGPART_EXPR
)
2461 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
2463 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
2464 return fold_unary_loc (EXPR_LOCATION (rhs
),
2466 TREE_TYPE (rhs
), val
);
2468 else if (TREE_CODE (rhs
) == BIT_FIELD_REF
2469 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
2471 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
2472 return fold_ternary_loc (EXPR_LOCATION (rhs
),
2474 TREE_TYPE (rhs
), val
,
2475 TREE_OPERAND (rhs
, 1),
2476 TREE_OPERAND (rhs
, 2));
2478 else if (TREE_CODE (rhs
) == MEM_REF
2479 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
2481 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
2482 if (TREE_CODE (val
) == ADDR_EXPR
2483 && is_gimple_min_invariant (val
))
2485 tree tem
= fold_build2 (MEM_REF
, TREE_TYPE (rhs
),
2487 TREE_OPERAND (rhs
, 1));
2492 return fold_const_aggregate_ref_1 (rhs
, valueize
);
2494 else if (kind
== tcc_declaration
)
2495 return get_symbol_constant_value (rhs
);
2499 case GIMPLE_UNARY_RHS
:
2501 /* Handle unary operators that can appear in GIMPLE form.
2502 Note that we know the single operand must be a constant,
2503 so this should almost always return a simplified RHS. */
2504 tree lhs
= gimple_assign_lhs (stmt
);
2505 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
2507 /* Conversions are useless for CCP purposes if they are
2508 value-preserving. Thus the restrictions that
2509 useless_type_conversion_p places for restrict qualification
2510 of pointer types should not apply here.
2511 Substitution later will only substitute to allowed places. */
2512 if (CONVERT_EXPR_CODE_P (subcode
)
2513 && POINTER_TYPE_P (TREE_TYPE (lhs
))
2514 && POINTER_TYPE_P (TREE_TYPE (op0
))
2515 && (TYPE_ADDR_SPACE (TREE_TYPE (lhs
))
2516 == TYPE_ADDR_SPACE (TREE_TYPE (op0
))))
2520 fold_unary_ignore_overflow_loc (loc
, subcode
,
2521 gimple_expr_type (stmt
), op0
);
2524 case GIMPLE_BINARY_RHS
:
2526 /* Handle binary operators that can appear in GIMPLE form. */
2527 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
2528 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
2530 /* Translate &x + CST into an invariant form suitable for
2531 further propagation. */
2532 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
2533 && TREE_CODE (op0
) == ADDR_EXPR
2534 && TREE_CODE (op1
) == INTEGER_CST
)
2536 tree off
= fold_convert (ptr_type_node
, op1
);
2537 return build_fold_addr_expr_loc
2539 fold_build2 (MEM_REF
,
2540 TREE_TYPE (TREE_TYPE (op0
)),
2541 unshare_expr (op0
), off
));
2544 return fold_binary_loc (loc
, subcode
,
2545 gimple_expr_type (stmt
), op0
, op1
);
2548 case GIMPLE_TERNARY_RHS
:
2550 /* Handle ternary operators that can appear in GIMPLE form. */
2551 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
2552 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
2553 tree op2
= (*valueize
) (gimple_assign_rhs3 (stmt
));
2555 return fold_ternary_loc (loc
, subcode
,
2556 gimple_expr_type (stmt
), op0
, op1
, op2
);
/* Call statements: internal calls are not folded; builtin calls are
   rebuilt with valueized arguments and handed to fold_call_expr.  */
2568 if (gimple_call_internal_p (stmt
))
2569 /* No folding yet for these functions. */
2572 fn
= (*valueize
) (gimple_call_fn (stmt
));
2573 if (TREE_CODE (fn
) == ADDR_EXPR
2574 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
2575 && DECL_BUILT_IN (TREE_OPERAND (fn
, 0)))
2577 tree
*args
= XALLOCAVEC (tree
, gimple_call_num_args (stmt
));
2580 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
2581 args
[i
] = (*valueize
) (gimple_call_arg (stmt
, i
));
2582 call
= build_call_array_loc (loc
,
2583 gimple_call_return_type (stmt
),
2584 fn
, gimple_call_num_args (stmt
), args
);
2585 retval
= fold_call_expr (EXPR_LOCATION (call
), call
, false);
2587 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2588 STRIP_NOPS (retval
);
2599 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2600 Returns NULL_TREE if folding to a constant is not possible, otherwise
2601 returns a constant according to is_gimple_min_invariant. */
2604 gimple_fold_stmt_to_constant (gimple stmt
, tree (*valueize
) (tree
))
2606 tree res
= gimple_fold_stmt_to_constant_1 (stmt
, valueize
);
2607 if (res
&& is_gimple_min_invariant (res
))
2613 /* The following set of functions are supposed to fold references using
2614 their constant initializers. */
2616 static tree
fold_ctor_reference (tree type
, tree ctor
,
2617 unsigned HOST_WIDE_INT offset
,
2618 unsigned HOST_WIDE_INT size
);
2620 /* See if we can find constructor defining value of BASE.
2621 When we know the constructor with constant offset (such as
2622 base is array[40] and we do know constructor of array), then
2623 BIT_OFFSET is adjusted accordingly.
2625 As a special case, return error_mark_node when constructor
2626 is not explicitly available, but it is known to be zero
2627 such as 'static const int a;'. */
/* NOTE(review): extraction elided several lines here (non-contiguous
   original numbering); case labels of the switch are not visible.  */
2629 get_base_constructor (tree base
, HOST_WIDE_INT
*bit_offset
,
2630 tree (*valueize
)(tree
))
2632 HOST_WIDE_INT bit_offset2
, size
, max_size
;
/* A MEM_REF with a constant offset folds that offset into
   *BIT_OFFSET and then looks through the (valueized) address.  */
2633 if (TREE_CODE (base
) == MEM_REF
)
2635 if (!integer_zerop (TREE_OPERAND (base
, 1)))
2637 if (!host_integerp (TREE_OPERAND (base
, 1), 0))
2639 *bit_offset
+= (mem_ref_offset (base
).low
2644 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
2645 base
= valueize (TREE_OPERAND (base
, 0));
2646 if (!base
|| TREE_CODE (base
) != ADDR_EXPR
)
2648 base
= TREE_OPERAND (base
, 0);
2651 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2652 DECL_INITIAL. If BASE is a nested reference into another
2653 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2654 the inner reference. */
2655 switch (TREE_CODE (base
))
2658 if (!const_value_known_p (base
))
/* Implicitly zero-initialized static/extern decl: signal the
   "known zero" special case with error_mark_node.  */
2663 if (!DECL_INITIAL (base
)
2664 && (TREE_STATIC (base
) || DECL_EXTERNAL (base
)))
2665 return error_mark_node
;
2666 return DECL_INITIAL (base
);
/* Nested reference: peel one level and recurse, accumulating
   the constant bit offset.  */
2670 base
= get_ref_base_and_extent (base
, &bit_offset2
, &size
, &max_size
);
2671 if (max_size
== -1 || size
!= max_size
)
2673 *bit_offset
+= bit_offset2
;
2674 return get_base_constructor (base
, bit_offset
, valueize
);
2685 /* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2686 to the memory at bit OFFSET.
2688 We do only simple job of folding byte accesses. */
/* NOTE(review): some lines elided by extraction; only single-byte,
   byte-aligned reads whose mode matches the string's element mode
   are handled, per the visible condition.  */
2691 fold_string_cst_ctor_reference (tree type
, tree ctor
,
2692 unsigned HOST_WIDE_INT offset
,
2693 unsigned HOST_WIDE_INT size
)
2695 if (INTEGRAL_TYPE_P (type
)
2696 && (TYPE_MODE (type
)
2697 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor
))))
2698 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor
))))
2700 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor
)))) == 1
2701 && size
== BITS_PER_UNIT
2702 && !(offset
% BITS_PER_UNIT
))
2704 offset
/= BITS_PER_UNIT
;
2705 if (offset
< (unsigned HOST_WIDE_INT
) TREE_STRING_LENGTH (ctor
))
2706 return build_int_cst_type (type
, (TREE_STRING_POINTER (ctor
)
2709 const char a[20]="hello";
2712 might lead to offset greater than string length. In this case we
2713 know value is either initialized to 0 or out of bounds. Return 0
2715 return build_zero_cst (type
);
2720 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
2721 SIZE to the memory at bit OFFSET. */
/* NOTE(review): lines elided by extraction (non-contiguous numbering);
   annotations below describe only the visible code.  */
2724 fold_array_ctor_reference (tree type
, tree ctor
,
2725 unsigned HOST_WIDE_INT offset
,
2726 unsigned HOST_WIDE_INT size
)
2728 unsigned HOST_WIDE_INT cnt
;
2730 double_int low_bound
, elt_size
;
2731 double_int index
, max_index
;
2732 double_int access_index
;
2733 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (ctor
));
2734 HOST_WIDE_INT inner_offset
;
2736 /* Compute low bound and elt size. */
2737 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
2739 /* Static constructors for variably sized objects makes no sense. */
2740 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type
)) == INTEGER_CST
);
2741 low_bound
= tree_to_double_int (TYPE_MIN_VALUE (domain_type
));
2744 low_bound
= double_int_zero
;
2745 /* Static constructors for variably sized objects makes no sense. */
2746 gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))))
2749 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))));
2752 /* We can handle only constantly sized accesses that are known to not
2753 be larger than size of array element. */
2754 if (!TYPE_SIZE_UNIT (type
)
2755 || TREE_CODE (TYPE_SIZE_UNIT (type
)) != INTEGER_CST
2756 || double_int_cmp (elt_size
,
2757 tree_to_double_int (TYPE_SIZE_UNIT (type
)), 0) < 0)
2760 /* Compute the array index we look for. */
2761 access_index
= double_int_udiv (uhwi_to_double_int (offset
/ BITS_PER_UNIT
),
2762 elt_size
, TRUNC_DIV_EXPR
);
2763 access_index
= double_int_add (access_index
, low_bound
);
2765 /* And offset within the access. */
2766 inner_offset
= offset
% (double_int_to_uhwi (elt_size
) * BITS_PER_UNIT
);
2768 /* See if the array field is large enough to span whole access. We do not
2769 care to fold accesses spanning multiple array indexes. */
2770 if (inner_offset
+ size
> double_int_to_uhwi (elt_size
) * BITS_PER_UNIT
)
/* Start one before LOW_BOUND so the "no explicit index" case below
   (increment by one) yields the first element's index.  */
2773 index
= double_int_sub (low_bound
, double_int_one
);
2774 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
, cval
)
2776 /* Array constructor might explicitly set index, or specify range
2777 or leave index NULL meaning that it is next index after previous
2781 if (TREE_CODE (cfield
) == INTEGER_CST
)
2782 max_index
= index
= tree_to_double_int (cfield
);
2785 gcc_assert (TREE_CODE (cfield
) == RANGE_EXPR
);
2786 index
= tree_to_double_int (TREE_OPERAND (cfield
, 0));
2787 max_index
= tree_to_double_int (TREE_OPERAND (cfield
, 1));
2791 max_index
= index
= double_int_add (index
, double_int_one
);
2793 /* Do we have match? */
2794 if (double_int_cmp (access_index
, index
, 1) >= 0
2795 && double_int_cmp (access_index
, max_index
, 1) <= 0)
2796 return fold_ctor_reference (type
, cval
, inner_offset
, size
);
2798 /* When memory is not explicitly mentioned in constructor,
2799 it is 0 (or out of range). */
2800 return build_zero_cst (type
);
2803 /* CTOR is CONSTRUCTOR of an aggregate or vector.
2804 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
/* NOTE(review): lines elided by extraction (non-contiguous numbering);
   annotations describe only the visible code.  */
2807 fold_nonarray_ctor_reference (tree type
, tree ctor
,
2808 unsigned HOST_WIDE_INT offset
,
2809 unsigned HOST_WIDE_INT size
)
2811 unsigned HOST_WIDE_INT cnt
;
/* Walk all fields of the constructor looking for one overlapping
   the bit range [OFFSET, OFFSET+SIZE).  */
2814 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
,
2817 tree byte_offset
= DECL_FIELD_OFFSET (cfield
);
2818 tree field_offset
= DECL_FIELD_BIT_OFFSET (cfield
);
2819 tree field_size
= DECL_SIZE (cfield
);
2820 double_int bitoffset
;
2821 double_int byte_offset_cst
= tree_to_double_int (byte_offset
);
2822 double_int bits_per_unit_cst
= uhwi_to_double_int (BITS_PER_UNIT
);
2823 double_int bitoffset_end
, access_end
;
2825 /* Variable sized objects in static constructors makes no sense,
2826 but field_size can be NULL for flexible array members. */
2827 gcc_assert (TREE_CODE (field_offset
) == INTEGER_CST
2828 && TREE_CODE (byte_offset
) == INTEGER_CST
2829 && (field_size
!= NULL_TREE
2830 ? TREE_CODE (field_size
) == INTEGER_CST
2831 : TREE_CODE (TREE_TYPE (cfield
)) == ARRAY_TYPE
));
2833 /* Compute bit offset of the field. */
2834 bitoffset
= double_int_add (tree_to_double_int (field_offset
),
2835 double_int_mul (byte_offset_cst
,
2836 bits_per_unit_cst
));
2837 /* Compute bit offset where the field ends. */
2838 if (field_size
!= NULL_TREE
)
2839 bitoffset_end
= double_int_add (bitoffset
,
2840 tree_to_double_int (field_size
));
2842 bitoffset_end
= double_int_zero
;
2844 access_end
= double_int_add (uhwi_to_double_int (offset
),
2845 uhwi_to_double_int (size
));
2847 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2848 [BITOFFSET, BITOFFSET_END)? */
2849 if (double_int_cmp (access_end
, bitoffset
, 0) > 0
2850 && (field_size
== NULL_TREE
2851 || double_int_cmp (uhwi_to_double_int (offset
),
2852 bitoffset_end
, 0) < 0))
2854 double_int inner_offset
= double_int_sub (uhwi_to_double_int (offset
),
2856 /* We do have overlap. Now see if field is large enough to
2857 cover the access. Give up for accesses spanning multiple
2859 if (double_int_cmp (access_end
, bitoffset_end
, 0) > 0)
2861 if (double_int_cmp (uhwi_to_double_int (offset
), bitoffset
, 0) < 0)
/* Recurse into the matched field's value at the field-relative
   offset.  */
2863 return fold_ctor_reference (type
, cval
,
2864 double_int_to_uhwi (inner_offset
), size
);
2867 /* When memory is not explicitly mentioned in constructor, it is 0. */
2868 return build_zero_cst (type
);
2871 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
2872 to the memory at bit OFFSET. */
/* NOTE(review): lines elided by extraction; this is the central
   dispatcher of the ctor-folding helpers above.  */
2875 fold_ctor_reference (tree type
, tree ctor
, unsigned HOST_WIDE_INT offset
,
2876 unsigned HOST_WIDE_INT size
)
2880 /* We found the field with exact match. */
2881 if (useless_type_conversion_p (type
, TREE_TYPE (ctor
))
2883 return canonicalize_constructor_val (ctor
);
2885 /* We are at the end of walk, see if we can view convert the
2887 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor
)) && !offset
2888 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2889 && operand_equal_p (TYPE_SIZE (type
),
2890 TYPE_SIZE (TREE_TYPE (ctor
)), 0))
2892 ret
= canonicalize_constructor_val (ctor
);
2893 ret
= fold_unary (VIEW_CONVERT_EXPR
, type
, ret
);
/* Otherwise dispatch on the kind of initializer: string constant,
   array constructor, or record/union/vector constructor.  */
2898 if (TREE_CODE (ctor
) == STRING_CST
)
2899 return fold_string_cst_ctor_reference (type
, ctor
, offset
, size
);
2900 if (TREE_CODE (ctor
) == CONSTRUCTOR
)
2903 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
)
2904 return fold_array_ctor_reference (type
, ctor
, offset
, size
);
2906 return fold_nonarray_ctor_reference (type
, ctor
, offset
, size
);
2912 /* Return the tree representing the element referenced by T if T is an
2913 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
2914 names using VALUEIZE. Return NULL_TREE otherwise. */
/* NOTE(review): lines elided by extraction (non-contiguous numbering);
   several case labels and early returns are not visible.  */
2917 fold_const_aggregate_ref_1 (tree t
, tree (*valueize
) (tree
))
2919 tree ctor
, idx
, base
;
2920 HOST_WIDE_INT offset
, size
, max_size
;
/* Never fold volatile accesses.  */
2923 if (TREE_THIS_VOLATILE (t
))
2926 if (TREE_CODE_CLASS (TREE_CODE (t
)) == tcc_declaration
)
2927 return get_symbol_constant_value (t
);
2929 tem
= fold_read_from_constant_string (t
);
2933 switch (TREE_CODE (t
))
2936 case ARRAY_RANGE_REF
:
2937 /* Constant indexes are handled well by get_base_constructor.
2938 Only special case variable offsets.
2939 FIXME: This code can't handle nested references with variable indexes
2940 (they will be handled only by iteration of ccp). Perhaps we can bring
2941 get_ref_base_and_extent here and make it use a valueize callback. */
2942 if (TREE_CODE (TREE_OPERAND (t
, 1)) == SSA_NAME
2944 && (idx
= (*valueize
) (TREE_OPERAND (t
, 1)))
2945 && host_integerp (idx
, 0))
2947 tree low_bound
, unit_size
;
2949 /* If the resulting bit-offset is constant, track it. */
2950 if ((low_bound
= array_ref_low_bound (t
),
2951 host_integerp (low_bound
, 0))
2952 && (unit_size
= array_ref_element_size (t
),
2953 host_integerp (unit_size
, 1)))
/* Bit offset = (idx - low_bound) * unit_size * BITS_PER_UNIT.  */
2955 offset
= TREE_INT_CST_LOW (idx
);
2956 offset
-= TREE_INT_CST_LOW (low_bound
);
2957 offset
*= TREE_INT_CST_LOW (unit_size
);
2958 offset
*= BITS_PER_UNIT
;
2960 base
= TREE_OPERAND (t
, 0);
2961 ctor
= get_base_constructor (base
, &offset
, valueize
);
2962 /* Empty constructor. Always fold to 0. */
2963 if (ctor
== error_mark_node
)
2964 return build_zero_cst (TREE_TYPE (t
));
2965 /* Out of bound array access. Value is undefined,
2969 /* We can not determine ctor. */
2972 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
,
2973 TREE_INT_CST_LOW (unit_size
)
2981 case TARGET_MEM_REF
:
2983 base
= get_ref_base_and_extent (t
, &offset
, &size
, &max_size
);
2984 ctor
= get_base_constructor (base
, &offset
, valueize
);
2986 /* Empty constructor. Always fold to 0. */
2987 if (ctor
== error_mark_node
)
2988 return build_zero_cst (TREE_TYPE (t
));
2989 /* We do not know precise address. */
2990 if (max_size
== -1 || max_size
!= size
)
2992 /* We can not determine ctor. */
2996 /* Out of bound array access. Value is undefined, but don't fold. */
3000 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
, size
);
/* REALPART/IMAGPART-style case: recurse on the operand and fold the
   part selection on a COMPLEX_CST result.  */
3005 tree c
= fold_const_aggregate_ref_1 (TREE_OPERAND (t
, 0), valueize
);
3006 if (c
&& TREE_CODE (c
) == COMPLEX_CST
)
3007 return fold_build1_loc (EXPR_LOCATION (t
),
3008 TREE_CODE (t
), TREE_TYPE (t
), c
);
3020 fold_const_aggregate_ref (tree t
)
3022 return fold_const_aggregate_ref_1 (t
, NULL
);
3025 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3026 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3027 KNOWN_BINFO carries the binfo describing the true type of
3028 OBJ_TYPE_REF_OBJECT(REF). */
/* NOTE(review): lines elided by extraction; the failure returns between
   the visible checks are not shown.  */
3031 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token
, tree known_binfo
)
3033 unsigned HOST_WIDE_INT offset
, size
;
3036 v
= BINFO_VTABLE (known_binfo
);
3037 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
/* BINFO_VTABLE may be &vtable + byte-offset; fold the offset into
   OFFSET (in bits) and strip down to the vtable VAR_DECL.  */
3041 if (TREE_CODE (v
) == POINTER_PLUS_EXPR
)
3043 offset
= tree_low_cst (TREE_OPERAND (v
, 1), 1) * BITS_PER_UNIT
;
3044 v
= TREE_OPERAND (v
, 0);
3049 if (TREE_CODE (v
) != ADDR_EXPR
)
3051 v
= TREE_OPERAND (v
, 0);
3053 if (TREE_CODE (v
) != VAR_DECL
3054 || !DECL_VIRTUAL_P (v
)
3055 || !DECL_INITIAL (v
)
3056 || DECL_INITIAL (v
) == error_mark_node
)
3058 gcc_checking_assert (TREE_CODE (TREE_TYPE (v
)) == ARRAY_TYPE
);
/* Index the vtable array: each slot is SIZE bits; TOKEN selects
   the slot whose initializer is the target function's address.  */
3059 size
= tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v
))), 1);
3060 offset
+= token
* size
;
3061 fn
= fold_ctor_reference (TREE_TYPE (TREE_TYPE (v
)), DECL_INITIAL (v
),
3065 gcc_assert (TREE_CODE (fn
) == ADDR_EXPR
3066 || TREE_CODE (fn
) == FDESC_EXPR
);
3067 fn
= TREE_OPERAND (fn
, 0);
3068 gcc_assert (TREE_CODE (fn
) == FUNCTION_DECL
);
3070 /* When cgraph node is missing and function is not public, we cannot
3071 devirtualize. This can happen in WHOPR when the actual method
3072 ends up in other partition, because we found devirtualization
3073 possibility too late. */
3074 if (!can_refer_decl_in_current_unit_p (fn
))
3080 /* Return true iff VAL is a gimple expression that is known to be
3081 non-negative. Restricted to floating-point inputs. */
3084 gimple_val_nonnegative_real_p (tree val
)
3088 gcc_assert (val
&& SCALAR_FLOAT_TYPE_P (TREE_TYPE (val
)));
3090 /* Use existing logic for non-gimple trees. */
3091 if (tree_expr_nonnegative_p (val
))
3094 if (TREE_CODE (val
) != SSA_NAME
)
3097 /* Currently we look only at the immediately defining statement
3098 to make this determination, since recursion on defining
3099 statements of operands can lead to quadratic behavior in the
3100 worst case. This is expected to catch almost all occurrences
3101 in practice. It would be possible to implement limited-depth
3102 recursion if important cases are lost. Alternatively, passes
3103 that need this information (such as the pow/powi lowering code
3104 in the cse_sincos pass) could be revised to provide it through
3105 dataflow propagation. */
3107 def_stmt
= SSA_NAME_DEF_STMT (val
);
3109 if (is_gimple_assign (def_stmt
))
3113 /* See fold-const.c:tree_expr_nonnegative_p for additional
3114 cases that could be handled with recursion. */
3116 switch (gimple_assign_rhs_code (def_stmt
))
3119 /* Always true for floating-point operands. */
3123 /* True if the two operands are identical (since we are
3124 restricted to floating-point inputs). */
3125 op0
= gimple_assign_rhs1 (def_stmt
);
3126 op1
= gimple_assign_rhs2 (def_stmt
);
3129 || operand_equal_p (op0
, op1
, 0))
3136 else if (is_gimple_call (def_stmt
))
3138 tree fndecl
= gimple_call_fndecl (def_stmt
);
3140 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
3144 switch (DECL_FUNCTION_CODE (fndecl
))
3146 CASE_FLT_FN (BUILT_IN_ACOS
):
3147 CASE_FLT_FN (BUILT_IN_ACOSH
):
3148 CASE_FLT_FN (BUILT_IN_CABS
):
3149 CASE_FLT_FN (BUILT_IN_COSH
):
3150 CASE_FLT_FN (BUILT_IN_ERFC
):
3151 CASE_FLT_FN (BUILT_IN_EXP
):
3152 CASE_FLT_FN (BUILT_IN_EXP10
):
3153 CASE_FLT_FN (BUILT_IN_EXP2
):
3154 CASE_FLT_FN (BUILT_IN_FABS
):
3155 CASE_FLT_FN (BUILT_IN_FDIM
):
3156 CASE_FLT_FN (BUILT_IN_HYPOT
):
3157 CASE_FLT_FN (BUILT_IN_POW10
):
3160 CASE_FLT_FN (BUILT_IN_SQRT
):
3161 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3162 nonnegative inputs. */
3163 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val
))))
3168 CASE_FLT_FN (BUILT_IN_POWI
):
3169 /* True if the second argument is an even integer. */
3170 arg1
= gimple_call_arg (def_stmt
, 1);
3172 if (TREE_CODE (arg1
) == INTEGER_CST
3173 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
3178 CASE_FLT_FN (BUILT_IN_POW
):
3179 /* True if the second argument is an even integer-valued
3181 arg1
= gimple_call_arg (def_stmt
, 1);
3183 if (TREE_CODE (arg1
) == REAL_CST
)
3188 c
= TREE_REAL_CST (arg1
);
3189 n
= real_to_integer (&c
);
3193 REAL_VALUE_TYPE cint
;
3194 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
3195 if (real_identical (&c
, &cint
))