1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2020 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
45 #include "coretypes.h"
54 #include "tree-ssa-operands.h"
55 #include "optabs-query.h"
57 #include "diagnostic-core.h"
60 #include "fold-const.h"
61 #include "fold-const-call.h"
62 #include "stor-layout.h"
64 #include "tree-iterator.h"
67 #include "langhooks.h"
72 #include "generic-match.h"
73 #include "gimple-fold.h"
74 #include "tree-into-ssa.h"
76 #include "case-cfn-macros.h"
77 #include "stringpool.h"
79 #include "tree-ssanames.h"
81 #include "stringpool.h"
83 #include "tree-vector-builder.h"
84 #include "vec-perm-indices.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
90 /* The following constants represent a bit based encoding of GCC's
91 comparison operators. This encoding simplifies transformations
92 on relational comparison operators, such as AND and OR. */
93 enum comparison_code
{
112 static bool negate_expr_p (tree
);
113 static tree
negate_expr (tree
);
114 static tree
associate_trees (location_t
, tree
, tree
, enum tree_code
, tree
);
115 static enum comparison_code
comparison_to_compcode (enum tree_code
);
116 static enum tree_code
compcode_to_comparison (enum comparison_code
);
117 static bool twoval_comparison_p (tree
, tree
*, tree
*);
118 static tree
eval_subst (location_t
, tree
, tree
, tree
, tree
, tree
);
119 static tree
optimize_bit_field_compare (location_t
, enum tree_code
,
121 static bool simple_operand_p (const_tree
);
122 static bool simple_operand_p_2 (tree
);
123 static tree
range_binop (enum tree_code
, tree
, tree
, int, tree
, int);
124 static tree
range_predecessor (tree
);
125 static tree
range_successor (tree
);
126 static tree
fold_range_test (location_t
, enum tree_code
, tree
, tree
, tree
);
127 static tree
fold_cond_expr_with_comparison (location_t
, tree
, tree
, tree
, tree
);
128 static tree
unextend (tree
, int, int, tree
);
129 static tree
extract_muldiv (tree
, tree
, enum tree_code
, tree
, bool *);
130 static tree
extract_muldiv_1 (tree
, tree
, enum tree_code
, tree
, bool *);
131 static tree
fold_binary_op_with_conditional_arg (location_t
,
132 enum tree_code
, tree
,
135 static tree
fold_negate_const (tree
, tree
);
136 static tree
fold_not_const (const_tree
, tree
);
137 static tree
fold_relational_const (enum tree_code
, tree
, tree
, tree
);
138 static tree
fold_convert_const (enum tree_code
, tree
, tree
);
139 static tree
fold_view_convert_expr (tree
, tree
);
140 static tree
fold_negate_expr (location_t
, tree
);
143 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
144 Otherwise, return LOC. */
147 expr_location_or (tree t
, location_t loc
)
149 location_t tloc
= EXPR_LOCATION (t
);
150 return tloc
== UNKNOWN_LOCATION
? loc
: tloc
;
153 /* Similar to protected_set_expr_location, but never modify x in place,
154 if location can and needs to be set, unshare it. */
157 protected_set_expr_location_unshare (tree x
, location_t loc
)
159 if (CAN_HAVE_LOCATION_P (x
)
160 && EXPR_LOCATION (x
) != loc
161 && !(TREE_CODE (x
) == SAVE_EXPR
162 || TREE_CODE (x
) == TARGET_EXPR
163 || TREE_CODE (x
) == BIND_EXPR
))
166 SET_EXPR_LOCATION (x
, loc
);
171 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
172 division and returns the quotient. Otherwise returns
176 div_if_zero_remainder (const_tree arg1
, const_tree arg2
)
180 if (wi::multiple_of_p (wi::to_widest (arg1
), wi::to_widest (arg2
),
182 return wide_int_to_tree (TREE_TYPE (arg1
), quo
);
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;
/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;
205 /* If a warning about undefined overflow is deferred, this is the
206 level at which the warning should be emitted. */
208 static enum warn_strict_overflow_code fold_deferred_overflow_code
;
210 /* Start deferring overflow warnings. We could use a stack here to
211 permit nested calls, but at present it is not necessary. */
214 fold_defer_overflow_warnings (void)
216 ++fold_deferring_overflow_warnings
;
219 /* Stop deferring overflow warnings. If there is a pending warning,
220 and ISSUE is true, then issue the warning if appropriate. STMT is
221 the statement with which the warning should be associated (used for
222 location information); STMT may be NULL. CODE is the level of the
223 warning--a warn_strict_overflow_code value. This function will use
224 the smaller of CODE and the deferred code when deciding whether to
225 issue the warning. CODE may be zero to mean to always use the
229 fold_undefer_overflow_warnings (bool issue
, const gimple
*stmt
, int code
)
234 gcc_assert (fold_deferring_overflow_warnings
> 0);
235 --fold_deferring_overflow_warnings
;
236 if (fold_deferring_overflow_warnings
> 0)
238 if (fold_deferred_overflow_warning
!= NULL
240 && code
< (int) fold_deferred_overflow_code
)
241 fold_deferred_overflow_code
= (enum warn_strict_overflow_code
) code
;
245 warnmsg
= fold_deferred_overflow_warning
;
246 fold_deferred_overflow_warning
= NULL
;
248 if (!issue
|| warnmsg
== NULL
)
251 if (gimple_no_warning_p (stmt
))
254 /* Use the smallest code level when deciding to issue the
256 if (code
== 0 || code
> (int) fold_deferred_overflow_code
)
257 code
= fold_deferred_overflow_code
;
259 if (!issue_strict_overflow_warning (code
))
263 locus
= input_location
;
265 locus
= gimple_location (stmt
);
266 warning_at (locus
, OPT_Wstrict_overflow
, "%s", warnmsg
);
269 /* Stop deferring overflow warnings, ignoring any deferred
273 fold_undefer_and_ignore_overflow_warnings (void)
275 fold_undefer_overflow_warnings (false, NULL
, 0);
278 /* Whether we are deferring overflow warnings. */
281 fold_deferring_overflow_warnings_p (void)
283 return fold_deferring_overflow_warnings
> 0;
286 /* This is called when we fold something based on the fact that signed
287 overflow is undefined. */
290 fold_overflow_warning (const char* gmsgid
, enum warn_strict_overflow_code wc
)
292 if (fold_deferring_overflow_warnings
> 0)
294 if (fold_deferred_overflow_warning
== NULL
295 || wc
< fold_deferred_overflow_code
)
297 fold_deferred_overflow_warning
= gmsgid
;
298 fold_deferred_overflow_code
= wc
;
301 else if (issue_strict_overflow_warning (wc
))
302 warning (OPT_Wstrict_overflow
, gmsgid
);
305 /* Return true if the built-in mathematical function specified by CODE
306 is odd, i.e. -f(x) == f(-x). */
309 negate_mathfn_p (combined_fn fn
)
332 CASE_CFN_ROUNDEVEN_FN
:
344 return !flag_rounding_math
;
352 /* Check whether we may negate an integer constant T without causing
356 may_negate_without_overflow_p (const_tree t
)
360 gcc_assert (TREE_CODE (t
) == INTEGER_CST
);
362 type
= TREE_TYPE (t
);
363 if (TYPE_UNSIGNED (type
))
366 return !wi::only_sign_bit_p (wi::to_wide (t
));
369 /* Determine whether an expression T can be cheaply negated using
370 the function negate_expr without introducing undefined overflow. */
373 negate_expr_p (tree t
)
380 type
= TREE_TYPE (t
);
383 switch (TREE_CODE (t
))
386 if (INTEGRAL_TYPE_P (type
) && TYPE_UNSIGNED (type
))
389 /* Check that -CST will not overflow type. */
390 return may_negate_without_overflow_p (t
);
392 return (INTEGRAL_TYPE_P (type
)
393 && TYPE_OVERFLOW_WRAPS (type
));
399 return !TYPE_OVERFLOW_SANITIZED (type
);
402 /* We want to canonicalize to positive real constants. Pretend
403 that only negative ones can be easily negated. */
404 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
407 return negate_expr_p (TREE_REALPART (t
))
408 && negate_expr_p (TREE_IMAGPART (t
));
412 if (FLOAT_TYPE_P (TREE_TYPE (type
)) || TYPE_OVERFLOW_WRAPS (type
))
415 /* Steps don't prevent negation. */
416 unsigned int count
= vector_cst_encoded_nelts (t
);
417 for (unsigned int i
= 0; i
< count
; ++i
)
418 if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t
, i
)))
425 return negate_expr_p (TREE_OPERAND (t
, 0))
426 && negate_expr_p (TREE_OPERAND (t
, 1));
429 return negate_expr_p (TREE_OPERAND (t
, 0));
432 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
433 || HONOR_SIGNED_ZEROS (element_mode (type
))
434 || (ANY_INTEGRAL_TYPE_P (type
)
435 && ! TYPE_OVERFLOW_WRAPS (type
)))
437 /* -(A + B) -> (-B) - A. */
438 if (negate_expr_p (TREE_OPERAND (t
, 1)))
440 /* -(A + B) -> (-A) - B. */
441 return negate_expr_p (TREE_OPERAND (t
, 0));
444 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
445 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
446 && !HONOR_SIGNED_ZEROS (element_mode (type
))
447 && (! ANY_INTEGRAL_TYPE_P (type
)
448 || TYPE_OVERFLOW_WRAPS (type
));
451 if (TYPE_UNSIGNED (type
))
453 /* INT_MIN/n * n doesn't overflow while negating one operand it does
454 if n is a (negative) power of two. */
455 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
456 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t
))
457 && ! ((TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
459 (wi::abs (wi::to_wide (TREE_OPERAND (t
, 0))))) != 1)
460 || (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
462 (wi::abs (wi::to_wide (TREE_OPERAND (t
, 1))))) != 1)))
468 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t
))))
469 return negate_expr_p (TREE_OPERAND (t
, 1))
470 || negate_expr_p (TREE_OPERAND (t
, 0));
476 if (TYPE_UNSIGNED (type
))
478 /* In general we can't negate A in A / B, because if A is INT_MIN and
479 B is not 1 we change the sign of the result. */
480 if (TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
481 && negate_expr_p (TREE_OPERAND (t
, 0)))
483 /* In general we can't negate B in A / B, because if A is INT_MIN and
484 B is 1, we may turn this into INT_MIN / -1 which is undefined
485 and actually traps on some architectures. */
486 if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t
))
487 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t
))
488 || (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
489 && ! integer_onep (TREE_OPERAND (t
, 1))))
490 return negate_expr_p (TREE_OPERAND (t
, 1));
494 /* Negate -((double)float) as (double)(-float). */
495 if (TREE_CODE (type
) == REAL_TYPE
)
497 tree tem
= strip_float_extensions (t
);
499 return negate_expr_p (tem
);
504 /* Negate -f(x) as f(-x). */
505 if (negate_mathfn_p (get_call_combined_fn (t
)))
506 return negate_expr_p (CALL_EXPR_ARG (t
, 0));
510 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
511 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
513 tree op1
= TREE_OPERAND (t
, 1);
514 if (wi::to_wide (op1
) == element_precision (type
) - 1)
525 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
526 simplification is possible.
527 If negate_expr_p would return true for T, NULL_TREE will never be
531 fold_negate_expr_1 (location_t loc
, tree t
)
533 tree type
= TREE_TYPE (t
);
536 switch (TREE_CODE (t
))
538 /* Convert - (~A) to A + 1. */
540 if (INTEGRAL_TYPE_P (type
))
541 return fold_build2_loc (loc
, PLUS_EXPR
, type
, TREE_OPERAND (t
, 0),
542 build_one_cst (type
));
546 tem
= fold_negate_const (t
, type
);
547 if (TREE_OVERFLOW (tem
) == TREE_OVERFLOW (t
)
548 || (ANY_INTEGRAL_TYPE_P (type
)
549 && !TYPE_OVERFLOW_TRAPS (type
)
550 && TYPE_OVERFLOW_WRAPS (type
))
551 || (flag_sanitize
& SANITIZE_SI_OVERFLOW
) == 0)
558 tem
= fold_negate_const (t
, type
);
563 tree rpart
= fold_negate_expr (loc
, TREE_REALPART (t
));
564 tree ipart
= fold_negate_expr (loc
, TREE_IMAGPART (t
));
566 return build_complex (type
, rpart
, ipart
);
572 tree_vector_builder elts
;
573 elts
.new_unary_operation (type
, t
, true);
574 unsigned int count
= elts
.encoded_nelts ();
575 for (unsigned int i
= 0; i
< count
; ++i
)
577 tree elt
= fold_negate_expr (loc
, VECTOR_CST_ELT (t
, i
));
578 if (elt
== NULL_TREE
)
580 elts
.quick_push (elt
);
583 return elts
.build ();
587 if (negate_expr_p (t
))
588 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
589 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)),
590 fold_negate_expr (loc
, TREE_OPERAND (t
, 1)));
594 if (negate_expr_p (t
))
595 return fold_build1_loc (loc
, CONJ_EXPR
, type
,
596 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)));
600 if (!TYPE_OVERFLOW_SANITIZED (type
))
601 return TREE_OPERAND (t
, 0);
605 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
606 && !HONOR_SIGNED_ZEROS (element_mode (type
)))
608 /* -(A + B) -> (-B) - A. */
609 if (negate_expr_p (TREE_OPERAND (t
, 1)))
611 tem
= negate_expr (TREE_OPERAND (t
, 1));
612 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
613 tem
, TREE_OPERAND (t
, 0));
616 /* -(A + B) -> (-A) - B. */
617 if (negate_expr_p (TREE_OPERAND (t
, 0)))
619 tem
= negate_expr (TREE_OPERAND (t
, 0));
620 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
621 tem
, TREE_OPERAND (t
, 1));
627 /* - (A - B) -> B - A */
628 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
))
629 && !HONOR_SIGNED_ZEROS (element_mode (type
)))
630 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
631 TREE_OPERAND (t
, 1), TREE_OPERAND (t
, 0));
635 if (TYPE_UNSIGNED (type
))
641 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type
)))
643 tem
= TREE_OPERAND (t
, 1);
644 if (negate_expr_p (tem
))
645 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
646 TREE_OPERAND (t
, 0), negate_expr (tem
));
647 tem
= TREE_OPERAND (t
, 0);
648 if (negate_expr_p (tem
))
649 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
650 negate_expr (tem
), TREE_OPERAND (t
, 1));
657 if (TYPE_UNSIGNED (type
))
659 /* In general we can't negate A in A / B, because if A is INT_MIN and
660 B is not 1 we change the sign of the result. */
661 if (TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
662 && negate_expr_p (TREE_OPERAND (t
, 0)))
663 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
664 negate_expr (TREE_OPERAND (t
, 0)),
665 TREE_OPERAND (t
, 1));
666 /* In general we can't negate B in A / B, because if A is INT_MIN and
667 B is 1, we may turn this into INT_MIN / -1 which is undefined
668 and actually traps on some architectures. */
669 if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t
))
670 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t
))
671 || (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
672 && ! integer_onep (TREE_OPERAND (t
, 1))))
673 && negate_expr_p (TREE_OPERAND (t
, 1)))
674 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
676 negate_expr (TREE_OPERAND (t
, 1)));
680 /* Convert -((double)float) into (double)(-float). */
681 if (TREE_CODE (type
) == REAL_TYPE
)
683 tem
= strip_float_extensions (t
);
684 if (tem
!= t
&& negate_expr_p (tem
))
685 return fold_convert_loc (loc
, type
, negate_expr (tem
));
690 /* Negate -f(x) as f(-x). */
691 if (negate_mathfn_p (get_call_combined_fn (t
))
692 && negate_expr_p (CALL_EXPR_ARG (t
, 0)))
696 fndecl
= get_callee_fndecl (t
);
697 arg
= negate_expr (CALL_EXPR_ARG (t
, 0));
698 return build_call_expr_loc (loc
, fndecl
, 1, arg
);
703 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
704 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
706 tree op1
= TREE_OPERAND (t
, 1);
707 if (wi::to_wide (op1
) == element_precision (type
) - 1)
709 tree ntype
= TYPE_UNSIGNED (type
)
710 ? signed_type_for (type
)
711 : unsigned_type_for (type
);
712 tree temp
= fold_convert_loc (loc
, ntype
, TREE_OPERAND (t
, 0));
713 temp
= fold_build2_loc (loc
, RSHIFT_EXPR
, ntype
, temp
, op1
);
714 return fold_convert_loc (loc
, type
, temp
);
726 /* A wrapper for fold_negate_expr_1. */
729 fold_negate_expr (location_t loc
, tree t
)
731 tree type
= TREE_TYPE (t
);
733 tree tem
= fold_negate_expr_1 (loc
, t
);
734 if (tem
== NULL_TREE
)
736 return fold_convert_loc (loc
, type
, tem
);
739 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
740 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
752 loc
= EXPR_LOCATION (t
);
753 type
= TREE_TYPE (t
);
756 tem
= fold_negate_expr (loc
, t
);
758 tem
= build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (t
), t
);
759 return fold_convert_loc (loc
, type
, tem
);
762 /* Split a tree IN into a constant, literal and variable parts that could be
763 combined with CODE to make IN. "constant" means an expression with
764 TREE_CONSTANT but that isn't an actual constant. CODE must be a
765 commutative arithmetic operation. Store the constant part into *CONP,
766 the literal in *LITP and return the variable part. If a part isn't
767 present, set it to null. If the tree does not decompose in this way,
768 return the entire tree as the variable part and the other parts as null.
770 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
771 case, we negate an operand that was subtracted. Except if it is a
772 literal for which we use *MINUS_LITP instead.
774 If NEGATE_P is true, we are negating all of IN, again except a literal
775 for which we use *MINUS_LITP instead. If a variable part is of pointer
776 type, it is negated after converting to TYPE. This prevents us from
777 generating illegal MINUS pointer expression. LOC is the location of
778 the converted variable part.
780 If IN is itself a literal or constant, return it as appropriate.
782 Note that we do not guarantee that any of the three values will be the
783 same type as IN, but they will have the same signedness and mode. */
786 split_tree (tree in
, tree type
, enum tree_code code
,
787 tree
*minus_varp
, tree
*conp
, tree
*minus_conp
,
788 tree
*litp
, tree
*minus_litp
, int negate_p
)
797 /* Strip any conversions that don't change the machine mode or signedness. */
798 STRIP_SIGN_NOPS (in
);
800 if (TREE_CODE (in
) == INTEGER_CST
|| TREE_CODE (in
) == REAL_CST
801 || TREE_CODE (in
) == FIXED_CST
)
803 else if (TREE_CODE (in
) == code
804 || ((! FLOAT_TYPE_P (TREE_TYPE (in
)) || flag_associative_math
)
805 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in
))
806 /* We can associate addition and subtraction together (even
807 though the C standard doesn't say so) for integers because
808 the value is not affected. For reals, the value might be
809 affected, so we can't. */
810 && ((code
== PLUS_EXPR
&& TREE_CODE (in
) == POINTER_PLUS_EXPR
)
811 || (code
== PLUS_EXPR
&& TREE_CODE (in
) == MINUS_EXPR
)
812 || (code
== MINUS_EXPR
813 && (TREE_CODE (in
) == PLUS_EXPR
814 || TREE_CODE (in
) == POINTER_PLUS_EXPR
)))))
816 tree op0
= TREE_OPERAND (in
, 0);
817 tree op1
= TREE_OPERAND (in
, 1);
818 int neg1_p
= TREE_CODE (in
) == MINUS_EXPR
;
819 int neg_litp_p
= 0, neg_conp_p
= 0, neg_var_p
= 0;
821 /* First see if either of the operands is a literal, then a constant. */
822 if (TREE_CODE (op0
) == INTEGER_CST
|| TREE_CODE (op0
) == REAL_CST
823 || TREE_CODE (op0
) == FIXED_CST
)
824 *litp
= op0
, op0
= 0;
825 else if (TREE_CODE (op1
) == INTEGER_CST
|| TREE_CODE (op1
) == REAL_CST
826 || TREE_CODE (op1
) == FIXED_CST
)
827 *litp
= op1
, neg_litp_p
= neg1_p
, op1
= 0;
829 if (op0
!= 0 && TREE_CONSTANT (op0
))
830 *conp
= op0
, op0
= 0;
831 else if (op1
!= 0 && TREE_CONSTANT (op1
))
832 *conp
= op1
, neg_conp_p
= neg1_p
, op1
= 0;
834 /* If we haven't dealt with either operand, this is not a case we can
835 decompose. Otherwise, VAR is either of the ones remaining, if any. */
836 if (op0
!= 0 && op1
!= 0)
841 var
= op1
, neg_var_p
= neg1_p
;
843 /* Now do any needed negations. */
845 *minus_litp
= *litp
, *litp
= 0;
846 if (neg_conp_p
&& *conp
)
847 *minus_conp
= *conp
, *conp
= 0;
848 if (neg_var_p
&& var
)
849 *minus_varp
= var
, var
= 0;
851 else if (TREE_CONSTANT (in
))
853 else if (TREE_CODE (in
) == BIT_NOT_EXPR
854 && code
== PLUS_EXPR
)
856 /* -1 - X is folded to ~X, undo that here. Do _not_ do this
857 when IN is constant. */
858 *litp
= build_minus_one_cst (type
);
859 *minus_varp
= TREE_OPERAND (in
, 0);
867 *minus_litp
= *litp
, *litp
= 0;
868 else if (*minus_litp
)
869 *litp
= *minus_litp
, *minus_litp
= 0;
871 *minus_conp
= *conp
, *conp
= 0;
872 else if (*minus_conp
)
873 *conp
= *minus_conp
, *minus_conp
= 0;
875 *minus_varp
= var
, var
= 0;
876 else if (*minus_varp
)
877 var
= *minus_varp
, *minus_varp
= 0;
881 && TREE_OVERFLOW_P (*litp
))
882 *litp
= drop_tree_overflow (*litp
);
884 && TREE_OVERFLOW_P (*minus_litp
))
885 *minus_litp
= drop_tree_overflow (*minus_litp
);
890 /* Re-associate trees split by the above function. T1 and T2 are
891 either expressions to associate or null. Return the new
892 expression, if any. LOC is the location of the new expression. If
893 we build an operation, do it in TYPE and with CODE. */
896 associate_trees (location_t loc
, tree t1
, tree t2
, enum tree_code code
, tree type
)
900 gcc_assert (t2
== 0 || code
!= MINUS_EXPR
);
906 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
907 try to fold this since we will have infinite recursion. But do
908 deal with any NEGATE_EXPRs. */
909 if (TREE_CODE (t1
) == code
|| TREE_CODE (t2
) == code
910 || TREE_CODE (t1
) == PLUS_EXPR
|| TREE_CODE (t2
) == PLUS_EXPR
911 || TREE_CODE (t1
) == MINUS_EXPR
|| TREE_CODE (t2
) == MINUS_EXPR
)
913 if (code
== PLUS_EXPR
)
915 if (TREE_CODE (t1
) == NEGATE_EXPR
)
916 return build2_loc (loc
, MINUS_EXPR
, type
,
917 fold_convert_loc (loc
, type
, t2
),
918 fold_convert_loc (loc
, type
,
919 TREE_OPERAND (t1
, 0)));
920 else if (TREE_CODE (t2
) == NEGATE_EXPR
)
921 return build2_loc (loc
, MINUS_EXPR
, type
,
922 fold_convert_loc (loc
, type
, t1
),
923 fold_convert_loc (loc
, type
,
924 TREE_OPERAND (t2
, 0)));
925 else if (integer_zerop (t2
))
926 return fold_convert_loc (loc
, type
, t1
);
928 else if (code
== MINUS_EXPR
)
930 if (integer_zerop (t2
))
931 return fold_convert_loc (loc
, type
, t1
);
934 return build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
935 fold_convert_loc (loc
, type
, t2
));
938 return fold_build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
939 fold_convert_loc (loc
, type
, t2
));
942 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
943 for use in int_const_binop, size_binop and size_diffop. */
946 int_binop_types_match_p (enum tree_code code
, const_tree type1
, const_tree type2
)
948 if (!INTEGRAL_TYPE_P (type1
) && !POINTER_TYPE_P (type1
))
950 if (!INTEGRAL_TYPE_P (type2
) && !POINTER_TYPE_P (type2
))
965 return TYPE_UNSIGNED (type1
) == TYPE_UNSIGNED (type2
)
966 && TYPE_PRECISION (type1
) == TYPE_PRECISION (type2
)
967 && TYPE_MODE (type1
) == TYPE_MODE (type2
);
970 /* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
971 a new constant in RES. Return FALSE if we don't know how to
972 evaluate CODE at compile-time. */
975 wide_int_binop (wide_int
&res
,
976 enum tree_code code
, const wide_int
&arg1
, const wide_int
&arg2
,
977 signop sign
, wi::overflow_type
*overflow
)
980 *overflow
= wi::OVF_NONE
;
984 res
= wi::bit_or (arg1
, arg2
);
988 res
= wi::bit_xor (arg1
, arg2
);
992 res
= wi::bit_and (arg1
, arg2
);
997 if (wi::neg_p (arg2
))
1000 if (code
== RSHIFT_EXPR
)
1008 if (code
== RSHIFT_EXPR
)
1009 /* It's unclear from the C standard whether shifts can overflow.
1010 The following code ignores overflow; perhaps a C standard
1011 interpretation ruling is needed. */
1012 res
= wi::rshift (arg1
, tmp
, sign
);
1014 res
= wi::lshift (arg1
, tmp
);
1019 if (wi::neg_p (arg2
))
1022 if (code
== RROTATE_EXPR
)
1023 code
= LROTATE_EXPR
;
1025 code
= RROTATE_EXPR
;
1030 if (code
== RROTATE_EXPR
)
1031 res
= wi::rrotate (arg1
, tmp
);
1033 res
= wi::lrotate (arg1
, tmp
);
1037 res
= wi::add (arg1
, arg2
, sign
, overflow
);
1041 res
= wi::sub (arg1
, arg2
, sign
, overflow
);
1045 res
= wi::mul (arg1
, arg2
, sign
, overflow
);
1048 case MULT_HIGHPART_EXPR
:
1049 res
= wi::mul_high (arg1
, arg2
, sign
);
1052 case TRUNC_DIV_EXPR
:
1053 case EXACT_DIV_EXPR
:
1056 res
= wi::div_trunc (arg1
, arg2
, sign
, overflow
);
1059 case FLOOR_DIV_EXPR
:
1062 res
= wi::div_floor (arg1
, arg2
, sign
, overflow
);
1068 res
= wi::div_ceil (arg1
, arg2
, sign
, overflow
);
1071 case ROUND_DIV_EXPR
:
1074 res
= wi::div_round (arg1
, arg2
, sign
, overflow
);
1077 case TRUNC_MOD_EXPR
:
1080 res
= wi::mod_trunc (arg1
, arg2
, sign
, overflow
);
1083 case FLOOR_MOD_EXPR
:
1086 res
= wi::mod_floor (arg1
, arg2
, sign
, overflow
);
1092 res
= wi::mod_ceil (arg1
, arg2
, sign
, overflow
);
1095 case ROUND_MOD_EXPR
:
1098 res
= wi::mod_round (arg1
, arg2
, sign
, overflow
);
1102 res
= wi::min (arg1
, arg2
, sign
);
1106 res
= wi::max (arg1
, arg2
, sign
);
1115 /* Combine two poly int's ARG1 and ARG2 under operation CODE to
1116 produce a new constant in RES. Return FALSE if we don't know how
1117 to evaluate CODE at compile-time. */
1120 poly_int_binop (poly_wide_int
&res
, enum tree_code code
,
1121 const_tree arg1
, const_tree arg2
,
1122 signop sign
, wi::overflow_type
*overflow
)
1124 gcc_assert (NUM_POLY_INT_COEFFS
!= 1);
1125 gcc_assert (poly_int_tree_p (arg1
) && poly_int_tree_p (arg2
));
1129 res
= wi::add (wi::to_poly_wide (arg1
),
1130 wi::to_poly_wide (arg2
), sign
, overflow
);
1134 res
= wi::sub (wi::to_poly_wide (arg1
),
1135 wi::to_poly_wide (arg2
), sign
, overflow
);
1139 if (TREE_CODE (arg2
) == INTEGER_CST
)
1140 res
= wi::mul (wi::to_poly_wide (arg1
),
1141 wi::to_wide (arg2
), sign
, overflow
);
1142 else if (TREE_CODE (arg1
) == INTEGER_CST
)
1143 res
= wi::mul (wi::to_poly_wide (arg2
),
1144 wi::to_wide (arg1
), sign
, overflow
);
1150 if (TREE_CODE (arg2
) == INTEGER_CST
)
1151 res
= wi::to_poly_wide (arg1
) << wi::to_wide (arg2
);
1157 if (TREE_CODE (arg2
) != INTEGER_CST
1158 || !can_ior_p (wi::to_poly_wide (arg1
), wi::to_wide (arg2
),
1169 /* Combine two integer constants ARG1 and ARG2 under operation CODE to
1170 produce a new constant. Return NULL_TREE if we don't know how to
1171 evaluate CODE at compile-time. */
1174 int_const_binop (enum tree_code code
, const_tree arg1
, const_tree arg2
,
1177 poly_wide_int poly_res
;
1178 tree type
= TREE_TYPE (arg1
);
1179 signop sign
= TYPE_SIGN (type
);
1180 wi::overflow_type overflow
= wi::OVF_NONE
;
1182 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg2
) == INTEGER_CST
)
1184 wide_int warg1
= wi::to_wide (arg1
), res
;
1185 wide_int warg2
= wi::to_wide (arg2
, TYPE_PRECISION (type
));
1186 if (!wide_int_binop (res
, code
, warg1
, warg2
, sign
, &overflow
))
1190 else if (!poly_int_tree_p (arg1
)
1191 || !poly_int_tree_p (arg2
)
1192 || !poly_int_binop (poly_res
, code
, arg1
, arg2
, sign
, &overflow
))
1194 return force_fit_type (type
, poly_res
, overflowable
,
1195 (((sign
== SIGNED
|| overflowable
== -1)
1197 | TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
)));
1200 /* Return true if binary operation OP distributes over addition in operand
1201 OPNO, with the other operand being held constant. OPNO counts from 1. */
1204 distributes_over_addition_p (tree_code op
, int opno
)
1221 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1222 constant. We assume ARG1 and ARG2 have the same data type, or at least
1223 are the same kind of constant and the same machine mode. Return zero if
1224 combining the constants is not allowed in the current operating mode. */
1227 const_binop (enum tree_code code
, tree arg1
, tree arg2
)
1229 /* Sanity check for the recursive cases. */
1236 if (poly_int_tree_p (arg1
) && poly_int_tree_p (arg2
))
1238 if (code
== POINTER_PLUS_EXPR
)
1239 return int_const_binop (PLUS_EXPR
,
1240 arg1
, fold_convert (TREE_TYPE (arg1
), arg2
));
1242 return int_const_binop (code
, arg1
, arg2
);
1245 if (TREE_CODE (arg1
) == REAL_CST
&& TREE_CODE (arg2
) == REAL_CST
)
1250 REAL_VALUE_TYPE value
;
1251 REAL_VALUE_TYPE result
;
1255 /* The following codes are handled by real_arithmetic. */
1270 d1
= TREE_REAL_CST (arg1
);
1271 d2
= TREE_REAL_CST (arg2
);
1273 type
= TREE_TYPE (arg1
);
1274 mode
= TYPE_MODE (type
);
1276 /* Don't perform operation if we honor signaling NaNs and
1277 either operand is a signaling NaN. */
1278 if (HONOR_SNANS (mode
)
1279 && (REAL_VALUE_ISSIGNALING_NAN (d1
)
1280 || REAL_VALUE_ISSIGNALING_NAN (d2
)))
1283 /* Don't perform operation if it would raise a division
1284 by zero exception. */
1285 if (code
== RDIV_EXPR
1286 && real_equal (&d2
, &dconst0
)
1287 && (flag_trapping_math
|| ! MODE_HAS_INFINITIES (mode
)))
1290 /* If either operand is a NaN, just return it. Otherwise, set up
1291 for floating-point trap; we return an overflow. */
1292 if (REAL_VALUE_ISNAN (d1
))
1294 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1297 t
= build_real (type
, d1
);
1300 else if (REAL_VALUE_ISNAN (d2
))
1302 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1305 t
= build_real (type
, d2
);
1309 inexact
= real_arithmetic (&value
, code
, &d1
, &d2
);
1310 real_convert (&result
, mode
, &value
);
1312 /* Don't constant fold this floating point operation if
1313 both operands are not NaN but the result is NaN, and
1314 flag_trapping_math. Such operations should raise an
1315 invalid operation exception. */
1316 if (flag_trapping_math
1317 && MODE_HAS_NANS (mode
)
1318 && REAL_VALUE_ISNAN (result
)
1319 && !REAL_VALUE_ISNAN (d1
)
1320 && !REAL_VALUE_ISNAN (d2
))
1323 /* Don't constant fold this floating point operation if
1324 the result has overflowed and flag_trapping_math. */
1325 if (flag_trapping_math
1326 && MODE_HAS_INFINITIES (mode
)
1327 && REAL_VALUE_ISINF (result
)
1328 && !REAL_VALUE_ISINF (d1
)
1329 && !REAL_VALUE_ISINF (d2
))
1332 /* Don't constant fold this floating point operation if the
1333 result may dependent upon the run-time rounding mode and
1334 flag_rounding_math is set, or if GCC's software emulation
1335 is unable to accurately represent the result. */
1336 if ((flag_rounding_math
1337 || (MODE_COMPOSITE_P (mode
) && !flag_unsafe_math_optimizations
))
1338 && (inexact
|| !real_identical (&result
, &value
)))
1341 t
= build_real (type
, result
);
1343 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
);
1347 if (TREE_CODE (arg1
) == FIXED_CST
)
1349 FIXED_VALUE_TYPE f1
;
1350 FIXED_VALUE_TYPE f2
;
1351 FIXED_VALUE_TYPE result
;
1356 /* The following codes are handled by fixed_arithmetic. */
1362 case TRUNC_DIV_EXPR
:
1363 if (TREE_CODE (arg2
) != FIXED_CST
)
1365 f2
= TREE_FIXED_CST (arg2
);
1371 if (TREE_CODE (arg2
) != INTEGER_CST
)
1373 wi::tree_to_wide_ref w2
= wi::to_wide (arg2
);
1374 f2
.data
.high
= w2
.elt (1);
1375 f2
.data
.low
= w2
.ulow ();
1384 f1
= TREE_FIXED_CST (arg1
);
1385 type
= TREE_TYPE (arg1
);
1386 sat_p
= TYPE_SATURATING (type
);
1387 overflow_p
= fixed_arithmetic (&result
, code
, &f1
, &f2
, sat_p
);
1388 t
= build_fixed (type
, result
);
1389 /* Propagate overflow flags. */
1390 if (overflow_p
| TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
))
1391 TREE_OVERFLOW (t
) = 1;
1395 if (TREE_CODE (arg1
) == COMPLEX_CST
&& TREE_CODE (arg2
) == COMPLEX_CST
)
1397 tree type
= TREE_TYPE (arg1
);
1398 tree r1
= TREE_REALPART (arg1
);
1399 tree i1
= TREE_IMAGPART (arg1
);
1400 tree r2
= TREE_REALPART (arg2
);
1401 tree i2
= TREE_IMAGPART (arg2
);
1408 real
= const_binop (code
, r1
, r2
);
1409 imag
= const_binop (code
, i1
, i2
);
1413 if (COMPLEX_FLOAT_TYPE_P (type
))
1414 return do_mpc_arg2 (arg1
, arg2
, type
,
1415 /* do_nonfinite= */ folding_initializer
,
1418 real
= const_binop (MINUS_EXPR
,
1419 const_binop (MULT_EXPR
, r1
, r2
),
1420 const_binop (MULT_EXPR
, i1
, i2
));
1421 imag
= const_binop (PLUS_EXPR
,
1422 const_binop (MULT_EXPR
, r1
, i2
),
1423 const_binop (MULT_EXPR
, i1
, r2
));
1427 if (COMPLEX_FLOAT_TYPE_P (type
))
1428 return do_mpc_arg2 (arg1
, arg2
, type
,
1429 /* do_nonfinite= */ folding_initializer
,
1432 case TRUNC_DIV_EXPR
:
1434 case FLOOR_DIV_EXPR
:
1435 case ROUND_DIV_EXPR
:
1436 if (flag_complex_method
== 0)
1438 /* Keep this algorithm in sync with
1439 tree-complex.c:expand_complex_div_straight().
1441 Expand complex division to scalars, straightforward algorithm.
1442 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1446 = const_binop (PLUS_EXPR
,
1447 const_binop (MULT_EXPR
, r2
, r2
),
1448 const_binop (MULT_EXPR
, i2
, i2
));
1450 = const_binop (PLUS_EXPR
,
1451 const_binop (MULT_EXPR
, r1
, r2
),
1452 const_binop (MULT_EXPR
, i1
, i2
));
1454 = const_binop (MINUS_EXPR
,
1455 const_binop (MULT_EXPR
, i1
, r2
),
1456 const_binop (MULT_EXPR
, r1
, i2
));
1458 real
= const_binop (code
, t1
, magsquared
);
1459 imag
= const_binop (code
, t2
, magsquared
);
1463 /* Keep this algorithm in sync with
1464 tree-complex.c:expand_complex_div_wide().
1466 Expand complex division to scalars, modified algorithm to minimize
1467 overflow with wide input ranges. */
1468 tree compare
= fold_build2 (LT_EXPR
, boolean_type_node
,
1469 fold_abs_const (r2
, TREE_TYPE (type
)),
1470 fold_abs_const (i2
, TREE_TYPE (type
)));
1472 if (integer_nonzerop (compare
))
1474 /* In the TRUE branch, we compute
1476 div = (br * ratio) + bi;
1477 tr = (ar * ratio) + ai;
1478 ti = (ai * ratio) - ar;
1481 tree ratio
= const_binop (code
, r2
, i2
);
1482 tree div
= const_binop (PLUS_EXPR
, i2
,
1483 const_binop (MULT_EXPR
, r2
, ratio
));
1484 real
= const_binop (MULT_EXPR
, r1
, ratio
);
1485 real
= const_binop (PLUS_EXPR
, real
, i1
);
1486 real
= const_binop (code
, real
, div
);
1488 imag
= const_binop (MULT_EXPR
, i1
, ratio
);
1489 imag
= const_binop (MINUS_EXPR
, imag
, r1
);
1490 imag
= const_binop (code
, imag
, div
);
1494 /* In the FALSE branch, we compute
1496 divisor = (d * ratio) + c;
1497 tr = (b * ratio) + a;
1498 ti = b - (a * ratio);
1501 tree ratio
= const_binop (code
, i2
, r2
);
1502 tree div
= const_binop (PLUS_EXPR
, r2
,
1503 const_binop (MULT_EXPR
, i2
, ratio
));
1505 real
= const_binop (MULT_EXPR
, i1
, ratio
);
1506 real
= const_binop (PLUS_EXPR
, real
, r1
);
1507 real
= const_binop (code
, real
, div
);
1509 imag
= const_binop (MULT_EXPR
, r1
, ratio
);
1510 imag
= const_binop (MINUS_EXPR
, i1
, imag
);
1511 imag
= const_binop (code
, imag
, div
);
1521 return build_complex (type
, real
, imag
);
1524 if (TREE_CODE (arg1
) == VECTOR_CST
1525 && TREE_CODE (arg2
) == VECTOR_CST
1526 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)),
1527 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2
))))
1529 tree type
= TREE_TYPE (arg1
);
1531 if (VECTOR_CST_STEPPED_P (arg1
)
1532 && VECTOR_CST_STEPPED_P (arg2
))
1533 /* We can operate directly on the encoding if:
1535 a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
1537 (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)
1539 Addition and subtraction are the supported operators
1540 for which this is true. */
1541 step_ok_p
= (code
== PLUS_EXPR
|| code
== MINUS_EXPR
);
1542 else if (VECTOR_CST_STEPPED_P (arg1
))
1543 /* We can operate directly on stepped encodings if:
1547 (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)
1549 which is true if (x -> x op c) distributes over addition. */
1550 step_ok_p
= distributes_over_addition_p (code
, 1);
1552 /* Similarly in reverse. */
1553 step_ok_p
= distributes_over_addition_p (code
, 2);
1554 tree_vector_builder elts
;
1555 if (!elts
.new_binary_operation (type
, arg1
, arg2
, step_ok_p
))
1557 unsigned int count
= elts
.encoded_nelts ();
1558 for (unsigned int i
= 0; i
< count
; ++i
)
1560 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1561 tree elem2
= VECTOR_CST_ELT (arg2
, i
);
1563 tree elt
= const_binop (code
, elem1
, elem2
);
1565 /* It is possible that const_binop cannot handle the given
1566 code and return NULL_TREE */
1567 if (elt
== NULL_TREE
)
1569 elts
.quick_push (elt
);
1572 return elts
.build ();
1575 /* Shifts allow a scalar offset for a vector. */
1576 if (TREE_CODE (arg1
) == VECTOR_CST
1577 && TREE_CODE (arg2
) == INTEGER_CST
)
1579 tree type
= TREE_TYPE (arg1
);
1580 bool step_ok_p
= distributes_over_addition_p (code
, 1);
1581 tree_vector_builder elts
;
1582 if (!elts
.new_unary_operation (type
, arg1
, step_ok_p
))
1584 unsigned int count
= elts
.encoded_nelts ();
1585 for (unsigned int i
= 0; i
< count
; ++i
)
1587 tree elem1
= VECTOR_CST_ELT (arg1
, i
);
1589 tree elt
= const_binop (code
, elem1
, arg2
);
1591 /* It is possible that const_binop cannot handle the given
1592 code and return NULL_TREE. */
1593 if (elt
== NULL_TREE
)
1595 elts
.quick_push (elt
);
1598 return elts
.build ();
1603 /* Overload that adds a TYPE parameter to be able to dispatch
1604 to fold_relational_const. */
1607 const_binop (enum tree_code code
, tree type
, tree arg1
, tree arg2
)
1609 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
1610 return fold_relational_const (code
, type
, arg1
, arg2
);
1612 /* ??? Until we make the const_binop worker take the type of the
1613 result as argument put those cases that need it here. */
1616 case VEC_SERIES_EXPR
:
1617 if (CONSTANT_CLASS_P (arg1
)
1618 && CONSTANT_CLASS_P (arg2
))
1619 return build_vec_series (type
, arg1
, arg2
);
1623 if ((TREE_CODE (arg1
) == REAL_CST
1624 && TREE_CODE (arg2
) == REAL_CST
)
1625 || (TREE_CODE (arg1
) == INTEGER_CST
1626 && TREE_CODE (arg2
) == INTEGER_CST
))
1627 return build_complex (type
, arg1
, arg2
);
1630 case POINTER_DIFF_EXPR
:
1631 if (poly_int_tree_p (arg1
) && poly_int_tree_p (arg2
))
1633 poly_offset_int res
= (wi::to_poly_offset (arg1
)
1634 - wi::to_poly_offset (arg2
));
1635 return force_fit_type (type
, res
, 1,
1636 TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
));
1640 case VEC_PACK_TRUNC_EXPR
:
1641 case VEC_PACK_FIX_TRUNC_EXPR
:
1642 case VEC_PACK_FLOAT_EXPR
:
1644 unsigned int HOST_WIDE_INT out_nelts
, in_nelts
, i
;
1646 if (TREE_CODE (arg1
) != VECTOR_CST
1647 || TREE_CODE (arg2
) != VECTOR_CST
)
1650 if (!VECTOR_CST_NELTS (arg1
).is_constant (&in_nelts
))
1653 out_nelts
= in_nelts
* 2;
1654 gcc_assert (known_eq (in_nelts
, VECTOR_CST_NELTS (arg2
))
1655 && known_eq (out_nelts
, TYPE_VECTOR_SUBPARTS (type
)));
1657 tree_vector_builder
elts (type
, out_nelts
, 1);
1658 for (i
= 0; i
< out_nelts
; i
++)
1660 tree elt
= (i
< in_nelts
1661 ? VECTOR_CST_ELT (arg1
, i
)
1662 : VECTOR_CST_ELT (arg2
, i
- in_nelts
));
1663 elt
= fold_convert_const (code
== VEC_PACK_TRUNC_EXPR
1665 : code
== VEC_PACK_FLOAT_EXPR
1666 ? FLOAT_EXPR
: FIX_TRUNC_EXPR
,
1667 TREE_TYPE (type
), elt
);
1668 if (elt
== NULL_TREE
|| !CONSTANT_CLASS_P (elt
))
1670 elts
.quick_push (elt
);
1673 return elts
.build ();
1676 case VEC_WIDEN_MULT_LO_EXPR
:
1677 case VEC_WIDEN_MULT_HI_EXPR
:
1678 case VEC_WIDEN_MULT_EVEN_EXPR
:
1679 case VEC_WIDEN_MULT_ODD_EXPR
:
1681 unsigned HOST_WIDE_INT out_nelts
, in_nelts
, out
, ofs
, scale
;
1683 if (TREE_CODE (arg1
) != VECTOR_CST
|| TREE_CODE (arg2
) != VECTOR_CST
)
1686 if (!VECTOR_CST_NELTS (arg1
).is_constant (&in_nelts
))
1688 out_nelts
= in_nelts
/ 2;
1689 gcc_assert (known_eq (in_nelts
, VECTOR_CST_NELTS (arg2
))
1690 && known_eq (out_nelts
, TYPE_VECTOR_SUBPARTS (type
)));
1692 if (code
== VEC_WIDEN_MULT_LO_EXPR
)
1693 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? out_nelts
: 0;
1694 else if (code
== VEC_WIDEN_MULT_HI_EXPR
)
1695 scale
= 0, ofs
= BYTES_BIG_ENDIAN
? 0 : out_nelts
;
1696 else if (code
== VEC_WIDEN_MULT_EVEN_EXPR
)
1698 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1701 tree_vector_builder
elts (type
, out_nelts
, 1);
1702 for (out
= 0; out
< out_nelts
; out
++)
1704 unsigned int in
= (out
<< scale
) + ofs
;
1705 tree t1
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
),
1706 VECTOR_CST_ELT (arg1
, in
));
1707 tree t2
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
),
1708 VECTOR_CST_ELT (arg2
, in
));
1710 if (t1
== NULL_TREE
|| t2
== NULL_TREE
)
1712 tree elt
= const_binop (MULT_EXPR
, t1
, t2
);
1713 if (elt
== NULL_TREE
|| !CONSTANT_CLASS_P (elt
))
1715 elts
.quick_push (elt
);
1718 return elts
.build ();
1724 if (TREE_CODE_CLASS (code
) != tcc_binary
)
1727 /* Make sure type and arg0 have the same saturating flag. */
1728 gcc_checking_assert (TYPE_SATURATING (type
)
1729 == TYPE_SATURATING (TREE_TYPE (arg1
)));
1731 return const_binop (code
, arg1
, arg2
);
1734 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1735 Return zero if computing the constants is not possible. */
1738 const_unop (enum tree_code code
, tree type
, tree arg0
)
1740 /* Don't perform the operation, other than NEGATE and ABS, if
1741 flag_signaling_nans is on and the operand is a signaling NaN. */
1742 if (TREE_CODE (arg0
) == REAL_CST
1743 && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
1744 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0
))
1745 && code
!= NEGATE_EXPR
1747 && code
!= ABSU_EXPR
)
1754 case FIX_TRUNC_EXPR
:
1755 case FIXED_CONVERT_EXPR
:
1756 return fold_convert_const (code
, type
, arg0
);
1758 case ADDR_SPACE_CONVERT_EXPR
:
1759 /* If the source address is 0, and the source address space
1760 cannot have a valid object at 0, fold to dest type null. */
1761 if (integer_zerop (arg0
)
1762 && !(targetm
.addr_space
.zero_address_valid
1763 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0
))))))
1764 return fold_convert_const (code
, type
, arg0
);
1767 case VIEW_CONVERT_EXPR
:
1768 return fold_view_convert_expr (type
, arg0
);
1772 /* Can't call fold_negate_const directly here as that doesn't
1773 handle all cases and we might not be able to negate some
1775 tree tem
= fold_negate_expr (UNKNOWN_LOCATION
, arg0
);
1776 if (tem
&& CONSTANT_CLASS_P (tem
))
1783 if (TREE_CODE (arg0
) == INTEGER_CST
|| TREE_CODE (arg0
) == REAL_CST
)
1784 return fold_abs_const (arg0
, type
);
1788 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1790 tree ipart
= fold_negate_const (TREE_IMAGPART (arg0
),
1792 return build_complex (type
, TREE_REALPART (arg0
), ipart
);
1797 if (TREE_CODE (arg0
) == INTEGER_CST
)
1798 return fold_not_const (arg0
, type
);
1799 else if (POLY_INT_CST_P (arg0
))
1800 return wide_int_to_tree (type
, -poly_int_cst_value (arg0
));
1801 /* Perform BIT_NOT_EXPR on each element individually. */
1802 else if (TREE_CODE (arg0
) == VECTOR_CST
)
1806 /* This can cope with stepped encodings because ~x == -1 - x. */
1807 tree_vector_builder elements
;
1808 elements
.new_unary_operation (type
, arg0
, true);
1809 unsigned int i
, count
= elements
.encoded_nelts ();
1810 for (i
= 0; i
< count
; ++i
)
1812 elem
= VECTOR_CST_ELT (arg0
, i
);
1813 elem
= const_unop (BIT_NOT_EXPR
, TREE_TYPE (type
), elem
);
1814 if (elem
== NULL_TREE
)
1816 elements
.quick_push (elem
);
1819 return elements
.build ();
1823 case TRUTH_NOT_EXPR
:
1824 if (TREE_CODE (arg0
) == INTEGER_CST
)
1825 return constant_boolean_node (integer_zerop (arg0
), type
);
1829 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1830 return fold_convert (type
, TREE_REALPART (arg0
));
1834 if (TREE_CODE (arg0
) == COMPLEX_CST
)
1835 return fold_convert (type
, TREE_IMAGPART (arg0
));
1838 case VEC_UNPACK_LO_EXPR
:
1839 case VEC_UNPACK_HI_EXPR
:
1840 case VEC_UNPACK_FLOAT_LO_EXPR
:
1841 case VEC_UNPACK_FLOAT_HI_EXPR
:
1842 case VEC_UNPACK_FIX_TRUNC_LO_EXPR
:
1843 case VEC_UNPACK_FIX_TRUNC_HI_EXPR
:
1845 unsigned HOST_WIDE_INT out_nelts
, in_nelts
, i
;
1846 enum tree_code subcode
;
1848 if (TREE_CODE (arg0
) != VECTOR_CST
)
1851 if (!VECTOR_CST_NELTS (arg0
).is_constant (&in_nelts
))
1853 out_nelts
= in_nelts
/ 2;
1854 gcc_assert (known_eq (out_nelts
, TYPE_VECTOR_SUBPARTS (type
)));
1856 unsigned int offset
= 0;
1857 if ((!BYTES_BIG_ENDIAN
) ^ (code
== VEC_UNPACK_LO_EXPR
1858 || code
== VEC_UNPACK_FLOAT_LO_EXPR
1859 || code
== VEC_UNPACK_FIX_TRUNC_LO_EXPR
))
1862 if (code
== VEC_UNPACK_LO_EXPR
|| code
== VEC_UNPACK_HI_EXPR
)
1864 else if (code
== VEC_UNPACK_FLOAT_LO_EXPR
1865 || code
== VEC_UNPACK_FLOAT_HI_EXPR
)
1866 subcode
= FLOAT_EXPR
;
1868 subcode
= FIX_TRUNC_EXPR
;
1870 tree_vector_builder
elts (type
, out_nelts
, 1);
1871 for (i
= 0; i
< out_nelts
; i
++)
1873 tree elt
= fold_convert_const (subcode
, TREE_TYPE (type
),
1874 VECTOR_CST_ELT (arg0
, i
+ offset
));
1875 if (elt
== NULL_TREE
|| !CONSTANT_CLASS_P (elt
))
1877 elts
.quick_push (elt
);
1880 return elts
.build ();
1883 case VEC_DUPLICATE_EXPR
:
1884 if (CONSTANT_CLASS_P (arg0
))
1885 return build_vector_from_val (type
, arg0
);
1895 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1896 indicates which particular sizetype to create. */
1899 size_int_kind (poly_int64 number
, enum size_type_kind kind
)
1901 return build_int_cst (sizetype_tab
[(int) kind
], number
);
1904 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1905 is a tree code. The type of the result is taken from the operands.
1906 Both must be equivalent integer types, ala int_binop_types_match_p.
1907 If the operands are constant, so is the result. */
1910 size_binop_loc (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
)
1912 tree type
= TREE_TYPE (arg0
);
1914 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
1915 return error_mark_node
;
1917 gcc_assert (int_binop_types_match_p (code
, TREE_TYPE (arg0
),
1920 /* Handle the special case of two poly_int constants faster. */
1921 if (poly_int_tree_p (arg0
) && poly_int_tree_p (arg1
))
1923 /* And some specific cases even faster than that. */
1924 if (code
== PLUS_EXPR
)
1926 if (integer_zerop (arg0
)
1927 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0
)))
1929 if (integer_zerop (arg1
)
1930 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1
)))
1933 else if (code
== MINUS_EXPR
)
1935 if (integer_zerop (arg1
)
1936 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1
)))
1939 else if (code
== MULT_EXPR
)
1941 if (integer_onep (arg0
)
1942 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0
)))
1946 /* Handle general case of two integer constants. For sizetype
1947 constant calculations we always want to know about overflow,
1948 even in the unsigned case. */
1949 tree res
= int_const_binop (code
, arg0
, arg1
, -1);
1950 if (res
!= NULL_TREE
)
1954 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
1957 /* Given two values, either both of sizetype or both of bitsizetype,
1958 compute the difference between the two values. Return the value
1959 in signed type corresponding to the type of the operands. */
1962 size_diffop_loc (location_t loc
, tree arg0
, tree arg1
)
1964 tree type
= TREE_TYPE (arg0
);
1967 gcc_assert (int_binop_types_match_p (MINUS_EXPR
, TREE_TYPE (arg0
),
1970 /* If the type is already signed, just do the simple thing. */
1971 if (!TYPE_UNSIGNED (type
))
1972 return size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
);
1974 if (type
== sizetype
)
1976 else if (type
== bitsizetype
)
1977 ctype
= sbitsizetype
;
1979 ctype
= signed_type_for (type
);
1981 /* If either operand is not a constant, do the conversions to the signed
1982 type and subtract. The hardware will do the right thing with any
1983 overflow in the subtraction. */
1984 if (TREE_CODE (arg0
) != INTEGER_CST
|| TREE_CODE (arg1
) != INTEGER_CST
)
1985 return size_binop_loc (loc
, MINUS_EXPR
,
1986 fold_convert_loc (loc
, ctype
, arg0
),
1987 fold_convert_loc (loc
, ctype
, arg1
));
1989 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1990 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1991 overflow) and negate (which can't either). Special-case a result
1992 of zero while we're here. */
1993 if (tree_int_cst_equal (arg0
, arg1
))
1994 return build_int_cst (ctype
, 0);
1995 else if (tree_int_cst_lt (arg1
, arg0
))
1996 return fold_convert_loc (loc
, ctype
,
1997 size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
));
1999 return size_binop_loc (loc
, MINUS_EXPR
, build_int_cst (ctype
, 0),
2000 fold_convert_loc (loc
, ctype
,
2001 size_binop_loc (loc
,
2006 /* A subroutine of fold_convert_const handling conversions of an
2007 INTEGER_CST to another integer type. */
2010 fold_convert_const_int_from_int (tree type
, const_tree arg1
)
2012 /* Given an integer constant, make new constant with new type,
2013 appropriately sign-extended or truncated. Use widest_int
2014 so that any extension is done according ARG1's type. */
2015 return force_fit_type (type
, wi::to_widest (arg1
),
2016 !POINTER_TYPE_P (TREE_TYPE (arg1
)),
2017 TREE_OVERFLOW (arg1
));
2020 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2021 to an integer type. */
2024 fold_convert_const_int_from_real (enum tree_code code
, tree type
, const_tree arg1
)
2026 bool overflow
= false;
2029 /* The following code implements the floating point to integer
2030 conversion rules required by the Java Language Specification,
2031 that IEEE NaNs are mapped to zero and values that overflow
2032 the target precision saturate, i.e. values greater than
2033 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2034 are mapped to INT_MIN. These semantics are allowed by the
2035 C and C++ standards that simply state that the behavior of
2036 FP-to-integer conversion is unspecified upon overflow. */
2040 REAL_VALUE_TYPE x
= TREE_REAL_CST (arg1
);
2044 case FIX_TRUNC_EXPR
:
2045 real_trunc (&r
, VOIDmode
, &x
);
2052 /* If R is NaN, return zero and show we have an overflow. */
2053 if (REAL_VALUE_ISNAN (r
))
2056 val
= wi::zero (TYPE_PRECISION (type
));
2059 /* See if R is less than the lower bound or greater than the
2064 tree lt
= TYPE_MIN_VALUE (type
);
2065 REAL_VALUE_TYPE l
= real_value_from_int_cst (NULL_TREE
, lt
);
2066 if (real_less (&r
, &l
))
2069 val
= wi::to_wide (lt
);
2075 tree ut
= TYPE_MAX_VALUE (type
);
2078 REAL_VALUE_TYPE u
= real_value_from_int_cst (NULL_TREE
, ut
);
2079 if (real_less (&u
, &r
))
2082 val
= wi::to_wide (ut
);
2088 val
= real_to_integer (&r
, &overflow
, TYPE_PRECISION (type
));
2090 t
= force_fit_type (type
, val
, -1, overflow
| TREE_OVERFLOW (arg1
));
2094 /* A subroutine of fold_convert_const handling conversions of a
2095 FIXED_CST to an integer type. */
2098 fold_convert_const_int_from_fixed (tree type
, const_tree arg1
)
2101 double_int temp
, temp_trunc
;
2104 /* Right shift FIXED_CST to temp by fbit. */
2105 temp
= TREE_FIXED_CST (arg1
).data
;
2106 mode
= TREE_FIXED_CST (arg1
).mode
;
2107 if (GET_MODE_FBIT (mode
) < HOST_BITS_PER_DOUBLE_INT
)
2109 temp
= temp
.rshift (GET_MODE_FBIT (mode
),
2110 HOST_BITS_PER_DOUBLE_INT
,
2111 SIGNED_FIXED_POINT_MODE_P (mode
));
2113 /* Left shift temp to temp_trunc by fbit. */
2114 temp_trunc
= temp
.lshift (GET_MODE_FBIT (mode
),
2115 HOST_BITS_PER_DOUBLE_INT
,
2116 SIGNED_FIXED_POINT_MODE_P (mode
));
2120 temp
= double_int_zero
;
2121 temp_trunc
= double_int_zero
;
2124 /* If FIXED_CST is negative, we need to round the value toward 0.
2125 By checking if the fractional bits are not zero to add 1 to temp. */
2126 if (SIGNED_FIXED_POINT_MODE_P (mode
)
2127 && temp_trunc
.is_negative ()
2128 && TREE_FIXED_CST (arg1
).data
!= temp_trunc
)
2129 temp
+= double_int_one
;
2131 /* Given a fixed-point constant, make new constant with new type,
2132 appropriately sign-extended or truncated. */
2133 t
= force_fit_type (type
, temp
, -1,
2134 (temp
.is_negative ()
2135 && (TYPE_UNSIGNED (type
)
2136 < TYPE_UNSIGNED (TREE_TYPE (arg1
))))
2137 | TREE_OVERFLOW (arg1
));
2142 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2143 to another floating point type. */
2146 fold_convert_const_real_from_real (tree type
, const_tree arg1
)
2148 REAL_VALUE_TYPE value
;
2151 /* Don't perform the operation if flag_signaling_nans is on
2152 and the operand is a signaling NaN. */
2153 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
)))
2154 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1
)))
2157 real_convert (&value
, TYPE_MODE (type
), &TREE_REAL_CST (arg1
));
2158 t
= build_real (type
, value
);
2160 /* If converting an infinity or NAN to a representation that doesn't
2161 have one, set the overflow bit so that we can produce some kind of
2162 error message at the appropriate point if necessary. It's not the
2163 most user-friendly message, but it's better than nothing. */
2164 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1
))
2165 && !MODE_HAS_INFINITIES (TYPE_MODE (type
)))
2166 TREE_OVERFLOW (t
) = 1;
2167 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
2168 && !MODE_HAS_NANS (TYPE_MODE (type
)))
2169 TREE_OVERFLOW (t
) = 1;
2170 /* Regular overflow, conversion produced an infinity in a mode that
2171 can't represent them. */
2172 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type
))
2173 && REAL_VALUE_ISINF (value
)
2174 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1
)))
2175 TREE_OVERFLOW (t
) = 1;
2177 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
2181 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2182 to a floating point type. */
2185 fold_convert_const_real_from_fixed (tree type
, const_tree arg1
)
2187 REAL_VALUE_TYPE value
;
2190 real_convert_from_fixed (&value
, SCALAR_FLOAT_TYPE_MODE (type
),
2191 &TREE_FIXED_CST (arg1
));
2192 t
= build_real (type
, value
);
2194 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
2198 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2199 to another fixed-point type. */
2202 fold_convert_const_fixed_from_fixed (tree type
, const_tree arg1
)
2204 FIXED_VALUE_TYPE value
;
2208 overflow_p
= fixed_convert (&value
, SCALAR_TYPE_MODE (type
),
2209 &TREE_FIXED_CST (arg1
), TYPE_SATURATING (type
));
2210 t
= build_fixed (type
, value
);
2212 /* Propagate overflow flags. */
2213 if (overflow_p
| TREE_OVERFLOW (arg1
))
2214 TREE_OVERFLOW (t
) = 1;
2218 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2219 to a fixed-point type. */
2222 fold_convert_const_fixed_from_int (tree type
, const_tree arg1
)
2224 FIXED_VALUE_TYPE value
;
2229 gcc_assert (TREE_INT_CST_NUNITS (arg1
) <= 2);
2231 di
.low
= TREE_INT_CST_ELT (arg1
, 0);
2232 if (TREE_INT_CST_NUNITS (arg1
) == 1)
2233 di
.high
= (HOST_WIDE_INT
) di
.low
< 0 ? HOST_WIDE_INT_M1
: 0;
2235 di
.high
= TREE_INT_CST_ELT (arg1
, 1);
2237 overflow_p
= fixed_convert_from_int (&value
, SCALAR_TYPE_MODE (type
), di
,
2238 TYPE_UNSIGNED (TREE_TYPE (arg1
)),
2239 TYPE_SATURATING (type
));
2240 t
= build_fixed (type
, value
);
2242 /* Propagate overflow flags. */
2243 if (overflow_p
| TREE_OVERFLOW (arg1
))
2244 TREE_OVERFLOW (t
) = 1;
2248 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2249 to a fixed-point type. */
2252 fold_convert_const_fixed_from_real (tree type
, const_tree arg1
)
2254 FIXED_VALUE_TYPE value
;
2258 overflow_p
= fixed_convert_from_real (&value
, SCALAR_TYPE_MODE (type
),
2259 &TREE_REAL_CST (arg1
),
2260 TYPE_SATURATING (type
));
2261 t
= build_fixed (type
, value
);
2263 /* Propagate overflow flags. */
2264 if (overflow_p
| TREE_OVERFLOW (arg1
))
2265 TREE_OVERFLOW (t
) = 1;
2269 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2270 type TYPE. If no simplification can be done return NULL_TREE. */
2273 fold_convert_const (enum tree_code code
, tree type
, tree arg1
)
2275 tree arg_type
= TREE_TYPE (arg1
);
2276 if (arg_type
== type
)
2279 /* We can't widen types, since the runtime value could overflow the
2280 original type before being extended to the new type. */
2281 if (POLY_INT_CST_P (arg1
)
2282 && (POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
))
2283 && TYPE_PRECISION (type
) <= TYPE_PRECISION (arg_type
))
2284 return build_poly_int_cst (type
,
2285 poly_wide_int::from (poly_int_cst_value (arg1
),
2286 TYPE_PRECISION (type
),
2287 TYPE_SIGN (arg_type
)));
2289 if (POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
)
2290 || TREE_CODE (type
) == OFFSET_TYPE
)
2292 if (TREE_CODE (arg1
) == INTEGER_CST
)
2293 return fold_convert_const_int_from_int (type
, arg1
);
2294 else if (TREE_CODE (arg1
) == REAL_CST
)
2295 return fold_convert_const_int_from_real (code
, type
, arg1
);
2296 else if (TREE_CODE (arg1
) == FIXED_CST
)
2297 return fold_convert_const_int_from_fixed (type
, arg1
);
2299 else if (TREE_CODE (type
) == REAL_TYPE
)
2301 if (TREE_CODE (arg1
) == INTEGER_CST
)
2302 return build_real_from_int_cst (type
, arg1
);
2303 else if (TREE_CODE (arg1
) == REAL_CST
)
2304 return fold_convert_const_real_from_real (type
, arg1
);
2305 else if (TREE_CODE (arg1
) == FIXED_CST
)
2306 return fold_convert_const_real_from_fixed (type
, arg1
);
2308 else if (TREE_CODE (type
) == FIXED_POINT_TYPE
)
2310 if (TREE_CODE (arg1
) == FIXED_CST
)
2311 return fold_convert_const_fixed_from_fixed (type
, arg1
);
2312 else if (TREE_CODE (arg1
) == INTEGER_CST
)
2313 return fold_convert_const_fixed_from_int (type
, arg1
);
2314 else if (TREE_CODE (arg1
) == REAL_CST
)
2315 return fold_convert_const_fixed_from_real (type
, arg1
);
2317 else if (TREE_CODE (type
) == VECTOR_TYPE
)
2319 if (TREE_CODE (arg1
) == VECTOR_CST
2320 && known_eq (TYPE_VECTOR_SUBPARTS (type
), VECTOR_CST_NELTS (arg1
)))
2322 tree elttype
= TREE_TYPE (type
);
2323 tree arg1_elttype
= TREE_TYPE (TREE_TYPE (arg1
));
2324 /* We can't handle steps directly when extending, since the
2325 values need to wrap at the original precision first. */
2327 = (INTEGRAL_TYPE_P (elttype
)
2328 && INTEGRAL_TYPE_P (arg1_elttype
)
2329 && TYPE_PRECISION (elttype
) <= TYPE_PRECISION (arg1_elttype
));
2330 tree_vector_builder v
;
2331 if (!v
.new_unary_operation (type
, arg1
, step_ok_p
))
2333 unsigned int len
= v
.encoded_nelts ();
2334 for (unsigned int i
= 0; i
< len
; ++i
)
2336 tree elt
= VECTOR_CST_ELT (arg1
, i
);
2337 tree cvt
= fold_convert_const (code
, elttype
, elt
);
2338 if (cvt
== NULL_TREE
)
2348 /* Construct a vector of zero elements of vector type TYPE. */
2351 build_zero_vector (tree type
)
2355 t
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), integer_zero_node
);
2356 return build_vector_from_val (type
, t
);
2359 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2362 fold_convertible_p (const_tree type
, const_tree arg
)
2364 tree orig
= TREE_TYPE (arg
);
2369 if (TREE_CODE (arg
) == ERROR_MARK
2370 || TREE_CODE (type
) == ERROR_MARK
2371 || TREE_CODE (orig
) == ERROR_MARK
)
2374 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
2377 switch (TREE_CODE (type
))
2379 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2380 case POINTER_TYPE
: case REFERENCE_TYPE
:
2382 return (INTEGRAL_TYPE_P (orig
)
2383 || (POINTER_TYPE_P (orig
)
2384 && TYPE_PRECISION (type
) <= TYPE_PRECISION (orig
))
2385 || TREE_CODE (orig
) == OFFSET_TYPE
);
2388 case FIXED_POINT_TYPE
:
2390 return TREE_CODE (type
) == TREE_CODE (orig
);
2393 return (VECTOR_TYPE_P (orig
)
2394 && known_eq (TYPE_VECTOR_SUBPARTS (type
),
2395 TYPE_VECTOR_SUBPARTS (orig
))
2396 && fold_convertible_p (TREE_TYPE (type
), TREE_TYPE (orig
)));
2403 /* Convert expression ARG to type TYPE. Used by the middle-end for
2404 simple conversions in preference to calling the front-end's convert. */
2407 fold_convert_loc (location_t loc
, tree type
, tree arg
)
2409 tree orig
= TREE_TYPE (arg
);
2415 if (TREE_CODE (arg
) == ERROR_MARK
2416 || TREE_CODE (type
) == ERROR_MARK
2417 || TREE_CODE (orig
) == ERROR_MARK
)
2418 return error_mark_node
;
2420 switch (TREE_CODE (type
))
2423 case REFERENCE_TYPE
:
2424 /* Handle conversions between pointers to different address spaces. */
2425 if (POINTER_TYPE_P (orig
)
2426 && (TYPE_ADDR_SPACE (TREE_TYPE (type
))
2427 != TYPE_ADDR_SPACE (TREE_TYPE (orig
))))
2428 return fold_build1_loc (loc
, ADDR_SPACE_CONVERT_EXPR
, type
, arg
);
2431 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2433 if (TREE_CODE (arg
) == INTEGER_CST
)
2435 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2436 if (tem
!= NULL_TREE
)
2439 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2440 || TREE_CODE (orig
) == OFFSET_TYPE
)
2441 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2442 if (TREE_CODE (orig
) == COMPLEX_TYPE
)
2443 return fold_convert_loc (loc
, type
,
2444 fold_build1_loc (loc
, REALPART_EXPR
,
2445 TREE_TYPE (orig
), arg
));
2446 gcc_assert (TREE_CODE (orig
) == VECTOR_TYPE
2447 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2448 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, type
, arg
);
2451 if (TREE_CODE (arg
) == INTEGER_CST
)
2453 tem
= fold_convert_const (FLOAT_EXPR
, type
, arg
);
2454 if (tem
!= NULL_TREE
)
2457 else if (TREE_CODE (arg
) == REAL_CST
)
2459 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
2460 if (tem
!= NULL_TREE
)
2463 else if (TREE_CODE (arg
) == FIXED_CST
)
2465 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
2466 if (tem
!= NULL_TREE
)
2470 switch (TREE_CODE (orig
))
2473 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2474 case POINTER_TYPE
: case REFERENCE_TYPE
:
2475 return fold_build1_loc (loc
, FLOAT_EXPR
, type
, arg
);
2478 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2480 case FIXED_POINT_TYPE
:
2481 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
2484 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2485 return fold_convert_loc (loc
, type
, tem
);
2491 case FIXED_POINT_TYPE
:
2492 if (TREE_CODE (arg
) == FIXED_CST
|| TREE_CODE (arg
) == INTEGER_CST
2493 || TREE_CODE (arg
) == REAL_CST
)
2495 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
2496 if (tem
!= NULL_TREE
)
2497 goto fold_convert_exit
;
2500 switch (TREE_CODE (orig
))
2502 case FIXED_POINT_TYPE
:
2507 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
2510 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2511 return fold_convert_loc (loc
, type
, tem
);
2518 switch (TREE_CODE (orig
))
2521 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
2522 case POINTER_TYPE
: case REFERENCE_TYPE
:
2524 case FIXED_POINT_TYPE
:
2525 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
2526 fold_convert_loc (loc
, TREE_TYPE (type
), arg
),
2527 fold_convert_loc (loc
, TREE_TYPE (type
),
2528 integer_zero_node
));
2533 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
2535 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2536 TREE_OPERAND (arg
, 0));
2537 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
),
2538 TREE_OPERAND (arg
, 1));
2539 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
2542 arg
= save_expr (arg
);
2543 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
2544 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, TREE_TYPE (orig
), arg
);
2545 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
), rpart
);
2546 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
), ipart
);
2547 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
2555 if (integer_zerop (arg
))
2556 return build_zero_vector (type
);
2557 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2558 gcc_assert (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2559 || TREE_CODE (orig
) == VECTOR_TYPE
);
2560 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, type
, arg
);
2563 tem
= fold_ignored_result (arg
);
2564 return fold_build1_loc (loc
, NOP_EXPR
, type
, tem
);
2567 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
2568 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2572 protected_set_expr_location_unshare (tem
, loc
);
2576 /* Return false if expr can be assumed not to be an lvalue, true
2580 maybe_lvalue_p (const_tree x
)
2582 /* We only need to wrap lvalue tree codes. */
2583 switch (TREE_CODE (x
))
2596 case ARRAY_RANGE_REF
:
2602 case PREINCREMENT_EXPR
:
2603 case PREDECREMENT_EXPR
:
2605 case TRY_CATCH_EXPR
:
2606 case WITH_CLEANUP_EXPR
:
2612 case VIEW_CONVERT_EXPR
:
2616 /* Assume the worst for front-end tree codes. */
2617 if ((int)TREE_CODE (x
) >= NUM_TREE_CODES
)
2625 /* Return an expr equal to X but certainly not valid as an lvalue. */
2628 non_lvalue_loc (location_t loc
, tree x
)
2630 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2635 if (! maybe_lvalue_p (x
))
2637 return build1_loc (loc
, NON_LVALUE_EXPR
, TREE_TYPE (x
), x
);
2640 /* When pedantic, return an expr equal to X but certainly not valid as a
2641 pedantic lvalue. Otherwise, return X. */
2644 pedantic_non_lvalue_loc (location_t loc
, tree x
)
2646 return protected_set_expr_location_unshare (x
, loc
);
2649 /* Given a tree comparison code, return the code that is the logical inverse.
2650 It is generally not safe to do this for floating-point comparisons, except
2651 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2652 ERROR_MARK in this case. */
2655 invert_tree_comparison (enum tree_code code
, bool honor_nans
)
2657 if (honor_nans
&& flag_trapping_math
&& code
!= EQ_EXPR
&& code
!= NE_EXPR
2658 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
)
2668 return honor_nans
? UNLE_EXPR
: LE_EXPR
;
2670 return honor_nans
? UNLT_EXPR
: LT_EXPR
;
2672 return honor_nans
? UNGE_EXPR
: GE_EXPR
;
2674 return honor_nans
? UNGT_EXPR
: GT_EXPR
;
2688 return UNORDERED_EXPR
;
2689 case UNORDERED_EXPR
:
2690 return ORDERED_EXPR
;
2696 /* Similar, but return the comparison that results if the operands are
2697 swapped. This is safe for floating-point. */
2700 swap_tree_comparison (enum tree_code code
)
2707 case UNORDERED_EXPR
:
2733 /* Convert a comparison tree code from an enum tree_code representation
2734 into a compcode bit-based encoding. This function is the inverse of
2735 compcode_to_comparison. */
2737 static enum comparison_code
2738 comparison_to_compcode (enum tree_code code
)
2755 return COMPCODE_ORD
;
2756 case UNORDERED_EXPR
:
2757 return COMPCODE_UNORD
;
2759 return COMPCODE_UNLT
;
2761 return COMPCODE_UNEQ
;
2763 return COMPCODE_UNLE
;
2765 return COMPCODE_UNGT
;
2767 return COMPCODE_LTGT
;
2769 return COMPCODE_UNGE
;
2775 /* Convert a compcode bit-based encoding of a comparison operator back
2776 to GCC's enum tree_code representation. This function is the
2777 inverse of comparison_to_compcode. */
2779 static enum tree_code
2780 compcode_to_comparison (enum comparison_code code
)
2797 return ORDERED_EXPR
;
2798 case COMPCODE_UNORD
:
2799 return UNORDERED_EXPR
;
2817 /* Return true if COND1 tests the opposite condition of COND2. */
2820 inverse_conditions_p (const_tree cond1
, const_tree cond2
)
2822 return (COMPARISON_CLASS_P (cond1
)
2823 && COMPARISON_CLASS_P (cond2
)
2824 && (invert_tree_comparison
2826 HONOR_NANS (TREE_OPERAND (cond1
, 0))) == TREE_CODE (cond2
))
2827 && operand_equal_p (TREE_OPERAND (cond1
, 0),
2828 TREE_OPERAND (cond2
, 0), 0)
2829 && operand_equal_p (TREE_OPERAND (cond1
, 1),
2830 TREE_OPERAND (cond2
, 1), 0));
2833 /* Return a tree for the comparison which is the combination of
2834 doing the AND or OR (depending on CODE) of the two operations LCODE
2835 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2836 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2837 if this makes the transformation invalid. */
2840 combine_comparisons (location_t loc
,
2841 enum tree_code code
, enum tree_code lcode
,
2842 enum tree_code rcode
, tree truth_type
,
2843 tree ll_arg
, tree lr_arg
)
2845 bool honor_nans
= HONOR_NANS (ll_arg
);
2846 enum comparison_code lcompcode
= comparison_to_compcode (lcode
);
2847 enum comparison_code rcompcode
= comparison_to_compcode (rcode
);
2852 case TRUTH_AND_EXPR
: case TRUTH_ANDIF_EXPR
:
2853 compcode
= lcompcode
& rcompcode
;
2856 case TRUTH_OR_EXPR
: case TRUTH_ORIF_EXPR
:
2857 compcode
= lcompcode
| rcompcode
;
2866 /* Eliminate unordered comparisons, as well as LTGT and ORD
2867 which are not used unless the mode has NaNs. */
2868 compcode
&= ~COMPCODE_UNORD
;
2869 if (compcode
== COMPCODE_LTGT
)
2870 compcode
= COMPCODE_NE
;
2871 else if (compcode
== COMPCODE_ORD
)
2872 compcode
= COMPCODE_TRUE
;
2874 else if (flag_trapping_math
)
2876 /* Check that the original operation and the optimized ones will trap
2877 under the same condition. */
2878 bool ltrap
= (lcompcode
& COMPCODE_UNORD
) == 0
2879 && (lcompcode
!= COMPCODE_EQ
)
2880 && (lcompcode
!= COMPCODE_ORD
);
2881 bool rtrap
= (rcompcode
& COMPCODE_UNORD
) == 0
2882 && (rcompcode
!= COMPCODE_EQ
)
2883 && (rcompcode
!= COMPCODE_ORD
);
2884 bool trap
= (compcode
& COMPCODE_UNORD
) == 0
2885 && (compcode
!= COMPCODE_EQ
)
2886 && (compcode
!= COMPCODE_ORD
);
2888 /* In a short-circuited boolean expression the LHS might be
2889 such that the RHS, if evaluated, will never trap. For
2890 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2891 if neither x nor y is NaN. (This is a mixed blessing: for
2892 example, the expression above will never trap, hence
2893 optimizing it to x < y would be invalid). */
2894 if ((code
== TRUTH_ORIF_EXPR
&& (lcompcode
& COMPCODE_UNORD
))
2895 || (code
== TRUTH_ANDIF_EXPR
&& !(lcompcode
& COMPCODE_UNORD
)))
2898 /* If the comparison was short-circuited, and only the RHS
2899 trapped, we may now generate a spurious trap. */
2901 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2904 /* If we changed the conditions that cause a trap, we lose. */
2905 if ((ltrap
|| rtrap
) != trap
)
2909 if (compcode
== COMPCODE_TRUE
)
2910 return constant_boolean_node (true, truth_type
);
2911 else if (compcode
== COMPCODE_FALSE
)
2912 return constant_boolean_node (false, truth_type
);
2915 enum tree_code tcode
;
2917 tcode
= compcode_to_comparison ((enum comparison_code
) compcode
);
2918 return fold_build2_loc (loc
, tcode
, truth_type
, ll_arg
, lr_arg
);
2922 /* Return nonzero if two operands (typically of the same tree node)
2923 are necessarily equal. FLAGS modifies behavior as follows:
2925 If OEP_ONLY_CONST is set, only return nonzero for constants.
2926 This function tests whether the operands are indistinguishable;
2927 it does not test whether they are equal using C's == operation.
2928 The distinction is important for IEEE floating point, because
2929 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2930 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2932 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2933 even though it may hold multiple values during a function.
2934 This is because a GCC tree node guarantees that nothing else is
2935 executed between the evaluation of its "operands" (which may often
2936 be evaluated in arbitrary order). Hence if the operands themselves
2937 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2938 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2939 unset means assuming isochronic (or instantaneous) tree equivalence.
2940 Unless comparing arbitrary expression trees, such as from different
2941 statements, this flag can usually be left unset.
2943 If OEP_PURE_SAME is set, then pure functions with identical arguments
2944 are considered the same. It is used when the caller has other ways
2945 to ensure that global memory is unchanged in between.
2947 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2948 not values of expressions.
2950 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2951 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2953 If OEP_BITWISE is set, then require the values to be bitwise identical
2954 rather than simply numerically equal. Do not take advantage of things
2955 like math-related flags or undefined behavior; only return true for
2956 values that are provably bitwise identical in all circumstances.
2958 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2959 any operand with side effect. This is unnecessarily conservative in the
2960 case we know that arg0 and arg1 are in disjoint code paths (such as in
2961 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2962 addresses with TREE_CONSTANT flag set so we know that &var == &var
2963 even if var is volatile. */
/* NOTE(review): this text is an extraction-garbled copy of
   operand_compare::operand_equal_p -- statements are split across lines and
   interleaved with stale line numbers, and many original lines are missing.
   Reproduced verbatim; only comments were added or repaired.  The function
   compares two GENERIC trees for equality under the OEP_* flag semantics
   described in the comment above it.  */
2966 operand_compare::operand_equal_p (const_tree arg0
, const_tree arg1
,
/* Hash-based sanity check first; may already decide the answer into R.  */
2970 if (verify_hash_value (arg0
, arg1
, flags
, &r
))
2973 STRIP_ANY_LOCATION_WRAPPER (arg0
);
2974 STRIP_ANY_LOCATION_WRAPPER (arg1
);
2976 /* If either is ERROR_MARK, they aren't equal. */
2977 if (TREE_CODE (arg0
) == ERROR_MARK
|| TREE_CODE (arg1
) == ERROR_MARK
2978 || TREE_TYPE (arg0
) == error_mark_node
2979 || TREE_TYPE (arg1
) == error_mark_node
)
2982 /* Similar, if either does not have a type (like a template id),
2983 they aren't equal. */
2984 if (!TREE_TYPE (arg0
) || !TREE_TYPE (arg1
))
2987 /* Bitwise identity makes no sense if the values have different layouts. */
2988 if ((flags
& OEP_BITWISE
)
2989 && !tree_nop_conversion_p (TREE_TYPE (arg0
), TREE_TYPE (arg1
)))
2992 /* We cannot consider pointers to different address space equal. */
2993 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
2994 && POINTER_TYPE_P (TREE_TYPE (arg1
))
2995 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0
)))
2996 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1
)))))
2999 /* Check equality of integer constants before bailing out due to
3000 precision differences. */
3001 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
3003 /* Address of INTEGER_CST is not defined; check that we did not forget
3004 to drop the OEP_ADDRESS_OF flags. */
3005 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
3006 return tree_int_cst_equal (arg0
, arg1
);
3009 if (!(flags
& OEP_ADDRESS_OF
))
3011 /* If both types don't have the same signedness, then we can't consider
3012 them equal. We must check this before the STRIP_NOPS calls
3013 because they may change the signedness of the arguments. As pointers
3014 strictly don't have a signedness, require either two pointers or
3015 two non-pointers as well. */
3016 if (TYPE_UNSIGNED (TREE_TYPE (arg0
)) != TYPE_UNSIGNED (TREE_TYPE (arg1
))
3017 || POINTER_TYPE_P (TREE_TYPE (arg0
))
3018 != POINTER_TYPE_P (TREE_TYPE (arg1
)))
3021 /* If both types don't have the same precision, then it is not safe
3023 if (element_precision (TREE_TYPE (arg0
))
3024 != element_precision (TREE_TYPE (arg1
)))
3031 /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
3032 sanity check once the issue is solved. */
3034 /* Addresses of conversions and SSA_NAMEs (and many other things)
3035 are not defined. Check that we did not forget to drop the
3036 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3037 gcc_checking_assert (!CONVERT_EXPR_P (arg0
) && !CONVERT_EXPR_P (arg1
)
3038 && TREE_CODE (arg0
) != SSA_NAME
);
3041 /* In case both args are comparisons but with different comparison
3042 code, try to swap the comparison operands of one arg to produce
3043 a match and compare that variant. */
3044 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
3045 && COMPARISON_CLASS_P (arg0
)
3046 && COMPARISON_CLASS_P (arg1
))
3048 enum tree_code swap_code
= swap_tree_comparison (TREE_CODE (arg1
));
3050 if (TREE_CODE (arg0
) == swap_code
)
3051 return operand_equal_p (TREE_OPERAND (arg0
, 0),
3052 TREE_OPERAND (arg1
, 1), flags
)
3053 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3054 TREE_OPERAND (arg1
, 0), flags
);
3057 if (TREE_CODE (arg0
) != TREE_CODE (arg1
))
3059 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3060 if (CONVERT_EXPR_P (arg0
) && CONVERT_EXPR_P (arg1
))
3062 else if (flags
& OEP_ADDRESS_OF
)
3064 /* If we are interested in comparing addresses ignore
3065 MEM_REF wrappings of the base that can appear just for
3067 if (TREE_CODE (arg0
) == MEM_REF
3069 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ADDR_EXPR
3070 && TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0) == arg1
3071 && integer_zerop (TREE_OPERAND (arg0
, 1)))
3073 else if (TREE_CODE (arg1
) == MEM_REF
3075 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ADDR_EXPR
3076 && TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0) == arg0
3077 && integer_zerop (TREE_OPERAND (arg1
, 1)))
3085 /* When not checking addresses, this is needed for conversions and for
3086 COMPONENT_REF. Might as well play it safe and always test this. */
3087 if (TREE_CODE (TREE_TYPE (arg0
)) == ERROR_MARK
3088 || TREE_CODE (TREE_TYPE (arg1
)) == ERROR_MARK
3089 || (TYPE_MODE (TREE_TYPE (arg0
)) != TYPE_MODE (TREE_TYPE (arg1
))
3090 && !(flags
& OEP_ADDRESS_OF
)))
3093 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3094 We don't care about side effects in that case because the SAVE_EXPR
3095 takes care of that for us. In all other cases, two expressions are
3096 equal if they have no side effects. If we have two identical
3097 expressions with side effects that should be treated the same due
3098 to the only side effects being identical SAVE_EXPR's, that will
3099 be detected in the recursive calls below.
3100 If we are taking an invariant address of two identical objects
3101 they are necessarily equal as well. */
3102 if (arg0
== arg1
&& ! (flags
& OEP_ONLY_CONST
)
3103 && (TREE_CODE (arg0
) == SAVE_EXPR
3104 || (flags
& OEP_MATCH_SIDE_EFFECTS
)
3105 || (! TREE_SIDE_EFFECTS (arg0
) && ! TREE_SIDE_EFFECTS (arg1
))))
3108 /* Next handle constant cases, those for which we can return 1 even
3109 if ONLY_CONST is set. */
3110 if (TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
))
3111 switch (TREE_CODE (arg0
))
3114 return tree_int_cst_equal (arg0
, arg1
);
3117 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0
),
3118 TREE_FIXED_CST (arg1
));
3121 if (real_identical (&TREE_REAL_CST (arg0
), &TREE_REAL_CST (arg1
)))
3124 if (!(flags
& OEP_BITWISE
) && !HONOR_SIGNED_ZEROS (arg0
))
3126 /* If we do not distinguish between signed and unsigned zero,
3127 consider them equal. */
3128 if (real_zerop (arg0
) && real_zerop (arg1
))
/* VECTOR_CST: compare the encoded pattern representation element-wise.  */
3135 if (VECTOR_CST_LOG2_NPATTERNS (arg0
)
3136 != VECTOR_CST_LOG2_NPATTERNS (arg1
))
3139 if (VECTOR_CST_NELTS_PER_PATTERN (arg0
)
3140 != VECTOR_CST_NELTS_PER_PATTERN (arg1
))
3143 unsigned int count
= vector_cst_encoded_nelts (arg0
);
3144 for (unsigned int i
= 0; i
< count
; ++i
)
3145 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0
, i
),
3146 VECTOR_CST_ENCODED_ELT (arg1
, i
), flags
))
3152 return (operand_equal_p (TREE_REALPART (arg0
), TREE_REALPART (arg1
),
3154 && operand_equal_p (TREE_IMAGPART (arg0
), TREE_IMAGPART (arg1
),
3158 return (TREE_STRING_LENGTH (arg0
) == TREE_STRING_LENGTH (arg1
)
3159 && ! memcmp (TREE_STRING_POINTER (arg0
),
3160 TREE_STRING_POINTER (arg1
),
3161 TREE_STRING_LENGTH (arg0
)));
3164 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
3165 return operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0),
3166 flags
| OEP_ADDRESS_OF
3167 | OEP_MATCH_SIDE_EFFECTS
);
3169 /* In GIMPLE empty constructors are allowed in initializers of
3171 return !CONSTRUCTOR_NELTS (arg0
) && !CONSTRUCTOR_NELTS (arg1
);
3176 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3177 two instances of undefined behavior will give identical results. */
3178 if (flags
& (OEP_ONLY_CONST
| OEP_BITWISE
))
3181 /* Define macros to test an operand from arg0 and arg1 for equality and a
3182 variant that allows null and views null as being different from any
3183 non-null value. In the latter case, if either is null, the both
3184 must be; otherwise, do the normal comparison. */
3185 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3186 TREE_OPERAND (arg1, N), flags)
3188 #define OP_SAME_WITH_NULL(N) \
3189 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3190 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
/* Dispatch on the tree-code class; operands are compared pairwise, with
   commutative codes also trying the swapped order.  */
3192 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
3195 /* Two conversions are equal only if signedness and modes match. */
3196 switch (TREE_CODE (arg0
))
3199 case FIX_TRUNC_EXPR
:
3200 if (TYPE_UNSIGNED (TREE_TYPE (arg0
))
3201 != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
3211 case tcc_comparison
:
3213 if (OP_SAME (0) && OP_SAME (1))
3216 /* For commutative ops, allow the other order. */
3217 return (commutative_tree_code (TREE_CODE (arg0
))
3218 && operand_equal_p (TREE_OPERAND (arg0
, 0),
3219 TREE_OPERAND (arg1
, 1), flags
)
3220 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3221 TREE_OPERAND (arg1
, 0), flags
));
3224 /* If either of the pointer (or reference) expressions we are
3225 dereferencing contain a side effect, these cannot be equal,
3226 but their addresses can be. */
3227 if ((flags
& OEP_MATCH_SIDE_EFFECTS
) == 0
3228 && (TREE_SIDE_EFFECTS (arg0
)
3229 || TREE_SIDE_EFFECTS (arg1
)))
3232 switch (TREE_CODE (arg0
))
3235 if (!(flags
& OEP_ADDRESS_OF
))
3237 if (TYPE_ALIGN (TREE_TYPE (arg0
))
3238 != TYPE_ALIGN (TREE_TYPE (arg1
)))
3240 /* Verify that the access types are compatible. */
3241 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0
))
3242 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)))
3245 flags
&= ~OEP_ADDRESS_OF
;
3249 /* Require the same offset. */
3250 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0
)),
3251 TYPE_SIZE (TREE_TYPE (arg1
)),
3252 flags
& ~OEP_ADDRESS_OF
))
3257 case VIEW_CONVERT_EXPR
:
3260 case TARGET_MEM_REF
:
3262 if (!(flags
& OEP_ADDRESS_OF
))
3264 /* Require equal access sizes */
3265 if (TYPE_SIZE (TREE_TYPE (arg0
)) != TYPE_SIZE (TREE_TYPE (arg1
))
3266 && (!TYPE_SIZE (TREE_TYPE (arg0
))
3267 || !TYPE_SIZE (TREE_TYPE (arg1
))
3268 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0
)),
3269 TYPE_SIZE (TREE_TYPE (arg1
)),
3272 /* Verify that access happens in similar types. */
3273 if (!types_compatible_p (TREE_TYPE (arg0
), TREE_TYPE (arg1
)))
3275 /* Verify that accesses are TBAA compatible. */
3276 if (!alias_ptr_types_compatible_p
3277 (TREE_TYPE (TREE_OPERAND (arg0
, 1)),
3278 TREE_TYPE (TREE_OPERAND (arg1
, 1)))
3279 || (MR_DEPENDENCE_CLIQUE (arg0
)
3280 != MR_DEPENDENCE_CLIQUE (arg1
))
3281 || (MR_DEPENDENCE_BASE (arg0
)
3282 != MR_DEPENDENCE_BASE (arg1
)))
3284 /* Verify that alignment is compatible. */
3285 if (TYPE_ALIGN (TREE_TYPE (arg0
))
3286 != TYPE_ALIGN (TREE_TYPE (arg1
)))
3289 flags
&= ~OEP_ADDRESS_OF
;
3290 return (OP_SAME (0) && OP_SAME (1)
3291 /* TARGET_MEM_REF require equal extra operands. */
3292 && (TREE_CODE (arg0
) != TARGET_MEM_REF
3293 || (OP_SAME_WITH_NULL (2)
3294 && OP_SAME_WITH_NULL (3)
3295 && OP_SAME_WITH_NULL (4))));
3298 case ARRAY_RANGE_REF
:
3301 flags
&= ~OEP_ADDRESS_OF
;
3302 /* Compare the array index by value if it is constant first as we
3303 may have different types but same value here. */
3304 return ((tree_int_cst_equal (TREE_OPERAND (arg0
, 1),
3305 TREE_OPERAND (arg1
, 1))
3307 && OP_SAME_WITH_NULL (2)
3308 && OP_SAME_WITH_NULL (3)
3309 /* Compare low bound and element size as with OEP_ADDRESS_OF
3310 we have to account for the offset of the ref. */
3311 && (TREE_TYPE (TREE_OPERAND (arg0
, 0))
3312 == TREE_TYPE (TREE_OPERAND (arg1
, 0))
3313 || (operand_equal_p (array_ref_low_bound
3314 (CONST_CAST_TREE (arg0
)),
3316 (CONST_CAST_TREE (arg1
)), flags
)
3317 && operand_equal_p (array_ref_element_size
3318 (CONST_CAST_TREE (arg0
)),
3319 array_ref_element_size
3320 (CONST_CAST_TREE (arg1
)),
3324 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3325 may be NULL when we're called to compare MEM_EXPRs. */
3326 if (!OP_SAME_WITH_NULL (0)
3329 flags
&= ~OEP_ADDRESS_OF
;
3330 return OP_SAME_WITH_NULL (2);
3335 flags
&= ~OEP_ADDRESS_OF
;
3336 return OP_SAME (1) && OP_SAME (2);
3338 /* Virtual table call. */
3341 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0
),
3342 OBJ_TYPE_REF_EXPR (arg1
), flags
))
3344 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0
))
3345 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1
)))
3347 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0
),
3348 OBJ_TYPE_REF_OBJECT (arg1
), flags
))
3350 if (!types_same_for_odr (obj_type_ref_class (arg0
),
3351 obj_type_ref_class (arg1
)))
3360 case tcc_expression
:
3361 switch (TREE_CODE (arg0
))
3364 /* Be sure we pass right ADDRESS_OF flag. */
3365 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
3366 return operand_equal_p (TREE_OPERAND (arg0
, 0),
3367 TREE_OPERAND (arg1
, 0),
3368 flags
| OEP_ADDRESS_OF
);
3370 case TRUTH_NOT_EXPR
:
3373 case TRUTH_ANDIF_EXPR
:
3374 case TRUTH_ORIF_EXPR
:
3375 return OP_SAME (0) && OP_SAME (1);
3377 case WIDEN_MULT_PLUS_EXPR
:
3378 case WIDEN_MULT_MINUS_EXPR
:
3381 /* The multiplication operands are commutative. */
3384 case TRUTH_AND_EXPR
:
3386 case TRUTH_XOR_EXPR
:
3387 if (OP_SAME (0) && OP_SAME (1))
3390 /* Otherwise take into account this is a commutative operation. */
3391 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
3392 TREE_OPERAND (arg1
, 1), flags
)
3393 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3394 TREE_OPERAND (arg1
, 0), flags
));
3397 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3399 flags
&= ~OEP_ADDRESS_OF
;
3402 case BIT_INSERT_EXPR
:
3403 /* BIT_INSERT_EXPR has an implicit operand as the type precision
3404 of op1. Need to check to make sure they are the same. */
3405 if (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
3406 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
3407 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 1)))
3408 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1
, 1))))
3414 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3419 case PREDECREMENT_EXPR
:
3420 case PREINCREMENT_EXPR
:
3421 case POSTDECREMENT_EXPR
:
3422 case POSTINCREMENT_EXPR
:
3423 if (flags
& OEP_LEXICOGRAPHIC
)
3424 return OP_SAME (0) && OP_SAME (1);
3427 case CLEANUP_POINT_EXPR
:
3430 if (flags
& OEP_LEXICOGRAPHIC
)
/* tcc_vl_exp: CALL_EXPR handling — compare callee (or internal fn),
   call flags, then every argument pairwise.  */
3439 switch (TREE_CODE (arg0
))
3442 if ((CALL_EXPR_FN (arg0
) == NULL_TREE
)
3443 != (CALL_EXPR_FN (arg1
) == NULL_TREE
))
3444 /* If not both CALL_EXPRs are either internal or normal function
3445 functions, then they are not equal. */
3447 else if (CALL_EXPR_FN (arg0
) == NULL_TREE
)
3449 /* If the CALL_EXPRs call different internal functions, then they
3451 if (CALL_EXPR_IFN (arg0
) != CALL_EXPR_IFN (arg1
))
3456 /* If the CALL_EXPRs call different functions, then they are not
3458 if (! operand_equal_p (CALL_EXPR_FN (arg0
), CALL_EXPR_FN (arg1
),
3463 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3465 unsigned int cef
= call_expr_flags (arg0
);
3466 if (flags
& OEP_PURE_SAME
)
3467 cef
&= ECF_CONST
| ECF_PURE
;
3470 if (!cef
&& !(flags
& OEP_LEXICOGRAPHIC
))
3474 /* Now see if all the arguments are the same. */
3476 const_call_expr_arg_iterator iter0
, iter1
;
3478 for (a0
= first_const_call_expr_arg (arg0
, &iter0
),
3479 a1
= first_const_call_expr_arg (arg1
, &iter1
);
3481 a0
= next_const_call_expr_arg (&iter0
),
3482 a1
= next_const_call_expr_arg (&iter1
))
3483 if (! operand_equal_p (a0
, a1
, flags
))
3486 /* If we get here and both argument lists are exhausted
3487 then the CALL_EXPRs are equal. */
3488 return ! (a0
|| a1
);
3494 case tcc_declaration
:
3495 /* Consider __builtin_sqrt equal to sqrt. */
3496 return (TREE_CODE (arg0
) == FUNCTION_DECL
3497 && fndecl_built_in_p (arg0
) && fndecl_built_in_p (arg1
)
3498 && DECL_BUILT_IN_CLASS (arg0
) == DECL_BUILT_IN_CLASS (arg1
)
3499 && (DECL_UNCHECKED_FUNCTION_CODE (arg0
)
3500 == DECL_UNCHECKED_FUNCTION_CODE (arg1
)));
3502 case tcc_exceptional
:
3503 if (TREE_CODE (arg0
) == CONSTRUCTOR
)
3505 if (CONSTRUCTOR_NO_CLEARING (arg0
) != CONSTRUCTOR_NO_CLEARING (arg1
))
3508 /* In GIMPLE constructors are used only to build vectors from
3509 elements. Individual elements in the constructor must be
3510 indexed in increasing order and form an initial sequence.
3512 We make no effort to compare constructors in generic.
3513 (see sem_variable::equals in ipa-icf which can do so for
3515 if (!VECTOR_TYPE_P (TREE_TYPE (arg0
))
|| !VECTOR_TYPE_P (TREE_TYPE (arg1
)))
3519 /* Be sure that vectors constructed have the same representation.
3520 We only tested element precision and modes to match.
3521 Vectors may be BLKmode and thus also check that the number of
3523 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)),
3524 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
))))
3527 vec
<constructor_elt
, va_gc
> *v0
= CONSTRUCTOR_ELTS (arg0
);
3528 vec
<constructor_elt
, va_gc
> *v1
= CONSTRUCTOR_ELTS (arg1
);
3529 unsigned int len
= vec_safe_length (v0
);
3531 if (len
!= vec_safe_length (v1
))
3534 for (unsigned int i
= 0; i
< len
; i
++)
3536 constructor_elt
*c0
= &(*v0
)[i
];
3537 constructor_elt
*c1
= &(*v1
)[i
];
3539 if (!operand_equal_p (c0
->value
, c1
->value
, flags
)
3540 /* In GIMPLE the indexes can be either NULL or matching i.
3541 Double check this so we won't get false
3542 positives for GENERIC. */
3544 && (TREE_CODE (c0
->index
) != INTEGER_CST
3545 || compare_tree_int (c0
->index
, i
)))
3547 && (TREE_CODE (c1
->index
) != INTEGER_CST
3548 || compare_tree_int (c1
->index
, i
))))
3553 else if (TREE_CODE (arg0
) == STATEMENT_LIST
3554 && (flags
& OEP_LEXICOGRAPHIC
))
3556 /* Compare the STATEMENT_LISTs. */
3557 tree_stmt_iterator tsi1
, tsi2
;
3558 tree body1
= CONST_CAST_TREE (arg0
);
3559 tree body2
= CONST_CAST_TREE (arg1
);
3560 for (tsi1
= tsi_start (body1
), tsi2
= tsi_start (body2
); ;
3561 tsi_next (&tsi1
), tsi_next (&tsi2
))
3563 /* The lists don't have the same number of statements. */
3564 if (tsi_end_p (tsi1
) ^ tsi_end_p (tsi2
))
3566 if (tsi_end_p (tsi1
) && tsi_end_p (tsi2
))
3568 if (!operand_equal_p (tsi_stmt (tsi1
), tsi_stmt (tsi2
),
3569 flags
& (OEP_LEXICOGRAPHIC
3570 | OEP_NO_HASH_CHECK
)))
3577 switch (TREE_CODE (arg0
))
3580 if (flags
& OEP_LEXICOGRAPHIC
)
3581 return OP_SAME_WITH_NULL (0);
3583 case DEBUG_BEGIN_STMT
:
3584 if (flags
& OEP_LEXICOGRAPHIC
)
3596 #undef OP_SAME_WITH_NULL
/* NOTE(review): extraction-garbled copy of operand_compare::hash_operand;
   statements are split across lines with stale line numbers and many
   original lines are elided.  Reproduced verbatim; only comments were
   added or repaired.  Folds a hash of tree T into HSTATE, consistent with
   operand_equal_p: nodes that compare equal must hash identically.  */
3599 /* Generate a hash value for an expression. This can be used iteratively
3600 by passing a previous result as the HSTATE argument. */
3603 operand_compare::hash_operand (const_tree t
, inchash::hash
&hstate
,
3607 enum tree_code code
;
3608 enum tree_code_class tclass
;
3610 if (t
== NULL_TREE
|| t
== error_mark_node
)
3612 hstate
.merge_hash (0);
3616 STRIP_ANY_LOCATION_WRAPPER (t
);
3618 if (!(flags
& OEP_ADDRESS_OF
))
3621 code
= TREE_CODE (t
);
3625 /* Alas, constants aren't shared, so we can't rely on pointer
3628 hstate
.merge_hash (0);
3631 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
3632 for (i
= 0; i
< TREE_INT_CST_EXT_NUNITS (t
); i
++)
3633 hstate
.add_hwi (TREE_INT_CST_ELT (t
, i
));
3638 if (!HONOR_SIGNED_ZEROS (t
) && real_zerop (t
))
3641 val2
= real_hash (TREE_REAL_CST_PTR (t
));
3642 hstate
.merge_hash (val2
);
3647 unsigned int val2
= fixed_hash (TREE_FIXED_CST_PTR (t
));
3648 hstate
.merge_hash (val2
);
3652 hstate
.add ((const void *) TREE_STRING_POINTER (t
),
3653 TREE_STRING_LENGTH (t
));
3656 hash_operand (TREE_REALPART (t
), hstate
, flags
);
3657 hash_operand (TREE_IMAGPART (t
), hstate
, flags
);
3661 hstate
.add_int (VECTOR_CST_NPATTERNS (t
));
3662 hstate
.add_int (VECTOR_CST_NELTS_PER_PATTERN (t
));
3663 unsigned int count
= vector_cst_encoded_nelts (t
);
3664 for (unsigned int i
= 0; i
< count
; ++i
)
3665 hash_operand (VECTOR_CST_ENCODED_ELT (t
, i
), hstate
, flags
);
3669 /* We can just compare by pointer. */
3670 hstate
.add_hwi (SSA_NAME_VERSION (t
));
3672 case PLACEHOLDER_EXPR
:
3673 /* The node itself doesn't matter. */
3680 /* A list of expressions, for a CALL_EXPR or as the elements of a
3682 for (; t
; t
= TREE_CHAIN (t
))
3683 hash_operand (TREE_VALUE (t
), hstate
, flags
);
3687 unsigned HOST_WIDE_INT idx
;
3689 flags
&= ~OEP_ADDRESS_OF
;
3690 hstate
.add_int (CONSTRUCTOR_NO_CLEARING (t
));
3691 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t
), idx
, field
, value
)
3693 /* In GIMPLE the indexes can be either NULL or matching i. */
3694 if (field
== NULL_TREE
)
3695 field
= bitsize_int (idx
);
3696 hash_operand (field
, hstate
, flags
);
3697 hash_operand (value
, hstate
, flags
);
3701 case STATEMENT_LIST
:
3703 tree_stmt_iterator i
;
3704 for (i
= tsi_start (CONST_CAST_TREE (t
));
3705 !tsi_end_p (i
); tsi_next (&i
))
3706 hash_operand (tsi_stmt (i
), hstate
, flags
);
3710 for (i
= 0; i
< TREE_VEC_LENGTH (t
); ++i
)
3711 hash_operand (TREE_VEC_ELT (t
, i
), hstate
, flags
);
3713 case IDENTIFIER_NODE
:
3714 hstate
.add_object (IDENTIFIER_HASH_VALUE (t
));
3717 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3718 Otherwise nodes that compare equal according to operand_equal_p might
3719 get different hash codes. However, don't do this for machine specific
3720 or front end builtins, since the function code is overloaded in those
3722 if (DECL_BUILT_IN_CLASS (t
) == BUILT_IN_NORMAL
3723 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t
)))
3725 t
= builtin_decl_explicit (DECL_FUNCTION_CODE (t
));
3726 code
= TREE_CODE (t
);
3730 if (POLY_INT_CST_P (t
))
3732 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
3733 hstate
.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t
, i
)));
/* Generic expression handling: dispatch on the code class.  */
3736 tclass
= TREE_CODE_CLASS (code
);
3738 if (tclass
== tcc_declaration
)
3740 /* DECL's have a unique ID */
3741 hstate
.add_hwi (DECL_UID (t
));
3743 else if (tclass
== tcc_comparison
&& !commutative_tree_code (code
))
3745 /* For comparisons that can be swapped, use the lower
3747 enum tree_code ccode
= swap_tree_comparison (code
);
3750 hstate
.add_object (ccode
);
3751 hash_operand (TREE_OPERAND (t
, ccode
!= code
), hstate
, flags
);
3752 hash_operand (TREE_OPERAND (t
, ccode
== code
), hstate
, flags
);
3754 else if (CONVERT_EXPR_CODE_P (code
))
3756 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3758 enum tree_code ccode
= NOP_EXPR
;
3759 hstate
.add_object (ccode
);
3761 /* Don't hash the type, that can lead to having nodes which
3762 compare equal according to operand_equal_p, but which
3763 have different hash codes. Make sure to include signedness
3764 in the hash computation. */
3765 hstate
.add_int (TYPE_UNSIGNED (TREE_TYPE (t
)));
3766 hash_operand (TREE_OPERAND (t
, 0), hstate
, flags
);
3768 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
3769 else if (code
== MEM_REF
3770 && (flags
& OEP_ADDRESS_OF
) != 0
3771 && TREE_CODE (TREE_OPERAND (t
, 0)) == ADDR_EXPR
3772 && DECL_P (TREE_OPERAND (TREE_OPERAND (t
, 0), 0))
3773 && integer_zerop (TREE_OPERAND (t
, 1)))
3774 hash_operand (TREE_OPERAND (TREE_OPERAND (t
, 0), 0),
3776 /* Don't ICE on FE specific trees, or their arguments etc.
3777 during operand_equal_p hash verification. */
3778 else if (!IS_EXPR_CODE_CLASS (tclass
))
3779 gcc_assert (flags
& OEP_HASH_CHECK
);
3782 unsigned int sflags
= flags
;
3784 hstate
.add_object (code
);
3789 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
3790 flags
|= OEP_ADDRESS_OF
;
3796 case TARGET_MEM_REF
:
3797 flags
&= ~OEP_ADDRESS_OF
;
3802 case ARRAY_RANGE_REF
:
3805 sflags
&= ~OEP_ADDRESS_OF
;
3809 flags
&= ~OEP_ADDRESS_OF
;
3812 case WIDEN_MULT_PLUS_EXPR
:
3813 case WIDEN_MULT_MINUS_EXPR
:
3815 /* The multiplication operands are commutative. */
3816 inchash::hash one
, two
;
3817 hash_operand (TREE_OPERAND (t
, 0), one
, flags
);
3818 hash_operand (TREE_OPERAND (t
, 1), two
, flags
);
3819 hstate
.add_commutative (one
, two
);
3820 hash_operand (TREE_OPERAND (t
, 2), two
, flags
);
3825 if (CALL_EXPR_FN (t
) == NULL_TREE
)
3826 hstate
.add_int (CALL_EXPR_IFN (t
));
3830 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
3831 Usually different TARGET_EXPRs just should use
3832 different temporaries in their slots. */
3833 hash_operand (TARGET_EXPR_SLOT (t
), hstate
, flags
);
3836 /* Virtual table call. */
3838 inchash::add_expr (OBJ_TYPE_REF_EXPR (t
), hstate
, flags
);
3839 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t
), hstate
, flags
);
3840 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t
), hstate
, flags
);
3846 /* Don't hash the type, that can lead to having nodes which
3847 compare equal according to operand_equal_p, but which
3848 have different hash codes. */
3849 if (code
== NON_LVALUE_EXPR
)
3851 /* Make sure to include signedness in the hash computation. */
3852 hstate
.add_int (TYPE_UNSIGNED (TREE_TYPE (t
)));
3853 hash_operand (TREE_OPERAND (t
, 0), hstate
, flags
);
3856 else if (commutative_tree_code (code
))
3858 /* It's a commutative expression. We want to hash it the same
3859 however it appears. We do this by first hashing both operands
3860 and then rehashing based on the order of their independent
3862 inchash::hash one
, two
;
3863 hash_operand (TREE_OPERAND (t
, 0), one
, flags
);
3864 hash_operand (TREE_OPERAND (t
, 1), two
, flags
);
3865 hstate
.add_commutative (one
, two
);
3868 for (i
= TREE_OPERAND_LENGTH (t
) - 1; i
>= 0; --i
)
3869 hash_operand (TREE_OPERAND (t
, i
), hstate
,
3870 i
== 0 ? flags
: sflags
);
3877 operand_compare::verify_hash_value (const_tree arg0
, const_tree arg1
,
3878 unsigned int flags
, bool *ret
)
3880 /* When checking, verify at the outermost operand_equal_p call that
3881 if operand_equal_p returns non-zero then ARG0 and ARG1 has the same
3883 if (flag_checking
&& !(flags
& OEP_NO_HASH_CHECK
))
3885 if (operand_equal_p (arg0
, arg1
, flags
| OEP_NO_HASH_CHECK
))
3889 inchash::hash
hstate0 (0), hstate1 (0);
3890 hash_operand (arg0
, hstate0
, flags
| OEP_HASH_CHECK
);
3891 hash_operand (arg1
, hstate1
, flags
| OEP_HASH_CHECK
);
3892 hashval_t h0
= hstate0
.end ();
3893 hashval_t h1
= hstate1
.end ();
3894 gcc_assert (h0
== h1
);
3908 static operand_compare default_compare_instance
;
3910 /* Conveinece wrapper around operand_compare class because usually we do
3911 not need to play with the valueizer. */
3914 operand_equal_p (const_tree arg0
, const_tree arg1
, unsigned int flags
)
3916 return default_compare_instance
.operand_equal_p (arg0
, arg1
, flags
);
3922 /* Generate a hash value for an expression. This can be used iteratively
3923 by passing a previous result as the HSTATE argument.
3925 This function is intended to produce the same hash for expressions which
3926 would compare equal using operand_equal_p. */
3928 add_expr (const_tree t
, inchash::hash
&hstate
, unsigned int flags
)
3930 default_compare_instance
.hash_operand (t
, hstate
, flags
);
3935 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3936 with a different signedness or a narrower precision. */
3939 operand_equal_for_comparison_p (tree arg0
, tree arg1
)
3941 if (operand_equal_p (arg0
, arg1
, 0))
3944 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
3945 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
3948 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3949 and see if the inner values are the same. This removes any
3950 signedness comparison, which doesn't matter here. */
3955 if (operand_equal_p (op0
, op1
, 0))
3958 /* Discard a single widening conversion from ARG1 and see if the inner
3959 value is the same as ARG0. */
3960 if (CONVERT_EXPR_P (arg1
)
3961 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1
, 0)))
3962 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1
, 0)))
3963 < TYPE_PRECISION (TREE_TYPE (arg1
))
3964 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
3970 /* See if ARG is an expression that is either a comparison or is performing
3971 arithmetic on comparisons. The comparisons must only be comparing
3972 two different values, which will be stored in *CVAL1 and *CVAL2; if
3973 they are nonzero it means that some operands have already been found.
3974 No variables may be used anywhere else in the expression except in the
3977 If this is true, return 1. Otherwise, return zero. */
3980 twoval_comparison_p (tree arg
, tree
*cval1
, tree
*cval2
)
3982 enum tree_code code
= TREE_CODE (arg
);
3983 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
3985 /* We can handle some of the tcc_expression cases here. */
3986 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
3988 else if (tclass
== tcc_expression
3989 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
3990 || code
== COMPOUND_EXPR
))
3991 tclass
= tcc_binary
;
3996 return twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
);
3999 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
)
4000 && twoval_comparison_p (TREE_OPERAND (arg
, 1), cval1
, cval2
));
4005 case tcc_expression
:
4006 if (code
== COND_EXPR
)
4007 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
)
4008 && twoval_comparison_p (TREE_OPERAND (arg
, 1), cval1
, cval2
)
4009 && twoval_comparison_p (TREE_OPERAND (arg
, 2), cval1
, cval2
));
4012 case tcc_comparison
:
4013 /* First see if we can handle the first operand, then the second. For
4014 the second operand, we know *CVAL1 can't be zero. It must be that
4015 one side of the comparison is each of the values; test for the
4016 case where this isn't true by failing if the two operands
4019 if (operand_equal_p (TREE_OPERAND (arg
, 0),
4020 TREE_OPERAND (arg
, 1), 0))
4024 *cval1
= TREE_OPERAND (arg
, 0);
4025 else if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 0), 0))
4027 else if (*cval2
== 0)
4028 *cval2
= TREE_OPERAND (arg
, 0);
4029 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 0), 0))
4034 if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 1), 0))
4036 else if (*cval2
== 0)
4037 *cval2
= TREE_OPERAND (arg
, 1);
4038 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 1), 0))
4050 /* ARG is a tree that is known to contain just arithmetic operations and
4051 comparisons. Evaluate the operations in the tree substituting NEW0 for
4052 any occurrence of OLD0 as an operand of a comparison and likewise for
4056 eval_subst (location_t loc
, tree arg
, tree old0
, tree new0
,
4057 tree old1
, tree new1
)
4059 tree type
= TREE_TYPE (arg
);
4060 enum tree_code code
= TREE_CODE (arg
);
4061 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
4063 /* We can handle some of the tcc_expression cases here. */
4064 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
4066 else if (tclass
== tcc_expression
4067 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
4068 tclass
= tcc_binary
;
4073 return fold_build1_loc (loc
, code
, type
,
4074 eval_subst (loc
, TREE_OPERAND (arg
, 0),
4075 old0
, new0
, old1
, new1
));
4078 return fold_build2_loc (loc
, code
, type
,
4079 eval_subst (loc
, TREE_OPERAND (arg
, 0),
4080 old0
, new0
, old1
, new1
),
4081 eval_subst (loc
, TREE_OPERAND (arg
, 1),
4082 old0
, new0
, old1
, new1
));
4084 case tcc_expression
:
4088 return eval_subst (loc
, TREE_OPERAND (arg
, 0), old0
, new0
,
4092 return eval_subst (loc
, TREE_OPERAND (arg
, 1), old0
, new0
,
4096 return fold_build3_loc (loc
, code
, type
,
4097 eval_subst (loc
, TREE_OPERAND (arg
, 0),
4098 old0
, new0
, old1
, new1
),
4099 eval_subst (loc
, TREE_OPERAND (arg
, 1),
4100 old0
, new0
, old1
, new1
),
4101 eval_subst (loc
, TREE_OPERAND (arg
, 2),
4102 old0
, new0
, old1
, new1
));
4106 /* Fall through - ??? */
4108 case tcc_comparison
:
4110 tree arg0
= TREE_OPERAND (arg
, 0);
4111 tree arg1
= TREE_OPERAND (arg
, 1);
4113 /* We need to check both for exact equality and tree equality. The
4114 former will be true if the operand has a side-effect. In that
4115 case, we know the operand occurred exactly once. */
4117 if (arg0
== old0
|| operand_equal_p (arg0
, old0
, 0))
4119 else if (arg0
== old1
|| operand_equal_p (arg0
, old1
, 0))
4122 if (arg1
== old0
|| operand_equal_p (arg1
, old0
, 0))
4124 else if (arg1
== old1
|| operand_equal_p (arg1
, old1
, 0))
4127 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
4135 /* Return a tree for the case when the result of an expression is RESULT
4136 converted to TYPE and OMITTED was previously an operand of the expression
4137 but is now not needed (e.g., we folded OMITTED * 0).
4139 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4140 the conversion of RESULT to TYPE. */
4143 omit_one_operand_loc (location_t loc
, tree type
, tree result
, tree omitted
)
4145 tree t
= fold_convert_loc (loc
, type
, result
);
4147 /* If the resulting operand is an empty statement, just return the omitted
4148 statement casted to void. */
4149 if (IS_EMPTY_STMT (t
) && TREE_SIDE_EFFECTS (omitted
))
4150 return build1_loc (loc
, NOP_EXPR
, void_type_node
,
4151 fold_ignored_result (omitted
));
4153 if (TREE_SIDE_EFFECTS (omitted
))
4154 return build2_loc (loc
, COMPOUND_EXPR
, type
,
4155 fold_ignored_result (omitted
), t
);
4157 return non_lvalue_loc (loc
, t
);
4160 /* Return a tree for the case when the result of an expression is RESULT
4161 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4162 of the expression but are now not needed.
4164 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4165 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4166 evaluated before OMITTED2. Otherwise, if neither has side effects,
4167 just do the conversion of RESULT to TYPE. */
4170 omit_two_operands_loc (location_t loc
, tree type
, tree result
,
4171 tree omitted1
, tree omitted2
)
4173 tree t
= fold_convert_loc (loc
, type
, result
);
4175 if (TREE_SIDE_EFFECTS (omitted2
))
4176 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted2
, t
);
4177 if (TREE_SIDE_EFFECTS (omitted1
))
4178 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted1
, t
);
4180 return TREE_CODE (t
) != COMPOUND_EXPR
? non_lvalue_loc (loc
, t
) : t
;
4184 /* Return a simplified tree node for the truth-negation of ARG. This
4185 never alters ARG itself. We assume that ARG is an operation that
4186 returns a truth value (0 or 1).
4188 FIXME: one would think we would fold the result, but it causes
4189 problems with the dominator optimizer. */
4192 fold_truth_not_expr (location_t loc
, tree arg
)
4194 tree type
= TREE_TYPE (arg
);
4195 enum tree_code code
= TREE_CODE (arg
);
4196 location_t loc1
, loc2
;
4198 /* If this is a comparison, we can simply invert it, except for
4199 floating-point non-equality comparisons, in which case we just
4200 enclose a TRUTH_NOT_EXPR around what we have. */
4202 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
4204 tree op_type
= TREE_TYPE (TREE_OPERAND (arg
, 0));
4205 if (FLOAT_TYPE_P (op_type
)
4206 && flag_trapping_math
4207 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
4208 && code
!= NE_EXPR
&& code
!= EQ_EXPR
)
4211 code
= invert_tree_comparison (code
, HONOR_NANS (op_type
));
4212 if (code
== ERROR_MARK
)
4215 tree ret
= build2_loc (loc
, code
, type
, TREE_OPERAND (arg
, 0),
4216 TREE_OPERAND (arg
, 1));
4217 if (TREE_NO_WARNING (arg
))
4218 TREE_NO_WARNING (ret
) = 1;
4225 return constant_boolean_node (integer_zerop (arg
), type
);
4227 case TRUTH_AND_EXPR
:
4228 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4229 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
4230 return build2_loc (loc
, TRUTH_OR_EXPR
, type
,
4231 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
4232 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
4235 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4236 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
4237 return build2_loc (loc
, TRUTH_AND_EXPR
, type
,
4238 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
4239 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
4241 case TRUTH_XOR_EXPR
:
4242 /* Here we can invert either operand. We invert the first operand
4243 unless the second operand is a TRUTH_NOT_EXPR in which case our
4244 result is the XOR of the first operand with the inside of the
4245 negation of the second operand. */
4247 if (TREE_CODE (TREE_OPERAND (arg
, 1)) == TRUTH_NOT_EXPR
)
4248 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
, TREE_OPERAND (arg
, 0),
4249 TREE_OPERAND (TREE_OPERAND (arg
, 1), 0));
4251 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
,
4252 invert_truthvalue_loc (loc
, TREE_OPERAND (arg
, 0)),
4253 TREE_OPERAND (arg
, 1));
4255 case TRUTH_ANDIF_EXPR
:
4256 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4257 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
4258 return build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
4259 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
4260 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
4262 case TRUTH_ORIF_EXPR
:
4263 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4264 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
4265 return build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
4266 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
4267 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
4269 case TRUTH_NOT_EXPR
:
4270 return TREE_OPERAND (arg
, 0);
4274 tree arg1
= TREE_OPERAND (arg
, 1);
4275 tree arg2
= TREE_OPERAND (arg
, 2);
4277 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
4278 loc2
= expr_location_or (TREE_OPERAND (arg
, 2), loc
);
4280 /* A COND_EXPR may have a throw as one operand, which
4281 then has void type. Just leave void operands
4283 return build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg
, 0),
4284 VOID_TYPE_P (TREE_TYPE (arg1
))
4285 ? arg1
: invert_truthvalue_loc (loc1
, arg1
),
4286 VOID_TYPE_P (TREE_TYPE (arg2
))
4287 ? arg2
: invert_truthvalue_loc (loc2
, arg2
));
4291 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
4292 return build2_loc (loc
, COMPOUND_EXPR
, type
,
4293 TREE_OPERAND (arg
, 0),
4294 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 1)));
4296 case NON_LVALUE_EXPR
:
4297 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4298 return invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0));
4301 if (TREE_CODE (TREE_TYPE (arg
)) == BOOLEAN_TYPE
)
4302 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
4307 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4308 return build1_loc (loc
, TREE_CODE (arg
), type
,
4309 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
4312 if (!integer_onep (TREE_OPERAND (arg
, 1)))
4314 return build2_loc (loc
, EQ_EXPR
, type
, arg
, build_int_cst (type
, 0));
4317 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
4319 case CLEANUP_POINT_EXPR
:
4320 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
4321 return build1_loc (loc
, CLEANUP_POINT_EXPR
, type
,
4322 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
4329 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4330 assume that ARG is an operation that returns a truth value (0 or 1
4331 for scalars, 0 or -1 for vectors). Return the folded expression if
4332 folding is successful. Otherwise, return NULL_TREE. */
4335 fold_invert_truthvalue (location_t loc
, tree arg
)
4337 tree type
= TREE_TYPE (arg
);
4338 return fold_unary_loc (loc
, VECTOR_TYPE_P (type
)
4344 /* Return a simplified tree node for the truth-negation of ARG. This
4345 never alters ARG itself. We assume that ARG is an operation that
4346 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4349 invert_truthvalue_loc (location_t loc
, tree arg
)
4351 if (TREE_CODE (arg
) == ERROR_MARK
)
4354 tree type
= TREE_TYPE (arg
);
4355 return fold_build1_loc (loc
, VECTOR_TYPE_P (type
)
4361 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4362 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4363 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4364 is the original memory reference used to preserve the alias set of
4368 make_bit_field_ref (location_t loc
, tree inner
, tree orig_inner
, tree type
,
4369 HOST_WIDE_INT bitsize
, poly_int64 bitpos
,
4370 int unsignedp
, int reversep
)
4372 tree result
, bftype
;
4374 /* Attempt not to lose the access path if possible. */
4375 if (TREE_CODE (orig_inner
) == COMPONENT_REF
)
4377 tree ninner
= TREE_OPERAND (orig_inner
, 0);
4379 poly_int64 nbitsize
, nbitpos
;
4381 int nunsignedp
, nreversep
, nvolatilep
= 0;
4382 tree base
= get_inner_reference (ninner
, &nbitsize
, &nbitpos
,
4383 &noffset
, &nmode
, &nunsignedp
,
4384 &nreversep
, &nvolatilep
);
4386 && noffset
== NULL_TREE
4387 && known_subrange_p (bitpos
, bitsize
, nbitpos
, nbitsize
)
4397 alias_set_type iset
= get_alias_set (orig_inner
);
4398 if (iset
== 0 && get_alias_set (inner
) != iset
)
4399 inner
= fold_build2 (MEM_REF
, TREE_TYPE (inner
),
4400 build_fold_addr_expr (inner
),
4401 build_int_cst (ptr_type_node
, 0));
4403 if (known_eq (bitpos
, 0) && !reversep
)
4405 tree size
= TYPE_SIZE (TREE_TYPE (inner
));
4406 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner
))
4407 || POINTER_TYPE_P (TREE_TYPE (inner
)))
4408 && tree_fits_shwi_p (size
)
4409 && tree_to_shwi (size
) == bitsize
)
4410 return fold_convert_loc (loc
, type
, inner
);
4414 if (TYPE_PRECISION (bftype
) != bitsize
4415 || TYPE_UNSIGNED (bftype
) == !unsignedp
)
4416 bftype
= build_nonstandard_integer_type (bitsize
, 0);
4418 result
= build3_loc (loc
, BIT_FIELD_REF
, bftype
, inner
,
4419 bitsize_int (bitsize
), bitsize_int (bitpos
));
4420 REF_REVERSE_STORAGE_ORDER (result
) = reversep
;
4423 result
= fold_convert_loc (loc
, type
, result
);
4428 /* Optimize a bit-field compare.
4430 There are two cases: First is a compare against a constant and the
4431 second is a comparison of two items where the fields are at the same
4432 bit position relative to the start of a chunk (byte, halfword, word)
4433 large enough to contain it. In these cases we can avoid the shift
4434 implicit in bitfield extractions.
4436 For constants, we emit a compare of the shifted constant with the
4437 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4438 compared. For two fields at the same position, we do the ANDs with the
4439 similar mask and compare the result of the ANDs.
4441 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4442 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4443 are the left and right operands of the comparison, respectively.
4445 If the optimization described above can be done, we return the resulting
4446 tree. Otherwise we return zero. */
4449 optimize_bit_field_compare (location_t loc
, enum tree_code code
,
4450 tree compare_type
, tree lhs
, tree rhs
)
4452 poly_int64 plbitpos
, plbitsize
, rbitpos
, rbitsize
;
4453 HOST_WIDE_INT lbitpos
, lbitsize
, nbitpos
, nbitsize
;
4454 tree type
= TREE_TYPE (lhs
);
4456 int const_p
= TREE_CODE (rhs
) == INTEGER_CST
;
4457 machine_mode lmode
, rmode
;
4458 scalar_int_mode nmode
;
4459 int lunsignedp
, runsignedp
;
4460 int lreversep
, rreversep
;
4461 int lvolatilep
= 0, rvolatilep
= 0;
4462 tree linner
, rinner
= NULL_TREE
;
4466 /* Get all the information about the extractions being done. If the bit size
4467 is the same as the size of the underlying object, we aren't doing an
4468 extraction at all and so can do nothing. We also don't want to
4469 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4470 then will no longer be able to replace it. */
4471 linner
= get_inner_reference (lhs
, &plbitsize
, &plbitpos
, &offset
, &lmode
,
4472 &lunsignedp
, &lreversep
, &lvolatilep
);
4474 || !known_size_p (plbitsize
)
4475 || !plbitsize
.is_constant (&lbitsize
)
4476 || !plbitpos
.is_constant (&lbitpos
)
4477 || known_eq (lbitsize
, GET_MODE_BITSIZE (lmode
))
4479 || TREE_CODE (linner
) == PLACEHOLDER_EXPR
4484 rreversep
= lreversep
;
4487 /* If this is not a constant, we can only do something if bit positions,
4488 sizes, signedness and storage order are the same. */
4490 = get_inner_reference (rhs
, &rbitsize
, &rbitpos
, &offset
, &rmode
,
4491 &runsignedp
, &rreversep
, &rvolatilep
);
4494 || maybe_ne (lbitpos
, rbitpos
)
4495 || maybe_ne (lbitsize
, rbitsize
)
4496 || lunsignedp
!= runsignedp
4497 || lreversep
!= rreversep
4499 || TREE_CODE (rinner
) == PLACEHOLDER_EXPR
4504 /* Honor the C++ memory model and mimic what RTL expansion does. */
4505 poly_uint64 bitstart
= 0;
4506 poly_uint64 bitend
= 0;
4507 if (TREE_CODE (lhs
) == COMPONENT_REF
)
4509 get_bit_range (&bitstart
, &bitend
, lhs
, &plbitpos
, &offset
);
4510 if (!plbitpos
.is_constant (&lbitpos
) || offset
!= NULL_TREE
)
4514 /* See if we can find a mode to refer to this field. We should be able to,
4515 but fail if we can't. */
4516 if (!get_best_mode (lbitsize
, lbitpos
, bitstart
, bitend
,
4517 const_p
? TYPE_ALIGN (TREE_TYPE (linner
))
4518 : MIN (TYPE_ALIGN (TREE_TYPE (linner
)),
4519 TYPE_ALIGN (TREE_TYPE (rinner
))),
4520 BITS_PER_WORD
, false, &nmode
))
4523 /* Set signed and unsigned types of the precision of this mode for the
4525 unsigned_type
= lang_hooks
.types
.type_for_mode (nmode
, 1);
4527 /* Compute the bit position and size for the new reference and our offset
4528 within it. If the new reference is the same size as the original, we
4529 won't optimize anything, so return zero. */
4530 nbitsize
= GET_MODE_BITSIZE (nmode
);
4531 nbitpos
= lbitpos
& ~ (nbitsize
- 1);
4533 if (nbitsize
== lbitsize
)
4536 if (lreversep
? !BYTES_BIG_ENDIAN
: BYTES_BIG_ENDIAN
)
4537 lbitpos
= nbitsize
- lbitsize
- lbitpos
;
4539 /* Make the mask to be used against the extracted field. */
4540 mask
= build_int_cst_type (unsigned_type
, -1);
4541 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (nbitsize
- lbitsize
));
4542 mask
= const_binop (RSHIFT_EXPR
, mask
,
4543 size_int (nbitsize
- lbitsize
- lbitpos
));
4550 /* If not comparing with constant, just rework the comparison
4552 tree t1
= make_bit_field_ref (loc
, linner
, lhs
, unsigned_type
,
4553 nbitsize
, nbitpos
, 1, lreversep
);
4554 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
, t1
, mask
);
4555 tree t2
= make_bit_field_ref (loc
, rinner
, rhs
, unsigned_type
,
4556 nbitsize
, nbitpos
, 1, rreversep
);
4557 t2
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
, t2
, mask
);
4558 return fold_build2_loc (loc
, code
, compare_type
, t1
, t2
);
4561 /* Otherwise, we are handling the constant case. See if the constant is too
4562 big for the field. Warn and return a tree for 0 (false) if so. We do
4563 this not only for its own sake, but to avoid having to test for this
4564 error case below. If we didn't, we might generate wrong code.
4566 For unsigned fields, the constant shifted right by the field length should
4567 be all zero. For signed fields, the high-order bits should agree with
4572 if (wi::lrshift (wi::to_wide (rhs
), lbitsize
) != 0)
4574 warning (0, "comparison is always %d due to width of bit-field",
4576 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
4581 wide_int tem
= wi::arshift (wi::to_wide (rhs
), lbitsize
- 1);
4582 if (tem
!= 0 && tem
!= -1)
4584 warning (0, "comparison is always %d due to width of bit-field",
4586 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
4593 /* Single-bit compares should always be against zero. */
4594 if (lbitsize
== 1 && ! integer_zerop (rhs
))
4596 code
= code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
4597 rhs
= build_int_cst (type
, 0);
4600 /* Make a new bitfield reference, shift the constant over the
4601 appropriate number of bits and mask it with the computed mask
4602 (in case this was a signed field). If we changed it, make a new one. */
4603 lhs
= make_bit_field_ref (loc
, linner
, lhs
, unsigned_type
,
4604 nbitsize
, nbitpos
, 1, lreversep
);
4606 rhs
= const_binop (BIT_AND_EXPR
,
4607 const_binop (LSHIFT_EXPR
,
4608 fold_convert_loc (loc
, unsigned_type
, rhs
),
4609 size_int (lbitpos
)),
4612 lhs
= build2_loc (loc
, code
, compare_type
,
4613 build2 (BIT_AND_EXPR
, unsigned_type
, lhs
, mask
), rhs
);
4617 /* Subroutine for fold_truth_andor_1: decode a field reference.
4619 If EXP is a comparison reference, we return the innermost reference.
4621 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4622 set to the starting bit number.
4624 If the innermost field can be completely contained in a mode-sized
4625 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4627 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
4628 otherwise it is not changed.
4630 *PUNSIGNEDP is set to the signedness of the field.
4632 *PREVERSEP is set to the storage order of the field.
4634 *PMASK is set to the mask used. This is either contained in a
4635 BIT_AND_EXPR or derived from the width of the field.
4637 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4639 Return 0 if this is not a component reference or is one that we can't
4640 do anything with. */
4643 decode_field_reference (location_t loc
, tree
*exp_
, HOST_WIDE_INT
*pbitsize
,
4644 HOST_WIDE_INT
*pbitpos
, machine_mode
*pmode
,
4645 int *punsignedp
, int *preversep
, int *pvolatilep
,
4646 tree
*pmask
, tree
*pand_mask
)
4649 tree outer_type
= 0;
4651 tree mask
, inner
, offset
;
4653 unsigned int precision
;
4655 /* All the optimizations using this function assume integer fields.
4656 There are problems with FP fields since the type_for_size call
4657 below can fail for, e.g., XFmode. */
4658 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp
)))
4661 /* We are interested in the bare arrangement of bits, so strip everything
4662 that doesn't affect the machine mode. However, record the type of the
4663 outermost expression if it may matter below. */
4664 if (CONVERT_EXPR_P (exp
)
4665 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
4666 outer_type
= TREE_TYPE (exp
);
4669 if (TREE_CODE (exp
) == BIT_AND_EXPR
)
4671 and_mask
= TREE_OPERAND (exp
, 1);
4672 exp
= TREE_OPERAND (exp
, 0);
4673 STRIP_NOPS (exp
); STRIP_NOPS (and_mask
);
4674 if (TREE_CODE (and_mask
) != INTEGER_CST
)
4678 poly_int64 poly_bitsize
, poly_bitpos
;
4679 inner
= get_inner_reference (exp
, &poly_bitsize
, &poly_bitpos
, &offset
,
4680 pmode
, punsignedp
, preversep
, pvolatilep
);
4681 if ((inner
== exp
&& and_mask
== 0)
4682 || !poly_bitsize
.is_constant (pbitsize
)
4683 || !poly_bitpos
.is_constant (pbitpos
)
4686 || TREE_CODE (inner
) == PLACEHOLDER_EXPR
4687 /* Reject out-of-bound accesses (PR79731). */
4688 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner
))
4689 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner
)),
4690 *pbitpos
+ *pbitsize
) < 0))
4693 unsigned_type
= lang_hooks
.types
.type_for_size (*pbitsize
, 1);
4694 if (unsigned_type
== NULL_TREE
)
4699 /* If the number of bits in the reference is the same as the bitsize of
4700 the outer type, then the outer type gives the signedness. Otherwise
4701 (in case of a small bitfield) the signedness is unchanged. */
4702 if (outer_type
&& *pbitsize
== TYPE_PRECISION (outer_type
))
4703 *punsignedp
= TYPE_UNSIGNED (outer_type
);
4705 /* Compute the mask to access the bitfield. */
4706 precision
= TYPE_PRECISION (unsigned_type
);
4708 mask
= build_int_cst_type (unsigned_type
, -1);
4710 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
4711 mask
= const_binop (RSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
4713 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4715 mask
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
4716 fold_convert_loc (loc
, unsigned_type
, and_mask
), mask
);
4719 *pand_mask
= and_mask
;
4723 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4724 bit positions and MASK is SIGNED. */
4727 all_ones_mask_p (const_tree mask
, unsigned int size
)
4729 tree type
= TREE_TYPE (mask
);
4730 unsigned int precision
= TYPE_PRECISION (type
);
4732 /* If this function returns true when the type of the mask is
4733 UNSIGNED, then there will be errors. In particular see
4734 gcc.c-torture/execute/990326-1.c. There does not appear to be
4735 any documentation paper trail as to why this is so. But the pre
4736 wide-int worked with that restriction and it has been preserved
4738 if (size
> precision
|| TYPE_SIGN (type
) == UNSIGNED
)
4741 return wi::mask (size
, false, precision
) == wi::to_wide (mask
);
4744 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4745 represents the sign bit of EXP's type. If EXP represents a sign
4746 or zero extension, also test VAL against the unextended type.
4747 The return value is the (sub)expression whose sign bit is VAL,
4748 or NULL_TREE otherwise. */
4751 sign_bit_p (tree exp
, const_tree val
)
4756 /* Tree EXP must have an integral type. */
4757 t
= TREE_TYPE (exp
);
4758 if (! INTEGRAL_TYPE_P (t
))
4761 /* Tree VAL must be an integer constant. */
4762 if (TREE_CODE (val
) != INTEGER_CST
4763 || TREE_OVERFLOW (val
))
4766 width
= TYPE_PRECISION (t
);
4767 if (wi::only_sign_bit_p (wi::to_wide (val
), width
))
4770 /* Handle extension from a narrower type. */
4771 if (TREE_CODE (exp
) == NOP_EXPR
4772 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))) < width
)
4773 return sign_bit_p (TREE_OPERAND (exp
, 0), val
);
4778 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4779 to be evaluated unconditionally. */
4782 simple_operand_p (const_tree exp
)
4784 /* Strip any conversions that don't change the machine mode. */
4787 return (CONSTANT_CLASS_P (exp
)
4788 || TREE_CODE (exp
) == SSA_NAME
4790 && ! TREE_ADDRESSABLE (exp
)
4791 && ! TREE_THIS_VOLATILE (exp
)
4792 && ! DECL_NONLOCAL (exp
)
4793 /* Don't regard global variables as simple. They may be
4794 allocated in ways unknown to the compiler (shared memory,
4795 #pragma weak, etc). */
4796 && ! TREE_PUBLIC (exp
)
4797 && ! DECL_EXTERNAL (exp
)
4798 /* Weakrefs are not safe to be read, since they can be NULL.
4799 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4800 have DECL_WEAK flag set. */
4801 && (! VAR_OR_FUNCTION_DECL_P (exp
) || ! DECL_WEAK (exp
))
4802 /* Loading a static variable is unduly expensive, but global
4803 registers aren't expensive. */
4804 && (! TREE_STATIC (exp
) || DECL_REGISTER (exp
))));
4807 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4808 to be evaluated unconditionally.
4809 I addition to simple_operand_p, we assume that comparisons, conversions,
4810 and logic-not operations are simple, if their operands are simple, too. */
4813 simple_operand_p_2 (tree exp
)
4815 enum tree_code code
;
4817 if (TREE_SIDE_EFFECTS (exp
) || generic_expr_could_trap_p (exp
))
4820 while (CONVERT_EXPR_P (exp
))
4821 exp
= TREE_OPERAND (exp
, 0);
4823 code
= TREE_CODE (exp
);
4825 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
4826 return (simple_operand_p (TREE_OPERAND (exp
, 0))
4827 && simple_operand_p (TREE_OPERAND (exp
, 1)));
4829 if (code
== TRUTH_NOT_EXPR
)
4830 return simple_operand_p_2 (TREE_OPERAND (exp
, 0));
4832 return simple_operand_p (exp
);
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
   are converted to
	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */

/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */
4872 range_binop (enum tree_code code
, tree type
, tree arg0
, int upper0_p
,
4873 tree arg1
, int upper1_p
)
4879 /* If neither arg represents infinity, do the normal operation.
4880 Else, if not a comparison, return infinity. Else handle the special
4881 comparison rules. Note that most of the cases below won't occur, but
4882 are handled for consistency. */
4884 if (arg0
!= 0 && arg1
!= 0)
4886 tem
= fold_build2 (code
, type
!= 0 ? type
: TREE_TYPE (arg0
),
4887 arg0
, fold_convert (TREE_TYPE (arg0
), arg1
));
4889 return TREE_CODE (tem
) == INTEGER_CST
? tem
: 0;
4892 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
4895 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4896 for neither. In real maths, we cannot assume open ended ranges are
4897 the same. But, this is computer arithmetic, where numbers are finite.
4898 We can therefore make the transformation of any unbounded range with
4899 the value Z, Z being greater than any representable number. This permits
4900 us to treat unbounded ranges as equal. */
4901 sgn0
= arg0
!= 0 ? 0 : (upper0_p
? 1 : -1);
4902 sgn1
= arg1
!= 0 ? 0 : (upper1_p
? 1 : -1);
4906 result
= sgn0
== sgn1
;
4909 result
= sgn0
!= sgn1
;
4912 result
= sgn0
< sgn1
;
4915 result
= sgn0
<= sgn1
;
4918 result
= sgn0
> sgn1
;
4921 result
= sgn0
>= sgn1
;
4927 return constant_boolean_node (result
, type
);
4930 /* Helper routine for make_range. Perform one step for it, return
4931 new expression if the loop should continue or NULL_TREE if it should
4935 make_range_step (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
,
4936 tree exp_type
, tree
*p_low
, tree
*p_high
, int *p_in_p
,
4937 bool *strict_overflow_p
)
4939 tree arg0_type
= TREE_TYPE (arg0
);
4940 tree n_low
, n_high
, low
= *p_low
, high
= *p_high
;
4941 int in_p
= *p_in_p
, n_in_p
;
4945 case TRUTH_NOT_EXPR
:
4946 /* We can only do something if the range is testing for zero. */
4947 if (low
== NULL_TREE
|| high
== NULL_TREE
4948 || ! integer_zerop (low
) || ! integer_zerop (high
))
4953 case EQ_EXPR
: case NE_EXPR
:
4954 case LT_EXPR
: case LE_EXPR
: case GE_EXPR
: case GT_EXPR
:
4955 /* We can only do something if the range is testing for zero
4956 and if the second operand is an integer constant. Note that
4957 saying something is "in" the range we make is done by
4958 complementing IN_P since it will set in the initial case of
4959 being not equal to zero; "out" is leaving it alone. */
4960 if (low
== NULL_TREE
|| high
== NULL_TREE
4961 || ! integer_zerop (low
) || ! integer_zerop (high
)
4962 || TREE_CODE (arg1
) != INTEGER_CST
)
4967 case NE_EXPR
: /* - [c, c] */
4970 case EQ_EXPR
: /* + [c, c] */
4971 in_p
= ! in_p
, low
= high
= arg1
;
4973 case GT_EXPR
: /* - [-, c] */
4974 low
= 0, high
= arg1
;
4976 case GE_EXPR
: /* + [c, -] */
4977 in_p
= ! in_p
, low
= arg1
, high
= 0;
4979 case LT_EXPR
: /* - [c, -] */
4980 low
= arg1
, high
= 0;
4982 case LE_EXPR
: /* + [-, c] */
4983 in_p
= ! in_p
, low
= 0, high
= arg1
;
4989 /* If this is an unsigned comparison, we also know that EXP is
4990 greater than or equal to zero. We base the range tests we make
4991 on that fact, so we record it here so we can parse existing
4992 range tests. We test arg0_type since often the return type
4993 of, e.g. EQ_EXPR, is boolean. */
4994 if (TYPE_UNSIGNED (arg0_type
) && (low
== 0 || high
== 0))
4996 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
4998 build_int_cst (arg0_type
, 0),
5002 in_p
= n_in_p
, low
= n_low
, high
= n_high
;
5004 /* If the high bound is missing, but we have a nonzero low
5005 bound, reverse the range so it goes from zero to the low bound
5007 if (high
== 0 && low
&& ! integer_zerop (low
))
5010 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low
, 0,
5011 build_int_cst (TREE_TYPE (low
), 1), 0);
5012 low
= build_int_cst (arg0_type
, 0);
5022 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5023 low and high are non-NULL, then normalize will DTRT. */
5024 if (!TYPE_UNSIGNED (arg0_type
)
5025 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
5027 if (low
== NULL_TREE
)
5028 low
= TYPE_MIN_VALUE (arg0_type
);
5029 if (high
== NULL_TREE
)
5030 high
= TYPE_MAX_VALUE (arg0_type
);
5033 /* (-x) IN [a,b] -> x in [-b, -a] */
5034 n_low
= range_binop (MINUS_EXPR
, exp_type
,
5035 build_int_cst (exp_type
, 0),
5037 n_high
= range_binop (MINUS_EXPR
, exp_type
,
5038 build_int_cst (exp_type
, 0),
5040 if (n_high
!= 0 && TREE_OVERFLOW (n_high
))
5046 return build2_loc (loc
, MINUS_EXPR
, exp_type
, negate_expr (arg0
),
5047 build_int_cst (exp_type
, 1));
5051 if (TREE_CODE (arg1
) != INTEGER_CST
)
5054 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5055 move a constant to the other side. */
5056 if (!TYPE_UNSIGNED (arg0_type
)
5057 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
5060 /* If EXP is signed, any overflow in the computation is undefined,
5061 so we don't worry about it so long as our computations on
5062 the bounds don't overflow. For unsigned, overflow is defined
5063 and this is exactly the right thing. */
5064 n_low
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
5065 arg0_type
, low
, 0, arg1
, 0);
5066 n_high
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
5067 arg0_type
, high
, 1, arg1
, 0);
5068 if ((n_low
!= 0 && TREE_OVERFLOW (n_low
))
5069 || (n_high
!= 0 && TREE_OVERFLOW (n_high
)))
5072 if (TYPE_OVERFLOW_UNDEFINED (arg0_type
))
5073 *strict_overflow_p
= true;
5076 /* Check for an unsigned range which has wrapped around the maximum
5077 value thus making n_high < n_low, and normalize it. */
5078 if (n_low
&& n_high
&& tree_int_cst_lt (n_high
, n_low
))
5080 low
= range_binop (PLUS_EXPR
, arg0_type
, n_high
, 0,
5081 build_int_cst (TREE_TYPE (n_high
), 1), 0);
5082 high
= range_binop (MINUS_EXPR
, arg0_type
, n_low
, 0,
5083 build_int_cst (TREE_TYPE (n_low
), 1), 0);
5085 /* If the range is of the form +/- [ x+1, x ], we won't
5086 be able to normalize it. But then, it represents the
5087 whole range or the empty set, so make it
5089 if (tree_int_cst_equal (n_low
, low
)
5090 && tree_int_cst_equal (n_high
, high
))
5096 low
= n_low
, high
= n_high
;
5104 case NON_LVALUE_EXPR
:
5105 if (TYPE_PRECISION (arg0_type
) > TYPE_PRECISION (exp_type
))
5108 if (! INTEGRAL_TYPE_P (arg0_type
)
5109 || (low
!= 0 && ! int_fits_type_p (low
, arg0_type
))
5110 || (high
!= 0 && ! int_fits_type_p (high
, arg0_type
)))
5113 n_low
= low
, n_high
= high
;
5116 n_low
= fold_convert_loc (loc
, arg0_type
, n_low
);
5119 n_high
= fold_convert_loc (loc
, arg0_type
, n_high
);
5121 /* If we're converting arg0 from an unsigned type, to exp,
5122 a signed type, we will be doing the comparison as unsigned.
5123 The tests above have already verified that LOW and HIGH
5126 So we have to ensure that we will handle large unsigned
5127 values the same way that the current signed bounds treat
5130 if (!TYPE_UNSIGNED (exp_type
) && TYPE_UNSIGNED (arg0_type
))
5134 /* For fixed-point modes, we need to pass the saturating flag
5135 as the 2nd parameter. */
5136 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type
)))
5138 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
),
5139 TYPE_SATURATING (arg0_type
));
5142 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
), 1);
5144 /* A range without an upper bound is, naturally, unbounded.
5145 Since convert would have cropped a very large value, use
5146 the max value for the destination type. */
5148 = TYPE_MAX_VALUE (equiv_type
) ? TYPE_MAX_VALUE (equiv_type
)
5149 : TYPE_MAX_VALUE (arg0_type
);
5151 if (TYPE_PRECISION (exp_type
) == TYPE_PRECISION (arg0_type
))
5152 high_positive
= fold_build2_loc (loc
, RSHIFT_EXPR
, arg0_type
,
5153 fold_convert_loc (loc
, arg0_type
,
5155 build_int_cst (arg0_type
, 1));
5157 /* If the low bound is specified, "and" the range with the
5158 range for which the original unsigned value will be
5162 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 1, n_low
, n_high
,
5163 1, fold_convert_loc (loc
, arg0_type
,
5168 in_p
= (n_in_p
== in_p
);
5172 /* Otherwise, "or" the range with the range of the input
5173 that will be interpreted as negative. */
5174 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 0, n_low
, n_high
,
5175 1, fold_convert_loc (loc
, arg0_type
,
5180 in_p
= (in_p
!= n_in_p
);
5184 /* Otherwise, if we are converting arg0 from signed type, to exp,
5185 an unsigned type, we will do the comparison as signed. If
5186 high is non-NULL, we punt above if it doesn't fit in the signed
5187 type, so if we get through here, +[-, high] or +[low, high] are
5188 equivalent to +[-, n_high] or +[n_low, n_high]. Similarly,
5189 +[-, -] or -[-, -] are equivalent too. But if low is specified and
5190 high is not, the +[low, -] range is equivalent to union of
5191 +[n_low, -] and +[-, -1] ranges, so +[low, -] is equivalent to
5192 -[0, n_low-1] and similarly -[low, -] to +[0, n_low-1], except for
5193 low being 0, which should be treated as [-, -]. */
5194 else if (TYPE_UNSIGNED (exp_type
)
5195 && !TYPE_UNSIGNED (arg0_type
)
5199 if (integer_zerop (low
))
5203 n_high
= fold_build2_loc (loc
, PLUS_EXPR
, arg0_type
,
5204 n_low
, build_int_cst (arg0_type
, -1));
5205 n_low
= build_zero_cst (arg0_type
);
5220 /* Given EXP, a logical expression, set the range it is testing into
5221 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5222 actually being tested. *PLOW and *PHIGH will be made of the same
5223 type as the returned expression. If EXP is not a comparison, we
5224 will most likely not be returning a useful value and range. Set
5225 *STRICT_OVERFLOW_P to true if the return value is only valid
5226 because signed overflow is undefined; otherwise, do not change
5227 *STRICT_OVERFLOW_P. */
5230 make_range (tree exp
, int *pin_p
, tree
*plow
, tree
*phigh
,
5231 bool *strict_overflow_p
)
5233 enum tree_code code
;
5234 tree arg0
, arg1
= NULL_TREE
;
5235 tree exp_type
, nexp
;
5238 location_t loc
= EXPR_LOCATION (exp
);
5240 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5241 and see if we can refine the range. Some of the cases below may not
5242 happen, but it doesn't seem worth worrying about this. We "continue"
5243 the outer loop when we've changed something; otherwise we "break"
5244 the switch, which will "break" the while. */
5247 low
= high
= build_int_cst (TREE_TYPE (exp
), 0);
5251 code
= TREE_CODE (exp
);
5252 exp_type
= TREE_TYPE (exp
);
5255 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
5257 if (TREE_OPERAND_LENGTH (exp
) > 0)
5258 arg0
= TREE_OPERAND (exp
, 0);
5259 if (TREE_CODE_CLASS (code
) == tcc_binary
5260 || TREE_CODE_CLASS (code
) == tcc_comparison
5261 || (TREE_CODE_CLASS (code
) == tcc_expression
5262 && TREE_OPERAND_LENGTH (exp
) > 1))
5263 arg1
= TREE_OPERAND (exp
, 1);
5265 if (arg0
== NULL_TREE
)
5268 nexp
= make_range_step (loc
, code
, arg0
, arg1
, exp_type
, &low
,
5269 &high
, &in_p
, strict_overflow_p
);
5270 if (nexp
== NULL_TREE
)
5275 /* If EXP is a constant, we can evaluate whether this is true or false. */
5276 if (TREE_CODE (exp
) == INTEGER_CST
)
5278 in_p
= in_p
== (integer_onep (range_binop (GE_EXPR
, integer_type_node
,
5280 && integer_onep (range_binop (LE_EXPR
, integer_type_node
,
5286 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
5290 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
5291 a bitwise check i.e. when
5292 LOW == 0xXX...X00...0
5293 HIGH == 0xXX...X11...1
5294 Return corresponding mask in MASK and stem in VALUE. */
5297 maskable_range_p (const_tree low
, const_tree high
, tree type
, tree
*mask
,
5300 if (TREE_CODE (low
) != INTEGER_CST
5301 || TREE_CODE (high
) != INTEGER_CST
)
5304 unsigned prec
= TYPE_PRECISION (type
);
5305 wide_int lo
= wi::to_wide (low
, prec
);
5306 wide_int hi
= wi::to_wide (high
, prec
);
5308 wide_int end_mask
= lo
^ hi
;
5309 if ((end_mask
& (end_mask
+ 1)) != 0
5310 || (lo
& end_mask
) != 0)
5313 wide_int stem_mask
= ~end_mask
;
5314 wide_int stem
= lo
& stem_mask
;
5315 if (stem
!= (hi
& stem_mask
))
5318 *mask
= wide_int_to_tree (type
, stem_mask
);
5319 *value
= wide_int_to_tree (type
, stem
);
5324 /* Helper routine for build_range_check and match.pd. Return the type to
5325 perform the check or NULL if it shouldn't be optimized. */
5328 range_check_type (tree etype
)
5330 /* First make sure that arithmetics in this type is valid, then make sure
5331 that it wraps around. */
5332 if (TREE_CODE (etype
) == ENUMERAL_TYPE
|| TREE_CODE (etype
) == BOOLEAN_TYPE
)
5333 etype
= lang_hooks
.types
.type_for_size (TYPE_PRECISION (etype
), 1);
5335 if (TREE_CODE (etype
) == INTEGER_TYPE
&& !TYPE_UNSIGNED (etype
))
5337 tree utype
, minv
, maxv
;
5339 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5340 for the type in question, as we rely on this here. */
5341 utype
= unsigned_type_for (etype
);
5342 maxv
= fold_convert (utype
, TYPE_MAX_VALUE (etype
));
5343 maxv
= range_binop (PLUS_EXPR
, NULL_TREE
, maxv
, 1,
5344 build_int_cst (TREE_TYPE (maxv
), 1), 1);
5345 minv
= fold_convert (utype
, TYPE_MIN_VALUE (etype
));
5347 if (integer_zerop (range_binop (NE_EXPR
, integer_type_node
,
5353 else if (POINTER_TYPE_P (etype
))
5354 etype
= unsigned_type_for (etype
);
5358 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5359 type, TYPE, return an expression to test if EXP is in (or out of, depending
5360 on IN_P) the range. Return 0 if the test couldn't be created. */
5363 build_range_check (location_t loc
, tree type
, tree exp
, int in_p
,
5364 tree low
, tree high
)
5366 tree etype
= TREE_TYPE (exp
), mask
, value
;
5368 /* Disable this optimization for function pointer expressions
5369 on targets that require function pointer canonicalization. */
5370 if (targetm
.have_canonicalize_funcptr_for_compare ()
5371 && POINTER_TYPE_P (etype
)
5372 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype
)))
5377 value
= build_range_check (loc
, type
, exp
, 1, low
, high
);
5379 return invert_truthvalue_loc (loc
, value
);
5384 if (low
== 0 && high
== 0)
5385 return omit_one_operand_loc (loc
, type
, build_int_cst (type
, 1), exp
);
5388 return fold_build2_loc (loc
, LE_EXPR
, type
, exp
,
5389 fold_convert_loc (loc
, etype
, high
));
5392 return fold_build2_loc (loc
, GE_EXPR
, type
, exp
,
5393 fold_convert_loc (loc
, etype
, low
));
5395 if (operand_equal_p (low
, high
, 0))
5396 return fold_build2_loc (loc
, EQ_EXPR
, type
, exp
,
5397 fold_convert_loc (loc
, etype
, low
));
5399 if (TREE_CODE (exp
) == BIT_AND_EXPR
5400 && maskable_range_p (low
, high
, etype
, &mask
, &value
))
5401 return fold_build2_loc (loc
, EQ_EXPR
, type
,
5402 fold_build2_loc (loc
, BIT_AND_EXPR
, etype
,
5406 if (integer_zerop (low
))
5408 if (! TYPE_UNSIGNED (etype
))
5410 etype
= unsigned_type_for (etype
);
5411 high
= fold_convert_loc (loc
, etype
, high
);
5412 exp
= fold_convert_loc (loc
, etype
, exp
);
5414 return build_range_check (loc
, type
, exp
, 1, 0, high
);
5417 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5418 if (integer_onep (low
) && TREE_CODE (high
) == INTEGER_CST
)
5420 int prec
= TYPE_PRECISION (etype
);
5422 if (wi::mask
<widest_int
> (prec
- 1, false) == wi::to_widest (high
))
5424 if (TYPE_UNSIGNED (etype
))
5426 tree signed_etype
= signed_type_for (etype
);
5427 if (TYPE_PRECISION (signed_etype
) != TYPE_PRECISION (etype
))
5429 = build_nonstandard_integer_type (TYPE_PRECISION (etype
), 0);
5431 etype
= signed_etype
;
5432 exp
= fold_convert_loc (loc
, etype
, exp
);
5434 return fold_build2_loc (loc
, GT_EXPR
, type
, exp
,
5435 build_int_cst (etype
, 0));
5439 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5440 This requires wrap-around arithmetics for the type of the expression. */
5441 etype
= range_check_type (etype
);
5442 if (etype
== NULL_TREE
)
5445 high
= fold_convert_loc (loc
, etype
, high
);
5446 low
= fold_convert_loc (loc
, etype
, low
);
5447 exp
= fold_convert_loc (loc
, etype
, exp
);
5449 value
= const_binop (MINUS_EXPR
, high
, low
);
5451 if (value
!= 0 && !TREE_OVERFLOW (value
))
5452 return build_range_check (loc
, type
,
5453 fold_build2_loc (loc
, MINUS_EXPR
, etype
, exp
, low
),
5454 1, build_int_cst (etype
, 0), value
);
5459 /* Return the predecessor of VAL in its type, handling the infinite case. */
5462 range_predecessor (tree val
)
5464 tree type
= TREE_TYPE (val
);
5466 if (INTEGRAL_TYPE_P (type
)
5467 && operand_equal_p (val
, TYPE_MIN_VALUE (type
), 0))
5470 return range_binop (MINUS_EXPR
, NULL_TREE
, val
, 0,
5471 build_int_cst (TREE_TYPE (val
), 1), 0);
5474 /* Return the successor of VAL in its type, handling the infinite case. */
5477 range_successor (tree val
)
5479 tree type
= TREE_TYPE (val
);
5481 if (INTEGRAL_TYPE_P (type
)
5482 && operand_equal_p (val
, TYPE_MAX_VALUE (type
), 0))
5485 return range_binop (PLUS_EXPR
, NULL_TREE
, val
, 0,
5486 build_int_cst (TREE_TYPE (val
), 1), 0);
5489 /* Given two ranges, see if we can merge them into one. Return 1 if we
5490 can, 0 if we can't. Set the output range into the specified parameters. */
5493 merge_ranges (int *pin_p
, tree
*plow
, tree
*phigh
, int in0_p
, tree low0
,
5494 tree high0
, int in1_p
, tree low1
, tree high1
)
5502 int lowequal
= ((low0
== 0 && low1
== 0)
5503 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
5504 low0
, 0, low1
, 0)));
5505 int highequal
= ((high0
== 0 && high1
== 0)
5506 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
5507 high0
, 1, high1
, 1)));
5509 /* Make range 0 be the range that starts first, or ends last if they
5510 start at the same value. Swap them if it isn't. */
5511 if (integer_onep (range_binop (GT_EXPR
, integer_type_node
,
5514 && integer_onep (range_binop (GT_EXPR
, integer_type_node
,
5515 high1
, 1, high0
, 1))))
5517 temp
= in0_p
, in0_p
= in1_p
, in1_p
= temp
;
5518 tem
= low0
, low0
= low1
, low1
= tem
;
5519 tem
= high0
, high0
= high1
, high1
= tem
;
5522 /* If the second range is != high1 where high1 is the type maximum of
5523 the type, try first merging with < high1 range. */
5526 && TREE_CODE (low1
) == INTEGER_CST
5527 && (TREE_CODE (TREE_TYPE (low1
)) == INTEGER_TYPE
5528 || (TREE_CODE (TREE_TYPE (low1
)) == ENUMERAL_TYPE
5529 && known_eq (TYPE_PRECISION (TREE_TYPE (low1
)),
5530 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1
))))))
5531 && operand_equal_p (low1
, high1
, 0))
5533 if (tree_int_cst_equal (low1
, TYPE_MAX_VALUE (TREE_TYPE (low1
)))
5534 && merge_ranges (pin_p
, plow
, phigh
, in0_p
, low0
, high0
,
5535 !in1_p
, NULL_TREE
, range_predecessor (low1
)))
5537 /* Similarly for the second range != low1 where low1 is the type minimum
5538 of the type, try first merging with > low1 range. */
5539 if (tree_int_cst_equal (low1
, TYPE_MIN_VALUE (TREE_TYPE (low1
)))
5540 && merge_ranges (pin_p
, plow
, phigh
, in0_p
, low0
, high0
,
5541 !in1_p
, range_successor (low1
), NULL_TREE
))
5545 /* Now flag two cases, whether the ranges are disjoint or whether the
5546 second range is totally subsumed in the first. Note that the tests
5547 below are simplified by the ones above. */
5548 no_overlap
= integer_onep (range_binop (LT_EXPR
, integer_type_node
,
5549 high0
, 1, low1
, 0));
5550 subset
= integer_onep (range_binop (LE_EXPR
, integer_type_node
,
5551 high1
, 1, high0
, 1));
5553 /* We now have four cases, depending on whether we are including or
5554 excluding the two ranges. */
5557 /* If they don't overlap, the result is false. If the second range
5558 is a subset it is the result. Otherwise, the range is from the start
5559 of the second to the end of the first. */
5561 in_p
= 0, low
= high
= 0;
5563 in_p
= 1, low
= low1
, high
= high1
;
5565 in_p
= 1, low
= low1
, high
= high0
;
5568 else if (in0_p
&& ! in1_p
)
5570 /* If they don't overlap, the result is the first range. If they are
5571 equal, the result is false. If the second range is a subset of the
5572 first, and the ranges begin at the same place, we go from just after
5573 the end of the second range to the end of the first. If the second
5574 range is not a subset of the first, or if it is a subset and both
5575 ranges end at the same place, the range starts at the start of the
5576 first range and ends just before the second range.
5577 Otherwise, we can't describe this as a single range. */
5579 in_p
= 1, low
= low0
, high
= high0
;
5580 else if (lowequal
&& highequal
)
5581 in_p
= 0, low
= high
= 0;
5582 else if (subset
&& lowequal
)
5584 low
= range_successor (high1
);
5589 /* We are in the weird situation where high0 > high1 but
5590 high1 has no successor. Punt. */
5594 else if (! subset
|| highequal
)
5597 high
= range_predecessor (low1
);
5601 /* low0 < low1 but low1 has no predecessor. Punt. */
5609 else if (! in0_p
&& in1_p
)
5611 /* If they don't overlap, the result is the second range. If the second
5612 is a subset of the first, the result is false. Otherwise,
5613 the range starts just after the first range and ends at the
5614 end of the second. */
5616 in_p
= 1, low
= low1
, high
= high1
;
5617 else if (subset
|| highequal
)
5618 in_p
= 0, low
= high
= 0;
5621 low
= range_successor (high0
);
5626 /* high1 > high0 but high0 has no successor. Punt. */
5634 /* The case where we are excluding both ranges. Here the complex case
5635 is if they don't overlap. In that case, the only time we have a
5636 range is if they are adjacent. If the second is a subset of the
5637 first, the result is the first. Otherwise, the range to exclude
5638 starts at the beginning of the first range and ends at the end of the
5642 if (integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
5643 range_successor (high0
),
5645 in_p
= 0, low
= low0
, high
= high1
;
5648 /* Canonicalize - [min, x] into - [-, x]. */
5649 if (low0
&& TREE_CODE (low0
) == INTEGER_CST
)
5650 switch (TREE_CODE (TREE_TYPE (low0
)))
5653 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0
)),
5655 (TYPE_MODE (TREE_TYPE (low0
)))))
5659 if (tree_int_cst_equal (low0
,
5660 TYPE_MIN_VALUE (TREE_TYPE (low0
))))
5664 if (TYPE_UNSIGNED (TREE_TYPE (low0
))
5665 && integer_zerop (low0
))
5672 /* Canonicalize - [x, max] into - [x, -]. */
5673 if (high1
&& TREE_CODE (high1
) == INTEGER_CST
)
5674 switch (TREE_CODE (TREE_TYPE (high1
)))
5677 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1
)),
5679 (TYPE_MODE (TREE_TYPE (high1
)))))
5683 if (tree_int_cst_equal (high1
,
5684 TYPE_MAX_VALUE (TREE_TYPE (high1
))))
5688 if (TYPE_UNSIGNED (TREE_TYPE (high1
))
5689 && integer_zerop (range_binop (PLUS_EXPR
, NULL_TREE
,
5691 build_int_cst (TREE_TYPE (high1
), 1),
5699 /* The ranges might be also adjacent between the maximum and
5700 minimum values of the given type. For
5701 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5702 return + [x + 1, y - 1]. */
5703 if (low0
== 0 && high1
== 0)
5705 low
= range_successor (high0
);
5706 high
= range_predecessor (low1
);
5707 if (low
== 0 || high
== 0)
5717 in_p
= 0, low
= low0
, high
= high0
;
5719 in_p
= 0, low
= low0
, high
= high1
;
5722 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
5727 /* Subroutine of fold, looking inside expressions of the form
5728 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5729 of the COND_EXPR. This function is being used also to optimize
5730 A op B ? C : A, by reversing the comparison first.
5732 Return a folded expression whose code is not a COND_EXPR
5733 anymore, or NULL_TREE if no folding opportunity is found. */
5736 fold_cond_expr_with_comparison (location_t loc
, tree type
,
5737 tree arg0
, tree arg1
, tree arg2
)
5739 enum tree_code comp_code
= TREE_CODE (arg0
);
5740 tree arg00
= TREE_OPERAND (arg0
, 0);
5741 tree arg01
= TREE_OPERAND (arg0
, 1);
5742 tree arg1_type
= TREE_TYPE (arg1
);
5748 /* If we have A op 0 ? A : -A, consider applying the following
5751 A == 0? A : -A same as -A
5752 A != 0? A : -A same as A
5753 A >= 0? A : -A same as abs (A)
5754 A > 0? A : -A same as abs (A)
5755 A <= 0? A : -A same as -abs (A)
5756 A < 0? A : -A same as -abs (A)
5758 None of these transformations work for modes with signed
5759 zeros. If A is +/-0, the first two transformations will
5760 change the sign of the result (from +0 to -0, or vice
5761 versa). The last four will fix the sign of the result,
5762 even though the original expressions could be positive or
5763 negative, depending on the sign of A.
5765 Note that all these transformations are correct if A is
5766 NaN, since the two alternatives (A and -A) are also NaNs. */
5767 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
5768 && (FLOAT_TYPE_P (TREE_TYPE (arg01
))
5769 ? real_zerop (arg01
)
5770 : integer_zerop (arg01
))
5771 && ((TREE_CODE (arg2
) == NEGATE_EXPR
5772 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
5773 /* In the case that A is of the form X-Y, '-A' (arg2) may
5774 have already been folded to Y-X, check for that. */
5775 || (TREE_CODE (arg1
) == MINUS_EXPR
5776 && TREE_CODE (arg2
) == MINUS_EXPR
5777 && operand_equal_p (TREE_OPERAND (arg1
, 0),
5778 TREE_OPERAND (arg2
, 1), 0)
5779 && operand_equal_p (TREE_OPERAND (arg1
, 1),
5780 TREE_OPERAND (arg2
, 0), 0))))
5785 tem
= fold_convert_loc (loc
, arg1_type
, arg1
);
5786 return fold_convert_loc (loc
, type
, negate_expr (tem
));
5789 return fold_convert_loc (loc
, type
, arg1
);
5792 if (flag_trapping_math
)
5797 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
5799 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
5800 return fold_convert_loc (loc
, type
, tem
);
5803 if (flag_trapping_math
)
5808 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
5810 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
5811 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1
)))
5813 /* A <= 0 ? A : -A for A INT_MIN is valid, but -abs(INT_MIN)
5814 is not, invokes UB both in abs and in the negation of it.
5815 So, use ABSU_EXPR instead. */
5816 tree utype
= unsigned_type_for (TREE_TYPE (arg1
));
5817 tem
= fold_build1_loc (loc
, ABSU_EXPR
, utype
, arg1
);
5818 tem
= negate_expr (tem
);
5819 return fold_convert_loc (loc
, type
, tem
);
5823 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
5824 return negate_expr (fold_convert_loc (loc
, type
, tem
));
5827 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
5831 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5832 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5833 both transformations are correct when A is NaN: A != 0
5834 is then true, and A == 0 is false. */
5836 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
5837 && integer_zerop (arg01
) && integer_zerop (arg2
))
5839 if (comp_code
== NE_EXPR
)
5840 return fold_convert_loc (loc
, type
, arg1
);
5841 else if (comp_code
== EQ_EXPR
)
5842 return build_zero_cst (type
);
5845 /* Try some transformations of A op B ? A : B.
5847 A == B? A : B same as B
5848 A != B? A : B same as A
5849 A >= B? A : B same as max (A, B)
5850 A > B? A : B same as max (B, A)
5851 A <= B? A : B same as min (A, B)
5852 A < B? A : B same as min (B, A)
5854 As above, these transformations don't work in the presence
5855 of signed zeros. For example, if A and B are zeros of
5856 opposite sign, the first two transformations will change
5857 the sign of the result. In the last four, the original
5858 expressions give different results for (A=+0, B=-0) and
5859 (A=-0, B=+0), but the transformed expressions do not.
5861 The first two transformations are correct if either A or B
5862 is a NaN. In the first transformation, the condition will
5863 be false, and B will indeed be chosen. In the case of the
5864 second transformation, the condition A != B will be true,
5865 and A will be chosen.
5867 The conversions to max() and min() are not correct if B is
5868 a number and A is not. The conditions in the original
5869 expressions will be false, so all four give B. The min()
5870 and max() versions would give a NaN instead. */
5871 if (!HONOR_SIGNED_ZEROS (element_mode (type
))
5872 && operand_equal_for_comparison_p (arg01
, arg2
)
5873 /* Avoid these transformations if the COND_EXPR may be used
5874 as an lvalue in the C++ front-end. PR c++/19199. */
5876 || VECTOR_TYPE_P (type
)
5877 || (! lang_GNU_CXX ()
5878 && strcmp (lang_hooks
.name
, "GNU Objective-C++") != 0)
5879 || ! maybe_lvalue_p (arg1
)
5880 || ! maybe_lvalue_p (arg2
)))
5882 tree comp_op0
= arg00
;
5883 tree comp_op1
= arg01
;
5884 tree comp_type
= TREE_TYPE (comp_op0
);
5889 return fold_convert_loc (loc
, type
, arg2
);
5891 return fold_convert_loc (loc
, type
, arg1
);
5896 /* In C++ a ?: expression can be an lvalue, so put the
5897 operand which will be used if they are equal first
5898 so that we can convert this back to the
5899 corresponding COND_EXPR. */
5900 if (!HONOR_NANS (arg1
))
5902 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
5903 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
5904 tem
= (comp_code
== LE_EXPR
|| comp_code
== UNLE_EXPR
)
5905 ? fold_build2_loc (loc
, MIN_EXPR
, comp_type
, comp_op0
, comp_op1
)
5906 : fold_build2_loc (loc
, MIN_EXPR
, comp_type
,
5907 comp_op1
, comp_op0
);
5908 return fold_convert_loc (loc
, type
, tem
);
5915 if (!HONOR_NANS (arg1
))
5917 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
5918 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
5919 tem
= (comp_code
== GE_EXPR
|| comp_code
== UNGE_EXPR
)
5920 ? fold_build2_loc (loc
, MAX_EXPR
, comp_type
, comp_op0
, comp_op1
)
5921 : fold_build2_loc (loc
, MAX_EXPR
, comp_type
,
5922 comp_op1
, comp_op0
);
5923 return fold_convert_loc (loc
, type
, tem
);
5927 if (!HONOR_NANS (arg1
))
5928 return fold_convert_loc (loc
, type
, arg2
);
5931 if (!HONOR_NANS (arg1
))
5932 return fold_convert_loc (loc
, type
, arg1
);
5935 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
5945 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5946 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5947 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5951 /* EXP is some logical combination of boolean tests. See if we can
5952 merge it into some range test. Return the new tree if so. */
5955 fold_range_test (location_t loc
, enum tree_code code
, tree type
,
5958 int or_op
= (code
== TRUTH_ORIF_EXPR
5959 || code
== TRUTH_OR_EXPR
);
5960 int in0_p
, in1_p
, in_p
;
5961 tree low0
, low1
, low
, high0
, high1
, high
;
5962 bool strict_overflow_p
= false;
5964 const char * const warnmsg
= G_("assuming signed overflow does not occur "
5965 "when simplifying range test");
5967 if (!INTEGRAL_TYPE_P (type
))
5970 lhs
= make_range (op0
, &in0_p
, &low0
, &high0
, &strict_overflow_p
);
5971 /* If op0 is known true or false and this is a short-circuiting
5972 operation we must not merge with op1 since that makes side-effects
5973 unconditional. So special-case this. */
5975 && ((code
== TRUTH_ORIF_EXPR
&& in0_p
)
5976 || (code
== TRUTH_ANDIF_EXPR
&& !in0_p
)))
5978 rhs
= make_range (op1
, &in1_p
, &low1
, &high1
, &strict_overflow_p
);
5980 /* If this is an OR operation, invert both sides; we will invert
5981 again at the end. */
5983 in0_p
= ! in0_p
, in1_p
= ! in1_p
;
5985 /* If both expressions are the same, if we can merge the ranges, and we
5986 can build the range test, return it or it inverted. If one of the
5987 ranges is always true or always false, consider it to be the same
5988 expression as the other. */
5989 if ((lhs
== 0 || rhs
== 0 || operand_equal_p (lhs
, rhs
, 0))
5990 && merge_ranges (&in_p
, &low
, &high
, in0_p
, low0
, high0
,
5992 && (tem
= (build_range_check (loc
, type
,
5994 : rhs
!= 0 ? rhs
: integer_zero_node
,
5995 in_p
, low
, high
))) != 0)
5997 if (strict_overflow_p
)
5998 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
5999 return or_op
? invert_truthvalue_loc (loc
, tem
) : tem
;
6002 /* On machines where the branch cost is expensive, if this is a
6003 short-circuited branch and the underlying object on both sides
6004 is the same, make a non-short-circuit operation. */
6005 bool logical_op_non_short_circuit
= LOGICAL_OP_NON_SHORT_CIRCUIT
;
6006 if (param_logical_op_non_short_circuit
!= -1)
6007 logical_op_non_short_circuit
6008 = param_logical_op_non_short_circuit
;
6009 if (logical_op_non_short_circuit
6010 && !flag_sanitize_coverage
6011 && lhs
!= 0 && rhs
!= 0
6012 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
6013 && operand_equal_p (lhs
, rhs
, 0))
6015 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
6016 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
6017 which cases we can't do this. */
6018 if (simple_operand_p (lhs
))
6019 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
6020 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
6023 else if (!lang_hooks
.decls
.global_bindings_p ()
6024 && !CONTAINS_PLACEHOLDER_P (lhs
))
6026 tree common
= save_expr (lhs
);
6028 if ((lhs
= build_range_check (loc
, type
, common
,
6029 or_op
? ! in0_p
: in0_p
,
6031 && (rhs
= build_range_check (loc
, type
, common
,
6032 or_op
? ! in1_p
: in1_p
,
6035 if (strict_overflow_p
)
6036 fold_overflow_warning (warnmsg
,
6037 WARN_STRICT_OVERFLOW_COMPARISON
);
6038 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
6039 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
6048 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
6049 bit value. Arrange things so the extra bits will be set to zero if and
6050 only if C is signed-extended to its full width. If MASK is nonzero,
6051 it is an INTEGER_CST that should be AND'ed with the extra bits. */
6054 unextend (tree c
, int p
, int unsignedp
, tree mask
)
6056 tree type
= TREE_TYPE (c
);
6057 int modesize
= GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type
));
6060 if (p
== modesize
|| unsignedp
)
6063 /* We work by getting just the sign bit into the low-order bit, then
6064 into the high-order bit, then sign-extend. We then XOR that value
6066 temp
= build_int_cst (TREE_TYPE (c
),
6067 wi::extract_uhwi (wi::to_wide (c
), p
- 1, 1));
6069 /* We must use a signed type in order to get an arithmetic right shift.
6070 However, we must also avoid introducing accidental overflows, so that
6071 a subsequent call to integer_zerop will work. Hence we must
6072 do the type conversion here. At this point, the constant is either
6073 zero or one, and the conversion to a signed type can never overflow.
6074 We could get an overflow if this conversion is done anywhere else. */
6075 if (TYPE_UNSIGNED (type
))
6076 temp
= fold_convert (signed_type_for (type
), temp
);
6078 temp
= const_binop (LSHIFT_EXPR
, temp
, size_int (modesize
- 1));
6079 temp
= const_binop (RSHIFT_EXPR
, temp
, size_int (modesize
- p
- 1));
6081 temp
= const_binop (BIT_AND_EXPR
, temp
,
6082 fold_convert (TREE_TYPE (c
), mask
));
6083 /* If necessary, convert the type back to match the type of C. */
6084 if (TYPE_UNSIGNED (type
))
6085 temp
= fold_convert (type
, temp
);
6087 return fold_convert (type
, const_binop (BIT_XOR_EXPR
, c
, temp
));
6090 /* For an expression that has the form
6094 we can drop one of the inner expressions and simplify to
6098 LOC is the location of the resulting expression. OP is the inner
6099 logical operation; the left-hand side in the examples above, while CMPOP
6100 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6101 removing a condition that guards another, as in
6102 (A != NULL && A->...) || A == NULL
6103 which we must not transform. If RHS_ONLY is true, only eliminate the
6104 right-most operand of the inner logical operation. */
6107 merge_truthop_with_opposite_arm (location_t loc
, tree op
, tree cmpop
,
6110 tree type
= TREE_TYPE (cmpop
);
6111 enum tree_code code
= TREE_CODE (cmpop
);
6112 enum tree_code truthop_code
= TREE_CODE (op
);
6113 tree lhs
= TREE_OPERAND (op
, 0);
6114 tree rhs
= TREE_OPERAND (op
, 1);
6115 tree orig_lhs
= lhs
, orig_rhs
= rhs
;
6116 enum tree_code rhs_code
= TREE_CODE (rhs
);
6117 enum tree_code lhs_code
= TREE_CODE (lhs
);
6118 enum tree_code inv_code
;
6120 if (TREE_SIDE_EFFECTS (op
) || TREE_SIDE_EFFECTS (cmpop
))
6123 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
6126 if (rhs_code
== truthop_code
)
6128 tree newrhs
= merge_truthop_with_opposite_arm (loc
, rhs
, cmpop
, rhs_only
);
6129 if (newrhs
!= NULL_TREE
)
6132 rhs_code
= TREE_CODE (rhs
);
6135 if (lhs_code
== truthop_code
&& !rhs_only
)
6137 tree newlhs
= merge_truthop_with_opposite_arm (loc
, lhs
, cmpop
, false);
6138 if (newlhs
!= NULL_TREE
)
6141 lhs_code
= TREE_CODE (lhs
);
6145 inv_code
= invert_tree_comparison (code
, HONOR_NANS (type
));
6146 if (inv_code
== rhs_code
6147 && operand_equal_p (TREE_OPERAND (rhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
6148 && operand_equal_p (TREE_OPERAND (rhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
6150 if (!rhs_only
&& inv_code
== lhs_code
6151 && operand_equal_p (TREE_OPERAND (lhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
6152 && operand_equal_p (TREE_OPERAND (lhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
6154 if (rhs
!= orig_rhs
|| lhs
!= orig_lhs
)
6155 return fold_build2_loc (loc
, truthop_code
, TREE_TYPE (cmpop
),
6160 /* Find ways of folding logical expressions of LHS and RHS:
6161 Try to merge two comparisons to the same innermost item.
6162 Look for range tests like "ch >= '0' && ch <= '9'".
6163 Look for combinations of simple terms on machines with expensive branches
6164 and evaluate the RHS unconditionally.
6166 For example, if we have p->a == 2 && p->b == 4 and we can make an
6167 object large enough to span both A and B, we can do this with a comparison
6168 against the object ANDed with the a mask.
6170 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6171 operations to do this with one comparison.
6173 We check for both normal comparisons and the BIT_AND_EXPRs made this by
6174 function and the one above.
6176 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6177 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6179 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
6182 We return the simplified tree or 0 if no optimization is possible. */
6185 fold_truth_andor_1 (location_t loc
, enum tree_code code
, tree truth_type
,
6188 /* If this is the "or" of two comparisons, we can do something if
6189 the comparisons are NE_EXPR. If this is the "and", we can do something
6190 if the comparisons are EQ_EXPR. I.e.,
6191 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6193 WANTED_CODE is this operation code. For single bit fields, we can
6194 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6195 comparison for one-bit fields. */
6197 enum tree_code wanted_code
;
6198 enum tree_code lcode
, rcode
;
6199 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
6200 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
6201 HOST_WIDE_INT ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
6202 HOST_WIDE_INT rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
6203 HOST_WIDE_INT xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
6204 HOST_WIDE_INT lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
6205 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
6206 int ll_reversep
, lr_reversep
, rl_reversep
, rr_reversep
;
6207 machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
6208 scalar_int_mode lnmode
, rnmode
;
6209 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
6210 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
6211 tree l_const
, r_const
;
6212 tree lntype
, rntype
, result
;
6213 HOST_WIDE_INT first_bit
, end_bit
;
6216 /* Start by getting the comparison codes. Fail if anything is volatile.
6217 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6218 it were surrounded with a NE_EXPR. */
6220 if (TREE_SIDE_EFFECTS (lhs
) || TREE_SIDE_EFFECTS (rhs
))
6223 lcode
= TREE_CODE (lhs
);
6224 rcode
= TREE_CODE (rhs
);
6226 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
6228 lhs
= build2 (NE_EXPR
, truth_type
, lhs
,
6229 build_int_cst (TREE_TYPE (lhs
), 0));
6233 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
6235 rhs
= build2 (NE_EXPR
, truth_type
, rhs
,
6236 build_int_cst (TREE_TYPE (rhs
), 0));
6240 if (TREE_CODE_CLASS (lcode
) != tcc_comparison
6241 || TREE_CODE_CLASS (rcode
) != tcc_comparison
)
6244 ll_arg
= TREE_OPERAND (lhs
, 0);
6245 lr_arg
= TREE_OPERAND (lhs
, 1);
6246 rl_arg
= TREE_OPERAND (rhs
, 0);
6247 rr_arg
= TREE_OPERAND (rhs
, 1);
6249 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6250 if (simple_operand_p (ll_arg
)
6251 && simple_operand_p (lr_arg
))
6253 if (operand_equal_p (ll_arg
, rl_arg
, 0)
6254 && operand_equal_p (lr_arg
, rr_arg
, 0))
6256 result
= combine_comparisons (loc
, code
, lcode
, rcode
,
6257 truth_type
, ll_arg
, lr_arg
);
6261 else if (operand_equal_p (ll_arg
, rr_arg
, 0)
6262 && operand_equal_p (lr_arg
, rl_arg
, 0))
6264 result
= combine_comparisons (loc
, code
, lcode
,
6265 swap_tree_comparison (rcode
),
6266 truth_type
, ll_arg
, lr_arg
);
6272 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
6273 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
6275 /* If the RHS can be evaluated unconditionally and its operands are
6276 simple, it wins to evaluate the RHS unconditionally on machines
6277 with expensive branches. In this case, this isn't a comparison
6278 that can be merged. */
6280 if (BRANCH_COST (optimize_function_for_speed_p (cfun
),
6282 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg
))
6283 && simple_operand_p (rl_arg
)
6284 && simple_operand_p (rr_arg
))
6286 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6287 if (code
== TRUTH_OR_EXPR
6288 && lcode
== NE_EXPR
&& integer_zerop (lr_arg
)
6289 && rcode
== NE_EXPR
&& integer_zerop (rr_arg
)
6290 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
6291 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
6292 return build2_loc (loc
, NE_EXPR
, truth_type
,
6293 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
6295 build_int_cst (TREE_TYPE (ll_arg
), 0));
6297 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6298 if (code
== TRUTH_AND_EXPR
6299 && lcode
== EQ_EXPR
&& integer_zerop (lr_arg
)
6300 && rcode
== EQ_EXPR
&& integer_zerop (rr_arg
)
6301 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
6302 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
6303 return build2_loc (loc
, EQ_EXPR
, truth_type
,
6304 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
6306 build_int_cst (TREE_TYPE (ll_arg
), 0));
6309 /* See if the comparisons can be merged. Then get all the parameters for
6312 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
6313 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
6316 ll_reversep
= lr_reversep
= rl_reversep
= rr_reversep
= 0;
6318 ll_inner
= decode_field_reference (loc
, &ll_arg
,
6319 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
6320 &ll_unsignedp
, &ll_reversep
, &volatilep
,
6321 &ll_mask
, &ll_and_mask
);
6322 lr_inner
= decode_field_reference (loc
, &lr_arg
,
6323 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
6324 &lr_unsignedp
, &lr_reversep
, &volatilep
,
6325 &lr_mask
, &lr_and_mask
);
6326 rl_inner
= decode_field_reference (loc
, &rl_arg
,
6327 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
6328 &rl_unsignedp
, &rl_reversep
, &volatilep
,
6329 &rl_mask
, &rl_and_mask
);
6330 rr_inner
= decode_field_reference (loc
, &rr_arg
,
6331 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
6332 &rr_unsignedp
, &rr_reversep
, &volatilep
,
6333 &rr_mask
, &rr_and_mask
);
6335 /* It must be true that the inner operation on the lhs of each
6336 comparison must be the same if we are to be able to do anything.
6337 Then see if we have constants. If not, the same must be true for
6340 || ll_reversep
!= rl_reversep
6341 || ll_inner
== 0 || rl_inner
== 0
6342 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
6345 if (TREE_CODE (lr_arg
) == INTEGER_CST
6346 && TREE_CODE (rr_arg
) == INTEGER_CST
)
6348 l_const
= lr_arg
, r_const
= rr_arg
;
6349 lr_reversep
= ll_reversep
;
6351 else if (lr_reversep
!= rr_reversep
6352 || lr_inner
== 0 || rr_inner
== 0
6353 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
6356 l_const
= r_const
= 0;
6358 /* If either comparison code is not correct for our logical operation,
6359 fail. However, we can convert a one-bit comparison against zero into
6360 the opposite comparison against that bit being set in the field. */
6362 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
6363 if (lcode
!= wanted_code
)
6365 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
6367 /* Make the left operand unsigned, since we are only interested
6368 in the value of one bit. Otherwise we are doing the wrong
6377 /* This is analogous to the code for l_const above. */
6378 if (rcode
!= wanted_code
)
6380 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
6389 /* See if we can find a mode that contains both fields being compared on
6390 the left. If we can't, fail. Otherwise, update all constants and masks
6391 to be relative to a field of that size. */
6392 first_bit
= MIN (ll_bitpos
, rl_bitpos
);
6393 end_bit
= MAX (ll_bitpos
+ ll_bitsize
, rl_bitpos
+ rl_bitsize
);
6394 if (!get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
6395 TYPE_ALIGN (TREE_TYPE (ll_inner
)), BITS_PER_WORD
,
6396 volatilep
, &lnmode
))
6399 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
6400 lnbitpos
= first_bit
& ~ (lnbitsize
- 1);
6401 lntype
= lang_hooks
.types
.type_for_size (lnbitsize
, 1);
6402 xll_bitpos
= ll_bitpos
- lnbitpos
, xrl_bitpos
= rl_bitpos
- lnbitpos
;
6404 if (ll_reversep
? !BYTES_BIG_ENDIAN
: BYTES_BIG_ENDIAN
)
6406 xll_bitpos
= lnbitsize
- xll_bitpos
- ll_bitsize
;
6407 xrl_bitpos
= lnbitsize
- xrl_bitpos
- rl_bitsize
;
6410 ll_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, ll_mask
),
6411 size_int (xll_bitpos
));
6412 rl_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, rl_mask
),
6413 size_int (xrl_bitpos
));
6417 l_const
= fold_convert_loc (loc
, lntype
, l_const
);
6418 l_const
= unextend (l_const
, ll_bitsize
, ll_unsignedp
, ll_and_mask
);
6419 l_const
= const_binop (LSHIFT_EXPR
, l_const
, size_int (xll_bitpos
));
6420 if (! integer_zerop (const_binop (BIT_AND_EXPR
, l_const
,
6421 fold_build1_loc (loc
, BIT_NOT_EXPR
,
6424 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
6426 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
6431 r_const
= fold_convert_loc (loc
, lntype
, r_const
);
6432 r_const
= unextend (r_const
, rl_bitsize
, rl_unsignedp
, rl_and_mask
);
6433 r_const
= const_binop (LSHIFT_EXPR
, r_const
, size_int (xrl_bitpos
));
6434 if (! integer_zerop (const_binop (BIT_AND_EXPR
, r_const
,
6435 fold_build1_loc (loc
, BIT_NOT_EXPR
,
6438 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
6440 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
6444 /* If the right sides are not constant, do the same for it. Also,
6445 disallow this optimization if a size, signedness or storage order
6446 mismatch occurs between the left and right sides. */
6449 if (ll_bitsize
!= lr_bitsize
|| rl_bitsize
!= rr_bitsize
6450 || ll_unsignedp
!= lr_unsignedp
|| rl_unsignedp
!= rr_unsignedp
6451 || ll_reversep
!= lr_reversep
6452 /* Make sure the two fields on the right
6453 correspond to the left without being swapped. */
6454 || ll_bitpos
- rl_bitpos
!= lr_bitpos
- rr_bitpos
)
6457 first_bit
= MIN (lr_bitpos
, rr_bitpos
);
6458 end_bit
= MAX (lr_bitpos
+ lr_bitsize
, rr_bitpos
+ rr_bitsize
);
6459 if (!get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
6460 TYPE_ALIGN (TREE_TYPE (lr_inner
)), BITS_PER_WORD
,
6461 volatilep
, &rnmode
))
6464 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
6465 rnbitpos
= first_bit
& ~ (rnbitsize
- 1);
6466 rntype
= lang_hooks
.types
.type_for_size (rnbitsize
, 1);
6467 xlr_bitpos
= lr_bitpos
- rnbitpos
, xrr_bitpos
= rr_bitpos
- rnbitpos
;
6469 if (lr_reversep
? !BYTES_BIG_ENDIAN
: BYTES_BIG_ENDIAN
)
6471 xlr_bitpos
= rnbitsize
- xlr_bitpos
- lr_bitsize
;
6472 xrr_bitpos
= rnbitsize
- xrr_bitpos
- rr_bitsize
;
6475 lr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
6477 size_int (xlr_bitpos
));
6478 rr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
6480 size_int (xrr_bitpos
));
6482 /* Make a mask that corresponds to both fields being compared.
6483 Do this for both items being compared. If the operands are the
6484 same size and the bits being compared are in the same position
6485 then we can do this by masking both and comparing the masked
6487 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
6488 lr_mask
= const_binop (BIT_IOR_EXPR
, lr_mask
, rr_mask
);
6489 if (lnbitsize
== rnbitsize
6490 && xll_bitpos
== xlr_bitpos
6494 lhs
= make_bit_field_ref (loc
, ll_inner
, ll_arg
,
6495 lntype
, lnbitsize
, lnbitpos
,
6496 ll_unsignedp
|| rl_unsignedp
, ll_reversep
);
6497 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
6498 lhs
= build2 (BIT_AND_EXPR
, lntype
, lhs
, ll_mask
);
6500 rhs
= make_bit_field_ref (loc
, lr_inner
, lr_arg
,
6501 rntype
, rnbitsize
, rnbitpos
,
6502 lr_unsignedp
|| rr_unsignedp
, lr_reversep
);
6503 if (! all_ones_mask_p (lr_mask
, rnbitsize
))
6504 rhs
= build2 (BIT_AND_EXPR
, rntype
, rhs
, lr_mask
);
6506 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
6509 /* There is still another way we can do something: If both pairs of
6510 fields being compared are adjacent, we may be able to make a wider
6511 field containing them both.
6513 Note that we still must mask the lhs/rhs expressions. Furthermore,
6514 the mask must be shifted to account for the shift done by
6515 make_bit_field_ref. */
6516 if (((ll_bitsize
+ ll_bitpos
== rl_bitpos
6517 && lr_bitsize
+ lr_bitpos
== rr_bitpos
)
6518 || (ll_bitpos
== rl_bitpos
+ rl_bitsize
6519 && lr_bitpos
== rr_bitpos
+ rr_bitsize
))
6527 lhs
= make_bit_field_ref (loc
, ll_inner
, ll_arg
, lntype
,
6528 ll_bitsize
+ rl_bitsize
,
6529 MIN (ll_bitpos
, rl_bitpos
),
6530 ll_unsignedp
, ll_reversep
);
6531 rhs
= make_bit_field_ref (loc
, lr_inner
, lr_arg
, rntype
,
6532 lr_bitsize
+ rr_bitsize
,
6533 MIN (lr_bitpos
, rr_bitpos
),
6534 lr_unsignedp
, lr_reversep
);
6536 ll_mask
= const_binop (RSHIFT_EXPR
, ll_mask
,
6537 size_int (MIN (xll_bitpos
, xrl_bitpos
)));
6538 lr_mask
= const_binop (RSHIFT_EXPR
, lr_mask
,
6539 size_int (MIN (xlr_bitpos
, xrr_bitpos
)));
6541 /* Convert to the smaller type before masking out unwanted bits. */
6543 if (lntype
!= rntype
)
6545 if (lnbitsize
> rnbitsize
)
6547 lhs
= fold_convert_loc (loc
, rntype
, lhs
);
6548 ll_mask
= fold_convert_loc (loc
, rntype
, ll_mask
);
6551 else if (lnbitsize
< rnbitsize
)
6553 rhs
= fold_convert_loc (loc
, lntype
, rhs
);
6554 lr_mask
= fold_convert_loc (loc
, lntype
, lr_mask
);
6559 if (! all_ones_mask_p (ll_mask
, ll_bitsize
+ rl_bitsize
))
6560 lhs
= build2 (BIT_AND_EXPR
, type
, lhs
, ll_mask
);
6562 if (! all_ones_mask_p (lr_mask
, lr_bitsize
+ rr_bitsize
))
6563 rhs
= build2 (BIT_AND_EXPR
, type
, rhs
, lr_mask
);
6565 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
6571 /* Handle the case of comparisons with constants. If there is something in
6572 common between the masks, those bits of the constants must be the same.
6573 If not, the condition is always false. Test for this to avoid generating
6574 incorrect code below. */
6575 result
= const_binop (BIT_AND_EXPR
, ll_mask
, rl_mask
);
6576 if (! integer_zerop (result
)
6577 && simple_cst_equal (const_binop (BIT_AND_EXPR
, result
, l_const
),
6578 const_binop (BIT_AND_EXPR
, result
, r_const
)) != 1)
6580 if (wanted_code
== NE_EXPR
)
6582 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6583 return constant_boolean_node (true, truth_type
);
6587 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6588 return constant_boolean_node (false, truth_type
);
6595 /* Construct the expression we will return. First get the component
6596 reference we will make. Unless the mask is all ones the width of
6597 that field, perform the mask operation. Then compare with the
6599 result
= make_bit_field_ref (loc
, ll_inner
, ll_arg
,
6600 lntype
, lnbitsize
, lnbitpos
,
6601 ll_unsignedp
|| rl_unsignedp
, ll_reversep
);
6603 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
6604 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
6605 result
= build2_loc (loc
, BIT_AND_EXPR
, lntype
, result
, ll_mask
);
6607 return build2_loc (loc
, wanted_code
, truth_type
, result
,
6608 const_binop (BIT_IOR_EXPR
, l_const
, r_const
));
6611 /* T is an integer expression that is being multiplied, divided, or taken a
6612 modulus (CODE says which and what kind of divide or modulus) by a
6613 constant C. See if we can eliminate that operation by folding it with
6614 other operations already in T. WIDE_TYPE, if non-null, is a type that
6615 should be used for the computation if wider than our type.
6617 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6618 (X * 2) + (Y * 4). We must, however, be assured that either the original
6619 expression would not overflow or that overflow is undefined for the type
6620 in the language in question.
6622 If we return a non-null expression, it is an equivalent form of the
6623 original computation, but need not be in the original type.
6625 We set *STRICT_OVERFLOW_P to true if the return values depends on
6626 signed overflow being undefined. Otherwise we do not change
6627 *STRICT_OVERFLOW_P. */
6630 extract_muldiv (tree t
, tree c
, enum tree_code code
, tree wide_type
,
6631 bool *strict_overflow_p
)
6633 /* To avoid exponential search depth, refuse to allow recursion past
6634 three levels. Beyond that (1) it's highly unlikely that we'll find
6635 something interesting and (2) we've probably processed it before
6636 when we built the inner expression. */
6645 ret
= extract_muldiv_1 (t
, c
, code
, wide_type
, strict_overflow_p
);
6652 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
,
6653 bool *strict_overflow_p
)
6655 tree type
= TREE_TYPE (t
);
6656 enum tree_code tcode
= TREE_CODE (t
);
6657 tree ctype
= (wide_type
!= 0
6658 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type
))
6659 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type
)))
6660 ? wide_type
: type
);
6662 int same_p
= tcode
== code
;
6663 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
6664 bool sub_strict_overflow_p
;
6666 /* Don't deal with constants of zero here; they confuse the code below. */
6667 if (integer_zerop (c
))
6670 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
6671 op0
= TREE_OPERAND (t
, 0);
6673 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
6674 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
6676 /* Note that we need not handle conditional operations here since fold
6677 already handles those cases. So just do arithmetic here. */
6681 /* For a constant, we can always simplify if we are a multiply
6682 or (for divide and modulus) if it is a multiple of our constant. */
6683 if (code
== MULT_EXPR
6684 || wi::multiple_of_p (wi::to_wide (t
), wi::to_wide (c
),
6687 tree tem
= const_binop (code
, fold_convert (ctype
, t
),
6688 fold_convert (ctype
, c
));
6689 /* If the multiplication overflowed, we lost information on it.
6690 See PR68142 and PR69845. */
6691 if (TREE_OVERFLOW (tem
))
6697 CASE_CONVERT
: case NON_LVALUE_EXPR
:
6698 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0
)))
6700 /* If op0 is an expression ... */
6701 if ((COMPARISON_CLASS_P (op0
)
6702 || UNARY_CLASS_P (op0
)
6703 || BINARY_CLASS_P (op0
)
6704 || VL_EXP_CLASS_P (op0
)
6705 || EXPRESSION_CLASS_P (op0
))
6706 /* ... and has wrapping overflow, and its type is smaller
6707 than ctype, then we cannot pass through as widening. */
6708 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0
))
6709 && (TYPE_PRECISION (ctype
)
6710 > TYPE_PRECISION (TREE_TYPE (op0
))))
6711 /* ... or this is a truncation (t is narrower than op0),
6712 then we cannot pass through this narrowing. */
6713 || (TYPE_PRECISION (type
)
6714 < TYPE_PRECISION (TREE_TYPE (op0
)))
6715 /* ... or signedness changes for division or modulus,
6716 then we cannot pass through this conversion. */
6717 || (code
!= MULT_EXPR
6718 && (TYPE_UNSIGNED (ctype
)
6719 != TYPE_UNSIGNED (TREE_TYPE (op0
))))
6720 /* ... or has undefined overflow while the converted to
6721 type has not, we cannot do the operation in the inner type
6722 as that would introduce undefined overflow. */
6723 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0
))
6724 && !TYPE_OVERFLOW_UNDEFINED (type
))))
6727 /* Pass the constant down and see if we can make a simplification. If
6728 we can, replace this expression with the inner simplification for
6729 possible later conversion to our or some other type. */
6730 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
6731 && TREE_CODE (t2
) == INTEGER_CST
6732 && !TREE_OVERFLOW (t2
)
6733 && (t1
= extract_muldiv (op0
, t2
, code
,
6734 code
== MULT_EXPR
? ctype
: NULL_TREE
,
6735 strict_overflow_p
)) != 0)
6740 /* If widening the type changes it from signed to unsigned, then we
6741 must avoid building ABS_EXPR itself as unsigned. */
6742 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
6744 tree cstype
= (*signed_type_for
) (ctype
);
6745 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
, strict_overflow_p
))
6748 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
6749 return fold_convert (ctype
, t1
);
6753 /* If the constant is negative, we cannot simplify this. */
6754 if (tree_int_cst_sgn (c
) == -1)
6758 /* For division and modulus, type can't be unsigned, as e.g.
6759 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6760 For signed types, even with wrapping overflow, this is fine. */
6761 if (code
!= MULT_EXPR
&& TYPE_UNSIGNED (type
))
6763 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
, strict_overflow_p
))
6765 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
6768 case MIN_EXPR
: case MAX_EXPR
:
6769 /* If widening the type changes the signedness, then we can't perform
6770 this optimization as that changes the result. */
6771 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
6774 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6775 sub_strict_overflow_p
= false;
6776 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
,
6777 &sub_strict_overflow_p
)) != 0
6778 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
,
6779 &sub_strict_overflow_p
)) != 0)
6781 if (tree_int_cst_sgn (c
) < 0)
6782 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
6783 if (sub_strict_overflow_p
)
6784 *strict_overflow_p
= true;
6785 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6786 fold_convert (ctype
, t2
));
6790 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
6791 /* If the second operand is constant, this is a multiplication
6792 or floor division, by a power of two, so we can treat it that
6793 way unless the multiplier or divisor overflows. Signed
6794 left-shift overflow is implementation-defined rather than
6795 undefined in C90, so do not convert signed left shift into
6797 if (TREE_CODE (op1
) == INTEGER_CST
6798 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
6799 /* const_binop may not detect overflow correctly,
6800 so check for it explicitly here. */
6801 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)),
6803 && (t1
= fold_convert (ctype
,
6804 const_binop (LSHIFT_EXPR
, size_one_node
,
6806 && !TREE_OVERFLOW (t1
))
6807 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
6808 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
6810 fold_convert (ctype
, op0
),
6812 c
, code
, wide_type
, strict_overflow_p
);
6815 case PLUS_EXPR
: case MINUS_EXPR
:
6816 /* See if we can eliminate the operation on both sides. If we can, we
6817 can return a new PLUS or MINUS. If we can't, the only remaining
6818 cases where we can do anything are if the second operand is a
6820 sub_strict_overflow_p
= false;
6821 t1
= extract_muldiv (op0
, c
, code
, wide_type
, &sub_strict_overflow_p
);
6822 t2
= extract_muldiv (op1
, c
, code
, wide_type
, &sub_strict_overflow_p
);
6823 if (t1
!= 0 && t2
!= 0
6824 && TYPE_OVERFLOW_WRAPS (ctype
)
6825 && (code
== MULT_EXPR
6826 /* If not multiplication, we can only do this if both operands
6827 are divisible by c. */
6828 || (multiple_of_p (ctype
, op0
, c
)
6829 && multiple_of_p (ctype
, op1
, c
))))
6831 if (sub_strict_overflow_p
)
6832 *strict_overflow_p
= true;
6833 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6834 fold_convert (ctype
, t2
));
6837 /* If this was a subtraction, negate OP1 and set it to be an addition.
6838 This simplifies the logic below. */
6839 if (tcode
== MINUS_EXPR
)
6841 tcode
= PLUS_EXPR
, op1
= negate_expr (op1
);
6842 /* If OP1 was not easily negatable, the constant may be OP0. */
6843 if (TREE_CODE (op0
) == INTEGER_CST
)
6845 std::swap (op0
, op1
);
6850 if (TREE_CODE (op1
) != INTEGER_CST
)
6853 /* If either OP1 or C are negative, this optimization is not safe for
6854 some of the division and remainder types while for others we need
6855 to change the code. */
6856 if (tree_int_cst_sgn (op1
) < 0 || tree_int_cst_sgn (c
) < 0)
6858 if (code
== CEIL_DIV_EXPR
)
6859 code
= FLOOR_DIV_EXPR
;
6860 else if (code
== FLOOR_DIV_EXPR
)
6861 code
= CEIL_DIV_EXPR
;
6862 else if (code
!= MULT_EXPR
6863 && code
!= CEIL_MOD_EXPR
&& code
!= FLOOR_MOD_EXPR
)
6867 /* If it's a multiply or a division/modulus operation of a multiple
6868 of our constant, do the operation and verify it doesn't overflow. */
6869 if (code
== MULT_EXPR
6870 || wi::multiple_of_p (wi::to_wide (op1
), wi::to_wide (c
),
6873 op1
= const_binop (code
, fold_convert (ctype
, op1
),
6874 fold_convert (ctype
, c
));
6875 /* We allow the constant to overflow with wrapping semantics. */
6877 || (TREE_OVERFLOW (op1
) && !TYPE_OVERFLOW_WRAPS (ctype
)))
6883 /* If we have an unsigned type, we cannot widen the operation since it
6884 will change the result if the original computation overflowed. */
6885 if (TYPE_UNSIGNED (ctype
) && ctype
!= type
)
6888 /* The last case is if we are a multiply. In that case, we can
6889 apply the distributive law to commute the multiply and addition
6890 if the multiplication of the constants doesn't overflow
6891 and overflow is defined. With undefined overflow
6892 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
6893 But fold_plusminus_mult_expr would factor back any power-of-two
6894 value so do not distribute in the first place in this case. */
6895 if (code
== MULT_EXPR
6896 && TYPE_OVERFLOW_WRAPS (ctype
)
6897 && !(tree_fits_shwi_p (c
) && pow2p_hwi (absu_hwi (tree_to_shwi (c
)))))
6898 return fold_build2 (tcode
, ctype
,
6899 fold_build2 (code
, ctype
,
6900 fold_convert (ctype
, op0
),
6901 fold_convert (ctype
, c
)),
6907 /* We have a special case here if we are doing something like
6908 (C * 8) % 4 since we know that's zero. */
6909 if ((code
== TRUNC_MOD_EXPR
|| code
== CEIL_MOD_EXPR
6910 || code
== FLOOR_MOD_EXPR
|| code
== ROUND_MOD_EXPR
)
6911 /* If the multiplication can overflow we cannot optimize this. */
6912 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
))
6913 && TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
6914 && wi::multiple_of_p (wi::to_wide (op1
), wi::to_wide (c
),
6917 *strict_overflow_p
= true;
6918 return omit_one_operand (type
, integer_zero_node
, op0
);
6921 /* ... fall through ... */
6923 case TRUNC_DIV_EXPR
: case CEIL_DIV_EXPR
: case FLOOR_DIV_EXPR
:
6924 case ROUND_DIV_EXPR
: case EXACT_DIV_EXPR
:
6925 /* If we can extract our operation from the LHS, do so and return a
6926 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6927 do something only if the second operand is a constant. */
6929 && TYPE_OVERFLOW_WRAPS (ctype
)
6930 && (t1
= extract_muldiv (op0
, c
, code
, wide_type
,
6931 strict_overflow_p
)) != 0)
6932 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6933 fold_convert (ctype
, op1
));
6934 else if (tcode
== MULT_EXPR
&& code
== MULT_EXPR
6935 && TYPE_OVERFLOW_WRAPS (ctype
)
6936 && (t1
= extract_muldiv (op1
, c
, code
, wide_type
,
6937 strict_overflow_p
)) != 0)
6938 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6939 fold_convert (ctype
, t1
));
6940 else if (TREE_CODE (op1
) != INTEGER_CST
)
6943 /* If these are the same operation types, we can associate them
6944 assuming no overflow. */
6947 bool overflow_p
= false;
6948 wi::overflow_type overflow_mul
;
6949 signop sign
= TYPE_SIGN (ctype
);
6950 unsigned prec
= TYPE_PRECISION (ctype
);
6951 wide_int mul
= wi::mul (wi::to_wide (op1
, prec
),
6952 wi::to_wide (c
, prec
),
6953 sign
, &overflow_mul
);
6954 overflow_p
= TREE_OVERFLOW (c
) | TREE_OVERFLOW (op1
);
6956 && ((sign
== UNSIGNED
&& tcode
!= MULT_EXPR
) || sign
== SIGNED
))
6959 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6960 wide_int_to_tree (ctype
, mul
));
6963 /* If these operations "cancel" each other, we have the main
6964 optimizations of this pass, which occur when either constant is a
6965 multiple of the other, in which case we replace this with either an
6966 operation or CODE or TCODE.
6968 If we have an unsigned type, we cannot do this since it will change
6969 the result if the original computation overflowed. */
6970 if (TYPE_OVERFLOW_UNDEFINED (ctype
)
6971 && ((code
== MULT_EXPR
&& tcode
== EXACT_DIV_EXPR
)
6972 || (tcode
== MULT_EXPR
6973 && code
!= TRUNC_MOD_EXPR
&& code
!= CEIL_MOD_EXPR
6974 && code
!= FLOOR_MOD_EXPR
&& code
!= ROUND_MOD_EXPR
6975 && code
!= MULT_EXPR
)))
6977 if (wi::multiple_of_p (wi::to_wide (op1
), wi::to_wide (c
),
6980 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6981 *strict_overflow_p
= true;
6982 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
6983 fold_convert (ctype
,
6984 const_binop (TRUNC_DIV_EXPR
,
6987 else if (wi::multiple_of_p (wi::to_wide (c
), wi::to_wide (op1
),
6990 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
6991 *strict_overflow_p
= true;
6992 return fold_build2 (code
, ctype
, fold_convert (ctype
, op0
),
6993 fold_convert (ctype
,
6994 const_binop (TRUNC_DIV_EXPR
,
/* NOTE(review): this whole region is a corrupted extraction of fold-const.c:
   logical lines are split mid-token, original line numbers (e.g. "7007") are
   fused into the text, and interior lines are missing (here, the second
   argument of build_int_cst at original line ~7021).  Restore from the
   upstream file before compiling; comments below annotate only what is
   visible.  */
7007 /* Return a node which has the indicated constant VALUE (either 0 or
7008 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
7009 and is of the indicated TYPE. */
7012 constant_boolean_node (bool value
, tree type
)
/* Fast paths for the two most common boolean-ish types: return the shared
   constant nodes directly rather than building new ones.  */
7014 if (type
== integer_type_node
)
7015 return value
? integer_one_node
: integer_zero_node
;
7016 else if (type
== boolean_type_node
)
7017 return value
? boolean_true_node
: boolean_false_node
;
/* Vector case: splat a per-element constant across the vector
   (per the header comment, all-ones for true, all-zeros for false;
   the actual element value argument is missing from this extraction).  */
7018 else if (TREE_CODE (type
) == VECTOR_TYPE
)
7019 return build_vector_from_val (type
,
7020 build_int_cst (TREE_TYPE (type
),
/* Fallback: convert the generic 0/1 integer constant to TYPE.  */
7023 return fold_convert (type
, value
? integer_one_node
: integer_zero_node
);
/* NOTE(review): corrupted extraction -- lines split mid-token, embedded
   original line numbers, and interior lines missing (returns, braces,
   cond_first_p branch guards).  Do not compile as-is; verify against
   upstream fold-const.c.  */
7027 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
7028 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
7029 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
7030 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
7031 COND is the first argument to CODE; otherwise (as in the example
7032 given here), it is the second argument. TYPE is the type of the
7033 original expression. Return NULL_TREE if no simplification is
7037 fold_binary_op_with_conditional_arg (location_t loc
,
7038 enum tree_code code
,
7039 tree type
, tree op0
, tree op1
,
7040 tree cond
, tree arg
, int cond_first_p
)
/* cond_type/arg_type: the operand types of COND and ARG as they appear
   in the original expression, selected by which side COND is on.  */
7042 tree cond_type
= cond_first_p
? TREE_TYPE (op0
) : TREE_TYPE (op1
);
7043 tree arg_type
= cond_first_p
? TREE_TYPE (op1
) : TREE_TYPE (op0
);
7044 tree test
, true_value
, false_value
;
7045 tree lhs
= NULL_TREE
;
7046 tree rhs
= NULL_TREE
;
7047 enum tree_code cond_code
= COND_EXPR
;
7049 /* Do not move possibly trapping operations into the conditional as this
7050 pessimizes code and causes gimplification issues when applied late. */
7051 if (operation_could_trap_p (code
, FLOAT_TYPE_P (type
),
7052 ANY_INTEGRAL_TYPE_P (type
)
7053 && TYPE_OVERFLOW_TRAPS (type
), op1
))
/* Case 1: COND is an actual (VEC_)COND_EXPR -- pull its three operands
   apart so CODE can be pushed into each arm.  */
7056 if (TREE_CODE (cond
) == COND_EXPR
7057 || TREE_CODE (cond
) == VEC_COND_EXPR
)
7059 test
= TREE_OPERAND (cond
, 0);
7060 true_value
= TREE_OPERAND (cond
, 1);
7061 false_value
= TREE_OPERAND (cond
, 2);
7062 /* If this operand throws an expression, then it does not make
7063 sense to try to perform a logical or arithmetic operation
7065 if (VOID_TYPE_P (TREE_TYPE (true_value
)))
7067 if (VOID_TYPE_P (TREE_TYPE (false_value
)))
/* Case 2: COND is a comparison (the `x < y' form) -- synthesize
   constant true/false arms, except in the mixed scalar/vector case.  */
7070 else if (!(TREE_CODE (type
) != VECTOR_TYPE
7071 && TREE_CODE (TREE_TYPE (cond
)) == VECTOR_TYPE
))
7073 tree testtype
= TREE_TYPE (cond
);
7075 true_value
= constant_boolean_node (true, testtype
);
7076 false_value
= constant_boolean_node (false, testtype
);
7079 /* Detect the case of mixing vector and scalar types - bail out. */
/* A vector condition must be rebuilt as VEC_COND_EXPR, not COND_EXPR.  */
7082 if (TREE_CODE (TREE_TYPE (test
)) == VECTOR_TYPE
)
7083 cond_code
= VEC_COND_EXPR
;
7085 /* This transformation is only worthwhile if we don't have to wrap ARG
7086 in a SAVE_EXPR and the operation can be simplified without recursing
7087 on at least one of the branches once its pushed inside the COND_EXPR. */
7088 if (!TREE_CONSTANT (arg
)
7089 && (TREE_SIDE_EFFECTS (arg
)
7090 || TREE_CODE (arg
) == COND_EXPR
|| TREE_CODE (arg
) == VEC_COND_EXPR
7091 || TREE_CONSTANT (true_value
) || TREE_CONSTANT (false_value
)))
7094 arg
= fold_convert_loc (loc
, arg_type
, arg
);
/* Build each arm with ARG on the correct side (the guarding
   cond_first_p tests are missing from this extraction).  */
7097 true_value
= fold_convert_loc (loc
, cond_type
, true_value
);
7099 lhs
= fold_build2_loc (loc
, code
, type
, true_value
, arg
);
7101 lhs
= fold_build2_loc (loc
, code
, type
, arg
, true_value
);
7105 false_value
= fold_convert_loc (loc
, cond_type
, false_value
);
7107 rhs
= fold_build2_loc (loc
, code
, type
, false_value
, arg
);
7109 rhs
= fold_build2_loc (loc
, code
, type
, arg
, false_value
);
7112 /* Check that we have simplified at least one of the branches. */
7113 if (!TREE_CONSTANT (arg
) && !TREE_CONSTANT (lhs
) && !TREE_CONSTANT (rhs
))
7116 return fold_build3_loc (loc
, cond_code
, type
, test
, lhs
, rhs
);
/* NOTE(review): corrupted extraction -- the early-return bodies
   (true/false) and the final return statement are missing.  Verify
   against upstream fold-const.c.  */
7120 /* Subroutine of fold() that checks for the addition of +/- 0.0.
7122 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
7123 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
7124 ADDEND is the same as X.
7126 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7127 and finite. The problematic cases are when X is zero, and its mode
7128 has signed zeros. In the case of rounding towards -infinity,
7129 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7130 modes, X + 0 is not the same as X because -0 + 0 is 0. */
7133 fold_real_zero_addition_p (const_tree type
, const_tree addend
, int negate
)
/* Bail out unless ADDEND is literally a zero constant.  */
7135 if (!real_zerop (addend
))
7138 /* Don't allow the fold with -fsignaling-nans. */
7139 if (HONOR_SNANS (type
))
7142 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7143 if (!HONOR_SIGNED_ZEROS (type
))
7146 /* There is no case that is safe for all rounding modes. */
7147 if (HONOR_SIGN_DEPENDENT_ROUNDING (type
))
7150 /* In a vector or complex, we would need to check the sign of all zeros. */
/* uniform_vector_p extracts the single repeated element, or NULL if the
   vector is not uniform; non-REAL_CST results are rejected below.  */
7151 if (TREE_CODE (addend
) == VECTOR_CST
)
7152 addend
= uniform_vector_p (addend
);
7153 if (!addend
|| TREE_CODE (addend
) != REAL_CST
)
7156 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7157 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend
)))
7160 /* The mode has signed zeros, and we have to honor their sign.
7161 In this situation, there is only one case we can return true for.
7162 X - 0 is the same as X with default rounding. */
/* NOTE(review): corrupted extraction -- switch case labels, braces and
   several return/assignment lines are missing throughout this function.
   Verify against upstream fold-const.c before relying on the structure.  */
7166 /* Subroutine of match.pd that optimizes comparisons of a division by
7167 a nonzero integer constant against an integer constant, i.e.
7170 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7171 GE_EXPR or LE_EXPR. ARG01 and ARG1 must be a INTEGER_CST. */
7174 fold_div_compare (enum tree_code code
, tree c1
, tree c2
, tree
*lo
,
7175 tree
*hi
, bool *neg_overflow
)
7177 tree prod
, tmp
, type
= TREE_TYPE (c1
);
7178 signop sign
= TYPE_SIGN (type
);
7179 wi::overflow_type overflow
;
7181 /* We have to do this the hard way to detect unsigned overflow.
7182 prod = int_const_binop (MULT_EXPR, c1, c2); */
7183 wide_int val
= wi::mul (wi::to_wide (c1
), wi::to_wide (c2
), sign
, &overflow
);
/* force_fit_type records the overflow flag on the resulting constant.  */
7184 prod
= force_fit_type (type
, val
, -1, overflow
);
7185 *neg_overflow
= false;
/* Unsigned case: [prod, prod + (c1-1)] is the range of values whose
   division by c1 yields c2.  */
7187 if (sign
== UNSIGNED
)
7189 tmp
= int_const_binop (MINUS_EXPR
, c1
, build_int_cst (type
, 1));
7192 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7193 val
= wi::add (wi::to_wide (prod
), wi::to_wide (tmp
), sign
, &overflow
);
7194 *hi
= force_fit_type (type
, val
, -1, overflow
| TREE_OVERFLOW (prod
));
/* Signed, positive divisor: range direction depends on the sign of c2
   (the case labels for the switch are missing from this extraction).  */
7196 else if (tree_int_cst_sgn (c1
) >= 0)
7198 tmp
= int_const_binop (MINUS_EXPR
, c1
, build_int_cst (type
, 1));
7199 switch (tree_int_cst_sgn (c2
))
7202 *neg_overflow
= true;
7203 *lo
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
7208 *lo
= fold_negate_const (tmp
, type
);
7213 *hi
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
7223 /* A negative divisor reverses the relational operators. */
7224 code
= swap_tree_comparison (code
);
7226 tmp
= int_const_binop (PLUS_EXPR
, c1
, build_int_cst (type
, 1));
7227 switch (tree_int_cst_sgn (c2
))
7230 *hi
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
7235 *hi
= fold_negate_const (tmp
, type
);
7240 *neg_overflow
= true;
7241 *lo
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
/* For ordered comparisons, saturate overflowed bounds at the type's
   min/max (the replacement statements are missing here).  */
7250 if (code
!= EQ_EXPR
&& code
!= NE_EXPR
)
7253 if (TREE_OVERFLOW (*lo
)
7254 || operand_equal_p (*lo
, TYPE_MIN_VALUE (type
), 0))
7256 if (TREE_OVERFLOW (*hi
)
7257 || operand_equal_p (*hi
, TYPE_MAX_VALUE (type
), 0))
/* NOTE(review): corrupted extraction -- the result-type parameter line,
   braces and the final return NULL_TREE are missing.  Verify against
   upstream fold-const.c.  */
7264 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7265 equality/inequality test, then return a simplified form of the test
7266 using a sign testing. Otherwise return NULL. TYPE is the desired
7270 fold_single_bit_test_into_sign_test (location_t loc
,
7271 enum tree_code code
, tree arg0
, tree arg1
,
7274 /* If this is testing a single bit, we can optimize the test. */
/* Pattern: (A & power-of-2) ==/!= 0.  */
7275 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
7276 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
7277 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
7279 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7280 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7281 tree arg00
= sign_bit_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
7283 if (arg00
!= NULL_TREE
7284 /* This is only a win if casting to a signed type is cheap,
7285 i.e. when arg00's type is not a partial mode. */
7286 && type_has_mode_precision_p (TREE_TYPE (arg00
)))
/* Compare the value, viewed as signed, against zero: EQ -> >= 0,
   NE -> < 0.  */
7288 tree stype
= signed_type_for (TREE_TYPE (arg00
));
7289 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
7291 fold_convert_loc (loc
, stype
, arg00
),
7292 build_int_cst (stype
, 0));
/* NOTE(review): corrupted extraction -- several declarations, the early
   return after the sign-test attempt, braces and the final returns are
   missing.  Verify against upstream fold-const.c.  */
7299 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7300 equality/inequality test, then return a simplified form of
7301 the test using shifts and logical operations. Otherwise return
7302 NULL. TYPE is the desired result type. */
7305 fold_single_bit_test (location_t loc
, enum tree_code code
,
7306 tree arg0
, tree arg1
, tree result_type
)
7308 /* If this is testing a single bit, we can optimize the test. */
7309 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
7310 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
7311 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
7313 tree inner
= TREE_OPERAND (arg0
, 0);
7314 tree type
= TREE_TYPE (arg0
);
/* bitnum = log2 of the tested mask, i.e. the bit position.  */
7315 int bitnum
= tree_log2 (TREE_OPERAND (arg0
, 1));
7316 scalar_int_mode operand_mode
= SCALAR_INT_TYPE_MODE (type
);
7318 tree signed_type
, unsigned_type
, intermediate_type
;
7321 /* First, see if we can fold the single bit test into a sign-bit
7323 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
,
7328 /* Otherwise we have (A & C) != 0 where C is a single bit,
7329 convert that into ((A >> C2) & 1). Where C2 = log2(C).
7330 Similarly for (A & C) == 0. */
7332 /* If INNER is a right shift of a constant and it plus BITNUM does
7333 not overflow, adjust BITNUM and INNER. */
7334 if (TREE_CODE (inner
) == RSHIFT_EXPR
7335 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
7336 && bitnum
< TYPE_PRECISION (type
)
7337 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner
, 1)),
7338 TYPE_PRECISION (type
) - bitnum
))
/* Fold the nested shift into the bit number instead of shifting twice.  */
7340 bitnum
+= tree_to_uhwi (TREE_OPERAND (inner
, 1));
7341 inner
= TREE_OPERAND (inner
, 0);
7344 /* If we are going to be able to omit the AND below, we must do our
7345 operations as unsigned. If we must use the AND, we have a choice.
7346 Normally unsigned is faster, but for some machines signed is. */
7347 ops_unsigned
= (load_extend_op (operand_mode
) == SIGN_EXTEND
7348 && !flag_syntax_only
) ? 0 : 1;
7350 signed_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 0);
7351 unsigned_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 1);
7352 intermediate_type
= ops_unsigned
? unsigned_type
: signed_type
;
7353 inner
= fold_convert_loc (loc
, intermediate_type
, inner
);
/* Shift the tested bit down to bit 0.  */
7356 inner
= build2 (RSHIFT_EXPR
, intermediate_type
,
7357 inner
, size_int (bitnum
));
7359 one
= build_int_cst (intermediate_type
, 1);
/* For ==, invert bit 0 so the final value is 1 when the bit was clear.  */
7361 if (code
== EQ_EXPR
)
7362 inner
= fold_build2_loc (loc
, BIT_XOR_EXPR
, intermediate_type
, inner
, one
);
7364 /* Put the AND last so it can combine with more things. */
7365 inner
= build2 (BIT_AND_EXPR
, intermediate_type
, inner
, one
);
7367 /* Make sure to return the proper type. */
7368 inner
= fold_convert_loc (loc
, result_type
, inner
);
/* NOTE(review): corrupted extraction -- the return true/false bodies of
   each guard and the trailing variable/default cases are missing.
   Verify against upstream fold-const.c.  */
7375 /* Test whether it is preferable two swap two operands, ARG0 and
7376 ARG1, for example because ARG0 is an integer constant and ARG1
7380 tree_swap_operands_p (const_tree arg0
, const_tree arg1
)
/* Constants go last: if ARG1 is already constant, don't swap; if only
   ARG0 is, swap.  (Result lines missing in this extraction.)  */
7382 if (CONSTANT_CLASS_P (arg1
))
7384 if (CONSTANT_CLASS_P (arg0
))
7390 if (TREE_CONSTANT (arg1
))
7392 if (TREE_CONSTANT (arg0
))
7395 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7396 for commutative and comparison operators. Ensuring a canonical
7397 form allows the optimizers to find additional redundancies without
7398 having to explicitly check for both orderings. */
7399 if (TREE_CODE (arg0
) == SSA_NAME
7400 && TREE_CODE (arg1
) == SSA_NAME
7401 && SSA_NAME_VERSION (arg0
) > SSA_NAME_VERSION (arg1
))
7404 /* Put SSA_NAMEs last. */
7405 if (TREE_CODE (arg1
) == SSA_NAME
)
7407 if (TREE_CODE (arg0
) == SSA_NAME
)
7410 /* Put variables last. */
/* NOTE(review): corrupted extraction -- the else-return for unrecognized
   BOUND/INEQ shapes and several braces are missing.  Verify against
   upstream fold-const.c.  */
7420 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7421 means A >= Y && A != MAX, but in this case we know that
7422 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7425 fold_to_nonsharp_ineq_using_bound (location_t loc
, tree ineq
, tree bound
)
7427 tree a
, typea
, type
= TREE_TYPE (ineq
), a1
, diff
, y
;
/* Extract A from the bound `A < X' or `X > A'.  */
7429 if (TREE_CODE (bound
) == LT_EXPR
)
7430 a
= TREE_OPERAND (bound
, 0);
7431 else if (TREE_CODE (bound
) == GT_EXPR
)
7432 a
= TREE_OPERAND (bound
, 1);
/* Only integral and pointer types are handled.  */
7436 typea
= TREE_TYPE (a
);
7437 if (!INTEGRAL_TYPE_P (typea
)
7438 && !POINTER_TYPE_P (typea
))
/* Extract A1 (the candidate A + 1) and Y from the inequality, which may
   be written as `Y < A1' or `A1 > Y'.  */
7441 if (TREE_CODE (ineq
) == LT_EXPR
)
7443 a1
= TREE_OPERAND (ineq
, 1);
7444 y
= TREE_OPERAND (ineq
, 0);
7446 else if (TREE_CODE (ineq
) == GT_EXPR
)
7448 a1
= TREE_OPERAND (ineq
, 0);
7449 y
= TREE_OPERAND (ineq
, 1);
7454 if (TREE_TYPE (a1
) != typea
)
7457 if (POINTER_TYPE_P (typea
))
7459 /* Convert the pointer types into integer before taking the difference. */
7460 tree ta
= fold_convert_loc (loc
, ssizetype
, a
);
7461 tree ta1
= fold_convert_loc (loc
, ssizetype
, a1
);
7462 diff
= fold_binary_loc (loc
, MINUS_EXPR
, ssizetype
, ta1
, ta
);
7465 diff
= fold_binary_loc (loc
, MINUS_EXPR
, typea
, a1
, a
);
/* The transformation only applies when A1 is exactly A + 1.  */
7467 if (!diff
|| !integer_onep (diff
))
7470 return fold_build2_loc (loc
, GE_EXPR
, type
, a
, y
);
/* NOTE(review): corrupted extraction -- else-branches, the `same == NULL'
   early return, the INT_MIN guard, several braces and operand lines are
   missing.  Verify against upstream fold-const.c.  */
7473 /* Fold a sum or difference of at least one multiplication.
7474 Returns the folded tree or NULL if no simplification could be made. */
7477 fold_plusminus_mult_expr (location_t loc
, enum tree_code code
, tree type
,
7478 tree arg0
, tree arg1
)
7480 tree arg00
, arg01
, arg10
, arg11
;
7481 tree alt0
= NULL_TREE
, alt1
= NULL_TREE
, same
;
7483 /* (A * C) +- (B * C) -> (A+-B) * C.
7484 (A * C) +- A -> A * (C+-1).
7485 We are most concerned about the case where C is a constant,
7486 but other combinations show up during loop reduction. Since
7487 it is not difficult, try all four possibilities. */
/* Decompose ARG0 into arg00 * arg01; a bare value counts as value * 1.  */
7489 if (TREE_CODE (arg0
) == MULT_EXPR
)
7491 arg00
= TREE_OPERAND (arg0
, 0);
7492 arg01
= TREE_OPERAND (arg0
, 1);
7494 else if (TREE_CODE (arg0
) == INTEGER_CST
)
7496 arg00
= build_one_cst (type
);
7501 /* We cannot generate constant 1 for fract. */
7502 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
7505 arg01
= build_one_cst (type
);
/* Likewise decompose ARG1 into arg10 * arg11.  */
7507 if (TREE_CODE (arg1
) == MULT_EXPR
)
7509 arg10
= TREE_OPERAND (arg1
, 0);
7510 arg11
= TREE_OPERAND (arg1
, 1);
7512 else if (TREE_CODE (arg1
) == INTEGER_CST
)
7514 arg10
= build_one_cst (type
);
7515 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7516 the purpose of this canonicalization. */
7517 if (wi::neg_p (wi::to_wide (arg1
), TYPE_SIGN (TREE_TYPE (arg1
)))
7518 && negate_expr_p (arg1
)
7519 && code
== PLUS_EXPR
)
7521 arg11
= negate_expr (arg1
);
7529 /* We cannot generate constant 1 for fract. */
7530 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
7533 arg11
= build_one_cst (type
);
7537 /* Prefer factoring a common non-constant. */
/* Try all four factor pairings; `same' is the common factor, alt0/alt1
   the remaining multiplicands.  */
7538 if (operand_equal_p (arg00
, arg10
, 0))
7539 same
= arg00
, alt0
= arg01
, alt1
= arg11
;
7540 else if (operand_equal_p (arg01
, arg11
, 0))
7541 same
= arg01
, alt0
= arg00
, alt1
= arg10
;
7542 else if (operand_equal_p (arg00
, arg11
, 0))
7543 same
= arg00
, alt0
= arg01
, alt1
= arg10
;
7544 else if (operand_equal_p (arg01
, arg10
, 0))
7545 same
= arg01
, alt0
= arg00
, alt1
= arg11
;
7547 /* No identical multiplicands; see if we can find a common
7548 power-of-two factor in non-power-of-two multiplies. This
7549 can help in multi-dimensional array access. */
7550 else if (tree_fits_shwi_p (arg01
) && tree_fits_shwi_p (arg11
))
7552 HOST_WIDE_INT int01
= tree_to_shwi (arg01
);
7553 HOST_WIDE_INT int11
= tree_to_shwi (arg11
);
7558 /* Move min of absolute values to int11. */
7559 if (absu_hwi (int01
) < absu_hwi (int11
))
7561 tmp
= int01
, int01
= int11
, int11
= tmp
;
7562 alt0
= arg00
, arg00
= arg10
, arg10
= alt0
;
7569 const unsigned HOST_WIDE_INT factor
= absu_hwi (int11
);
7571 && pow2p_hwi (factor
)
7572 && (int01
& (factor
- 1)) == 0
7573 /* The remainder should not be a constant, otherwise we
7574 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7575 increased the number of multiplications necessary. */
7576 && TREE_CODE (arg10
) != INTEGER_CST
)
7578 alt0
= fold_build2_loc (loc
, MULT_EXPR
, TREE_TYPE (arg00
), arg00
,
7579 build_int_cst (TREE_TYPE (arg00
),
7584 maybe_same
= alt0
, alt0
= alt1
, alt1
= maybe_same
;
/* Safe to factor directly when overflow wraps or the common factor is a
   constant (neither zero nor minus one, per the comment).  */
7591 if (! ANY_INTEGRAL_TYPE_P (type
)
7592 || TYPE_OVERFLOW_WRAPS (type
)
7593 /* We are neither factoring zero nor minus one. */
7594 || TREE_CODE (same
) == INTEGER_CST
)
7595 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7596 fold_build2_loc (loc
, code
, type
,
7597 fold_convert_loc (loc
, type
, alt0
),
7598 fold_convert_loc (loc
, type
, alt1
)),
7599 fold_convert_loc (loc
, type
, same
));
7601 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7602 same may be minus one and thus the multiplication may overflow. Perform
7603 the sum operation in an unsigned type. */
7604 tree utype
= unsigned_type_for (type
);
7605 tree tem
= fold_build2_loc (loc
, code
, utype
,
7606 fold_convert_loc (loc
, utype
, alt0
),
7607 fold_convert_loc (loc
, utype
, alt1
));
7608 /* If the sum evaluated to a constant that is not -INF the multiplication
7610 if (TREE_CODE (tem
) == INTEGER_CST
7611 && (wi::to_wide (tem
)
7612 != wi::min_value (TYPE_PRECISION (utype
), SIGNED
)))
7613 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7614 fold_convert (type
, tem
), same
);
7616 /* Do not resort to unsigned multiplication because
7617 we lose the no-overflow property of the expression. */
/* NOTE(review): corrupted extraction -- the failure-return lines, the
   `ptr == NULL' dry-run shortcut and several braces are missing.
   Verify against upstream fold-const.c.  */
7621 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7622 specified by EXPR into the buffer PTR of length LEN bytes.
7623 Return the number of bytes placed in the buffer, or zero
7627 native_encode_int (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7629 tree type
= TREE_TYPE (expr
);
7630 int total_bytes
= GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type
));
7631 int byte
, offset
, word
, words
;
7632 unsigned char value
;
/* off == -1 means "encode the whole constant"; otherwise start at OFF.  */
7634 if ((off
== -1 && total_bytes
> len
) || off
>= total_bytes
)
7641 return MIN (len
, total_bytes
- off
);
7643 words
= total_bytes
/ UNITS_PER_WORD
;
/* Emit one byte per iteration, in target (word/byte) endianness.  */
7645 for (byte
= 0; byte
< total_bytes
; byte
++)
7647 int bitpos
= byte
* BITS_PER_UNIT
;
7648 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7650 value
= wi::extract_uhwi (wi::to_widest (expr
), bitpos
, BITS_PER_UNIT
);
7652 if (total_bytes
> UNITS_PER_WORD
)
7654 word
= byte
/ UNITS_PER_WORD
;
7655 if (WORDS_BIG_ENDIAN
)
7656 word
= (words
- 1) - word
;
7657 offset
= word
* UNITS_PER_WORD
;
7658 if (BYTES_BIG_ENDIAN
)
7659 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7661 offset
+= byte
% UNITS_PER_WORD
;
7664 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
/* Only store bytes that land inside the [off, off+len) window.  */
7665 if (offset
>= off
&& offset
- off
< len
)
7666 ptr
[offset
- off
] = value
;
7668 return MIN (len
, total_bytes
- off
);
/* NOTE(review): corrupted extraction -- the `return 0' failure bodies
   are missing.  Verify against upstream fold-const.c.  */
7672 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7673 specified by EXPR into the buffer PTR of length LEN bytes.
7674 Return the number of bytes placed in the buffer, or zero
7678 native_encode_fixed (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7680 tree type
= TREE_TYPE (expr
);
7681 scalar_mode mode
= SCALAR_TYPE_MODE (type
);
7682 int total_bytes
= GET_MODE_SIZE (mode
);
7683 FIXED_VALUE_TYPE value
;
7684 tree i_value
, i_type
;
/* The double_int path below cannot represent wider values.  */
7686 if (total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
/* Build an unsigned integer type of the same size and encode the
   fixed-point payload through native_encode_int.  */
7689 i_type
= lang_hooks
.types
.type_for_size (GET_MODE_BITSIZE (mode
), 1);
/* NOTE(review): comparing TYPE_PRECISION against total BYTES looks
   suspicious (bits vs bytes) -- check upstream before "fixing".  */
7691 if (NULL_TREE
== i_type
|| TYPE_PRECISION (i_type
) != total_bytes
)
7694 value
= TREE_FIXED_CST (expr
);
7695 i_value
= double_int_to_tree (i_type
, value
.data
);
7697 return native_encode_int (i_value
, ptr
, len
, off
);
/* NOTE(review): corrupted extraction -- the `long tmp[6]' buffer
   declaration, failure returns and some braces are missing.  Verify
   against upstream fold-const.c.  */
7701 /* Subroutine of native_encode_expr. Encode the REAL_CST
7702 specified by EXPR into the buffer PTR of length LEN bytes.
7703 Return the number of bytes placed in the buffer, or zero
7707 native_encode_real (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7709 tree type
= TREE_TYPE (expr
);
7710 int total_bytes
= GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type
));
7711 int byte
, offset
, word
, words
, bitpos
;
7712 unsigned char value
;
7714 /* There are always 32 bits in each long, no matter the size of
7715 the hosts long. We handle floating point representations with
7719 if ((off
== -1 && total_bytes
> len
) || off
>= total_bytes
)
7726 return MIN (len
, total_bytes
- off
);
7728 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
/* real_to_target fills tmp[] with the target image, 32 bits per long.  */
7730 real_to_target (tmp
, TREE_REAL_CST_PTR (expr
), TYPE_MODE (type
));
7732 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7733 bitpos
+= BITS_PER_UNIT
)
/* byte index within the current 32-bit group.  */
7735 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7736 value
= (unsigned char) (tmp
[bitpos
/ 32] >> (bitpos
& 31));
7738 if (UNITS_PER_WORD
< 4)
7740 word
= byte
/ UNITS_PER_WORD
;
7741 if (WORDS_BIG_ENDIAN
)
7742 word
= (words
- 1) - word
;
7743 offset
= word
* UNITS_PER_WORD
;
7744 if (BYTES_BIG_ENDIAN
)
7745 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7747 offset
+= byte
% UNITS_PER_WORD
;
7752 if (BYTES_BIG_ENDIAN
)
7754 /* Reverse bytes within each long, or within the entire float
7755 if it's smaller than a long (for HFmode). */
7756 offset
= MIN (3, total_bytes
- 1) - offset
;
7757 gcc_assert (offset
>= 0);
/* Rebase the in-group offset to the absolute byte position.  */
7760 offset
= offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3);
7762 && offset
- off
< len
)
7763 ptr
[offset
- off
] = value
;
7765 return MIN (len
, total_bytes
- off
);
/* NOTE(review): corrupted extraction -- the rsize/isize declarations,
   failure returns and the length argument of the second encode call are
   missing.  Verify against upstream fold-const.c.  */
7768 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7769 specified by EXPR into the buffer PTR of length LEN bytes.
7770 Return the number of bytes placed in the buffer, or zero
7774 native_encode_complex (const_tree expr
, unsigned char *ptr
, int len
, int off
)
/* Encode the real part first, then the imaginary part right after it.  */
7779 part
= TREE_REALPART (expr
);
7780 rsize
= native_encode_expr (part
, ptr
, len
, off
);
7781 if (off
== -1 && rsize
== 0)
7783 part
= TREE_IMAGPART (expr
);
/* Adjust OFF past the real part for the windowed (off != -1) case.  */
7785 off
= MAX (0, off
- GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part
))));
7786 isize
= native_encode_expr (part
, ptr
? ptr
+ rsize
: NULL
,
7788 if (off
== -1 && isize
!= rsize
)
7790 return rsize
+ isize
;
/* NOTE(review): corrupted extraction -- the byte-mode loop's offset
   computation, the per-element off adjustment and some returns are
   missing.  Verify against upstream fold-const.c.  */
7793 /* Like native_encode_vector, but only encode the first COUNT elements.
7794 The other arguments are as for native_encode_vector. */
7797 native_encode_vector_part (const_tree expr
, unsigned char *ptr
, int len
,
7798 int off
, unsigned HOST_WIDE_INT count
)
7800 tree itype
= TREE_TYPE (TREE_TYPE (expr
));
/* Sub-byte path: boolean vectors whose elements are narrower than a
   byte are packed bitwise.  */
7801 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr
))
7802 && TYPE_PRECISION (itype
) <= BITS_PER_UNIT
)
7804 /* This is the only case in which elements can be smaller than a byte.
7805 Element 0 is always in the lsb of the containing byte. */
7806 unsigned int elt_bits
= TYPE_PRECISION (itype
);
7807 int total_bytes
= CEIL (elt_bits
* count
, BITS_PER_UNIT
);
7808 if ((off
== -1 && total_bytes
> len
) || off
>= total_bytes
)
7814 /* Zero the buffer and then set bits later where necessary. */
7815 int extract_bytes
= MIN (len
, total_bytes
- off
);
7817 memset (ptr
, 0, extract_bytes
);
7819 unsigned int elts_per_byte
= BITS_PER_UNIT
/ elt_bits
;
7820 unsigned int first_elt
= off
* elts_per_byte
;
7821 unsigned int extract_elts
= extract_bytes
* elts_per_byte
;
7822 for (unsigned int i
= 0; i
< extract_elts
; ++i
)
7824 tree elt
= VECTOR_CST_ELT (expr
, first_elt
+ i
);
7825 if (TREE_CODE (elt
) != INTEGER_CST
)
/* Set the bit only when the element's low bit is 1.  */
7828 if (ptr
&& wi::extract_uhwi (wi::to_wide (elt
), 0, 1))
7830 unsigned int bit
= i
* elt_bits
;
7831 ptr
[bit
/ BITS_PER_UNIT
] |= 1 << (bit
% BITS_PER_UNIT
);
7834 return extract_bytes
;
/* Byte-or-wider path: encode each element through native_encode_expr.  */
7838 int size
= GET_MODE_SIZE (SCALAR_TYPE_MODE (itype
));
7839 for (unsigned HOST_WIDE_INT i
= 0; i
< count
; i
++)
7846 tree elem
= VECTOR_CST_ELT (expr
, i
);
7847 int res
= native_encode_expr (elem
, ptr
? ptr
+ offset
: NULL
,
7849 if ((off
== -1 && res
!= size
) || res
== 0)
7853 return (off
== -1 && i
< count
- 1) ? 0 : offset
;
/* NOTE(review): corrupted extraction (split lines, embedded line
   numbers); the `return 0' for the non-constant-length case is missing.  */
7860 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7861 specified by EXPR into the buffer PTR of length LEN bytes.
7862 Return the number of bytes placed in the buffer, or zero
7866 native_encode_vector (const_tree expr
, unsigned char *ptr
, int len
, int off
)
/* Variable-length vectors (non-constant element count) cannot be
   encoded; delegate fixed-length ones to native_encode_vector_part.  */
7868 unsigned HOST_WIDE_INT count
;
7869 if (!VECTOR_CST_NELTS (expr
).is_constant (&count
))
7871 return native_encode_vector_part (expr
, ptr
, len
, off
, count
);
/* NOTE(review): corrupted extraction -- failure returns, the dry-run
   shortcut, the `written' declaration and the final return are missing.
   Verify against upstream fold-const.c.  */
7875 /* Subroutine of native_encode_expr. Encode the STRING_CST
7876 specified by EXPR into the buffer PTR of length LEN bytes.
7877 Return the number of bytes placed in the buffer, or zero
7881 native_encode_string (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7883 tree type
= TREE_TYPE (expr
);
7885 /* Wide-char strings are encoded in target byte-order so native
7886 encoding them is trivial. */
/* Only plain narrow char arrays with a known size are handled.  */
7887 if (BITS_PER_UNIT
!= CHAR_BIT
7888 || TREE_CODE (type
) != ARRAY_TYPE
7889 || TREE_CODE (TREE_TYPE (type
)) != INTEGER_TYPE
7890 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type
)))
7893 HOST_WIDE_INT total_bytes
= tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr
)));
7894 if ((off
== -1 && total_bytes
> len
) || off
>= total_bytes
)
7898 len
= MIN (total_bytes
- off
, len
);
/* Copy the stored characters, then zero-fill the tail (the array may
   be longer than the literal's stored length).  */
7904 if (off
< TREE_STRING_LENGTH (expr
))
7906 written
= MIN (len
, TREE_STRING_LENGTH (expr
) - off
);
7907 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, written
);
7909 memset (ptr
+ written
, 0, len
- written
);
/* NOTE(review): corrupted extraction -- the case labels
   (INTEGER_CST/REAL_CST/FIXED_CST/COMPLEX_CST/VECTOR_CST/STRING_CST),
   the off-range guard body and the default return are missing.  The
   dispatch order below still shows which helper handles which code.  */
7915 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7916 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7917 buffer PTR of length LEN bytes. If PTR is NULL, don't actually store
7918 anything, just do a dry run. If OFF is not -1 then start
7919 the encoding at byte offset OFF and encode at most LEN bytes.
7920 Return the number of bytes placed in the buffer, or zero upon failure. */
7923 native_encode_expr (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7925 /* We don't support starting at negative offset and -1 is special. */
7929 switch (TREE_CODE (expr
))
7932 return native_encode_int (expr
, ptr
, len
, off
);
7935 return native_encode_real (expr
, ptr
, len
, off
);
7938 return native_encode_fixed (expr
, ptr
, len
, off
);
7941 return native_encode_complex (expr
, ptr
, len
, off
);
7944 return native_encode_vector (expr
, ptr
, len
, off
);
7947 return native_encode_string (expr
, ptr
, len
, off
);
/* NOTE(review): heavily corrupted extraction -- the CONSTRUCTOR case
   label, the wpos/wcount declarations, the RANGE_EXPR repeat loop
   header, many guards, braces and the bitfield handling are missing.
   This is the least complete block in the chunk; treat the surviving
   lines only as a skeleton and restore from upstream fold-const.c.  */
7954 /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
7955 NON_LVALUE_EXPRs and nops. */
7958 native_encode_initializer (tree init
, unsigned char *ptr
, int len
,
7961 /* We don't support starting at negative offset and -1 is special. */
7962 if (off
< -1 || init
== NULL_TREE
)
7966 switch (TREE_CODE (init
))
7968 case VIEW_CONVERT_EXPR
:
7969 case NON_LVALUE_EXPR
:
/* Strip the wrapper and recurse on the wrapped value.  */
7970 return native_encode_initializer (TREE_OPERAND (init
, 0), ptr
, len
, off
);
/* Default: a plain constant -- delegate to native_encode_expr.  */
7972 return native_encode_expr (init
, ptr
, len
, off
);
/* CONSTRUCTOR handling (case label missing from this extraction).  */
7974 tree type
= TREE_TYPE (init
);
7975 HOST_WIDE_INT total_bytes
= int_size_in_bytes (type
);
7976 if (total_bytes
< 0)
7978 if ((off
== -1 && total_bytes
> len
) || off
>= total_bytes
)
7980 int o
= off
== -1 ? 0 : off
;
7981 if (TREE_CODE (type
) == ARRAY_TYPE
)
7984 unsigned HOST_WIDE_INT cnt
;
7985 HOST_WIDE_INT curpos
= 0, fieldsize
;
7986 constructor_elt
*ce
;
7988 if (!TYPE_DOMAIN (type
)
7989 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type
))) != INTEGER_CST
)
7992 fieldsize
= int_size_in_bytes (TREE_TYPE (type
));
7996 min_index
= TYPE_MIN_VALUE (TYPE_DOMAIN (type
));
/* Pre-zero the window so gaps between initialized elements read as 0.  */
7998 memset (ptr
, '\0', MIN (total_bytes
- off
, len
));
8000 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init
), cnt
, ce
)
8002 tree val
= ce
->value
;
8003 tree index
= ce
->index
;
8004 HOST_WIDE_INT pos
= curpos
, count
= 0;
/* RANGE_EXPR index: compute start position and repeat count.  */
8006 if (index
&& TREE_CODE (index
) == RANGE_EXPR
)
8008 if (TREE_CODE (TREE_OPERAND (index
, 0)) != INTEGER_CST
8009 || TREE_CODE (TREE_OPERAND (index
, 1)) != INTEGER_CST
)
8012 = wi::sext (wi::to_offset (TREE_OPERAND (index
, 0))
8013 - wi::to_offset (min_index
),
8014 TYPE_PRECISION (sizetype
));
8016 if (!wi::fits_shwi_p (pos
))
8018 pos
= wpos
.to_shwi ();
8020 = wi::sext (wi::to_offset (TREE_OPERAND (index
, 1))
8021 - wi::to_offset (TREE_OPERAND (index
, 0)),
8022 TYPE_PRECISION (sizetype
));
8023 if (!wi::fits_shwi_p (wcount
))
8025 count
= wcount
.to_shwi ();
/* Single INTEGER_CST index: position relative to the domain minimum.  */
8029 if (TREE_CODE (index
) != INTEGER_CST
)
8032 = wi::sext (wi::to_offset (index
)
8033 - wi::to_offset (min_index
),
8034 TYPE_PRECISION (sizetype
));
8036 if (!wi::fits_shwi_p (wpos
))
8038 pos
= wpos
.to_shwi ();
8047 && (curpos
+ fieldsize
8048 <= (HOST_WIDE_INT
) off
+ len
)))
/* Repeated element fully inside the window: copy the bytes already
   encoded at the first occurrence.  */
8053 memcpy (ptr
+ (curpos
- o
), ptr
+ (pos
- o
),
8056 else if (!native_encode_initializer (val
,
8070 else if (curpos
+ fieldsize
> off
8071 && curpos
< (HOST_WIDE_INT
) off
+ len
)
8073 /* Partial overlap. */
8074 unsigned char *p
= NULL
;
8080 p
= ptr
+ curpos
- off
;
8081 l
= MIN ((HOST_WIDE_INT
) off
+ len
- curpos
,
8090 if (!native_encode_initializer (val
, p
, l
, no
))
8093 curpos
+= fieldsize
;
8095 while (count
-- != 0);
8097 return MIN (total_bytes
- off
, len
);
8099 else if (TREE_CODE (type
) == RECORD_TYPE
8100 || TREE_CODE (type
) == UNION_TYPE
)
8102 unsigned HOST_WIDE_INT cnt
;
8103 constructor_elt
*ce
;
8106 memset (ptr
, '\0', MIN (total_bytes
- off
, len
));
8107 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init
), cnt
, ce
)
8109 tree field
= ce
->index
;
8110 tree val
= ce
->value
;
8111 HOST_WIDE_INT pos
, fieldsize
;
8113 if (field
== NULL_TREE
)
8116 pos
= int_byte_position (field
);
/* Field entirely past the requested window: stop/skip (body missing).  */
8117 if (off
!= -1 && (HOST_WIDE_INT
) off
+ len
<= pos
)
/* Flexible array member (unbounded trailing array): size from the
   field's DECL_SIZE_UNIT instead of the type.  */
8120 if (TREE_CODE (TREE_TYPE (field
)) == ARRAY_TYPE
8121 && TYPE_DOMAIN (TREE_TYPE (field
))
8122 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field
))))
8124 if (DECL_SIZE_UNIT (field
) == NULL_TREE
8125 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field
)))
8127 fieldsize
= tree_to_shwi (DECL_SIZE_UNIT (field
));
8131 if (off
!= -1 && pos
+ fieldsize
<= off
)
/* Bitfields are not supported (handler body missing here).  */
8134 if (DECL_BIT_FIELD (field
))
8137 if (val
== NULL_TREE
)
8142 && (pos
+ fieldsize
<= (HOST_WIDE_INT
) off
+ len
)))
8144 if (!native_encode_initializer (val
, ptr
? ptr
+ pos
- o
8147 off
== -1 ? -1 : 0))
8152 /* Partial overlap. */
8153 unsigned char *p
= NULL
;
8159 p
= ptr
+ pos
- off
;
8160 l
= MIN ((HOST_WIDE_INT
) off
+ len
- pos
,
8169 if (!native_encode_initializer (val
, p
, l
, no
))
8173 return MIN (total_bytes
- off
, len
);
8180 /* Subroutine of native_interpret_expr. Interpret the contents of
8181 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8182 If the buffer cannot be interpreted, return NULL_TREE. */
8185 native_interpret_int (tree type
, const unsigned char *ptr
, int len
)
8187 int total_bytes
= GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type
));
8189 if (total_bytes
> len
8190 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
8193 wide_int result
= wi::from_buffer (ptr
, total_bytes
);
8195 return wide_int_to_tree (type
, result
);
8199 /* Subroutine of native_interpret_expr. Interpret the contents of
8200 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8201 If the buffer cannot be interpreted, return NULL_TREE. */
8204 native_interpret_fixed (tree type
, const unsigned char *ptr
, int len
)
8206 scalar_mode mode
= SCALAR_TYPE_MODE (type
);
8207 int total_bytes
= GET_MODE_SIZE (mode
);
8209 FIXED_VALUE_TYPE fixed_value
;
8211 if (total_bytes
> len
8212 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
8215 result
= double_int::from_buffer (ptr
, total_bytes
);
8216 fixed_value
= fixed_from_double_int (result
, mode
);
8218 return build_fixed (type
, fixed_value
);
8222 /* Subroutine of native_interpret_expr. Interpret the contents of
8223 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8224 If the buffer cannot be interpreted, return NULL_TREE. */
8227 native_interpret_real (tree type
, const unsigned char *ptr
, int len
)
8229 scalar_float_mode mode
= SCALAR_FLOAT_TYPE_MODE (type
);
8230 int total_bytes
= GET_MODE_SIZE (mode
);
8231 unsigned char value
;
8232 /* There are always 32 bits in each long, no matter the size of
8233 the hosts long. We handle floating point representations with
8238 if (total_bytes
> len
|| total_bytes
> 24)
8240 int words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
8242 memset (tmp
, 0, sizeof (tmp
));
8243 for (int bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
8244 bitpos
+= BITS_PER_UNIT
)
8246 /* Both OFFSET and BYTE index within a long;
8247 bitpos indexes the whole float. */
8248 int offset
, byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
8249 if (UNITS_PER_WORD
< 4)
8251 int word
= byte
/ UNITS_PER_WORD
;
8252 if (WORDS_BIG_ENDIAN
)
8253 word
= (words
- 1) - word
;
8254 offset
= word
* UNITS_PER_WORD
;
8255 if (BYTES_BIG_ENDIAN
)
8256 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
8258 offset
+= byte
% UNITS_PER_WORD
;
8263 if (BYTES_BIG_ENDIAN
)
8265 /* Reverse bytes within each long, or within the entire float
8266 if it's smaller than a long (for HFmode). */
8267 offset
= MIN (3, total_bytes
- 1) - offset
;
8268 gcc_assert (offset
>= 0);
8271 value
= ptr
[offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3)];
8273 tmp
[bitpos
/ 32] |= (unsigned long)value
<< (bitpos
& 31);
8276 real_from_target (&r
, tmp
, mode
);
8277 tree ret
= build_real (type
, r
);
8278 if (MODE_COMPOSITE_P (mode
))
8280 /* For floating point values in composite modes, punt if this folding
8281 doesn't preserve bit representation. As the mode doesn't have fixed
8282 precision while GCC pretends it does, there could be valid values that
8283 GCC can't really represent accurately. See PR95450. */
8284 unsigned char buf
[24];
8285 if (native_encode_expr (ret
, buf
, total_bytes
, 0) != total_bytes
8286 || memcmp (ptr
, buf
, total_bytes
) != 0)
8293 /* Subroutine of native_interpret_expr. Interpret the contents of
8294 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8295 If the buffer cannot be interpreted, return NULL_TREE. */
8298 native_interpret_complex (tree type
, const unsigned char *ptr
, int len
)
8300 tree etype
, rpart
, ipart
;
8303 etype
= TREE_TYPE (type
);
8304 size
= GET_MODE_SIZE (SCALAR_TYPE_MODE (etype
));
8307 rpart
= native_interpret_expr (etype
, ptr
, size
);
8310 ipart
= native_interpret_expr (etype
, ptr
+size
, size
);
8313 return build_complex (type
, rpart
, ipart
);
8316 /* Read a vector of type TYPE from the target memory image given by BYTES,
8317 which contains LEN bytes. The vector is known to be encodable using
8318 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8320 Return the vector on success, otherwise return null. */
8323 native_interpret_vector_part (tree type
, const unsigned char *bytes
,
8324 unsigned int len
, unsigned int npatterns
,
8325 unsigned int nelts_per_pattern
)
8327 tree elt_type
= TREE_TYPE (type
);
8328 if (VECTOR_BOOLEAN_TYPE_P (type
)
8329 && TYPE_PRECISION (elt_type
) <= BITS_PER_UNIT
)
8331 /* This is the only case in which elements can be smaller than a byte.
8332 Element 0 is always in the lsb of the containing byte. */
8333 unsigned int elt_bits
= TYPE_PRECISION (elt_type
);
8334 if (elt_bits
* npatterns
* nelts_per_pattern
> len
* BITS_PER_UNIT
)
8337 tree_vector_builder
builder (type
, npatterns
, nelts_per_pattern
);
8338 for (unsigned int i
= 0; i
< builder
.encoded_nelts (); ++i
)
8340 unsigned int bit_index
= i
* elt_bits
;
8341 unsigned int byte_index
= bit_index
/ BITS_PER_UNIT
;
8342 unsigned int lsb
= bit_index
% BITS_PER_UNIT
;
8343 builder
.quick_push (bytes
[byte_index
] & (1 << lsb
)
8344 ? build_all_ones_cst (elt_type
)
8345 : build_zero_cst (elt_type
));
8347 return builder
.build ();
8350 unsigned int elt_bytes
= tree_to_uhwi (TYPE_SIZE_UNIT (elt_type
));
8351 if (elt_bytes
* npatterns
* nelts_per_pattern
> len
)
8354 tree_vector_builder
builder (type
, npatterns
, nelts_per_pattern
);
8355 for (unsigned int i
= 0; i
< builder
.encoded_nelts (); ++i
)
8357 tree elt
= native_interpret_expr (elt_type
, bytes
, elt_bytes
);
8360 builder
.quick_push (elt
);
8363 return builder
.build ();
8366 /* Subroutine of native_interpret_expr. Interpret the contents of
8367 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8368 If the buffer cannot be interpreted, return NULL_TREE. */
8371 native_interpret_vector (tree type
, const unsigned char *ptr
, unsigned int len
)
8375 unsigned HOST_WIDE_INT count
;
8377 etype
= TREE_TYPE (type
);
8378 size
= GET_MODE_SIZE (SCALAR_TYPE_MODE (etype
));
8379 if (!TYPE_VECTOR_SUBPARTS (type
).is_constant (&count
)
8380 || size
* count
> len
)
8383 return native_interpret_vector_part (type
, ptr
, len
, count
, 1);
8387 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8388 the buffer PTR of length LEN as a constant of type TYPE. For
8389 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8390 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8391 return NULL_TREE. */
8394 native_interpret_expr (tree type
, const unsigned char *ptr
, int len
)
8396 switch (TREE_CODE (type
))
8402 case REFERENCE_TYPE
:
8403 return native_interpret_int (type
, ptr
, len
);
8406 return native_interpret_real (type
, ptr
, len
);
8408 case FIXED_POINT_TYPE
:
8409 return native_interpret_fixed (type
, ptr
, len
);
8412 return native_interpret_complex (type
, ptr
, len
);
8415 return native_interpret_vector (type
, ptr
, len
);
8422 /* Returns true if we can interpret the contents of a native encoding
8426 can_native_interpret_type_p (tree type
)
8428 switch (TREE_CODE (type
))
8434 case REFERENCE_TYPE
:
8435 case FIXED_POINT_TYPE
:
8445 /* Routines for manipulation of native_encode_expr encoded data if the encoded
8446 or extracted constant positions and/or sizes aren't byte aligned. */
8448 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
8449 bits between adjacent elements. AMNT should be within
8452 00011111|11100000 << 2 = 01111111|10000000
8453 PTR[1] | PTR[0] PTR[1] | PTR[0]. */
8456 shift_bytes_in_array_left (unsigned char *ptr
, unsigned int sz
,
8462 unsigned char carry_over
= 0U;
8463 unsigned char carry_mask
= (~0U) << (unsigned char) (BITS_PER_UNIT
- amnt
);
8464 unsigned char clear_mask
= (~0U) << amnt
;
8466 for (unsigned int i
= 0; i
< sz
; i
++)
8468 unsigned prev_carry_over
= carry_over
;
8469 carry_over
= (ptr
[i
] & carry_mask
) >> (BITS_PER_UNIT
- amnt
);
8474 ptr
[i
] &= clear_mask
;
8475 ptr
[i
] |= prev_carry_over
;
8480 /* Like shift_bytes_in_array_left but for big-endian.
8481 Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
8482 bits between adjacent elements. AMNT should be within
8485 00011111|11100000 >> 2 = 00000111|11111000
8486 PTR[0] | PTR[1] PTR[0] | PTR[1]. */
8489 shift_bytes_in_array_right (unsigned char *ptr
, unsigned int sz
,
8495 unsigned char carry_over
= 0U;
8496 unsigned char carry_mask
= ~(~0U << amnt
);
8498 for (unsigned int i
= 0; i
< sz
; i
++)
8500 unsigned prev_carry_over
= carry_over
;
8501 carry_over
= ptr
[i
] & carry_mask
;
8503 carry_over
<<= (unsigned char) BITS_PER_UNIT
- amnt
;
8505 ptr
[i
] |= prev_carry_over
;
8509 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
8510 directly on the VECTOR_CST encoding, in a way that works for variable-
8511 length vectors. Return the resulting VECTOR_CST on success or null
8515 fold_view_convert_vector_encoding (tree type
, tree expr
)
8517 tree expr_type
= TREE_TYPE (expr
);
8518 poly_uint64 type_bits
, expr_bits
;
8519 if (!poly_int_tree_p (TYPE_SIZE (type
), &type_bits
)
8520 || !poly_int_tree_p (TYPE_SIZE (expr_type
), &expr_bits
))
8523 poly_uint64 type_units
= TYPE_VECTOR_SUBPARTS (type
);
8524 poly_uint64 expr_units
= TYPE_VECTOR_SUBPARTS (expr_type
);
8525 unsigned int type_elt_bits
= vector_element_size (type_bits
, type_units
);
8526 unsigned int expr_elt_bits
= vector_element_size (expr_bits
, expr_units
);
8528 /* We can only preserve the semantics of a stepped pattern if the new
8529 vector element is an integer of the same size. */
8530 if (VECTOR_CST_STEPPED_P (expr
)
8531 && (!INTEGRAL_TYPE_P (type
) || type_elt_bits
!= expr_elt_bits
))
8534 /* The number of bits needed to encode one element from every pattern
8535 of the original vector. */
8536 unsigned int expr_sequence_bits
8537 = VECTOR_CST_NPATTERNS (expr
) * expr_elt_bits
;
8539 /* The number of bits needed to encode one element from every pattern
8541 unsigned int type_sequence_bits
8542 = least_common_multiple (expr_sequence_bits
, type_elt_bits
);
8544 /* Don't try to read more bytes than are available, which can happen
8545 for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
8546 The general VIEW_CONVERT handling can cope with that case, so there's
8547 no point complicating things here. */
8548 unsigned int nelts_per_pattern
= VECTOR_CST_NELTS_PER_PATTERN (expr
);
8549 unsigned int buffer_bytes
= CEIL (nelts_per_pattern
* type_sequence_bits
,
8551 unsigned int buffer_bits
= buffer_bytes
* BITS_PER_UNIT
;
8552 if (known_gt (buffer_bits
, expr_bits
))
8555 /* Get enough bytes of EXPR to form the new encoding. */
8556 auto_vec
<unsigned char, 128> buffer (buffer_bytes
);
8557 buffer
.quick_grow (buffer_bytes
);
8558 if (native_encode_vector_part (expr
, buffer
.address (), buffer_bytes
, 0,
8559 buffer_bits
/ expr_elt_bits
)
8560 != (int) buffer_bytes
)
8563 /* Reencode the bytes as TYPE. */
8564 unsigned int type_npatterns
= type_sequence_bits
/ type_elt_bits
;
8565 return native_interpret_vector_part (type
, &buffer
[0], buffer
.length (),
8566 type_npatterns
, nelts_per_pattern
);
8569 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
8570 TYPE at compile-time. If we're unable to perform the conversion
8571 return NULL_TREE. */
8574 fold_view_convert_expr (tree type
, tree expr
)
8576 /* We support up to 512-bit values (for V8DFmode). */
8577 unsigned char buffer
[64];
8580 /* Check that the host and target are sane. */
8581 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8)
8584 if (VECTOR_TYPE_P (type
) && TREE_CODE (expr
) == VECTOR_CST
)
8585 if (tree res
= fold_view_convert_vector_encoding (type
, expr
))
8588 len
= native_encode_expr (expr
, buffer
, sizeof (buffer
));
8592 return native_interpret_expr (type
, buffer
, len
);
8595 /* Build an expression for the address of T. Folds away INDIRECT_REF
8596 to avoid confusing the gimplify process. */
8599 build_fold_addr_expr_with_type_loc (location_t loc
, tree t
, tree ptrtype
)
8601 /* The size of the object is not relevant when talking about its address. */
8602 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
8603 t
= TREE_OPERAND (t
, 0);
8605 if (TREE_CODE (t
) == INDIRECT_REF
)
8607 t
= TREE_OPERAND (t
, 0);
8609 if (TREE_TYPE (t
) != ptrtype
)
8610 t
= build1_loc (loc
, NOP_EXPR
, ptrtype
, t
);
8612 else if (TREE_CODE (t
) == MEM_REF
8613 && integer_zerop (TREE_OPERAND (t
, 1)))
8615 t
= TREE_OPERAND (t
, 0);
8617 if (TREE_TYPE (t
) != ptrtype
)
8618 t
= fold_convert_loc (loc
, ptrtype
, t
);
8620 else if (TREE_CODE (t
) == MEM_REF
8621 && TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
)
8622 return fold_binary (POINTER_PLUS_EXPR
, ptrtype
,
8623 TREE_OPERAND (t
, 0),
8624 convert_to_ptrofftype (TREE_OPERAND (t
, 1)));
8625 else if (TREE_CODE (t
) == VIEW_CONVERT_EXPR
)
8627 t
= build_fold_addr_expr_loc (loc
, TREE_OPERAND (t
, 0));
8629 if (TREE_TYPE (t
) != ptrtype
)
8630 t
= fold_convert_loc (loc
, ptrtype
, t
);
8633 t
= build1_loc (loc
, ADDR_EXPR
, ptrtype
, t
);
8638 /* Build an expression for the address of T. */
8641 build_fold_addr_expr_loc (location_t loc
, tree t
)
8643 tree ptrtype
= build_pointer_type (TREE_TYPE (t
));
8645 return build_fold_addr_expr_with_type_loc (loc
, t
, ptrtype
);
8648 /* Fold a unary expression of code CODE and type TYPE with operand
8649 OP0. Return the folded expression if folding is successful.
8650 Otherwise, return NULL_TREE. */
8653 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
8657 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
8659 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
8660 && TREE_CODE_LENGTH (code
) == 1);
8665 if (CONVERT_EXPR_CODE_P (code
)
8666 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
8668 /* Don't use STRIP_NOPS, because signedness of argument type
8670 STRIP_SIGN_NOPS (arg0
);
8674 /* Strip any conversions that don't change the mode. This
8675 is safe for every expression, except for a comparison
8676 expression because its signedness is derived from its
8679 Note that this is done as an internal manipulation within
8680 the constant folder, in order to find the simplest
8681 representation of the arguments so that their form can be
8682 studied. In any cases, the appropriate type conversions
8683 should be put back in the tree that will get out of the
8688 if (CONSTANT_CLASS_P (arg0
))
8690 tree tem
= const_unop (code
, type
, arg0
);
8693 if (TREE_TYPE (tem
) != type
)
8694 tem
= fold_convert_loc (loc
, type
, tem
);
8700 tem
= generic_simplify (loc
, code
, type
, op0
);
8704 if (TREE_CODE_CLASS (code
) == tcc_unary
)
8706 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
8707 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
8708 fold_build1_loc (loc
, code
, type
,
8709 fold_convert_loc (loc
, TREE_TYPE (op0
),
8710 TREE_OPERAND (arg0
, 1))));
8711 else if (TREE_CODE (arg0
) == COND_EXPR
)
8713 tree arg01
= TREE_OPERAND (arg0
, 1);
8714 tree arg02
= TREE_OPERAND (arg0
, 2);
8715 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
8716 arg01
= fold_build1_loc (loc
, code
, type
,
8717 fold_convert_loc (loc
,
8718 TREE_TYPE (op0
), arg01
));
8719 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
8720 arg02
= fold_build1_loc (loc
, code
, type
,
8721 fold_convert_loc (loc
,
8722 TREE_TYPE (op0
), arg02
));
8723 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
8726 /* If this was a conversion, and all we did was to move into
8727 inside the COND_EXPR, bring it back out. But leave it if
8728 it is a conversion from integer to integer and the
8729 result precision is no wider than a word since such a
8730 conversion is cheap and may be optimized away by combine,
8731 while it couldn't if it were outside the COND_EXPR. Then return
8732 so we don't get into an infinite recursion loop taking the
8733 conversion out and then back in. */
8735 if ((CONVERT_EXPR_CODE_P (code
)
8736 || code
== NON_LVALUE_EXPR
)
8737 && TREE_CODE (tem
) == COND_EXPR
8738 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
8739 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
8740 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
8741 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
8742 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
8743 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
8744 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
8746 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
8747 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
8748 || flag_syntax_only
))
8749 tem
= build1_loc (loc
, code
, type
,
8751 TREE_TYPE (TREE_OPERAND
8752 (TREE_OPERAND (tem
, 1), 0)),
8753 TREE_OPERAND (tem
, 0),
8754 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
8755 TREE_OPERAND (TREE_OPERAND (tem
, 2),
8763 case NON_LVALUE_EXPR
:
8764 if (!maybe_lvalue_p (op0
))
8765 return fold_convert_loc (loc
, type
, op0
);
8770 case FIX_TRUNC_EXPR
:
8771 if (COMPARISON_CLASS_P (op0
))
8773 /* If we have (type) (a CMP b) and type is an integral type, return
8774 new expression involving the new type. Canonicalize
8775 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
8777 Do not fold the result as that would not simplify further, also
8778 folding again results in recursions. */
8779 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
8780 return build2_loc (loc
, TREE_CODE (op0
), type
,
8781 TREE_OPERAND (op0
, 0),
8782 TREE_OPERAND (op0
, 1));
8783 else if (!INTEGRAL_TYPE_P (type
) && !VOID_TYPE_P (type
)
8784 && TREE_CODE (type
) != VECTOR_TYPE
)
8785 return build3_loc (loc
, COND_EXPR
, type
, op0
,
8786 constant_boolean_node (true, type
),
8787 constant_boolean_node (false, type
));
8790 /* Handle (T *)&A.B.C for A being of type T and B and C
8791 living at offset zero. This occurs frequently in
8792 C++ upcasting and then accessing the base. */
8793 if (TREE_CODE (op0
) == ADDR_EXPR
8794 && POINTER_TYPE_P (type
)
8795 && handled_component_p (TREE_OPERAND (op0
, 0)))
8797 poly_int64 bitsize
, bitpos
;
8800 int unsignedp
, reversep
, volatilep
;
8802 = get_inner_reference (TREE_OPERAND (op0
, 0), &bitsize
, &bitpos
,
8803 &offset
, &mode
, &unsignedp
, &reversep
,
8805 /* If the reference was to a (constant) zero offset, we can use
8806 the address of the base if it has the same base type
8807 as the result type and the pointer type is unqualified. */
8809 && known_eq (bitpos
, 0)
8810 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
8811 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
8812 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
8813 return fold_convert_loc (loc
, type
,
8814 build_fold_addr_expr_loc (loc
, base
));
8817 if (TREE_CODE (op0
) == MODIFY_EXPR
8818 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
8819 /* Detect assigning a bitfield. */
8820 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
8822 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
8824 /* Don't leave an assignment inside a conversion
8825 unless assigning a bitfield. */
8826 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
8827 /* First do the assignment, then return converted constant. */
8828 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
8829 TREE_NO_WARNING (tem
) = 1;
8830 TREE_USED (tem
) = 1;
8834 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8835 constants (if x has signed type, the sign bit cannot be set
8836 in c). This folds extension into the BIT_AND_EXPR.
8837 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8838 very likely don't have maximal range for their precision and this
8839 transformation effectively doesn't preserve non-maximal ranges. */
8840 if (TREE_CODE (type
) == INTEGER_TYPE
8841 && TREE_CODE (op0
) == BIT_AND_EXPR
8842 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
8844 tree and_expr
= op0
;
8845 tree and0
= TREE_OPERAND (and_expr
, 0);
8846 tree and1
= TREE_OPERAND (and_expr
, 1);
8849 if (TYPE_UNSIGNED (TREE_TYPE (and_expr
))
8850 || (TYPE_PRECISION (type
)
8851 <= TYPE_PRECISION (TREE_TYPE (and_expr
))))
8853 else if (TYPE_PRECISION (TREE_TYPE (and1
))
8854 <= HOST_BITS_PER_WIDE_INT
8855 && tree_fits_uhwi_p (and1
))
8857 unsigned HOST_WIDE_INT cst
;
8859 cst
= tree_to_uhwi (and1
);
8860 cst
&= HOST_WIDE_INT_M1U
8861 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
8862 change
= (cst
== 0);
8864 && !flag_syntax_only
8865 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0
)))
8868 tree uns
= unsigned_type_for (TREE_TYPE (and0
));
8869 and0
= fold_convert_loc (loc
, uns
, and0
);
8870 and1
= fold_convert_loc (loc
, uns
, and1
);
8875 tem
= force_fit_type (type
, wi::to_widest (and1
), 0,
8876 TREE_OVERFLOW (and1
));
8877 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
8878 fold_convert_loc (loc
, type
, and0
), tem
);
8882 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
8883 cast (T1)X will fold away. We assume that this happens when X itself
8885 if (POINTER_TYPE_P (type
)
8886 && TREE_CODE (arg0
) == POINTER_PLUS_EXPR
8887 && CONVERT_EXPR_P (TREE_OPERAND (arg0
, 0)))
8889 tree arg00
= TREE_OPERAND (arg0
, 0);
8890 tree arg01
= TREE_OPERAND (arg0
, 1);
8892 return fold_build_pointer_plus_loc
8893 (loc
, fold_convert_loc (loc
, type
, arg00
), arg01
);
8896 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8897 of the same precision, and X is an integer type not narrower than
8898 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8899 if (INTEGRAL_TYPE_P (type
)
8900 && TREE_CODE (op0
) == BIT_NOT_EXPR
8901 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
8902 && CONVERT_EXPR_P (TREE_OPERAND (op0
, 0))
8903 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
8905 tem
= TREE_OPERAND (TREE_OPERAND (op0
, 0), 0);
8906 if (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
8907 && TYPE_PRECISION (type
) <= TYPE_PRECISION (TREE_TYPE (tem
)))
8908 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
8909 fold_convert_loc (loc
, type
, tem
));
8912 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8913 type of X and Y (integer types only). */
8914 if (INTEGRAL_TYPE_P (type
)
8915 && TREE_CODE (op0
) == MULT_EXPR
8916 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
8917 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
8919 /* Be careful not to introduce new overflows. */
8921 if (TYPE_OVERFLOW_WRAPS (type
))
8924 mult_type
= unsigned_type_for (type
);
8926 if (TYPE_PRECISION (mult_type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
8928 tem
= fold_build2_loc (loc
, MULT_EXPR
, mult_type
,
8929 fold_convert_loc (loc
, mult_type
,
8930 TREE_OPERAND (op0
, 0)),
8931 fold_convert_loc (loc
, mult_type
,
8932 TREE_OPERAND (op0
, 1)));
8933 return fold_convert_loc (loc
, type
, tem
);
8939 case VIEW_CONVERT_EXPR
:
8940 if (TREE_CODE (op0
) == MEM_REF
)
8942 if (TYPE_ALIGN (TREE_TYPE (op0
)) != TYPE_ALIGN (type
))
8943 type
= build_aligned_type (type
, TYPE_ALIGN (TREE_TYPE (op0
)));
8944 tem
= fold_build2_loc (loc
, MEM_REF
, type
,
8945 TREE_OPERAND (op0
, 0), TREE_OPERAND (op0
, 1));
8946 REF_REVERSE_STORAGE_ORDER (tem
) = REF_REVERSE_STORAGE_ORDER (op0
);
8953 tem
= fold_negate_expr (loc
, arg0
);
8955 return fold_convert_loc (loc
, type
, tem
);
8959 /* Convert fabs((double)float) into (double)fabsf(float). */
8960 if (TREE_CODE (arg0
) == NOP_EXPR
8961 && TREE_CODE (type
) == REAL_TYPE
)
8963 tree targ0
= strip_float_extensions (arg0
);
8965 return fold_convert_loc (loc
, type
,
8966 fold_build1_loc (loc
, ABS_EXPR
,
8973 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8974 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
8975 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
8976 fold_convert_loc (loc
, type
,
8977 TREE_OPERAND (arg0
, 0)))))
8978 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, tem
,
8979 fold_convert_loc (loc
, type
,
8980 TREE_OPERAND (arg0
, 1)));
8981 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
8982 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
8983 fold_convert_loc (loc
, type
,
8984 TREE_OPERAND (arg0
, 1)))))
8985 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
8986 fold_convert_loc (loc
, type
,
8987 TREE_OPERAND (arg0
, 0)), tem
);
8991 case TRUTH_NOT_EXPR
:
8992 /* Note that the operand of this must be an int
8993 and its values must be 0 or 1.
8994 ("true" is a fixed value perhaps depending on the language,
8995 but we don't handle values other than 1 correctly yet.) */
8996 tem
= fold_truth_not_expr (loc
, arg0
);
8999 return fold_convert_loc (loc
, type
, tem
);
9002 /* Fold *&X to X if X is an lvalue. */
9003 if (TREE_CODE (op0
) == ADDR_EXPR
)
9005 tree op00
= TREE_OPERAND (op0
, 0);
9007 || TREE_CODE (op00
) == PARM_DECL
9008 || TREE_CODE (op00
) == RESULT_DECL
)
9009 && !TREE_READONLY (op00
))
9016 } /* switch (code) */
9020 /* If the operation was a conversion do _not_ mark a resulting constant
9021 with TREE_OVERFLOW if the original constant was not. These conversions
9022 have implementation defined behavior and retaining the TREE_OVERFLOW
9023 flag here would confuse later passes such as VRP. */
9025 fold_unary_ignore_overflow_loc (location_t loc
, enum tree_code code
,
9026 tree type
, tree op0
)
9028 tree res
= fold_unary_loc (loc
, code
, type
, op0
);
9030 && TREE_CODE (res
) == INTEGER_CST
9031 && TREE_CODE (op0
) == INTEGER_CST
9032 && CONVERT_EXPR_CODE_P (code
))
9033 TREE_OVERFLOW (res
) = TREE_OVERFLOW (op0
);
9038 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9039 operands OP0 and OP1. LOC is the location of the resulting expression.
9040 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
9041 Return the folded expression if folding is successful. Otherwise,
9042 return NULL_TREE. */
9044 fold_truth_andor (location_t loc
, enum tree_code code
, tree type
,
9045 tree arg0
, tree arg1
, tree op0
, tree op1
)
9049 /* We only do these simplifications if we are optimizing. */
9053 /* Check for things like (A || B) && (A || C). We can convert this
9054 to A || (B && C). Note that either operator can be any of the four
9055 truth and/or operations and the transformation will still be
9056 valid. Also note that we only care about order for the
9057 ANDIF and ORIF operators. If B contains side effects, this
9058 might change the truth-value of A. */
9059 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
9060 && (TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
9061 || TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
9062 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
9063 || TREE_CODE (arg0
) == TRUTH_OR_EXPR
)
9064 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0
, 1)))
9066 tree a00
= TREE_OPERAND (arg0
, 0);
9067 tree a01
= TREE_OPERAND (arg0
, 1);
9068 tree a10
= TREE_OPERAND (arg1
, 0);
9069 tree a11
= TREE_OPERAND (arg1
, 1);
9070 int commutative
= ((TREE_CODE (arg0
) == TRUTH_OR_EXPR
9071 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
)
9072 && (code
== TRUTH_AND_EXPR
9073 || code
== TRUTH_OR_EXPR
));
9075 if (operand_equal_p (a00
, a10
, 0))
9076 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
9077 fold_build2_loc (loc
, code
, type
, a01
, a11
));
9078 else if (commutative
&& operand_equal_p (a00
, a11
, 0))
9079 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
9080 fold_build2_loc (loc
, code
, type
, a01
, a10
));
9081 else if (commutative
&& operand_equal_p (a01
, a10
, 0))
9082 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a01
,
9083 fold_build2_loc (loc
, code
, type
, a00
, a11
));
9085 /* This case if tricky because we must either have commutative
9086 operators or else A10 must not have side-effects. */
9088 else if ((commutative
|| ! TREE_SIDE_EFFECTS (a10
))
9089 && operand_equal_p (a01
, a11
, 0))
9090 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
9091 fold_build2_loc (loc
, code
, type
, a00
, a10
),
9095 /* See if we can build a range comparison. */
9096 if ((tem
= fold_range_test (loc
, code
, type
, op0
, op1
)) != 0)
9099 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
)
9100 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
))
9102 tem
= merge_truthop_with_opposite_arm (loc
, arg0
, arg1
, true);
9104 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
9107 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ORIF_EXPR
)
9108 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ANDIF_EXPR
))
9110 tem
= merge_truthop_with_opposite_arm (loc
, arg1
, arg0
, false);
9112 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
9115 /* Check for the possibility of merging component references. If our
9116 lhs is another similar operation, try to merge its rhs with our
9117 rhs. Then try to merge our lhs and rhs. */
9118 if (TREE_CODE (arg0
) == code
9119 && (tem
= fold_truth_andor_1 (loc
, code
, type
,
9120 TREE_OPERAND (arg0
, 1), arg1
)) != 0)
9121 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
9123 if ((tem
= fold_truth_andor_1 (loc
, code
, type
, arg0
, arg1
)) != 0)
9126 bool logical_op_non_short_circuit
= LOGICAL_OP_NON_SHORT_CIRCUIT
;
9127 if (param_logical_op_non_short_circuit
!= -1)
9128 logical_op_non_short_circuit
9129 = param_logical_op_non_short_circuit
;
9130 if (logical_op_non_short_circuit
9131 && !flag_sanitize_coverage
9132 && (code
== TRUTH_AND_EXPR
9133 || code
== TRUTH_ANDIF_EXPR
9134 || code
== TRUTH_OR_EXPR
9135 || code
== TRUTH_ORIF_EXPR
))
9137 enum tree_code ncode
, icode
;
9139 ncode
= (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_AND_EXPR
)
9140 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
;
9141 icode
= ncode
== TRUTH_AND_EXPR
? TRUTH_ANDIF_EXPR
: TRUTH_ORIF_EXPR
;
9143 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9144 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9145 We don't want to pack more than two leafs to a non-IF AND/OR
9147 If tree-code of left-hand operand isn't an AND/OR-IF code and not
9148 equal to IF-CODE, then we don't want to add right-hand operand.
9149 If the inner right-hand side of left-hand operand has
9150 side-effects, or isn't simple, then we can't add to it,
9151 as otherwise we might destroy if-sequence. */
9152 if (TREE_CODE (arg0
) == icode
9153 && simple_operand_p_2 (arg1
)
9154 /* Needed for sequence points to handle trappings, and
9156 && simple_operand_p_2 (TREE_OPERAND (arg0
, 1)))
9158 tem
= fold_build2_loc (loc
, ncode
, type
, TREE_OPERAND (arg0
, 1),
9160 return fold_build2_loc (loc
, icode
, type
, TREE_OPERAND (arg0
, 0),
9163 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9164 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
9165 else if (TREE_CODE (arg1
) == icode
9166 && simple_operand_p_2 (arg0
)
9167 /* Needed for sequence points to handle trappings, and
9169 && simple_operand_p_2 (TREE_OPERAND (arg1
, 0)))
9171 tem
= fold_build2_loc (loc
, ncode
, type
,
9172 arg0
, TREE_OPERAND (arg1
, 0));
9173 return fold_build2_loc (loc
, icode
, type
, tem
,
9174 TREE_OPERAND (arg1
, 1));
9176 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
9178 For sequence point consistancy, we need to check for trapping,
9179 and side-effects. */
9180 else if (code
== icode
&& simple_operand_p_2 (arg0
)
9181 && simple_operand_p_2 (arg1
))
9182 return fold_build2_loc (loc
, ncode
, type
, arg0
, arg1
);
9188 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9189 by changing CODE to reduce the magnitude of constants involved in
9190 ARG0 of the comparison.
9191 Returns a canonicalized comparison tree if a simplification was
9192 possible, otherwise returns NULL_TREE.
9193 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9194 valid if signed overflow is undefined. */
9197 maybe_canonicalize_comparison_1 (location_t loc
, enum tree_code code
, tree type
,
9198 tree arg0
, tree arg1
,
9199 bool *strict_overflow_p
)
9201 enum tree_code code0
= TREE_CODE (arg0
);
9202 tree t
, cst0
= NULL_TREE
;
9205 /* Match A +- CST code arg1. We can change this only if overflow
9207 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
9208 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
)))
9209 /* In principle pointers also have undefined overflow behavior,
9210 but that causes problems elsewhere. */
9211 && !POINTER_TYPE_P (TREE_TYPE (arg0
))
9212 && (code0
== MINUS_EXPR
9213 || code0
== PLUS_EXPR
)
9214 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
))
9217 /* Identify the constant in arg0 and its sign. */
9218 cst0
= TREE_OPERAND (arg0
, 1);
9219 sgn0
= tree_int_cst_sgn (cst0
);
9221 /* Overflowed constants and zero will cause problems. */
9222 if (integer_zerop (cst0
)
9223 || TREE_OVERFLOW (cst0
))
9226 /* See if we can reduce the magnitude of the constant in
9227 arg0 by changing the comparison code. */
9228 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9230 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
9232 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9233 else if (code
== GT_EXPR
9234 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
9236 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9237 else if (code
== LE_EXPR
9238 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
9240 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9241 else if (code
== GE_EXPR
9242 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
9246 *strict_overflow_p
= true;
9248 /* Now build the constant reduced in magnitude. But not if that
9249 would produce one outside of its types range. */
9250 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0
))
9252 && TYPE_MIN_VALUE (TREE_TYPE (cst0
))
9253 && tree_int_cst_equal (cst0
, TYPE_MIN_VALUE (TREE_TYPE (cst0
))))
9255 && TYPE_MAX_VALUE (TREE_TYPE (cst0
))
9256 && tree_int_cst_equal (cst0
, TYPE_MAX_VALUE (TREE_TYPE (cst0
))))))
9259 t
= int_const_binop (sgn0
== -1 ? PLUS_EXPR
: MINUS_EXPR
,
9260 cst0
, build_int_cst (TREE_TYPE (cst0
), 1));
9261 t
= fold_build2_loc (loc
, code0
, TREE_TYPE (arg0
), TREE_OPERAND (arg0
, 0), t
);
9262 t
= fold_convert (TREE_TYPE (arg1
), t
);
9264 return fold_build2_loc (loc
, code
, type
, t
, arg1
);
9267 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9268 overflow further. Try to decrease the magnitude of constants involved
9269 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9270 and put sole constants at the second argument position.
9271 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9274 maybe_canonicalize_comparison (location_t loc
, enum tree_code code
, tree type
,
9275 tree arg0
, tree arg1
)
9278 bool strict_overflow_p
;
9279 const char * const warnmsg
= G_("assuming signed overflow does not occur "
9280 "when reducing constant in comparison");
9282 /* Try canonicalization by simplifying arg0. */
9283 strict_overflow_p
= false;
9284 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg0
, arg1
,
9285 &strict_overflow_p
);
9288 if (strict_overflow_p
)
9289 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
9293 /* Try canonicalization by simplifying arg1 using the swapped
9295 code
= swap_tree_comparison (code
);
9296 strict_overflow_p
= false;
9297 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg1
, arg0
,
9298 &strict_overflow_p
);
9299 if (t
&& strict_overflow_p
)
9300 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
9304 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9305 space. This is used to avoid issuing overflow warnings for
9306 expressions like &p->x which cannot wrap. */
9309 pointer_may_wrap_p (tree base
, tree offset
, poly_int64 bitpos
)
9311 if (!POINTER_TYPE_P (TREE_TYPE (base
)))
9314 if (maybe_lt (bitpos
, 0))
9317 poly_wide_int wi_offset
;
9318 int precision
= TYPE_PRECISION (TREE_TYPE (base
));
9319 if (offset
== NULL_TREE
)
9320 wi_offset
= wi::zero (precision
);
9321 else if (!poly_int_tree_p (offset
) || TREE_OVERFLOW (offset
))
9324 wi_offset
= wi::to_poly_wide (offset
);
9326 wi::overflow_type overflow
;
9327 poly_wide_int units
= wi::shwi (bits_to_bytes_round_down (bitpos
),
9329 poly_wide_int total
= wi::add (wi_offset
, units
, UNSIGNED
, &overflow
);
9333 poly_uint64 total_hwi
, size
;
9334 if (!total
.to_uhwi (&total_hwi
)
9335 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base
))),
9337 || known_eq (size
, 0U))
9340 if (known_le (total_hwi
, size
))
9343 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9345 if (TREE_CODE (base
) == ADDR_EXPR
9346 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base
, 0))),
9348 && maybe_ne (size
, 0U)
9349 && known_le (total_hwi
, size
))
9355 /* Return a positive integer when the symbol DECL is known to have
9356 a nonzero address, zero when it's known not to (e.g., it's a weak
9357 symbol), and a negative integer when the symbol is not yet in the
9358 symbol table and so whether or not its address is zero is unknown.
9359 For function local objects always return positive integer. */
9361 maybe_nonzero_address (tree decl
)
9363 if (DECL_P (decl
) && decl_in_symtab_p (decl
))
9364 if (struct symtab_node
*symbol
= symtab_node::get_create (decl
))
9365 return symbol
->nonzero_address ();
9367 /* Function local objects are never NULL. */
9369 && (DECL_CONTEXT (decl
)
9370 && TREE_CODE (DECL_CONTEXT (decl
)) == FUNCTION_DECL
9371 && auto_var_in_fn_p (decl
, DECL_CONTEXT (decl
))))
9377 /* Subroutine of fold_binary. This routine performs all of the
9378 transformations that are common to the equality/inequality
9379 operators (EQ_EXPR and NE_EXPR) and the ordering operators
9380 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
9381 fold_binary should call fold_binary. Fold a comparison with
9382 tree code CODE and type TYPE with operands OP0 and OP1. Return
9383 the folded comparison or NULL_TREE. */
9386 fold_comparison (location_t loc
, enum tree_code code
, tree type
,
9389 const bool equality_code
= (code
== EQ_EXPR
|| code
== NE_EXPR
);
9390 tree arg0
, arg1
, tem
;
9395 STRIP_SIGN_NOPS (arg0
);
9396 STRIP_SIGN_NOPS (arg1
);
9398 /* For comparisons of pointers we can decompose it to a compile time
9399 comparison of the base objects and the offsets into the object.
9400 This requires at least one operand being an ADDR_EXPR or a
9401 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9402 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
9403 && (TREE_CODE (arg0
) == ADDR_EXPR
9404 || TREE_CODE (arg1
) == ADDR_EXPR
9405 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
9406 || TREE_CODE (arg1
) == POINTER_PLUS_EXPR
))
9408 tree base0
, base1
, offset0
= NULL_TREE
, offset1
= NULL_TREE
;
9409 poly_int64 bitsize
, bitpos0
= 0, bitpos1
= 0;
9411 int volatilep
, reversep
, unsignedp
;
9412 bool indirect_base0
= false, indirect_base1
= false;
9414 /* Get base and offset for the access. Strip ADDR_EXPR for
9415 get_inner_reference, but put it back by stripping INDIRECT_REF
9416 off the base object if possible. indirect_baseN will be true
9417 if baseN is not an address but refers to the object itself. */
9419 if (TREE_CODE (arg0
) == ADDR_EXPR
)
9422 = get_inner_reference (TREE_OPERAND (arg0
, 0),
9423 &bitsize
, &bitpos0
, &offset0
, &mode
,
9424 &unsignedp
, &reversep
, &volatilep
);
9425 if (TREE_CODE (base0
) == INDIRECT_REF
)
9426 base0
= TREE_OPERAND (base0
, 0);
9428 indirect_base0
= true;
9430 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
9432 base0
= TREE_OPERAND (arg0
, 0);
9433 STRIP_SIGN_NOPS (base0
);
9434 if (TREE_CODE (base0
) == ADDR_EXPR
)
9437 = get_inner_reference (TREE_OPERAND (base0
, 0),
9438 &bitsize
, &bitpos0
, &offset0
, &mode
,
9439 &unsignedp
, &reversep
, &volatilep
);
9440 if (TREE_CODE (base0
) == INDIRECT_REF
)
9441 base0
= TREE_OPERAND (base0
, 0);
9443 indirect_base0
= true;
9445 if (offset0
== NULL_TREE
|| integer_zerop (offset0
))
9446 offset0
= TREE_OPERAND (arg0
, 1);
9448 offset0
= size_binop (PLUS_EXPR
, offset0
,
9449 TREE_OPERAND (arg0
, 1));
9450 if (poly_int_tree_p (offset0
))
9452 poly_offset_int tem
= wi::sext (wi::to_poly_offset (offset0
),
9453 TYPE_PRECISION (sizetype
));
9454 tem
<<= LOG2_BITS_PER_UNIT
;
9456 if (tem
.to_shwi (&bitpos0
))
9457 offset0
= NULL_TREE
;
9462 if (TREE_CODE (arg1
) == ADDR_EXPR
)
9465 = get_inner_reference (TREE_OPERAND (arg1
, 0),
9466 &bitsize
, &bitpos1
, &offset1
, &mode
,
9467 &unsignedp
, &reversep
, &volatilep
);
9468 if (TREE_CODE (base1
) == INDIRECT_REF
)
9469 base1
= TREE_OPERAND (base1
, 0);
9471 indirect_base1
= true;
9473 else if (TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
9475 base1
= TREE_OPERAND (arg1
, 0);
9476 STRIP_SIGN_NOPS (base1
);
9477 if (TREE_CODE (base1
) == ADDR_EXPR
)
9480 = get_inner_reference (TREE_OPERAND (base1
, 0),
9481 &bitsize
, &bitpos1
, &offset1
, &mode
,
9482 &unsignedp
, &reversep
, &volatilep
);
9483 if (TREE_CODE (base1
) == INDIRECT_REF
)
9484 base1
= TREE_OPERAND (base1
, 0);
9486 indirect_base1
= true;
9488 if (offset1
== NULL_TREE
|| integer_zerop (offset1
))
9489 offset1
= TREE_OPERAND (arg1
, 1);
9491 offset1
= size_binop (PLUS_EXPR
, offset1
,
9492 TREE_OPERAND (arg1
, 1));
9493 if (poly_int_tree_p (offset1
))
9495 poly_offset_int tem
= wi::sext (wi::to_poly_offset (offset1
),
9496 TYPE_PRECISION (sizetype
));
9497 tem
<<= LOG2_BITS_PER_UNIT
;
9499 if (tem
.to_shwi (&bitpos1
))
9500 offset1
= NULL_TREE
;
9504 /* If we have equivalent bases we might be able to simplify. */
9505 if (indirect_base0
== indirect_base1
9506 && operand_equal_p (base0
, base1
,
9507 indirect_base0
? OEP_ADDRESS_OF
: 0))
9509 /* We can fold this expression to a constant if the non-constant
9510 offset parts are equal. */
9511 if ((offset0
== offset1
9512 || (offset0
&& offset1
9513 && operand_equal_p (offset0
, offset1
, 0)))
9516 && (DECL_P (base0
) || CONSTANT_CLASS_P (base0
)))
9517 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))))
9520 && maybe_ne (bitpos0
, bitpos1
)
9521 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
9522 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
9523 fold_overflow_warning (("assuming pointer wraparound does not "
9524 "occur when comparing P +- C1 with "
9526 WARN_STRICT_OVERFLOW_CONDITIONAL
);
9531 if (known_eq (bitpos0
, bitpos1
))
9532 return constant_boolean_node (true, type
);
9533 if (known_ne (bitpos0
, bitpos1
))
9534 return constant_boolean_node (false, type
);
9537 if (known_ne (bitpos0
, bitpos1
))
9538 return constant_boolean_node (true, type
);
9539 if (known_eq (bitpos0
, bitpos1
))
9540 return constant_boolean_node (false, type
);
9543 if (known_lt (bitpos0
, bitpos1
))
9544 return constant_boolean_node (true, type
);
9545 if (known_ge (bitpos0
, bitpos1
))
9546 return constant_boolean_node (false, type
);
9549 if (known_le (bitpos0
, bitpos1
))
9550 return constant_boolean_node (true, type
);
9551 if (known_gt (bitpos0
, bitpos1
))
9552 return constant_boolean_node (false, type
);
9555 if (known_ge (bitpos0
, bitpos1
))
9556 return constant_boolean_node (true, type
);
9557 if (known_lt (bitpos0
, bitpos1
))
9558 return constant_boolean_node (false, type
);
9561 if (known_gt (bitpos0
, bitpos1
))
9562 return constant_boolean_node (true, type
);
9563 if (known_le (bitpos0
, bitpos1
))
9564 return constant_boolean_node (false, type
);
9569 /* We can simplify the comparison to a comparison of the variable
9570 offset parts if the constant offset parts are equal.
9571 Be careful to use signed sizetype here because otherwise we
9572 mess with array offsets in the wrong way. This is possible
9573 because pointer arithmetic is restricted to retain within an
9574 object and overflow on pointer differences is undefined as of
9575 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9576 else if (known_eq (bitpos0
, bitpos1
)
9579 && (DECL_P (base0
) || CONSTANT_CLASS_P (base0
)))
9580 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))))
9582 /* By converting to signed sizetype we cover middle-end pointer
9583 arithmetic which operates on unsigned pointer types of size
9584 type size and ARRAY_REF offsets which are properly sign or
9585 zero extended from their type in case it is narrower than
9587 if (offset0
== NULL_TREE
)
9588 offset0
= build_int_cst (ssizetype
, 0);
9590 offset0
= fold_convert_loc (loc
, ssizetype
, offset0
);
9591 if (offset1
== NULL_TREE
)
9592 offset1
= build_int_cst (ssizetype
, 0);
9594 offset1
= fold_convert_loc (loc
, ssizetype
, offset1
);
9597 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
9598 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
9599 fold_overflow_warning (("assuming pointer wraparound does not "
9600 "occur when comparing P +- C1 with "
9602 WARN_STRICT_OVERFLOW_COMPARISON
);
9604 return fold_build2_loc (loc
, code
, type
, offset0
, offset1
);
9607 /* For equal offsets we can simplify to a comparison of the
9609 else if (known_eq (bitpos0
, bitpos1
)
9611 ? base0
!= TREE_OPERAND (arg0
, 0) : base0
!= arg0
)
9613 ? base1
!= TREE_OPERAND (arg1
, 0) : base1
!= arg1
)
9614 && ((offset0
== offset1
)
9615 || (offset0
&& offset1
9616 && operand_equal_p (offset0
, offset1
, 0))))
9619 base0
= build_fold_addr_expr_loc (loc
, base0
);
9621 base1
= build_fold_addr_expr_loc (loc
, base1
);
9622 return fold_build2_loc (loc
, code
, type
, base0
, base1
);
9624 /* Comparison between an ordinary (non-weak) symbol and a null
9625 pointer can be eliminated since such symbols must have a non
9626 null address. In C, relational expressions between pointers
9627 to objects and null pointers are undefined. The results
9628 below follow the C++ rules with the additional property that
9629 every object pointer compares greater than a null pointer.
9631 else if (((DECL_P (base0
)
9632 && maybe_nonzero_address (base0
) > 0
9633 /* Avoid folding references to struct members at offset 0 to
9634 prevent tests like '&ptr->firstmember == 0' from getting
9635 eliminated. When ptr is null, although the -> expression
9636 is strictly speaking invalid, GCC retains it as a matter
9637 of QoI. See PR c/44555. */
9638 && (offset0
== NULL_TREE
&& known_ne (bitpos0
, 0)))
9639 || CONSTANT_CLASS_P (base0
))
9641 /* The caller guarantees that when one of the arguments is
9642 constant (i.e., null in this case) it is second. */
9643 && integer_zerop (arg1
))
9650 return constant_boolean_node (false, type
);
9654 return constant_boolean_node (true, type
);
9661 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9662 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9663 the resulting offset is smaller in absolute value than the
9664 original one and has the same sign. */
9665 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
9666 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
9667 && (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
9668 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9669 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
9670 && (TREE_CODE (arg1
) == PLUS_EXPR
|| TREE_CODE (arg1
) == MINUS_EXPR
)
9671 && (TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
9672 && !TREE_OVERFLOW (TREE_OPERAND (arg1
, 1))))
9674 tree const1
= TREE_OPERAND (arg0
, 1);
9675 tree const2
= TREE_OPERAND (arg1
, 1);
9676 tree variable1
= TREE_OPERAND (arg0
, 0);
9677 tree variable2
= TREE_OPERAND (arg1
, 0);
9679 const char * const warnmsg
= G_("assuming signed overflow does not "
9680 "occur when combining constants around "
9683 /* Put the constant on the side where it doesn't overflow and is
9684 of lower absolute value and of same sign than before. */
9685 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
9686 ? MINUS_EXPR
: PLUS_EXPR
,
9688 if (!TREE_OVERFLOW (cst
)
9689 && tree_int_cst_compare (const2
, cst
) == tree_int_cst_sgn (const2
)
9690 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const2
))
9692 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
9693 return fold_build2_loc (loc
, code
, type
,
9695 fold_build2_loc (loc
, TREE_CODE (arg1
),
9700 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
9701 ? MINUS_EXPR
: PLUS_EXPR
,
9703 if (!TREE_OVERFLOW (cst
)
9704 && tree_int_cst_compare (const1
, cst
) == tree_int_cst_sgn (const1
)
9705 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const1
))
9707 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
9708 return fold_build2_loc (loc
, code
, type
,
9709 fold_build2_loc (loc
, TREE_CODE (arg0
),
9716 tem
= maybe_canonicalize_comparison (loc
, code
, type
, arg0
, arg1
);
9720 /* If we are comparing an expression that just has comparisons
9721 of two integer values, arithmetic expressions of those comparisons,
9722 and constants, we can simplify it. There are only three cases
9723 to check: the two values can either be equal, the first can be
9724 greater, or the second can be greater. Fold the expression for
9725 those three values. Since each value must be 0 or 1, we have
9726 eight possibilities, each of which corresponds to the constant 0
9727 or 1 or one of the six possible comparisons.
9729 This handles common cases like (a > b) == 0 but also handles
9730 expressions like ((x > y) - (y > x)) > 0, which supposedly
9731 occur in macroized code. */
9733 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) != INTEGER_CST
)
9735 tree cval1
= 0, cval2
= 0;
9737 if (twoval_comparison_p (arg0
, &cval1
, &cval2
)
9738 /* Don't handle degenerate cases here; they should already
9739 have been handled anyway. */
9740 && cval1
!= 0 && cval2
!= 0
9741 && ! (TREE_CONSTANT (cval1
) && TREE_CONSTANT (cval2
))
9742 && TREE_TYPE (cval1
) == TREE_TYPE (cval2
)
9743 && INTEGRAL_TYPE_P (TREE_TYPE (cval1
))
9744 && TYPE_MAX_VALUE (TREE_TYPE (cval1
))
9745 && TYPE_MAX_VALUE (TREE_TYPE (cval2
))
9746 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1
)),
9747 TYPE_MAX_VALUE (TREE_TYPE (cval2
)), 0))
9749 tree maxval
= TYPE_MAX_VALUE (TREE_TYPE (cval1
));
9750 tree minval
= TYPE_MIN_VALUE (TREE_TYPE (cval1
));
9752 /* We can't just pass T to eval_subst in case cval1 or cval2
9753 was the same as ARG1. */
9756 = fold_build2_loc (loc
, code
, type
,
9757 eval_subst (loc
, arg0
, cval1
, maxval
,
9761 = fold_build2_loc (loc
, code
, type
,
9762 eval_subst (loc
, arg0
, cval1
, maxval
,
9766 = fold_build2_loc (loc
, code
, type
,
9767 eval_subst (loc
, arg0
, cval1
, minval
,
9771 /* All three of these results should be 0 or 1. Confirm they are.
9772 Then use those values to select the proper code to use. */
9774 if (TREE_CODE (high_result
) == INTEGER_CST
9775 && TREE_CODE (equal_result
) == INTEGER_CST
9776 && TREE_CODE (low_result
) == INTEGER_CST
)
9778 /* Make a 3-bit mask with the high-order bit being the
9779 value for `>', the next for '=', and the low for '<'. */
9780 switch ((integer_onep (high_result
) * 4)
9781 + (integer_onep (equal_result
) * 2)
9782 + integer_onep (low_result
))
9786 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
9807 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
9810 return fold_build2_loc (loc
, code
, type
, cval1
, cval2
);
9819 /* Subroutine of fold_binary. Optimize complex multiplications of the
9820 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9821 argument EXPR represents the expression "z" of type TYPE. */
9824 fold_mult_zconjz (location_t loc
, tree type
, tree expr
)
9826 tree itype
= TREE_TYPE (type
);
9827 tree rpart
, ipart
, tem
;
9829 if (TREE_CODE (expr
) == COMPLEX_EXPR
)
9831 rpart
= TREE_OPERAND (expr
, 0);
9832 ipart
= TREE_OPERAND (expr
, 1);
9834 else if (TREE_CODE (expr
) == COMPLEX_CST
)
9836 rpart
= TREE_REALPART (expr
);
9837 ipart
= TREE_IMAGPART (expr
);
9841 expr
= save_expr (expr
);
9842 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, itype
, expr
);
9843 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, itype
, expr
);
9846 rpart
= save_expr (rpart
);
9847 ipart
= save_expr (ipart
);
9848 tem
= fold_build2_loc (loc
, PLUS_EXPR
, itype
,
9849 fold_build2_loc (loc
, MULT_EXPR
, itype
, rpart
, rpart
),
9850 fold_build2_loc (loc
, MULT_EXPR
, itype
, ipart
, ipart
));
9851 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, tem
,
9852 build_zero_cst (itype
));
9856 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9857 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
9858 true if successful. */
9861 vec_cst_ctor_to_array (tree arg
, unsigned int nelts
, tree
*elts
)
9863 unsigned HOST_WIDE_INT i
, nunits
;
9865 if (TREE_CODE (arg
) == VECTOR_CST
9866 && VECTOR_CST_NELTS (arg
).is_constant (&nunits
))
9868 for (i
= 0; i
< nunits
; ++i
)
9869 elts
[i
] = VECTOR_CST_ELT (arg
, i
);
9871 else if (TREE_CODE (arg
) == CONSTRUCTOR
)
9873 constructor_elt
*elt
;
9875 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg
), i
, elt
)
9876 if (i
>= nelts
|| TREE_CODE (TREE_TYPE (elt
->value
)) == VECTOR_TYPE
)
9879 elts
[i
] = elt
->value
;
9883 for (; i
< nelts
; i
++)
9885 = fold_convert (TREE_TYPE (TREE_TYPE (arg
)), integer_zero_node
);
9889 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9890 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9891 NULL_TREE otherwise. */
9894 fold_vec_perm (tree type
, tree arg0
, tree arg1
, const vec_perm_indices
&sel
)
9897 unsigned HOST_WIDE_INT nelts
;
9898 bool need_ctor
= false;
9900 if (!sel
.length ().is_constant (&nelts
))
9902 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type
), nelts
)
9903 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)), nelts
)
9904 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1
)), nelts
));
9905 if (TREE_TYPE (TREE_TYPE (arg0
)) != TREE_TYPE (type
)
9906 || TREE_TYPE (TREE_TYPE (arg1
)) != TREE_TYPE (type
))
9909 tree
*in_elts
= XALLOCAVEC (tree
, nelts
* 2);
9910 if (!vec_cst_ctor_to_array (arg0
, nelts
, in_elts
)
9911 || !vec_cst_ctor_to_array (arg1
, nelts
, in_elts
+ nelts
))
9914 tree_vector_builder
out_elts (type
, nelts
, 1);
9915 for (i
= 0; i
< nelts
; i
++)
9917 HOST_WIDE_INT index
;
9918 if (!sel
[i
].is_constant (&index
))
9920 if (!CONSTANT_CLASS_P (in_elts
[index
]))
9922 out_elts
.quick_push (unshare_expr (in_elts
[index
]));
9927 vec
<constructor_elt
, va_gc
> *v
;
9928 vec_alloc (v
, nelts
);
9929 for (i
= 0; i
< nelts
; i
++)
9930 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, out_elts
[i
]);
9931 return build_constructor (type
, v
);
9934 return out_elts
.build ();
9937 /* Try to fold a pointer difference of type TYPE two address expressions of
9938 array references AREF0 and AREF1 using location LOC. Return a
9939 simplified expression for the difference or NULL_TREE. */
9942 fold_addr_of_array_ref_difference (location_t loc
, tree type
,
9943 tree aref0
, tree aref1
,
9944 bool use_pointer_diff
)
9946 tree base0
= TREE_OPERAND (aref0
, 0);
9947 tree base1
= TREE_OPERAND (aref1
, 0);
9948 tree base_offset
= build_int_cst (type
, 0);
9950 /* If the bases are array references as well, recurse. If the bases
9951 are pointer indirections compute the difference of the pointers.
9952 If the bases are equal, we are set. */
9953 if ((TREE_CODE (base0
) == ARRAY_REF
9954 && TREE_CODE (base1
) == ARRAY_REF
9956 = fold_addr_of_array_ref_difference (loc
, type
, base0
, base1
,
9958 || (INDIRECT_REF_P (base0
)
9959 && INDIRECT_REF_P (base1
)
9962 ? fold_binary_loc (loc
, POINTER_DIFF_EXPR
, type
,
9963 TREE_OPERAND (base0
, 0),
9964 TREE_OPERAND (base1
, 0))
9965 : fold_binary_loc (loc
, MINUS_EXPR
, type
,
9967 TREE_OPERAND (base0
, 0)),
9969 TREE_OPERAND (base1
, 0)))))
9970 || operand_equal_p (base0
, base1
, OEP_ADDRESS_OF
))
9972 tree op0
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref0
, 1));
9973 tree op1
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref1
, 1));
9974 tree esz
= fold_convert_loc (loc
, type
, array_ref_element_size (aref0
));
9975 tree diff
= fold_build2_loc (loc
, MINUS_EXPR
, type
, op0
, op1
);
9976 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
9978 fold_build2_loc (loc
, MULT_EXPR
, type
,
9984 /* If the real or vector real constant CST of type TYPE has an exact
9985 inverse, return it, else return NULL. */
9988 exact_inverse (tree type
, tree cst
)
9994 switch (TREE_CODE (cst
))
9997 r
= TREE_REAL_CST (cst
);
9999 if (exact_real_inverse (TYPE_MODE (type
), &r
))
10000 return build_real (type
, r
);
10006 unit_type
= TREE_TYPE (type
);
10007 mode
= TYPE_MODE (unit_type
);
10009 tree_vector_builder elts
;
10010 if (!elts
.new_unary_operation (type
, cst
, false))
10012 unsigned int count
= elts
.encoded_nelts ();
10013 for (unsigned int i
= 0; i
< count
; ++i
)
10015 r
= TREE_REAL_CST (VECTOR_CST_ELT (cst
, i
));
10016 if (!exact_real_inverse (mode
, &r
))
10018 elts
.quick_push (build_real (unit_type
, r
));
10021 return elts
.build ();
10029 /* Mask out the tz least significant bits of X of type TYPE where
10030 tz is the number of trailing zeroes in Y. */
10032 mask_with_tz (tree type
, const wide_int
&x
, const wide_int
&y
)
10034 int tz
= wi::ctz (y
);
10036 return wi::mask (tz
, true, TYPE_PRECISION (type
)) & x
;
10040 /* Return true when T is an address and is known to be nonzero.
10041 For floating point we further ensure that T is not denormal.
10042 Similar logic is present in nonzero_address in rtlanal.h.
10044 If the return value is based on the assumption that signed overflow
10045 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10046 change *STRICT_OVERFLOW_P. */
10049 tree_expr_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
10051 tree type
= TREE_TYPE (t
);
10052 enum tree_code code
;
10054 /* Doing something useful for floating point would need more work. */
10055 if (!INTEGRAL_TYPE_P (type
) && !POINTER_TYPE_P (type
))
10058 code
= TREE_CODE (t
);
10059 switch (TREE_CODE_CLASS (code
))
10062 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
10063 strict_overflow_p
);
10065 case tcc_comparison
:
10066 return tree_binary_nonzero_warnv_p (code
, type
,
10067 TREE_OPERAND (t
, 0),
10068 TREE_OPERAND (t
, 1),
10069 strict_overflow_p
);
10071 case tcc_declaration
:
10072 case tcc_reference
:
10073 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
10081 case TRUTH_NOT_EXPR
:
10082 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
10083 strict_overflow_p
);
10085 case TRUTH_AND_EXPR
:
10086 case TRUTH_OR_EXPR
:
10087 case TRUTH_XOR_EXPR
:
10088 return tree_binary_nonzero_warnv_p (code
, type
,
10089 TREE_OPERAND (t
, 0),
10090 TREE_OPERAND (t
, 1),
10091 strict_overflow_p
);
10098 case WITH_SIZE_EXPR
:
10100 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
10102 case COMPOUND_EXPR
:
10105 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
10106 strict_overflow_p
);
10109 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 0),
10110 strict_overflow_p
);
10114 tree fndecl
= get_callee_fndecl (t
);
10115 if (!fndecl
) return false;
10116 if (flag_delete_null_pointer_checks
&& !flag_check_new
10117 && DECL_IS_OPERATOR_NEW_P (fndecl
)
10118 && !TREE_NOTHROW (fndecl
))
10120 if (flag_delete_null_pointer_checks
10121 && lookup_attribute ("returns_nonnull",
10122 TYPE_ATTRIBUTES (TREE_TYPE (fndecl
))))
10124 return alloca_call_p (t
);
10133 /* Return true when T is an address and is known to be nonzero.
10134 Handle warnings about undefined signed overflow. */
10137 tree_expr_nonzero_p (tree t
)
10139 bool ret
, strict_overflow_p
;
10141 strict_overflow_p
= false;
10142 ret
= tree_expr_nonzero_warnv_p (t
, &strict_overflow_p
);
10143 if (strict_overflow_p
)
10144 fold_overflow_warning (("assuming signed overflow does not occur when "
10145 "determining that expression is always "
10147 WARN_STRICT_OVERFLOW_MISC
);
10151 /* Return true if T is known not to be equal to an integer W. */
10154 expr_not_equal_to (tree t
, const wide_int
&w
)
10156 wide_int min
, max
, nz
;
10157 value_range_kind rtype
;
10158 switch (TREE_CODE (t
))
10161 return wi::to_wide (t
) != w
;
10164 if (!INTEGRAL_TYPE_P (TREE_TYPE (t
)))
10166 rtype
= get_range_info (t
, &min
, &max
);
10167 if (rtype
== VR_RANGE
)
10169 if (wi::lt_p (max
, w
, TYPE_SIGN (TREE_TYPE (t
))))
10171 if (wi::lt_p (w
, min
, TYPE_SIGN (TREE_TYPE (t
))))
10174 else if (rtype
== VR_ANTI_RANGE
10175 && wi::le_p (min
, w
, TYPE_SIGN (TREE_TYPE (t
)))
10176 && wi::le_p (w
, max
, TYPE_SIGN (TREE_TYPE (t
))))
10178 /* If T has some known zero bits and W has any of those bits set,
10179 then T is known not to be equal to W. */
10180 if (wi::ne_p (wi::zext (wi::bit_and_not (w
, get_nonzero_bits (t
)),
10181 TYPE_PRECISION (TREE_TYPE (t
))), 0))
10190 /* Fold a binary expression of code CODE and type TYPE with operands
10191 OP0 and OP1. LOC is the location of the resulting expression.
10192 Return the folded expression if folding is successful. Otherwise,
10193 return NULL_TREE. */
10196 fold_binary_loc (location_t loc
, enum tree_code code
, tree type
,
10197 tree op0
, tree op1
)
10199 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
10200 tree arg0
, arg1
, tem
;
10201 tree t1
= NULL_TREE
;
10202 bool strict_overflow_p
;
10205 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
10206 && TREE_CODE_LENGTH (code
) == 2
10207 && op0
!= NULL_TREE
10208 && op1
!= NULL_TREE
);
10213 /* Strip any conversions that don't change the mode. This is
10214 safe for every expression, except for a comparison expression
10215 because its signedness is derived from its operands. So, in
10216 the latter case, only strip conversions that don't change the
10217 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10220 Note that this is done as an internal manipulation within the
10221 constant folder, in order to find the simplest representation
10222 of the arguments so that their form can be studied. In any
10223 cases, the appropriate type conversions should be put back in
10224 the tree that will get out of the constant folder. */
10226 if (kind
== tcc_comparison
|| code
== MIN_EXPR
|| code
== MAX_EXPR
)
10228 STRIP_SIGN_NOPS (arg0
);
10229 STRIP_SIGN_NOPS (arg1
);
10237 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10238 constant but we can't do arithmetic on them. */
10239 if (CONSTANT_CLASS_P (arg0
) && CONSTANT_CLASS_P (arg1
))
10241 tem
= const_binop (code
, type
, arg0
, arg1
);
10242 if (tem
!= NULL_TREE
)
10244 if (TREE_TYPE (tem
) != type
)
10245 tem
= fold_convert_loc (loc
, type
, tem
);
10250 /* If this is a commutative operation, and ARG0 is a constant, move it
10251 to ARG1 to reduce the number of tests below. */
10252 if (commutative_tree_code (code
)
10253 && tree_swap_operands_p (arg0
, arg1
))
10254 return fold_build2_loc (loc
, code
, type
, op1
, op0
);
10256 /* Likewise if this is a comparison, and ARG0 is a constant, move it
10257 to ARG1 to reduce the number of tests below. */
10258 if (kind
== tcc_comparison
10259 && tree_swap_operands_p (arg0
, arg1
))
10260 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, op1
, op0
);
10262 tem
= generic_simplify (loc
, code
, type
, op0
, op1
);
10266 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10268 First check for cases where an arithmetic operation is applied to a
10269 compound, conditional, or comparison operation. Push the arithmetic
10270 operation inside the compound or conditional to see if any folding
10271 can then be done. Convert comparison to conditional for this purpose.
10272 The also optimizes non-constant cases that used to be done in
10275 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10276 one of the operands is a comparison and the other is a comparison, a
10277 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10278 code below would make the expression more complex. Change it to a
10279 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10280 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10282 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
10283 || code
== EQ_EXPR
|| code
== NE_EXPR
)
10284 && !VECTOR_TYPE_P (TREE_TYPE (arg0
))
10285 && ((truth_value_p (TREE_CODE (arg0
))
10286 && (truth_value_p (TREE_CODE (arg1
))
10287 || (TREE_CODE (arg1
) == BIT_AND_EXPR
10288 && integer_onep (TREE_OPERAND (arg1
, 1)))))
10289 || (truth_value_p (TREE_CODE (arg1
))
10290 && (truth_value_p (TREE_CODE (arg0
))
10291 || (TREE_CODE (arg0
) == BIT_AND_EXPR
10292 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
10294 tem
= fold_build2_loc (loc
, code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
10295 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
10298 fold_convert_loc (loc
, boolean_type_node
, arg0
),
10299 fold_convert_loc (loc
, boolean_type_node
, arg1
));
10301 if (code
== EQ_EXPR
)
10302 tem
= invert_truthvalue_loc (loc
, tem
);
10304 return fold_convert_loc (loc
, type
, tem
);
10307 if (TREE_CODE_CLASS (code
) == tcc_binary
10308 || TREE_CODE_CLASS (code
) == tcc_comparison
)
10310 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
10312 tem
= fold_build2_loc (loc
, code
, type
,
10313 fold_convert_loc (loc
, TREE_TYPE (op0
),
10314 TREE_OPERAND (arg0
, 1)), op1
);
10315 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
10318 if (TREE_CODE (arg1
) == COMPOUND_EXPR
)
10320 tem
= fold_build2_loc (loc
, code
, type
, op0
,
10321 fold_convert_loc (loc
, TREE_TYPE (op1
),
10322 TREE_OPERAND (arg1
, 1)));
10323 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
10327 if (TREE_CODE (arg0
) == COND_EXPR
10328 || TREE_CODE (arg0
) == VEC_COND_EXPR
10329 || COMPARISON_CLASS_P (arg0
))
10331 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
10333 /*cond_first_p=*/1);
10334 if (tem
!= NULL_TREE
)
10338 if (TREE_CODE (arg1
) == COND_EXPR
10339 || TREE_CODE (arg1
) == VEC_COND_EXPR
10340 || COMPARISON_CLASS_P (arg1
))
10342 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
10344 /*cond_first_p=*/0);
10345 if (tem
!= NULL_TREE
)
10353 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10354 if (TREE_CODE (arg0
) == ADDR_EXPR
10355 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == MEM_REF
)
10357 tree iref
= TREE_OPERAND (arg0
, 0);
10358 return fold_build2 (MEM_REF
, type
,
10359 TREE_OPERAND (iref
, 0),
10360 int_const_binop (PLUS_EXPR
, arg1
,
10361 TREE_OPERAND (iref
, 1)));
10364 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10365 if (TREE_CODE (arg0
) == ADDR_EXPR
10366 && handled_component_p (TREE_OPERAND (arg0
, 0)))
10369 poly_int64 coffset
;
10370 base
= get_addr_base_and_unit_offset (TREE_OPERAND (arg0
, 0),
10374 return fold_build2 (MEM_REF
, type
,
10375 build1 (ADDR_EXPR
, TREE_TYPE (arg0
), base
),
10376 int_const_binop (PLUS_EXPR
, arg1
,
10377 size_int (coffset
)));
10382 case POINTER_PLUS_EXPR
:
10383 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10384 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
10385 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
10386 return fold_convert_loc (loc
, type
,
10387 fold_build2_loc (loc
, PLUS_EXPR
, sizetype
,
10388 fold_convert_loc (loc
, sizetype
,
10390 fold_convert_loc (loc
, sizetype
,
10396 if (INTEGRAL_TYPE_P (type
) || VECTOR_INTEGER_TYPE_P (type
))
10398 /* X + (X / CST) * -CST is X % CST. */
10399 if (TREE_CODE (arg1
) == MULT_EXPR
10400 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
10401 && operand_equal_p (arg0
,
10402 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0))
10404 tree cst0
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1);
10405 tree cst1
= TREE_OPERAND (arg1
, 1);
10406 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (cst1
),
10408 if (sum
&& integer_zerop (sum
))
10409 return fold_convert_loc (loc
, type
,
10410 fold_build2_loc (loc
, TRUNC_MOD_EXPR
,
10411 TREE_TYPE (arg0
), arg0
,
10416 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10417 one. Make sure the type is not saturating and has the signedness of
10418 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10419 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10420 if ((TREE_CODE (arg0
) == MULT_EXPR
10421 || TREE_CODE (arg1
) == MULT_EXPR
)
10422 && !TYPE_SATURATING (type
)
10423 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
10424 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
10425 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
10427 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
10432 if (! FLOAT_TYPE_P (type
))
10434 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10435 (plus (plus (mult) (mult)) (foo)) so that we can
10436 take advantage of the factoring cases below. */
10437 if (ANY_INTEGRAL_TYPE_P (type
)
10438 && TYPE_OVERFLOW_WRAPS (type
)
10439 && (((TREE_CODE (arg0
) == PLUS_EXPR
10440 || TREE_CODE (arg0
) == MINUS_EXPR
)
10441 && TREE_CODE (arg1
) == MULT_EXPR
)
10442 || ((TREE_CODE (arg1
) == PLUS_EXPR
10443 || TREE_CODE (arg1
) == MINUS_EXPR
)
10444 && TREE_CODE (arg0
) == MULT_EXPR
)))
10446 tree parg0
, parg1
, parg
, marg
;
10447 enum tree_code pcode
;
10449 if (TREE_CODE (arg1
) == MULT_EXPR
)
10450 parg
= arg0
, marg
= arg1
;
10452 parg
= arg1
, marg
= arg0
;
10453 pcode
= TREE_CODE (parg
);
10454 parg0
= TREE_OPERAND (parg
, 0);
10455 parg1
= TREE_OPERAND (parg
, 1);
10456 STRIP_NOPS (parg0
);
10457 STRIP_NOPS (parg1
);
10459 if (TREE_CODE (parg0
) == MULT_EXPR
10460 && TREE_CODE (parg1
) != MULT_EXPR
)
10461 return fold_build2_loc (loc
, pcode
, type
,
10462 fold_build2_loc (loc
, PLUS_EXPR
, type
,
10463 fold_convert_loc (loc
, type
,
10465 fold_convert_loc (loc
, type
,
10467 fold_convert_loc (loc
, type
, parg1
));
10468 if (TREE_CODE (parg0
) != MULT_EXPR
10469 && TREE_CODE (parg1
) == MULT_EXPR
)
10471 fold_build2_loc (loc
, PLUS_EXPR
, type
,
10472 fold_convert_loc (loc
, type
, parg0
),
10473 fold_build2_loc (loc
, pcode
, type
,
10474 fold_convert_loc (loc
, type
, marg
),
10475 fold_convert_loc (loc
, type
,
10481 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10482 to __complex__ ( x, y ). This is not the same for SNaNs or
10483 if signed zeros are involved. */
10484 if (!HONOR_SNANS (element_mode (arg0
))
10485 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
10486 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10488 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10489 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
10490 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
10491 bool arg0rz
= false, arg0iz
= false;
10492 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
10493 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
10495 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
10496 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
10497 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
10499 tree rp
= arg1r
? arg1r
10500 : build1 (REALPART_EXPR
, rtype
, arg1
);
10501 tree ip
= arg0i
? arg0i
10502 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
10503 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10505 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
10507 tree rp
= arg0r
? arg0r
10508 : build1 (REALPART_EXPR
, rtype
, arg0
);
10509 tree ip
= arg1i
? arg1i
10510 : build1 (IMAGPART_EXPR
, rtype
, arg1
);
10511 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10516 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10517 We associate floats only if the user has specified
10518 -fassociative-math. */
10519 if (flag_associative_math
10520 && TREE_CODE (arg1
) == PLUS_EXPR
10521 && TREE_CODE (arg0
) != MULT_EXPR
)
10523 tree tree10
= TREE_OPERAND (arg1
, 0);
10524 tree tree11
= TREE_OPERAND (arg1
, 1);
10525 if (TREE_CODE (tree11
) == MULT_EXPR
10526 && TREE_CODE (tree10
) == MULT_EXPR
)
10529 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, arg0
, tree10
);
10530 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree0
, tree11
);
10533 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10534 We associate floats only if the user has specified
10535 -fassociative-math. */
10536 if (flag_associative_math
10537 && TREE_CODE (arg0
) == PLUS_EXPR
10538 && TREE_CODE (arg1
) != MULT_EXPR
)
10540 tree tree00
= TREE_OPERAND (arg0
, 0);
10541 tree tree01
= TREE_OPERAND (arg0
, 1);
10542 if (TREE_CODE (tree01
) == MULT_EXPR
10543 && TREE_CODE (tree00
) == MULT_EXPR
)
10546 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, tree01
, arg1
);
10547 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree00
, tree0
);
10553 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10554 is a rotate of A by C1 bits. */
10555 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10556 is a rotate of A by B bits.
10557 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
10558 though in this case CODE must be | and not + or ^, otherwise
10559 it doesn't return A when B is 0. */
10561 enum tree_code code0
, code1
;
10563 code0
= TREE_CODE (arg0
);
10564 code1
= TREE_CODE (arg1
);
10565 if (((code0
== RSHIFT_EXPR
&& code1
== LSHIFT_EXPR
)
10566 || (code1
== RSHIFT_EXPR
&& code0
== LSHIFT_EXPR
))
10567 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10568 TREE_OPERAND (arg1
, 0), 0)
10569 && (rtype
= TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10570 TYPE_UNSIGNED (rtype
))
10571 /* Only create rotates in complete modes. Other cases are not
10572 expanded properly. */
10573 && (element_precision (rtype
)
10574 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype
))))
10576 tree tree01
, tree11
;
10577 tree orig_tree01
, orig_tree11
;
10578 enum tree_code code01
, code11
;
10580 tree01
= orig_tree01
= TREE_OPERAND (arg0
, 1);
10581 tree11
= orig_tree11
= TREE_OPERAND (arg1
, 1);
10582 STRIP_NOPS (tree01
);
10583 STRIP_NOPS (tree11
);
10584 code01
= TREE_CODE (tree01
);
10585 code11
= TREE_CODE (tree11
);
10586 if (code11
!= MINUS_EXPR
10587 && (code01
== MINUS_EXPR
|| code01
== BIT_AND_EXPR
))
10589 std::swap (code0
, code1
);
10590 std::swap (code01
, code11
);
10591 std::swap (tree01
, tree11
);
10592 std::swap (orig_tree01
, orig_tree11
);
10594 if (code01
== INTEGER_CST
10595 && code11
== INTEGER_CST
10596 && (wi::to_widest (tree01
) + wi::to_widest (tree11
)
10597 == element_precision (rtype
)))
10599 tem
= build2_loc (loc
, LROTATE_EXPR
,
10600 rtype
, TREE_OPERAND (arg0
, 0),
10601 code0
== LSHIFT_EXPR
10602 ? orig_tree01
: orig_tree11
);
10603 return fold_convert_loc (loc
, type
, tem
);
10605 else if (code11
== MINUS_EXPR
)
10607 tree tree110
, tree111
;
10608 tree110
= TREE_OPERAND (tree11
, 0);
10609 tree111
= TREE_OPERAND (tree11
, 1);
10610 STRIP_NOPS (tree110
);
10611 STRIP_NOPS (tree111
);
10612 if (TREE_CODE (tree110
) == INTEGER_CST
10613 && compare_tree_int (tree110
,
10614 element_precision (rtype
)) == 0
10615 && operand_equal_p (tree01
, tree111
, 0))
10617 tem
= build2_loc (loc
, (code0
== LSHIFT_EXPR
10618 ? LROTATE_EXPR
: RROTATE_EXPR
),
10619 rtype
, TREE_OPERAND (arg0
, 0),
10621 return fold_convert_loc (loc
, type
, tem
);
10624 else if (code
== BIT_IOR_EXPR
10625 && code11
== BIT_AND_EXPR
10626 && pow2p_hwi (element_precision (rtype
)))
10628 tree tree110
, tree111
;
10629 tree110
= TREE_OPERAND (tree11
, 0);
10630 tree111
= TREE_OPERAND (tree11
, 1);
10631 STRIP_NOPS (tree110
);
10632 STRIP_NOPS (tree111
);
10633 if (TREE_CODE (tree110
) == NEGATE_EXPR
10634 && TREE_CODE (tree111
) == INTEGER_CST
10635 && compare_tree_int (tree111
,
10636 element_precision (rtype
) - 1) == 0
10637 && operand_equal_p (tree01
, TREE_OPERAND (tree110
, 0), 0))
10639 tem
= build2_loc (loc
, (code0
== LSHIFT_EXPR
10640 ? LROTATE_EXPR
: RROTATE_EXPR
),
10641 rtype
, TREE_OPERAND (arg0
, 0),
10643 return fold_convert_loc (loc
, type
, tem
);
10650 /* In most languages, can't associate operations on floats through
10651 parentheses. Rather than remember where the parentheses were, we
10652 don't associate floats at all, unless the user has specified
10653 -fassociative-math.
10654 And, we need to make sure type is not saturating. */
10656 if ((! FLOAT_TYPE_P (type
) || flag_associative_math
)
10657 && !TYPE_SATURATING (type
))
10659 tree var0
, minus_var0
, con0
, minus_con0
, lit0
, minus_lit0
;
10660 tree var1
, minus_var1
, con1
, minus_con1
, lit1
, minus_lit1
;
10664 /* Split both trees into variables, constants, and literals. Then
10665 associate each group together, the constants with literals,
10666 then the result with variables. This increases the chances of
10667 literals being recombined later and of generating relocatable
10668 expressions for the sum of a constant and literal. */
10669 var0
= split_tree (arg0
, type
, code
,
10670 &minus_var0
, &con0
, &minus_con0
,
10671 &lit0
, &minus_lit0
, 0);
10672 var1
= split_tree (arg1
, type
, code
,
10673 &minus_var1
, &con1
, &minus_con1
,
10674 &lit1
, &minus_lit1
, code
== MINUS_EXPR
);
10676 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10677 if (code
== MINUS_EXPR
)
10680 /* With undefined overflow prefer doing association in a type
10681 which wraps on overflow, if that is one of the operand types. */
10682 if ((POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
))
10683 && !TYPE_OVERFLOW_WRAPS (type
))
10685 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
10686 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
10687 atype
= TREE_TYPE (arg0
);
10688 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
10689 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1
)))
10690 atype
= TREE_TYPE (arg1
);
10691 gcc_assert (TYPE_PRECISION (atype
) == TYPE_PRECISION (type
));
10694 /* With undefined overflow we can only associate constants with one
10695 variable, and constants whose association doesn't overflow. */
10696 if ((POINTER_TYPE_P (atype
) || INTEGRAL_TYPE_P (atype
))
10697 && !TYPE_OVERFLOW_WRAPS (atype
))
10699 if ((var0
&& var1
) || (minus_var0
&& minus_var1
))
10701 /* ??? If split_tree would handle NEGATE_EXPR we could
10702 simply reject these cases and the allowed cases would
10703 be the var0/minus_var1 ones. */
10704 tree tmp0
= var0
? var0
: minus_var0
;
10705 tree tmp1
= var1
? var1
: minus_var1
;
10706 bool one_neg
= false;
10708 if (TREE_CODE (tmp0
) == NEGATE_EXPR
)
10710 tmp0
= TREE_OPERAND (tmp0
, 0);
10711 one_neg
= !one_neg
;
10713 if (CONVERT_EXPR_P (tmp0
)
10714 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
10715 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
10716 <= TYPE_PRECISION (atype
)))
10717 tmp0
= TREE_OPERAND (tmp0
, 0);
10718 if (TREE_CODE (tmp1
) == NEGATE_EXPR
)
10720 tmp1
= TREE_OPERAND (tmp1
, 0);
10721 one_neg
= !one_neg
;
10723 if (CONVERT_EXPR_P (tmp1
)
10724 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
10725 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
10726 <= TYPE_PRECISION (atype
)))
10727 tmp1
= TREE_OPERAND (tmp1
, 0);
10728 /* The only case we can still associate with two variables
10729 is if they cancel out. */
10731 || !operand_equal_p (tmp0
, tmp1
, 0))
10734 else if ((var0
&& minus_var1
10735 && ! operand_equal_p (var0
, minus_var1
, 0))
10736 || (minus_var0
&& var1
10737 && ! operand_equal_p (minus_var0
, var1
, 0)))
10741 /* Only do something if we found more than two objects. Otherwise,
10742 nothing has changed and we risk infinite recursion. */
10744 && ((var0
!= 0) + (var1
!= 0)
10745 + (minus_var0
!= 0) + (minus_var1
!= 0)
10746 + (con0
!= 0) + (con1
!= 0)
10747 + (minus_con0
!= 0) + (minus_con1
!= 0)
10748 + (lit0
!= 0) + (lit1
!= 0)
10749 + (minus_lit0
!= 0) + (minus_lit1
!= 0)) > 2)
10751 var0
= associate_trees (loc
, var0
, var1
, code
, atype
);
10752 minus_var0
= associate_trees (loc
, minus_var0
, minus_var1
,
10754 con0
= associate_trees (loc
, con0
, con1
, code
, atype
);
10755 minus_con0
= associate_trees (loc
, minus_con0
, minus_con1
,
10757 lit0
= associate_trees (loc
, lit0
, lit1
, code
, atype
);
10758 minus_lit0
= associate_trees (loc
, minus_lit0
, minus_lit1
,
10761 if (minus_var0
&& var0
)
10763 var0
= associate_trees (loc
, var0
, minus_var0
,
10764 MINUS_EXPR
, atype
);
10767 if (minus_con0
&& con0
)
10769 con0
= associate_trees (loc
, con0
, minus_con0
,
10770 MINUS_EXPR
, atype
);
10774 /* Preserve the MINUS_EXPR if the negative part of the literal is
10775 greater than the positive part. Otherwise, the multiplicative
10776 folding code (i.e extract_muldiv) may be fooled in case
10777 unsigned constants are subtracted, like in the following
10778 example: ((X*2 + 4) - 8U)/2. */
10779 if (minus_lit0
&& lit0
)
10781 if (TREE_CODE (lit0
) == INTEGER_CST
10782 && TREE_CODE (minus_lit0
) == INTEGER_CST
10783 && tree_int_cst_lt (lit0
, minus_lit0
)
10784 /* But avoid ending up with only negated parts. */
10787 minus_lit0
= associate_trees (loc
, minus_lit0
, lit0
,
10788 MINUS_EXPR
, atype
);
10793 lit0
= associate_trees (loc
, lit0
, minus_lit0
,
10794 MINUS_EXPR
, atype
);
10799 /* Don't introduce overflows through reassociation. */
10800 if ((lit0
&& TREE_OVERFLOW_P (lit0
))
10801 || (minus_lit0
&& TREE_OVERFLOW_P (minus_lit0
)))
10804 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
10805 con0
= associate_trees (loc
, con0
, lit0
, code
, atype
);
10807 minus_con0
= associate_trees (loc
, minus_con0
, minus_lit0
,
10811 /* Eliminate minus_con0. */
10815 con0
= associate_trees (loc
, con0
, minus_con0
,
10816 MINUS_EXPR
, atype
);
10818 var0
= associate_trees (loc
, var0
, minus_con0
,
10819 MINUS_EXPR
, atype
);
10821 gcc_unreachable ();
10825 /* Eliminate minus_var0. */
10829 con0
= associate_trees (loc
, con0
, minus_var0
,
10830 MINUS_EXPR
, atype
);
10832 gcc_unreachable ();
10837 fold_convert_loc (loc
, type
, associate_trees (loc
, var0
, con0
,
10844 case POINTER_DIFF_EXPR
:
10846 /* Fold &a[i] - &a[j] to i-j. */
10847 if (TREE_CODE (arg0
) == ADDR_EXPR
10848 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ARRAY_REF
10849 && TREE_CODE (arg1
) == ADDR_EXPR
10850 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ARRAY_REF
)
10852 tree tem
= fold_addr_of_array_ref_difference (loc
, type
,
10853 TREE_OPERAND (arg0
, 0),
10854 TREE_OPERAND (arg1
, 0),
10856 == POINTER_DIFF_EXPR
);
10861 /* Further transformations are not for pointers. */
10862 if (code
== POINTER_DIFF_EXPR
)
10865 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10866 if (TREE_CODE (arg0
) == NEGATE_EXPR
10867 && negate_expr_p (op1
)
10868 /* If arg0 is e.g. unsigned int and type is int, then this could
10869 introduce UB, because if A is INT_MIN at runtime, the original
10870 expression can be well defined while the latter is not.
10872 && !(ANY_INTEGRAL_TYPE_P (type
)
10873 && TYPE_OVERFLOW_UNDEFINED (type
)
10874 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
10875 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))))
10876 return fold_build2_loc (loc
, MINUS_EXPR
, type
, negate_expr (op1
),
10877 fold_convert_loc (loc
, type
,
10878 TREE_OPERAND (arg0
, 0)));
10880 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10881 __complex__ ( x, -y ). This is not the same for SNaNs or if
10882 signed zeros are involved. */
10883 if (!HONOR_SNANS (element_mode (arg0
))
10884 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
10885 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10887 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10888 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
10889 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
10890 bool arg0rz
= false, arg0iz
= false;
10891 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
10892 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
10894 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
10895 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
10896 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
10898 tree rp
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10900 : build1 (REALPART_EXPR
, rtype
, arg1
));
10901 tree ip
= arg0i
? arg0i
10902 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
10903 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10905 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
10907 tree rp
= arg0r
? arg0r
10908 : build1 (REALPART_EXPR
, rtype
, arg0
);
10909 tree ip
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10911 : build1 (IMAGPART_EXPR
, rtype
, arg1
));
10912 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10917 /* A - B -> A + (-B) if B is easily negatable. */
10918 if (negate_expr_p (op1
)
10919 && ! TYPE_OVERFLOW_SANITIZED (type
)
10920 && ((FLOAT_TYPE_P (type
)
10921 /* Avoid this transformation if B is a positive REAL_CST. */
10922 && (TREE_CODE (op1
) != REAL_CST
10923 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1
))))
10924 || INTEGRAL_TYPE_P (type
)))
10925 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
10926 fold_convert_loc (loc
, type
, arg0
),
10927 negate_expr (op1
));
10929 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10930 one. Make sure the type is not saturating and has the signedness of
10931 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10932 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10933 if ((TREE_CODE (arg0
) == MULT_EXPR
10934 || TREE_CODE (arg1
) == MULT_EXPR
)
10935 && !TYPE_SATURATING (type
)
10936 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
10937 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
10938 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
10940 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
10948 if (! FLOAT_TYPE_P (type
))
10950 /* Transform x * -C into -x * C if x is easily negatable. */
10951 if (TREE_CODE (op1
) == INTEGER_CST
10952 && tree_int_cst_sgn (op1
) == -1
10953 && negate_expr_p (op0
)
10954 && negate_expr_p (op1
)
10955 && (tem
= negate_expr (op1
)) != op1
10956 && ! TREE_OVERFLOW (tem
))
10957 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10958 fold_convert_loc (loc
, type
,
10959 negate_expr (op0
)), tem
);
10961 strict_overflow_p
= false;
10962 if (TREE_CODE (arg1
) == INTEGER_CST
10963 && (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
10964 &strict_overflow_p
)) != 0)
10966 if (strict_overflow_p
)
10967 fold_overflow_warning (("assuming signed overflow does not "
10968 "occur when simplifying "
10970 WARN_STRICT_OVERFLOW_MISC
);
10971 return fold_convert_loc (loc
, type
, tem
);
10974 /* Optimize z * conj(z) for integer complex numbers. */
10975 if (TREE_CODE (arg0
) == CONJ_EXPR
10976 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10977 return fold_mult_zconjz (loc
, type
, arg1
);
10978 if (TREE_CODE (arg1
) == CONJ_EXPR
10979 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10980 return fold_mult_zconjz (loc
, type
, arg0
);
10984 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10985 This is not the same for NaNs or if signed zeros are
10987 if (!HONOR_NANS (arg0
)
10988 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
10989 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
10990 && TREE_CODE (arg1
) == COMPLEX_CST
10991 && real_zerop (TREE_REALPART (arg1
)))
10993 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10994 if (real_onep (TREE_IMAGPART (arg1
)))
10996 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
10997 negate_expr (fold_build1_loc (loc
, IMAGPART_EXPR
,
10999 fold_build1_loc (loc
, REALPART_EXPR
, rtype
, arg0
));
11000 else if (real_minus_onep (TREE_IMAGPART (arg1
)))
11002 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
11003 fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
),
11004 negate_expr (fold_build1_loc (loc
, REALPART_EXPR
,
11008 /* Optimize z * conj(z) for floating point complex numbers.
11009 Guarded by flag_unsafe_math_optimizations as non-finite
11010 imaginary components don't produce scalar results. */
11011 if (flag_unsafe_math_optimizations
11012 && TREE_CODE (arg0
) == CONJ_EXPR
11013 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11014 return fold_mult_zconjz (loc
, type
, arg1
);
11015 if (flag_unsafe_math_optimizations
11016 && TREE_CODE (arg1
) == CONJ_EXPR
11017 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11018 return fold_mult_zconjz (loc
, type
, arg0
);
11023 /* Canonicalize (X & C1) | C2. */
11024 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11025 && TREE_CODE (arg1
) == INTEGER_CST
11026 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11028 int width
= TYPE_PRECISION (type
), w
;
11029 wide_int c1
= wi::to_wide (TREE_OPERAND (arg0
, 1));
11030 wide_int c2
= wi::to_wide (arg1
);
11032 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11033 if ((c1
& c2
) == c1
)
11034 return omit_one_operand_loc (loc
, type
, arg1
,
11035 TREE_OPERAND (arg0
, 0));
11037 wide_int msk
= wi::mask (width
, false,
11038 TYPE_PRECISION (TREE_TYPE (arg1
)));
11040 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11041 if (wi::bit_and_not (msk
, c1
| c2
) == 0)
11043 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11044 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
, tem
, arg1
);
11047 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11048 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11049 mode which allows further optimizations. */
11052 wide_int c3
= wi::bit_and_not (c1
, c2
);
11053 for (w
= BITS_PER_UNIT
; w
<= width
; w
<<= 1)
11055 wide_int mask
= wi::mask (w
, false,
11056 TYPE_PRECISION (type
));
11057 if (((c1
| c2
) & mask
) == mask
11058 && wi::bit_and_not (c1
, mask
) == 0)
11067 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11068 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tem
,
11069 wide_int_to_tree (type
, c3
));
11070 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
, tem
, arg1
);
11074 /* See if this can be simplified into a rotate first. If that
11075 is unsuccessful continue in the association code. */
11079 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11080 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11081 && INTEGRAL_TYPE_P (type
)
11082 && integer_onep (TREE_OPERAND (arg0
, 1))
11083 && integer_onep (arg1
))
11084 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
,
11085 build_zero_cst (TREE_TYPE (arg0
)));
11087 /* See if this can be simplified into a rotate first. If that
11088 is unsuccessful continue in the association code. */
11092 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11093 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11094 && INTEGRAL_TYPE_P (type
)
11095 && integer_onep (TREE_OPERAND (arg0
, 1))
11096 && integer_onep (arg1
))
11099 tem
= TREE_OPERAND (arg0
, 0);
11100 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
11101 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
11103 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
11104 build_zero_cst (TREE_TYPE (tem
)));
11106 /* Fold ~X & 1 as (X & 1) == 0. */
11107 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11108 && INTEGRAL_TYPE_P (type
)
11109 && integer_onep (arg1
))
11112 tem
= TREE_OPERAND (arg0
, 0);
11113 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
11114 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
11116 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
11117 build_zero_cst (TREE_TYPE (tem
)));
11119 /* Fold !X & 1 as X == 0. */
11120 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11121 && integer_onep (arg1
))
11123 tem
= TREE_OPERAND (arg0
, 0);
11124 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem
,
11125 build_zero_cst (TREE_TYPE (tem
)));
11128 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11129 multiple of 1 << CST. */
11130 if (TREE_CODE (arg1
) == INTEGER_CST
)
11132 wi::tree_to_wide_ref cst1
= wi::to_wide (arg1
);
11133 wide_int ncst1
= -cst1
;
11134 if ((cst1
& ncst1
) == ncst1
11135 && multiple_of_p (type
, arg0
,
11136 wide_int_to_tree (TREE_TYPE (arg1
), ncst1
)))
11137 return fold_convert_loc (loc
, type
, arg0
);
11140 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11142 if (TREE_CODE (arg1
) == INTEGER_CST
11143 && TREE_CODE (arg0
) == MULT_EXPR
11144 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11146 wi::tree_to_wide_ref warg1
= wi::to_wide (arg1
);
11148 = mask_with_tz (type
, warg1
, wi::to_wide (TREE_OPERAND (arg0
, 1)));
11151 return omit_two_operands_loc (loc
, type
, build_zero_cst (type
),
11153 else if (masked
!= warg1
)
11155 /* Avoid the transform if arg1 is a mask of some
11156 mode which allows further optimizations. */
11157 int pop
= wi::popcount (warg1
);
11158 if (!(pop
>= BITS_PER_UNIT
11160 && wi::mask (pop
, false, warg1
.get_precision ()) == warg1
))
11161 return fold_build2_loc (loc
, code
, type
, op0
,
11162 wide_int_to_tree (type
, masked
));
11166 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11167 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) == NOP_EXPR
11168 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
11170 prec
= element_precision (TREE_TYPE (TREE_OPERAND (arg0
, 0)));
11172 wide_int mask
= wide_int::from (wi::to_wide (arg1
), prec
, UNSIGNED
);
11175 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11181 /* Don't touch a floating-point divide by zero unless the mode
11182 of the constant can represent infinity. */
11183 if (TREE_CODE (arg1
) == REAL_CST
11184 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
)))
11185 && real_zerop (arg1
))
11188 /* (-A) / (-B) -> A / B */
11189 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
11190 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11191 TREE_OPERAND (arg0
, 0),
11192 negate_expr (arg1
));
11193 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
11194 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11195 negate_expr (arg0
),
11196 TREE_OPERAND (arg1
, 0));
11199 case TRUNC_DIV_EXPR
:
11202 case FLOOR_DIV_EXPR
:
11203 /* Simplify A / (B << N) where A and B are positive and B is
11204 a power of 2, to A >> (N + log2(B)). */
11205 strict_overflow_p
= false;
11206 if (TREE_CODE (arg1
) == LSHIFT_EXPR
11207 && (TYPE_UNSIGNED (type
)
11208 || tree_expr_nonnegative_warnv_p (op0
, &strict_overflow_p
)))
11210 tree sval
= TREE_OPERAND (arg1
, 0);
11211 if (integer_pow2p (sval
) && tree_int_cst_sgn (sval
) > 0)
11213 tree sh_cnt
= TREE_OPERAND (arg1
, 1);
11214 tree pow2
= build_int_cst (TREE_TYPE (sh_cnt
),
11215 wi::exact_log2 (wi::to_wide (sval
)));
11217 if (strict_overflow_p
)
11218 fold_overflow_warning (("assuming signed overflow does not "
11219 "occur when simplifying A / (B << N)"),
11220 WARN_STRICT_OVERFLOW_MISC
);
11222 sh_cnt
= fold_build2_loc (loc
, PLUS_EXPR
, TREE_TYPE (sh_cnt
),
11224 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
11225 fold_convert_loc (loc
, type
, arg0
), sh_cnt
);
11231 case ROUND_DIV_EXPR
:
11232 case CEIL_DIV_EXPR
:
11233 case EXACT_DIV_EXPR
:
11234 if (integer_zerop (arg1
))
11237 /* Convert -A / -B to A / B when the type is signed and overflow is
11239 if ((!ANY_INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
11240 && TREE_CODE (op0
) == NEGATE_EXPR
11241 && negate_expr_p (op1
))
11243 if (ANY_INTEGRAL_TYPE_P (type
))
11244 fold_overflow_warning (("assuming signed overflow does not occur "
11245 "when distributing negation across "
11247 WARN_STRICT_OVERFLOW_MISC
);
11248 return fold_build2_loc (loc
, code
, type
,
11249 fold_convert_loc (loc
, type
,
11250 TREE_OPERAND (arg0
, 0)),
11251 negate_expr (op1
));
11253 if ((!ANY_INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
11254 && TREE_CODE (arg1
) == NEGATE_EXPR
11255 && negate_expr_p (op0
))
11257 if (ANY_INTEGRAL_TYPE_P (type
))
11258 fold_overflow_warning (("assuming signed overflow does not occur "
11259 "when distributing negation across "
11261 WARN_STRICT_OVERFLOW_MISC
);
11262 return fold_build2_loc (loc
, code
, type
,
11264 fold_convert_loc (loc
, type
,
11265 TREE_OPERAND (arg1
, 0)));
11268 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11269 operation, EXACT_DIV_EXPR.
11271 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11272 At one time others generated faster code, it's not clear if they do
11273 after the last round to changes to the DIV code in expmed.c. */
11274 if ((code
== CEIL_DIV_EXPR
|| code
== FLOOR_DIV_EXPR
)
11275 && multiple_of_p (type
, arg0
, arg1
))
11276 return fold_build2_loc (loc
, EXACT_DIV_EXPR
, type
,
11277 fold_convert (type
, arg0
),
11278 fold_convert (type
, arg1
));
11280 strict_overflow_p
= false;
11281 if (TREE_CODE (arg1
) == INTEGER_CST
11282 && (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
11283 &strict_overflow_p
)) != 0)
11285 if (strict_overflow_p
)
11286 fold_overflow_warning (("assuming signed overflow does not occur "
11287 "when simplifying division"),
11288 WARN_STRICT_OVERFLOW_MISC
);
11289 return fold_convert_loc (loc
, type
, tem
);
11294 case CEIL_MOD_EXPR
:
11295 case FLOOR_MOD_EXPR
:
11296 case ROUND_MOD_EXPR
:
11297 case TRUNC_MOD_EXPR
:
11298 strict_overflow_p
= false;
11299 if (TREE_CODE (arg1
) == INTEGER_CST
11300 && (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
11301 &strict_overflow_p
)) != 0)
11303 if (strict_overflow_p
)
11304 fold_overflow_warning (("assuming signed overflow does not occur "
11305 "when simplifying modulus"),
11306 WARN_STRICT_OVERFLOW_MISC
);
11307 return fold_convert_loc (loc
, type
, tem
);
11316 /* Since negative shift count is not well-defined,
11317 don't try to compute it in the compiler. */
11318 if (TREE_CODE (arg1
) == INTEGER_CST
&& tree_int_cst_sgn (arg1
) < 0)
11321 prec
= element_precision (type
);
11323 /* If we have a rotate of a bit operation with the rotate count and
11324 the second operand of the bit operation both constant,
11325 permute the two operations. */
11326 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
11327 && (TREE_CODE (arg0
) == BIT_AND_EXPR
11328 || TREE_CODE (arg0
) == BIT_IOR_EXPR
11329 || TREE_CODE (arg0
) == BIT_XOR_EXPR
)
11330 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11332 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11333 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11334 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
11335 fold_build2_loc (loc
, code
, type
,
11337 fold_build2_loc (loc
, code
, type
,
11341 /* Two consecutive rotates adding up to the some integer
11342 multiple of the precision of the type can be ignored. */
11343 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
11344 && TREE_CODE (arg0
) == RROTATE_EXPR
11345 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
11346 && wi::umod_trunc (wi::to_wide (arg1
)
11347 + wi::to_wide (TREE_OPERAND (arg0
, 1)),
11349 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11357 case TRUTH_ANDIF_EXPR
:
11358 /* Note that the operands of this must be ints
11359 and their values must be 0 or 1.
11360 ("true" is a fixed value perhaps depending on the language.) */
11361 /* If first arg is constant zero, return it. */
11362 if (integer_zerop (arg0
))
11363 return fold_convert_loc (loc
, type
, arg0
);
11365 case TRUTH_AND_EXPR
:
11366 /* If either arg is constant true, drop it. */
11367 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
11368 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
11369 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
)
11370 /* Preserve sequence points. */
11371 && (code
!= TRUTH_ANDIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
11372 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11373 /* If second arg is constant zero, result is zero, but first arg
11374 must be evaluated. */
11375 if (integer_zerop (arg1
))
11376 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
11377 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11378 case will be handled here. */
11379 if (integer_zerop (arg0
))
11380 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
11382 /* !X && X is always false. */
11383 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11384 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11385 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
11386 /* X && !X is always false. */
11387 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
11388 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11389 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11391 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11392 means A >= Y && A != MAX, but in this case we know that
11395 if (!TREE_SIDE_EFFECTS (arg0
)
11396 && !TREE_SIDE_EFFECTS (arg1
))
11398 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg0
, arg1
);
11399 if (tem
&& !operand_equal_p (tem
, arg0
, 0))
11400 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
11402 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg1
, arg0
);
11403 if (tem
&& !operand_equal_p (tem
, arg1
, 0))
11404 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
11407 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
11413 case TRUTH_ORIF_EXPR
:
11414 /* Note that the operands of this must be ints
11415 and their values must be 0 or true.
11416 ("true" is a fixed value perhaps depending on the language.) */
11417 /* If first arg is constant true, return it. */
11418 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
11419 return fold_convert_loc (loc
, type
, arg0
);
11421 case TRUTH_OR_EXPR
:
11422 /* If either arg is constant zero, drop it. */
11423 if (TREE_CODE (arg0
) == INTEGER_CST
&& integer_zerop (arg0
))
11424 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
11425 if (TREE_CODE (arg1
) == INTEGER_CST
&& integer_zerop (arg1
)
11426 /* Preserve sequence points. */
11427 && (code
!= TRUTH_ORIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
11428 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11429 /* If second arg is constant true, result is true, but we must
11430 evaluate first arg. */
11431 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
11432 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
11433 /* Likewise for first arg, but note this only occurs here for
11435 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
11436 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
11438 /* !X || X is always true. */
11439 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11440 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11441 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
11442 /* X || !X is always true. */
11443 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
11444 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11445 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
11447 /* (X && !Y) || (!X && Y) is X ^ Y */
11448 if (TREE_CODE (arg0
) == TRUTH_AND_EXPR
11449 && TREE_CODE (arg1
) == TRUTH_AND_EXPR
)
11451 tree a0
, a1
, l0
, l1
, n0
, n1
;
11453 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11454 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11456 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11457 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11459 n0
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l0
);
11460 n1
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l1
);
11462 if ((operand_equal_p (n0
, a0
, 0)
11463 && operand_equal_p (n1
, a1
, 0))
11464 || (operand_equal_p (n0
, a1
, 0)
11465 && operand_equal_p (n1
, a0
, 0)))
11466 return fold_build2_loc (loc
, TRUTH_XOR_EXPR
, type
, l0
, n1
);
11469 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
11475 case TRUTH_XOR_EXPR
:
11476 /* If the second arg is constant zero, drop it. */
11477 if (integer_zerop (arg1
))
11478 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11479 /* If the second arg is constant true, this is a logical inversion. */
11480 if (integer_onep (arg1
))
11482 tem
= invert_truthvalue_loc (loc
, arg0
);
11483 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
11485 /* Identical arguments cancel to zero. */
11486 if (operand_equal_p (arg0
, arg1
, 0))
11487 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11489 /* !X ^ X is always true. */
11490 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11491 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11492 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
11494 /* X ^ !X is always true. */
11495 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
11496 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11497 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
11506 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
11507 if (tem
!= NULL_TREE
)
11510 /* bool_var != 1 becomes !bool_var. */
11511 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
11512 && code
== NE_EXPR
)
11513 return fold_convert_loc (loc
, type
,
11514 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
11515 TREE_TYPE (arg0
), arg0
));
11517 /* bool_var == 0 becomes !bool_var. */
11518 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
11519 && code
== EQ_EXPR
)
11520 return fold_convert_loc (loc
, type
,
11521 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
11522 TREE_TYPE (arg0
), arg0
));
11524 /* !exp != 0 becomes !exp */
11525 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
&& integer_zerop (arg1
)
11526 && code
== NE_EXPR
)
11527 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11529 /* If this is an EQ or NE comparison with zero and ARG0 is
11530 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11531 two operations, but the latter can be done in one less insn
11532 on machines that have only two-operand insns or on which a
11533 constant cannot be the first operand. */
11534 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11535 && integer_zerop (arg1
))
11537 tree arg00
= TREE_OPERAND (arg0
, 0);
11538 tree arg01
= TREE_OPERAND (arg0
, 1);
11539 if (TREE_CODE (arg00
) == LSHIFT_EXPR
11540 && integer_onep (TREE_OPERAND (arg00
, 0)))
11542 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg00
),
11543 arg01
, TREE_OPERAND (arg00
, 1));
11544 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
11545 build_one_cst (TREE_TYPE (arg0
)));
11546 return fold_build2_loc (loc
, code
, type
,
11547 fold_convert_loc (loc
, TREE_TYPE (arg1
),
11550 else if (TREE_CODE (arg01
) == LSHIFT_EXPR
11551 && integer_onep (TREE_OPERAND (arg01
, 0)))
11553 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg01
),
11554 arg00
, TREE_OPERAND (arg01
, 1));
11555 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
11556 build_one_cst (TREE_TYPE (arg0
)));
11557 return fold_build2_loc (loc
, code
, type
,
11558 fold_convert_loc (loc
, TREE_TYPE (arg1
),
11563 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11564 C1 is a valid shift constant, and C2 is a power of two, i.e.
11566 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11567 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == RSHIFT_EXPR
11568 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1))
11570 && integer_pow2p (TREE_OPERAND (arg0
, 1))
11571 && integer_zerop (arg1
))
11573 tree itype
= TREE_TYPE (arg0
);
11574 tree arg001
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1);
11575 prec
= TYPE_PRECISION (itype
);
11577 /* Check for a valid shift count. */
11578 if (wi::ltu_p (wi::to_wide (arg001
), prec
))
11580 tree arg01
= TREE_OPERAND (arg0
, 1);
11581 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
11582 unsigned HOST_WIDE_INT log2
= tree_log2 (arg01
);
11583 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11584 can be rewritten as (X & (C2 << C1)) != 0. */
11585 if ((log2
+ TREE_INT_CST_LOW (arg001
)) < prec
)
11587 tem
= fold_build2_loc (loc
, LSHIFT_EXPR
, itype
, arg01
, arg001
);
11588 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, itype
, arg000
, tem
);
11589 return fold_build2_loc (loc
, code
, type
, tem
,
11590 fold_convert_loc (loc
, itype
, arg1
));
11592 /* Otherwise, for signed (arithmetic) shifts,
11593 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11594 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11595 else if (!TYPE_UNSIGNED (itype
))
11596 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
, type
,
11597 arg000
, build_int_cst (itype
, 0));
11598 /* Otherwise, of unsigned (logical) shifts,
11599 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11600 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11602 return omit_one_operand_loc (loc
, type
,
11603 code
== EQ_EXPR
? integer_one_node
11604 : integer_zero_node
,
11609 /* If this is a comparison of a field, we may be able to simplify it. */
11610 if ((TREE_CODE (arg0
) == COMPONENT_REF
11611 || TREE_CODE (arg0
) == BIT_FIELD_REF
)
11612 /* Handle the constant case even without -O
11613 to make sure the warnings are given. */
11614 && (optimize
|| TREE_CODE (arg1
) == INTEGER_CST
))
11616 t1
= optimize_bit_field_compare (loc
, code
, type
, arg0
, arg1
);
11621 /* Optimize comparisons of strlen vs zero to a compare of the
11622 first character of the string vs zero. To wit,
11623 strlen(ptr) == 0 => *ptr == 0
11624 strlen(ptr) != 0 => *ptr != 0
11625 Other cases should reduce to one of these two (or a constant)
11626 due to the return value of strlen being unsigned. */
11627 if (TREE_CODE (arg0
) == CALL_EXPR
&& integer_zerop (arg1
))
11629 tree fndecl
= get_callee_fndecl (arg0
);
11632 && fndecl_built_in_p (fndecl
, BUILT_IN_STRLEN
)
11633 && call_expr_nargs (arg0
) == 1
11634 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0
, 0)))
11638 = build_pointer_type (build_qualified_type (char_type_node
,
11640 tree ptr
= fold_convert_loc (loc
, ptrtype
,
11641 CALL_EXPR_ARG (arg0
, 0));
11642 tree iref
= build_fold_indirect_ref_loc (loc
, ptr
);
11643 return fold_build2_loc (loc
, code
, type
, iref
,
11644 build_int_cst (TREE_TYPE (iref
), 0));
11648 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11649 of X. Similarly fold (X >> C) == 0 into X >= 0. */
11650 if (TREE_CODE (arg0
) == RSHIFT_EXPR
11651 && integer_zerop (arg1
)
11652 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11654 tree arg00
= TREE_OPERAND (arg0
, 0);
11655 tree arg01
= TREE_OPERAND (arg0
, 1);
11656 tree itype
= TREE_TYPE (arg00
);
11657 if (wi::to_wide (arg01
) == element_precision (itype
) - 1)
11659 if (TYPE_UNSIGNED (itype
))
11661 itype
= signed_type_for (itype
);
11662 arg00
= fold_convert_loc (loc
, itype
, arg00
);
11664 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
11665 type
, arg00
, build_zero_cst (itype
));
11669 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11670 (X & C) == 0 when C is a single bit. */
11671 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11672 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_NOT_EXPR
11673 && integer_zerop (arg1
)
11674 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
11676 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
11677 TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0),
11678 TREE_OPERAND (arg0
, 1));
11679 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
,
11681 fold_convert_loc (loc
, TREE_TYPE (arg0
),
11685 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11686 constant C is a power of two, i.e. a single bit. */
11687 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11688 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
11689 && integer_zerop (arg1
)
11690 && integer_pow2p (TREE_OPERAND (arg0
, 1))
11691 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
11692 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
11694 tree arg00
= TREE_OPERAND (arg0
, 0);
11695 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
11696 arg00
, build_int_cst (TREE_TYPE (arg00
), 0));
11699 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11700 when is C is a power of two, i.e. a single bit. */
11701 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11702 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_XOR_EXPR
11703 && integer_zerop (arg1
)
11704 && integer_pow2p (TREE_OPERAND (arg0
, 1))
11705 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
11706 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
11708 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
11709 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg000
),
11710 arg000
, TREE_OPERAND (arg0
, 1));
11711 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
11712 tem
, build_int_cst (TREE_TYPE (tem
), 0));
11715 if (integer_zerop (arg1
)
11716 && tree_expr_nonzero_p (arg0
))
11718 tree res
= constant_boolean_node (code
==NE_EXPR
, type
);
11719 return omit_one_operand_loc (loc
, type
, res
, arg0
);
11722 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
11723 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11724 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
11726 tree arg00
= TREE_OPERAND (arg0
, 0);
11727 tree arg01
= TREE_OPERAND (arg0
, 1);
11728 tree arg10
= TREE_OPERAND (arg1
, 0);
11729 tree arg11
= TREE_OPERAND (arg1
, 1);
11730 tree itype
= TREE_TYPE (arg0
);
11732 if (operand_equal_p (arg01
, arg11
, 0))
11734 tem
= fold_convert_loc (loc
, itype
, arg10
);
11735 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg00
, tem
);
11736 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, itype
, tem
, arg01
);
11737 return fold_build2_loc (loc
, code
, type
, tem
,
11738 build_zero_cst (itype
));
11740 if (operand_equal_p (arg01
, arg10
, 0))
11742 tem
= fold_convert_loc (loc
, itype
, arg11
);
11743 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg00
, tem
);
11744 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, itype
, tem
, arg01
);
11745 return fold_build2_loc (loc
, code
, type
, tem
,
11746 build_zero_cst (itype
));
11748 if (operand_equal_p (arg00
, arg11
, 0))
11750 tem
= fold_convert_loc (loc
, itype
, arg10
);
11751 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg01
, tem
);
11752 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, itype
, tem
, arg00
);
11753 return fold_build2_loc (loc
, code
, type
, tem
,
11754 build_zero_cst (itype
));
11756 if (operand_equal_p (arg00
, arg10
, 0))
11758 tem
= fold_convert_loc (loc
, itype
, arg11
);
11759 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg01
, tem
);
11760 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, itype
, tem
, arg00
);
11761 return fold_build2_loc (loc
, code
, type
, tem
,
11762 build_zero_cst (itype
));
11766 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11767 && TREE_CODE (arg1
) == BIT_XOR_EXPR
)
11769 tree arg00
= TREE_OPERAND (arg0
, 0);
11770 tree arg01
= TREE_OPERAND (arg0
, 1);
11771 tree arg10
= TREE_OPERAND (arg1
, 0);
11772 tree arg11
= TREE_OPERAND (arg1
, 1);
11773 tree itype
= TREE_TYPE (arg0
);
11775 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11776 operand_equal_p guarantees no side-effects so we don't need
11777 to use omit_one_operand on Z. */
11778 if (operand_equal_p (arg01
, arg11
, 0))
11779 return fold_build2_loc (loc
, code
, type
, arg00
,
11780 fold_convert_loc (loc
, TREE_TYPE (arg00
),
11782 if (operand_equal_p (arg01
, arg10
, 0))
11783 return fold_build2_loc (loc
, code
, type
, arg00
,
11784 fold_convert_loc (loc
, TREE_TYPE (arg00
),
11786 if (operand_equal_p (arg00
, arg11
, 0))
11787 return fold_build2_loc (loc
, code
, type
, arg01
,
11788 fold_convert_loc (loc
, TREE_TYPE (arg01
),
11790 if (operand_equal_p (arg00
, arg10
, 0))
11791 return fold_build2_loc (loc
, code
, type
, arg01
,
11792 fold_convert_loc (loc
, TREE_TYPE (arg01
),
11795 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11796 if (TREE_CODE (arg01
) == INTEGER_CST
11797 && TREE_CODE (arg11
) == INTEGER_CST
)
11799 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg01
,
11800 fold_convert_loc (loc
, itype
, arg11
));
11801 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg00
, tem
);
11802 return fold_build2_loc (loc
, code
, type
, tem
,
11803 fold_convert_loc (loc
, itype
, arg10
));
11807 /* Attempt to simplify equality/inequality comparisons of complex
11808 values. Only lower the comparison if the result is known or
11809 can be simplified to a single scalar comparison. */
11810 if ((TREE_CODE (arg0
) == COMPLEX_EXPR
11811 || TREE_CODE (arg0
) == COMPLEX_CST
)
11812 && (TREE_CODE (arg1
) == COMPLEX_EXPR
11813 || TREE_CODE (arg1
) == COMPLEX_CST
))
11815 tree real0
, imag0
, real1
, imag1
;
11818 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
11820 real0
= TREE_OPERAND (arg0
, 0);
11821 imag0
= TREE_OPERAND (arg0
, 1);
11825 real0
= TREE_REALPART (arg0
);
11826 imag0
= TREE_IMAGPART (arg0
);
11829 if (TREE_CODE (arg1
) == COMPLEX_EXPR
)
11831 real1
= TREE_OPERAND (arg1
, 0);
11832 imag1
= TREE_OPERAND (arg1
, 1);
11836 real1
= TREE_REALPART (arg1
);
11837 imag1
= TREE_IMAGPART (arg1
);
11840 rcond
= fold_binary_loc (loc
, code
, type
, real0
, real1
);
11841 if (rcond
&& TREE_CODE (rcond
) == INTEGER_CST
)
11843 if (integer_zerop (rcond
))
11845 if (code
== EQ_EXPR
)
11846 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
11848 return fold_build2_loc (loc
, NE_EXPR
, type
, imag0
, imag1
);
11852 if (code
== NE_EXPR
)
11853 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
11855 return fold_build2_loc (loc
, EQ_EXPR
, type
, imag0
, imag1
);
11859 icond
= fold_binary_loc (loc
, code
, type
, imag0
, imag1
);
11860 if (icond
&& TREE_CODE (icond
) == INTEGER_CST
)
11862 if (integer_zerop (icond
))
11864 if (code
== EQ_EXPR
)
11865 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
11867 return fold_build2_loc (loc
, NE_EXPR
, type
, real0
, real1
);
11871 if (code
== NE_EXPR
)
11872 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
11874 return fold_build2_loc (loc
, EQ_EXPR
, type
, real0
, real1
);
11885 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
11886 if (tem
!= NULL_TREE
)
11889 /* Transform comparisons of the form X +- C CMP X. */
11890 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
11891 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11892 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
11893 && !HONOR_SNANS (arg0
))
11895 tree arg01
= TREE_OPERAND (arg0
, 1);
11896 enum tree_code code0
= TREE_CODE (arg0
);
11897 int is_positive
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01
)) ? -1 : 1;
11899 /* (X - c) > X becomes false. */
11900 if (code
== GT_EXPR
11901 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
11902 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
11903 return constant_boolean_node (0, type
);
11905 /* Likewise (X + c) < X becomes false. */
11906 if (code
== LT_EXPR
11907 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
11908 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
11909 return constant_boolean_node (0, type
);
11911 /* Convert (X - c) <= X to true. */
11912 if (!HONOR_NANS (arg1
)
11914 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
11915 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
11916 return constant_boolean_node (1, type
);
11918 /* Convert (X + c) >= X to true. */
11919 if (!HONOR_NANS (arg1
)
11921 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
11922 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
11923 return constant_boolean_node (1, type
);
11926 /* If we are comparing an ABS_EXPR with a constant, we can
11927 convert all the cases into explicit comparisons, but they may
11928 well not be faster than doing the ABS and one comparison.
11929 But ABS (X) <= C is a range comparison, which becomes a subtraction
11930 and a comparison, and is probably faster. */
11931 if (code
== LE_EXPR
11932 && TREE_CODE (arg1
) == INTEGER_CST
11933 && TREE_CODE (arg0
) == ABS_EXPR
11934 && ! TREE_SIDE_EFFECTS (arg0
)
11935 && (tem
= negate_expr (arg1
)) != 0
11936 && TREE_CODE (tem
) == INTEGER_CST
11937 && !TREE_OVERFLOW (tem
))
11938 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
11939 build2 (GE_EXPR
, type
,
11940 TREE_OPERAND (arg0
, 0), tem
),
11941 build2 (LE_EXPR
, type
,
11942 TREE_OPERAND (arg0
, 0), arg1
));
11944 /* Convert ABS_EXPR<x> >= 0 to true. */
11945 strict_overflow_p
= false;
11946 if (code
== GE_EXPR
11947 && (integer_zerop (arg1
)
11948 || (! HONOR_NANS (arg0
)
11949 && real_zerop (arg1
)))
11950 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
11952 if (strict_overflow_p
)
11953 fold_overflow_warning (("assuming signed overflow does not occur "
11954 "when simplifying comparison of "
11955 "absolute value and zero"),
11956 WARN_STRICT_OVERFLOW_CONDITIONAL
);
11957 return omit_one_operand_loc (loc
, type
,
11958 constant_boolean_node (true, type
),
11962 /* Convert ABS_EXPR<x> < 0 to false. */
11963 strict_overflow_p
= false;
11964 if (code
== LT_EXPR
11965 && (integer_zerop (arg1
) || real_zerop (arg1
))
11966 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
11968 if (strict_overflow_p
)
11969 fold_overflow_warning (("assuming signed overflow does not occur "
11970 "when simplifying comparison of "
11971 "absolute value and zero"),
11972 WARN_STRICT_OVERFLOW_CONDITIONAL
);
11973 return omit_one_operand_loc (loc
, type
,
11974 constant_boolean_node (false, type
),
11978 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11979 and similarly for >= into !=. */
11980 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
11981 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
11982 && TREE_CODE (arg1
) == LSHIFT_EXPR
11983 && integer_onep (TREE_OPERAND (arg1
, 0)))
11984 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
11985 build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
11986 TREE_OPERAND (arg1
, 1)),
11987 build_zero_cst (TREE_TYPE (arg0
)));
11989 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
11990 otherwise Y might be >= # of bits in X's type and thus e.g.
11991 (unsigned char) (1 << Y) for Y 15 might be 0.
11992 If the cast is widening, then 1 << Y should have unsigned type,
11993 otherwise if Y is number of bits in the signed shift type minus 1,
11994 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
11995 31 might be 0xffffffff80000000. */
11996 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
11997 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
11998 && CONVERT_EXPR_P (arg1
)
11999 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == LSHIFT_EXPR
12000 && (element_precision (TREE_TYPE (arg1
))
12001 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1
, 0))))
12002 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1
, 0)))
12003 || (element_precision (TREE_TYPE (arg1
))
12004 == element_precision (TREE_TYPE (TREE_OPERAND (arg1
, 0)))))
12005 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0)))
12007 tem
= build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
12008 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1));
12009 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
12010 fold_convert_loc (loc
, TREE_TYPE (arg0
), tem
),
12011 build_zero_cst (TREE_TYPE (arg0
)));
12016 case UNORDERED_EXPR
:
12024 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12026 tree targ0
= strip_float_extensions (arg0
);
12027 tree targ1
= strip_float_extensions (arg1
);
12028 tree newtype
= TREE_TYPE (targ0
);
12030 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
12031 newtype
= TREE_TYPE (targ1
);
12033 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
12034 return fold_build2_loc (loc
, code
, type
,
12035 fold_convert_loc (loc
, newtype
, targ0
),
12036 fold_convert_loc (loc
, newtype
, targ1
));
12041 case COMPOUND_EXPR
:
12042 /* When pedantic, a compound expression can be neither an lvalue
12043 nor an integer constant expression. */
12044 if (TREE_SIDE_EFFECTS (arg0
) || TREE_CONSTANT (arg1
))
12046 /* Don't let (0, 0) be null pointer constant. */
12047 tem
= integer_zerop (arg1
) ? build1 (NOP_EXPR
, type
, arg1
)
12048 : fold_convert_loc (loc
, type
, arg1
);
12049 return pedantic_non_lvalue_loc (loc
, tem
);
12052 /* An ASSERT_EXPR should never be passed to fold_binary. */
12053 gcc_unreachable ();
12057 } /* switch (code) */
12060 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
12061 ((A & N) + B) & M -> (A + B) & M
12062 Similarly if (N & M) == 0,
12063 ((A | N) + B) & M -> (A + B) & M
12064 and for - instead of + (or unary - instead of +)
12065 and/or ^ instead of |.
12066 If B is constant and (B & M) == 0, fold into A & M.
12068 This function is a helper for match.pd patterns. Return non-NULL
12069 type in which the simplified operation should be performed only
12070 if any optimization is possible.
12072 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
12073 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
12074 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
/* NOTE(review): this extraction is missing several original lines around and
   inside this function (e.g. the return type preceding the signature, the
   opening/closing braces, the early-return lines after the big bail-out
   condition, and some case labels/statements in the switch below).  Recover
   the missing text from the upstream fold-const.c before relying on it.  */
12077 fold_bit_and_mask (tree type
, tree arg1
, enum tree_code code
,
12078 tree arg00
, enum tree_code code00
, tree arg000
, tree arg001
,
12079 tree arg01
, enum tree_code code01
, tree arg010
, tree arg011
,
/* Preconditions: ARG1 (the mask M) is an INTEGER_CST and CODE is one of
   PLUS_EXPR, MINUS_EXPR or NEGATE_EXPR.  */
12082 gcc_assert (TREE_CODE (arg1
) == INTEGER_CST
);
12083 gcc_assert (code
== PLUS_EXPR
|| code
== MINUS_EXPR
|| code
== NEGATE_EXPR
);
12084 wi::tree_to_wide_ref cst1
= wi::to_wide (arg1
);
/* Bail out (the leading condition line and the return itself are among the
   missing lines) unless M has the form 2^cst - 1 -- i.e. (M & (M + 1)) == 0 --
   TYPE is integral, overflow in TYPE either wraps or TYPE is a plain
   INTEGER_TYPE, and M fits below TYPE's max value.  */
12086 || (cst1
& (cst1
+ 1)) != 0
12087 || !INTEGRAL_TYPE_P (type
)
12088 || (!TYPE_OVERFLOW_WRAPS (type
)
&& TREE_CODE (type
) != INTEGER_TYPE
)
12090 || (wi::max_value (type
) & cst1
) != cst1
)
/* Pack the per-operand bitop codes and their operands so the loop below can
   index them with WHICH (0 = left operand, 1 = right operand).  */
12093 enum tree_code codes
[2] = { code00
, code01
};
12094 tree arg0xx
[4] = { arg000
, arg001
, arg010
, arg011
};
12098 /* Now we know that arg0 is (C + D) or (C - D) or -C and
12099 arg1 (M) is == (1LL << cst) - 1.
12100 Store C into PMOP[0] and D into PMOP[1]. */
/* Process both operands for + / -, only operand 0 for unary negate:
   WHICH starts at 1 exactly when CODE != NEGATE_EXPR.  */
12103 which
= code
!= NEGATE_EXPR
;
12105 for (; which
>= 0; which
--)
12106 switch (codes
[which
])
/* NOTE(review): the case labels for this switch arm (presumably the
   BIT_AND/BIT_IOR/BIT_XOR cases) are among the missing lines.  */
12111 gcc_assert (TREE_CODE (arg0xx
[2 * which
+ 1]) == INTEGER_CST
);
/* CST0 is the bitop's constant N restricted to the mask M.  */
12112 cst0
= wi::to_wide (arg0xx
[2 * which
+ 1]) & cst1
;
12113 if (codes
[which
] == BIT_AND_EXPR
)
12118 else if (cst0
!= 0)
12120 /* If C or D is of the form (A & N) where
12121 (N & M) == M, or of the form (A | N) or
12122 (A ^ N) where (N & M) == 0, replace it with A. */
12123 pmop
[which
] = arg0xx
[2 * which
];
12126 if (TREE_CODE (pmop
[which
]) != INTEGER_CST
)
12128 /* If C or D is a N where (N & M) == 0, it can be
12129 omitted (replaced with 0). */
12130 if ((code
== PLUS_EXPR
12131 || (code
== MINUS_EXPR
&& which
== 0))
12132 && (cst1
& wi::to_wide (pmop
[which
])) == 0)
12133 pmop
[which
] = build_int_cst (type
, 0);
12134 /* Similarly, with C - N where (-N & M) == 0. */
12135 if (code
== MINUS_EXPR
12137 && (cst1
& -wi::to_wide (pmop
[which
])) == 0)
12138 pmop
[which
] = build_int_cst (type
, 0);
/* Any other bitop code reaching here would be a caller bug.  */
12141 gcc_unreachable ();
12144 /* Only build anything new if we optimized one or both arguments above. */
/* Neither operand changed => no optimization; the NULL_TREE return that
   follows is among the missing lines.  */
12145 if (pmop
[0] == arg00
&& pmop
[1] == arg01
)
/* If TYPE wraps on overflow the arithmetic may stay in TYPE (the return of
   TYPE itself is among the missing lines); otherwise fall through to...  */
12148 if (TYPE_OVERFLOW_WRAPS (type
))
/* ...performing the simplified operation in the corresponding unsigned
   type, where wrap-around is well defined.  */
12151 return unsigned_type_for (type
);
12154 /* Used by contains_label_[p1]. */
/* Shared state threaded through the contains_label_1 walk_tree callback.
   PSET is the walker's visited-node set; INSIDE_SWITCH_P records whether
   the walk is currently inside a SWITCH_EXPR body (so CASE_LABEL_EXPRs
   belonging to that switch are not reported as escaping labels).
   NOTE(review): the struct's opening '{' and closing '};' are among the
   lines missing from this extraction.  */
12156 struct contains_label_data
12158 hash_set
<tree
> *pset
;
12159 bool inside_switch_p
;
12162 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12163 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12164 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
/* NOTE(review): several lines of this function are missing from this
   extraction -- the 'static tree' return type, braces, the LABEL_EXPR /
   SWITCH_EXPR / GOTO_EXPR case labels, and the 'return *tp;' /
   'return NULL_TREE;' statements.  Recover them from upstream fold-const.c.  */
12167 contains_label_1 (tree
*tp
, int *walk_subtrees
, void *data
)
/* D carries the visited set and the inside-a-switch flag (see
   contains_label_data above).  */
12169 contains_label_data
*d
= (contains_label_data
*) data
;
12170 switch (TREE_CODE (*tp
))
/* A CASE_LABEL_EXPR only "escapes" when we are not inside the switch
   that owns it.  */
12175 case CASE_LABEL_EXPR
:
12176 if (!d
->inside_switch_p
)
/* SWITCH_EXPR handling (its case label is among the missing lines):
   walk the condition with inside_switch_p still false...  */
12181 if (!d
->inside_switch_p
)
12183 if (walk_tree (&SWITCH_COND (*tp
), contains_label_1
, data
, d
->pset
))
/* ...then walk the body with inside_switch_p set, so this switch's own
   case labels are not reported...  */
12185 d
->inside_switch_p
= true;
12186 if (walk_tree (&SWITCH_BODY (*tp
), contains_label_1
, data
, d
->pset
))
/* ...and restore the flag; the subtrees were walked manually, so tell
   walk_tree not to descend again.  */
12188 d
->inside_switch_p
= false;
12189 *walk_subtrees
= 0;
/* Per the header comment, GOTO_EXPR subtrees are not checked either --
   presumably this second '*walk_subtrees = 0' belongs to that (missing)
   case label.  */
12194 *walk_subtrees
= 0;
12202 /* Return whether the sub-tree ST contains a label which is accessible from
12203 outside the sub-tree. */
/* NOTE(review): the return-type line (presumably 'static bool') and the
   function braces are among the lines missing from this extraction.  */
12206 contains_label_p (tree st
)
/* Fresh visited set for the walk, and walk state starting outside any
   SWITCH_EXPR.  */
12208 hash_set
<tree
> pset
;
12209 contains_label_data data
= { &pset
, false };
/* walk_tree returns the offending label tree (non-NULL) if
   contains_label_1 found one, NULL_TREE otherwise.  */
12210 return walk_tree (&st
, contains_label_1
, &data
, &pset
) != NULL_TREE
;
12213 /* Fold a ternary expression of code CODE and type TYPE with operands
12214 OP0, OP1, and OP2. Return the folded expression if folding is
12215 successful. Otherwise, return NULL_TREE. */
12218 fold_ternary_loc (location_t loc
, enum tree_code code
, tree type
,
12219 tree op0
, tree op1
, tree op2
)
12222 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
, arg2
= NULL_TREE
;
12223 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
12225 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
12226 && TREE_CODE_LENGTH (code
) == 3);
12228 /* If this is a commutative operation, and OP0 is a constant, move it
12229 to OP1 to reduce the number of tests below. */
12230 if (commutative_ternary_tree_code (code
)
12231 && tree_swap_operands_p (op0
, op1
))
12232 return fold_build3_loc (loc
, code
, type
, op1
, op0
, op2
);
12234 tem
= generic_simplify (loc
, code
, type
, op0
, op1
, op2
);
12238 /* Strip any conversions that don't change the mode. This is safe
12239 for every expression, except for a comparison expression because
12240 its signedness is derived from its operands. So, in the latter
12241 case, only strip conversions that don't change the signedness.
12243 Note that this is done as an internal manipulation within the
12244 constant folder, in order to find the simplest representation of
12245 the arguments so that their form can be studied. In any cases,
12246 the appropriate type conversions should be put back in the tree
12247 that will get out of the constant folder. */
12268 case COMPONENT_REF
:
12269 if (TREE_CODE (arg0
) == CONSTRUCTOR
12270 && ! type_contains_placeholder_p (TREE_TYPE (arg0
)))
12272 unsigned HOST_WIDE_INT idx
;
12274 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0
), idx
, field
, value
)
12281 case VEC_COND_EXPR
:
12282 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12283 so all simple results must be passed through pedantic_non_lvalue. */
12284 if (TREE_CODE (arg0
) == INTEGER_CST
)
12286 tree unused_op
= integer_zerop (arg0
) ? op1
: op2
;
12287 tem
= integer_zerop (arg0
) ? op2
: op1
;
12288 /* Only optimize constant conditions when the selected branch
12289 has the same type as the COND_EXPR. This avoids optimizing
12290 away "c ? x : throw", where the throw has a void type.
12291 Avoid throwing away that operand which contains label. */
12292 if ((!TREE_SIDE_EFFECTS (unused_op
)
12293 || !contains_label_p (unused_op
))
12294 && (! VOID_TYPE_P (TREE_TYPE (tem
))
12295 || VOID_TYPE_P (type
)))
12296 return pedantic_non_lvalue_loc (loc
, tem
);
12299 else if (TREE_CODE (arg0
) == VECTOR_CST
)
12301 unsigned HOST_WIDE_INT nelts
;
12302 if ((TREE_CODE (arg1
) == VECTOR_CST
12303 || TREE_CODE (arg1
) == CONSTRUCTOR
)
12304 && (TREE_CODE (arg2
) == VECTOR_CST
12305 || TREE_CODE (arg2
) == CONSTRUCTOR
)
12306 && TYPE_VECTOR_SUBPARTS (type
).is_constant (&nelts
))
12308 vec_perm_builder
sel (nelts
, nelts
, 1);
12309 for (unsigned int i
= 0; i
< nelts
; i
++)
12311 tree val
= VECTOR_CST_ELT (arg0
, i
);
12312 if (integer_all_onesp (val
))
12313 sel
.quick_push (i
);
12314 else if (integer_zerop (val
))
12315 sel
.quick_push (nelts
+ i
);
12316 else /* Currently unreachable. */
12319 vec_perm_indices
indices (sel
, 2, nelts
);
12320 tree t
= fold_vec_perm (type
, arg1
, arg2
, indices
);
12321 if (t
!= NULL_TREE
)
12326 /* If we have A op B ? A : C, we may be able to convert this to a
12327 simpler expression, depending on the operation and the values
12328 of B and C. Signed zeros prevent all of these transformations,
12329 for reasons given above each one.
12331 Also try swapping the arguments and inverting the conditional. */
12332 if (COMPARISON_CLASS_P (arg0
)
12333 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0), op1
)
12334 && !HONOR_SIGNED_ZEROS (element_mode (op1
)))
12336 tem
= fold_cond_expr_with_comparison (loc
, type
, arg0
, op1
, op2
);
12341 if (COMPARISON_CLASS_P (arg0
)
12342 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0), op2
)
12343 && !HONOR_SIGNED_ZEROS (element_mode (op2
)))
12345 location_t loc0
= expr_location_or (arg0
, loc
);
12346 tem
= fold_invert_truthvalue (loc0
, arg0
);
12347 if (tem
&& COMPARISON_CLASS_P (tem
))
12349 tem
= fold_cond_expr_with_comparison (loc
, type
, tem
, op2
, op1
);
12355 /* If the second operand is simpler than the third, swap them
12356 since that produces better jump optimization results. */
12357 if (truth_value_p (TREE_CODE (arg0
))
12358 && tree_swap_operands_p (op1
, op2
))
12360 location_t loc0
= expr_location_or (arg0
, loc
);
12361 /* See if this can be inverted. If it can't, possibly because
12362 it was a floating-point inequality comparison, don't do
12364 tem
= fold_invert_truthvalue (loc0
, arg0
);
12366 return fold_build3_loc (loc
, code
, type
, tem
, op2
, op1
);
12369 /* Convert A ? 1 : 0 to simply A. */
12370 if ((code
== VEC_COND_EXPR
? integer_all_onesp (op1
)
12371 : (integer_onep (op1
)
12372 && !VECTOR_TYPE_P (type
)))
12373 && integer_zerop (op2
)
12374 /* If we try to convert OP0 to our type, the
12375 call to fold will try to move the conversion inside
12376 a COND, which will recurse. In that case, the COND_EXPR
12377 is probably the best choice, so leave it alone. */
12378 && type
== TREE_TYPE (arg0
))
12379 return pedantic_non_lvalue_loc (loc
, arg0
);
12381 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12382 over COND_EXPR in cases such as floating point comparisons. */
12383 if (integer_zerop (op1
)
12384 && code
== COND_EXPR
12385 && integer_onep (op2
)
12386 && !VECTOR_TYPE_P (type
)
12387 && truth_value_p (TREE_CODE (arg0
)))
12388 return pedantic_non_lvalue_loc (loc
,
12389 fold_convert_loc (loc
, type
,
12390 invert_truthvalue_loc (loc
,
12393 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12394 if (TREE_CODE (arg0
) == LT_EXPR
12395 && integer_zerop (TREE_OPERAND (arg0
, 1))
12396 && integer_zerop (op2
)
12397 && (tem
= sign_bit_p (TREE_OPERAND (arg0
, 0), arg1
)))
12399 /* sign_bit_p looks through both zero and sign extensions,
12400 but for this optimization only sign extensions are
12402 tree tem2
= TREE_OPERAND (arg0
, 0);
12403 while (tem
!= tem2
)
12405 if (TREE_CODE (tem2
) != NOP_EXPR
12406 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2
, 0))))
12411 tem2
= TREE_OPERAND (tem2
, 0);
12413 /* sign_bit_p only checks ARG1 bits within A's precision.
12414 If <sign bit of A> has wider type than A, bits outside
12415 of A's precision in <sign bit of A> need to be checked.
12416 If they are all 0, this optimization needs to be done
12417 in unsigned A's type, if they are all 1 in signed A's type,
12418 otherwise this can't be done. */
12420 && TYPE_PRECISION (TREE_TYPE (tem
))
12421 < TYPE_PRECISION (TREE_TYPE (arg1
))
12422 && TYPE_PRECISION (TREE_TYPE (tem
))
12423 < TYPE_PRECISION (type
))
12425 int inner_width
, outer_width
;
12428 inner_width
= TYPE_PRECISION (TREE_TYPE (tem
));
12429 outer_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
12430 if (outer_width
> TYPE_PRECISION (type
))
12431 outer_width
= TYPE_PRECISION (type
);
12433 wide_int mask
= wi::shifted_mask
12434 (inner_width
, outer_width
- inner_width
, false,
12435 TYPE_PRECISION (TREE_TYPE (arg1
)));
12437 wide_int common
= mask
& wi::to_wide (arg1
);
12438 if (common
== mask
)
12440 tem_type
= signed_type_for (TREE_TYPE (tem
));
12441 tem
= fold_convert_loc (loc
, tem_type
, tem
);
12443 else if (common
== 0)
12445 tem_type
= unsigned_type_for (TREE_TYPE (tem
));
12446 tem
= fold_convert_loc (loc
, tem_type
, tem
);
12454 fold_convert_loc (loc
, type
,
12455 fold_build2_loc (loc
, BIT_AND_EXPR
,
12456 TREE_TYPE (tem
), tem
,
12457 fold_convert_loc (loc
,
12462 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12463 already handled above. */
12464 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12465 && integer_onep (TREE_OPERAND (arg0
, 1))
12466 && integer_zerop (op2
)
12467 && integer_pow2p (arg1
))
12469 tree tem
= TREE_OPERAND (arg0
, 0);
12471 if (TREE_CODE (tem
) == RSHIFT_EXPR
12472 && tree_fits_uhwi_p (TREE_OPERAND (tem
, 1))
12473 && (unsigned HOST_WIDE_INT
) tree_log2 (arg1
)
12474 == tree_to_uhwi (TREE_OPERAND (tem
, 1)))
12475 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
12476 fold_convert_loc (loc
, type
,
12477 TREE_OPERAND (tem
, 0)),
12481 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12482 is probably obsolete because the first operand should be a
12483 truth value (that's why we have the two cases above), but let's
12484 leave it in until we can confirm this for all front-ends. */
12485 if (integer_zerop (op2
)
12486 && TREE_CODE (arg0
) == NE_EXPR
12487 && integer_zerop (TREE_OPERAND (arg0
, 1))
12488 && integer_pow2p (arg1
)
12489 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
12490 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
12491 arg1
, OEP_ONLY_CONST
)
12492 /* operand_equal_p compares just value, not precision, so e.g.
12493 arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
12494 second operand 32-bit -128, which is not a power of two (or vice
12496 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1)))
12497 return pedantic_non_lvalue_loc (loc
,
12498 fold_convert_loc (loc
, type
,
12499 TREE_OPERAND (arg0
,
12502 /* Disable the transformations below for vectors, since
12503 fold_binary_op_with_conditional_arg may undo them immediately,
12504 yielding an infinite loop. */
12505 if (code
== VEC_COND_EXPR
)
12508 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12509 if (integer_zerop (op2
)
12510 && truth_value_p (TREE_CODE (arg0
))
12511 && truth_value_p (TREE_CODE (arg1
))
12512 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
12513 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
? BIT_AND_EXPR
12514 : TRUTH_ANDIF_EXPR
,
12515 type
, fold_convert_loc (loc
, type
, arg0
), op1
);
12517 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12518 if (code
== VEC_COND_EXPR
? integer_all_onesp (op2
) : integer_onep (op2
)
12519 && truth_value_p (TREE_CODE (arg0
))
12520 && truth_value_p (TREE_CODE (arg1
))
12521 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
12523 location_t loc0
= expr_location_or (arg0
, loc
);
12524 /* Only perform transformation if ARG0 is easily inverted. */
12525 tem
= fold_invert_truthvalue (loc0
, arg0
);
12527 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
12530 type
, fold_convert_loc (loc
, type
, tem
),
12534 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12535 if (integer_zerop (arg1
)
12536 && truth_value_p (TREE_CODE (arg0
))
12537 && truth_value_p (TREE_CODE (op2
))
12538 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
12540 location_t loc0
= expr_location_or (arg0
, loc
);
12541 /* Only perform transformation if ARG0 is easily inverted. */
12542 tem
= fold_invert_truthvalue (loc0
, arg0
);
12544 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
12545 ? BIT_AND_EXPR
: TRUTH_ANDIF_EXPR
,
12546 type
, fold_convert_loc (loc
, type
, tem
),
12550 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12551 if (code
== VEC_COND_EXPR
? integer_all_onesp (arg1
) : integer_onep (arg1
)
12552 && truth_value_p (TREE_CODE (arg0
))
12553 && truth_value_p (TREE_CODE (op2
))
12554 && (code
== VEC_COND_EXPR
|| !VECTOR_TYPE_P (type
)))
12555 return fold_build2_loc (loc
, code
== VEC_COND_EXPR
12556 ? BIT_IOR_EXPR
: TRUTH_ORIF_EXPR
,
12557 type
, fold_convert_loc (loc
, type
, arg0
), op2
);
12562 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12563 of fold_ternary on them. */
12564 gcc_unreachable ();
12566 case BIT_FIELD_REF
:
12567 if (TREE_CODE (arg0
) == VECTOR_CST
12568 && (type
== TREE_TYPE (TREE_TYPE (arg0
))
12569 || (VECTOR_TYPE_P (type
)
12570 && TREE_TYPE (type
) == TREE_TYPE (TREE_TYPE (arg0
))))
12571 && tree_fits_uhwi_p (op1
)
12572 && tree_fits_uhwi_p (op2
))
12574 tree eltype
= TREE_TYPE (TREE_TYPE (arg0
));
12575 unsigned HOST_WIDE_INT width
= tree_to_uhwi (TYPE_SIZE (eltype
));
12576 unsigned HOST_WIDE_INT n
= tree_to_uhwi (arg1
);
12577 unsigned HOST_WIDE_INT idx
= tree_to_uhwi (op2
);
12580 && (idx
% width
) == 0
12581 && (n
% width
) == 0
12582 && known_le ((idx
+ n
) / width
,
12583 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
))))
12588 if (TREE_CODE (arg0
) == VECTOR_CST
)
12592 tem
= VECTOR_CST_ELT (arg0
, idx
);
12593 if (VECTOR_TYPE_P (type
))
12594 tem
= fold_build1 (VIEW_CONVERT_EXPR
, type
, tem
);
12598 tree_vector_builder
vals (type
, n
, 1);
12599 for (unsigned i
= 0; i
< n
; ++i
)
12600 vals
.quick_push (VECTOR_CST_ELT (arg0
, idx
+ i
));
12601 return vals
.build ();
12606 /* On constants we can use native encode/interpret to constant
12607 fold (nearly) all BIT_FIELD_REFs. */
12608 if (CONSTANT_CLASS_P (arg0
)
12609 && can_native_interpret_type_p (type
)
12610 && BITS_PER_UNIT
== 8
12611 && tree_fits_uhwi_p (op1
)
12612 && tree_fits_uhwi_p (op2
))
12614 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (op2
);
12615 unsigned HOST_WIDE_INT bitsize
= tree_to_uhwi (op1
);
12616 /* Limit us to a reasonable amount of work. To relax the
12617 other limitations we need bit-shifting of the buffer
12618 and rounding up the size. */
12619 if (bitpos
% BITS_PER_UNIT
== 0
12620 && bitsize
% BITS_PER_UNIT
== 0
12621 && bitsize
<= MAX_BITSIZE_MODE_ANY_MODE
)
12623 unsigned char b
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
12624 unsigned HOST_WIDE_INT len
12625 = native_encode_expr (arg0
, b
, bitsize
/ BITS_PER_UNIT
,
12626 bitpos
/ BITS_PER_UNIT
);
12628 && len
* BITS_PER_UNIT
>= bitsize
)
12630 tree v
= native_interpret_expr (type
, b
,
12631 bitsize
/ BITS_PER_UNIT
);
12640 case VEC_PERM_EXPR
:
12641 /* Perform constant folding of BIT_INSERT_EXPR. */
12642 if (TREE_CODE (arg2
) == VECTOR_CST
12643 && TREE_CODE (op0
) == VECTOR_CST
12644 && TREE_CODE (op1
) == VECTOR_CST
)
12646 /* Build a vector of integers from the tree mask. */
12647 vec_perm_builder builder
;
12648 if (!tree_to_vec_perm_builder (&builder
, arg2
))
12651 /* Create a vec_perm_indices for the integer vector. */
12652 poly_uint64 nelts
= TYPE_VECTOR_SUBPARTS (type
);
12653 bool single_arg
= (op0
== op1
);
12654 vec_perm_indices
sel (builder
, single_arg
? 1 : 2, nelts
);
12655 return fold_vec_perm (type
, op0
, op1
, sel
);
12659 case BIT_INSERT_EXPR
:
12660 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
12661 if (TREE_CODE (arg0
) == INTEGER_CST
12662 && TREE_CODE (arg1
) == INTEGER_CST
)
12664 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (op2
);
12665 unsigned bitsize
= TYPE_PRECISION (TREE_TYPE (arg1
));
12666 wide_int tem
= (wi::to_wide (arg0
)
12667 & wi::shifted_mask (bitpos
, bitsize
, true,
12668 TYPE_PRECISION (type
)));
12670 = wi::lshift (wi::zext (wi::to_wide (arg1
, TYPE_PRECISION (type
)),
12672 return wide_int_to_tree (type
, wi::bit_or (tem
, tem2
));
12674 else if (TREE_CODE (arg0
) == VECTOR_CST
12675 && CONSTANT_CLASS_P (arg1
)
12676 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0
)),
12679 unsigned HOST_WIDE_INT bitpos
= tree_to_uhwi (op2
);
12680 unsigned HOST_WIDE_INT elsize
12681 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1
)));
12682 if (bitpos
% elsize
== 0)
12684 unsigned k
= bitpos
/ elsize
;
12685 unsigned HOST_WIDE_INT nelts
;
12686 if (operand_equal_p (VECTOR_CST_ELT (arg0
, k
), arg1
, 0))
12688 else if (VECTOR_CST_NELTS (arg0
).is_constant (&nelts
))
12690 tree_vector_builder
elts (type
, nelts
, 1);
12691 elts
.quick_grow (nelts
);
12692 for (unsigned HOST_WIDE_INT i
= 0; i
< nelts
; ++i
)
12693 elts
[i
] = (i
== k
? arg1
: VECTOR_CST_ELT (arg0
, i
));
12694 return elts
.build ();
12702 } /* switch (code) */
12705 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
12706 of an array (or vector). *CTOR_IDX if non-NULL is updated with the
12707 constructor element index of the value returned. If the element is
12708 not found NULL_TREE is returned and *CTOR_IDX is updated to
12709 the index of the element after the ACCESS_INDEX position (which
12710 may be outside of the CTOR array). */
12713 get_array_ctor_element_at_index (tree ctor
, offset_int access_index
,
12714 unsigned *ctor_idx
)
12716 tree index_type
= NULL_TREE
;
12717 signop index_sgn
= UNSIGNED
;
12718 offset_int low_bound
= 0;
12720 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
)
12722 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (ctor
));
12723 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
12725 /* Static constructors for variably sized objects makes no sense. */
12726 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type
)) == INTEGER_CST
);
12727 index_type
= TREE_TYPE (TYPE_MIN_VALUE (domain_type
));
12728 /* ??? When it is obvious that the range is signed, treat it so. */
12729 if (TYPE_UNSIGNED (index_type
)
12730 && TYPE_MAX_VALUE (domain_type
)
12731 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type
),
12732 TYPE_MIN_VALUE (domain_type
)))
12734 index_sgn
= SIGNED
;
12736 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type
)),
12741 index_sgn
= TYPE_SIGN (index_type
);
12742 low_bound
= wi::to_offset (TYPE_MIN_VALUE (domain_type
));
12748 access_index
= wi::ext (access_index
, TYPE_PRECISION (index_type
),
12751 offset_int index
= low_bound
;
12753 index
= wi::ext (index
, TYPE_PRECISION (index_type
), index_sgn
);
12755 offset_int max_index
= index
;
12758 bool first_p
= true;
12760 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
, cval
)
12762 /* Array constructor might explicitly set index, or specify a range,
12763 or leave index NULL meaning that it is next index after previous
12767 if (TREE_CODE (cfield
) == INTEGER_CST
)
12769 = offset_int::from (wi::to_wide (cfield
), index_sgn
);
12772 gcc_assert (TREE_CODE (cfield
) == RANGE_EXPR
);
12773 index
= offset_int::from (wi::to_wide (TREE_OPERAND (cfield
, 0)),
12776 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield
, 1)),
12778 gcc_checking_assert (wi::le_p (index
, max_index
, index_sgn
));
12783 index
= max_index
+ 1;
12785 index
= wi::ext (index
, TYPE_PRECISION (index_type
), index_sgn
);
12786 gcc_checking_assert (wi::gt_p (index
, max_index
, index_sgn
));
12792 /* Do we have match? */
12793 if (wi::cmp (access_index
, index
, index_sgn
) >= 0)
12795 if (wi::cmp (access_index
, max_index
, index_sgn
) <= 0)
12802 else if (in_gimple_form
)
12803 /* We're past the element we search for. Note during parsing
12804 the elements might not be sorted.
12805 ??? We should use a binary search and a flag on the
12806 CONSTRUCTOR as to whether elements are sorted in declaration
12815 /* Perform constant folding and related simplification of EXPR.
12816 The related simplifications include x*1 => x, x*0 => 0, etc.,
12817 and application of the associative law.
12818 NOP_EXPR conversions may be removed freely (as long as we
12819 are careful not to change the type of the overall expression).
12820 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12821 but we can constant-fold them if they have constant operands. */
12823 #ifdef ENABLE_FOLD_CHECKING
12824 # define fold(x) fold_1 (x)
12825 static tree
fold_1 (tree
);
12831 const tree t
= expr
;
12832 enum tree_code code
= TREE_CODE (t
);
12833 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
12835 location_t loc
= EXPR_LOCATION (expr
);
12837 /* Return right away if a constant. */
12838 if (kind
== tcc_constant
)
12841 /* CALL_EXPR-like objects with variable numbers of operands are
12842 treated specially. */
12843 if (kind
== tcc_vl_exp
)
12845 if (code
== CALL_EXPR
)
12847 tem
= fold_call_expr (loc
, expr
, false);
12848 return tem
? tem
: expr
;
12853 if (IS_EXPR_CODE_CLASS (kind
))
12855 tree type
= TREE_TYPE (t
);
12856 tree op0
, op1
, op2
;
12858 switch (TREE_CODE_LENGTH (code
))
12861 op0
= TREE_OPERAND (t
, 0);
12862 tem
= fold_unary_loc (loc
, code
, type
, op0
);
12863 return tem
? tem
: expr
;
12865 op0
= TREE_OPERAND (t
, 0);
12866 op1
= TREE_OPERAND (t
, 1);
12867 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
12868 return tem
? tem
: expr
;
12870 op0
= TREE_OPERAND (t
, 0);
12871 op1
= TREE_OPERAND (t
, 1);
12872 op2
= TREE_OPERAND (t
, 2);
12873 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
12874 return tem
? tem
: expr
;
12884 tree op0
= TREE_OPERAND (t
, 0);
12885 tree op1
= TREE_OPERAND (t
, 1);
12887 if (TREE_CODE (op1
) == INTEGER_CST
12888 && TREE_CODE (op0
) == CONSTRUCTOR
12889 && ! type_contains_placeholder_p (TREE_TYPE (op0
)))
12891 tree val
= get_array_ctor_element_at_index (op0
,
12892 wi::to_offset (op1
));
12900 /* Return a VECTOR_CST if possible. */
12903 tree type
= TREE_TYPE (t
);
12904 if (TREE_CODE (type
) != VECTOR_TYPE
)
12909 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t
), i
, val
)
12910 if (! CONSTANT_CLASS_P (val
))
12913 return build_vector_from_ctor (type
, CONSTRUCTOR_ELTS (t
));
12917 return fold (DECL_INITIAL (t
));
12921 } /* switch (code) */
12924 #ifdef ENABLE_FOLD_CHECKING
12927 static void fold_checksum_tree (const_tree
, struct md5_ctx
*,
12928 hash_table
<nofree_ptr_hash
<const tree_node
> > *);
12929 static void fold_check_failed (const_tree
, const_tree
);
12930 void print_fold_checksum (const_tree
);
12932 /* When --enable-checking=fold, compute a digest of expr before
12933 and after actual fold call to see if fold did not accidentally
12934 change original expr. */
12940 struct md5_ctx ctx
;
12941 unsigned char checksum_before
[16], checksum_after
[16];
12942 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
12944 md5_init_ctx (&ctx
);
12945 fold_checksum_tree (expr
, &ctx
, &ht
);
12946 md5_finish_ctx (&ctx
, checksum_before
);
12949 ret
= fold_1 (expr
);
12951 md5_init_ctx (&ctx
);
12952 fold_checksum_tree (expr
, &ctx
, &ht
);
12953 md5_finish_ctx (&ctx
, checksum_after
);
12955 if (memcmp (checksum_before
, checksum_after
, 16))
12956 fold_check_failed (expr
, ret
);
12962 print_fold_checksum (const_tree expr
)
12964 struct md5_ctx ctx
;
12965 unsigned char checksum
[16], cnt
;
12966 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
12968 md5_init_ctx (&ctx
);
12969 fold_checksum_tree (expr
, &ctx
, &ht
);
12970 md5_finish_ctx (&ctx
, checksum
);
12971 for (cnt
= 0; cnt
< 16; ++cnt
)
12972 fprintf (stderr
, "%02x", checksum
[cnt
]);
12973 putc ('\n', stderr
);
12977 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED
, const_tree ret ATTRIBUTE_UNUSED
)
12979 internal_error ("fold check: original tree changed by fold");
12983 fold_checksum_tree (const_tree expr
, struct md5_ctx
*ctx
,
12984 hash_table
<nofree_ptr_hash
<const tree_node
> > *ht
)
12986 const tree_node
**slot
;
12987 enum tree_code code
;
12988 union tree_node
*buf
;
12994 slot
= ht
->find_slot (expr
, INSERT
);
12998 code
= TREE_CODE (expr
);
12999 if (TREE_CODE_CLASS (code
) == tcc_declaration
13000 && HAS_DECL_ASSEMBLER_NAME_P (expr
))
13002 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13003 size_t sz
= tree_size (expr
);
13004 buf
= XALLOCAVAR (union tree_node
, sz
);
13005 memcpy ((char *) buf
, expr
, sz
);
13006 SET_DECL_ASSEMBLER_NAME ((tree
) buf
, NULL
);
13007 buf
->decl_with_vis
.symtab_node
= NULL
;
13008 buf
->base
.nowarning_flag
= 0;
13011 else if (TREE_CODE_CLASS (code
) == tcc_type
13012 && (TYPE_POINTER_TO (expr
)
13013 || TYPE_REFERENCE_TO (expr
)
13014 || TYPE_CACHED_VALUES_P (expr
)
13015 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr
)
13016 || TYPE_NEXT_VARIANT (expr
)
13017 || TYPE_ALIAS_SET_KNOWN_P (expr
)))
13019 /* Allow these fields to be modified. */
13021 size_t sz
= tree_size (expr
);
13022 buf
= XALLOCAVAR (union tree_node
, sz
);
13023 memcpy ((char *) buf
, expr
, sz
);
13024 expr
= tmp
= (tree
) buf
;
13025 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp
) = 0;
13026 TYPE_POINTER_TO (tmp
) = NULL
;
13027 TYPE_REFERENCE_TO (tmp
) = NULL
;
13028 TYPE_NEXT_VARIANT (tmp
) = NULL
;
13029 TYPE_ALIAS_SET (tmp
) = -1;
13030 if (TYPE_CACHED_VALUES_P (tmp
))
13032 TYPE_CACHED_VALUES_P (tmp
) = 0;
13033 TYPE_CACHED_VALUES (tmp
) = NULL
;
13036 else if (TREE_NO_WARNING (expr
) && (DECL_P (expr
) || EXPR_P (expr
)))
13038 /* Allow TREE_NO_WARNING to be set. Perhaps we shouldn't allow that
13039 and change builtins.c etc. instead - see PR89543. */
13040 size_t sz
= tree_size (expr
);
13041 buf
= XALLOCAVAR (union tree_node
, sz
);
13042 memcpy ((char *) buf
, expr
, sz
);
13043 buf
->base
.nowarning_flag
= 0;
13046 md5_process_bytes (expr
, tree_size (expr
), ctx
);
13047 if (CODE_CONTAINS_STRUCT (code
, TS_TYPED
))
13048 fold_checksum_tree (TREE_TYPE (expr
), ctx
, ht
);
13049 if (TREE_CODE_CLASS (code
) != tcc_type
13050 && TREE_CODE_CLASS (code
) != tcc_declaration
13051 && code
!= TREE_LIST
13052 && code
!= SSA_NAME
13053 && CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
13054 fold_checksum_tree (TREE_CHAIN (expr
), ctx
, ht
);
13055 switch (TREE_CODE_CLASS (code
))
13061 md5_process_bytes (TREE_STRING_POINTER (expr
),
13062 TREE_STRING_LENGTH (expr
), ctx
);
13065 fold_checksum_tree (TREE_REALPART (expr
), ctx
, ht
);
13066 fold_checksum_tree (TREE_IMAGPART (expr
), ctx
, ht
);
13069 len
= vector_cst_encoded_nelts (expr
);
13070 for (i
= 0; i
< len
; ++i
)
13071 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr
, i
), ctx
, ht
);
13077 case tcc_exceptional
:
13081 fold_checksum_tree (TREE_PURPOSE (expr
), ctx
, ht
);
13082 fold_checksum_tree (TREE_VALUE (expr
), ctx
, ht
);
13083 expr
= TREE_CHAIN (expr
);
13084 goto recursive_label
;
13087 for (i
= 0; i
< TREE_VEC_LENGTH (expr
); ++i
)
13088 fold_checksum_tree (TREE_VEC_ELT (expr
, i
), ctx
, ht
);
13094 case tcc_expression
:
13095 case tcc_reference
:
13096 case tcc_comparison
:
13099 case tcc_statement
:
13101 len
= TREE_OPERAND_LENGTH (expr
);
13102 for (i
= 0; i
< len
; ++i
)
13103 fold_checksum_tree (TREE_OPERAND (expr
, i
), ctx
, ht
);
13105 case tcc_declaration
:
13106 fold_checksum_tree (DECL_NAME (expr
), ctx
, ht
);
13107 fold_checksum_tree (DECL_CONTEXT (expr
), ctx
, ht
);
13108 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_COMMON
))
13110 fold_checksum_tree (DECL_SIZE (expr
), ctx
, ht
);
13111 fold_checksum_tree (DECL_SIZE_UNIT (expr
), ctx
, ht
);
13112 fold_checksum_tree (DECL_INITIAL (expr
), ctx
, ht
);
13113 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr
), ctx
, ht
);
13114 fold_checksum_tree (DECL_ATTRIBUTES (expr
), ctx
, ht
);
13117 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_NON_COMMON
))
13119 if (TREE_CODE (expr
) == FUNCTION_DECL
)
13121 fold_checksum_tree (DECL_VINDEX (expr
), ctx
, ht
);
13122 fold_checksum_tree (DECL_ARGUMENTS (expr
), ctx
, ht
);
13124 fold_checksum_tree (DECL_RESULT_FLD (expr
), ctx
, ht
);
13128 if (TREE_CODE (expr
) == ENUMERAL_TYPE
)
13129 fold_checksum_tree (TYPE_VALUES (expr
), ctx
, ht
);
13130 fold_checksum_tree (TYPE_SIZE (expr
), ctx
, ht
);
13131 fold_checksum_tree (TYPE_SIZE_UNIT (expr
), ctx
, ht
);
13132 fold_checksum_tree (TYPE_ATTRIBUTES (expr
), ctx
, ht
);
13133 fold_checksum_tree (TYPE_NAME (expr
), ctx
, ht
);
13134 if (INTEGRAL_TYPE_P (expr
)
13135 || SCALAR_FLOAT_TYPE_P (expr
))
13137 fold_checksum_tree (TYPE_MIN_VALUE (expr
), ctx
, ht
);
13138 fold_checksum_tree (TYPE_MAX_VALUE (expr
), ctx
, ht
);
13140 fold_checksum_tree (TYPE_MAIN_VARIANT (expr
), ctx
, ht
);
13141 if (TREE_CODE (expr
) == RECORD_TYPE
13142 || TREE_CODE (expr
) == UNION_TYPE
13143 || TREE_CODE (expr
) == QUAL_UNION_TYPE
)
13144 fold_checksum_tree (TYPE_BINFO (expr
), ctx
, ht
);
13145 fold_checksum_tree (TYPE_CONTEXT (expr
), ctx
, ht
);
13152 /* Helper function for outputting the checksum of a tree T. When
13153 debugging with gdb, you can "define mynext" to be "next" followed
13154 by "call debug_fold_checksum (op0)", then just trace down till the
13157 DEBUG_FUNCTION
void
13158 debug_fold_checksum (const_tree t
)
13161 unsigned char checksum
[16];
13162 struct md5_ctx ctx
;
13163 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13165 md5_init_ctx (&ctx
);
13166 fold_checksum_tree (t
, &ctx
, &ht
);
13167 md5_finish_ctx (&ctx
, checksum
);
13170 for (i
= 0; i
< 16; i
++)
13171 fprintf (stderr
, "%d ", checksum
[i
]);
13173 fprintf (stderr
, "\n");
13178 /* Fold a unary tree expression with code CODE of type TYPE with an
13179 operand OP0. LOC is the location of the resulting expression.
13180 Return a folded expression if successful. Otherwise, return a tree
13181 expression with code CODE of type TYPE with an operand OP0. */
13184 fold_build1_loc (location_t loc
,
13185 enum tree_code code
, tree type
, tree op0 MEM_STAT_DECL
)
13188 #ifdef ENABLE_FOLD_CHECKING
13189 unsigned char checksum_before
[16], checksum_after
[16];
13190 struct md5_ctx ctx
;
13191 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13193 md5_init_ctx (&ctx
);
13194 fold_checksum_tree (op0
, &ctx
, &ht
);
13195 md5_finish_ctx (&ctx
, checksum_before
);
13199 tem
= fold_unary_loc (loc
, code
, type
, op0
);
13201 tem
= build1_loc (loc
, code
, type
, op0 PASS_MEM_STAT
);
13203 #ifdef ENABLE_FOLD_CHECKING
13204 md5_init_ctx (&ctx
);
13205 fold_checksum_tree (op0
, &ctx
, &ht
);
13206 md5_finish_ctx (&ctx
, checksum_after
);
13208 if (memcmp (checksum_before
, checksum_after
, 16))
13209 fold_check_failed (op0
, tem
);
13214 /* Fold a binary tree expression with code CODE of type TYPE with
13215 operands OP0 and OP1. LOC is the location of the resulting
13216 expression. Return a folded expression if successful. Otherwise,
13217 return a tree expression with code CODE of type TYPE with operands
13221 fold_build2_loc (location_t loc
,
13222 enum tree_code code
, tree type
, tree op0
, tree op1
13226 #ifdef ENABLE_FOLD_CHECKING
13227 unsigned char checksum_before_op0
[16],
13228 checksum_before_op1
[16],
13229 checksum_after_op0
[16],
13230 checksum_after_op1
[16];
13231 struct md5_ctx ctx
;
13232 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13234 md5_init_ctx (&ctx
);
13235 fold_checksum_tree (op0
, &ctx
, &ht
);
13236 md5_finish_ctx (&ctx
, checksum_before_op0
);
13239 md5_init_ctx (&ctx
);
13240 fold_checksum_tree (op1
, &ctx
, &ht
);
13241 md5_finish_ctx (&ctx
, checksum_before_op1
);
13245 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
13247 tem
= build2_loc (loc
, code
, type
, op0
, op1 PASS_MEM_STAT
);
13249 #ifdef ENABLE_FOLD_CHECKING
13250 md5_init_ctx (&ctx
);
13251 fold_checksum_tree (op0
, &ctx
, &ht
);
13252 md5_finish_ctx (&ctx
, checksum_after_op0
);
13255 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
13256 fold_check_failed (op0
, tem
);
13258 md5_init_ctx (&ctx
);
13259 fold_checksum_tree (op1
, &ctx
, &ht
);
13260 md5_finish_ctx (&ctx
, checksum_after_op1
);
13262 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
13263 fold_check_failed (op1
, tem
);
13268 /* Fold a ternary tree expression with code CODE of type TYPE with
13269 operands OP0, OP1, and OP2. Return a folded expression if
13270 successful. Otherwise, return a tree expression with code CODE of
13271 type TYPE with operands OP0, OP1, and OP2. */
13274 fold_build3_loc (location_t loc
, enum tree_code code
, tree type
,
13275 tree op0
, tree op1
, tree op2 MEM_STAT_DECL
)
13278 #ifdef ENABLE_FOLD_CHECKING
13279 unsigned char checksum_before_op0
[16],
13280 checksum_before_op1
[16],
13281 checksum_before_op2
[16],
13282 checksum_after_op0
[16],
13283 checksum_after_op1
[16],
13284 checksum_after_op2
[16];
13285 struct md5_ctx ctx
;
13286 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13288 md5_init_ctx (&ctx
);
13289 fold_checksum_tree (op0
, &ctx
, &ht
);
13290 md5_finish_ctx (&ctx
, checksum_before_op0
);
13293 md5_init_ctx (&ctx
);
13294 fold_checksum_tree (op1
, &ctx
, &ht
);
13295 md5_finish_ctx (&ctx
, checksum_before_op1
);
13298 md5_init_ctx (&ctx
);
13299 fold_checksum_tree (op2
, &ctx
, &ht
);
13300 md5_finish_ctx (&ctx
, checksum_before_op2
);
13304 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
13305 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
13307 tem
= build3_loc (loc
, code
, type
, op0
, op1
, op2 PASS_MEM_STAT
);
13309 #ifdef ENABLE_FOLD_CHECKING
13310 md5_init_ctx (&ctx
);
13311 fold_checksum_tree (op0
, &ctx
, &ht
);
13312 md5_finish_ctx (&ctx
, checksum_after_op0
);
13315 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
13316 fold_check_failed (op0
, tem
);
13318 md5_init_ctx (&ctx
);
13319 fold_checksum_tree (op1
, &ctx
, &ht
);
13320 md5_finish_ctx (&ctx
, checksum_after_op1
);
13323 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
13324 fold_check_failed (op1
, tem
);
13326 md5_init_ctx (&ctx
);
13327 fold_checksum_tree (op2
, &ctx
, &ht
);
13328 md5_finish_ctx (&ctx
, checksum_after_op2
);
13330 if (memcmp (checksum_before_op2
, checksum_after_op2
, 16))
13331 fold_check_failed (op2
, tem
);
13336 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13337 arguments in ARGARRAY, and a null static chain.
13338 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13339 of type TYPE from the given operands as constructed by build_call_array. */
13342 fold_build_call_array_loc (location_t loc
, tree type
, tree fn
,
13343 int nargs
, tree
*argarray
)
13346 #ifdef ENABLE_FOLD_CHECKING
13347 unsigned char checksum_before_fn
[16],
13348 checksum_before_arglist
[16],
13349 checksum_after_fn
[16],
13350 checksum_after_arglist
[16];
13351 struct md5_ctx ctx
;
13352 hash_table
<nofree_ptr_hash
<const tree_node
> > ht (32);
13355 md5_init_ctx (&ctx
);
13356 fold_checksum_tree (fn
, &ctx
, &ht
);
13357 md5_finish_ctx (&ctx
, checksum_before_fn
);
13360 md5_init_ctx (&ctx
);
13361 for (i
= 0; i
< nargs
; i
++)
13362 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
13363 md5_finish_ctx (&ctx
, checksum_before_arglist
);
13367 tem
= fold_builtin_call_array (loc
, type
, fn
, nargs
, argarray
);
13369 tem
= build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
13371 #ifdef ENABLE_FOLD_CHECKING
13372 md5_init_ctx (&ctx
);
13373 fold_checksum_tree (fn
, &ctx
, &ht
);
13374 md5_finish_ctx (&ctx
, checksum_after_fn
);
13377 if (memcmp (checksum_before_fn
, checksum_after_fn
, 16))
13378 fold_check_failed (fn
, tem
);
13380 md5_init_ctx (&ctx
);
13381 for (i
= 0; i
< nargs
; i
++)
13382 fold_checksum_tree (argarray
[i
], &ctx
, &ht
);
13383 md5_finish_ctx (&ctx
, checksum_after_arglist
);
13385 if (memcmp (checksum_before_arglist
, checksum_after_arglist
, 16))
13386 fold_check_failed (NULL_TREE
, tem
);
13391 /* Perform constant folding and related simplification of initializer
13392 expression EXPR. These behave identically to "fold_buildN" but ignore
13393 potential run-time traps and exceptions that fold must preserve. */
13395 #define START_FOLD_INIT \
13396 int saved_signaling_nans = flag_signaling_nans;\
13397 int saved_trapping_math = flag_trapping_math;\
13398 int saved_rounding_math = flag_rounding_math;\
13399 int saved_trapv = flag_trapv;\
13400 int saved_folding_initializer = folding_initializer;\
13401 flag_signaling_nans = 0;\
13402 flag_trapping_math = 0;\
13403 flag_rounding_math = 0;\
13405 folding_initializer = 1;
13407 #define END_FOLD_INIT \
13408 flag_signaling_nans = saved_signaling_nans;\
13409 flag_trapping_math = saved_trapping_math;\
13410 flag_rounding_math = saved_rounding_math;\
13411 flag_trapv = saved_trapv;\
13412 folding_initializer = saved_folding_initializer;
13415 fold_build1_initializer_loc (location_t loc
, enum tree_code code
,
13416 tree type
, tree op
)
13421 result
= fold_build1_loc (loc
, code
, type
, op
);
13428 fold_build2_initializer_loc (location_t loc
, enum tree_code code
,
13429 tree type
, tree op0
, tree op1
)
13434 result
= fold_build2_loc (loc
, code
, type
, op0
, op1
);
13441 fold_build_call_array_initializer_loc (location_t loc
, tree type
, tree fn
,
13442 int nargs
, tree
*argarray
)
13447 result
= fold_build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
13453 #undef START_FOLD_INIT
13454 #undef END_FOLD_INIT
13456 /* Determine if first argument is a multiple of second argument. Return 0 if
13457 it is not, or we cannot easily determined it to be.
13459 An example of the sort of thing we care about (at this point; this routine
13460 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13461 fold cases do now) is discovering that
13463 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13469 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13471 This code also handles discovering that
13473 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13475 is a multiple of 8 so we don't have to worry about dealing with a
13476 possible remainder.
13478 Note that we *look* inside a SAVE_EXPR only to determine how it was
13479 calculated; it is not safe for fold to do much of anything else with the
13480 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13481 at run time. For example, the latter example above *cannot* be implemented
13482 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13483 evaluation time of the original SAVE_EXPR is not necessarily the same at
13484 the time the new expression is evaluated. The only optimization of this
13485 sort that would be valid is changing
13487 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13491 SAVE_EXPR (I) * SAVE_EXPR (J)
13493 (where the same SAVE_EXPR (J) is used in the original and the
13494 transformed version). */
13497 multiple_of_p (tree type
, const_tree top
, const_tree bottom
)
13502 if (operand_equal_p (top
, bottom
, 0))
13505 if (TREE_CODE (type
) != INTEGER_TYPE
)
13508 switch (TREE_CODE (top
))
13511 /* Bitwise and provides a power of two multiple. If the mask is
13512 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13513 if (!integer_pow2p (bottom
))
13515 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
13516 || multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
));
13519 if (TREE_CODE (bottom
) == INTEGER_CST
)
13521 op1
= TREE_OPERAND (top
, 0);
13522 op2
= TREE_OPERAND (top
, 1);
13523 if (TREE_CODE (op1
) == INTEGER_CST
)
13524 std::swap (op1
, op2
);
13525 if (TREE_CODE (op2
) == INTEGER_CST
)
13527 if (multiple_of_p (type
, op2
, bottom
))
13529 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
13530 if (multiple_of_p (type
, bottom
, op2
))
13532 widest_int w
= wi::sdiv_trunc (wi::to_widest (bottom
),
13533 wi::to_widest (op2
));
13534 if (wi::fits_to_tree_p (w
, TREE_TYPE (bottom
)))
13536 op2
= wide_int_to_tree (TREE_TYPE (bottom
), w
);
13537 return multiple_of_p (type
, op1
, op2
);
13540 return multiple_of_p (type
, op1
, bottom
);
13543 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
13544 || multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
));
13547 /* It is impossible to prove if op0 - op1 is multiple of bottom
13548 precisely, so be conservative here checking if both op0 and op1
13549 are multiple of bottom. Note we check the second operand first
13550 since it's usually simpler. */
13551 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
13552 && multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
));
13555 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
13556 as op0 - 3 if the expression has unsigned type. For example,
13557 (X / 3) + 0xfffffffd is multiple of 3, but 0xfffffffd is not. */
13558 op1
= TREE_OPERAND (top
, 1);
13559 if (TYPE_UNSIGNED (type
)
13560 && TREE_CODE (op1
) == INTEGER_CST
&& tree_int_cst_sign_bit (op1
))
13561 op1
= fold_build1 (NEGATE_EXPR
, type
, op1
);
13562 return (multiple_of_p (type
, op1
, bottom
)
13563 && multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
));
13566 if (TREE_CODE (TREE_OPERAND (top
, 1)) == INTEGER_CST
)
13568 op1
= TREE_OPERAND (top
, 1);
13569 /* const_binop may not detect overflow correctly,
13570 so check for it explicitly here. */
13571 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)),
13573 && (t1
= fold_convert (type
,
13574 const_binop (LSHIFT_EXPR
, size_one_node
,
13576 && !TREE_OVERFLOW (t1
))
13577 return multiple_of_p (type
, t1
, bottom
);
13582 /* Can't handle conversions from non-integral or wider integral type. */
13583 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top
, 0))) != INTEGER_TYPE
)
13584 || (TYPE_PRECISION (type
)
13585 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top
, 0)))))
13591 return multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
);
13594 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
13595 && multiple_of_p (type
, TREE_OPERAND (top
, 2), bottom
));
13598 if (TREE_CODE (bottom
) != INTEGER_CST
13599 || integer_zerop (bottom
)
13600 || (TYPE_UNSIGNED (type
)
13601 && (tree_int_cst_sgn (top
) < 0
13602 || tree_int_cst_sgn (bottom
) < 0)))
13604 return wi::multiple_of_p (wi::to_widest (top
), wi::to_widest (bottom
),
13608 if (TREE_CODE (bottom
) == INTEGER_CST
13609 && (stmt
= SSA_NAME_DEF_STMT (top
)) != NULL
13610 && gimple_code (stmt
) == GIMPLE_ASSIGN
)
13612 enum tree_code code
= gimple_assign_rhs_code (stmt
);
13614 /* Check for special cases to see if top is defined as multiple
13617 top = (X & ~(bottom - 1) ; bottom is power of 2
13623 if (code
== BIT_AND_EXPR
13624 && (op2
= gimple_assign_rhs2 (stmt
)) != NULL_TREE
13625 && TREE_CODE (op2
) == INTEGER_CST
13626 && integer_pow2p (bottom
)
13627 && wi::multiple_of_p (wi::to_widest (op2
),
13628 wi::to_widest (bottom
), UNSIGNED
))
13631 op1
= gimple_assign_rhs1 (stmt
);
13632 if (code
== MINUS_EXPR
13633 && (op2
= gimple_assign_rhs2 (stmt
)) != NULL_TREE
13634 && TREE_CODE (op2
) == SSA_NAME
13635 && (stmt
= SSA_NAME_DEF_STMT (op2
)) != NULL
13636 && gimple_code (stmt
) == GIMPLE_ASSIGN
13637 && (code
= gimple_assign_rhs_code (stmt
)) == TRUNC_MOD_EXPR
13638 && operand_equal_p (op1
, gimple_assign_rhs1 (stmt
), 0)
13639 && operand_equal_p (bottom
, gimple_assign_rhs2 (stmt
), 0))
13646 if (POLY_INT_CST_P (top
) && poly_int_tree_p (bottom
))
13647 return multiple_p (wi::to_poly_widest (top
),
13648 wi::to_poly_widest (bottom
));
13654 #define tree_expr_nonnegative_warnv_p(X, Y) \
13655 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13657 #define RECURSE(X) \
13658 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
13660 /* Return true if CODE or TYPE is known to be non-negative. */
13663 tree_simple_nonnegative_warnv_p (enum tree_code code
, tree type
)
13665 if ((TYPE_PRECISION (type
) != 1 || TYPE_UNSIGNED (type
))
13666 && truth_value_p (code
))
13667 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13668 have a signed:1 type (where the value is -1 and 0). */
13673 /* Return true if (CODE OP0) is known to be non-negative. If the return
13674 value is based on the assumption that signed overflow is undefined,
13675 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13676 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13679 tree_unary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
13680 bool *strict_overflow_p
, int depth
)
13682 if (TYPE_UNSIGNED (type
))
13688 /* We can't return 1 if flag_wrapv is set because
13689 ABS_EXPR<INT_MIN> = INT_MIN. */
13690 if (!ANY_INTEGRAL_TYPE_P (type
))
13692 if (TYPE_OVERFLOW_UNDEFINED (type
))
13694 *strict_overflow_p
= true;
13699 case NON_LVALUE_EXPR
:
13701 case FIX_TRUNC_EXPR
:
13702 return RECURSE (op0
);
13706 tree inner_type
= TREE_TYPE (op0
);
13707 tree outer_type
= type
;
13709 if (TREE_CODE (outer_type
) == REAL_TYPE
)
13711 if (TREE_CODE (inner_type
) == REAL_TYPE
)
13712 return RECURSE (op0
);
13713 if (INTEGRAL_TYPE_P (inner_type
))
13715 if (TYPE_UNSIGNED (inner_type
))
13717 return RECURSE (op0
);
13720 else if (INTEGRAL_TYPE_P (outer_type
))
13722 if (TREE_CODE (inner_type
) == REAL_TYPE
)
13723 return RECURSE (op0
);
13724 if (INTEGRAL_TYPE_P (inner_type
))
13725 return TYPE_PRECISION (inner_type
) < TYPE_PRECISION (outer_type
)
13726 && TYPE_UNSIGNED (inner_type
);
13732 return tree_simple_nonnegative_warnv_p (code
, type
);
13735 /* We don't know sign of `t', so be conservative and return false. */
13739 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
13740 value is based on the assumption that signed overflow is undefined,
13741 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13742 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13745 tree_binary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
13746 tree op1
, bool *strict_overflow_p
,
13749 if (TYPE_UNSIGNED (type
))
13754 case POINTER_PLUS_EXPR
:
13756 if (FLOAT_TYPE_P (type
))
13757 return RECURSE (op0
) && RECURSE (op1
);
13759 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13760 both unsigned and at least 2 bits shorter than the result. */
13761 if (TREE_CODE (type
) == INTEGER_TYPE
13762 && TREE_CODE (op0
) == NOP_EXPR
13763 && TREE_CODE (op1
) == NOP_EXPR
)
13765 tree inner1
= TREE_TYPE (TREE_OPERAND (op0
, 0));
13766 tree inner2
= TREE_TYPE (TREE_OPERAND (op1
, 0));
13767 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
13768 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
13770 unsigned int prec
= MAX (TYPE_PRECISION (inner1
),
13771 TYPE_PRECISION (inner2
)) + 1;
13772 return prec
< TYPE_PRECISION (type
);
13778 if (FLOAT_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
13780 /* x * x is always non-negative for floating point x
13781 or without overflow. */
13782 if (operand_equal_p (op0
, op1
, 0)
13783 || (RECURSE (op0
) && RECURSE (op1
)))
13785 if (ANY_INTEGRAL_TYPE_P (type
)
13786 && TYPE_OVERFLOW_UNDEFINED (type
))
13787 *strict_overflow_p
= true;
13792 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13793 both unsigned and their total bits is shorter than the result. */
13794 if (TREE_CODE (type
) == INTEGER_TYPE
13795 && (TREE_CODE (op0
) == NOP_EXPR
|| TREE_CODE (op0
) == INTEGER_CST
)
13796 && (TREE_CODE (op1
) == NOP_EXPR
|| TREE_CODE (op1
) == INTEGER_CST
))
13798 tree inner0
= (TREE_CODE (op0
) == NOP_EXPR
)
13799 ? TREE_TYPE (TREE_OPERAND (op0
, 0))
13801 tree inner1
= (TREE_CODE (op1
) == NOP_EXPR
)
13802 ? TREE_TYPE (TREE_OPERAND (op1
, 0))
13805 bool unsigned0
= TYPE_UNSIGNED (inner0
);
13806 bool unsigned1
= TYPE_UNSIGNED (inner1
);
13808 if (TREE_CODE (op0
) == INTEGER_CST
)
13809 unsigned0
= unsigned0
|| tree_int_cst_sgn (op0
) >= 0;
13811 if (TREE_CODE (op1
) == INTEGER_CST
)
13812 unsigned1
= unsigned1
|| tree_int_cst_sgn (op1
) >= 0;
13814 if (TREE_CODE (inner0
) == INTEGER_TYPE
&& unsigned0
13815 && TREE_CODE (inner1
) == INTEGER_TYPE
&& unsigned1
)
13817 unsigned int precision0
= (TREE_CODE (op0
) == INTEGER_CST
)
13818 ? tree_int_cst_min_precision (op0
, UNSIGNED
)
13819 : TYPE_PRECISION (inner0
);
13821 unsigned int precision1
= (TREE_CODE (op1
) == INTEGER_CST
)
13822 ? tree_int_cst_min_precision (op1
, UNSIGNED
)
13823 : TYPE_PRECISION (inner1
);
13825 return precision0
+ precision1
< TYPE_PRECISION (type
);
13832 return RECURSE (op0
) || RECURSE (op1
);
13838 case TRUNC_DIV_EXPR
:
13839 case CEIL_DIV_EXPR
:
13840 case FLOOR_DIV_EXPR
:
13841 case ROUND_DIV_EXPR
:
13842 return RECURSE (op0
) && RECURSE (op1
);
13844 case TRUNC_MOD_EXPR
:
13845 return RECURSE (op0
);
13847 case FLOOR_MOD_EXPR
:
13848 return RECURSE (op1
);
13850 case CEIL_MOD_EXPR
:
13851 case ROUND_MOD_EXPR
:
13853 return tree_simple_nonnegative_warnv_p (code
, type
);
13856 /* We don't know sign of `t', so be conservative and return false. */
13860 /* Return true if T is known to be non-negative. If the return
13861 value is based on the assumption that signed overflow is undefined,
13862 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13863 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13866 tree_single_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
, int depth
)
13868 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
13871 switch (TREE_CODE (t
))
13874 return tree_int_cst_sgn (t
) >= 0;
13877 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
13880 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t
));
13883 return RECURSE (TREE_OPERAND (t
, 1)) && RECURSE (TREE_OPERAND (t
, 2));
13886 /* Limit the depth of recursion to avoid quadratic behavior.
13887 This is expected to catch almost all occurrences in practice.
13888 If this code misses important cases that unbounded recursion
13889 would not, passes that need this information could be revised
13890 to provide it through dataflow propagation. */
13891 return (!name_registered_for_update_p (t
)
13892 && depth
< param_max_ssa_name_query_depth
13893 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t
),
13894 strict_overflow_p
, depth
));
13897 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
), TREE_TYPE (t
));
13901 /* Return true if T is known to be non-negative. If the return
13902 value is based on the assumption that signed overflow is undefined,
13903 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13904 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13907 tree_call_nonnegative_warnv_p (tree type
, combined_fn fn
, tree arg0
, tree arg1
,
13908 bool *strict_overflow_p
, int depth
)
13929 case CFN_BUILT_IN_BSWAP32
:
13930 case CFN_BUILT_IN_BSWAP64
:
13936 /* sqrt(-0.0) is -0.0. */
13937 if (!HONOR_SIGNED_ZEROS (element_mode (type
)))
13939 return RECURSE (arg0
);
13967 CASE_CFN_NEARBYINT
:
13968 CASE_CFN_NEARBYINT_FN
:
13973 CASE_CFN_ROUNDEVEN
:
13974 CASE_CFN_ROUNDEVEN_FN
:
13979 CASE_CFN_SIGNIFICAND
:
13984 /* True if the 1st argument is nonnegative. */
13985 return RECURSE (arg0
);
13989 /* True if the 1st OR 2nd arguments are nonnegative. */
13990 return RECURSE (arg0
) || RECURSE (arg1
);
13994 /* True if the 1st AND 2nd arguments are nonnegative. */
13995 return RECURSE (arg0
) && RECURSE (arg1
);
13998 CASE_CFN_COPYSIGN_FN
:
13999 /* True if the 2nd argument is nonnegative. */
14000 return RECURSE (arg1
);
14003 /* True if the 1st argument is nonnegative or the second
14004 argument is an even integer. */
14005 if (TREE_CODE (arg1
) == INTEGER_CST
14006 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
14008 return RECURSE (arg0
);
14011 /* True if the 1st argument is nonnegative or the second
14012 argument is an even integer valued real. */
14013 if (TREE_CODE (arg1
) == REAL_CST
)
14018 c
= TREE_REAL_CST (arg1
);
14019 n
= real_to_integer (&c
);
14022 REAL_VALUE_TYPE cint
;
14023 real_from_integer (&cint
, VOIDmode
, n
, SIGNED
);
14024 if (real_identical (&c
, &cint
))
14028 return RECURSE (arg0
);
14033 return tree_simple_nonnegative_warnv_p (CALL_EXPR
, type
);
14036 /* Return true if T is known to be non-negative. If the return
14037 value is based on the assumption that signed overflow is undefined,
14038 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14039 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14042 tree_invalid_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
, int depth
)
14044 enum tree_code code
= TREE_CODE (t
);
14045 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
14052 tree temp
= TARGET_EXPR_SLOT (t
);
14053 t
= TARGET_EXPR_INITIAL (t
);
14055 /* If the initializer is non-void, then it's a normal expression
14056 that will be assigned to the slot. */
14057 if (!VOID_TYPE_P (t
))
14058 return RECURSE (t
);
14060 /* Otherwise, the initializer sets the slot in some way. One common
14061 way is an assignment statement at the end of the initializer. */
14064 if (TREE_CODE (t
) == BIND_EXPR
)
14065 t
= expr_last (BIND_EXPR_BODY (t
));
14066 else if (TREE_CODE (t
) == TRY_FINALLY_EXPR
14067 || TREE_CODE (t
) == TRY_CATCH_EXPR
)
14068 t
= expr_last (TREE_OPERAND (t
, 0));
14069 else if (TREE_CODE (t
) == STATEMENT_LIST
)
14074 if (TREE_CODE (t
) == MODIFY_EXPR
14075 && TREE_OPERAND (t
, 0) == temp
)
14076 return RECURSE (TREE_OPERAND (t
, 1));
14083 tree arg0
= call_expr_nargs (t
) > 0 ? CALL_EXPR_ARG (t
, 0) : NULL_TREE
;
14084 tree arg1
= call_expr_nargs (t
) > 1 ? CALL_EXPR_ARG (t
, 1) : NULL_TREE
;
14086 return tree_call_nonnegative_warnv_p (TREE_TYPE (t
),
14087 get_call_combined_fn (t
),
14090 strict_overflow_p
, depth
);
14092 case COMPOUND_EXPR
:
14094 return RECURSE (TREE_OPERAND (t
, 1));
14097 return RECURSE (expr_last (TREE_OPERAND (t
, 1)));
14100 return RECURSE (TREE_OPERAND (t
, 0));
14103 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
), TREE_TYPE (t
));
14108 #undef tree_expr_nonnegative_warnv_p
14110 /* Return true if T is known to be non-negative. If the return
14111 value is based on the assumption that signed overflow is undefined,
14112 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14113 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14116 tree_expr_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
, int depth
)
14118 enum tree_code code
;
14119 if (t
== error_mark_node
)
14122 code
= TREE_CODE (t
);
14123 switch (TREE_CODE_CLASS (code
))
14126 case tcc_comparison
:
14127 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
14129 TREE_OPERAND (t
, 0),
14130 TREE_OPERAND (t
, 1),
14131 strict_overflow_p
, depth
);
14134 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
14136 TREE_OPERAND (t
, 0),
14137 strict_overflow_p
, depth
);
14140 case tcc_declaration
:
14141 case tcc_reference
:
14142 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
, depth
);
14150 case TRUTH_AND_EXPR
:
14151 case TRUTH_OR_EXPR
:
14152 case TRUTH_XOR_EXPR
:
14153 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
14155 TREE_OPERAND (t
, 0),
14156 TREE_OPERAND (t
, 1),
14157 strict_overflow_p
, depth
);
14158 case TRUTH_NOT_EXPR
:
14159 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
14161 TREE_OPERAND (t
, 0),
14162 strict_overflow_p
, depth
);
14169 case WITH_SIZE_EXPR
:
14171 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
, depth
);
14174 return tree_invalid_nonnegative_warnv_p (t
, strict_overflow_p
, depth
);
14178 /* Return true if `t' is known to be non-negative. Handle warnings
14179 about undefined signed overflow. */
14182 tree_expr_nonnegative_p (tree t
)
14184 bool ret
, strict_overflow_p
;
14186 strict_overflow_p
= false;
14187 ret
= tree_expr_nonnegative_warnv_p (t
, &strict_overflow_p
);
14188 if (strict_overflow_p
)
14189 fold_overflow_warning (("assuming signed overflow does not occur when "
14190 "determining that expression is always "
14192 WARN_STRICT_OVERFLOW_MISC
);
14197 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14198 For floating point we further ensure that T is not denormal.
14199 Similar logic is present in nonzero_address in rtlanal.h.
14201 If the return value is based on the assumption that signed overflow
14202 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14203 change *STRICT_OVERFLOW_P. */
14206 tree_unary_nonzero_warnv_p (enum tree_code code
, tree type
, tree op0
,
14207 bool *strict_overflow_p
)
14212 return tree_expr_nonzero_warnv_p (op0
,
14213 strict_overflow_p
);
14217 tree inner_type
= TREE_TYPE (op0
);
14218 tree outer_type
= type
;
14220 return (TYPE_PRECISION (outer_type
) >= TYPE_PRECISION (inner_type
)
14221 && tree_expr_nonzero_warnv_p (op0
,
14222 strict_overflow_p
));
14226 case NON_LVALUE_EXPR
:
14227 return tree_expr_nonzero_warnv_p (op0
,
14228 strict_overflow_p
);
14237 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14238 For floating point we further ensure that T is not denormal.
14239 Similar logic is present in nonzero_address in rtlanal.h.
14241 If the return value is based on the assumption that signed overflow
14242 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14243 change *STRICT_OVERFLOW_P. */
14246 tree_binary_nonzero_warnv_p (enum tree_code code
,
14249 tree op1
, bool *strict_overflow_p
)
14251 bool sub_strict_overflow_p
;
14254 case POINTER_PLUS_EXPR
:
14256 if (ANY_INTEGRAL_TYPE_P (type
) && TYPE_OVERFLOW_UNDEFINED (type
))
14258 /* With the presence of negative values it is hard
14259 to say something. */
14260 sub_strict_overflow_p
= false;
14261 if (!tree_expr_nonnegative_warnv_p (op0
,
14262 &sub_strict_overflow_p
)
14263 || !tree_expr_nonnegative_warnv_p (op1
,
14264 &sub_strict_overflow_p
))
14266 /* One of operands must be positive and the other non-negative. */
14267 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14268 overflows, on a twos-complement machine the sum of two
14269 nonnegative numbers can never be zero. */
14270 return (tree_expr_nonzero_warnv_p (op0
,
14272 || tree_expr_nonzero_warnv_p (op1
,
14273 strict_overflow_p
));
14278 if (TYPE_OVERFLOW_UNDEFINED (type
))
14280 if (tree_expr_nonzero_warnv_p (op0
,
14282 && tree_expr_nonzero_warnv_p (op1
,
14283 strict_overflow_p
))
14285 *strict_overflow_p
= true;
14292 sub_strict_overflow_p
= false;
14293 if (tree_expr_nonzero_warnv_p (op0
,
14294 &sub_strict_overflow_p
)
14295 && tree_expr_nonzero_warnv_p (op1
,
14296 &sub_strict_overflow_p
))
14298 if (sub_strict_overflow_p
)
14299 *strict_overflow_p
= true;
14304 sub_strict_overflow_p
= false;
14305 if (tree_expr_nonzero_warnv_p (op0
,
14306 &sub_strict_overflow_p
))
14308 if (sub_strict_overflow_p
)
14309 *strict_overflow_p
= true;
14311 /* When both operands are nonzero, then MAX must be too. */
14312 if (tree_expr_nonzero_warnv_p (op1
,
14313 strict_overflow_p
))
14316 /* MAX where operand 0 is positive is positive. */
14317 return tree_expr_nonnegative_warnv_p (op0
,
14318 strict_overflow_p
);
14320 /* MAX where operand 1 is positive is positive. */
14321 else if (tree_expr_nonzero_warnv_p (op1
,
14322 &sub_strict_overflow_p
)
14323 && tree_expr_nonnegative_warnv_p (op1
,
14324 &sub_strict_overflow_p
))
14326 if (sub_strict_overflow_p
)
14327 *strict_overflow_p
= true;
14333 return (tree_expr_nonzero_warnv_p (op1
,
14335 || tree_expr_nonzero_warnv_p (op0
,
14336 strict_overflow_p
));
14345 /* Return true when T is an address and is known to be nonzero.
14346 For floating point we further ensure that T is not denormal.
14347 Similar logic is present in nonzero_address in rtlanal.h.
14349 If the return value is based on the assumption that signed overflow
14350 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14351 change *STRICT_OVERFLOW_P. */
14354 tree_single_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
14356 bool sub_strict_overflow_p
;
14357 switch (TREE_CODE (t
))
14360 return !integer_zerop (t
);
14364 tree base
= TREE_OPERAND (t
, 0);
14366 if (!DECL_P (base
))
14367 base
= get_base_address (base
);
14369 if (base
&& TREE_CODE (base
) == TARGET_EXPR
)
14370 base
= TARGET_EXPR_SLOT (base
);
14375 /* For objects in symbol table check if we know they are non-zero.
14376 Don't do anything for variables and functions before symtab is built;
14377 it is quite possible that they will be declared weak later. */
14378 int nonzero_addr
= maybe_nonzero_address (base
);
14379 if (nonzero_addr
>= 0)
14380 return nonzero_addr
;
14382 /* Constants are never weak. */
14383 if (CONSTANT_CLASS_P (base
))
14390 sub_strict_overflow_p
= false;
14391 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
14392 &sub_strict_overflow_p
)
14393 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 2),
14394 &sub_strict_overflow_p
))
14396 if (sub_strict_overflow_p
)
14397 *strict_overflow_p
= true;
14403 if (!INTEGRAL_TYPE_P (TREE_TYPE (t
)))
14405 return expr_not_equal_to (t
, wi::zero (TYPE_PRECISION (TREE_TYPE (t
))));
14413 #define integer_valued_real_p(X) \
14414 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14416 #define RECURSE(X) \
14417 ((integer_valued_real_p) (X, depth + 1))
14419 /* Return true if the floating point result of (CODE OP0) has an
14420 integer value. We also allow +Inf, -Inf and NaN to be considered
14421 integer values. Return false for signaling NaN.
14423 DEPTH is the current nesting depth of the query. */
14426 integer_valued_real_unary_p (tree_code code
, tree op0
, int depth
)
14434 return RECURSE (op0
);
14438 tree type
= TREE_TYPE (op0
);
14439 if (TREE_CODE (type
) == INTEGER_TYPE
)
14441 if (TREE_CODE (type
) == REAL_TYPE
)
14442 return RECURSE (op0
);
14452 /* Return true if the floating point result of (CODE OP0 OP1) has an
14453 integer value. We also allow +Inf, -Inf and NaN to be considered
14454 integer values. Return false for signaling NaN.
14456 DEPTH is the current nesting depth of the query. */
14459 integer_valued_real_binary_p (tree_code code
, tree op0
, tree op1
, int depth
)
14468 return RECURSE (op0
) && RECURSE (op1
);
14476 /* Return true if the floating point result of calling FNDECL with arguments
14477 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
14478 considered integer values. Return false for signaling NaN. If FNDECL
14479 takes fewer than 2 arguments, the remaining ARGn are null.
14481 DEPTH is the current nesting depth of the query. */
14484 integer_valued_real_call_p (combined_fn fn
, tree arg0
, tree arg1
, int depth
)
14492 CASE_CFN_NEARBYINT
:
14493 CASE_CFN_NEARBYINT_FN
:
14498 CASE_CFN_ROUNDEVEN
:
14499 CASE_CFN_ROUNDEVEN_FN
:
14508 return RECURSE (arg0
) && RECURSE (arg1
);
14516 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
14517 has an integer value. We also allow +Inf, -Inf and NaN to be
14518 considered integer values. Return false for signaling NaN.
14520 DEPTH is the current nesting depth of the query. */
14523 integer_valued_real_single_p (tree t
, int depth
)
14525 switch (TREE_CODE (t
))
14528 return real_isinteger (TREE_REAL_CST_PTR (t
), TYPE_MODE (TREE_TYPE (t
)));
14531 return RECURSE (TREE_OPERAND (t
, 1)) && RECURSE (TREE_OPERAND (t
, 2));
14534 /* Limit the depth of recursion to avoid quadratic behavior.
14535 This is expected to catch almost all occurrences in practice.
14536 If this code misses important cases that unbounded recursion
14537 would not, passes that need this information could be revised
14538 to provide it through dataflow propagation. */
14539 return (!name_registered_for_update_p (t
)
14540 && depth
< param_max_ssa_name_query_depth
14541 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t
),
14550 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
14551 has an integer value. We also allow +Inf, -Inf and NaN to be
14552 considered integer values. Return false for signaling NaN.
14554 DEPTH is the current nesting depth of the query. */
14557 integer_valued_real_invalid_p (tree t
, int depth
)
14559 switch (TREE_CODE (t
))
14561 case COMPOUND_EXPR
:
14564 return RECURSE (TREE_OPERAND (t
, 1));
14567 return RECURSE (TREE_OPERAND (t
, 0));
14576 #undef integer_valued_real_p
14578 /* Return true if the floating point expression T has an integer value.
14579 We also allow +Inf, -Inf and NaN to be considered integer values.
14580 Return false for signaling NaN.
14582 DEPTH is the current nesting depth of the query. */
14585 integer_valued_real_p (tree t
, int depth
)
14587 if (t
== error_mark_node
)
14590 STRIP_ANY_LOCATION_WRAPPER (t
);
14592 tree_code code
= TREE_CODE (t
);
14593 switch (TREE_CODE_CLASS (code
))
14596 case tcc_comparison
:
14597 return integer_valued_real_binary_p (code
, TREE_OPERAND (t
, 0),
14598 TREE_OPERAND (t
, 1), depth
);
14601 return integer_valued_real_unary_p (code
, TREE_OPERAND (t
, 0), depth
);
14604 case tcc_declaration
:
14605 case tcc_reference
:
14606 return integer_valued_real_single_p (t
, depth
);
14616 return integer_valued_real_single_p (t
, depth
);
14620 tree arg0
= (call_expr_nargs (t
) > 0
14621 ? CALL_EXPR_ARG (t
, 0)
14623 tree arg1
= (call_expr_nargs (t
) > 1
14624 ? CALL_EXPR_ARG (t
, 1)
14626 return integer_valued_real_call_p (get_call_combined_fn (t
),
14627 arg0
, arg1
, depth
);
14631 return integer_valued_real_invalid_p (t
, depth
);
14635 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14636 attempt to fold the expression to a constant without modifying TYPE,
14639 If the expression could be simplified to a constant, then return
14640 the constant. If the expression would not be simplified to a
14641 constant, then return NULL_TREE. */
14644 fold_binary_to_constant (enum tree_code code
, tree type
, tree op0
, tree op1
)
14646 tree tem
= fold_binary (code
, type
, op0
, op1
);
14647 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
14650 /* Given the components of a unary expression CODE, TYPE and OP0,
14651 attempt to fold the expression to a constant without modifying
14654 If the expression could be simplified to a constant, then return
14655 the constant. If the expression would not be simplified to a
14656 constant, then return NULL_TREE. */
14659 fold_unary_to_constant (enum tree_code code
, tree type
, tree op0
)
14661 tree tem
= fold_unary (code
, type
, op0
);
14662 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
14665 /* If EXP represents referencing an element in a constant string
14666 (either via pointer arithmetic or array indexing), return the
14667 tree representing the value accessed, otherwise return NULL. */
14670 fold_read_from_constant_string (tree exp
)
14672 if ((TREE_CODE (exp
) == INDIRECT_REF
14673 || TREE_CODE (exp
) == ARRAY_REF
)
14674 && TREE_CODE (TREE_TYPE (exp
)) == INTEGER_TYPE
)
14676 tree exp1
= TREE_OPERAND (exp
, 0);
14679 location_t loc
= EXPR_LOCATION (exp
);
14681 if (TREE_CODE (exp
) == INDIRECT_REF
)
14682 string
= string_constant (exp1
, &index
, NULL
, NULL
);
14685 tree low_bound
= array_ref_low_bound (exp
);
14686 index
= fold_convert_loc (loc
, sizetype
, TREE_OPERAND (exp
, 1));
14688 /* Optimize the special-case of a zero lower bound.
14690 We convert the low_bound to sizetype to avoid some problems
14691 with constant folding. (E.g. suppose the lower bound is 1,
14692 and its mode is QI. Without the conversion,l (ARRAY
14693 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14694 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
14695 if (! integer_zerop (low_bound
))
14696 index
= size_diffop_loc (loc
, index
,
14697 fold_convert_loc (loc
, sizetype
, low_bound
));
14702 scalar_int_mode char_mode
;
14704 && TYPE_MODE (TREE_TYPE (exp
)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))
14705 && TREE_CODE (string
) == STRING_CST
14706 && tree_fits_uhwi_p (index
)
14707 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
14708 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
))),
14710 && GET_MODE_SIZE (char_mode
) == 1)
14711 return build_int_cst_type (TREE_TYPE (exp
),
14712 (TREE_STRING_POINTER (string
)
14713 [TREE_INT_CST_LOW (index
)]));
14718 /* Folds a read from vector element at IDX of vector ARG. */
14721 fold_read_from_vector (tree arg
, poly_uint64 idx
)
14723 unsigned HOST_WIDE_INT i
;
14724 if (known_lt (idx
, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg
)))
14725 && known_ge (idx
, 0u)
14726 && idx
.is_constant (&i
))
14728 if (TREE_CODE (arg
) == VECTOR_CST
)
14729 return VECTOR_CST_ELT (arg
, i
);
14730 else if (TREE_CODE (arg
) == CONSTRUCTOR
)
14732 if (CONSTRUCTOR_NELTS (arg
)
14733 && VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (arg
, 0)->value
)))
14735 if (i
>= CONSTRUCTOR_NELTS (arg
))
14736 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg
)));
14737 return CONSTRUCTOR_ELT (arg
, i
)->value
;
14743 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14744 an integer constant, real, or fixed-point constant.
14746 TYPE is the type of the result. */
14749 fold_negate_const (tree arg0
, tree type
)
14751 tree t
= NULL_TREE
;
14753 switch (TREE_CODE (arg0
))
14756 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
14761 FIXED_VALUE_TYPE f
;
14762 bool overflow_p
= fixed_arithmetic (&f
, NEGATE_EXPR
,
14763 &(TREE_FIXED_CST (arg0
)), NULL
,
14764 TYPE_SATURATING (type
));
14765 t
= build_fixed (type
, f
);
14766 /* Propagate overflow flags. */
14767 if (overflow_p
| TREE_OVERFLOW (arg0
))
14768 TREE_OVERFLOW (t
) = 1;
14773 if (poly_int_tree_p (arg0
))
14775 wi::overflow_type overflow
;
14776 poly_wide_int res
= wi::neg (wi::to_poly_wide (arg0
), &overflow
);
14777 t
= force_fit_type (type
, res
, 1,
14778 (overflow
&& ! TYPE_UNSIGNED (type
))
14779 || TREE_OVERFLOW (arg0
));
14783 gcc_unreachable ();
14789 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14790 an integer constant or real constant.
14792 TYPE is the type of the result. */
14795 fold_abs_const (tree arg0
, tree type
)
14797 tree t
= NULL_TREE
;
14799 switch (TREE_CODE (arg0
))
14803 /* If the value is unsigned or non-negative, then the absolute value
14804 is the same as the ordinary value. */
14805 wide_int val
= wi::to_wide (arg0
);
14806 wi::overflow_type overflow
= wi::OVF_NONE
;
14807 if (!wi::neg_p (val
, TYPE_SIGN (TREE_TYPE (arg0
))))
14810 /* If the value is negative, then the absolute value is
14813 val
= wi::neg (val
, &overflow
);
14815 /* Force to the destination type, set TREE_OVERFLOW for signed
14817 t
= force_fit_type (type
, val
, 1, overflow
| TREE_OVERFLOW (arg0
));
14822 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0
)))
14823 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
14829 gcc_unreachable ();
14835 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14836 constant. TYPE is the type of the result. */
14839 fold_not_const (const_tree arg0
, tree type
)
14841 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
);
14843 return force_fit_type (type
, ~wi::to_wide (arg0
), 0, TREE_OVERFLOW (arg0
));
14846 /* Given CODE, a relational operator, the target type, TYPE and two
14847 constant operands OP0 and OP1, return the result of the
14848 relational operation. If the result is not a compile time
14849 constant, then return NULL_TREE. */
14852 fold_relational_const (enum tree_code code
, tree type
, tree op0
, tree op1
)
14854 int result
, invert
;
14856 /* From here on, the only cases we handle are when the result is
14857 known to be a constant. */
14859 if (TREE_CODE (op0
) == REAL_CST
&& TREE_CODE (op1
) == REAL_CST
)
14861 const REAL_VALUE_TYPE
*c0
= TREE_REAL_CST_PTR (op0
);
14862 const REAL_VALUE_TYPE
*c1
= TREE_REAL_CST_PTR (op1
);
14864 /* Handle the cases where either operand is a NaN. */
14865 if (real_isnan (c0
) || real_isnan (c1
))
14875 case UNORDERED_EXPR
:
14889 if (flag_trapping_math
)
14895 gcc_unreachable ();
14898 return constant_boolean_node (result
, type
);
14901 return constant_boolean_node (real_compare (code
, c0
, c1
), type
);
14904 if (TREE_CODE (op0
) == FIXED_CST
&& TREE_CODE (op1
) == FIXED_CST
)
14906 const FIXED_VALUE_TYPE
*c0
= TREE_FIXED_CST_PTR (op0
);
14907 const FIXED_VALUE_TYPE
*c1
= TREE_FIXED_CST_PTR (op1
);
14908 return constant_boolean_node (fixed_compare (code
, c0
, c1
), type
);
14911 /* Handle equality/inequality of complex constants. */
14912 if (TREE_CODE (op0
) == COMPLEX_CST
&& TREE_CODE (op1
) == COMPLEX_CST
)
14914 tree rcond
= fold_relational_const (code
, type
,
14915 TREE_REALPART (op0
),
14916 TREE_REALPART (op1
));
14917 tree icond
= fold_relational_const (code
, type
,
14918 TREE_IMAGPART (op0
),
14919 TREE_IMAGPART (op1
));
14920 if (code
== EQ_EXPR
)
14921 return fold_build2 (TRUTH_ANDIF_EXPR
, type
, rcond
, icond
);
14922 else if (code
== NE_EXPR
)
14923 return fold_build2 (TRUTH_ORIF_EXPR
, type
, rcond
, icond
);
14928 if (TREE_CODE (op0
) == VECTOR_CST
&& TREE_CODE (op1
) == VECTOR_CST
)
14930 if (!VECTOR_TYPE_P (type
))
14932 /* Have vector comparison with scalar boolean result. */
14933 gcc_assert ((code
== EQ_EXPR
|| code
== NE_EXPR
)
14934 && known_eq (VECTOR_CST_NELTS (op0
),
14935 VECTOR_CST_NELTS (op1
)));
14936 unsigned HOST_WIDE_INT nunits
;
14937 if (!VECTOR_CST_NELTS (op0
).is_constant (&nunits
))
14939 for (unsigned i
= 0; i
< nunits
; i
++)
14941 tree elem0
= VECTOR_CST_ELT (op0
, i
);
14942 tree elem1
= VECTOR_CST_ELT (op1
, i
);
14943 tree tmp
= fold_relational_const (EQ_EXPR
, type
, elem0
, elem1
);
14944 if (tmp
== NULL_TREE
)
14946 if (integer_zerop (tmp
))
14947 return constant_boolean_node (code
== NE_EXPR
, type
);
14949 return constant_boolean_node (code
== EQ_EXPR
, type
);
14951 tree_vector_builder elts
;
14952 if (!elts
.new_binary_operation (type
, op0
, op1
, false))
14954 unsigned int count
= elts
.encoded_nelts ();
14955 for (unsigned i
= 0; i
< count
; i
++)
14957 tree elem_type
= TREE_TYPE (type
);
14958 tree elem0
= VECTOR_CST_ELT (op0
, i
);
14959 tree elem1
= VECTOR_CST_ELT (op1
, i
);
14961 tree tem
= fold_relational_const (code
, elem_type
,
14964 if (tem
== NULL_TREE
)
14967 elts
.quick_push (build_int_cst (elem_type
,
14968 integer_zerop (tem
) ? 0 : -1));
14971 return elts
.build ();
14974 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14976 To compute GT, swap the arguments and do LT.
14977 To compute GE, do LT and invert the result.
14978 To compute LE, swap the arguments, do LT and invert the result.
14979 To compute NE, do EQ and invert the result.
14981 Therefore, the code below must handle only EQ and LT. */
14983 if (code
== LE_EXPR
|| code
== GT_EXPR
)
14985 std::swap (op0
, op1
);
14986 code
= swap_tree_comparison (code
);
14989 /* Note that it is safe to invert for real values here because we
14990 have already handled the one case that it matters. */
14993 if (code
== NE_EXPR
|| code
== GE_EXPR
)
14996 code
= invert_tree_comparison (code
, false);
14999 /* Compute a result for LT or EQ if args permit;
15000 Otherwise return T. */
15001 if (TREE_CODE (op0
) == INTEGER_CST
&& TREE_CODE (op1
) == INTEGER_CST
)
15003 if (code
== EQ_EXPR
)
15004 result
= tree_int_cst_equal (op0
, op1
);
15006 result
= tree_int_cst_lt (op0
, op1
);
15013 return constant_boolean_node (result
, type
);
15016 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15017 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15021 fold_build_cleanup_point_expr (tree type
, tree expr
)
15023 /* If the expression does not have side effects then we don't have to wrap
15024 it with a cleanup point expression. */
15025 if (!TREE_SIDE_EFFECTS (expr
))
15028 /* If the expression is a return, check to see if the expression inside the
15029 return has no side effects or the right hand side of the modify expression
15030 inside the return. If either don't have side effects set we don't need to
15031 wrap the expression in a cleanup point expression. Note we don't check the
15032 left hand side of the modify because it should always be a return decl. */
15033 if (TREE_CODE (expr
) == RETURN_EXPR
)
15035 tree op
= TREE_OPERAND (expr
, 0);
15036 if (!op
|| !TREE_SIDE_EFFECTS (op
))
15038 op
= TREE_OPERAND (op
, 1);
15039 if (!TREE_SIDE_EFFECTS (op
))
15043 return build1_loc (EXPR_LOCATION (expr
), CLEANUP_POINT_EXPR
, type
, expr
);
15046 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15047 of an indirection through OP0, or NULL_TREE if no simplification is
15051 fold_indirect_ref_1 (location_t loc
, tree type
, tree op0
)
15055 poly_uint64 const_op01
;
15058 subtype
= TREE_TYPE (sub
);
15059 if (!POINTER_TYPE_P (subtype
)
15060 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0
)))
15063 if (TREE_CODE (sub
) == ADDR_EXPR
)
15065 tree op
= TREE_OPERAND (sub
, 0);
15066 tree optype
= TREE_TYPE (op
);
15068 /* *&CONST_DECL -> to the value of the const decl. */
15069 if (TREE_CODE (op
) == CONST_DECL
)
15070 return DECL_INITIAL (op
);
15071 /* *&p => p; make sure to handle *&"str"[cst] here. */
15072 if (type
== optype
)
15074 tree fop
= fold_read_from_constant_string (op
);
15080 /* *(foo *)&fooarray => fooarray[0] */
15081 else if (TREE_CODE (optype
) == ARRAY_TYPE
15082 && type
== TREE_TYPE (optype
)
15083 && (!in_gimple_form
15084 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
15086 tree type_domain
= TYPE_DOMAIN (optype
);
15087 tree min_val
= size_zero_node
;
15088 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
15089 min_val
= TYPE_MIN_VALUE (type_domain
);
15091 && TREE_CODE (min_val
) != INTEGER_CST
)
15093 return build4_loc (loc
, ARRAY_REF
, type
, op
, min_val
,
15094 NULL_TREE
, NULL_TREE
);
15096 /* *(foo *)&complexfoo => __real__ complexfoo */
15097 else if (TREE_CODE (optype
) == COMPLEX_TYPE
15098 && type
== TREE_TYPE (optype
))
15099 return fold_build1_loc (loc
, REALPART_EXPR
, type
, op
);
15100 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15101 else if (VECTOR_TYPE_P (optype
)
15102 && type
== TREE_TYPE (optype
))
15104 tree part_width
= TYPE_SIZE (type
);
15105 tree index
= bitsize_int (0);
15106 return fold_build3_loc (loc
, BIT_FIELD_REF
, type
, op
, part_width
,
15111 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
15112 && poly_int_tree_p (TREE_OPERAND (sub
, 1), &const_op01
))
15114 tree op00
= TREE_OPERAND (sub
, 0);
15115 tree op01
= TREE_OPERAND (sub
, 1);
15118 if (TREE_CODE (op00
) == ADDR_EXPR
)
15121 op00
= TREE_OPERAND (op00
, 0);
15122 op00type
= TREE_TYPE (op00
);
15124 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15125 if (VECTOR_TYPE_P (op00type
)
15126 && type
== TREE_TYPE (op00type
)
15127 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
15128 but we want to treat offsets with MSB set as negative.
15129 For the code below negative offsets are invalid and
15130 TYPE_SIZE of the element is something unsigned, so
15131 check whether op01 fits into poly_int64, which implies
15132 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
15133 then just use poly_uint64 because we want to treat the
15134 value as unsigned. */
15135 && tree_fits_poly_int64_p (op01
))
15137 tree part_width
= TYPE_SIZE (type
);
15138 poly_uint64 max_offset
15139 = (tree_to_uhwi (part_width
) / BITS_PER_UNIT
15140 * TYPE_VECTOR_SUBPARTS (op00type
));
15141 if (known_lt (const_op01
, max_offset
))
15143 tree index
= bitsize_int (const_op01
* BITS_PER_UNIT
);
15144 return fold_build3_loc (loc
,
15145 BIT_FIELD_REF
, type
, op00
,
15146 part_width
, index
);
15149 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15150 else if (TREE_CODE (op00type
) == COMPLEX_TYPE
15151 && type
== TREE_TYPE (op00type
))
15153 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type
)),
15155 return fold_build1_loc (loc
, IMAGPART_EXPR
, type
, op00
);
15157 /* ((foo *)&fooarray)[1] => fooarray[1] */
15158 else if (TREE_CODE (op00type
) == ARRAY_TYPE
15159 && type
== TREE_TYPE (op00type
))
15161 tree type_domain
= TYPE_DOMAIN (op00type
);
15162 tree min_val
= size_zero_node
;
15163 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
15164 min_val
= TYPE_MIN_VALUE (type_domain
);
15165 poly_uint64 type_size
, index
;
15166 if (poly_int_tree_p (min_val
)
15167 && poly_int_tree_p (TYPE_SIZE_UNIT (type
), &type_size
)
15168 && multiple_p (const_op01
, type_size
, &index
))
15170 poly_offset_int off
= index
+ wi::to_poly_offset (min_val
);
15171 op01
= wide_int_to_tree (sizetype
, off
);
15172 return build4_loc (loc
, ARRAY_REF
, type
, op00
, op01
,
15173 NULL_TREE
, NULL_TREE
);
15179 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15180 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
15181 && type
== TREE_TYPE (TREE_TYPE (subtype
))
15182 && (!in_gimple_form
15183 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
15186 tree min_val
= size_zero_node
;
15187 sub
= build_fold_indirect_ref_loc (loc
, sub
);
15188 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
15189 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
15190 min_val
= TYPE_MIN_VALUE (type_domain
);
15192 && TREE_CODE (min_val
) != INTEGER_CST
)
15194 return build4_loc (loc
, ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
,
15201 /* Builds an expression for an indirection through T, simplifying some
15205 build_fold_indirect_ref_loc (location_t loc
, tree t
)
15207 tree type
= TREE_TYPE (TREE_TYPE (t
));
15208 tree sub
= fold_indirect_ref_1 (loc
, type
, t
);
15213 return build1_loc (loc
, INDIRECT_REF
, type
, t
);
15216 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15219 fold_indirect_ref_loc (location_t loc
, tree t
)
15221 tree sub
= fold_indirect_ref_1 (loc
, TREE_TYPE (t
), TREE_OPERAND (t
, 0));
15229 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15230 whose result is ignored. The type of the returned tree need not be
15231 the same as the original expression. */
15234 fold_ignored_result (tree t
)
15236 if (!TREE_SIDE_EFFECTS (t
))
15237 return integer_zero_node
;
15240 switch (TREE_CODE_CLASS (TREE_CODE (t
)))
15243 t
= TREE_OPERAND (t
, 0);
15247 case tcc_comparison
:
15248 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
15249 t
= TREE_OPERAND (t
, 0);
15250 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 0)))
15251 t
= TREE_OPERAND (t
, 1);
15256 case tcc_expression
:
15257 switch (TREE_CODE (t
))
15259 case COMPOUND_EXPR
:
15260 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
15262 t
= TREE_OPERAND (t
, 0);
15266 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1))
15267 || TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 2)))
15269 t
= TREE_OPERAND (t
, 0);
15282 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
15285 round_up_loc (location_t loc
, tree value
, unsigned int divisor
)
15287 tree div
= NULL_TREE
;
15292 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15293 have to do anything. Only do this when we are not given a const,
15294 because in that case, this check is more expensive than just
15296 if (TREE_CODE (value
) != INTEGER_CST
)
15298 div
= build_int_cst (TREE_TYPE (value
), divisor
);
15300 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
15304 /* If divisor is a power of two, simplify this to bit manipulation. */
15305 if (pow2_or_zerop (divisor
))
15307 if (TREE_CODE (value
) == INTEGER_CST
)
15309 wide_int val
= wi::to_wide (value
);
15312 if ((val
& (divisor
- 1)) == 0)
15315 overflow_p
= TREE_OVERFLOW (value
);
15316 val
+= divisor
- 1;
15317 val
&= (int) -divisor
;
15321 return force_fit_type (TREE_TYPE (value
), val
, -1, overflow_p
);
15327 t
= build_int_cst (TREE_TYPE (value
), divisor
- 1);
15328 value
= size_binop_loc (loc
, PLUS_EXPR
, value
, t
);
15329 t
= build_int_cst (TREE_TYPE (value
), - (int) divisor
);
15330 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
15336 div
= build_int_cst (TREE_TYPE (value
), divisor
);
15337 value
= size_binop_loc (loc
, CEIL_DIV_EXPR
, value
, div
);
15338 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
15344 /* Likewise, but round down. */
15347 round_down_loc (location_t loc
, tree value
, int divisor
)
15349 tree div
= NULL_TREE
;
15351 gcc_assert (divisor
> 0);
15355 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15356 have to do anything. Only do this when we are not given a const,
15357 because in that case, this check is more expensive than just
15359 if (TREE_CODE (value
) != INTEGER_CST
)
15361 div
= build_int_cst (TREE_TYPE (value
), divisor
);
15363 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
15367 /* If divisor is a power of two, simplify this to bit manipulation. */
15368 if (pow2_or_zerop (divisor
))
15372 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
15373 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
15378 div
= build_int_cst (TREE_TYPE (value
), divisor
);
15379 value
= size_binop_loc (loc
, FLOOR_DIV_EXPR
, value
, div
);
15380 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
15386 /* Returns the pointer to the base of the object addressed by EXP and
15387 extracts the information about the offset of the access, storing it
15388 to PBITPOS and POFFSET. */
15391 split_address_to_core_and_offset (tree exp
,
15392 poly_int64_pod
*pbitpos
, tree
*poffset
)
15396 int unsignedp
, reversep
, volatilep
;
15397 poly_int64 bitsize
;
15398 location_t loc
= EXPR_LOCATION (exp
);
15400 if (TREE_CODE (exp
) == ADDR_EXPR
)
15402 core
= get_inner_reference (TREE_OPERAND (exp
, 0), &bitsize
, pbitpos
,
15403 poffset
, &mode
, &unsignedp
, &reversep
,
15405 core
= build_fold_addr_expr_loc (loc
, core
);
15407 else if (TREE_CODE (exp
) == POINTER_PLUS_EXPR
)
15409 core
= TREE_OPERAND (exp
, 0);
15412 *poffset
= TREE_OPERAND (exp
, 1);
15413 if (poly_int_tree_p (*poffset
))
15415 poly_offset_int tem
15416 = wi::sext (wi::to_poly_offset (*poffset
),
15417 TYPE_PRECISION (TREE_TYPE (*poffset
)));
15418 tem
<<= LOG2_BITS_PER_UNIT
;
15419 if (tem
.to_shwi (pbitpos
))
15420 *poffset
= NULL_TREE
;
15427 *poffset
= NULL_TREE
;
15433 /* Returns true if addresses of E1 and E2 differ by a constant, false
15434 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15437 ptr_difference_const (tree e1
, tree e2
, poly_int64_pod
*diff
)
15440 poly_int64 bitpos1
, bitpos2
;
15441 tree toffset1
, toffset2
, tdiff
, type
;
15443 core1
= split_address_to_core_and_offset (e1
, &bitpos1
, &toffset1
);
15444 core2
= split_address_to_core_and_offset (e2
, &bitpos2
, &toffset2
);
15446 poly_int64 bytepos1
, bytepos2
;
15447 if (!multiple_p (bitpos1
, BITS_PER_UNIT
, &bytepos1
)
15448 || !multiple_p (bitpos2
, BITS_PER_UNIT
, &bytepos2
)
15449 || !operand_equal_p (core1
, core2
, 0))
15452 if (toffset1
&& toffset2
)
15454 type
= TREE_TYPE (toffset1
);
15455 if (type
!= TREE_TYPE (toffset2
))
15456 toffset2
= fold_convert (type
, toffset2
);
15458 tdiff
= fold_build2 (MINUS_EXPR
, type
, toffset1
, toffset2
);
15459 if (!cst_and_fits_in_hwi (tdiff
))
15462 *diff
= int_cst_value (tdiff
);
15464 else if (toffset1
|| toffset2
)
15466 /* If only one of the offsets is non-constant, the difference cannot
15473 *diff
+= bytepos1
- bytepos2
;
15477 /* Return OFF converted to a pointer offset type suitable as offset for
15478 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
15480 convert_to_ptrofftype_loc (location_t loc
, tree off
)
15482 return fold_convert_loc (loc
, sizetype
, off
);
15485 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
15487 fold_build_pointer_plus_loc (location_t loc
, tree ptr
, tree off
)
15489 return fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (ptr
),
15490 ptr
, convert_to_ptrofftype_loc (loc
, off
));
15493 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
15495 fold_build_pointer_plus_hwi_loc (location_t loc
, tree ptr
, HOST_WIDE_INT off
)
15497 return fold_build2_loc (loc
, POINTER_PLUS_EXPR
, TREE_TYPE (ptr
),
15498 ptr
, size_int (off
));
15501 /* Return a pointer P to a NUL-terminated string containing the sequence
15502 of bytes corresponding to the representation of the object referred to
15503 by SRC (or a subsequence of such bytes within it if SRC is a reference
15504 to an initialized constant array plus some constant offset).
15505 If STRSIZE is non-null, store the number of bytes in the constant
15506 sequence including the terminating NUL byte. *STRSIZE is equal to
15507 sizeof(A) - OFFSET where A is the array that stores the constant
15508 sequence that SRC points to and OFFSET is the byte offset of SRC from
15509 the beginning of A. SRC need not point to a string or even an array
15510 of characters but may point to an object of any type. */
15513 c_getstr (tree src
, unsigned HOST_WIDE_INT
*strsize
/* = NULL */)
15515 /* The offset into the array A storing the string, and A's byte size. */
15522 src
= string_constant (src
, &offset_node
, &mem_size
, NULL
);
15526 unsigned HOST_WIDE_INT offset
= 0;
15527 if (offset_node
!= NULL_TREE
)
15529 if (!tree_fits_uhwi_p (offset_node
))
15532 offset
= tree_to_uhwi (offset_node
);
15535 if (!tree_fits_uhwi_p (mem_size
))
15538 /* ARRAY_SIZE is the byte size of the array the constant sequence
15539 is stored in and equal to sizeof A. INIT_BYTES is the number
15540 of bytes in the constant sequence used to initialize the array,
15541 including any embedded NULs as well as the terminating NUL (for
15542 strings), but not including any trailing zeros/NULs past
15543 the terminating one appended implicitly to a string literal to
15544 zero out the remainder of the array it's stored in. For example,
15546 const char a[7] = "abc\0d";
15547 n = strlen (a + 1);
15548 ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1. For a valid
15549 (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
15550 is equal to strlen (A) + 1. */
15551 const unsigned HOST_WIDE_INT array_size
= tree_to_uhwi (mem_size
);
15552 unsigned HOST_WIDE_INT init_bytes
= TREE_STRING_LENGTH (src
);
15554 /* Ideally this would turn into a gcc_checking_assert over time. */
15555 if (init_bytes
> array_size
)
15556 init_bytes
= array_size
;
15558 const char *string
= TREE_STRING_POINTER (src
);
15560 /* Ideally this would turn into a gcc_checking_assert over time. */
15561 if (init_bytes
> array_size
)
15562 init_bytes
= array_size
;
15564 if (init_bytes
== 0 || offset
>= array_size
)
15569 /* Compute and store the number of characters from the beginning
15570 of the substring at OFFSET to the end, including the terminating
15571 nul. Offsets past the initial length refer to null strings. */
15572 if (offset
< init_bytes
)
15573 *strsize
= init_bytes
- offset
;
15579 tree eltype
= TREE_TYPE (TREE_TYPE (src
));
15580 /* Support only properly NUL-terminated single byte strings. */
15581 if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype
)) != 1)
15583 if (string
[init_bytes
- 1] != '\0')
15587 return offset
< init_bytes
? string
+ offset
: "";
15590 /* Given a tree T, compute which bits in T may be nonzero. */
15593 tree_nonzero_bits (const_tree t
)
15595 switch (TREE_CODE (t
))
15598 return wi::to_wide (t
);
15600 return get_nonzero_bits (t
);
15601 case NON_LVALUE_EXPR
:
15603 return tree_nonzero_bits (TREE_OPERAND (t
, 0));
15605 return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t
, 0)),
15606 tree_nonzero_bits (TREE_OPERAND (t
, 1)));
15609 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t
, 0)),
15610 tree_nonzero_bits (TREE_OPERAND (t
, 1)));
15612 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t
, 1)),
15613 tree_nonzero_bits (TREE_OPERAND (t
, 2)));
15615 return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t
, 0)),
15616 TYPE_PRECISION (TREE_TYPE (t
)),
15617 TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t
, 0))));
15619 if (INTEGRAL_TYPE_P (TREE_TYPE (t
)))
15621 wide_int nzbits1
= tree_nonzero_bits (TREE_OPERAND (t
, 0));
15622 wide_int nzbits2
= tree_nonzero_bits (TREE_OPERAND (t
, 1));
15623 if (wi::bit_and (nzbits1
, nzbits2
) == 0)
15624 return wi::bit_or (nzbits1
, nzbits2
);
15628 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
15630 tree type
= TREE_TYPE (t
);
15631 wide_int nzbits
= tree_nonzero_bits (TREE_OPERAND (t
, 0));
15632 wide_int arg1
= wi::to_wide (TREE_OPERAND (t
, 1),
15633 TYPE_PRECISION (type
));
15634 return wi::neg_p (arg1
)
15635 ? wi::rshift (nzbits
, -arg1
, TYPE_SIGN (type
))
15636 : wi::lshift (nzbits
, arg1
);
15640 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
15642 tree type
= TREE_TYPE (t
);
15643 wide_int nzbits
= tree_nonzero_bits (TREE_OPERAND (t
, 0));
15644 wide_int arg1
= wi::to_wide (TREE_OPERAND (t
, 1),
15645 TYPE_PRECISION (type
));
15646 return wi::neg_p (arg1
)
15647 ? wi::lshift (nzbits
, -arg1
)
15648 : wi::rshift (nzbits
, arg1
, TYPE_SIGN (type
));
15655 return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t
)));
15660 namespace selftest
{
15662 /* Helper functions for writing tests of folding trees. */
15664 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
15667 assert_binop_folds_to_const (tree lhs
, enum tree_code code
, tree rhs
,
15670 ASSERT_EQ (constant
, fold_build2 (code
, TREE_TYPE (lhs
), lhs
, rhs
));
15673 /* Verify that the binary op (LHS CODE RHS) folds to an NON_LVALUE_EXPR
15674 wrapping WRAPPED_EXPR. */
15677 assert_binop_folds_to_nonlvalue (tree lhs
, enum tree_code code
, tree rhs
,
15680 tree result
= fold_build2 (code
, TREE_TYPE (lhs
), lhs
, rhs
);
15681 ASSERT_NE (wrapped_expr
, result
);
15682 ASSERT_EQ (NON_LVALUE_EXPR
, TREE_CODE (result
));
15683 ASSERT_EQ (wrapped_expr
, TREE_OPERAND (result
, 0));
15686 /* Verify that various arithmetic binary operations are folded
15690 test_arithmetic_folding ()
15692 tree type
= integer_type_node
;
15693 tree x
= create_tmp_var_raw (type
, "x");
15694 tree zero
= build_zero_cst (type
);
15695 tree one
= build_int_cst (type
, 1);
15698 /* 1 <-- (0 + 1) */
15699 assert_binop_folds_to_const (zero
, PLUS_EXPR
, one
,
15701 assert_binop_folds_to_const (one
, PLUS_EXPR
, zero
,
15704 /* (nonlvalue)x <-- (x + 0) */
15705 assert_binop_folds_to_nonlvalue (x
, PLUS_EXPR
, zero
,
15709 /* 0 <-- (x - x) */
15710 assert_binop_folds_to_const (x
, MINUS_EXPR
, x
,
15712 assert_binop_folds_to_nonlvalue (x
, MINUS_EXPR
, zero
,
15715 /* Multiplication. */
15716 /* 0 <-- (x * 0) */
15717 assert_binop_folds_to_const (x
, MULT_EXPR
, zero
,
15720 /* (nonlvalue)x <-- (x * 1) */
15721 assert_binop_folds_to_nonlvalue (x
, MULT_EXPR
, one
,
15725 /* Verify that various binary operations on vectors are folded
15729 test_vector_folding ()
15731 tree inner_type
= integer_type_node
;
15732 tree type
= build_vector_type (inner_type
, 4);
15733 tree zero
= build_zero_cst (type
);
15734 tree one
= build_one_cst (type
);
15735 tree index
= build_index_vector (type
, 0, 1);
15737 /* Verify equality tests that return a scalar boolean result. */
15738 tree res_type
= boolean_type_node
;
15739 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR
, res_type
, zero
, one
)));
15740 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR
, res_type
, zero
, zero
)));
15741 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR
, res_type
, zero
, one
)));
15742 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR
, res_type
, one
, one
)));
15743 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR
, res_type
, index
, one
)));
15744 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR
, res_type
,
15746 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR
, res_type
,
15748 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR
, res_type
,
15752 /* Verify folding of VEC_DUPLICATE_EXPRs. */
15755 test_vec_duplicate_folding ()
15757 scalar_int_mode int_mode
= SCALAR_INT_TYPE_MODE (ssizetype
);
15758 machine_mode vec_mode
= targetm
.vectorize
.preferred_simd_mode (int_mode
);
15759 /* This will be 1 if VEC_MODE isn't a vector mode. */
15760 poly_uint64 nunits
= GET_MODE_NUNITS (vec_mode
);
15762 tree type
= build_vector_type (ssizetype
, nunits
);
15763 tree dup5_expr
= fold_unary (VEC_DUPLICATE_EXPR
, type
, ssize_int (5));
15764 tree dup5_cst
= build_vector_from_val (type
, ssize_int (5));
15765 ASSERT_TRUE (operand_equal_p (dup5_expr
, dup5_cst
, 0));
15768 /* Run all of the selftests within this file. */
15771 fold_const_c_tests ()
15773 test_arithmetic_folding ();
15774 test_vector_folding ();
15775 test_vec_duplicate_folding ();
15778 } // namespace selftest
15780 #endif /* CHECKING_P */