1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2020 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
 24   @@ warn if precision et al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "memmodel.h"
53 #include "tm_p.h"
54 #include "tree-ssa-operands.h"
55 #include "optabs-query.h"
56 #include "cgraph.h"
57 #include "diagnostic-core.h"
58 #include "flags.h"
59 #include "alias.h"
60 #include "fold-const.h"
61 #include "fold-const-call.h"
62 #include "stor-layout.h"
63 #include "calls.h"
64 #include "tree-iterator.h"
65 #include "expr.h"
66 #include "intl.h"
67 #include "langhooks.h"
68 #include "tree-eh.h"
69 #include "gimplify.h"
70 #include "tree-dfa.h"
71 #include "builtins.h"
72 #include "generic-match.h"
73 #include "gimple-fold.h"
74 #include "tree-into-ssa.h"
75 #include "md5.h"
76 #include "case-cfn-macros.h"
77 #include "stringpool.h"
78 #include "tree-vrp.h"
79 #include "tree-ssanames.h"
80 #include "selftest.h"
81 #include "stringpool.h"
82 #include "attribs.h"
83 #include "tree-vector-builder.h"
84 #include "vec-perm-indices.h"
85
86 /* Nonzero if we are folding constants inside an initializer; zero
87 otherwise. */
88 int folding_initializer = 0;
89
90 /* The following constants represent a bit based encoding of GCC's
91 comparison operators. This encoding simplifies transformations
92 on relational comparison operators, such as AND and OR. */
93 enum comparison_code {
94 COMPCODE_FALSE = 0,
95 COMPCODE_LT = 1,
96 COMPCODE_EQ = 2,
97 COMPCODE_LE = 3,
98 COMPCODE_GT = 4,
99 COMPCODE_LTGT = 5,
100 COMPCODE_GE = 6,
101 COMPCODE_ORD = 7,
102 COMPCODE_UNORD = 8,
103 COMPCODE_UNLT = 9,
104 COMPCODE_UNEQ = 10,
105 COMPCODE_UNLE = 11,
106 COMPCODE_UNGT = 12,
107 COMPCODE_NE = 13,
108 COMPCODE_UNGE = 14,
109 COMPCODE_TRUE = 15
110 };
111
112 static bool negate_expr_p (tree);
113 static tree negate_expr (tree);
114 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
115 static enum comparison_code comparison_to_compcode (enum tree_code);
116 static enum tree_code compcode_to_comparison (enum comparison_code);
117 static bool twoval_comparison_p (tree, tree *, tree *);
118 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
119 static tree optimize_bit_field_compare (location_t, enum tree_code,
120 tree, tree, tree);
121 static bool simple_operand_p (const_tree);
122 static bool simple_operand_p_2 (tree);
123 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
124 static tree range_predecessor (tree);
125 static tree range_successor (tree);
126 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
127 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
128 static tree unextend (tree, int, int, tree);
129 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
130 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
131 static tree fold_binary_op_with_conditional_arg (location_t,
132 enum tree_code, tree,
133 tree, tree,
134 tree, tree, int);
135 static tree fold_negate_const (tree, tree);
136 static tree fold_not_const (const_tree, tree);
137 static tree fold_relational_const (enum tree_code, tree, tree, tree);
138 static tree fold_convert_const (enum tree_code, tree, tree);
139 static tree fold_view_convert_expr (tree, tree);
140 static tree fold_negate_expr (location_t, tree);
141
142
143 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
144 Otherwise, return LOC. */
145
146 static location_t
147 expr_location_or (tree t, location_t loc)
148 {
149 location_t tloc = EXPR_LOCATION (t);
150 return tloc == UNKNOWN_LOCATION ? loc : tloc;
151 }
152
 153 /* Similar to protected_set_expr_location, but never modify x in place;
 154    if the location can and needs to be set, unshare it. */
155
156 static inline tree
157 protected_set_expr_location_unshare (tree x, location_t loc)
158 {
159 if (CAN_HAVE_LOCATION_P (x)
160 && EXPR_LOCATION (x) != loc
161 && !(TREE_CODE (x) == SAVE_EXPR
162 || TREE_CODE (x) == TARGET_EXPR
163 || TREE_CODE (x) == BIND_EXPR))
164 {
165 x = copy_node (x);
166 SET_EXPR_LOCATION (x, loc);
167 }
168 return x;
169 }
170 \f
171 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
172 division and returns the quotient. Otherwise returns
173 NULL_TREE. */
174
175 tree
176 div_if_zero_remainder (const_tree arg1, const_tree arg2)
177 {
178 widest_int quo;
179
180 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
181 SIGNED, &quo))
182 return wide_int_to_tree (TREE_TYPE (arg1), quo);
183
184 return NULL_TREE;
185 }
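
/* A small usage sketch (32-bit int assumed): calling

     div_if_zero_remainder (build_int_cst (integer_type_node, 12),
			    build_int_cst (integer_type_node, 4))

   would fold to the INTEGER_CST 3, while dividing 12 by 5 returns
   NULL_TREE because the remainder is nonzero.  */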
186 \f
187 /* This is nonzero if we should defer warnings about undefined
188 overflow. This facility exists because these warnings are a
189 special case. The code to estimate loop iterations does not want
190 to issue any warnings, since it works with expressions which do not
191 occur in user code. Various bits of cleanup code call fold(), but
192 only use the result if it has certain characteristics (e.g., is a
193 constant); that code only wants to issue a warning if the result is
194 used. */
195
196 static int fold_deferring_overflow_warnings;
197
198 /* If a warning about undefined overflow is deferred, this is the
199 warning. Note that this may cause us to turn two warnings into
200 one, but that is fine since it is sufficient to only give one
201 warning per expression. */
202
203 static const char* fold_deferred_overflow_warning;
204
205 /* If a warning about undefined overflow is deferred, this is the
206 level at which the warning should be emitted. */
207
208 static enum warn_strict_overflow_code fold_deferred_overflow_code;
209
210 /* Start deferring overflow warnings. We could use a stack here to
211 permit nested calls, but at present it is not necessary. */
212
213 void
214 fold_defer_overflow_warnings (void)
215 {
216 ++fold_deferring_overflow_warnings;
217 }
218
219 /* Stop deferring overflow warnings. If there is a pending warning,
220 and ISSUE is true, then issue the warning if appropriate. STMT is
221 the statement with which the warning should be associated (used for
222 location information); STMT may be NULL. CODE is the level of the
223 warning--a warn_strict_overflow_code value. This function will use
224 the smaller of CODE and the deferred code when deciding whether to
225 issue the warning. CODE may be zero to mean to always use the
226 deferred code. */
227
228 void
229 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
230 {
231 const char *warnmsg;
232 location_t locus;
233
234 gcc_assert (fold_deferring_overflow_warnings > 0);
235 --fold_deferring_overflow_warnings;
236 if (fold_deferring_overflow_warnings > 0)
237 {
238 if (fold_deferred_overflow_warning != NULL
239 && code != 0
240 && code < (int) fold_deferred_overflow_code)
241 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
242 return;
243 }
244
245 warnmsg = fold_deferred_overflow_warning;
246 fold_deferred_overflow_warning = NULL;
247
248 if (!issue || warnmsg == NULL)
249 return;
250
251 if (gimple_no_warning_p (stmt))
252 return;
253
254 /* Use the smallest code level when deciding to issue the
255 warning. */
256 if (code == 0 || code > (int) fold_deferred_overflow_code)
257 code = fold_deferred_overflow_code;
258
259 if (!issue_strict_overflow_warning (code))
260 return;
261
262 if (stmt == NULL)
263 locus = input_location;
264 else
265 locus = gimple_location (stmt);
266 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
267 }
268
269 /* Stop deferring overflow warnings, ignoring any deferred
270 warnings. */
271
272 void
273 fold_undefer_and_ignore_overflow_warnings (void)
274 {
275 fold_undefer_overflow_warnings (false, NULL, 0);
276 }
277
278 /* Whether we are deferring overflow warnings. */
279
280 bool
281 fold_deferring_overflow_warnings_p (void)
282 {
283 return fold_deferring_overflow_warnings > 0;
284 }
285
286 /* This is called when we fold something based on the fact that signed
287 overflow is undefined. */
288
289 void
290 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
291 {
292 if (fold_deferring_overflow_warnings > 0)
293 {
294 if (fold_deferred_overflow_warning == NULL
295 || wc < fold_deferred_overflow_code)
296 {
297 fold_deferred_overflow_warning = gmsgid;
298 fold_deferred_overflow_code = wc;
299 }
300 }
301 else if (issue_strict_overflow_warning (wc))
302 warning (OPT_Wstrict_overflow, gmsgid);
303 }
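
/* A plausible caller pattern for the deferral machinery above, with
   EXPR, STMT and USED_P standing in for caller-specific values:

     fold_defer_overflow_warnings ();
     tree folded = fold (expr);
     ...
     fold_undefer_overflow_warnings (used_p, stmt, 0);

   Passing 0 as the last argument means the deferred warning level is
   used unchanged.  */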
304 \f
 305 /* Return true if the built-in mathematical function specified by FN
306 is odd, i.e. -f(x) == f(-x). */
307
308 bool
309 negate_mathfn_p (combined_fn fn)
310 {
311 switch (fn)
312 {
313 CASE_CFN_ASIN:
314 CASE_CFN_ASINH:
315 CASE_CFN_ATAN:
316 CASE_CFN_ATANH:
317 CASE_CFN_CASIN:
318 CASE_CFN_CASINH:
319 CASE_CFN_CATAN:
320 CASE_CFN_CATANH:
321 CASE_CFN_CBRT:
322 CASE_CFN_CPROJ:
323 CASE_CFN_CSIN:
324 CASE_CFN_CSINH:
325 CASE_CFN_CTAN:
326 CASE_CFN_CTANH:
327 CASE_CFN_ERF:
328 CASE_CFN_LLROUND:
329 CASE_CFN_LROUND:
330 CASE_CFN_ROUND:
331 CASE_CFN_ROUNDEVEN:
332 CASE_CFN_ROUNDEVEN_FN:
333 CASE_CFN_SIN:
334 CASE_CFN_SINH:
335 CASE_CFN_TAN:
336 CASE_CFN_TANH:
337 CASE_CFN_TRUNC:
338 return true;
339
340 CASE_CFN_LLRINT:
341 CASE_CFN_LRINT:
342 CASE_CFN_NEARBYINT:
343 CASE_CFN_RINT:
344 return !flag_rounding_math;
345
346 default:
347 break;
348 }
349 return false;
350 }
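
/* For instance, negate_mathfn_p (CFN_BUILT_IN_SIN) is true because
   sin (-x) == -sin (x), so -sin (x) may be folded to sin (-x).  An even
   function such as cos is not listed above, so CFN_BUILT_IN_COS is
   false.  */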
351
352 /* Check whether we may negate an integer constant T without causing
353 overflow. */
354
355 bool
356 may_negate_without_overflow_p (const_tree t)
357 {
358 tree type;
359
360 gcc_assert (TREE_CODE (t) == INTEGER_CST);
361
362 type = TREE_TYPE (t);
363 if (TYPE_UNSIGNED (type))
364 return false;
365
366 return !wi::only_sign_bit_p (wi::to_wide (t));
367 }
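
/* Concretely, for a 32-bit signed int INTEGER_CST the only value
   rejected by the sign-bit test is INT_MIN, whose negation is not
   representable; constants of unsigned type are always rejected up
   front.  */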
368
369 /* Determine whether an expression T can be cheaply negated using
370 the function negate_expr without introducing undefined overflow. */
371
372 static bool
373 negate_expr_p (tree t)
374 {
375 tree type;
376
377 if (t == 0)
378 return false;
379
380 type = TREE_TYPE (t);
381
382 STRIP_SIGN_NOPS (t);
383 switch (TREE_CODE (t))
384 {
385 case INTEGER_CST:
386 if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
387 return true;
388
389 /* Check that -CST will not overflow type. */
390 return may_negate_without_overflow_p (t);
391 case BIT_NOT_EXPR:
392 return (INTEGRAL_TYPE_P (type)
393 && TYPE_OVERFLOW_WRAPS (type));
394
395 case FIXED_CST:
396 return true;
397
398 case NEGATE_EXPR:
399 return !TYPE_OVERFLOW_SANITIZED (type);
400
401 case REAL_CST:
402 /* We want to canonicalize to positive real constants. Pretend
403 that only negative ones can be easily negated. */
404 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
405
406 case COMPLEX_CST:
407 return negate_expr_p (TREE_REALPART (t))
408 && negate_expr_p (TREE_IMAGPART (t));
409
410 case VECTOR_CST:
411 {
412 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
413 return true;
414
415 /* Steps don't prevent negation. */
416 unsigned int count = vector_cst_encoded_nelts (t);
417 for (unsigned int i = 0; i < count; ++i)
418 if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
419 return false;
420
421 return true;
422 }
423
424 case COMPLEX_EXPR:
425 return negate_expr_p (TREE_OPERAND (t, 0))
426 && negate_expr_p (TREE_OPERAND (t, 1));
427
428 case CONJ_EXPR:
429 return negate_expr_p (TREE_OPERAND (t, 0));
430
431 case PLUS_EXPR:
432 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
433 || HONOR_SIGNED_ZEROS (element_mode (type))
434 || (ANY_INTEGRAL_TYPE_P (type)
435 && ! TYPE_OVERFLOW_WRAPS (type)))
436 return false;
437 /* -(A + B) -> (-B) - A. */
438 if (negate_expr_p (TREE_OPERAND (t, 1)))
439 return true;
440 /* -(A + B) -> (-A) - B. */
441 return negate_expr_p (TREE_OPERAND (t, 0));
442
443 case MINUS_EXPR:
444 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
445 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
446 && !HONOR_SIGNED_ZEROS (element_mode (type))
447 && (! ANY_INTEGRAL_TYPE_P (type)
448 || TYPE_OVERFLOW_WRAPS (type));
449
450 case MULT_EXPR:
451 if (TYPE_UNSIGNED (type))
452 break;
 453       /* INT_MIN/n * n doesn't overflow, but negating one of its operands
 454	  does if n is a (negative) power of two. */
455 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
456 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
457 && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
458 && (wi::popcount
459 (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
460 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
461 && (wi::popcount
462 (wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
463 break;
464
465 /* Fall through. */
466
467 case RDIV_EXPR:
468 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
469 return negate_expr_p (TREE_OPERAND (t, 1))
470 || negate_expr_p (TREE_OPERAND (t, 0));
471 break;
472
473 case TRUNC_DIV_EXPR:
474 case ROUND_DIV_EXPR:
475 case EXACT_DIV_EXPR:
476 if (TYPE_UNSIGNED (type))
477 break;
478 /* In general we can't negate A in A / B, because if A is INT_MIN and
479 B is not 1 we change the sign of the result. */
480 if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
481 && negate_expr_p (TREE_OPERAND (t, 0)))
482 return true;
483 /* In general we can't negate B in A / B, because if A is INT_MIN and
484 B is 1, we may turn this into INT_MIN / -1 which is undefined
485 and actually traps on some architectures. */
486 if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
487 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
488 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
489 && ! integer_onep (TREE_OPERAND (t, 1))))
490 return negate_expr_p (TREE_OPERAND (t, 1));
491 break;
492
493 case NOP_EXPR:
494 /* Negate -((double)float) as (double)(-float). */
495 if (TREE_CODE (type) == REAL_TYPE)
496 {
497 tree tem = strip_float_extensions (t);
498 if (tem != t)
499 return negate_expr_p (tem);
500 }
501 break;
502
503 case CALL_EXPR:
504 /* Negate -f(x) as f(-x). */
505 if (negate_mathfn_p (get_call_combined_fn (t)))
506 return negate_expr_p (CALL_EXPR_ARG (t, 0));
507 break;
508
509 case RSHIFT_EXPR:
510 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
511 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
512 {
513 tree op1 = TREE_OPERAND (t, 1);
514 if (wi::to_wide (op1) == element_precision (type) - 1)
515 return true;
516 }
517 break;
518
519 default:
520 break;
521 }
522 return false;
523 }
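
/* Examples of the above: the REAL_CST case is deliberately asymmetric,
   so negate_expr_p is true for -1.5 but false for 1.5, keeping folding
   biased toward positive real constants; MINUS_EXPR on a plain signed
   int (no -fwrapv) reports false because the integral case requires
   TYPE_OVERFLOW_WRAPS, while unsigned or wrapping types report true.  */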
524
 525 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
 526    simplification is possible.
527 If negate_expr_p would return true for T, NULL_TREE will never be
528 returned. */
529
530 static tree
531 fold_negate_expr_1 (location_t loc, tree t)
532 {
533 tree type = TREE_TYPE (t);
534 tree tem;
535
536 switch (TREE_CODE (t))
537 {
538 /* Convert - (~A) to A + 1. */
539 case BIT_NOT_EXPR:
540 if (INTEGRAL_TYPE_P (type))
541 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
542 build_one_cst (type));
543 break;
544
545 case INTEGER_CST:
546 tem = fold_negate_const (t, type);
547 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
548 || (ANY_INTEGRAL_TYPE_P (type)
549 && !TYPE_OVERFLOW_TRAPS (type)
550 && TYPE_OVERFLOW_WRAPS (type))
551 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
552 return tem;
553 break;
554
555 case POLY_INT_CST:
556 case REAL_CST:
557 case FIXED_CST:
558 tem = fold_negate_const (t, type);
559 return tem;
560
561 case COMPLEX_CST:
562 {
563 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
564 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
565 if (rpart && ipart)
566 return build_complex (type, rpart, ipart);
567 }
568 break;
569
570 case VECTOR_CST:
571 {
572 tree_vector_builder elts;
573 elts.new_unary_operation (type, t, true);
574 unsigned int count = elts.encoded_nelts ();
575 for (unsigned int i = 0; i < count; ++i)
576 {
577 tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
578 if (elt == NULL_TREE)
579 return NULL_TREE;
580 elts.quick_push (elt);
581 }
582
583 return elts.build ();
584 }
585
586 case COMPLEX_EXPR:
587 if (negate_expr_p (t))
588 return fold_build2_loc (loc, COMPLEX_EXPR, type,
589 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
590 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
591 break;
592
593 case CONJ_EXPR:
594 if (negate_expr_p (t))
595 return fold_build1_loc (loc, CONJ_EXPR, type,
596 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
597 break;
598
599 case NEGATE_EXPR:
600 if (!TYPE_OVERFLOW_SANITIZED (type))
601 return TREE_OPERAND (t, 0);
602 break;
603
604 case PLUS_EXPR:
605 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
606 && !HONOR_SIGNED_ZEROS (element_mode (type)))
607 {
608 /* -(A + B) -> (-B) - A. */
609 if (negate_expr_p (TREE_OPERAND (t, 1)))
610 {
611 tem = negate_expr (TREE_OPERAND (t, 1));
612 return fold_build2_loc (loc, MINUS_EXPR, type,
613 tem, TREE_OPERAND (t, 0));
614 }
615
616 /* -(A + B) -> (-A) - B. */
617 if (negate_expr_p (TREE_OPERAND (t, 0)))
618 {
619 tem = negate_expr (TREE_OPERAND (t, 0));
620 return fold_build2_loc (loc, MINUS_EXPR, type,
621 tem, TREE_OPERAND (t, 1));
622 }
623 }
624 break;
625
626 case MINUS_EXPR:
627 /* - (A - B) -> B - A */
628 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
629 && !HONOR_SIGNED_ZEROS (element_mode (type)))
630 return fold_build2_loc (loc, MINUS_EXPR, type,
631 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
632 break;
633
634 case MULT_EXPR:
635 if (TYPE_UNSIGNED (type))
636 break;
637
638 /* Fall through. */
639
640 case RDIV_EXPR:
641 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
642 {
643 tem = TREE_OPERAND (t, 1);
644 if (negate_expr_p (tem))
645 return fold_build2_loc (loc, TREE_CODE (t), type,
646 TREE_OPERAND (t, 0), negate_expr (tem));
647 tem = TREE_OPERAND (t, 0);
648 if (negate_expr_p (tem))
649 return fold_build2_loc (loc, TREE_CODE (t), type,
650 negate_expr (tem), TREE_OPERAND (t, 1));
651 }
652 break;
653
654 case TRUNC_DIV_EXPR:
655 case ROUND_DIV_EXPR:
656 case EXACT_DIV_EXPR:
657 if (TYPE_UNSIGNED (type))
658 break;
659 /* In general we can't negate A in A / B, because if A is INT_MIN and
660 B is not 1 we change the sign of the result. */
661 if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
662 && negate_expr_p (TREE_OPERAND (t, 0)))
663 return fold_build2_loc (loc, TREE_CODE (t), type,
664 negate_expr (TREE_OPERAND (t, 0)),
665 TREE_OPERAND (t, 1));
666 /* In general we can't negate B in A / B, because if A is INT_MIN and
667 B is 1, we may turn this into INT_MIN / -1 which is undefined
668 and actually traps on some architectures. */
669 if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
670 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
671 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
672 && ! integer_onep (TREE_OPERAND (t, 1))))
673 && negate_expr_p (TREE_OPERAND (t, 1)))
674 return fold_build2_loc (loc, TREE_CODE (t), type,
675 TREE_OPERAND (t, 0),
676 negate_expr (TREE_OPERAND (t, 1)));
677 break;
678
679 case NOP_EXPR:
680 /* Convert -((double)float) into (double)(-float). */
681 if (TREE_CODE (type) == REAL_TYPE)
682 {
683 tem = strip_float_extensions (t);
684 if (tem != t && negate_expr_p (tem))
685 return fold_convert_loc (loc, type, negate_expr (tem));
686 }
687 break;
688
689 case CALL_EXPR:
690 /* Negate -f(x) as f(-x). */
691 if (negate_mathfn_p (get_call_combined_fn (t))
692 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
693 {
694 tree fndecl, arg;
695
696 fndecl = get_callee_fndecl (t);
697 arg = negate_expr (CALL_EXPR_ARG (t, 0));
698 return build_call_expr_loc (loc, fndecl, 1, arg);
699 }
700 break;
701
702 case RSHIFT_EXPR:
703 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
704 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
705 {
706 tree op1 = TREE_OPERAND (t, 1);
707 if (wi::to_wide (op1) == element_precision (type) - 1)
708 {
709 tree ntype = TYPE_UNSIGNED (type)
710 ? signed_type_for (type)
711 : unsigned_type_for (type);
712 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
713 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
714 return fold_convert_loc (loc, type, temp);
715 }
716 }
717 break;
718
719 default:
720 break;
721 }
722
723 return NULL_TREE;
724 }
725
726 /* A wrapper for fold_negate_expr_1. */
727
728 static tree
729 fold_negate_expr (location_t loc, tree t)
730 {
731 tree type = TREE_TYPE (t);
732 STRIP_SIGN_NOPS (t);
733 tree tem = fold_negate_expr_1 (loc, t);
734 if (tem == NULL_TREE)
735 return NULL_TREE;
736 return fold_convert_loc (loc, type, tem);
737 }
738
 739 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
 740    negated in a simpler way. Also allow for T to be NULL_TREE, in which case
741 return NULL_TREE. */
742
743 static tree
744 negate_expr (tree t)
745 {
746 tree type, tem;
747 location_t loc;
748
749 if (t == NULL_TREE)
750 return NULL_TREE;
751
752 loc = EXPR_LOCATION (t);
753 type = TREE_TYPE (t);
754 STRIP_SIGN_NOPS (t);
755
756 tem = fold_negate_expr (loc, t);
757 if (!tem)
758 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
759 return fold_convert_loc (loc, type, tem);
760 }
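
/* Unlike fold_negate_expr, negate_expr never yields NULL_TREE for a
   non-null argument: e.g. the INTEGER_CST 5 folds to -5, while a plain
   variable x simply gets wrapped in a NEGATE_EXPR.  */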
761 \f
 762 /* Split a tree IN into constant, literal and variable parts that could be
763 combined with CODE to make IN. "constant" means an expression with
764 TREE_CONSTANT but that isn't an actual constant. CODE must be a
765 commutative arithmetic operation. Store the constant part into *CONP,
766 the literal in *LITP and return the variable part. If a part isn't
767 present, set it to null. If the tree does not decompose in this way,
768 return the entire tree as the variable part and the other parts as null.
769
770 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
 771    case, we negate an operand that was subtracted, except if it is a
 772    literal, for which we use *MINUS_LITP instead.
773
774 If NEGATE_P is true, we are negating all of IN, again except a literal
775 for which we use *MINUS_LITP instead. If a variable part is of pointer
776 type, it is negated after converting to TYPE. This prevents us from
 777    generating an illegal MINUS pointer expression. LOC is the location of
778 the converted variable part.
779
780 If IN is itself a literal or constant, return it as appropriate.
781
782 Note that we do not guarantee that any of the three values will be the
783 same type as IN, but they will have the same signedness and mode. */
784
785 static tree
786 split_tree (tree in, tree type, enum tree_code code,
787 tree *minus_varp, tree *conp, tree *minus_conp,
788 tree *litp, tree *minus_litp, int negate_p)
789 {
790 tree var = 0;
791 *minus_varp = 0;
792 *conp = 0;
793 *minus_conp = 0;
794 *litp = 0;
795 *minus_litp = 0;
796
797 /* Strip any conversions that don't change the machine mode or signedness. */
798 STRIP_SIGN_NOPS (in);
799
800 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
801 || TREE_CODE (in) == FIXED_CST)
802 *litp = in;
803 else if (TREE_CODE (in) == code
804 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
805 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
806 /* We can associate addition and subtraction together (even
807 though the C standard doesn't say so) for integers because
808 the value is not affected. For reals, the value might be
809 affected, so we can't. */
810 && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
811 || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
812 || (code == MINUS_EXPR
813 && (TREE_CODE (in) == PLUS_EXPR
814 || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
815 {
816 tree op0 = TREE_OPERAND (in, 0);
817 tree op1 = TREE_OPERAND (in, 1);
818 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
819 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
820
821 /* First see if either of the operands is a literal, then a constant. */
822 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
823 || TREE_CODE (op0) == FIXED_CST)
824 *litp = op0, op0 = 0;
825 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
826 || TREE_CODE (op1) == FIXED_CST)
827 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
828
829 if (op0 != 0 && TREE_CONSTANT (op0))
830 *conp = op0, op0 = 0;
831 else if (op1 != 0 && TREE_CONSTANT (op1))
832 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
833
834 /* If we haven't dealt with either operand, this is not a case we can
835 decompose. Otherwise, VAR is either of the ones remaining, if any. */
836 if (op0 != 0 && op1 != 0)
837 var = in;
838 else if (op0 != 0)
839 var = op0;
840 else
841 var = op1, neg_var_p = neg1_p;
842
843 /* Now do any needed negations. */
844 if (neg_litp_p)
845 *minus_litp = *litp, *litp = 0;
846 if (neg_conp_p && *conp)
847 *minus_conp = *conp, *conp = 0;
848 if (neg_var_p && var)
849 *minus_varp = var, var = 0;
850 }
851 else if (TREE_CONSTANT (in))
852 *conp = in;
853 else if (TREE_CODE (in) == BIT_NOT_EXPR
854 && code == PLUS_EXPR)
855 {
856 /* -1 - X is folded to ~X, undo that here. Do _not_ do this
857 when IN is constant. */
858 *litp = build_minus_one_cst (type);
859 *minus_varp = TREE_OPERAND (in, 0);
860 }
861 else
862 var = in;
863
864 if (negate_p)
865 {
866 if (*litp)
867 *minus_litp = *litp, *litp = 0;
868 else if (*minus_litp)
869 *litp = *minus_litp, *minus_litp = 0;
870 if (*conp)
871 *minus_conp = *conp, *conp = 0;
872 else if (*minus_conp)
873 *conp = *minus_conp, *minus_conp = 0;
874 if (var)
875 *minus_varp = var, var = 0;
876 else if (*minus_varp)
877 var = *minus_varp, *minus_varp = 0;
878 }
879
880 if (*litp
881 && TREE_OVERFLOW_P (*litp))
882 *litp = drop_tree_overflow (*litp);
883 if (*minus_litp
884 && TREE_OVERFLOW_P (*minus_litp))
885 *minus_litp = drop_tree_overflow (*minus_litp);
886
887 return var;
888 }
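
/* A worked example of the decomposition above, with CODE == PLUS_EXPR
   and NEGATE_P false:

     IN = x + 5   =>  *LITP = 5, variable part x returned
     IN = x - 5   =>  *MINUS_LITP = 5, variable part x returned
     IN = ~x      =>  *LITP = -1, *MINUS_VARP = x (undoing -1 - x == ~x)

   with the remaining output parts left null.  */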
889
890 /* Re-associate trees split by the above function. T1 and T2 are
891 either expressions to associate or null. Return the new
892 expression, if any. LOC is the location of the new expression. If
893 we build an operation, do it in TYPE and with CODE. */
894
895 static tree
896 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
897 {
898 if (t1 == 0)
899 {
900 gcc_assert (t2 == 0 || code != MINUS_EXPR);
901 return t2;
902 }
903 else if (t2 == 0)
904 return t1;
905
906 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
907 try to fold this since we will have infinite recursion. But do
908 deal with any NEGATE_EXPRs. */
909 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
910 || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
911 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
912 {
913 if (code == PLUS_EXPR)
914 {
915 if (TREE_CODE (t1) == NEGATE_EXPR)
916 return build2_loc (loc, MINUS_EXPR, type,
917 fold_convert_loc (loc, type, t2),
918 fold_convert_loc (loc, type,
919 TREE_OPERAND (t1, 0)));
920 else if (TREE_CODE (t2) == NEGATE_EXPR)
921 return build2_loc (loc, MINUS_EXPR, type,
922 fold_convert_loc (loc, type, t1),
923 fold_convert_loc (loc, type,
924 TREE_OPERAND (t2, 0)));
925 else if (integer_zerop (t2))
926 return fold_convert_loc (loc, type, t1);
927 }
928 else if (code == MINUS_EXPR)
929 {
930 if (integer_zerop (t2))
931 return fold_convert_loc (loc, type, t1);
932 }
933
934 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
935 fold_convert_loc (loc, type, t2));
936 }
937
938 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
939 fold_convert_loc (loc, type, t2));
940 }
941 \f
942 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
943 for use in int_const_binop, size_binop and size_diffop. */
944
945 static bool
946 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
947 {
948 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
949 return false;
950 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
951 return false;
952
953 switch (code)
954 {
955 case LSHIFT_EXPR:
956 case RSHIFT_EXPR:
957 case LROTATE_EXPR:
958 case RROTATE_EXPR:
959 return true;
960
961 default:
962 break;
963 }
964
965 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
966 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
967 && TYPE_MODE (type1) == TYPE_MODE (type2);
968 }
969
970 /* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
971 a new constant in RES. Return FALSE if we don't know how to
972 evaluate CODE at compile-time. */
973
974 bool
975 wide_int_binop (wide_int &res,
976 enum tree_code code, const wide_int &arg1, const wide_int &arg2,
977 signop sign, wi::overflow_type *overflow)
978 {
979 wide_int tmp;
980 *overflow = wi::OVF_NONE;
981 switch (code)
982 {
983 case BIT_IOR_EXPR:
984 res = wi::bit_or (arg1, arg2);
985 break;
986
987 case BIT_XOR_EXPR:
988 res = wi::bit_xor (arg1, arg2);
989 break;
990
991 case BIT_AND_EXPR:
992 res = wi::bit_and (arg1, arg2);
993 break;
994
995 case RSHIFT_EXPR:
996 case LSHIFT_EXPR:
997 if (wi::neg_p (arg2))
998 {
999 tmp = -arg2;
1000 if (code == RSHIFT_EXPR)
1001 code = LSHIFT_EXPR;
1002 else
1003 code = RSHIFT_EXPR;
1004 }
1005 else
1006 tmp = arg2;
1007
1008 if (code == RSHIFT_EXPR)
1009 /* It's unclear from the C standard whether shifts can overflow.
1010 The following code ignores overflow; perhaps a C standard
1011 interpretation ruling is needed. */
1012 res = wi::rshift (arg1, tmp, sign);
1013 else
1014 res = wi::lshift (arg1, tmp);
1015 break;
1016
1017 case RROTATE_EXPR:
1018 case LROTATE_EXPR:
1019 if (wi::neg_p (arg2))
1020 {
1021 tmp = -arg2;
1022 if (code == RROTATE_EXPR)
1023 code = LROTATE_EXPR;
1024 else
1025 code = RROTATE_EXPR;
1026 }
1027 else
1028 tmp = arg2;
1029
1030 if (code == RROTATE_EXPR)
1031 res = wi::rrotate (arg1, tmp);
1032 else
1033 res = wi::lrotate (arg1, tmp);
1034 break;
1035
1036 case PLUS_EXPR:
1037 res = wi::add (arg1, arg2, sign, overflow);
1038 break;
1039
1040 case MINUS_EXPR:
1041 res = wi::sub (arg1, arg2, sign, overflow);
1042 break;
1043
1044 case MULT_EXPR:
1045 res = wi::mul (arg1, arg2, sign, overflow);
1046 break;
1047
1048 case MULT_HIGHPART_EXPR:
1049 res = wi::mul_high (arg1, arg2, sign);
1050 break;
1051
1052 case TRUNC_DIV_EXPR:
1053 case EXACT_DIV_EXPR:
1054 if (arg2 == 0)
1055 return false;
1056 res = wi::div_trunc (arg1, arg2, sign, overflow);
1057 break;
1058
1059 case FLOOR_DIV_EXPR:
1060 if (arg2 == 0)
1061 return false;
1062 res = wi::div_floor (arg1, arg2, sign, overflow);
1063 break;
1064
1065 case CEIL_DIV_EXPR:
1066 if (arg2 == 0)
1067 return false;
1068 res = wi::div_ceil (arg1, arg2, sign, overflow);
1069 break;
1070
1071 case ROUND_DIV_EXPR:
1072 if (arg2 == 0)
1073 return false;
1074 res = wi::div_round (arg1, arg2, sign, overflow);
1075 break;
1076
1077 case TRUNC_MOD_EXPR:
1078 if (arg2 == 0)
1079 return false;
1080 res = wi::mod_trunc (arg1, arg2, sign, overflow);
1081 break;
1082
1083 case FLOOR_MOD_EXPR:
1084 if (arg2 == 0)
1085 return false;
1086 res = wi::mod_floor (arg1, arg2, sign, overflow);
1087 break;
1088
1089 case CEIL_MOD_EXPR:
1090 if (arg2 == 0)
1091 return false;
1092 res = wi::mod_ceil (arg1, arg2, sign, overflow);
1093 break;
1094
1095 case ROUND_MOD_EXPR:
1096 if (arg2 == 0)
1097 return false;
1098 res = wi::mod_round (arg1, arg2, sign, overflow);
1099 break;
1100
1101 case MIN_EXPR:
1102 res = wi::min (arg1, arg2, sign);
1103 break;
1104
1105 case MAX_EXPR:
1106 res = wi::max (arg1, arg2, sign);
1107 break;
1108
1109 default:
1110 return false;
1111 }
1112 return true;
1113 }
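
/* Note how negative shift and rotate counts are canonicalized above:
   an RSHIFT_EXPR by -3, say, is evaluated as an LSHIFT_EXPR by 3.
   All division and modulus cases refuse a zero second operand by
   returning false, so callers must check the return value before
   using RES.  */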
1114
 1115 /* Combine two poly_ints ARG1 and ARG2 under operation CODE to
1116 produce a new constant in RES. Return FALSE if we don't know how
1117 to evaluate CODE at compile-time. */
1118
1119 static bool
1120 poly_int_binop (poly_wide_int &res, enum tree_code code,
1121 const_tree arg1, const_tree arg2,
1122 signop sign, wi::overflow_type *overflow)
1123 {
1124 gcc_assert (NUM_POLY_INT_COEFFS != 1);
1125 gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
1126 switch (code)
1127 {
1128 case PLUS_EXPR:
1129 res = wi::add (wi::to_poly_wide (arg1),
1130 wi::to_poly_wide (arg2), sign, overflow);
1131 break;
1132
1133 case MINUS_EXPR:
1134 res = wi::sub (wi::to_poly_wide (arg1),
1135 wi::to_poly_wide (arg2), sign, overflow);
1136 break;
1137
1138 case MULT_EXPR:
1139 if (TREE_CODE (arg2) == INTEGER_CST)
1140 res = wi::mul (wi::to_poly_wide (arg1),
1141 wi::to_wide (arg2), sign, overflow);
1142 else if (TREE_CODE (arg1) == INTEGER_CST)
1143 res = wi::mul (wi::to_poly_wide (arg2),
1144 wi::to_wide (arg1), sign, overflow);
1145 else
 1146 	return false;
1147 break;
1148
1149 case LSHIFT_EXPR:
1150 if (TREE_CODE (arg2) == INTEGER_CST)
1151 res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
1152 else
1153 return false;
1154 break;
1155
1156 case BIT_IOR_EXPR:
1157 if (TREE_CODE (arg2) != INTEGER_CST
1158 || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
1159 &res))
1160 return false;
1161 break;
1162
1163 default:
1164 return false;
1165 }
1166 return true;
1167 }
1168
1169 /* Combine two integer constants ARG1 and ARG2 under operation CODE to
1170 produce a new constant. Return NULL_TREE if we don't know how to
1171 evaluate CODE at compile-time. */
1172
1173 tree
1174 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
1175 int overflowable)
1176 {
1177 poly_wide_int poly_res;
1178 tree type = TREE_TYPE (arg1);
1179 signop sign = TYPE_SIGN (type);
1180 wi::overflow_type overflow = wi::OVF_NONE;
1181
1182 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1183 {
1184 wide_int warg1 = wi::to_wide (arg1), res;
1185 wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
1186 if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
1187 return NULL_TREE;
1188 poly_res = res;
1189 }
1190 else if (!poly_int_tree_p (arg1)
1191 || !poly_int_tree_p (arg2)
1192 || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
1193 return NULL_TREE;
1194 return force_fit_type (type, poly_res, overflowable,
1195 (((sign == SIGNED || overflowable == -1)
1196 && overflow)
1197 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
1198 }
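
/* For instance, applying int_const_binop with PLUS_EXPR to the int
   INTEGER_CSTs 2 and 3 yields the INTEGER_CST 5; if a signed addition
   wraps, force_fit_type still builds the constant but may mark it with
   TREE_OVERFLOW, depending on OVERFLOWABLE.  */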
1199
1200 /* Return true if binary operation OP distributes over addition in operand
1201 OPNO, with the other operand being held constant. OPNO counts from 1. */
1202
1203 static bool
1204 distributes_over_addition_p (tree_code op, int opno)
1205 {
1206 switch (op)
1207 {
1208 case PLUS_EXPR:
1209 case MINUS_EXPR:
1210 case MULT_EXPR:
1211 return true;
1212
1213 case LSHIFT_EXPR:
1214 return opno == 1;
1215
1216 default:
1217 return false;
1218 }
1219 }
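
/* For example, (a + b) << c == (a << c) + (b << c), so LSHIFT_EXPR
   distributes over addition in operand 1; a << (b + c) is not
   (a << b) + (a << c), so it does not distribute in operand 2, which
   is what the opno == 1 test above encodes.  */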
1220
1221 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1222 constant. We assume ARG1 and ARG2 have the same data type, or at least
1223 are the same kind of constant and the same machine mode. Return zero if
1224 combining the constants is not allowed in the current operating mode. */
1225
1226 static tree
1227 const_binop (enum tree_code code, tree arg1, tree arg2)
1228 {
1229 /* Sanity check for the recursive cases. */
1230 if (!arg1 || !arg2)
1231 return NULL_TREE;
1232
1233 STRIP_NOPS (arg1);
1234 STRIP_NOPS (arg2);
1235
1236 if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1237 {
1238 if (code == POINTER_PLUS_EXPR)
1239 return int_const_binop (PLUS_EXPR,
1240 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1241
1242 return int_const_binop (code, arg1, arg2);
1243 }
1244
1245 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1246 {
1247 machine_mode mode;
1248 REAL_VALUE_TYPE d1;
1249 REAL_VALUE_TYPE d2;
1250 REAL_VALUE_TYPE value;
1251 REAL_VALUE_TYPE result;
1252 bool inexact;
1253 tree t, type;
1254
1255 /* The following codes are handled by real_arithmetic. */
1256 switch (code)
1257 {
1258 case PLUS_EXPR:
1259 case MINUS_EXPR:
1260 case MULT_EXPR:
1261 case RDIV_EXPR:
1262 case MIN_EXPR:
1263 case MAX_EXPR:
1264 break;
1265
1266 default:
1267 return NULL_TREE;
1268 }
1269
1270 d1 = TREE_REAL_CST (arg1);
1271 d2 = TREE_REAL_CST (arg2);
1272
1273 type = TREE_TYPE (arg1);
1274 mode = TYPE_MODE (type);
1275
1276 /* Don't perform operation if we honor signaling NaNs and
1277 either operand is a signaling NaN. */
1278 if (HONOR_SNANS (mode)
1279 && (REAL_VALUE_ISSIGNALING_NAN (d1)
1280 || REAL_VALUE_ISSIGNALING_NAN (d2)))
1281 return NULL_TREE;
1282
1283 /* Don't perform operation if it would raise a division
1284 by zero exception. */
1285 if (code == RDIV_EXPR
1286 && real_equal (&d2, &dconst0)
1287 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1288 return NULL_TREE;
1289
1290 /* If either operand is a NaN, just return it. Otherwise, set up
1291 for floating-point trap; we return an overflow. */
1292 if (REAL_VALUE_ISNAN (d1))
1293 {
 1294 	  /* Make the resulting NaN value a qNaN when flag_signaling_nans
1295 is off. */
1296 d1.signalling = 0;
1297 t = build_real (type, d1);
1298 return t;
1299 }
1300 else if (REAL_VALUE_ISNAN (d2))
1301 {
 1302 	  /* Make the resulting NaN value a qNaN when flag_signaling_nans
1303 is off. */
1304 d2.signalling = 0;
1305 t = build_real (type, d2);
1306 return t;
1307 }
1308
1309 inexact = real_arithmetic (&value, code, &d1, &d2);
1310 real_convert (&result, mode, &value);
1311
1312 /* Don't constant fold this floating point operation if
 1313 	 both operands are not NaN but the result is NaN, and
 1314 	 flag_trapping_math is set. Such operations should raise an
1315 invalid operation exception. */
1316 if (flag_trapping_math
1317 && MODE_HAS_NANS (mode)
1318 && REAL_VALUE_ISNAN (result)
1319 && !REAL_VALUE_ISNAN (d1)
1320 && !REAL_VALUE_ISNAN (d2))
1321 return NULL_TREE;
1322
1323 /* Don't constant fold this floating point operation if
 1324 	 the result has overflowed and flag_trapping_math is set. */
1325 if (flag_trapping_math
1326 && MODE_HAS_INFINITIES (mode)
1327 && REAL_VALUE_ISINF (result)
1328 && !REAL_VALUE_ISINF (d1)
1329 && !REAL_VALUE_ISINF (d2))
1330 return NULL_TREE;
1331
1332 /* Don't constant fold this floating point operation if the
 1333 	 result may depend upon the run-time rounding mode and
1334 flag_rounding_math is set, or if GCC's software emulation
1335 is unable to accurately represent the result. */
1336 if ((flag_rounding_math
1337 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1338 && (inexact || !real_identical (&result, &value)))
1339 return NULL_TREE;
1340
1341 t = build_real (type, result);
1342
1343 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1344 return t;
1345 }
1346
1347 if (TREE_CODE (arg1) == FIXED_CST)
1348 {
1349 FIXED_VALUE_TYPE f1;
1350 FIXED_VALUE_TYPE f2;
1351 FIXED_VALUE_TYPE result;
1352 tree t, type;
1353 int sat_p;
1354 bool overflow_p;
1355
1356 /* The following codes are handled by fixed_arithmetic. */
1357 switch (code)
1358 {
1359 case PLUS_EXPR:
1360 case MINUS_EXPR:
1361 case MULT_EXPR:
1362 case TRUNC_DIV_EXPR:
1363 if (TREE_CODE (arg2) != FIXED_CST)
1364 return NULL_TREE;
1365 f2 = TREE_FIXED_CST (arg2);
1366 break;
1367
1368 case LSHIFT_EXPR:
1369 case RSHIFT_EXPR:
1370 {
1371 if (TREE_CODE (arg2) != INTEGER_CST)
1372 return NULL_TREE;
1373 wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
1374 f2.data.high = w2.elt (1);
1375 f2.data.low = w2.ulow ();
1376 f2.mode = SImode;
1377 }
1378 break;
1379
1380 default:
1381 return NULL_TREE;
1382 }
1383
1384 f1 = TREE_FIXED_CST (arg1);
1385 type = TREE_TYPE (arg1);
1386 sat_p = TYPE_SATURATING (type);
1387 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1388 t = build_fixed (type, result);
1389 /* Propagate overflow flags. */
1390 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1391 TREE_OVERFLOW (t) = 1;
1392 return t;
1393 }
1394
1395 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1396 {
1397 tree type = TREE_TYPE (arg1);
1398 tree r1 = TREE_REALPART (arg1);
1399 tree i1 = TREE_IMAGPART (arg1);
1400 tree r2 = TREE_REALPART (arg2);
1401 tree i2 = TREE_IMAGPART (arg2);
1402 tree real, imag;
1403
1404 switch (code)
1405 {
1406 case PLUS_EXPR:
1407 case MINUS_EXPR:
1408 real = const_binop (code, r1, r2);
1409 imag = const_binop (code, i1, i2);
1410 break;
1411
1412 case MULT_EXPR:
1413 if (COMPLEX_FLOAT_TYPE_P (type))
1414 return do_mpc_arg2 (arg1, arg2, type,
1415 /* do_nonfinite= */ folding_initializer,
1416 mpc_mul);
1417
1418 real = const_binop (MINUS_EXPR,
1419 const_binop (MULT_EXPR, r1, r2),
1420 const_binop (MULT_EXPR, i1, i2));
1421 imag = const_binop (PLUS_EXPR,
1422 const_binop (MULT_EXPR, r1, i2),
1423 const_binop (MULT_EXPR, i1, r2));
1424 break;
1425
1426 case RDIV_EXPR:
1427 if (COMPLEX_FLOAT_TYPE_P (type))
1428 return do_mpc_arg2 (arg1, arg2, type,
1429 /* do_nonfinite= */ folding_initializer,
1430 mpc_div);
1431 /* Fallthru. */
1432 case TRUNC_DIV_EXPR:
1433 case CEIL_DIV_EXPR:
1434 case FLOOR_DIV_EXPR:
1435 case ROUND_DIV_EXPR:
1436 if (flag_complex_method == 0)
1437 {
1438 /* Keep this algorithm in sync with
1439 tree-complex.c:expand_complex_div_straight().
1440
1441 Expand complex division to scalars, straightforward algorithm.
1442 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1443 t = br*br + bi*bi
1444 */
1445 tree magsquared
1446 = const_binop (PLUS_EXPR,
1447 const_binop (MULT_EXPR, r2, r2),
1448 const_binop (MULT_EXPR, i2, i2));
1449 tree t1
1450 = const_binop (PLUS_EXPR,
1451 const_binop (MULT_EXPR, r1, r2),
1452 const_binop (MULT_EXPR, i1, i2));
1453 tree t2
1454 = const_binop (MINUS_EXPR,
1455 const_binop (MULT_EXPR, i1, r2),
1456 const_binop (MULT_EXPR, r1, i2));
1457
1458 real = const_binop (code, t1, magsquared);
1459 imag = const_binop (code, t2, magsquared);
1460 }
1461 else
1462 {
1463 /* Keep this algorithm in sync with
1464 tree-complex.c:expand_complex_div_wide().
1465
1466 Expand complex division to scalars, modified algorithm to minimize
1467 overflow with wide input ranges. */
1468 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1469 fold_abs_const (r2, TREE_TYPE (type)),
1470 fold_abs_const (i2, TREE_TYPE (type)));
1471
1472 if (integer_nonzerop (compare))
1473 {
1474 /* In the TRUE branch, we compute
1475 ratio = br/bi;
1476 div = (br * ratio) + bi;
1477 tr = (ar * ratio) + ai;
1478 ti = (ai * ratio) - ar;
1479 tr = tr / div;
1480 ti = ti / div; */
1481 tree ratio = const_binop (code, r2, i2);
1482 tree div = const_binop (PLUS_EXPR, i2,
1483 const_binop (MULT_EXPR, r2, ratio));
1484 real = const_binop (MULT_EXPR, r1, ratio);
1485 real = const_binop (PLUS_EXPR, real, i1);
1486 real = const_binop (code, real, div);
1487
1488 imag = const_binop (MULT_EXPR, i1, ratio);
1489 imag = const_binop (MINUS_EXPR, imag, r1);
1490 imag = const_binop (code, imag, div);
1491 }
1492 else
1493 {
1494 /* In the FALSE branch, we compute
1495 ratio = d/c;
1496 divisor = (d * ratio) + c;
1497 tr = (b * ratio) + a;
1498 ti = b - (a * ratio);
1499 tr = tr / div;
1500 ti = ti / div; */
1501 tree ratio = const_binop (code, i2, r2);
1502 tree div = const_binop (PLUS_EXPR, r2,
1503 const_binop (MULT_EXPR, i2, ratio));
1504
1505 real = const_binop (MULT_EXPR, i1, ratio);
1506 real = const_binop (PLUS_EXPR, real, r1);
1507 real = const_binop (code, real, div);
1508
1509 imag = const_binop (MULT_EXPR, r1, ratio);
1510 imag = const_binop (MINUS_EXPR, i1, imag);
1511 imag = const_binop (code, imag, div);
1512 }
1513 }
1514 break;
1515
1516 default:
1517 return NULL_TREE;
1518 }
1519
1520 if (real && imag)
1521 return build_complex (type, real, imag);
1522 }
1523
1524 if (TREE_CODE (arg1) == VECTOR_CST
1525 && TREE_CODE (arg2) == VECTOR_CST
1526 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
1527 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
1528 {
1529 tree type = TREE_TYPE (arg1);
1530 bool step_ok_p;
1531 if (VECTOR_CST_STEPPED_P (arg1)
1532 && VECTOR_CST_STEPPED_P (arg2))
1533 /* We can operate directly on the encoding if:
1534
1535 a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
1536 implies
1537 (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)
1538
1539 Addition and subtraction are the supported operators
1540 for which this is true. */
1541 step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
1542 else if (VECTOR_CST_STEPPED_P (arg1))
1543 /* We can operate directly on stepped encodings if:
1544
1545 a3 - a2 == a2 - a1
1546 implies:
1547 (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)
1548
1549 which is true if (x -> x op c) distributes over addition. */
1550 step_ok_p = distributes_over_addition_p (code, 1);
1551 else
1552 /* Similarly in reverse. */
1553 step_ok_p = distributes_over_addition_p (code, 2);
1554 tree_vector_builder elts;
1555 if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
1556 return NULL_TREE;
1557 unsigned int count = elts.encoded_nelts ();
1558 for (unsigned int i = 0; i < count; ++i)
1559 {
1560 tree elem1 = VECTOR_CST_ELT (arg1, i);
1561 tree elem2 = VECTOR_CST_ELT (arg2, i);
1562
1563 tree elt = const_binop (code, elem1, elem2);
1564
1565 /* It is possible that const_binop cannot handle the given
 1566 	     code and returns NULL_TREE. */
1567 if (elt == NULL_TREE)
1568 return NULL_TREE;
1569 elts.quick_push (elt);
1570 }
1571
1572 return elts.build ();
1573 }
1574
1575 /* Shifts allow a scalar offset for a vector. */
1576 if (TREE_CODE (arg1) == VECTOR_CST
1577 && TREE_CODE (arg2) == INTEGER_CST)
1578 {
1579 tree type = TREE_TYPE (arg1);
1580 bool step_ok_p = distributes_over_addition_p (code, 1);
1581 tree_vector_builder elts;
1582 if (!elts.new_unary_operation (type, arg1, step_ok_p))
1583 return NULL_TREE;
1584 unsigned int count = elts.encoded_nelts ();
1585 for (unsigned int i = 0; i < count; ++i)
1586 {
1587 tree elem1 = VECTOR_CST_ELT (arg1, i);
1588
1589 tree elt = const_binop (code, elem1, arg2);
1590
1591 /* It is possible that const_binop cannot handle the given
 1592 	     code and returns NULL_TREE. */
1593 if (elt == NULL_TREE)
1594 return NULL_TREE;
1595 elts.quick_push (elt);
1596 }
1597
1598 return elts.build ();
1599 }
1600 return NULL_TREE;
1601 }
1602
1603 /* Overload that adds a TYPE parameter to be able to dispatch
1604 to fold_relational_const. */
1605
1606 tree
1607 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1608 {
1609 if (TREE_CODE_CLASS (code) == tcc_comparison)
1610 return fold_relational_const (code, type, arg1, arg2);
1611
1612 /* ??? Until we make the const_binop worker take the type of the
 1613      result as an argument, put those cases that need it here. */
1614 switch (code)
1615 {
1616 case VEC_SERIES_EXPR:
1617 if (CONSTANT_CLASS_P (arg1)
1618 && CONSTANT_CLASS_P (arg2))
1619 return build_vec_series (type, arg1, arg2);
1620 return NULL_TREE;
1621
1622 case COMPLEX_EXPR:
1623 if ((TREE_CODE (arg1) == REAL_CST
1624 && TREE_CODE (arg2) == REAL_CST)
1625 || (TREE_CODE (arg1) == INTEGER_CST
1626 && TREE_CODE (arg2) == INTEGER_CST))
1627 return build_complex (type, arg1, arg2);
1628 return NULL_TREE;
1629
1630 case POINTER_DIFF_EXPR:
1631 if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1632 {
1633 poly_offset_int res = (wi::to_poly_offset (arg1)
1634 - wi::to_poly_offset (arg2));
1635 return force_fit_type (type, res, 1,
1636 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1637 }
1638 return NULL_TREE;
1639
1640 case VEC_PACK_TRUNC_EXPR:
1641 case VEC_PACK_FIX_TRUNC_EXPR:
1642 case VEC_PACK_FLOAT_EXPR:
1643 {
1644 unsigned int HOST_WIDE_INT out_nelts, in_nelts, i;
1645
1646 if (TREE_CODE (arg1) != VECTOR_CST
1647 || TREE_CODE (arg2) != VECTOR_CST)
1648 return NULL_TREE;
1649
1650 if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1651 return NULL_TREE;
1652
1653 out_nelts = in_nelts * 2;
1654 gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1655 && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1656
1657 tree_vector_builder elts (type, out_nelts, 1);
1658 for (i = 0; i < out_nelts; i++)
1659 {
1660 tree elt = (i < in_nelts
1661 ? VECTOR_CST_ELT (arg1, i)
1662 : VECTOR_CST_ELT (arg2, i - in_nelts));
1663 elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1664 ? NOP_EXPR
1665 : code == VEC_PACK_FLOAT_EXPR
1666 ? FLOAT_EXPR : FIX_TRUNC_EXPR,
1667 TREE_TYPE (type), elt);
1668 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1669 return NULL_TREE;
1670 elts.quick_push (elt);
1671 }
1672
1673 return elts.build ();
1674 }
1675
1676 case VEC_WIDEN_MULT_LO_EXPR:
1677 case VEC_WIDEN_MULT_HI_EXPR:
1678 case VEC_WIDEN_MULT_EVEN_EXPR:
1679 case VEC_WIDEN_MULT_ODD_EXPR:
1680 {
1681 unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;
1682
1683 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1684 return NULL_TREE;
1685
1686 if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1687 return NULL_TREE;
1688 out_nelts = in_nelts / 2;
1689 gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1690 && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1691
1692 if (code == VEC_WIDEN_MULT_LO_EXPR)
1693 scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
1694 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1695 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
1696 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1697 scale = 1, ofs = 0;
1698 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1699 scale = 1, ofs = 1;
1700
1701 tree_vector_builder elts (type, out_nelts, 1);
1702 for (out = 0; out < out_nelts; out++)
1703 {
1704 unsigned int in = (out << scale) + ofs;
1705 tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1706 VECTOR_CST_ELT (arg1, in));
1707 tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1708 VECTOR_CST_ELT (arg2, in));
1709
1710 if (t1 == NULL_TREE || t2 == NULL_TREE)
1711 return NULL_TREE;
1712 tree elt = const_binop (MULT_EXPR, t1, t2);
1713 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1714 return NULL_TREE;
1715 elts.quick_push (elt);
1716 }
1717
1718 return elts.build ();
1719 }
1720
1721 default:;
1722 }
1723
1724 if (TREE_CODE_CLASS (code) != tcc_binary)
1725 return NULL_TREE;
1726
1727 /* Make sure type and arg0 have the same saturating flag. */
1728 gcc_checking_assert (TYPE_SATURATING (type)
1729 == TYPE_SATURATING (TREE_TYPE (arg1)));
1730
1731 return const_binop (code, arg1, arg2);
1732 }
1733
 1734 /* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
 1735    Return zero if computing the constant is not possible. */
1736
1737 tree
1738 const_unop (enum tree_code code, tree type, tree arg0)
1739 {
1740 /* Don't perform the operation, other than NEGATE and ABS, if
1741 flag_signaling_nans is on and the operand is a signaling NaN. */
1742 if (TREE_CODE (arg0) == REAL_CST
1743 && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1744 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1745 && code != NEGATE_EXPR
1746 && code != ABS_EXPR
1747 && code != ABSU_EXPR)
1748 return NULL_TREE;
1749
1750 switch (code)
1751 {
1752 CASE_CONVERT:
1753 case FLOAT_EXPR:
1754 case FIX_TRUNC_EXPR:
1755 case FIXED_CONVERT_EXPR:
1756 return fold_convert_const (code, type, arg0);
1757
1758 case ADDR_SPACE_CONVERT_EXPR:
1759 /* If the source address is 0, and the source address space
1760 cannot have a valid object at 0, fold to dest type null. */
1761 if (integer_zerop (arg0)
1762 && !(targetm.addr_space.zero_address_valid
1763 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1764 return fold_convert_const (code, type, arg0);
1765 break;
1766
1767 case VIEW_CONVERT_EXPR:
1768 return fold_view_convert_expr (type, arg0);
1769
1770 case NEGATE_EXPR:
1771 {
1772 /* Can't call fold_negate_const directly here as that doesn't
1773 handle all cases and we might not be able to negate some
1774 constants. */
1775 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1776 if (tem && CONSTANT_CLASS_P (tem))
1777 return tem;
1778 break;
1779 }
1780
1781 case ABS_EXPR:
1782 case ABSU_EXPR:
1783 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1784 return fold_abs_const (arg0, type);
1785 break;
1786
1787 case CONJ_EXPR:
1788 if (TREE_CODE (arg0) == COMPLEX_CST)
1789 {
1790 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1791 TREE_TYPE (type));
1792 return build_complex (type, TREE_REALPART (arg0), ipart);
1793 }
1794 break;
1795
1796 case BIT_NOT_EXPR:
1797 if (TREE_CODE (arg0) == INTEGER_CST)
1798 return fold_not_const (arg0, type);
1799 else if (POLY_INT_CST_P (arg0))
1800 return wide_int_to_tree (type, -poly_int_cst_value (arg0));
1801 /* Perform BIT_NOT_EXPR on each element individually. */
1802 else if (TREE_CODE (arg0) == VECTOR_CST)
1803 {
1804 tree elem;
1805
1806 /* This can cope with stepped encodings because ~x == -1 - x. */
1807 tree_vector_builder elements;
1808 elements.new_unary_operation (type, arg0, true);
1809 unsigned int i, count = elements.encoded_nelts ();
1810 for (i = 0; i < count; ++i)
1811 {
1812 elem = VECTOR_CST_ELT (arg0, i);
1813 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1814 if (elem == NULL_TREE)
1815 break;
1816 elements.quick_push (elem);
1817 }
1818 if (i == count)
1819 return elements.build ();
1820 }
1821 break;
1822
1823 case TRUTH_NOT_EXPR:
1824 if (TREE_CODE (arg0) == INTEGER_CST)
1825 return constant_boolean_node (integer_zerop (arg0), type);
1826 break;
1827
1828 case REALPART_EXPR:
1829 if (TREE_CODE (arg0) == COMPLEX_CST)
1830 return fold_convert (type, TREE_REALPART (arg0));
1831 break;
1832
1833 case IMAGPART_EXPR:
1834 if (TREE_CODE (arg0) == COMPLEX_CST)
1835 return fold_convert (type, TREE_IMAGPART (arg0));
1836 break;
1837
1838 case VEC_UNPACK_LO_EXPR:
1839 case VEC_UNPACK_HI_EXPR:
1840 case VEC_UNPACK_FLOAT_LO_EXPR:
1841 case VEC_UNPACK_FLOAT_HI_EXPR:
1842 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
1843 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
1844 {
1845 unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
1846 enum tree_code subcode;
1847
1848 if (TREE_CODE (arg0) != VECTOR_CST)
1849 return NULL_TREE;
1850
1851 if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
1852 return NULL_TREE;
1853 out_nelts = in_nelts / 2;
1854 gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1855
1856 unsigned int offset = 0;
1857 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1858 || code == VEC_UNPACK_FLOAT_LO_EXPR
1859 || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
1860 offset = out_nelts;
1861
1862 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1863 subcode = NOP_EXPR;
1864 else if (code == VEC_UNPACK_FLOAT_LO_EXPR
1865 || code == VEC_UNPACK_FLOAT_HI_EXPR)
1866 subcode = FLOAT_EXPR;
1867 else
1868 subcode = FIX_TRUNC_EXPR;
1869
1870 tree_vector_builder elts (type, out_nelts, 1);
1871 for (i = 0; i < out_nelts; i++)
1872 {
1873 tree elt = fold_convert_const (subcode, TREE_TYPE (type),
1874 VECTOR_CST_ELT (arg0, i + offset));
1875 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1876 return NULL_TREE;
1877 elts.quick_push (elt);
1878 }
1879
1880 return elts.build ();
1881 }
1882
1883 case VEC_DUPLICATE_EXPR:
1884 if (CONSTANT_CLASS_P (arg0))
1885 return build_vector_from_val (type, arg0);
1886 return NULL_TREE;
1887
1888 default:
1889 break;
1890 }
1891
1892 return NULL_TREE;
1893 }
1894
1895 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1896 indicates which particular sizetype to create. */
1897
1898 tree
1899 size_int_kind (poly_int64 number, enum size_type_kind kind)
1900 {
1901 return build_int_cst (sizetype_tab[(int) kind], number);
1902 }
1903 \f
1904 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1905 is a tree code. The type of the result is taken from the operands.
1906 Both must be equivalent integer types, a la int_binop_types_match_p.

1907 If the operands are constant, so is the result. */
1908
1909 tree
1910 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1911 {
1912 tree type = TREE_TYPE (arg0);
1913
1914 if (arg0 == error_mark_node || arg1 == error_mark_node)
1915 return error_mark_node;
1916
1917 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1918 TREE_TYPE (arg1)));
1919
1920 /* Handle the special case of two poly_int constants faster. */
1921 if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
1922 {
1923 /* And some specific cases even faster than that. */
1924 if (code == PLUS_EXPR)
1925 {
1926 if (integer_zerop (arg0)
1927 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1928 return arg1;
1929 if (integer_zerop (arg1)
1930 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1931 return arg0;
1932 }
1933 else if (code == MINUS_EXPR)
1934 {
1935 if (integer_zerop (arg1)
1936 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1937 return arg0;
1938 }
1939 else if (code == MULT_EXPR)
1940 {
1941 if (integer_onep (arg0)
1942 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1943 return arg1;
1944 }
1945
1946 /* Handle the general case of two integer constants. For sizetype
1947 constant calculations we always want to know about overflow,
1948 even in the unsigned case. */
1949 tree res = int_const_binop (code, arg0, arg1, -1);
1950 if (res != NULL_TREE)
1951 return res;
1952 }
1953
1954 return fold_build2_loc (loc, code, type, arg0, arg1);
1955 }
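/* Illustrative sketch (assuming the usual size_binop wrapper, which passes
   UNKNOWN_LOCATION to size_binop_loc): two constant operands fold
   immediately through int_const_binop, e.g.

     tree s = size_binop (PLUS_EXPR, size_int (4), size_int (8));
       => INTEGER_CST 12 of type sizetype

   while non-constant operands simply get a PLUS_EXPR built by
   fold_build2_loc.  */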
1956
1957 /* Given two values, either both of sizetype or both of bitsizetype,
1958 compute the difference between the two values. Return the value
1959 in signed type corresponding to the type of the operands. */
1960
1961 tree
1962 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1963 {
1964 tree type = TREE_TYPE (arg0);
1965 tree ctype;
1966
1967 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1968 TREE_TYPE (arg1)));
1969
1970 /* If the type is already signed, just do the simple thing. */
1971 if (!TYPE_UNSIGNED (type))
1972 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1973
1974 if (type == sizetype)
1975 ctype = ssizetype;
1976 else if (type == bitsizetype)
1977 ctype = sbitsizetype;
1978 else
1979 ctype = signed_type_for (type);
1980
1981 /* If either operand is not a constant, do the conversions to the signed
1982 type and subtract. The hardware will do the right thing with any
1983 overflow in the subtraction. */
1984 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1985 return size_binop_loc (loc, MINUS_EXPR,
1986 fold_convert_loc (loc, ctype, arg0),
1987 fold_convert_loc (loc, ctype, arg1));
1988
1989 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1990 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1991 overflow) and negate (which can't either). Special-case a result
1992 of zero while we're here. */
1993 if (tree_int_cst_equal (arg0, arg1))
1994 return build_int_cst (ctype, 0);
1995 else if (tree_int_cst_lt (arg1, arg0))
1996 return fold_convert_loc (loc, ctype,
1997 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1998 else
1999 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
2000 fold_convert_loc (loc, ctype,
2001 size_binop_loc (loc,
2002 MINUS_EXPR,
2003 arg1, arg0)));
2004 }
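/* Illustrative sketch (assuming the usual size_diffop wrapper passing
   UNKNOWN_LOCATION): because the result is returned in the signed
   counterpart type, a "negative" difference of sizetype constants stays
   meaningful, e.g.

     tree d = size_diffop (size_int (4), size_int (12));
       => INTEGER_CST -8 of type ssizetype

   rather than a huge unsigned sizetype value.  */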
2005 \f
2006 /* A subroutine of fold_convert_const handling conversions of an
2007 INTEGER_CST to another integer type. */
2008
2009 static tree
2010 fold_convert_const_int_from_int (tree type, const_tree arg1)
2011 {
2012 /* Given an integer constant, make a new constant with the new type,
2013 appropriately sign-extended or truncated. Use widest_int
2014 so that any extension is done according to ARG1's type. */
2015 return force_fit_type (type, wi::to_widest (arg1),
2016 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2017 TREE_OVERFLOW (arg1));
2018 }
2019
2020 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2021 to an integer type. */
2022
2023 static tree
2024 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2025 {
2026 bool overflow = false;
2027 tree t;
2028
2029 /* The following code implements the floating point to integer
2030 conversion rules required by the Java Language Specification:
2031 IEEE NaNs are mapped to zero and values that overflow
2032 the target precision saturate, i.e. values greater than
2033 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2034 are mapped to INT_MIN. These semantics are allowed by the
2035 C and C++ standards, which simply state that the behavior of
2036 FP-to-integer conversion is unspecified upon overflow. */
2037
2038 wide_int val;
2039 REAL_VALUE_TYPE r;
2040 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2041
2042 switch (code)
2043 {
2044 case FIX_TRUNC_EXPR:
2045 real_trunc (&r, VOIDmode, &x);
2046 break;
2047
2048 default:
2049 gcc_unreachable ();
2050 }
2051
2052 /* If R is NaN, return zero and show we have an overflow. */
2053 if (REAL_VALUE_ISNAN (r))
2054 {
2055 overflow = true;
2056 val = wi::zero (TYPE_PRECISION (type));
2057 }
2058
2059 /* See if R is less than the lower bound or greater than the
2060 upper bound. */
2061
2062 if (! overflow)
2063 {
2064 tree lt = TYPE_MIN_VALUE (type);
2065 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2066 if (real_less (&r, &l))
2067 {
2068 overflow = true;
2069 val = wi::to_wide (lt);
2070 }
2071 }
2072
2073 if (! overflow)
2074 {
2075 tree ut = TYPE_MAX_VALUE (type);
2076 if (ut)
2077 {
2078 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2079 if (real_less (&u, &r))
2080 {
2081 overflow = true;
2082 val = wi::to_wide (ut);
2083 }
2084 }
2085 }
2086
2087 if (! overflow)
2088 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2089
2090 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2091 return t;
2092 }
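/* Illustrative examples of the saturating behaviour described above
   (hypothetical constants, assuming a 32-bit int target):

     folding (int) 1.0e30   => 2147483647 (INT_MAX), TREE_OVERFLOW set
     folding (int) -1.0e30  => -2147483648 (INT_MIN), TREE_OVERFLOW set
     folding (int) NaN      => 0, TREE_OVERFLOW set

   The TREE_OVERFLOW flag lets callers diagnose the out-of-range
   conversion if they want to.  */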
2093
2094 /* A subroutine of fold_convert_const handling conversions of a
2095 FIXED_CST to an integer type. */
2096
2097 static tree
2098 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2099 {
2100 tree t;
2101 double_int temp, temp_trunc;
2102 scalar_mode mode;
2103
2104 /* Right shift FIXED_CST to temp by fbit. */
2105 temp = TREE_FIXED_CST (arg1).data;
2106 mode = TREE_FIXED_CST (arg1).mode;
2107 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2108 {
2109 temp = temp.rshift (GET_MODE_FBIT (mode),
2110 HOST_BITS_PER_DOUBLE_INT,
2111 SIGNED_FIXED_POINT_MODE_P (mode));
2112
2113 /* Left shift temp to temp_trunc by fbit. */
2114 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2115 HOST_BITS_PER_DOUBLE_INT,
2116 SIGNED_FIXED_POINT_MODE_P (mode));
2117 }
2118 else
2119 {
2120 temp = double_int_zero;
2121 temp_trunc = double_int_zero;
2122 }
2123
2124 /* If FIXED_CST is negative, we need to round the value toward 0:
2125 if the fractional bits are not all zero, add 1 to temp. */
2126 if (SIGNED_FIXED_POINT_MODE_P (mode)
2127 && temp_trunc.is_negative ()
2128 && TREE_FIXED_CST (arg1).data != temp_trunc)
2129 temp += double_int_one;
2130
2131 /* Given a fixed-point constant, make a new constant with the new type,
2132 appropriately sign-extended or truncated. */
2133 t = force_fit_type (type, temp, -1,
2134 (temp.is_negative ()
2135 && (TYPE_UNSIGNED (type)
2136 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2137 | TREE_OVERFLOW (arg1));
2138
2139 return t;
2140 }
2141
2142 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2143 to another floating point type. */
2144
2145 static tree
2146 fold_convert_const_real_from_real (tree type, const_tree arg1)
2147 {
2148 REAL_VALUE_TYPE value;
2149 tree t;
2150
2151 /* Don't perform the operation if flag_signaling_nans is on
2152 and the operand is a signaling NaN. */
2153 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2154 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2155 return NULL_TREE;
2156
2157 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2158 t = build_real (type, value);
2159
2160 /* If converting an infinity or NAN to a representation that doesn't
2161 have one, set the overflow bit so that we can produce some kind of
2162 error message at the appropriate point if necessary. It's not the
2163 most user-friendly message, but it's better than nothing. */
2164 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2165 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2166 TREE_OVERFLOW (t) = 1;
2167 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2168 && !MODE_HAS_NANS (TYPE_MODE (type)))
2169 TREE_OVERFLOW (t) = 1;
2170 /* Regular overflow: the conversion produced an infinity in a mode that
2171 can't represent infinities. */
2172 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2173 && REAL_VALUE_ISINF (value)
2174 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2175 TREE_OVERFLOW (t) = 1;
2176 else
2177 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2178 return t;
2179 }
2180
2181 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2182 to a floating point type. */
2183
2184 static tree
2185 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2186 {
2187 REAL_VALUE_TYPE value;
2188 tree t;
2189
2190 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2191 &TREE_FIXED_CST (arg1));
2192 t = build_real (type, value);
2193
2194 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2195 return t;
2196 }
2197
2198 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2199 to another fixed-point type. */
2200
2201 static tree
2202 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2203 {
2204 FIXED_VALUE_TYPE value;
2205 tree t;
2206 bool overflow_p;
2207
2208 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2209 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2210 t = build_fixed (type, value);
2211
2212 /* Propagate overflow flags. */
2213 if (overflow_p | TREE_OVERFLOW (arg1))
2214 TREE_OVERFLOW (t) = 1;
2215 return t;
2216 }
2217
2218 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2219 to a fixed-point type. */
2220
2221 static tree
2222 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2223 {
2224 FIXED_VALUE_TYPE value;
2225 tree t;
2226 bool overflow_p;
2227 double_int di;
2228
2229 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2230
2231 di.low = TREE_INT_CST_ELT (arg1, 0);
2232 if (TREE_INT_CST_NUNITS (arg1) == 1)
2233 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2234 else
2235 di.high = TREE_INT_CST_ELT (arg1, 1);
2236
2237 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2238 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2239 TYPE_SATURATING (type));
2240 t = build_fixed (type, value);
2241
2242 /* Propagate overflow flags. */
2243 if (overflow_p | TREE_OVERFLOW (arg1))
2244 TREE_OVERFLOW (t) = 1;
2245 return t;
2246 }
2247
2248 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2249 to a fixed-point type. */
2250
2251 static tree
2252 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2253 {
2254 FIXED_VALUE_TYPE value;
2255 tree t;
2256 bool overflow_p;
2257
2258 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2259 &TREE_REAL_CST (arg1),
2260 TYPE_SATURATING (type));
2261 t = build_fixed (type, value);
2262
2263 /* Propagate overflow flags. */
2264 if (overflow_p | TREE_OVERFLOW (arg1))
2265 TREE_OVERFLOW (t) = 1;
2266 return t;
2267 }
2268
2269 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2270 type TYPE. If no simplification can be done return NULL_TREE. */
2271
2272 static tree
2273 fold_convert_const (enum tree_code code, tree type, tree arg1)
2274 {
2275 tree arg_type = TREE_TYPE (arg1);
2276 if (arg_type == type)
2277 return arg1;
2278
2279 /* We can't widen types, since the runtime value could overflow the
2280 original type before being extended to the new type. */
2281 if (POLY_INT_CST_P (arg1)
2282 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2283 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2284 return build_poly_int_cst (type,
2285 poly_wide_int::from (poly_int_cst_value (arg1),
2286 TYPE_PRECISION (type),
2287 TYPE_SIGN (arg_type)));
2288
2289 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2290 || TREE_CODE (type) == OFFSET_TYPE)
2291 {
2292 if (TREE_CODE (arg1) == INTEGER_CST)
2293 return fold_convert_const_int_from_int (type, arg1);
2294 else if (TREE_CODE (arg1) == REAL_CST)
2295 return fold_convert_const_int_from_real (code, type, arg1);
2296 else if (TREE_CODE (arg1) == FIXED_CST)
2297 return fold_convert_const_int_from_fixed (type, arg1);
2298 }
2299 else if (TREE_CODE (type) == REAL_TYPE)
2300 {
2301 if (TREE_CODE (arg1) == INTEGER_CST)
2302 return build_real_from_int_cst (type, arg1);
2303 else if (TREE_CODE (arg1) == REAL_CST)
2304 return fold_convert_const_real_from_real (type, arg1);
2305 else if (TREE_CODE (arg1) == FIXED_CST)
2306 return fold_convert_const_real_from_fixed (type, arg1);
2307 }
2308 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2309 {
2310 if (TREE_CODE (arg1) == FIXED_CST)
2311 return fold_convert_const_fixed_from_fixed (type, arg1);
2312 else if (TREE_CODE (arg1) == INTEGER_CST)
2313 return fold_convert_const_fixed_from_int (type, arg1);
2314 else if (TREE_CODE (arg1) == REAL_CST)
2315 return fold_convert_const_fixed_from_real (type, arg1);
2316 }
2317 else if (TREE_CODE (type) == VECTOR_TYPE)
2318 {
2319 if (TREE_CODE (arg1) == VECTOR_CST
2320 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2321 {
2322 tree elttype = TREE_TYPE (type);
2323 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2324 /* We can't handle steps directly when extending, since the
2325 values need to wrap at the original precision first. */
2326 bool step_ok_p
2327 = (INTEGRAL_TYPE_P (elttype)
2328 && INTEGRAL_TYPE_P (arg1_elttype)
2329 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2330 tree_vector_builder v;
2331 if (!v.new_unary_operation (type, arg1, step_ok_p))
2332 return NULL_TREE;
2333 unsigned int len = v.encoded_nelts ();
2334 for (unsigned int i = 0; i < len; ++i)
2335 {
2336 tree elt = VECTOR_CST_ELT (arg1, i);
2337 tree cvt = fold_convert_const (code, elttype, elt);
2338 if (cvt == NULL_TREE)
2339 return NULL_TREE;
2340 v.quick_push (cvt);
2341 }
2342 return v.build ();
2343 }
2344 }
2345 return NULL_TREE;
2346 }
2347
2348 /* Construct a vector of zero elements of vector type TYPE. */
2349
2350 static tree
2351 build_zero_vector (tree type)
2352 {
2353 tree t;
2354
2355 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2356 return build_vector_from_val (type, t);
2357 }
2358
2359 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2360
2361 bool
2362 fold_convertible_p (const_tree type, const_tree arg)
2363 {
2364 tree orig = TREE_TYPE (arg);
2365
2366 if (type == orig)
2367 return true;
2368
2369 if (TREE_CODE (arg) == ERROR_MARK
2370 || TREE_CODE (type) == ERROR_MARK
2371 || TREE_CODE (orig) == ERROR_MARK)
2372 return false;
2373
2374 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2375 return true;
2376
2377 switch (TREE_CODE (type))
2378 {
2379 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2380 case POINTER_TYPE: case REFERENCE_TYPE:
2381 case OFFSET_TYPE:
2382 return (INTEGRAL_TYPE_P (orig)
2383 || (POINTER_TYPE_P (orig)
2384 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2385 || TREE_CODE (orig) == OFFSET_TYPE);
2386
2387 case REAL_TYPE:
2388 case FIXED_POINT_TYPE:
2389 case VOID_TYPE:
2390 return TREE_CODE (type) == TREE_CODE (orig);
2391
2392 case VECTOR_TYPE:
2393 return (VECTOR_TYPE_P (orig)
2394 && known_eq (TYPE_VECTOR_SUBPARTS (type),
2395 TYPE_VECTOR_SUBPARTS (orig))
2396 && fold_convertible_p (TREE_TYPE (type), TREE_TYPE (orig)));
2397
2398 default:
2399 return false;
2400 }
2401 }
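/* Illustrative examples (using the standard global type nodes; ARG stands
   for an arbitrary expression of the stated type):

     fold_convertible_p (long_integer_type_node, int ARG)    => true
     fold_convertible_p (double_type_node, float ARG)        => true
     fold_convertible_p (ptr_type_node, double ARG)          => false

   i.e. only conversions a plain NOP_EXPR can express are accepted;
   anything that would need FLOAT_EXPR, FIX_TRUNC_EXPR and friends is
   rejected.  */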
2402
2403 /* Convert expression ARG to type TYPE. Used by the middle-end for
2404 simple conversions in preference to calling the front-end's convert. */
2405
2406 tree
2407 fold_convert_loc (location_t loc, tree type, tree arg)
2408 {
2409 tree orig = TREE_TYPE (arg);
2410 tree tem;
2411
2412 if (type == orig)
2413 return arg;
2414
2415 if (TREE_CODE (arg) == ERROR_MARK
2416 || TREE_CODE (type) == ERROR_MARK
2417 || TREE_CODE (orig) == ERROR_MARK)
2418 return error_mark_node;
2419
2420 switch (TREE_CODE (type))
2421 {
2422 case POINTER_TYPE:
2423 case REFERENCE_TYPE:
2424 /* Handle conversions between pointers to different address spaces. */
2425 if (POINTER_TYPE_P (orig)
2426 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2427 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2428 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2429 /* fall through */
2430
2431 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2432 case OFFSET_TYPE:
2433 if (TREE_CODE (arg) == INTEGER_CST)
2434 {
2435 tem = fold_convert_const (NOP_EXPR, type, arg);
2436 if (tem != NULL_TREE)
2437 return tem;
2438 }
2439 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2440 || TREE_CODE (orig) == OFFSET_TYPE)
2441 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2442 if (TREE_CODE (orig) == COMPLEX_TYPE)
2443 return fold_convert_loc (loc, type,
2444 fold_build1_loc (loc, REALPART_EXPR,
2445 TREE_TYPE (orig), arg));
2446 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2447 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2448 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2449
2450 case REAL_TYPE:
2451 if (TREE_CODE (arg) == INTEGER_CST)
2452 {
2453 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2454 if (tem != NULL_TREE)
2455 return tem;
2456 }
2457 else if (TREE_CODE (arg) == REAL_CST)
2458 {
2459 tem = fold_convert_const (NOP_EXPR, type, arg);
2460 if (tem != NULL_TREE)
2461 return tem;
2462 }
2463 else if (TREE_CODE (arg) == FIXED_CST)
2464 {
2465 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2466 if (tem != NULL_TREE)
2467 return tem;
2468 }
2469
2470 switch (TREE_CODE (orig))
2471 {
2472 case INTEGER_TYPE:
2473 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2474 case POINTER_TYPE: case REFERENCE_TYPE:
2475 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2476
2477 case REAL_TYPE:
2478 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2479
2480 case FIXED_POINT_TYPE:
2481 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2482
2483 case COMPLEX_TYPE:
2484 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2485 return fold_convert_loc (loc, type, tem);
2486
2487 default:
2488 gcc_unreachable ();
2489 }
2490
2491 case FIXED_POINT_TYPE:
2492 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2493 || TREE_CODE (arg) == REAL_CST)
2494 {
2495 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2496 if (tem != NULL_TREE)
2497 goto fold_convert_exit;
2498 }
2499
2500 switch (TREE_CODE (orig))
2501 {
2502 case FIXED_POINT_TYPE:
2503 case INTEGER_TYPE:
2504 case ENUMERAL_TYPE:
2505 case BOOLEAN_TYPE:
2506 case REAL_TYPE:
2507 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2508
2509 case COMPLEX_TYPE:
2510 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2511 return fold_convert_loc (loc, type, tem);
2512
2513 default:
2514 gcc_unreachable ();
2515 }
2516
2517 case COMPLEX_TYPE:
2518 switch (TREE_CODE (orig))
2519 {
2520 case INTEGER_TYPE:
2521 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2522 case POINTER_TYPE: case REFERENCE_TYPE:
2523 case REAL_TYPE:
2524 case FIXED_POINT_TYPE:
2525 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2526 fold_convert_loc (loc, TREE_TYPE (type), arg),
2527 fold_convert_loc (loc, TREE_TYPE (type),
2528 integer_zero_node));
2529 case COMPLEX_TYPE:
2530 {
2531 tree rpart, ipart;
2532
2533 if (TREE_CODE (arg) == COMPLEX_EXPR)
2534 {
2535 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2536 TREE_OPERAND (arg, 0));
2537 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2538 TREE_OPERAND (arg, 1));
2539 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2540 }
2541
2542 arg = save_expr (arg);
2543 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2544 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2545 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2546 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2547 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2548 }
2549
2550 default:
2551 gcc_unreachable ();
2552 }
2553
2554 case VECTOR_TYPE:
2555 if (integer_zerop (arg))
2556 return build_zero_vector (type);
2557 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2558 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2559 || TREE_CODE (orig) == VECTOR_TYPE);
2560 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2561
2562 case VOID_TYPE:
2563 tem = fold_ignored_result (arg);
2564 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2565
2566 default:
2567 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2568 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2569 gcc_unreachable ();
2570 }
2571 fold_convert_exit:
2572 protected_set_expr_location_unshare (tem, loc);
2573 return tem;
2574 }
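/* Illustrative sketch (assuming the fold_convert wrapper that passes
   UNKNOWN_LOCATION to fold_convert_loc):

     tree z = fold_convert (sizetype, integer_zero_node);
       => INTEGER_CST 0 of type sizetype, via fold_convert_const
     tree r = fold_convert (double_type_node,
                            build_int_cst (integer_type_node, 3));
       => REAL_CST 3.0, via the FLOAT_EXPR path above

   Non-constant arguments just get the appropriate NOP_EXPR, FLOAT_EXPR,
   FIXED_CONVERT_EXPR or VIEW_CONVERT_EXPR built around them.  */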
2575 \f
2576 /* Return false if expr can be assumed not to be an lvalue, true
2577 otherwise. */
2578
2579 static bool
2580 maybe_lvalue_p (const_tree x)
2581 {
2582 /* We only need to wrap lvalue tree codes. */
2583 switch (TREE_CODE (x))
2584 {
2585 case VAR_DECL:
2586 case PARM_DECL:
2587 case RESULT_DECL:
2588 case LABEL_DECL:
2589 case FUNCTION_DECL:
2590 case SSA_NAME:
2591
2592 case COMPONENT_REF:
2593 case MEM_REF:
2594 case INDIRECT_REF:
2595 case ARRAY_REF:
2596 case ARRAY_RANGE_REF:
2597 case BIT_FIELD_REF:
2598 case OBJ_TYPE_REF:
2599
2600 case REALPART_EXPR:
2601 case IMAGPART_EXPR:
2602 case PREINCREMENT_EXPR:
2603 case PREDECREMENT_EXPR:
2604 case SAVE_EXPR:
2605 case TRY_CATCH_EXPR:
2606 case WITH_CLEANUP_EXPR:
2607 case COMPOUND_EXPR:
2608 case MODIFY_EXPR:
2609 case TARGET_EXPR:
2610 case COND_EXPR:
2611 case BIND_EXPR:
2612 case VIEW_CONVERT_EXPR:
2613 break;
2614
2615 default:
2616 /* Assume the worst for front-end tree codes. */
2617 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2618 break;
2619 return false;
2620 }
2621
2622 return true;
2623 }
2624
2625 /* Return an expr equal to X but certainly not valid as an lvalue. */
2626
2627 tree
2628 non_lvalue_loc (location_t loc, tree x)
2629 {
2630 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2631 us. */
2632 if (in_gimple_form)
2633 return x;
2634
2635 if (! maybe_lvalue_p (x))
2636 return x;
2637 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2638 }
2639
2640 /* When pedantic, return an expr equal to X but certainly not valid as a
2641 pedantic lvalue. Otherwise, return X. */
2642
2643 static tree
2644 pedantic_non_lvalue_loc (location_t loc, tree x)
2645 {
2646 return protected_set_expr_location_unshare (x, loc);
2647 }
2648 \f
2649 /* Given a tree comparison code, return the code that is the logical inverse.
2650 It is generally not safe to do this for floating-point comparisons, except
2651 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2652 ERROR_MARK in this case. */
2653
2654 enum tree_code
2655 invert_tree_comparison (enum tree_code code, bool honor_nans)
2656 {
2657 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2658 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2659 return ERROR_MARK;
2660
2661 switch (code)
2662 {
2663 case EQ_EXPR:
2664 return NE_EXPR;
2665 case NE_EXPR:
2666 return EQ_EXPR;
2667 case GT_EXPR:
2668 return honor_nans ? UNLE_EXPR : LE_EXPR;
2669 case GE_EXPR:
2670 return honor_nans ? UNLT_EXPR : LT_EXPR;
2671 case LT_EXPR:
2672 return honor_nans ? UNGE_EXPR : GE_EXPR;
2673 case LE_EXPR:
2674 return honor_nans ? UNGT_EXPR : GT_EXPR;
2675 case LTGT_EXPR:
2676 return UNEQ_EXPR;
2677 case UNEQ_EXPR:
2678 return LTGT_EXPR;
2679 case UNGT_EXPR:
2680 return LE_EXPR;
2681 case UNGE_EXPR:
2682 return LT_EXPR;
2683 case UNLT_EXPR:
2684 return GE_EXPR;
2685 case UNLE_EXPR:
2686 return GT_EXPR;
2687 case ORDERED_EXPR:
2688 return UNORDERED_EXPR;
2689 case UNORDERED_EXPR:
2690 return ORDERED_EXPR;
2691 default:
2692 gcc_unreachable ();
2693 }
2694 }
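/* Illustrative examples, read off the switch above:

     invert_tree_comparison (LT_EXPR, false)  => GE_EXPR
     invert_tree_comparison (LT_EXPR, true)   => UNGE_EXPR
                                                 (or ERROR_MARK when
                                                 flag_trapping_math is set)

   so callers must be prepared to receive ERROR_MARK for NaN-honoring
   comparisons.  */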
2695
2696 /* Similar, but return the comparison that results if the operands are
2697 swapped. This is safe for floating-point. */
2698
2699 enum tree_code
2700 swap_tree_comparison (enum tree_code code)
2701 {
2702 switch (code)
2703 {
2704 case EQ_EXPR:
2705 case NE_EXPR:
2706 case ORDERED_EXPR:
2707 case UNORDERED_EXPR:
2708 case LTGT_EXPR:
2709 case UNEQ_EXPR:
2710 return code;
2711 case GT_EXPR:
2712 return LT_EXPR;
2713 case GE_EXPR:
2714 return LE_EXPR;
2715 case LT_EXPR:
2716 return GT_EXPR;
2717 case LE_EXPR:
2718 return GE_EXPR;
2719 case UNGT_EXPR:
2720 return UNLT_EXPR;
2721 case UNGE_EXPR:
2722 return UNLE_EXPR;
2723 case UNLT_EXPR:
2724 return UNGT_EXPR;
2725 case UNLE_EXPR:
2726 return UNGE_EXPR;
2727 default:
2728 gcc_unreachable ();
2729 }
2730 }
2731
2732
2733 /* Convert a comparison tree code from an enum tree_code representation
2734 into a compcode bit-based encoding. This function is the inverse of
2735 compcode_to_comparison. */
2736
2737 static enum comparison_code
2738 comparison_to_compcode (enum tree_code code)
2739 {
2740 switch (code)
2741 {
2742 case LT_EXPR:
2743 return COMPCODE_LT;
2744 case EQ_EXPR:
2745 return COMPCODE_EQ;
2746 case LE_EXPR:
2747 return COMPCODE_LE;
2748 case GT_EXPR:
2749 return COMPCODE_GT;
2750 case NE_EXPR:
2751 return COMPCODE_NE;
2752 case GE_EXPR:
2753 return COMPCODE_GE;
2754 case ORDERED_EXPR:
2755 return COMPCODE_ORD;
2756 case UNORDERED_EXPR:
2757 return COMPCODE_UNORD;
2758 case UNLT_EXPR:
2759 return COMPCODE_UNLT;
2760 case UNEQ_EXPR:
2761 return COMPCODE_UNEQ;
2762 case UNLE_EXPR:
2763 return COMPCODE_UNLE;
2764 case UNGT_EXPR:
2765 return COMPCODE_UNGT;
2766 case LTGT_EXPR:
2767 return COMPCODE_LTGT;
2768 case UNGE_EXPR:
2769 return COMPCODE_UNGE;
2770 default:
2771 gcc_unreachable ();
2772 }
2773 }
2774
2775 /* Convert a compcode bit-based encoding of a comparison operator back
2776 to GCC's enum tree_code representation. This function is the
2777 inverse of comparison_to_compcode. */
2778
2779 static enum tree_code
2780 compcode_to_comparison (enum comparison_code code)
2781 {
2782 switch (code)
2783 {
2784 case COMPCODE_LT:
2785 return LT_EXPR;
2786 case COMPCODE_EQ:
2787 return EQ_EXPR;
2788 case COMPCODE_LE:
2789 return LE_EXPR;
2790 case COMPCODE_GT:
2791 return GT_EXPR;
2792 case COMPCODE_NE:
2793 return NE_EXPR;
2794 case COMPCODE_GE:
2795 return GE_EXPR;
2796 case COMPCODE_ORD:
2797 return ORDERED_EXPR;
2798 case COMPCODE_UNORD:
2799 return UNORDERED_EXPR;
2800 case COMPCODE_UNLT:
2801 return UNLT_EXPR;
2802 case COMPCODE_UNEQ:
2803 return UNEQ_EXPR;
2804 case COMPCODE_UNLE:
2805 return UNLE_EXPR;
2806 case COMPCODE_UNGT:
2807 return UNGT_EXPR;
2808 case COMPCODE_LTGT:
2809 return LTGT_EXPR;
2810 case COMPCODE_UNGE:
2811 return UNGE_EXPR;
2812 default:
2813 gcc_unreachable ();
2814 }
2815 }
2816
2817 /* Return true if COND1 tests the opposite condition of COND2. */
2818
2819 bool
2820 inverse_conditions_p (const_tree cond1, const_tree cond2)
2821 {
2822 return (COMPARISON_CLASS_P (cond1)
2823 && COMPARISON_CLASS_P (cond2)
2824 && (invert_tree_comparison
2825 (TREE_CODE (cond1),
2826 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2827 && operand_equal_p (TREE_OPERAND (cond1, 0),
2828 TREE_OPERAND (cond2, 0), 0)
2829 && operand_equal_p (TREE_OPERAND (cond1, 1),
2830 TREE_OPERAND (cond2, 1), 0));
2831 }
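/* Illustrative example: for integer operands a and b (so NaNs are not
   honored), inverse_conditions_p (a < b, a >= b) is true, while
   inverse_conditions_p (a < b, b > a) is false because the operands are
   matched positionally rather than canonicalized.  */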
2832
2833 /* Return a tree for the comparison which is the combination of
2834 doing the AND or OR (depending on CODE) of the two operations LCODE
2835 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2836 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2837 if this makes the transformation invalid. */
2838
2839 tree
2840 combine_comparisons (location_t loc,
2841 enum tree_code code, enum tree_code lcode,
2842 enum tree_code rcode, tree truth_type,
2843 tree ll_arg, tree lr_arg)
2844 {
2845 bool honor_nans = HONOR_NANS (ll_arg);
2846 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2847 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2848 int compcode;
2849
2850 switch (code)
2851 {
2852 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2853 compcode = lcompcode & rcompcode;
2854 break;
2855
2856 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2857 compcode = lcompcode | rcompcode;
2858 break;
2859
2860 default:
2861 return NULL_TREE;
2862 }
2863
2864 if (!honor_nans)
2865 {
2866 /* Eliminate unordered comparisons, as well as LTGT and ORD
2867 which are not used unless the mode has NaNs. */
2868 compcode &= ~COMPCODE_UNORD;
2869 if (compcode == COMPCODE_LTGT)
2870 compcode = COMPCODE_NE;
2871 else if (compcode == COMPCODE_ORD)
2872 compcode = COMPCODE_TRUE;
2873 }
2874 else if (flag_trapping_math)
2875 {
2876 /* Check that the original operation and the optimized ones will trap
2877 under the same condition. */
2878 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2879 && (lcompcode != COMPCODE_EQ)
2880 && (lcompcode != COMPCODE_ORD);
2881 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2882 && (rcompcode != COMPCODE_EQ)
2883 && (rcompcode != COMPCODE_ORD);
2884 bool trap = (compcode & COMPCODE_UNORD) == 0
2885 && (compcode != COMPCODE_EQ)
2886 && (compcode != COMPCODE_ORD);
2887
2888 /* In a short-circuited boolean expression the LHS might be
2889 such that the RHS, if evaluated, will never trap. For
2890 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2891 if neither x nor y is NaN. (This is a mixed blessing: for
2892 example, the expression above will never trap, hence
2893 optimizing it to x < y would be invalid). */
2894 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2895 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2896 rtrap = false;
2897
2898 /* If the comparison was short-circuited, and only the RHS
2899 trapped, we may now generate a spurious trap. */
2900 if (rtrap && !ltrap
2901 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2902 return NULL_TREE;
2903
2904 /* If we changed the conditions that cause a trap, we lose. */
2905 if ((ltrap || rtrap) != trap)
2906 return NULL_TREE;
2907 }
2908
2909 if (compcode == COMPCODE_TRUE)
2910 return constant_boolean_node (true, truth_type);
2911 else if (compcode == COMPCODE_FALSE)
2912 return constant_boolean_node (false, truth_type);
2913 else
2914 {
2915 enum tree_code tcode;
2916
2917 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2918 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2919 }
2920 }
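/* Worked example of the bit-based combination above: for
   (x <= y) && (x >= y) we get COMPCODE_LE & COMPCODE_GE = 3 & 6 = 2,
   which is COMPCODE_EQ, so the result folds to x == y.  Similarly
   (x < y) || (x == y) gives COMPCODE_LT | COMPCODE_EQ = 1 | 2 = 3,
   i.e. COMPCODE_LE, so it folds to x <= y.  (Both assume NaNs are not
   honored; otherwise the trapping-math checks above may give up.)  */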
2921 \f
2922 /* Return nonzero if two operands (typically of the same tree node)
2923 are necessarily equal. FLAGS modifies behavior as follows:
2924
2925 If OEP_ONLY_CONST is set, only return nonzero for constants.
2926 This function tests whether the operands are indistinguishable;
2927 it does not test whether they are equal using C's == operation.
2928 The distinction is important for IEEE floating point, because
2929 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2930 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2931
2932 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2933 even though it may hold multiple values during a function.
2934 This is because a GCC tree node guarantees that nothing else is
2935 executed between the evaluation of its "operands" (which may often
2936 be evaluated in arbitrary order). Hence if the operands themselves
2937 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2938 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2939 unset means assuming isochronic (or instantaneous) tree equivalence.
2940 Unless comparing arbitrary expression trees, such as from different
2941 statements, this flag can usually be left unset.
2942
2943 If OEP_PURE_SAME is set, then pure functions with identical arguments
2944 are considered the same. It is used when the caller has other ways
2945 to ensure that global memory is unchanged in between.
2946
2947 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2948 not values of expressions.
2949
2950 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2951 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2952
2953 If OEP_BITWISE is set, then require the values to be bitwise identical
2954 rather than simply numerically equal. Do not take advantage of things
2955 like math-related flags or undefined behavior; only return true for
2956 values that are provably bitwise identical in all circumstances.
2957
2958 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2959 any operand with side effects. This is unnecessarily conservative in the
2960 case where we know that arg0 and arg1 are in disjoint code paths (such as
2961 in the ?: operator). In addition, OEP_MATCH_SIDE_EFFECTS is used when comparing
2962 addresses with TREE_CONSTANT flag set so we know that &var == &var
2963 even if var is volatile. */
2964
2965 bool
2966 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
2967 unsigned int flags)
2968 {
2969 bool r;
2970 if (verify_hash_value (arg0, arg1, flags, &r))
2971 return r;
2972
2973 STRIP_ANY_LOCATION_WRAPPER (arg0);
2974 STRIP_ANY_LOCATION_WRAPPER (arg1);
2975
2976 /* If either is ERROR_MARK, they aren't equal. */
2977 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2978 || TREE_TYPE (arg0) == error_mark_node
2979 || TREE_TYPE (arg1) == error_mark_node)
2980 return false;
2981
2982 /* Similarly, if either does not have a type (like a template id),
2983 they aren't equal. */
2984 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2985 return false;
2986
2987 /* Bitwise identity makes no sense if the values have different layouts. */
2988 if ((flags & OEP_BITWISE)
2989 && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
2990 return false;
2991
2992 /* We cannot consider pointers to different address space equal. */
2993 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2994 && POINTER_TYPE_P (TREE_TYPE (arg1))
2995 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2996 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2997 return false;
2998
2999 /* Check equality of integer constants before bailing out due to
3000 precision differences. */
3001 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3002 {
3003 /* Address of INTEGER_CST is not defined; check that we did not forget
3004 to drop the OEP_ADDRESS_OF flags. */
3005 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3006 return tree_int_cst_equal (arg0, arg1);
3007 }
3008
3009 if (!(flags & OEP_ADDRESS_OF))
3010 {
3011 /* If both types don't have the same signedness, then we can't consider
3012 them equal. We must check this before the STRIP_NOPS calls
3013 because they may change the signedness of the arguments. As pointers
3014 strictly don't have a signedness, require either two pointers or
3015 two non-pointers as well. */
3016 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3017 || POINTER_TYPE_P (TREE_TYPE (arg0))
3018 != POINTER_TYPE_P (TREE_TYPE (arg1)))
3019 return false;
3020
3021 /* If both types don't have the same precision, then it is not safe
3022 to strip NOPs. */
3023 if (element_precision (TREE_TYPE (arg0))
3024 != element_precision (TREE_TYPE (arg1)))
3025 return false;
3026
3027 STRIP_NOPS (arg0);
3028 STRIP_NOPS (arg1);
3029 }
3030 #if 0
3031 /* FIXME: The Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
3032 sanity check once the issue is solved. */
3033 else
3034 /* Addresses of conversions and SSA_NAMEs (and many other things)
3035 are not defined. Check that we did not forget to drop the
3036 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3037 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3038 && TREE_CODE (arg0) != SSA_NAME);
3039 #endif
3040
3041 /* In case both args are comparisons but with different comparison
3042 code, try to swap the comparison operands of one arg to produce
3043 a match and compare that variant. */
3044 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3045 && COMPARISON_CLASS_P (arg0)
3046 && COMPARISON_CLASS_P (arg1))
3047 {
3048 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3049
3050 if (TREE_CODE (arg0) == swap_code)
3051 return operand_equal_p (TREE_OPERAND (arg0, 0),
3052 TREE_OPERAND (arg1, 1), flags)
3053 && operand_equal_p (TREE_OPERAND (arg0, 1),
3054 TREE_OPERAND (arg1, 0), flags);
3055 }
3056
3057 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3058 {
3059 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3060 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3061 ;
3062 else if (flags & OEP_ADDRESS_OF)
3063 {
3064 /* If we are interested in comparing addresses ignore
3065 MEM_REF wrappings of the base that can appear just for
3066 TBAA reasons. */
3067 if (TREE_CODE (arg0) == MEM_REF
3068 && DECL_P (arg1)
3069 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3070 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3071 && integer_zerop (TREE_OPERAND (arg0, 1)))
3072 return true;
3073 else if (TREE_CODE (arg1) == MEM_REF
3074 && DECL_P (arg0)
3075 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3076 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3077 && integer_zerop (TREE_OPERAND (arg1, 1)))
3078 return true;
3079 return false;
3080 }
3081 else
3082 return false;
3083 }
3084
3085 /* When not checking addresses, this is needed for conversions and for
3086 COMPONENT_REF. Might as well play it safe and always test this. */
3087 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3088 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3089 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3090 && !(flags & OEP_ADDRESS_OF)))
3091 return false;
3092
3093 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3094 We don't care about side effects in that case because the SAVE_EXPR
3095 takes care of that for us. In all other cases, two expressions are
3096 equal if they have no side effects. If we have two identical
3097 expressions with side effects that should be treated the same due
3098 to the only side effects being identical SAVE_EXPR's, that will
3099 be detected in the recursive calls below.
3100 If we are taking an invariant address of two identical objects
3101 they are necessarily equal as well. */
3102 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3103 && (TREE_CODE (arg0) == SAVE_EXPR
3104 || (flags & OEP_MATCH_SIDE_EFFECTS)
3105 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3106 return true;
3107
3108 /* Next handle constant cases, those for which we can return 1 even
3109 if ONLY_CONST is set. */
3110 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3111 switch (TREE_CODE (arg0))
3112 {
3113 case INTEGER_CST:
3114 return tree_int_cst_equal (arg0, arg1);
3115
3116 case FIXED_CST:
3117 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3118 TREE_FIXED_CST (arg1));
3119
3120 case REAL_CST:
3121 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3122 return true;
3123
3124 if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3125 {
3126 /* If we do not distinguish between signed and unsigned zero,
3127 consider them equal. */
3128 if (real_zerop (arg0) && real_zerop (arg1))
3129 return true;
3130 }
3131 return false;
3132
3133 case VECTOR_CST:
3134 {
3135 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3136 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3137 return false;
3138
3139 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3140 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3141 return false;
3142
3143 unsigned int count = vector_cst_encoded_nelts (arg0);
3144 for (unsigned int i = 0; i < count; ++i)
3145 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3146 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3147 return false;
3148 return true;
3149 }
3150
3151 case COMPLEX_CST:
3152 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3153 flags)
3154 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3155 flags));
3156
3157 case STRING_CST:
3158 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3159 && ! memcmp (TREE_STRING_POINTER (arg0),
3160 TREE_STRING_POINTER (arg1),
3161 TREE_STRING_LENGTH (arg0)));
3162
3163 case ADDR_EXPR:
3164 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3165 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3166 flags | OEP_ADDRESS_OF
3167 | OEP_MATCH_SIDE_EFFECTS);
3168 case CONSTRUCTOR:
3169 /* In GIMPLE empty constructors are allowed in initializers of
3170 aggregates. */
3171 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3172 default:
3173 break;
3174 }
3175
3176 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3177 two instances of undefined behavior will give identical results. */
3178 if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3179 return false;
3180
3181 /* Define macros to test an operand from arg0 and arg1 for equality and a
3182 variant that allows null and views null as being different from any
3183 non-null value. In the latter case, if either is null, then both
3184 must be; otherwise, do the normal comparison. */
3185 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3186 TREE_OPERAND (arg1, N), flags)
3187
3188 #define OP_SAME_WITH_NULL(N) \
3189 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3190 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3191
3192 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3193 {
3194 case tcc_unary:
3195 /* Two conversions are equal only if signedness and modes match. */
3196 switch (TREE_CODE (arg0))
3197 {
3198 CASE_CONVERT:
3199 case FIX_TRUNC_EXPR:
3200 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3201 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3202 return false;
3203 break;
3204 default:
3205 break;
3206 }
3207
3208 return OP_SAME (0);
3209
3210
3211 case tcc_comparison:
3212 case tcc_binary:
3213 if (OP_SAME (0) && OP_SAME (1))
3214 return true;
3215
3216 /* For commutative ops, allow the other order. */
3217 return (commutative_tree_code (TREE_CODE (arg0))
3218 && operand_equal_p (TREE_OPERAND (arg0, 0),
3219 TREE_OPERAND (arg1, 1), flags)
3220 && operand_equal_p (TREE_OPERAND (arg0, 1),
3221 TREE_OPERAND (arg1, 0), flags));
3222
3223 case tcc_reference:
3224 /* If either of the pointer (or reference) expressions we are
3225 dereferencing contain a side effect, these cannot be equal,
3226 but their addresses can be. */
3227 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3228 && (TREE_SIDE_EFFECTS (arg0)
3229 || TREE_SIDE_EFFECTS (arg1)))
3230 return false;
3231
3232 switch (TREE_CODE (arg0))
3233 {
3234 case INDIRECT_REF:
3235 if (!(flags & OEP_ADDRESS_OF))
3236 {
3237 if (TYPE_ALIGN (TREE_TYPE (arg0))
3238 != TYPE_ALIGN (TREE_TYPE (arg1)))
3239 return false;
3240 /* Verify that the access types are compatible. */
3241 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3242 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3243 return false;
3244 }
3245 flags &= ~OEP_ADDRESS_OF;
3246 return OP_SAME (0);
3247
3248 case IMAGPART_EXPR:
3249 /* Require the same offset. */
3250 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3251 TYPE_SIZE (TREE_TYPE (arg1)),
3252 flags & ~OEP_ADDRESS_OF))
3253 return false;
3254
3255 /* Fallthru. */
3256 case REALPART_EXPR:
3257 case VIEW_CONVERT_EXPR:
3258 return OP_SAME (0);
3259
3260 case TARGET_MEM_REF:
3261 case MEM_REF:
3262 if (!(flags & OEP_ADDRESS_OF))
3263 {
3264 /* Require equal access sizes. */
3265 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3266 && (!TYPE_SIZE (TREE_TYPE (arg0))
3267 || !TYPE_SIZE (TREE_TYPE (arg1))
3268 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3269 TYPE_SIZE (TREE_TYPE (arg1)),
3270 flags)))
3271 return false;
3272 /* Verify that access happens in similar types. */
3273 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3274 return false;
3275 /* Verify that accesses are TBAA compatible. */
3276 if (!alias_ptr_types_compatible_p
3277 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3278 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3279 || (MR_DEPENDENCE_CLIQUE (arg0)
3280 != MR_DEPENDENCE_CLIQUE (arg1))
3281 || (MR_DEPENDENCE_BASE (arg0)
3282 != MR_DEPENDENCE_BASE (arg1)))
3283 return false;
3284 /* Verify that alignment is compatible. */
3285 if (TYPE_ALIGN (TREE_TYPE (arg0))
3286 != TYPE_ALIGN (TREE_TYPE (arg1)))
3287 return false;
3288 }
3289 flags &= ~OEP_ADDRESS_OF;
3290 return (OP_SAME (0) && OP_SAME (1)
3291 /* TARGET_MEM_REFs require equal extra operands. */
3292 && (TREE_CODE (arg0) != TARGET_MEM_REF
3293 || (OP_SAME_WITH_NULL (2)
3294 && OP_SAME_WITH_NULL (3)
3295 && OP_SAME_WITH_NULL (4))));
3296
3297 case ARRAY_REF:
3298 case ARRAY_RANGE_REF:
3299 if (!OP_SAME (0))
3300 return false;
3301 flags &= ~OEP_ADDRESS_OF;
3302 /* First compare the array index by value if it is constant, as we
3303 may have different types but the same value here. */
3304 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3305 TREE_OPERAND (arg1, 1))
3306 || OP_SAME (1))
3307 && OP_SAME_WITH_NULL (2)
3308 && OP_SAME_WITH_NULL (3)
3309 /* Compare low bound and element size as with OEP_ADDRESS_OF
3310 we have to account for the offset of the ref. */
3311 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3312 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3313 || (operand_equal_p (array_ref_low_bound
3314 (CONST_CAST_TREE (arg0)),
3315 array_ref_low_bound
3316 (CONST_CAST_TREE (arg1)), flags)
3317 && operand_equal_p (array_ref_element_size
3318 (CONST_CAST_TREE (arg0)),
3319 array_ref_element_size
3320 (CONST_CAST_TREE (arg1)),
3321 flags))));
3322
3323 case COMPONENT_REF:
3324 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3325 may be NULL when we're called to compare MEM_EXPRs. */
3326 if (!OP_SAME_WITH_NULL (0)
3327 || !OP_SAME (1))
3328 return false;
3329 flags &= ~OEP_ADDRESS_OF;
3330 return OP_SAME_WITH_NULL (2);
3331
3332 case BIT_FIELD_REF:
3333 if (!OP_SAME (0))
3334 return false;
3335 flags &= ~OEP_ADDRESS_OF;
3336 return OP_SAME (1) && OP_SAME (2);
3337
3338 /* Virtual table call. */
3339 case OBJ_TYPE_REF:
3340 {
3341 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3342 OBJ_TYPE_REF_EXPR (arg1), flags))
3343 return false;
3344 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3345 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3346 return false;
3347 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3348 OBJ_TYPE_REF_OBJECT (arg1), flags))
3349 return false;
3350 if (!types_same_for_odr (obj_type_ref_class (arg0),
3351 obj_type_ref_class (arg1)))
3352 return false;
3353 return true;
3354 }
3355
3356 default:
3357 return false;
3358 }
3359
3360 case tcc_expression:
3361 switch (TREE_CODE (arg0))
3362 {
3363 case ADDR_EXPR:
3364 /* Be sure we pass right ADDRESS_OF flag. */
3365 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3366 return operand_equal_p (TREE_OPERAND (arg0, 0),
3367 TREE_OPERAND (arg1, 0),
3368 flags | OEP_ADDRESS_OF);
3369
3370 case TRUTH_NOT_EXPR:
3371 return OP_SAME (0);
3372
3373 case TRUTH_ANDIF_EXPR:
3374 case TRUTH_ORIF_EXPR:
3375 return OP_SAME (0) && OP_SAME (1);
3376
3377 case WIDEN_MULT_PLUS_EXPR:
3378 case WIDEN_MULT_MINUS_EXPR:
3379 if (!OP_SAME (2))
3380 return false;
3381 /* The multiplication operands are commutative. */
3382 /* FALLTHRU */
3383
3384 case TRUTH_AND_EXPR:
3385 case TRUTH_OR_EXPR:
3386 case TRUTH_XOR_EXPR:
3387 if (OP_SAME (0) && OP_SAME (1))
3388 return true;
3389
3390 /* Otherwise take into account that this is a commutative operation. */
3391 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3392 TREE_OPERAND (arg1, 1), flags)
3393 && operand_equal_p (TREE_OPERAND (arg0, 1),
3394 TREE_OPERAND (arg1, 0), flags));
3395
3396 case COND_EXPR:
3397 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3398 return false;
3399 flags &= ~OEP_ADDRESS_OF;
3400 return OP_SAME (0);
3401
3402 case BIT_INSERT_EXPR:
3403 /* BIT_INSERT_EXPR has an implicit operand, the type precision
3404 of op1. Need to check to make sure they are the same. */
3405 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3406 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3407 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3408 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3409 return false;
3410 /* FALLTHRU */
3411
3412 case VEC_COND_EXPR:
3413 case DOT_PROD_EXPR:
3414 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3415
3416 case MODIFY_EXPR:
3417 case INIT_EXPR:
3418 case COMPOUND_EXPR:
3419 case PREDECREMENT_EXPR:
3420 case PREINCREMENT_EXPR:
3421 case POSTDECREMENT_EXPR:
3422 case POSTINCREMENT_EXPR:
3423 if (flags & OEP_LEXICOGRAPHIC)
3424 return OP_SAME (0) && OP_SAME (1);
3425 return false;
3426
3427 case CLEANUP_POINT_EXPR:
3428 case EXPR_STMT:
3429 case SAVE_EXPR:
3430 if (flags & OEP_LEXICOGRAPHIC)
3431 return OP_SAME (0);
3432 return false;
3433
3434 default:
3435 return false;
3436 }
3437
3438 case tcc_vl_exp:
3439 switch (TREE_CODE (arg0))
3440 {
3441 case CALL_EXPR:
3442 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3443 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3444 /* If the two CALL_EXPRs are not both internal or both normal
3445 function calls, then they are not equal. */
3446 return false;
3447 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3448 {
3449 /* If the CALL_EXPRs call different internal functions, then they
3450 are not equal. */
3451 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3452 return false;
3453 }
3454 else
3455 {
3456 /* If the CALL_EXPRs call different functions, then they are not
3457 equal. */
3458 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3459 flags))
3460 return false;
3461 }
3462
3463 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3464 {
3465 unsigned int cef = call_expr_flags (arg0);
3466 if (flags & OEP_PURE_SAME)
3467 cef &= ECF_CONST | ECF_PURE;
3468 else
3469 cef &= ECF_CONST;
3470 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3471 return false;
3472 }
3473
3474 /* Now see if all the arguments are the same. */
3475 {
3476 const_call_expr_arg_iterator iter0, iter1;
3477 const_tree a0, a1;
3478 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3479 a1 = first_const_call_expr_arg (arg1, &iter1);
3480 a0 && a1;
3481 a0 = next_const_call_expr_arg (&iter0),
3482 a1 = next_const_call_expr_arg (&iter1))
3483 if (! operand_equal_p (a0, a1, flags))
3484 return false;
3485
3486 /* If we get here and both argument lists are exhausted
3487 then the CALL_EXPRs are equal. */
3488 return ! (a0 || a1);
3489 }
3490 default:
3491 return false;
3492 }
3493
3494 case tcc_declaration:
3495 /* Consider __builtin_sqrt equal to sqrt. */
3496 return (TREE_CODE (arg0) == FUNCTION_DECL
3497 && fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3498 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3499 && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3500 == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3501
3502 case tcc_exceptional:
3503 if (TREE_CODE (arg0) == CONSTRUCTOR)
3504 {
3505 if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3506 return false;
3507
3508 /* In GIMPLE constructors are used only to build vectors from
3509 elements. Individual elements in the constructor must be
3510 indexed in increasing order and form an initial sequence.
3511
3512 We make no effort to compare constructors in GENERIC.
3513 (See sem_variable::equals in ipa-icf, which can do so for
3514 constants.) */
3515 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3516 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3517 return false;
3518
3519 /* Be sure that the constructed vectors have the same representation.
3520 We have only tested that element precision and modes match.
3521 Vectors may be BLKmode, so also check that the number of
3522 parts matches. */
3523 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3524 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3525 return false;
3526
3527 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3528 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3529 unsigned int len = vec_safe_length (v0);
3530
3531 if (len != vec_safe_length (v1))
3532 return false;
3533
3534 for (unsigned int i = 0; i < len; i++)
3535 {
3536 constructor_elt *c0 = &(*v0)[i];
3537 constructor_elt *c1 = &(*v1)[i];
3538
3539 if (!operand_equal_p (c0->value, c1->value, flags)
3540 /* In GIMPLE the indexes can be either NULL or matching i.
3541 Double check this so we won't get false
3542 positives for GENERIC. */
3543 || (c0->index
3544 && (TREE_CODE (c0->index) != INTEGER_CST
3545 || compare_tree_int (c0->index, i)))
3546 || (c1->index
3547 && (TREE_CODE (c1->index) != INTEGER_CST
3548 || compare_tree_int (c1->index, i))))
3549 return false;
3550 }
3551 return true;
3552 }
3553 else if (TREE_CODE (arg0) == STATEMENT_LIST
3554 && (flags & OEP_LEXICOGRAPHIC))
3555 {
3556 /* Compare the STATEMENT_LISTs. */
3557 tree_stmt_iterator tsi1, tsi2;
3558 tree body1 = CONST_CAST_TREE (arg0);
3559 tree body2 = CONST_CAST_TREE (arg1);
3560 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3561 tsi_next (&tsi1), tsi_next (&tsi2))
3562 {
3563 /* The lists don't have the same number of statements. */
3564 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3565 return false;
3566 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3567 return true;
3568 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3569 flags & (OEP_LEXICOGRAPHIC
3570 | OEP_NO_HASH_CHECK)))
3571 return false;
3572 }
3573 }
3574 return false;
3575
3576 case tcc_statement:
3577 switch (TREE_CODE (arg0))
3578 {
3579 case RETURN_EXPR:
3580 if (flags & OEP_LEXICOGRAPHIC)
3581 return OP_SAME_WITH_NULL (0);
3582 return false;
3583 case DEBUG_BEGIN_STMT:
3584 if (flags & OEP_LEXICOGRAPHIC)
3585 return true;
3586 return false;
3587 default:
3588 return false;
3589 }
3590
3591 default:
3592 return false;
3593 }
3594
3595 #undef OP_SAME
3596 #undef OP_SAME_WITH_NULL
3597 }
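/* Illustrative examples of the semantics above, for side-effect-free
   operands a, b, x:

     operand_equal_p (a + b, b + a, 0)   => true (commutative ops match in
                                            either operand order)
     a NOP_EXPR and a CONVERT_EXPR of
     the same operand                    => equal (the two codes are
                                            treated alike)
     two calls f () with identical args  => equal only if f is ECF_CONST
                                            (or ECF_PURE with OEP_PURE_SAME)
     REAL_CST 0.0 vs -0.0                => unequal once signed zeros are
                                            honored

   as spelled out in the comment at the head of the function.  */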
3598
3599 /* Generate a hash value for an expression. This can be used iteratively
3600 by passing a previous result as the HSTATE argument. */
3601
3602 void
3603 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3604 unsigned int flags)
3605 {
3606 int i;
3607 enum tree_code code;
3608 enum tree_code_class tclass;
3609
3610 if (t == NULL_TREE || t == error_mark_node)
3611 {
3612 hstate.merge_hash (0);
3613 return;
3614 }
3615
3616 STRIP_ANY_LOCATION_WRAPPER (t);
3617
3618 if (!(flags & OEP_ADDRESS_OF))
3619 STRIP_NOPS (t);
3620
3621 code = TREE_CODE (t);
3622
3623 switch (code)
3624 {
3625 /* Alas, constants aren't shared, so we can't rely on pointer
3626 identity. */
3627 case VOID_CST:
3628 hstate.merge_hash (0);
3629 return;
3630 case INTEGER_CST:
3631 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3632 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3633 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3634 return;
3635 case REAL_CST:
3636 {
3637 unsigned int val2;
3638 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3639 val2 = rvc_zero;
3640 else
3641 val2 = real_hash (TREE_REAL_CST_PTR (t));
3642 hstate.merge_hash (val2);
3643 return;
3644 }
3645 case FIXED_CST:
3646 {
3647 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3648 hstate.merge_hash (val2);
3649 return;
3650 }
3651 case STRING_CST:
3652 hstate.add ((const void *) TREE_STRING_POINTER (t),
3653 TREE_STRING_LENGTH (t));
3654 return;
3655 case COMPLEX_CST:
3656 hash_operand (TREE_REALPART (t), hstate, flags);
3657 hash_operand (TREE_IMAGPART (t), hstate, flags);
3658 return;
3659 case VECTOR_CST:
3660 {
3661 hstate.add_int (VECTOR_CST_NPATTERNS (t));
3662 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3663 unsigned int count = vector_cst_encoded_nelts (t);
3664 for (unsigned int i = 0; i < count; ++i)
3665 hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3666 return;
3667 }
3668 case SSA_NAME:
3669 /* We can just compare by pointer. */
3670 hstate.add_hwi (SSA_NAME_VERSION (t));
3671 return;
3672 case PLACEHOLDER_EXPR:
3673 /* The node itself doesn't matter. */
3674 return;
3675 case BLOCK:
3676 case OMP_CLAUSE:
3677 /* Ignore. */
3678 return;
3679 case TREE_LIST:
3680 /* A list of expressions, for a CALL_EXPR or as the elements of a
3681 VECTOR_CST. */
3682 for (; t; t = TREE_CHAIN (t))
3683 hash_operand (TREE_VALUE (t), hstate, flags);
3684 return;
3685 case CONSTRUCTOR:
3686 {
3687 unsigned HOST_WIDE_INT idx;
3688 tree field, value;
3689 flags &= ~OEP_ADDRESS_OF;
3690 hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3691 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3692 {
3693 /* In GIMPLE the indexes can be either NULL or matching i. */
3694 if (field == NULL_TREE)
3695 field = bitsize_int (idx);
3696 hash_operand (field, hstate, flags);
3697 hash_operand (value, hstate, flags);
3698 }
3699 return;
3700 }
3701 case STATEMENT_LIST:
3702 {
3703 tree_stmt_iterator i;
3704 for (i = tsi_start (CONST_CAST_TREE (t));
3705 !tsi_end_p (i); tsi_next (&i))
3706 hash_operand (tsi_stmt (i), hstate, flags);
3707 return;
3708 }
3709 case TREE_VEC:
3710 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3711 hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3712 return;
3713 case IDENTIFIER_NODE:
3714 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
3715 return;
3716 case FUNCTION_DECL:
3717 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3718 Otherwise nodes that compare equal according to operand_equal_p might
3719 get different hash codes. However, don't do this for machine specific
3720 or front end builtins, since the function code is overloaded in those
3721 cases. */
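/* E.g. a FUNCTION_DECL for sqrt is hashed via the __builtin_sqrt decl, so
both uses get the same hash, mirroring how operand_equal_p considers the
two decls equal. (Illustrative example.) */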
3722 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
3723 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
3724 {
3725 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
3726 code = TREE_CODE (t);
3727 }
3728 /* FALL THROUGH */
3729 default:
3730 if (POLY_INT_CST_P (t))
3731 {
3732 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3733 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
3734 return;
3735 }
3736 tclass = TREE_CODE_CLASS (code);
3737
3738 if (tclass == tcc_declaration)
3739 {
3740 /* DECLs have a unique ID. */
3741 hstate.add_hwi (DECL_UID (t));
3742 }
3743 else if (tclass == tcc_comparison && !commutative_tree_code (code))
3744 {
3745 /* For comparisons that can be swapped, use the lower
3746 tree code. */
3747 enum tree_code ccode = swap_tree_comparison (code);
3748 if (code < ccode)
3749 ccode = code;
3750 hstate.add_object (ccode);
3751 hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
3752 hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
3753 }
3754 else if (CONVERT_EXPR_CODE_P (code))
3755 {
3756 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3757 operand_equal_p. */
3758 enum tree_code ccode = NOP_EXPR;
3759 hstate.add_object (ccode);
3760
3761 /* Don't hash the type, that can lead to having nodes which
3762 compare equal according to operand_equal_p, but which
3763 have different hash codes. Make sure to include signedness
3764 in the hash computation. */
3765 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3766 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3767 }
3768 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
3769 else if (code == MEM_REF
3770 && (flags & OEP_ADDRESS_OF) != 0
3771 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
3772 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
3773 && integer_zerop (TREE_OPERAND (t, 1)))
3774 hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
3775 hstate, flags);
3776 /* Don't ICE on FE specific trees, or their arguments etc.
3777 during operand_equal_p hash verification. */
3778 else if (!IS_EXPR_CODE_CLASS (tclass))
3779 gcc_assert (flags & OEP_HASH_CHECK);
3780 else
3781 {
3782 unsigned int sflags = flags;
3783
3784 hstate.add_object (code);
3785
3786 switch (code)
3787 {
3788 case ADDR_EXPR:
3789 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3790 flags |= OEP_ADDRESS_OF;
3791 sflags = flags;
3792 break;
3793
3794 case INDIRECT_REF:
3795 case MEM_REF:
3796 case TARGET_MEM_REF:
3797 flags &= ~OEP_ADDRESS_OF;
3798 sflags = flags;
3799 break;
3800
3801 case ARRAY_REF:
3802 case ARRAY_RANGE_REF:
3803 case COMPONENT_REF:
3804 case BIT_FIELD_REF:
3805 sflags &= ~OEP_ADDRESS_OF;
3806 break;
3807
3808 case COND_EXPR:
3809 flags &= ~OEP_ADDRESS_OF;
3810 break;
3811
3812 case WIDEN_MULT_PLUS_EXPR:
3813 case WIDEN_MULT_MINUS_EXPR:
3814 {
3815 /* The multiplication operands are commutative. */
3816 inchash::hash one, two;
3817 hash_operand (TREE_OPERAND (t, 0), one, flags);
3818 hash_operand (TREE_OPERAND (t, 1), two, flags);
3819 hstate.add_commutative (one, two);
3820 hash_operand (TREE_OPERAND (t, 2), two, flags);
3821 return;
3822 }
3823
3824 case CALL_EXPR:
3825 if (CALL_EXPR_FN (t) == NULL_TREE)
3826 hstate.add_int (CALL_EXPR_IFN (t));
3827 break;
3828
3829 case TARGET_EXPR:
3830 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
3831 Usually different TARGET_EXPRs should just use
3832 different temporaries in their slots. */
3833 hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
3834 return;
3835
3836 /* Virtual table call. */
3837 case OBJ_TYPE_REF:
3838 inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
3839 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
3840 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
3841 return;
3842 default:
3843 break;
3844 }
3845
3846 /* Don't hash the type, that can lead to having nodes which
3847 compare equal according to operand_equal_p, but which
3848 have different hash codes. */
3849 if (code == NON_LVALUE_EXPR)
3850 {
3851 /* Make sure to include signedness in the hash computation. */
3852 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3853 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3854 }
3855
3856 else if (commutative_tree_code (code))
3857 {
3858 /* It's a commutative expression. We want to hash it the same
3859 however it appears. We do this by first hashing both operands
3860 and then rehashing based on the order of their independent
3861 hashes. */
3862 inchash::hash one, two;
3863 hash_operand (TREE_OPERAND (t, 0), one, flags);
3864 hash_operand (TREE_OPERAND (t, 1), two, flags);
3865 hstate.add_commutative (one, two);
3866 }
3867 else
3868 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
3869 hash_operand (TREE_OPERAND (t, i), hstate,
3870 i == 0 ? flags : sflags);
3871 }
3872 return;
3873 }
3874 }
3875
3876 bool
3877 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
3878 unsigned int flags, bool *ret)
3879 {
3880 /* When checking, verify at the outermost operand_equal_p call that
3881 if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
3882 hash value. */
3883 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
3884 {
3885 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
3886 {
3887 if (arg0 != arg1)
3888 {
3889 inchash::hash hstate0 (0), hstate1 (0);
3890 hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
3891 hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
3892 hashval_t h0 = hstate0.end ();
3893 hashval_t h1 = hstate1.end ();
3894 gcc_assert (h0 == h1);
3895 }
3896 *ret = true;
3897 }
3898 else
3899 *ret = false;
3900
3901 return true;
3902 }
3903
3904 return false;
3905 }
3906
3907
3908 static operand_compare default_compare_instance;
3909
3910 /* Convenience wrapper around the operand_compare class because usually we do
3911 not need to play with the valueizer. */
3912
3913 bool
3914 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3915 {
3916 return default_compare_instance.operand_equal_p (arg0, arg1, flags);
3917 }
3918
3919 namespace inchash
3920 {
3921
3922 /* Generate a hash value for an expression. This can be used iteratively
3923 by passing a previous result as the HSTATE argument.
3924
3925 This function is intended to produce the same hash for expressions which
3926 would compare equal using operand_equal_p. */
3927 void
3928 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
3929 {
3930 default_compare_instance.hash_operand (t, hstate, flags);
3931 }
3932
3933 }
3934 \f
3935 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3936 with a different signedness or a narrower precision. */
3937
3938 static bool
3939 operand_equal_for_comparison_p (tree arg0, tree arg1)
3940 {
3941 if (operand_equal_p (arg0, arg1, 0))
3942 return true;
3943
3944 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3945 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3946 return false;
3947
3948 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3949 and see if the inner values are the same. This removes any
3950 signedness comparison, which doesn't matter here. */
3951 tree op0 = arg0;
3952 tree op1 = arg1;
3953 STRIP_NOPS (op0);
3954 STRIP_NOPS (op1);
3955 if (operand_equal_p (op0, op1, 0))
3956 return true;
3957
3958 /* Discard a single widening conversion from ARG1 and see if the inner
3959 value is the same as ARG0. */
3960 if (CONVERT_EXPR_P (arg1)
3961 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3962 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3963 < TYPE_PRECISION (TREE_TYPE (arg1))
3964 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
3965 return true;
3966
3967 return false;
3968 }
3969 \f
3970 /* See if ARG is an expression that is either a comparison or is performing
3971 arithmetic on comparisons. The comparisons must only be comparing
3972 two different values, which will be stored in *CVAL1 and *CVAL2; if
3973 they are nonzero it means that some operands have already been found.
3974 No variables may be used anywhere else in the expression except in the
3975 comparisons.
3976
3977 If this is true, return true. Otherwise, return false. */
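/* For instance, (a == b) || (a > b) qualifies, with *CVAL1 == a and
*CVAL2 == b, while (a < b) && (c != 0) does not, because C is a third
variable. (Illustrative example.) */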
3978
3979 static bool
3980 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
3981 {
3982 enum tree_code code = TREE_CODE (arg);
3983 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3984
3985 /* We can handle some of the tcc_expression cases here. */
3986 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3987 tclass = tcc_unary;
3988 else if (tclass == tcc_expression
3989 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3990 || code == COMPOUND_EXPR))
3991 tclass = tcc_binary;
3992
3993 switch (tclass)
3994 {
3995 case tcc_unary:
3996 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
3997
3998 case tcc_binary:
3999 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4000 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
4001
4002 case tcc_constant:
4003 return true;
4004
4005 case tcc_expression:
4006 if (code == COND_EXPR)
4007 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4008 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
4009 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
4010 return false;
4011
4012 case tcc_comparison:
4013 /* First see if we can handle the first operand, then the second. For
4014 the second operand, we know *CVAL1 can't be zero. It must be that
4015 one side of the comparison is each of the values; test for the
4016 case where this isn't true by failing if the two operands
4017 are the same. */
4018
4019 if (operand_equal_p (TREE_OPERAND (arg, 0),
4020 TREE_OPERAND (arg, 1), 0))
4021 return false;
4022
4023 if (*cval1 == 0)
4024 *cval1 = TREE_OPERAND (arg, 0);
4025 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4026 ;
4027 else if (*cval2 == 0)
4028 *cval2 = TREE_OPERAND (arg, 0);
4029 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4030 ;
4031 else
4032 return false;
4033
4034 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4035 ;
4036 else if (*cval2 == 0)
4037 *cval2 = TREE_OPERAND (arg, 1);
4038 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4039 ;
4040 else
4041 return false;
4042
4043 return true;
4044
4045 default:
4046 return false;
4047 }
4048 }
4049 \f
4050 /* ARG is a tree that is known to contain just arithmetic operations and
4051 comparisons. Evaluate the operations in the tree substituting NEW0 for
4052 any occurrence of OLD0 as an operand of a comparison and likewise for
4053 NEW1 and OLD1. */
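/* For example, with OLD0 == a, NEW0 == x, OLD1 == b and NEW1 == y, the
tree (a < b) && (b == a) is rewritten as (x < y) && (y == x).
(Illustrative example.) */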
4054
4055 static tree
4056 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4057 tree old1, tree new1)
4058 {
4059 tree type = TREE_TYPE (arg);
4060 enum tree_code code = TREE_CODE (arg);
4061 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4062
4063 /* We can handle some of the tcc_expression cases here. */
4064 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4065 tclass = tcc_unary;
4066 else if (tclass == tcc_expression
4067 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4068 tclass = tcc_binary;
4069
4070 switch (tclass)
4071 {
4072 case tcc_unary:
4073 return fold_build1_loc (loc, code, type,
4074 eval_subst (loc, TREE_OPERAND (arg, 0),
4075 old0, new0, old1, new1));
4076
4077 case tcc_binary:
4078 return fold_build2_loc (loc, code, type,
4079 eval_subst (loc, TREE_OPERAND (arg, 0),
4080 old0, new0, old1, new1),
4081 eval_subst (loc, TREE_OPERAND (arg, 1),
4082 old0, new0, old1, new1));
4083
4084 case tcc_expression:
4085 switch (code)
4086 {
4087 case SAVE_EXPR:
4088 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4089 old1, new1);
4090
4091 case COMPOUND_EXPR:
4092 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4093 old1, new1);
4094
4095 case COND_EXPR:
4096 return fold_build3_loc (loc, code, type,
4097 eval_subst (loc, TREE_OPERAND (arg, 0),
4098 old0, new0, old1, new1),
4099 eval_subst (loc, TREE_OPERAND (arg, 1),
4100 old0, new0, old1, new1),
4101 eval_subst (loc, TREE_OPERAND (arg, 2),
4102 old0, new0, old1, new1));
4103 default:
4104 break;
4105 }
4106 /* Fall through - ??? */
4107
4108 case tcc_comparison:
4109 {
4110 tree arg0 = TREE_OPERAND (arg, 0);
4111 tree arg1 = TREE_OPERAND (arg, 1);
4112
4113 /* We need to check both for exact equality and tree equality. The
4114 former will be true if the operand has a side-effect. In that
4115 case, we know the operand occurred exactly once. */
4116
4117 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4118 arg0 = new0;
4119 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4120 arg0 = new1;
4121
4122 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4123 arg1 = new0;
4124 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4125 arg1 = new1;
4126
4127 return fold_build2_loc (loc, code, type, arg0, arg1);
4128 }
4129
4130 default:
4131 return arg;
4132 }
4133 }
4134 \f
4135 /* Return a tree for the case when the result of an expression is RESULT
4136 converted to TYPE and OMITTED was previously an operand of the expression
4137 but is now not needed (e.g., we folded OMITTED * 0).
4138
4139 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4140 the conversion of RESULT to TYPE. */
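/* E.g. when f () * 0 has been folded, RESULT is 0 and OMITTED is the call
f (); since the call has side effects, the returned tree is (f (), 0). */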
4141
4142 tree
4143 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4144 {
4145 tree t = fold_convert_loc (loc, type, result);
4146
4147 /* If the resulting operand is an empty statement, just return the omitted
4148 statement cast to void. */
4149 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4150 return build1_loc (loc, NOP_EXPR, void_type_node,
4151 fold_ignored_result (omitted));
4152
4153 if (TREE_SIDE_EFFECTS (omitted))
4154 return build2_loc (loc, COMPOUND_EXPR, type,
4155 fold_ignored_result (omitted), t);
4156
4157 return non_lvalue_loc (loc, t);
4158 }
4159
4160 /* Return a tree for the case when the result of an expression is RESULT
4161 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4162 of the expression but are now not needed.
4163
4164 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4165 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4166 evaluated before OMITTED2. Otherwise, if neither has side effects,
4167 just do the conversion of RESULT to TYPE. */
4168
4169 tree
4170 omit_two_operands_loc (location_t loc, tree type, tree result,
4171 tree omitted1, tree omitted2)
4172 {
4173 tree t = fold_convert_loc (loc, type, result);
4174
4175 if (TREE_SIDE_EFFECTS (omitted2))
4176 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4177 if (TREE_SIDE_EFFECTS (omitted1))
4178 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4179
4180 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4181 }
4182
4183 \f
4184 /* Return a simplified tree node for the truth-negation of ARG. This
4185 never alters ARG itself. We assume that ARG is an operation that
4186 returns a truth value (0 or 1).
4187
4188 FIXME: one would think we would fold the result, but it causes
4189 problems with the dominator optimizer. */
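/* For instance, !(x < y) becomes x >= y for integral operands (but is left
alone for trapping floating-point comparisons), and !(a && b) becomes
!a || !b. (Illustrative examples.) */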
4190
4191 static tree
4192 fold_truth_not_expr (location_t loc, tree arg)
4193 {
4194 tree type = TREE_TYPE (arg);
4195 enum tree_code code = TREE_CODE (arg);
4196 location_t loc1, loc2;
4197
4198 /* If this is a comparison, we can simply invert it, except for
4199 floating-point non-equality comparisons, in which case we just
4200 enclose a TRUTH_NOT_EXPR around what we have. */
4201
4202 if (TREE_CODE_CLASS (code) == tcc_comparison)
4203 {
4204 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4205 if (FLOAT_TYPE_P (op_type)
4206 && flag_trapping_math
4207 && code != ORDERED_EXPR && code != UNORDERED_EXPR
4208 && code != NE_EXPR && code != EQ_EXPR)
4209 return NULL_TREE;
4210
4211 code = invert_tree_comparison (code, HONOR_NANS (op_type));
4212 if (code == ERROR_MARK)
4213 return NULL_TREE;
4214
4215 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4216 TREE_OPERAND (arg, 1));
4217 if (TREE_NO_WARNING (arg))
4218 TREE_NO_WARNING (ret) = 1;
4219 return ret;
4220 }
4221
4222 switch (code)
4223 {
4224 case INTEGER_CST:
4225 return constant_boolean_node (integer_zerop (arg), type);
4226
4227 case TRUTH_AND_EXPR:
4228 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4229 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4230 return build2_loc (loc, TRUTH_OR_EXPR, type,
4231 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4232 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4233
4234 case TRUTH_OR_EXPR:
4235 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4236 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4237 return build2_loc (loc, TRUTH_AND_EXPR, type,
4238 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4239 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4240
4241 case TRUTH_XOR_EXPR:
4242 /* Here we can invert either operand. We invert the first operand
4243 unless the second operand is a TRUTH_NOT_EXPR in which case our
4244 result is the XOR of the first operand with the inside of the
4245 negation of the second operand. */
4246
4247 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4248 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4249 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4250 else
4251 return build2_loc (loc, TRUTH_XOR_EXPR, type,
4252 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4253 TREE_OPERAND (arg, 1));
4254
4255 case TRUTH_ANDIF_EXPR:
4256 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4257 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4258 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4259 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4260 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4261
4262 case TRUTH_ORIF_EXPR:
4263 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4264 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4265 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4266 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4267 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4268
4269 case TRUTH_NOT_EXPR:
4270 return TREE_OPERAND (arg, 0);
4271
4272 case COND_EXPR:
4273 {
4274 tree arg1 = TREE_OPERAND (arg, 1);
4275 tree arg2 = TREE_OPERAND (arg, 2);
4276
4277 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4278 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4279
4280 /* A COND_EXPR may have a throw as one operand, which
4281 then has void type. Just leave void operands
4282 as they are. */
4283 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4284 VOID_TYPE_P (TREE_TYPE (arg1))
4285 ? arg1 : invert_truthvalue_loc (loc1, arg1),
4286 VOID_TYPE_P (TREE_TYPE (arg2))
4287 ? arg2 : invert_truthvalue_loc (loc2, arg2));
4288 }
4289
4290 case COMPOUND_EXPR:
4291 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4292 return build2_loc (loc, COMPOUND_EXPR, type,
4293 TREE_OPERAND (arg, 0),
4294 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4295
4296 case NON_LVALUE_EXPR:
4297 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4298 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4299
4300 CASE_CONVERT:
4301 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4302 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4303
4304 /* fall through */
4305
4306 case FLOAT_EXPR:
4307 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4308 return build1_loc (loc, TREE_CODE (arg), type,
4309 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4310
4311 case BIT_AND_EXPR:
4312 if (!integer_onep (TREE_OPERAND (arg, 1)))
4313 return NULL_TREE;
4314 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4315
4316 case SAVE_EXPR:
4317 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4318
4319 case CLEANUP_POINT_EXPR:
4320 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4321 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4322 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4323
4324 default:
4325 return NULL_TREE;
4326 }
4327 }
4328
4329 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4330 assume that ARG is an operation that returns a truth value (0 or 1
4331 for scalars, 0 or -1 for vectors). Return the folded expression if
4332 folding is successful. Otherwise, return NULL_TREE. */
4333
4334 static tree
4335 fold_invert_truthvalue (location_t loc, tree arg)
4336 {
4337 tree type = TREE_TYPE (arg);
4338 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4339 ? BIT_NOT_EXPR
4340 : TRUTH_NOT_EXPR,
4341 type, arg);
4342 }
4343
4344 /* Return a simplified tree node for the truth-negation of ARG. This
4345 never alters ARG itself. We assume that ARG is an operation that
4346 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4347
4348 tree
4349 invert_truthvalue_loc (location_t loc, tree arg)
4350 {
4351 if (TREE_CODE (arg) == ERROR_MARK)
4352 return arg;
4353
4354 tree type = TREE_TYPE (arg);
4355 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4356 ? BIT_NOT_EXPR
4357 : TRUTH_NOT_EXPR,
4358 type, arg);
4359 }
4360 \f
4361 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4362 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4363 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4364 is the original memory reference used to preserve the alias set of
4365 the access. */
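/* E.g. with BITSIZE == 8 and BITPOS == 16 this builds
BIT_FIELD_REF <INNER, 8, 16>, possibly converted to TYPE.
(Illustrative values.) */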
4366
4367 static tree
4368 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4369 HOST_WIDE_INT bitsize, poly_int64 bitpos,
4370 int unsignedp, int reversep)
4371 {
4372 tree result, bftype;
4373
4374 /* Attempt not to lose the access path if possible. */
4375 if (TREE_CODE (orig_inner) == COMPONENT_REF)
4376 {
4377 tree ninner = TREE_OPERAND (orig_inner, 0);
4378 machine_mode nmode;
4379 poly_int64 nbitsize, nbitpos;
4380 tree noffset;
4381 int nunsignedp, nreversep, nvolatilep = 0;
4382 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4383 &noffset, &nmode, &nunsignedp,
4384 &nreversep, &nvolatilep);
4385 if (base == inner
4386 && noffset == NULL_TREE
4387 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4388 && !reversep
4389 && !nreversep
4390 && !nvolatilep)
4391 {
4392 inner = ninner;
4393 bitpos -= nbitpos;
4394 }
4395 }
4396
4397 alias_set_type iset = get_alias_set (orig_inner);
4398 if (iset == 0 && get_alias_set (inner) != iset)
4399 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4400 build_fold_addr_expr (inner),
4401 build_int_cst (ptr_type_node, 0));
4402
4403 if (known_eq (bitpos, 0) && !reversep)
4404 {
4405 tree size = TYPE_SIZE (TREE_TYPE (inner));
4406 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4407 || POINTER_TYPE_P (TREE_TYPE (inner)))
4408 && tree_fits_shwi_p (size)
4409 && tree_to_shwi (size) == bitsize)
4410 return fold_convert_loc (loc, type, inner);
4411 }
4412
4413 bftype = type;
4414 if (TYPE_PRECISION (bftype) != bitsize
4415 || TYPE_UNSIGNED (bftype) == !unsignedp)
4416 bftype = build_nonstandard_integer_type (bitsize, 0);
4417
4418 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4419 bitsize_int (bitsize), bitsize_int (bitpos));
4420 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4421
4422 if (bftype != type)
4423 result = fold_convert_loc (loc, type, result);
4424
4425 return result;
4426 }
4427
4428 /* Optimize a bit-field compare.
4429
4430 There are two cases: First is a compare against a constant and the
4431 second is a comparison of two items where the fields are at the same
4432 bit position relative to the start of a chunk (byte, halfword, word)
4433 large enough to contain it. In these cases we can avoid the shift
4434 implicit in bitfield extractions.
4435
4436 For constants, we emit a compare of the shifted constant with the
4437 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4438 compared. For two fields at the same position, we do the ANDs with the
4439 similar mask and compare the result of the ANDs.
4440
4441 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4442 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4443 are the left and right operands of the comparison, respectively.
4444
4445 If the optimization described above can be done, we return the resulting
4446 tree. Otherwise we return zero. */
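/* As an illustrative sketch, for
struct S { unsigned int f : 3; } s;
the test s.f == 5 can become (WORD & MASK) == (5 << SHIFT), where WORD is
a mode-sized load covering the bit-field and MASK and SHIFT are derived
from the field's position and width. */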
4447
4448 static tree
4449 optimize_bit_field_compare (location_t loc, enum tree_code code,
4450 tree compare_type, tree lhs, tree rhs)
4451 {
4452 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4453 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4454 tree type = TREE_TYPE (lhs);
4455 tree unsigned_type;
4456 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4457 machine_mode lmode, rmode;
4458 scalar_int_mode nmode;
4459 int lunsignedp, runsignedp;
4460 int lreversep, rreversep;
4461 int lvolatilep = 0, rvolatilep = 0;
4462 tree linner, rinner = NULL_TREE;
4463 tree mask;
4464 tree offset;
4465
4466 /* Get all the information about the extractions being done. If the bit size
4467 is the same as the size of the underlying object, we aren't doing an
4468 extraction at all and so can do nothing. We also don't want to
4469 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4470 then will no longer be able to replace it. */
4471 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4472 &lunsignedp, &lreversep, &lvolatilep);
4473 if (linner == lhs
4474 || !known_size_p (plbitsize)
4475 || !plbitsize.is_constant (&lbitsize)
4476 || !plbitpos.is_constant (&lbitpos)
4477 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4478 || offset != 0
4479 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4480 || lvolatilep)
4481 return 0;
4482
4483 if (const_p)
4484 rreversep = lreversep;
4485 else
4486 {
4487 /* If this is not a constant, we can only do something if bit positions,
4488 sizes, signedness and storage order are the same. */
4489 rinner
4490 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4491 &runsignedp, &rreversep, &rvolatilep);
4492
4493 if (rinner == rhs
4494 || maybe_ne (lbitpos, rbitpos)
4495 || maybe_ne (lbitsize, rbitsize)
4496 || lunsignedp != runsignedp
4497 || lreversep != rreversep
4498 || offset != 0
4499 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4500 || rvolatilep)
4501 return 0;
4502 }
4503
4504 /* Honor the C++ memory model and mimic what RTL expansion does. */
4505 poly_uint64 bitstart = 0;
4506 poly_uint64 bitend = 0;
4507 if (TREE_CODE (lhs) == COMPONENT_REF)
4508 {
4509 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4510 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4511 return 0;
4512 }
4513
4514 /* See if we can find a mode to refer to this field. We should be able to,
4515 but fail if we can't. */
4516 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4517 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4518 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4519 TYPE_ALIGN (TREE_TYPE (rinner))),
4520 BITS_PER_WORD, false, &nmode))
4521 return 0;
4522
4523 /* Set signed and unsigned types of the precision of this mode for the
4524 shifts below. */
4525 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4526
4527 /* Compute the bit position and size for the new reference and our offset
4528 within it. If the new reference is the same size as the original, we
4529 won't optimize anything, so return zero. */
4530 nbitsize = GET_MODE_BITSIZE (nmode);
4531 nbitpos = lbitpos & ~ (nbitsize - 1);
4532 lbitpos -= nbitpos;
4533 if (nbitsize == lbitsize)
4534 return 0;
4535
4536 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4537 lbitpos = nbitsize - lbitsize - lbitpos;
4538
4539 /* Make the mask to be used against the extracted field. */
4540 mask = build_int_cst_type (unsigned_type, -1);
4541 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4542 mask = const_binop (RSHIFT_EXPR, mask,
4543 size_int (nbitsize - lbitsize - lbitpos));
4544
4545 if (! const_p)
4546 {
4547 if (nbitpos < 0)
4548 return 0;
4549
4550 /* If not comparing with a constant, just rework the comparison
4551 and return. */
4552 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4553 nbitsize, nbitpos, 1, lreversep);
4554 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4555 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4556 nbitsize, nbitpos, 1, rreversep);
4557 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4558 return fold_build2_loc (loc, code, compare_type, t1, t2);
4559 }
4560
4561 /* Otherwise, we are handling the constant case. See if the constant is too
4562 big for the field. Warn and return a tree for 0 (false) if so. We do
4563 this not only for its own sake, but to avoid having to test for this
4564 error case below. If we didn't, we might generate wrong code.
4565
4566 For unsigned fields, the constant shifted right by the field length should
4567 be all zero. For signed fields, the high-order bits should agree with
4568 the sign bit. */
4569
4570 if (lunsignedp)
4571 {
4572 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4573 {
4574 warning (0, "comparison is always %d due to width of bit-field",
4575 code == NE_EXPR);
4576 return constant_boolean_node (code == NE_EXPR, compare_type);
4577 }
4578 }
4579 else
4580 {
4581 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4582 if (tem != 0 && tem != -1)
4583 {
4584 warning (0, "comparison is always %d due to width of bit-field",
4585 code == NE_EXPR);
4586 return constant_boolean_node (code == NE_EXPR, compare_type);
4587 }
4588 }
4589
4590 if (nbitpos < 0)
4591 return 0;
4592
4593 /* Single-bit compares should always be against zero. */
4594 if (lbitsize == 1 && ! integer_zerop (rhs))
4595 {
4596 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4597 rhs = build_int_cst (type, 0);
4598 }
4599
4600 /* Make a new bitfield reference, shift the constant over the
4601 appropriate number of bits and mask it with the computed mask
4602 (in case this was a signed field). If we changed it, make a new one. */
4603 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4604 nbitsize, nbitpos, 1, lreversep);
4605
4606 rhs = const_binop (BIT_AND_EXPR,
4607 const_binop (LSHIFT_EXPR,
4608 fold_convert_loc (loc, unsigned_type, rhs),
4609 size_int (lbitpos)),
4610 mask);
4611
4612 lhs = build2_loc (loc, code, compare_type,
4613 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4614 return lhs;
4615 }
4616 \f
4617 /* Subroutine for fold_truth_andor_1: decode a field reference.
4618
4619 If EXP is a comparison reference, we return the innermost reference.
4620
4621 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4622 set to the starting bit number.
4623
4624 If the innermost field can be completely contained in a mode-sized
4625 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4626
4627 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4628 otherwise it is not changed.
4629
4630 *PUNSIGNEDP is set to the signedness of the field.
4631
4632 *PREVERSEP is set to the storage order of the field.
4633
4634 *PMASK is set to the mask used. This is either contained in a
4635 BIT_AND_EXPR or derived from the width of the field.
4636
4637 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4638
4639 Return 0 if this is not a component reference or is one that we can't
4640 do anything with. */
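/* E.g. for EXP == ((unsigned char) s.f) & 0x0f, the inner object of s.f is
returned, *PBITSIZE and *PBITPOS describe the field, *PAND_MASK is 0x0f,
and *PMASK is 0x0f combined with the mask implied by the field width.
(Illustrative sketch.) */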
4641
4642 static tree
4643 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4644 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4645 int *punsignedp, int *preversep, int *pvolatilep,
4646 tree *pmask, tree *pand_mask)
4647 {
4648 tree exp = *exp_;
4649 tree outer_type = 0;
4650 tree and_mask = 0;
4651 tree mask, inner, offset;
4652 tree unsigned_type;
4653 unsigned int precision;
4654
4655 /* All the optimizations using this function assume integer fields.
4656 There are problems with FP fields since the type_for_size call
4657 below can fail for, e.g., XFmode. */
4658 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4659 return NULL_TREE;
4660
4661 /* We are interested in the bare arrangement of bits, so strip everything
4662 that doesn't affect the machine mode. However, record the type of the
4663 outermost expression if it may matter below. */
4664 if (CONVERT_EXPR_P (exp)
4665 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4666 outer_type = TREE_TYPE (exp);
4667 STRIP_NOPS (exp);
4668
4669 if (TREE_CODE (exp) == BIT_AND_EXPR)
4670 {
4671 and_mask = TREE_OPERAND (exp, 1);
4672 exp = TREE_OPERAND (exp, 0);
4673 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4674 if (TREE_CODE (and_mask) != INTEGER_CST)
4675 return NULL_TREE;
4676 }
4677
4678 poly_int64 poly_bitsize, poly_bitpos;
4679 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4680 pmode, punsignedp, preversep, pvolatilep);
4681 if ((inner == exp && and_mask == 0)
4682 || !poly_bitsize.is_constant (pbitsize)
4683 || !poly_bitpos.is_constant (pbitpos)
4684 || *pbitsize < 0
4685 || offset != 0
4686 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4687 /* Reject out-of-bound accesses (PR79731). */
4688 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4689 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4690 *pbitpos + *pbitsize) < 0))
4691 return NULL_TREE;
4692
4693 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4694 if (unsigned_type == NULL_TREE)
4695 return NULL_TREE;
4696
4697 *exp_ = exp;
4698
4699 /* If the number of bits in the reference is the same as the bitsize of
4700 the outer type, then the outer type gives the signedness. Otherwise
4701 (in case of a small bitfield) the signedness is unchanged. */
4702 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4703 *punsignedp = TYPE_UNSIGNED (outer_type);
4704
4705 /* Compute the mask to access the bitfield. */
4706 precision = TYPE_PRECISION (unsigned_type);
4707
4708 mask = build_int_cst_type (unsigned_type, -1);
4709
4710 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4711 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4712
4713 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4714 if (and_mask != 0)
4715 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4716 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4717
4718 *pmask = mask;
4719 *pand_mask = and_mask;
4720 return inner;
4721 }
4722
4723 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4724 bit positions and the type of MASK is signed. */
4725
4726 static bool
4727 all_ones_mask_p (const_tree mask, unsigned int size)
4728 {
4729 tree type = TREE_TYPE (mask);
4730 unsigned int precision = TYPE_PRECISION (type);
4731
4732 /* If this function returns true when the type of the mask is
4733 UNSIGNED, then there will be errors. In particular see
4734 gcc.c-torture/execute/990326-1.c. There does not appear to be
4735 any documentation paper trail as to why this is so. But the code
4736 before wide-int worked with that restriction and it has been preserved
4737 here. */
4738 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4739 return false;
4740
4741 return wi::mask (size, false, precision) == wi::to_wide (mask);
4742 }
4743
4744 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4745 represents the sign bit of EXP's type. If EXP represents a sign
4746 or zero extension, also test VAL against the unextended type.
4747 The return value is the (sub)expression whose sign bit is VAL,
4748 or NULL_TREE otherwise. */
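/* For example, with a 32-bit int EXP, VAL == 0x80000000 is its sign bit;
if EXP is (int) c for a signed char c, VAL == 0x80 matches through the
recursive test on the unextended type. (Illustrative values.) */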
4749
4750 tree
4751 sign_bit_p (tree exp, const_tree val)
4752 {
4753 int width;
4754 tree t;
4755
4756 /* Tree EXP must have an integral type. */
4757 t = TREE_TYPE (exp);
4758 if (! INTEGRAL_TYPE_P (t))
4759 return NULL_TREE;
4760
4761 /* Tree VAL must be an integer constant. */
4762 if (TREE_CODE (val) != INTEGER_CST
4763 || TREE_OVERFLOW (val))
4764 return NULL_TREE;
4765
4766 width = TYPE_PRECISION (t);
4767 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4768 return exp;
4769
4770 /* Handle extension from a narrower type. */
4771 if (TREE_CODE (exp) == NOP_EXPR
4772 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4773 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4774
4775 return NULL_TREE;
4776 }
4777
4778 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4779 to be evaluated unconditionally. */
4780
4781 static bool
4782 simple_operand_p (const_tree exp)
4783 {
4784 /* Strip any conversions that don't change the machine mode. */
4785 STRIP_NOPS (exp);
4786
4787 return (CONSTANT_CLASS_P (exp)
4788 || TREE_CODE (exp) == SSA_NAME
4789 || (DECL_P (exp)
4790 && ! TREE_ADDRESSABLE (exp)
4791 && ! TREE_THIS_VOLATILE (exp)
4792 && ! DECL_NONLOCAL (exp)
4793 /* Don't regard global variables as simple. They may be
4794 allocated in ways unknown to the compiler (shared memory,
4795 #pragma weak, etc). */
4796 && ! TREE_PUBLIC (exp)
4797 && ! DECL_EXTERNAL (exp)
4798 /* Weakrefs are not safe to be read, since they can be NULL.
4799 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4800 have DECL_WEAK flag set. */
4801 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4802 /* Loading a static variable is unduly expensive, but global
4803 registers aren't expensive. */
4804 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4805 }
4806
4807 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4808 to be evaluated unconditionally.
4809 In addition to simple_operand_p, we assume that comparisons, conversions,
4810 and logic-not operations are simple if their operands are simple, too. */
4811
4812 static bool
4813 simple_operand_p_2 (tree exp)
4814 {
4815 enum tree_code code;
4816
4817 if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
4818 return false;
4819
4820 while (CONVERT_EXPR_P (exp))
4821 exp = TREE_OPERAND (exp, 0);
4822
4823 code = TREE_CODE (exp);
4824
4825 if (TREE_CODE_CLASS (code) == tcc_comparison)
4826 return (simple_operand_p (TREE_OPERAND (exp, 0))
4827 && simple_operand_p (TREE_OPERAND (exp, 1)));
4828
4829 if (code == TRUTH_NOT_EXPR)
4830 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4831
4832 return simple_operand_p (exp);
4833 }
4834
4835 \f
4836 /* The following functions are subroutines to fold_range_test and allow it to
4837 try to change a logical combination of comparisons into a range test.
4838
4839 For example, both
4840 X == 2 || X == 3 || X == 4 || X == 5
4841 and
4842 X >= 2 && X <= 5
4843 are converted to
4844 (unsigned) (X - 2) <= 3
4845
4846 We describe each set of comparisons as being either inside or outside
4847 a range, using a variable named like IN_P, and then describe the
4848 range with a lower and upper bound. If one of the bounds is omitted,
4849 it represents either the highest or lowest value of the type.
4850
4851 In the comments below, we represent a range by two numbers in brackets
4852 preceded by a "+" to designate being inside that range, or a "-" to
4853 designate being outside that range, so the condition can be inverted by
4854 flipping the prefix. An omitted bound is represented by a "-". For
4855 example, "- [-, 10]" means being outside the range starting at the lowest
4856 possible value and ending at 10, in other words, being greater than 10.
4857 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4858 always false.
4859
4860 We set up things so that the missing bounds are handled in a consistent
4861 manner so neither a missing bound nor "true" and "false" need to be
4862 handled using a special case. */
4863
4864 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4865 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4866 and UPPER1_P are nonzero if the respective argument is an upper bound
4867 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4868 must be specified for a comparison. ARG1 will be converted to ARG0's
4869 type if both are specified. */
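/* For instance, range_binop (LE_EXPR, type, NULL_TREE, 0, val, 1) compares
a missing lower bound with an upper bound VAL and yields true, since the
missing bound acts as -infinity. (Illustrative call.) */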
4870
4871 static tree
4872 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4873 tree arg1, int upper1_p)
4874 {
4875 tree tem;
4876 int result;
4877 int sgn0, sgn1;
4878
4879 /* If neither arg represents infinity, do the normal operation.
4880 Else, if not a comparison, return infinity. Else handle the special
4881 comparison rules. Note that most of the cases below won't occur, but
4882 are handled for consistency. */
4883
4884 if (arg0 != 0 && arg1 != 0)
4885 {
4886 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4887 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4888 STRIP_NOPS (tem);
4889 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4890 }
4891
4892 if (TREE_CODE_CLASS (code) != tcc_comparison)
4893 return 0;
4894
4895 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4896 for neither. In real maths, we cannot assume open ended ranges are
4897 the same. But, this is computer arithmetic, where numbers are finite.
4898 We can therefore make the transformation of any unbounded range with
4899 the value Z, Z being greater than any representable number. This permits
4900 us to treat unbounded ranges as equal. */
4901 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4902 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4903 switch (code)
4904 {
4905 case EQ_EXPR:
4906 result = sgn0 == sgn1;
4907 break;
4908 case NE_EXPR:
4909 result = sgn0 != sgn1;
4910 break;
4911 case LT_EXPR:
4912 result = sgn0 < sgn1;
4913 break;
4914 case LE_EXPR:
4915 result = sgn0 <= sgn1;
4916 break;
4917 case GT_EXPR:
4918 result = sgn0 > sgn1;
4919 break;
4920 case GE_EXPR:
4921 result = sgn0 >= sgn1;
4922 break;
4923 default:
4924 gcc_unreachable ();
4925 }
4926
4927 return constant_boolean_node (result, type);
4928 }
4929 \f
4930 /* Helper routine for make_range. Perform one step for it, return
4931 new expression if the loop should continue or NULL_TREE if it should
4932 stop. */
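/* E.g. starting from the initial range -[0, 0] (i.e. EXP != 0), a step for
code GT_EXPR with ARG1 == 10 and a signed ARG0 yields -[-, 10], i.e.
EXP > 10, and a step for NEGATE_EXPR maps a range [a, b] to [-b, -a].
(Illustrative.) */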
4933
4934 tree
4935 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4936 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4937 bool *strict_overflow_p)
4938 {
4939 tree arg0_type = TREE_TYPE (arg0);
4940 tree n_low, n_high, low = *p_low, high = *p_high;
4941 int in_p = *p_in_p, n_in_p;
4942
4943 switch (code)
4944 {
4945 case TRUTH_NOT_EXPR:
4946 /* We can only do something if the range is testing for zero. */
4947 if (low == NULL_TREE || high == NULL_TREE
4948 || ! integer_zerop (low) || ! integer_zerop (high))
4949 return NULL_TREE;
4950 *p_in_p = ! in_p;
4951 return arg0;
4952
4953 case EQ_EXPR: case NE_EXPR:
4954 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4955 /* We can only do something if the range is testing for zero
4956 and if the second operand is an integer constant. Note that
4957 saying something is "in" the range we make is done by
4958 complementing IN_P, since it will be set in the initial case of
4959 being not equal to zero; "out" is leaving it alone. */
4960 if (low == NULL_TREE || high == NULL_TREE
4961 || ! integer_zerop (low) || ! integer_zerop (high)
4962 || TREE_CODE (arg1) != INTEGER_CST)
4963 return NULL_TREE;
4964
4965 switch (code)
4966 {
4967 case NE_EXPR: /* - [c, c] */
4968 low = high = arg1;
4969 break;
4970 case EQ_EXPR: /* + [c, c] */
4971 in_p = ! in_p, low = high = arg1;
4972 break;
4973 case GT_EXPR: /* - [-, c] */
4974 low = 0, high = arg1;
4975 break;
4976 case GE_EXPR: /* + [c, -] */
4977 in_p = ! in_p, low = arg1, high = 0;
4978 break;
4979 case LT_EXPR: /* - [c, -] */
4980 low = arg1, high = 0;
4981 break;
4982 case LE_EXPR: /* + [-, c] */
4983 in_p = ! in_p, low = 0, high = arg1;
4984 break;
4985 default:
4986 gcc_unreachable ();
4987 }
4988
4989 /* If this is an unsigned comparison, we also know that EXP is
4990 greater than or equal to zero. We base the range tests we make
4991 on that fact, so we record it here so we can parse existing
4992 range tests. We test arg0_type since often the return type
4993 of, e.g. EQ_EXPR, is boolean. */
4994 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4995 {
4996 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4997 in_p, low, high, 1,
4998 build_int_cst (arg0_type, 0),
4999 NULL_TREE))
5000 return NULL_TREE;
5001
5002 in_p = n_in_p, low = n_low, high = n_high;
5003
5004 /* If the high bound is missing, but we have a nonzero low
5005 bound, reverse the range so it goes from zero to the low bound
5006 minus 1. */
5007 if (high == 0 && low && ! integer_zerop (low))
5008 {
5009 in_p = ! in_p;
5010 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
5011 build_int_cst (TREE_TYPE (low), 1), 0);
5012 low = build_int_cst (arg0_type, 0);
5013 }
5014 }
5015
5016 *p_low = low;
5017 *p_high = high;
5018 *p_in_p = in_p;
5019 return arg0;
5020
5021 case NEGATE_EXPR:
5022 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5023 low and high are non-NULL, then normalize will DTRT. */
5024 if (!TYPE_UNSIGNED (arg0_type)
5025 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5026 {
5027 if (low == NULL_TREE)
5028 low = TYPE_MIN_VALUE (arg0_type);
5029 if (high == NULL_TREE)
5030 high = TYPE_MAX_VALUE (arg0_type);
5031 }
5032
5033 /* (-x) IN [a,b] -> x in [-b, -a] */
5034 n_low = range_binop (MINUS_EXPR, exp_type,
5035 build_int_cst (exp_type, 0),
5036 0, high, 1);
5037 n_high = range_binop (MINUS_EXPR, exp_type,
5038 build_int_cst (exp_type, 0),
5039 0, low, 0);
5040 if (n_high != 0 && TREE_OVERFLOW (n_high))
5041 return NULL_TREE;
5042 goto normalize;
5043
5044 case BIT_NOT_EXPR:
5045 /* ~ X -> -X - 1 */
5046 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5047 build_int_cst (exp_type, 1));
5048
5049 case PLUS_EXPR:
5050 case MINUS_EXPR:
5051 if (TREE_CODE (arg1) != INTEGER_CST)
5052 return NULL_TREE;
5053
5054 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5055 move a constant to the other side. */
5056 if (!TYPE_UNSIGNED (arg0_type)
5057 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5058 return NULL_TREE;
5059
5060 /* If EXP is signed, any overflow in the computation is undefined,
5061 so we don't worry about it so long as our computations on
5062 the bounds don't overflow. For unsigned, overflow is defined
5063 and this is exactly the right thing. */
5064 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5065 arg0_type, low, 0, arg1, 0);
5066 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5067 arg0_type, high, 1, arg1, 0);
5068 if ((n_low != 0 && TREE_OVERFLOW (n_low))
5069 || (n_high != 0 && TREE_OVERFLOW (n_high)))
5070 return NULL_TREE;
5071
5072 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5073 *strict_overflow_p = true;
5074
5075 normalize:
5076 /* Check for an unsigned range which has wrapped around the maximum
5077 value thus making n_high < n_low, and normalize it. */
5078 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5079 {
5080 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5081 build_int_cst (TREE_TYPE (n_high), 1), 0);
5082 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5083 build_int_cst (TREE_TYPE (n_low), 1), 0);
5084
5085 /* If the range is of the form +/- [ x+1, x ], we won't
5086 be able to normalize it. But then, it represents the
5087 whole range or the empty set, so make it
5088 +/- [ -, - ]. */
5089 if (tree_int_cst_equal (n_low, low)
5090 && tree_int_cst_equal (n_high, high))
5091 low = high = 0;
5092 else
5093 in_p = ! in_p;
5094 }
5095 else
5096 low = n_low, high = n_high;
5097
5098 *p_low = low;
5099 *p_high = high;
5100 *p_in_p = in_p;
5101 return arg0;
5102
5103 CASE_CONVERT:
5104 case NON_LVALUE_EXPR:
5105 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5106 return NULL_TREE;
5107
5108 if (! INTEGRAL_TYPE_P (arg0_type)
5109 || (low != 0 && ! int_fits_type_p (low, arg0_type))
5110 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5111 return NULL_TREE;
5112
5113 n_low = low, n_high = high;
5114
5115 if (n_low != 0)
5116 n_low = fold_convert_loc (loc, arg0_type, n_low);
5117
5118 if (n_high != 0)
5119 n_high = fold_convert_loc (loc, arg0_type, n_high);
5120
5121 /* If we're converting arg0 from an unsigned type to the signed
5122 EXP_TYPE, we will be doing the comparison as unsigned.
5123 The tests above have already verified that LOW and HIGH
5124 are both positive.
5125
5126 So we have to ensure that we will handle large unsigned
5127 values the same way that the current signed bounds treat
5128 negative values. */
5129
5130 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5131 {
5132 tree high_positive;
5133 tree equiv_type;
5134 /* For fixed-point modes, we need to pass the saturating flag
5135 as the 2nd parameter. */
5136 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5137 equiv_type
5138 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5139 TYPE_SATURATING (arg0_type));
5140 else
5141 equiv_type
5142 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5143
5144 /* A range without an upper bound is, naturally, unbounded.
5145 Since convert would have cropped a very large value, use
5146 the max value for the destination type. */
5147 high_positive
5148 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5149 : TYPE_MAX_VALUE (arg0_type);
5150
5151 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5152 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5153 fold_convert_loc (loc, arg0_type,
5154 high_positive),
5155 build_int_cst (arg0_type, 1));
5156
5157 /* If the low bound is specified, "and" the range with the
5158 range for which the original unsigned value will be
5159 positive. */
5160 if (low != 0)
5161 {
5162 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5163 1, fold_convert_loc (loc, arg0_type,
5164 integer_zero_node),
5165 high_positive))
5166 return NULL_TREE;
5167
5168 in_p = (n_in_p == in_p);
5169 }
5170 else
5171 {
5172 /* Otherwise, "or" the range with the range of the input
5173 that will be interpreted as negative. */
5174 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5175 1, fold_convert_loc (loc, arg0_type,
5176 integer_zero_node),
5177 high_positive))
5178 return NULL_TREE;
5179
5180 in_p = (in_p != n_in_p);
5181 }
5182 }
5183
5184 /* Otherwise, if we are converting arg0 from a signed type to the
5185 unsigned EXP_TYPE, we will do the comparison as signed. If
5186 high is non-NULL, we punt above if it doesn't fit in the signed
5187 type, so if we get through here, +[-, high] or +[low, high] are
5188 equivalent to +[-, n_high] or +[n_low, n_high]. Similarly,
5189 +[-, -] or -[-, -] are equivalent too. But if low is specified and
5190 high is not, the +[low, -] range is equivalent to union of
5191 +[n_low, -] and +[-, -1] ranges, so +[low, -] is equivalent to
5192 -[0, n_low-1] and similarly -[low, -] to +[0, n_low-1], except for
5193 low being 0, which should be treated as [-, -]. */
5194 else if (TYPE_UNSIGNED (exp_type)
5195 && !TYPE_UNSIGNED (arg0_type)
5196 && low
5197 && !high)
5198 {
5199 if (integer_zerop (low))
5200 n_low = NULL_TREE;
5201 else
5202 {
5203 n_high = fold_build2_loc (loc, PLUS_EXPR, arg0_type,
5204 n_low, build_int_cst (arg0_type, -1));
5205 n_low = build_zero_cst (arg0_type);
5206 in_p = !in_p;
5207 }
5208 }
5209
5210 *p_low = n_low;
5211 *p_high = n_high;
5212 *p_in_p = in_p;
5213 return arg0;
5214
5215 default:
5216 return NULL_TREE;
5217 }
5218 }
5219
5220 /* Given EXP, a logical expression, set the range it is testing into
5221 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5222 actually being tested. *PLOW and *PHIGH will be made of the same
5223 type as the returned expression. If EXP is not a comparison, we
5224 will most likely not be returning a useful value and range. Set
5225 *STRICT_OVERFLOW_P to true if the return value is only valid
5226 because signed overflow is undefined; otherwise, do not change
5227 *STRICT_OVERFLOW_P. */
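/* For example, for EXP == (X >= 2) with signed X this returns X with the
range +[2, -], and for EXP == !(X == 3) it returns X with the range
-[3, 3]. (Illustrative examples.) */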
5228
5229 tree
5230 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5231 bool *strict_overflow_p)
5232 {
5233 enum tree_code code;
5234 tree arg0, arg1 = NULL_TREE;
5235 tree exp_type, nexp;
5236 int in_p;
5237 tree low, high;
5238 location_t loc = EXPR_LOCATION (exp);
5239
5240 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5241 and see if we can refine the range. Some of the cases below may not
5242 happen, but it doesn't seem worth worrying about this. We keep
5243 iterating as long as make_range_step refines the expression; once it
5244 returns NULL_TREE we stop. */
5245
5246 in_p = 0;
5247 low = high = build_int_cst (TREE_TYPE (exp), 0);
5248
5249 while (1)
5250 {
5251 code = TREE_CODE (exp);
5252 exp_type = TREE_TYPE (exp);
5253 arg0 = NULL_TREE;
5254
5255 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5256 {
5257 if (TREE_OPERAND_LENGTH (exp) > 0)
5258 arg0 = TREE_OPERAND (exp, 0);
5259 if (TREE_CODE_CLASS (code) == tcc_binary
5260 || TREE_CODE_CLASS (code) == tcc_comparison
5261 || (TREE_CODE_CLASS (code) == tcc_expression
5262 && TREE_OPERAND_LENGTH (exp) > 1))
5263 arg1 = TREE_OPERAND (exp, 1);
5264 }
5265 if (arg0 == NULL_TREE)
5266 break;
5267
5268 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5269 &high, &in_p, strict_overflow_p);
5270 if (nexp == NULL_TREE)
5271 break;
5272 exp = nexp;
5273 }
5274
5275 /* If EXP is a constant, we can evaluate whether this is true or false. */
5276 if (TREE_CODE (exp) == INTEGER_CST)
5277 {
5278 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5279 exp, 0, low, 0))
5280 && integer_onep (range_binop (LE_EXPR, integer_type_node,
5281 exp, 1, high, 1)));
5282 low = high = 0;
5283 exp = 0;
5284 }
5285
5286 *pin_p = in_p, *plow = low, *phigh = high;
5287 return exp;
5288 }
5289
5290 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
5291 a bitwise check i.e. when
5292 LOW == 0xXX...X00...0
5293 HIGH == 0xXX...X11...1
5294 Return corresponding mask in MASK and stem in VALUE. */
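/* For instance, LOW == 0x30 and HIGH == 0x37 qualify: LOW <= X && X <= HIGH
is equivalent to (X & ~7) == 0x30, so *MASK is set to ~7 and *VALUE to
0x30. (Illustrative values.) */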
5295
5296 static bool
5297 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5298 tree *value)
5299 {
5300 if (TREE_CODE (low) != INTEGER_CST
5301 || TREE_CODE (high) != INTEGER_CST)
5302 return false;
5303
5304 unsigned prec = TYPE_PRECISION (type);
5305 wide_int lo = wi::to_wide (low, prec);
5306 wide_int hi = wi::to_wide (high, prec);
5307
5308 wide_int end_mask = lo ^ hi;
5309 if ((end_mask & (end_mask + 1)) != 0
5310 || (lo & end_mask) != 0)
5311 return false;
5312
5313 wide_int stem_mask = ~end_mask;
5314 wide_int stem = lo & stem_mask;
5315 if (stem != (hi & stem_mask))
5316 return false;
5317
5318 *mask = wide_int_to_tree (type, stem_mask);
5319 *value = wide_int_to_tree (type, stem);
5320
5321 return true;
5322 }
5323 \f
5324 /* Helper routine for build_range_check and match.pd. Return the type to
5325 perform the check or NULL if it shouldn't be optimized. */
5326
5327 tree
5328 range_check_type (tree etype)
5329 {
5330 /* First make sure that arithmetic in this type is valid, then make sure
5331 that it wraps around. */
5332 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5333 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5334
5335 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5336 {
5337 tree utype, minv, maxv;
5338
5339 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5340 for the type in question, as we rely on this here. */
5341 utype = unsigned_type_for (etype);
5342 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5343 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5344 build_int_cst (TREE_TYPE (maxv), 1), 1);
5345 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5346
5347 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5348 minv, 1, maxv, 1)))
5349 etype = utype;
5350 else
5351 return NULL_TREE;
5352 }
5353 else if (POINTER_TYPE_P (etype))
5354 etype = unsigned_type_for (etype);
5355 return etype;
5356 }
5357
5358 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5359 type, TYPE, return an expression to test if EXP is in (or out of, depending
5360 on IN_P) the range. Return 0 if the test couldn't be created. */
5361
5362 tree
5363 build_range_check (location_t loc, tree type, tree exp, int in_p,
5364 tree low, tree high)
5365 {
5366 tree etype = TREE_TYPE (exp), mask, value;
5367
5368 /* Disable this optimization for function pointer expressions
5369 on targets that require function pointer canonicalization. */
5370 if (targetm.have_canonicalize_funcptr_for_compare ()
5371 && POINTER_TYPE_P (etype)
5372 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5373 return NULL_TREE;
5374
5375 if (! in_p)
5376 {
5377 value = build_range_check (loc, type, exp, 1, low, high);
5378 if (value != 0)
5379 return invert_truthvalue_loc (loc, value);
5380
5381 return 0;
5382 }
5383
5384 if (low == 0 && high == 0)
5385 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5386
5387 if (low == 0)
5388 return fold_build2_loc (loc, LE_EXPR, type, exp,
5389 fold_convert_loc (loc, etype, high));
5390
5391 if (high == 0)
5392 return fold_build2_loc (loc, GE_EXPR, type, exp,
5393 fold_convert_loc (loc, etype, low));
5394
5395 if (operand_equal_p (low, high, 0))
5396 return fold_build2_loc (loc, EQ_EXPR, type, exp,
5397 fold_convert_loc (loc, etype, low));
5398
5399 if (TREE_CODE (exp) == BIT_AND_EXPR
5400 && maskable_range_p (low, high, etype, &mask, &value))
5401 return fold_build2_loc (loc, EQ_EXPR, type,
5402 fold_build2_loc (loc, BIT_AND_EXPR, etype,
5403 exp, mask),
5404 value);
5405
5406 if (integer_zerop (low))
5407 {
5408 if (! TYPE_UNSIGNED (etype))
5409 {
5410 etype = unsigned_type_for (etype);
5411 high = fold_convert_loc (loc, etype, high);
5412 exp = fold_convert_loc (loc, etype, exp);
5413 }
5414 return build_range_check (loc, type, exp, 1, 0, high);
5415 }
5416
5417 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5418 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5419 {
5420 int prec = TYPE_PRECISION (etype);
5421
5422 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5423 {
5424 if (TYPE_UNSIGNED (etype))
5425 {
5426 tree signed_etype = signed_type_for (etype);
5427 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5428 etype
5429 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5430 else
5431 etype = signed_etype;
5432 exp = fold_convert_loc (loc, etype, exp);
5433 }
5434 return fold_build2_loc (loc, GT_EXPR, type, exp,
5435 build_int_cst (etype, 0));
5436 }
5437 }
5438
5439 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5440 This requires wrap-around arithmetic for the type of the expression. */
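/* For instance, 10 <= c && c <= 20 becomes (unsigned) c - 10 <= 10,
a single unsigned comparison. */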
5441 etype = range_check_type (etype);
5442 if (etype == NULL_TREE)
5443 return NULL_TREE;
5444
5445 high = fold_convert_loc (loc, etype, high);
5446 low = fold_convert_loc (loc, etype, low);
5447 exp = fold_convert_loc (loc, etype, exp);
5448
5449 value = const_binop (MINUS_EXPR, high, low);
5450
5451 if (value != 0 && !TREE_OVERFLOW (value))
5452 return build_range_check (loc, type,
5453 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5454 1, build_int_cst (etype, 0), value);
5455
5456 return 0;
5457 }
5458 \f
5459 /* Return the predecessor of VAL in its type, handling the infinite case. */
5460
5461 static tree
5462 range_predecessor (tree val)
5463 {
5464 tree type = TREE_TYPE (val);
5465
5466 if (INTEGRAL_TYPE_P (type)
5467 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5468 return 0;
5469 else
5470 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5471 build_int_cst (TREE_TYPE (val), 1), 0);
5472 }
5473
5474 /* Return the successor of VAL in its type, handling the infinite case. */
5475
5476 static tree
5477 range_successor (tree val)
5478 {
5479 tree type = TREE_TYPE (val);
5480
5481 if (INTEGRAL_TYPE_P (type)
5482 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5483 return 0;
5484 else
5485 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5486 build_int_cst (TREE_TYPE (val), 1), 0);
5487 }
5488
5489 /* Given two ranges, see if we can merge them into one. Return 1 if we
5490 can, 0 if we can't. Set the output range into the specified parameters. */
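/* For example, merging + [0, 9] with + [5, 20] gives + [5, 9], whereas
+ [0, 9] with - [3, 5] cannot be expressed as a single range and fails. */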
5491
5492 bool
5493 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5494 tree high0, int in1_p, tree low1, tree high1)
5495 {
5496 int no_overlap;
5497 int subset;
5498 int temp;
5499 tree tem;
5500 int in_p;
5501 tree low, high;
5502 int lowequal = ((low0 == 0 && low1 == 0)
5503 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5504 low0, 0, low1, 0)));
5505 int highequal = ((high0 == 0 && high1 == 0)
5506 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5507 high0, 1, high1, 1)));
5508
5509 /* Make range 0 be the range that starts first, or ends last if they
5510 start at the same value. Swap them if it isn't. */
5511 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5512 low0, 0, low1, 0))
5513 || (lowequal
5514 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5515 high1, 1, high0, 1))))
5516 {
5517 temp = in0_p, in0_p = in1_p, in1_p = temp;
5518 tem = low0, low0 = low1, low1 = tem;
5519 tem = high0, high0 = high1, high1 = tem;
5520 }
5521
5522 /* If the second range is != high1 where high1 is the maximum value of
5523 its type, try first merging with the < high1 range. */
5524 if (low1
5525 && high1
5526 && TREE_CODE (low1) == INTEGER_CST
5527 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5528 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5529 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5530 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5531 && operand_equal_p (low1, high1, 0))
5532 {
5533 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5534 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5535 !in1_p, NULL_TREE, range_predecessor (low1)))
5536 return true;
5537 /* Similarly, for the second range != low1 where low1 is the minimum value
5538 of its type, try first merging with the > low1 range. */
5539 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5540 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5541 !in1_p, range_successor (low1), NULL_TREE))
5542 return true;
5543 }
5544
5545 /* Now flag two cases, whether the ranges are disjoint or whether the
5546 second range is totally subsumed in the first. Note that the tests
5547 below are simplified by the ones above. */
5548 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5549 high0, 1, low1, 0));
5550 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5551 high1, 1, high0, 1));
5552
5553 /* We now have four cases, depending on whether we are including or
5554 excluding the two ranges. */
5555 if (in0_p && in1_p)
5556 {
5557 /* If they don't overlap, the result is false. If the second range
5558 is a subset it is the result. Otherwise, the range is from the start
5559 of the second to the end of the first. */
5560 if (no_overlap)
5561 in_p = 0, low = high = 0;
5562 else if (subset)
5563 in_p = 1, low = low1, high = high1;
5564 else
5565 in_p = 1, low = low1, high = high0;
5566 }
5567
5568 else if (in0_p && ! in1_p)
5569 {
5570 /* If they don't overlap, the result is the first range. If they are
5571 equal, the result is false. If the second range is a subset of the
5572 first, and the ranges begin at the same place, we go from just after
5573 the end of the second range to the end of the first. If the second
5574 range is not a subset of the first, or if it is a subset and both
5575 ranges end at the same place, the range starts at the start of the
5576 first range and ends just before the second range.
5577 Otherwise, we can't describe this as a single range. */
5578 if (no_overlap)
5579 in_p = 1, low = low0, high = high0;
5580 else if (lowequal && highequal)
5581 in_p = 0, low = high = 0;
5582 else if (subset && lowequal)
5583 {
5584 low = range_successor (high1);
5585 high = high0;
5586 in_p = 1;
5587 if (low == 0)
5588 {
5589 /* We are in the weird situation where high0 > high1 but
5590 high1 has no successor. Punt. */
5591 return 0;
5592 }
5593 }
5594 else if (! subset || highequal)
5595 {
5596 low = low0;
5597 high = range_predecessor (low1);
5598 in_p = 1;
5599 if (high == 0)
5600 {
5601 /* low0 < low1 but low1 has no predecessor. Punt. */
5602 return 0;
5603 }
5604 }
5605 else
5606 return 0;
5607 }
5608
5609 else if (! in0_p && in1_p)
5610 {
5611 /* If they don't overlap, the result is the second range. If the second
5612 is a subset of the first, the result is false. Otherwise,
5613 the range starts just after the first range and ends at the
5614 end of the second. */
5615 if (no_overlap)
5616 in_p = 1, low = low1, high = high1;
5617 else if (subset || highequal)
5618 in_p = 0, low = high = 0;
5619 else
5620 {
5621 low = range_successor (high0);
5622 high = high1;
5623 in_p = 1;
5624 if (low == 0)
5625 {
5626 /* high1 > high0 but high0 has no successor. Punt. */
5627 return 0;
5628 }
5629 }
5630 }
5631
5632 else
5633 {
5634 /* The case where we are excluding both ranges. Here the complex case
5635 is if they don't overlap. In that case, the only time we have a
5636 range is if they are adjacent. If the second is a subset of the
5637 first, the result is the first. Otherwise, the range to exclude
5638 starts at the beginning of the first range and ends at the end of the
5639 second. */
5640 if (no_overlap)
5641 {
5642 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5643 range_successor (high0),
5644 1, low1, 0)))
5645 in_p = 0, low = low0, high = high1;
5646 else
5647 {
5648 /* Canonicalize - [min, x] into - [-, x]. */
5649 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5650 switch (TREE_CODE (TREE_TYPE (low0)))
5651 {
5652 case ENUMERAL_TYPE:
5653 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5654 GET_MODE_BITSIZE
5655 (TYPE_MODE (TREE_TYPE (low0)))))
5656 break;
5657 /* FALLTHROUGH */
5658 case INTEGER_TYPE:
5659 if (tree_int_cst_equal (low0,
5660 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5661 low0 = 0;
5662 break;
5663 case POINTER_TYPE:
5664 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5665 && integer_zerop (low0))
5666 low0 = 0;
5667 break;
5668 default:
5669 break;
5670 }
5671
5672 /* Canonicalize - [x, max] into - [x, -]. */
5673 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5674 switch (TREE_CODE (TREE_TYPE (high1)))
5675 {
5676 case ENUMERAL_TYPE:
5677 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5678 GET_MODE_BITSIZE
5679 (TYPE_MODE (TREE_TYPE (high1)))))
5680 break;
5681 /* FALLTHROUGH */
5682 case INTEGER_TYPE:
5683 if (tree_int_cst_equal (high1,
5684 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5685 high1 = 0;
5686 break;
5687 case POINTER_TYPE:
5688 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5689 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5690 high1, 1,
5691 build_int_cst (TREE_TYPE (high1), 1),
5692 1)))
5693 high1 = 0;
5694 break;
5695 default:
5696 break;
5697 }
5698
5699 /* The ranges might also be adjacent between the maximum and
5700 minimum values of the given type. For
5701 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5702 return + [x + 1, y - 1]. */
5703 if (low0 == 0 && high1 == 0)
5704 {
5705 low = range_successor (high0);
5706 high = range_predecessor (low1);
5707 if (low == 0 || high == 0)
5708 return 0;
5709
5710 in_p = 1;
5711 }
5712 else
5713 return 0;
5714 }
5715 }
5716 else if (subset)
5717 in_p = 0, low = low0, high = high0;
5718 else
5719 in_p = 0, low = low0, high = high1;
5720 }
5721
5722 *pin_p = in_p, *plow = low, *phigh = high;
5723 return 1;
5724 }
5725 \f
5726
5727 /* Subroutine of fold, looking inside expressions of the form
5728 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5729 of the COND_EXPR. This function is being used also to optimize
5730 A op B ? C : A, by reversing the comparison first.
5731
5732 Return a folded expression whose code is not a COND_EXPR
5733 anymore, or NULL_TREE if no folding opportunity is found. */
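/* For example, A > B ? A : B can become MAX_EXPR <B, A> when NaNs need not
be honored, and A >= 0 ? A : -A can become abs (A) for signed A. */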
5734
5735 static tree
5736 fold_cond_expr_with_comparison (location_t loc, tree type,
5737 tree arg0, tree arg1, tree arg2)
5738 {
5739 enum tree_code comp_code = TREE_CODE (arg0);
5740 tree arg00 = TREE_OPERAND (arg0, 0);
5741 tree arg01 = TREE_OPERAND (arg0, 1);
5742 tree arg1_type = TREE_TYPE (arg1);
5743 tree tem;
5744
5745 STRIP_NOPS (arg1);
5746 STRIP_NOPS (arg2);
5747
5748 /* If we have A op 0 ? A : -A, consider applying the following
5749 transformations:
5750
5751 A == 0? A : -A same as -A
5752 A != 0? A : -A same as A
5753 A >= 0? A : -A same as abs (A)
5754 A > 0? A : -A same as abs (A)
5755 A <= 0? A : -A same as -abs (A)
5756 A < 0? A : -A same as -abs (A)
5757
5758 None of these transformations work for modes with signed
5759 zeros. If A is +/-0, the first two transformations will
5760 change the sign of the result (from +0 to -0, or vice
5761 versa). The last four will fix the sign of the result,
5762 even though the original expressions could be positive or
5763 negative, depending on the sign of A.
5764
5765 Note that all these transformations are correct if A is
5766 NaN, since the two alternatives (A and -A) are also NaNs. */
5767 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5768 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5769 ? real_zerop (arg01)
5770 : integer_zerop (arg01))
5771 && ((TREE_CODE (arg2) == NEGATE_EXPR
5772 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5773 /* In the case that A is of the form X-Y, '-A' (arg2) may
5774 have already been folded to Y-X, check for that. */
5775 || (TREE_CODE (arg1) == MINUS_EXPR
5776 && TREE_CODE (arg2) == MINUS_EXPR
5777 && operand_equal_p (TREE_OPERAND (arg1, 0),
5778 TREE_OPERAND (arg2, 1), 0)
5779 && operand_equal_p (TREE_OPERAND (arg1, 1),
5780 TREE_OPERAND (arg2, 0), 0))))
5781 switch (comp_code)
5782 {
5783 case EQ_EXPR:
5784 case UNEQ_EXPR:
5785 tem = fold_convert_loc (loc, arg1_type, arg1);
5786 return fold_convert_loc (loc, type, negate_expr (tem));
5787 case NE_EXPR:
5788 case LTGT_EXPR:
5789 return fold_convert_loc (loc, type, arg1);
5790 case UNGE_EXPR:
5791 case UNGT_EXPR:
5792 if (flag_trapping_math)
5793 break;
5794 /* Fall through. */
5795 case GE_EXPR:
5796 case GT_EXPR:
5797 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5798 break;
5799 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5800 return fold_convert_loc (loc, type, tem);
5801 case UNLE_EXPR:
5802 case UNLT_EXPR:
5803 if (flag_trapping_math)
5804 break;
5805 /* FALLTHRU */
5806 case LE_EXPR:
5807 case LT_EXPR:
5808 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5809 break;
5810 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5811 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
5812 {
5813 /* A <= 0 ? A : -A is valid for A == INT_MIN, but -abs(INT_MIN)
5814 is not; it invokes UB both in the abs and in its negation.
5815 So, use ABSU_EXPR instead. */
5816 tree utype = unsigned_type_for (TREE_TYPE (arg1));
5817 tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
5818 tem = negate_expr (tem);
5819 return fold_convert_loc (loc, type, tem);
5820 }
5821 else
5822 {
5823 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5824 return negate_expr (fold_convert_loc (loc, type, tem));
5825 }
5826 default:
5827 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5828 break;
5829 }
5830
5831 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5832 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5833 both transformations are correct when A is NaN: A != 0
5834 is then true, and A == 0 is false. */
5835
5836 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5837 && integer_zerop (arg01) && integer_zerop (arg2))
5838 {
5839 if (comp_code == NE_EXPR)
5840 return fold_convert_loc (loc, type, arg1);
5841 else if (comp_code == EQ_EXPR)
5842 return build_zero_cst (type);
5843 }
5844
5845 /* Try some transformations of A op B ? A : B.
5846
5847 A == B? A : B same as B
5848 A != B? A : B same as A
5849 A >= B? A : B same as max (A, B)
5850 A > B? A : B same as max (B, A)
5851 A <= B? A : B same as min (A, B)
5852 A < B? A : B same as min (B, A)
5853
5854 As above, these transformations don't work in the presence
5855 of signed zeros. For example, if A and B are zeros of
5856 opposite sign, the first two transformations will change
5857 the sign of the result. In the last four, the original
5858 expressions give different results for (A=+0, B=-0) and
5859 (A=-0, B=+0), but the transformed expressions do not.
5860
5861 The first two transformations are correct if either A or B
5862 is a NaN. In the first transformation, the condition will
5863 be false, and B will indeed be chosen. In the case of the
5864 second transformation, the condition A != B will be true,
5865 and A will be chosen.
5866
5867 The conversions to max() and min() are not correct if B is
5868 a number and A is not. The conditions in the original
5869 expressions will be false, so all four give B. The min()
5870 and max() versions would give a NaN instead. */
5871 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5872 && operand_equal_for_comparison_p (arg01, arg2)
5873 /* Avoid these transformations if the COND_EXPR may be used
5874 as an lvalue in the C++ front-end. PR c++/19199. */
5875 && (in_gimple_form
5876 || VECTOR_TYPE_P (type)
5877 || (! lang_GNU_CXX ()
5878 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5879 || ! maybe_lvalue_p (arg1)
5880 || ! maybe_lvalue_p (arg2)))
5881 {
5882 tree comp_op0 = arg00;
5883 tree comp_op1 = arg01;
5884 tree comp_type = TREE_TYPE (comp_op0);
5885
5886 switch (comp_code)
5887 {
5888 case EQ_EXPR:
5889 return fold_convert_loc (loc, type, arg2);
5890 case NE_EXPR:
5891 return fold_convert_loc (loc, type, arg1);
5892 case LE_EXPR:
5893 case LT_EXPR:
5894 case UNLE_EXPR:
5895 case UNLT_EXPR:
5896 /* In C++ a ?: expression can be an lvalue, so put the
5897 operand which will be used if they are equal first
5898 so that we can convert this back to the
5899 corresponding COND_EXPR. */
5900 if (!HONOR_NANS (arg1))
5901 {
5902 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5903 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5904 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5905 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5906 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5907 comp_op1, comp_op0);
5908 return fold_convert_loc (loc, type, tem);
5909 }
5910 break;
5911 case GE_EXPR:
5912 case GT_EXPR:
5913 case UNGE_EXPR:
5914 case UNGT_EXPR:
5915 if (!HONOR_NANS (arg1))
5916 {
5917 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5918 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5919 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5920 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5921 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5922 comp_op1, comp_op0);
5923 return fold_convert_loc (loc, type, tem);
5924 }
5925 break;
5926 case UNEQ_EXPR:
5927 if (!HONOR_NANS (arg1))
5928 return fold_convert_loc (loc, type, arg2);
5929 break;
5930 case LTGT_EXPR:
5931 if (!HONOR_NANS (arg1))
5932 return fold_convert_loc (loc, type, arg1);
5933 break;
5934 default:
5935 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5936 break;
5937 }
5938 }
5939
5940 return NULL_TREE;
5941 }
5942
5943
5944 \f
5945 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5946 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5947 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5948 false) >= 2)
5949 #endif
5950
5951 /* EXP is some logical combination of boolean tests. See if we can
5952 merge it into some range test. Return the new tree if so. */
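/* For example, i == 0 || i == 1 can be merged into the single range test
(unsigned) i <= 1. */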
5953
5954 static tree
5955 fold_range_test (location_t loc, enum tree_code code, tree type,
5956 tree op0, tree op1)
5957 {
5958 int or_op = (code == TRUTH_ORIF_EXPR
5959 || code == TRUTH_OR_EXPR);
5960 int in0_p, in1_p, in_p;
5961 tree low0, low1, low, high0, high1, high;
5962 bool strict_overflow_p = false;
5963 tree tem, lhs, rhs;
5964 const char * const warnmsg = G_("assuming signed overflow does not occur "
5965 "when simplifying range test");
5966
5967 if (!INTEGRAL_TYPE_P (type))
5968 return 0;
5969
5970 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5971 /* If op0 is known true or false and this is a short-circuiting
5972 operation we must not merge with op1 since that makes side-effects
5973 unconditional. So special-case this. */
5974 if (!lhs
5975 && ((code == TRUTH_ORIF_EXPR && in0_p)
5976 || (code == TRUTH_ANDIF_EXPR && !in0_p)))
5977 return op0;
5978 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5979
5980 /* If this is an OR operation, invert both sides; we will invert
5981 again at the end. */
5982 if (or_op)
5983 in0_p = ! in0_p, in1_p = ! in1_p;
5984
5985 /* If both expressions are the same, if we can merge the ranges, and we
5986 can build the range test, return it or it inverted. If one of the
5987 ranges is always true or always false, consider it to be the same
5988 expression as the other. */
5989 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5990 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5991 in1_p, low1, high1)
5992 && (tem = (build_range_check (loc, type,
5993 lhs != 0 ? lhs
5994 : rhs != 0 ? rhs : integer_zero_node,
5995 in_p, low, high))) != 0)
5996 {
5997 if (strict_overflow_p)
5998 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5999 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
6000 }
6001
6002 /* On machines where branches are expensive, if this is a
6003 short-circuited branch and the underlying object on both sides
6004 is the same, make a non-short-circuit operation. */
6005 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
6006 if (param_logical_op_non_short_circuit != -1)
6007 logical_op_non_short_circuit
6008 = param_logical_op_non_short_circuit;
6009 if (logical_op_non_short_circuit
6010 && !flag_sanitize_coverage
6011 && lhs != 0 && rhs != 0
6012 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6013 && operand_equal_p (lhs, rhs, 0))
6014 {
6015 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
6016 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
6017 which cases we can't do this. */
6018 if (simple_operand_p (lhs))
6019 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6020 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6021 type, op0, op1);
6022
6023 else if (!lang_hooks.decls.global_bindings_p ()
6024 && !CONTAINS_PLACEHOLDER_P (lhs))
6025 {
6026 tree common = save_expr (lhs);
6027
6028 if ((lhs = build_range_check (loc, type, common,
6029 or_op ? ! in0_p : in0_p,
6030 low0, high0)) != 0
6031 && (rhs = build_range_check (loc, type, common,
6032 or_op ? ! in1_p : in1_p,
6033 low1, high1)) != 0)
6034 {
6035 if (strict_overflow_p)
6036 fold_overflow_warning (warnmsg,
6037 WARN_STRICT_OVERFLOW_COMPARISON);
6038 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6039 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6040 type, lhs, rhs);
6041 }
6042 }
6043 }
6044
6045 return 0;
6046 }
6047 \f
6048 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
6049 bit value. Arrange things so the extra bits will be set to zero if and
6050 only if C is sign-extended to its full width. If MASK is nonzero,
6051 it is an INTEGER_CST that should be AND'ed with the extra bits. */
6052
6053 static tree
6054 unextend (tree c, int p, int unsignedp, tree mask)
6055 {
6056 tree type = TREE_TYPE (c);
6057 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
6058 tree temp;
6059
6060 if (p == modesize || unsignedp)
6061 return c;
6062
6063 /* We work by getting just the sign bit into the low-order bit, then
6064 into the high-order bit, then sign-extend. We then XOR that value
6065 with C. */
6066 temp = build_int_cst (TREE_TYPE (c),
6067 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
6068
6069 /* We must use a signed type in order to get an arithmetic right shift.
6070 However, we must also avoid introducing accidental overflows, so that
6071 a subsequent call to integer_zerop will work. Hence we must
6072 do the type conversion here. At this point, the constant is either
6073 zero or one, and the conversion to a signed type can never overflow.
6074 We could get an overflow if this conversion is done anywhere else. */
6075 if (TYPE_UNSIGNED (type))
6076 temp = fold_convert (signed_type_for (type), temp);
6077
6078 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6079 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6080 if (mask != 0)
6081 temp = const_binop (BIT_AND_EXPR, temp,
6082 fold_convert (TREE_TYPE (c), mask));
6083 /* If necessary, convert the type back to match the type of C. */
6084 if (TYPE_UNSIGNED (type))
6085 temp = fold_convert (type, temp);
6086
6087 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
6088 }
6089 \f
6090 /* For an expression that has the form
6091 (A && B) || ~B
6092 or
6093 (A || B) && ~B,
6094 we can drop one of the inner expressions and simplify to
6095 A || ~B
6096 or
6097 A && ~B
6098 LOC is the location of the resulting expression. OP is the inner
6099 logical operation; the left-hand side in the examples above, while CMPOP
6100 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6101 removing a condition that guards another, as in
6102 (A != NULL && A->...) || A == NULL
6103 which we must not transform. If RHS_ONLY is true, only eliminate the
6104 right-most operand of the inner logical operation. */
6105
6106 static tree
6107 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6108 bool rhs_only)
6109 {
6110 tree type = TREE_TYPE (cmpop);
6111 enum tree_code code = TREE_CODE (cmpop);
6112 enum tree_code truthop_code = TREE_CODE (op);
6113 tree lhs = TREE_OPERAND (op, 0);
6114 tree rhs = TREE_OPERAND (op, 1);
6115 tree orig_lhs = lhs, orig_rhs = rhs;
6116 enum tree_code rhs_code = TREE_CODE (rhs);
6117 enum tree_code lhs_code = TREE_CODE (lhs);
6118 enum tree_code inv_code;
6119
6120 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6121 return NULL_TREE;
6122
6123 if (TREE_CODE_CLASS (code) != tcc_comparison)
6124 return NULL_TREE;
6125
6126 if (rhs_code == truthop_code)
6127 {
6128 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6129 if (newrhs != NULL_TREE)
6130 {
6131 rhs = newrhs;
6132 rhs_code = TREE_CODE (rhs);
6133 }
6134 }
6135 if (lhs_code == truthop_code && !rhs_only)
6136 {
6137 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6138 if (newlhs != NULL_TREE)
6139 {
6140 lhs = newlhs;
6141 lhs_code = TREE_CODE (lhs);
6142 }
6143 }
6144
6145 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6146 if (inv_code == rhs_code
6147 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6148 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6149 return lhs;
6150 if (!rhs_only && inv_code == lhs_code
6151 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6152 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6153 return rhs;
6154 if (rhs != orig_rhs || lhs != orig_lhs)
6155 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6156 lhs, rhs);
6157 return NULL_TREE;
6158 }
6159
6160 /* Find ways of folding logical expressions of LHS and RHS:
6161 Try to merge two comparisons to the same innermost item.
6162 Look for range tests like "ch >= '0' && ch <= '9'".
6163 Look for combinations of simple terms on machines with expensive branches
6164 and evaluate the RHS unconditionally.
6165
6166 For example, if we have p->a == 2 && p->b == 4 and we can make an
6167 object large enough to span both A and B, we can do this with a comparison
6168 against the object ANDed with a mask.
6169
6170 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6171 operations to do this with one comparison.
6172
6173 We check for both normal comparisons and the BIT_AND_EXPRs made by this
6174 function and the one above.
6175
6176 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6177 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6178
6179 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
6180 two operands.
6181
6182 We return the simplified tree or 0 if no optimization is possible. */
6183
6184 static tree
6185 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6186 tree lhs, tree rhs)
6187 {
6188 /* If this is the "or" of two comparisons, we can do something if
6189 the comparisons are NE_EXPR. If this is the "and", we can do something
6190 if the comparisons are EQ_EXPR. I.e.,
6191 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6192
6193 WANTED_CODE is this operation code. For single bit fields, we can
6194 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6195 comparison for one-bit fields. */
6196
6197 enum tree_code wanted_code;
6198 enum tree_code lcode, rcode;
6199 tree ll_arg, lr_arg, rl_arg, rr_arg;
6200 tree ll_inner, lr_inner, rl_inner, rr_inner;
6201 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6202 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6203 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6204 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6205 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6206 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6207 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6208 scalar_int_mode lnmode, rnmode;
6209 tree ll_mask, lr_mask, rl_mask, rr_mask;
6210 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6211 tree l_const, r_const;
6212 tree lntype, rntype, result;
6213 HOST_WIDE_INT first_bit, end_bit;
6214 int volatilep;
6215
6216 /* Start by getting the comparison codes. Fail if anything is volatile.
6217 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6218 it were compared against zero with a NE_EXPR. */
6219
6220 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6221 return 0;
6222
6223 lcode = TREE_CODE (lhs);
6224 rcode = TREE_CODE (rhs);
6225
6226 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6227 {
6228 lhs = build2 (NE_EXPR, truth_type, lhs,
6229 build_int_cst (TREE_TYPE (lhs), 0));
6230 lcode = NE_EXPR;
6231 }
6232
6233 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6234 {
6235 rhs = build2 (NE_EXPR, truth_type, rhs,
6236 build_int_cst (TREE_TYPE (rhs), 0));
6237 rcode = NE_EXPR;
6238 }
6239
6240 if (TREE_CODE_CLASS (lcode) != tcc_comparison
6241 || TREE_CODE_CLASS (rcode) != tcc_comparison)
6242 return 0;
6243
6244 ll_arg = TREE_OPERAND (lhs, 0);
6245 lr_arg = TREE_OPERAND (lhs, 1);
6246 rl_arg = TREE_OPERAND (rhs, 0);
6247 rr_arg = TREE_OPERAND (rhs, 1);
6248
6249 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6250 if (simple_operand_p (ll_arg)
6251 && simple_operand_p (lr_arg))
6252 {
6253 if (operand_equal_p (ll_arg, rl_arg, 0)
6254 && operand_equal_p (lr_arg, rr_arg, 0))
6255 {
6256 result = combine_comparisons (loc, code, lcode, rcode,
6257 truth_type, ll_arg, lr_arg);
6258 if (result)
6259 return result;
6260 }
6261 else if (operand_equal_p (ll_arg, rr_arg, 0)
6262 && operand_equal_p (lr_arg, rl_arg, 0))
6263 {
6264 result = combine_comparisons (loc, code, lcode,
6265 swap_tree_comparison (rcode),
6266 truth_type, ll_arg, lr_arg);
6267 if (result)
6268 return result;
6269 }
6270 }
6271
6272 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6273 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6274
6275 /* If the RHS can be evaluated unconditionally and its operands are
6276 simple, it wins to evaluate the RHS unconditionally on machines
6277 with expensive branches. In this case, this isn't a comparison
6278 that can be merged. */
6279
6280 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6281 false) >= 2
6282 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6283 && simple_operand_p (rl_arg)
6284 && simple_operand_p (rr_arg))
6285 {
6286 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6287 if (code == TRUTH_OR_EXPR
6288 && lcode == NE_EXPR && integer_zerop (lr_arg)
6289 && rcode == NE_EXPR && integer_zerop (rr_arg)
6290 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6291 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6292 return build2_loc (loc, NE_EXPR, truth_type,
6293 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6294 ll_arg, rl_arg),
6295 build_int_cst (TREE_TYPE (ll_arg), 0));
6296
6297 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6298 if (code == TRUTH_AND_EXPR
6299 && lcode == EQ_EXPR && integer_zerop (lr_arg)
6300 && rcode == EQ_EXPR && integer_zerop (rr_arg)
6301 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6302 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6303 return build2_loc (loc, EQ_EXPR, truth_type,
6304 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6305 ll_arg, rl_arg),
6306 build_int_cst (TREE_TYPE (ll_arg), 0));
6307 }
6308
6309 /* See if the comparisons can be merged. Then get all the parameters for
6310 each side. */
6311
6312 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6313 || (rcode != EQ_EXPR && rcode != NE_EXPR))
6314 return 0;
6315
6316 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6317 volatilep = 0;
6318 ll_inner = decode_field_reference (loc, &ll_arg,
6319 &ll_bitsize, &ll_bitpos, &ll_mode,
6320 &ll_unsignedp, &ll_reversep, &volatilep,
6321 &ll_mask, &ll_and_mask);
6322 lr_inner = decode_field_reference (loc, &lr_arg,
6323 &lr_bitsize, &lr_bitpos, &lr_mode,
6324 &lr_unsignedp, &lr_reversep, &volatilep,
6325 &lr_mask, &lr_and_mask);
6326 rl_inner = decode_field_reference (loc, &rl_arg,
6327 &rl_bitsize, &rl_bitpos, &rl_mode,
6328 &rl_unsignedp, &rl_reversep, &volatilep,
6329 &rl_mask, &rl_and_mask);
6330 rr_inner = decode_field_reference (loc, &rr_arg,
6331 &rr_bitsize, &rr_bitpos, &rr_mode,
6332 &rr_unsignedp, &rr_reversep, &volatilep,
6333 &rr_mask, &rr_and_mask);
6334
6335 /* The inner operation on the lhs of each comparison must be the same
6336 if we are to be able to do anything.
6337 Then see if we have constants. If not, the same must be true for
6338 the rhs's. */
6339 if (volatilep
6340 || ll_reversep != rl_reversep
6341 || ll_inner == 0 || rl_inner == 0
6342 || ! operand_equal_p (ll_inner, rl_inner, 0))
6343 return 0;
6344
6345 if (TREE_CODE (lr_arg) == INTEGER_CST
6346 && TREE_CODE (rr_arg) == INTEGER_CST)
6347 {
6348 l_const = lr_arg, r_const = rr_arg;
6349 lr_reversep = ll_reversep;
6350 }
6351 else if (lr_reversep != rr_reversep
6352 || lr_inner == 0 || rr_inner == 0
6353 || ! operand_equal_p (lr_inner, rr_inner, 0))
6354 return 0;
6355 else
6356 l_const = r_const = 0;
6357
6358 /* If either comparison code is not correct for our logical operation,
6359 fail. However, we can convert a one-bit comparison against zero into
6360 the opposite comparison against that bit being set in the field. */
6361
6362 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6363 if (lcode != wanted_code)
6364 {
6365 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6366 {
6367 /* Make the left operand unsigned, since we are only interested
6368 in the value of one bit. Otherwise we are doing the wrong
6369 thing below. */
6370 ll_unsignedp = 1;
6371 l_const = ll_mask;
6372 }
6373 else
6374 return 0;
6375 }
6376
6377 /* This is analogous to the code for l_const above. */
6378 if (rcode != wanted_code)
6379 {
6380 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6381 {
6382 rl_unsignedp = 1;
6383 r_const = rl_mask;
6384 }
6385 else
6386 return 0;
6387 }
6388
6389 /* See if we can find a mode that contains both fields being compared on
6390 the left. If we can't, fail. Otherwise, update all constants and masks
6391 to be relative to a field of that size. */
6392 first_bit = MIN (ll_bitpos, rl_bitpos);
6393 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6394 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6395 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6396 volatilep, &lnmode))
6397 return 0;
6398
6399 lnbitsize = GET_MODE_BITSIZE (lnmode);
6400 lnbitpos = first_bit & ~ (lnbitsize - 1);
6401 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6402 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6403
6404 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6405 {
6406 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6407 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6408 }
6409
6410 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6411 size_int (xll_bitpos));
6412 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6413 size_int (xrl_bitpos));
6414
6415 if (l_const)
6416 {
6417 l_const = fold_convert_loc (loc, lntype, l_const);
6418 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6419 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6420 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6421 fold_build1_loc (loc, BIT_NOT_EXPR,
6422 lntype, ll_mask))))
6423 {
6424 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6425
6426 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6427 }
6428 }
6429 if (r_const)
6430 {
6431 r_const = fold_convert_loc (loc, lntype, r_const);
6432 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6433 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6434 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6435 fold_build1_loc (loc, BIT_NOT_EXPR,
6436 lntype, rl_mask))))
6437 {
6438 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6439
6440 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6441 }
6442 }
6443
6444 /* If the right sides are not constant, do the same for them. Also,
6445 disallow this optimization if a size, signedness or storage order
6446 mismatch occurs between the left and right sides. */
6447 if (l_const == 0)
6448 {
6449 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6450 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6451 || ll_reversep != lr_reversep
6452 /* Make sure the two fields on the right
6453 correspond to the left without being swapped. */
6454 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6455 return 0;
6456
6457 first_bit = MIN (lr_bitpos, rr_bitpos);
6458 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6459 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6460 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6461 volatilep, &rnmode))
6462 return 0;
6463
6464 rnbitsize = GET_MODE_BITSIZE (rnmode);
6465 rnbitpos = first_bit & ~ (rnbitsize - 1);
6466 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6467 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6468
6469 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6470 {
6471 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6472 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6473 }
6474
6475 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6476 rntype, lr_mask),
6477 size_int (xlr_bitpos));
6478 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6479 rntype, rr_mask),
6480 size_int (xrr_bitpos));
6481
6482 /* Make a mask that corresponds to both fields being compared.
6483 Do this for both items being compared. If the operands are the
6484 same size and the bits being compared are in the same position
6485 then we can do this by masking both and comparing the masked
6486 results. */
6487 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6488 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6489 if (lnbitsize == rnbitsize
6490 && xll_bitpos == xlr_bitpos
6491 && lnbitpos >= 0
6492 && rnbitpos >= 0)
6493 {
6494 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6495 lntype, lnbitsize, lnbitpos,
6496 ll_unsignedp || rl_unsignedp, ll_reversep);
6497 if (! all_ones_mask_p (ll_mask, lnbitsize))
6498 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6499
6500 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6501 rntype, rnbitsize, rnbitpos,
6502 lr_unsignedp || rr_unsignedp, lr_reversep);
6503 if (! all_ones_mask_p (lr_mask, rnbitsize))
6504 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6505
6506 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6507 }
6508
6509 /* There is still another way we can do something: If both pairs of
6510 fields being compared are adjacent, we may be able to make a wider
6511 field containing them both.
6512
6513 Note that we still must mask the lhs/rhs expressions. Furthermore,
6514 the mask must be shifted to account for the shift done by
6515 make_bit_field_ref. */
6516 if (((ll_bitsize + ll_bitpos == rl_bitpos
6517 && lr_bitsize + lr_bitpos == rr_bitpos)
6518 || (ll_bitpos == rl_bitpos + rl_bitsize
6519 && lr_bitpos == rr_bitpos + rr_bitsize))
6520 && ll_bitpos >= 0
6521 && rl_bitpos >= 0
6522 && lr_bitpos >= 0
6523 && rr_bitpos >= 0)
6524 {
6525 tree type;
6526
6527 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6528 ll_bitsize + rl_bitsize,
6529 MIN (ll_bitpos, rl_bitpos),
6530 ll_unsignedp, ll_reversep);
6531 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6532 lr_bitsize + rr_bitsize,
6533 MIN (lr_bitpos, rr_bitpos),
6534 lr_unsignedp, lr_reversep);
6535
6536 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6537 size_int (MIN (xll_bitpos, xrl_bitpos)));
6538 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6539 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6540
6541 /* Convert to the smaller type before masking out unwanted bits. */
6542 type = lntype;
6543 if (lntype != rntype)
6544 {
6545 if (lnbitsize > rnbitsize)
6546 {
6547 lhs = fold_convert_loc (loc, rntype, lhs);
6548 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6549 type = rntype;
6550 }
6551 else if (lnbitsize < rnbitsize)
6552 {
6553 rhs = fold_convert_loc (loc, lntype, rhs);
6554 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6555 type = lntype;
6556 }
6557 }
6558
6559 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6560 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6561
6562 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6563 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6564
6565 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6566 }
6567
6568 return 0;
6569 }
6570
6571 /* Handle the case of comparisons with constants. If there is something in
6572 common between the masks, those bits of the constants must be the same.
6573 If not, the condition is always false. Test for this to avoid generating
6574 incorrect code below. */
6575 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6576 if (! integer_zerop (result)
6577 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6578 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6579 {
6580 if (wanted_code == NE_EXPR)
6581 {
6582 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6583 return constant_boolean_node (true, truth_type);
6584 }
6585 else
6586 {
6587 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6588 return constant_boolean_node (false, truth_type);
6589 }
6590 }
6591
6592 if (lnbitpos < 0)
6593 return 0;
6594
6595 /* Construct the expression we will return. First get the component
6596 reference we will make. Unless the mask is all ones for the width of
6597 that field, perform the mask operation. Then compare with the
6598 merged constant. */
6599 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6600 lntype, lnbitsize, lnbitpos,
6601 ll_unsignedp || rl_unsignedp, ll_reversep);
6602
6603 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6604 if (! all_ones_mask_p (ll_mask, lnbitsize))
6605 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6606
6607 return build2_loc (loc, wanted_code, truth_type, result,
6608 const_binop (BIT_IOR_EXPR, l_const, r_const));
6609 }
6610 \f
6611 /* T is an integer expression that is being multiplied, divided, or taken a
6612 modulus (CODE says which and what kind of divide or modulus) by a
6613 constant C. See if we can eliminate that operation by folding it with
6614 other operations already in T. WIDE_TYPE, if non-null, is a type that
6615 should be used for the computation if wider than our type.
6616
6617 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6618 (X * 2) + (Y * 4). We must, however, be assured that either the original
6619 expression would not overflow or that overflow is undefined for the type
6620 in the language in question.
6621
6622 If we return a non-null expression, it is an equivalent form of the
6623 original computation, but need not be in the original type.
6624
6625 We set *STRICT_OVERFLOW_P to true if the return value depends on
6626 signed overflow being undefined. Otherwise we do not change
6627 *STRICT_OVERFLOW_P. */
6628
6629 static tree
6630 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6631 bool *strict_overflow_p)
6632 {
6633 /* To avoid exponential search depth, refuse to allow recursion past
6634 three levels. Beyond that (1) it's highly unlikely that we'll find
6635 something interesting and (2) we've probably processed it before
6636 when we built the inner expression. */
6637
6638 static int depth;
6639 tree ret;
6640
6641 if (depth > 3)
6642 return NULL;
6643
6644 depth++;
6645 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6646 depth--;
6647
6648 return ret;
6649 }
6650
6651 static tree
6652 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6653 bool *strict_overflow_p)
6654 {
6655 tree type = TREE_TYPE (t);
6656 enum tree_code tcode = TREE_CODE (t);
6657 tree ctype = (wide_type != 0
6658 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6659 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6660 ? wide_type : type);
6661 tree t1, t2;
6662 int same_p = tcode == code;
6663 tree op0 = NULL_TREE, op1 = NULL_TREE;
6664 bool sub_strict_overflow_p;
6665
6666 /* Don't deal with constants of zero here; they confuse the code below. */
6667 if (integer_zerop (c))
6668 return NULL_TREE;
6669
6670 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6671 op0 = TREE_OPERAND (t, 0);
6672
6673 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6674 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6675
6676 /* Note that we need not handle conditional operations here since fold
6677 already handles those cases. So just do arithmetic here. */
6678 switch (tcode)
6679 {
6680 case INTEGER_CST:
6681 /* For a constant, we can always simplify if we are a multiply
6682 or (for divide and modulus) if it is a multiple of our constant. */
6683 if (code == MULT_EXPR
6684 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6685 TYPE_SIGN (type)))
6686 {
6687 tree tem = const_binop (code, fold_convert (ctype, t),
6688 fold_convert (ctype, c));
6689 /* If the multiplication overflowed, we lost information on it.
6690 See PR68142 and PR69845. */
6691 if (TREE_OVERFLOW (tem))
6692 return NULL_TREE;
6693 return tem;
6694 }
6695 break;
6696
6697 CASE_CONVERT: case NON_LVALUE_EXPR:
6698 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0)))
6699 break;
6700 /* If op0 is an expression ... */
6701 if ((COMPARISON_CLASS_P (op0)
6702 || UNARY_CLASS_P (op0)
6703 || BINARY_CLASS_P (op0)
6704 || VL_EXP_CLASS_P (op0)
6705 || EXPRESSION_CLASS_P (op0))
6706 /* ... and has wrapping overflow, and its type is smaller
6707 than ctype, then we cannot pass through as widening. */
6708 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6709 && (TYPE_PRECISION (ctype)
6710 > TYPE_PRECISION (TREE_TYPE (op0))))
6711 /* ... or this is a truncation (t is narrower than op0),
6712 then we cannot pass through this narrowing. */
6713 || (TYPE_PRECISION (type)
6714 < TYPE_PRECISION (TREE_TYPE (op0)))
6715 /* ... or signedness changes for division or modulus,
6716 then we cannot pass through this conversion. */
6717 || (code != MULT_EXPR
6718 && (TYPE_UNSIGNED (ctype)
6719 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6720 /* ... or has undefined overflow while the converted to
6721 type has not, we cannot do the operation in the inner type
6722 as that would introduce undefined overflow. */
6723 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6724 && !TYPE_OVERFLOW_UNDEFINED (type))))
6725 break;
6726
6727 /* Pass the constant down and see if we can make a simplification. If
6728 we can, replace this expression with the inner simplification for
6729 possible later conversion to our or some other type. */
6730 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6731 && TREE_CODE (t2) == INTEGER_CST
6732 && !TREE_OVERFLOW (t2)
6733 && (t1 = extract_muldiv (op0, t2, code,
6734 code == MULT_EXPR ? ctype : NULL_TREE,
6735 strict_overflow_p)) != 0)
6736 return t1;
6737 break;
6738
6739 case ABS_EXPR:
6740 /* If widening the type changes it from signed to unsigned, then we
6741 must avoid building ABS_EXPR itself as unsigned. */
6742 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6743 {
6744 tree cstype = (*signed_type_for) (ctype);
6745 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6746 != 0)
6747 {
6748 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6749 return fold_convert (ctype, t1);
6750 }
6751 break;
6752 }
6753 /* If the constant is negative, we cannot simplify this. */
6754 if (tree_int_cst_sgn (c) == -1)
6755 break;
6756 /* FALLTHROUGH */
6757 case NEGATE_EXPR:
6758 /* For division and modulus, type can't be unsigned, as e.g.
6759 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6760 For signed types, even with wrapping overflow, this is fine. */
6761 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6762 break;
6763 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6764 != 0)
6765 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6766 break;
6767
6768 case MIN_EXPR: case MAX_EXPR:
6769 /* If widening the type changes the signedness, then we can't perform
6770 this optimization as that changes the result. */
6771 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6772 break;
6773
6774 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6775 sub_strict_overflow_p = false;
6776 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6777 &sub_strict_overflow_p)) != 0
6778 && (t2 = extract_muldiv (op1, c, code, wide_type,
6779 &sub_strict_overflow_p)) != 0)
6780 {
6781 if (tree_int_cst_sgn (c) < 0)
6782 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6783 if (sub_strict_overflow_p)
6784 *strict_overflow_p = true;
6785 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6786 fold_convert (ctype, t2));
6787 }
6788 break;
6789
6790 case LSHIFT_EXPR: case RSHIFT_EXPR:
6791 /* If the second operand is constant, this is a multiplication
6792 or floor division, by a power of two, so we can treat it that
6793 way unless the multiplier or divisor overflows. Signed
6794 left-shift overflow is implementation-defined rather than
6795 undefined in C90, so do not convert signed left shift into
6796 multiplication. */
6797 if (TREE_CODE (op1) == INTEGER_CST
6798 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6799 /* const_binop may not detect overflow correctly,
6800 so check for it explicitly here. */
6801 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6802 wi::to_wide (op1))
6803 && (t1 = fold_convert (ctype,
6804 const_binop (LSHIFT_EXPR, size_one_node,
6805 op1))) != 0
6806 && !TREE_OVERFLOW (t1))
6807 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6808 ? MULT_EXPR : FLOOR_DIV_EXPR,
6809 ctype,
6810 fold_convert (ctype, op0),
6811 t1),
6812 c, code, wide_type, strict_overflow_p);
6813 break;
6814
6815 case PLUS_EXPR: case MINUS_EXPR:
6816 /* See if we can eliminate the operation on both sides. If we can, we
6817 can return a new PLUS or MINUS. If we can't, the only remaining
6818 cases where we can do anything are if the second operand is a
6819 constant. */
6820 sub_strict_overflow_p = false;
6821 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6822 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6823 if (t1 != 0 && t2 != 0
6824 && TYPE_OVERFLOW_WRAPS (ctype)
6825 && (code == MULT_EXPR
6826 /* If not multiplication, we can only do this if both operands
6827 are divisible by c. */
6828 || (multiple_of_p (ctype, op0, c)
6829 && multiple_of_p (ctype, op1, c))))
6830 {
6831 if (sub_strict_overflow_p)
6832 *strict_overflow_p = true;
6833 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6834 fold_convert (ctype, t2));
6835 }
6836
6837 /* If this was a subtraction, negate OP1 and set it to be an addition.
6838 This simplifies the logic below. */
6839 if (tcode == MINUS_EXPR)
6840 {
6841 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6842 /* If OP1 was not easily negatable, the constant may be OP0. */
6843 if (TREE_CODE (op0) == INTEGER_CST)
6844 {
6845 std::swap (op0, op1);
6846 std::swap (t1, t2);
6847 }
6848 }
6849
6850 if (TREE_CODE (op1) != INTEGER_CST)
6851 break;
6852
6853 /* If either OP1 or C is negative, this optimization is not safe for
6854 some of the division and remainder types while for others we need
6855 to change the code. */
6856 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6857 {
6858 if (code == CEIL_DIV_EXPR)
6859 code = FLOOR_DIV_EXPR;
6860 else if (code == FLOOR_DIV_EXPR)
6861 code = CEIL_DIV_EXPR;
6862 else if (code != MULT_EXPR
6863 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6864 break;
6865 }
6866
6867 /* If it's a multiply or a division/modulus operation of a multiple
6868 of our constant, do the operation and verify it doesn't overflow. */
6869 if (code == MULT_EXPR
6870 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6871 TYPE_SIGN (type)))
6872 {
6873 op1 = const_binop (code, fold_convert (ctype, op1),
6874 fold_convert (ctype, c));
6875 /* We allow the constant to overflow with wrapping semantics. */
6876 if (op1 == 0
6877 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6878 break;
6879 }
6880 else
6881 break;
6882
6883 /* If we have an unsigned type, we cannot widen the operation since it
6884 will change the result if the original computation overflowed. */
6885 if (TYPE_UNSIGNED (ctype) && ctype != type)
6886 break;
6887
6888 /* The last case is if we are a multiply. In that case, we can
6889 apply the distributive law to commute the multiply and addition
6890 if the multiplication of the constants doesn't overflow
6891 and overflow is defined. With undefined overflow
6892 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
6893 But fold_plusminus_mult_expr would factor back any power-of-two
6894 value so do not distribute in the first place in this case. */
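/* For example, with wrapping overflow (X + 5) * 3 can be rewritten as
X * 3 + 15, whereas a power-of-two factor such as 4 is left to
fold_plusminus_mult_expr. */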
6895 if (code == MULT_EXPR
6896 && TYPE_OVERFLOW_WRAPS (ctype)
6897 && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
6898 return fold_build2 (tcode, ctype,
6899 fold_build2 (code, ctype,
6900 fold_convert (ctype, op0),
6901 fold_convert (ctype, c)),
6902 op1);
6903
6904 break;
6905
6906 case MULT_EXPR:
6907 /* We have a special case here if we are doing something like
6908 (C * 8) % 4 since we know that's zero. */
6909 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6910 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6911 /* If the multiplication can overflow we cannot optimize this. */
6912 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6913 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6914 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6915 TYPE_SIGN (type)))
6916 {
6917 *strict_overflow_p = true;
6918 return omit_one_operand (type, integer_zero_node, op0);
6919 }
6920
6921 /* ... fall through ... */
6922
6923 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6924 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6925 /* If we can extract our operation from the LHS, do so and return a
6926 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6927 do something only if the second operand is a constant. */
6928 if (same_p
6929 && TYPE_OVERFLOW_WRAPS (ctype)
6930 && (t1 = extract_muldiv (op0, c, code, wide_type,
6931 strict_overflow_p)) != 0)
6932 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6933 fold_convert (ctype, op1));
6934 else if (tcode == MULT_EXPR && code == MULT_EXPR
6935 && TYPE_OVERFLOW_WRAPS (ctype)
6936 && (t1 = extract_muldiv (op1, c, code, wide_type,
6937 strict_overflow_p)) != 0)
6938 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6939 fold_convert (ctype, t1));
6940 else if (TREE_CODE (op1) != INTEGER_CST)
6941 return 0;
6942
6943 /* If these are the same operation types, we can associate them
6944 assuming no overflow. */
6945 if (tcode == code)
6946 {
6947 bool overflow_p = false;
6948 wi::overflow_type overflow_mul;
6949 signop sign = TYPE_SIGN (ctype);
6950 unsigned prec = TYPE_PRECISION (ctype);
6951 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6952 wi::to_wide (c, prec),
6953 sign, &overflow_mul);
6954 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6955 if (overflow_mul
6956 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6957 overflow_p = true;
6958 if (!overflow_p)
6959 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6960 wide_int_to_tree (ctype, mul));
6961 }
6962
6963 /* If these operations "cancel" each other, we have the main
6964 optimizations of this pass, which occur when either constant is a
6965 multiple of the other, in which case we replace this with either an
6966 operation of CODE or TCODE.
6967
6968 If we have an unsigned type, we cannot do this since it will change
6969 the result if the original computation overflowed. */
6970 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6971 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6972 || (tcode == MULT_EXPR
6973 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6974 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6975 && code != MULT_EXPR)))
6976 {
6977 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6978 TYPE_SIGN (type)))
6979 {
6980 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6981 *strict_overflow_p = true;
6982 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6983 fold_convert (ctype,
6984 const_binop (TRUNC_DIV_EXPR,
6985 op1, c)));
6986 }
6987 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
6988 TYPE_SIGN (type)))
6989 {
6990 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6991 *strict_overflow_p = true;
6992 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6993 fold_convert (ctype,
6994 const_binop (TRUNC_DIV_EXPR,
6995 c, op1)));
6996 }
6997 }
6998 break;
6999
7000 default:
7001 break;
7002 }
7003
7004 return 0;
7005 }
7006 \f
7007 /* Return a node which has the indicated constant VALUE (either 0 or
7008 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
7009 and is of the indicated TYPE. */
7010
7011 tree
7012 constant_boolean_node (bool value, tree type)
7013 {
7014 if (type == integer_type_node)
7015 return value ? integer_one_node : integer_zero_node;
7016 else if (type == boolean_type_node)
7017 return value ? boolean_true_node : boolean_false_node;
7018 else if (TREE_CODE (type) == VECTOR_TYPE)
7019 return build_vector_from_val (type,
7020 build_int_cst (TREE_TYPE (type),
7021 value ? -1 : 0));
7022 else
7023 return fold_convert (type, value ? integer_one_node : integer_zero_node);
7024 }
7025
7026
7027 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
7028 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
7029 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
7030 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
7031 COND is the first argument to CODE; otherwise (as in the example
7032 given here), it is the second argument. TYPE is the type of the
7033 original expression. Return NULL_TREE if no simplification is
7034 possible. */
7035
7036 static tree
7037 fold_binary_op_with_conditional_arg (location_t loc,
7038 enum tree_code code,
7039 tree type, tree op0, tree op1,
7040 tree cond, tree arg, int cond_first_p)
7041 {
7042 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
7043 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
7044 tree test, true_value, false_value;
7045 tree lhs = NULL_TREE;
7046 tree rhs = NULL_TREE;
7047 enum tree_code cond_code = COND_EXPR;
7048
7049 /* Do not move possibly trapping operations into the conditional as this
7050 pessimizes code and causes gimplification issues when applied late. */
7051 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
7052 ANY_INTEGRAL_TYPE_P (type)
7053 && TYPE_OVERFLOW_TRAPS (type), op1))
7054 return NULL_TREE;
7055
7056 if (TREE_CODE (cond) == COND_EXPR
7057 || TREE_CODE (cond) == VEC_COND_EXPR)
7058 {
7059 test = TREE_OPERAND (cond, 0);
7060 true_value = TREE_OPERAND (cond, 1);
7061 false_value = TREE_OPERAND (cond, 2);
7062 /* If this operand is an expression that throws (and hence has void
7063 type), it does not make sense to try to perform a logical or
7064 arithmetic operation involving it. */
7065 if (VOID_TYPE_P (TREE_TYPE (true_value)))
7066 lhs = true_value;
7067 if (VOID_TYPE_P (TREE_TYPE (false_value)))
7068 rhs = false_value;
7069 }
7070 else if (!(TREE_CODE (type) != VECTOR_TYPE
7071 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
7072 {
7073 tree testtype = TREE_TYPE (cond);
7074 test = cond;
7075 true_value = constant_boolean_node (true, testtype);
7076 false_value = constant_boolean_node (false, testtype);
7077 }
7078 else
7079 /* Detect the case of mixing vector and scalar types - bail out. */
7080 return NULL_TREE;
7081
7082 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
7083 cond_code = VEC_COND_EXPR;
7084
7085 /* This transformation is only worthwhile if we don't have to wrap ARG
7086 in a SAVE_EXPR and the operation can be simplified without recursing
7087 on at least one of the branches once it's pushed inside the COND_EXPR. */
7088 if (!TREE_CONSTANT (arg)
7089 && (TREE_SIDE_EFFECTS (arg)
7090 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7091 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7092 return NULL_TREE;
7093
7094 arg = fold_convert_loc (loc, arg_type, arg);
7095 if (lhs == 0)
7096 {
7097 true_value = fold_convert_loc (loc, cond_type, true_value);
7098 if (cond_first_p)
7099 lhs = fold_build2_loc (loc, code, type, true_value, arg);
7100 else
7101 lhs = fold_build2_loc (loc, code, type, arg, true_value);
7102 }
7103 if (rhs == 0)
7104 {
7105 false_value = fold_convert_loc (loc, cond_type, false_value);
7106 if (cond_first_p)
7107 rhs = fold_build2_loc (loc, code, type, false_value, arg);
7108 else
7109 rhs = fold_build2_loc (loc, code, type, arg, false_value);
7110 }
7111
7112 /* Check that we have simplified at least one of the branches. */
7113 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7114 return NULL_TREE;
7115
7116 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
7117 }
7118
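/* Editorial aside (not part of fold-const.c): a minimal standalone sketch of
   the identity behind the fold above.  a + (b ? x : y) and
   b ? (a + x) : (a + y) always compute the same value; the transform only
   pays off when one of the new branches then simplifies, e.g. because ARG or
   one branch value is constant.  Compile and run separately from GCC.  */

#include <assert.h>

int
main (void)
{
  for (int b = 0; b <= 1; b++)
    for (int a = -3; a <= 3; a++)
      for (int x = -3; x <= 3; x++)
	for (int y = -3; y <= 3; y++)
	  /* Distributing the addition over the conditional never changes
	     the result.  */
	  assert (a + (b ? x : y) == (b ? a + x : a + y));
  return 0;
}
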
7119 \f
7120 /* Subroutine of fold() that checks for the addition of +/- 0.0.
7121
7122 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
7123 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
7124 ADDEND is the same as X.
7125
7126 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7127 and finite. The problematic cases are when X is zero, and its mode
7128 has signed zeros. In the case of rounding towards -infinity,
7129 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7130 modes, X + 0 is not the same as X because -0 + 0 is 0. */
7131
7132 bool
7133 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
7134 {
7135 if (!real_zerop (addend))
7136 return false;
7137
7138 /* Don't allow the fold with -fsignaling-nans. */
7139 if (HONOR_SNANS (type))
7140 return false;
7141
7142 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7143 if (!HONOR_SIGNED_ZEROS (type))
7144 return true;
7145
7146 /* There is no case that is safe for all rounding modes. */
7147 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7148 return false;
7149
7150 /* In a vector or complex, we would need to check the sign of all zeros. */
7151 if (TREE_CODE (addend) == VECTOR_CST)
7152 addend = uniform_vector_p (addend);
7153 if (!addend || TREE_CODE (addend) != REAL_CST)
7154 return false;
7155
7156 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7157 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
7158 negate = !negate;
7159
7160 /* The mode has signed zeros, and we have to honor their sign.
7161 In this situation, there is only one case we can return true for.
7162 X - 0 is the same as X with default rounding. */
7163 return negate;
7164 }
7165
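/* Editorial aside (not part of fold-const.c): a standalone sketch of why the
   signed-zero case only allows the subtraction form.  With IEEE signed zeros
   and the default rounding mode, -0.0 + 0.0 is +0.0, so folding X + 0.0 to X
   would drop the sign of a zero X, while X - 0.0 preserves it.  Assumes IEEE
   double; compile and run separately from GCC.  */

#include <assert.h>
#include <math.h>

int
main (void)
{
  double x = -0.0;
  /* x + 0.0 loses the sign of the zero ...  */
  assert (signbit (x + 0.0) == 0);
  /* ... but x - 0.0 keeps it, so only the subtraction form is an identity.  */
  assert (signbit (x - 0.0) != 0);
  return 0;
}
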
7166 /* Subroutine of match.pd that optimizes comparisons of a division by
7167 a nonzero integer constant against an integer constant, i.e.
7168 X/C1 op C2.
7169
7170 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7171 GE_EXPR or LE_EXPR. C1 and C2 must be INTEGER_CSTs. */
7172
7173 enum tree_code
7174 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7175 tree *hi, bool *neg_overflow)
7176 {
7177 tree prod, tmp, type = TREE_TYPE (c1);
7178 signop sign = TYPE_SIGN (type);
7179 wi::overflow_type overflow;
7180
7181 /* We have to do this the hard way to detect unsigned overflow.
7182 prod = int_const_binop (MULT_EXPR, c1, c2); */
7183 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7184 prod = force_fit_type (type, val, -1, overflow);
7185 *neg_overflow = false;
7186
7187 if (sign == UNSIGNED)
7188 {
7189 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7190 *lo = prod;
7191
7192 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7193 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7194 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7195 }
7196 else if (tree_int_cst_sgn (c1) >= 0)
7197 {
7198 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7199 switch (tree_int_cst_sgn (c2))
7200 {
7201 case -1:
7202 *neg_overflow = true;
7203 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7204 *hi = prod;
7205 break;
7206
7207 case 0:
7208 *lo = fold_negate_const (tmp, type);
7209 *hi = tmp;
7210 break;
7211
7212 case 1:
7213 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7214 *lo = prod;
7215 break;
7216
7217 default:
7218 gcc_unreachable ();
7219 }
7220 }
7221 else
7222 {
7223 /* A negative divisor reverses the relational operators. */
7224 code = swap_tree_comparison (code);
7225
7226 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7227 switch (tree_int_cst_sgn (c2))
7228 {
7229 case -1:
7230 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7231 *lo = prod;
7232 break;
7233
7234 case 0:
7235 *hi = fold_negate_const (tmp, type);
7236 *lo = tmp;
7237 break;
7238
7239 case 1:
7240 *neg_overflow = true;
7241 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7242 *hi = prod;
7243 break;
7244
7245 default:
7246 gcc_unreachable ();
7247 }
7248 }
7249
7250 if (code != EQ_EXPR && code != NE_EXPR)
7251 return code;
7252
7253 if (TREE_OVERFLOW (*lo)
7254 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7255 *lo = NULL_TREE;
7256 if (TREE_OVERFLOW (*hi)
7257 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7258 *hi = NULL_TREE;
7259
7260 return code;
7261 }
7262
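/* Editorial aside (not part of fold-const.c): a standalone sketch of the
   range the function above computes in the unsigned case.  For
   X / C1 == C2 with C1 = 10 and C2 = 3 the bounds are lo = C1 * C2 = 30 and
   hi = lo + (C1 - 1) = 39, so the division compare becomes a range test.
   Compile and run separately from GCC.  */

#include <assert.h>

int
main (void)
{
  for (unsigned x = 0; x < 1000; x++)
    {
      /* X / 10 == 3 is equivalent to 30 <= X && X <= 39.  */
      assert ((x / 10 == 3) == (x >= 30 && x <= 39));
      /* The inequality forms use the same bounds: X / 10 > 3 is X > 39.  */
      assert ((x / 10 > 3) == (x > 39));
    }
  return 0;
}
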
7263
7264 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7265 equality/inequality test, then return a simplified form of the test
7266 using a sign test. Otherwise return NULL. TYPE is the desired
7267 result type. */
7268
7269 static tree
7270 fold_single_bit_test_into_sign_test (location_t loc,
7271 enum tree_code code, tree arg0, tree arg1,
7272 tree result_type)
7273 {
7274 /* If this is testing a single bit, we can optimize the test. */
7275 if ((code == NE_EXPR || code == EQ_EXPR)
7276 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7277 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7278 {
7279 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7280 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7281 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7282
7283 if (arg00 != NULL_TREE
7284 /* This is only a win if casting to a signed type is cheap,
7285 i.e. when arg00's type is not a partial mode. */
7286 && type_has_mode_precision_p (TREE_TYPE (arg00)))
7287 {
7288 tree stype = signed_type_for (TREE_TYPE (arg00));
7289 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7290 result_type,
7291 fold_convert_loc (loc, stype, arg00),
7292 build_int_cst (stype, 0));
7293 }
7294 }
7295
7296 return NULL_TREE;
7297 }
7298
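/* Editorial aside (not part of fold-const.c): a standalone sketch of the
   sign-bit rewrite performed above, for a 32-bit two's-complement signed
   type.  Testing the sign bit with a mask is equivalent to a signed
   comparison against zero.  Compile and run separately from GCC.  */

#include <assert.h>
#include <stdint.h>

int
main (void)
{
  const int32_t vals[] = { 0, 1, -1, 42, -12345, INT32_MAX, INT32_MIN };
  for (unsigned i = 0; i < sizeof vals / sizeof vals[0]; i++)
    {
      int32_t a = vals[i];
      /* (A & sign_bit) != 0  becomes  A < 0.  */
      assert (((a & INT32_MIN) != 0) == (a < 0));
      /* (A & sign_bit) == 0  becomes  A >= 0.  */
      assert (((a & INT32_MIN) == 0) == (a >= 0));
    }
  return 0;
}
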
7299 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7300 equality/inequality test, then return a simplified form of
7301 the test using shifts and logical operations. Otherwise return
7302 NULL. TYPE is the desired result type. */
7303
7304 tree
7305 fold_single_bit_test (location_t loc, enum tree_code code,
7306 tree arg0, tree arg1, tree result_type)
7307 {
7308 /* If this is testing a single bit, we can optimize the test. */
7309 if ((code == NE_EXPR || code == EQ_EXPR)
7310 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7311 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7312 {
7313 tree inner = TREE_OPERAND (arg0, 0);
7314 tree type = TREE_TYPE (arg0);
7315 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7316 scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
7317 int ops_unsigned;
7318 tree signed_type, unsigned_type, intermediate_type;
7319 tree tem, one;
7320
7321 /* First, see if we can fold the single bit test into a sign-bit
7322 test. */
7323 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7324 result_type);
7325 if (tem)
7326 return tem;
7327
7328 /* Otherwise we have (A & C) != 0 where C is a single bit,
7329 convert that into ((A >> C2) & 1). Where C2 = log2(C).
7330 Similarly for (A & C) == 0. */
7331
7332 /* If INNER is a right shift of a constant and it plus BITNUM does
7333 not overflow, adjust BITNUM and INNER. */
7334 if (TREE_CODE (inner) == RSHIFT_EXPR
7335 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7336 && bitnum < TYPE_PRECISION (type)
7337 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
7338 TYPE_PRECISION (type) - bitnum))
7339 {
7340 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
7341 inner = TREE_OPERAND (inner, 0);
7342 }
7343
7344 /* If we are going to be able to omit the AND below, we must do our
7345 operations as unsigned. If we must use the AND, we have a choice.
7346 Normally unsigned is faster, but for some machines signed is. */
7347 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
7348 && !flag_syntax_only) ? 0 : 1;
7349
7350 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7351 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7352 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7353 inner = fold_convert_loc (loc, intermediate_type, inner);
7354
7355 if (bitnum != 0)
7356 inner = build2 (RSHIFT_EXPR, intermediate_type,
7357 inner, size_int (bitnum));
7358
7359 one = build_int_cst (intermediate_type, 1);
7360
7361 if (code == EQ_EXPR)
7362 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7363
7364 /* Put the AND last so it can combine with more things. */
7365 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7366
7367 /* Make sure to return the proper type. */
7368 inner = fold_convert_loc (loc, result_type, inner);
7369
7370 return inner;
7371 }
7372 return NULL_TREE;
7373 }
7374
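/* Editorial aside (not part of fold-const.c): a standalone sketch of the
   shift-based rewrite above.  With C = 1 << N, (A & C) != 0 becomes
   (A >> N) & 1, and the == 0 form XORs with 1 before the final AND.
   Compile and run separately from GCC.  */

#include <assert.h>

int
main (void)
{
  for (unsigned a = 0; a < 256; a++)
    for (unsigned n = 0; n < 8; n++)
      {
	unsigned c = 1u << n;
	/* (A & C) != 0  ->  (A >> N) & 1.  */
	assert (((a & c) != 0) == ((a >> n) & 1));
	/* (A & C) == 0  ->  ((A >> N) ^ 1) & 1.  */
	assert (((a & c) == 0) == (((a >> n) ^ 1) & 1));
      }
  return 0;
}
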
7375 /* Test whether it is preferable to swap two operands, ARG0 and
7376 ARG1, for example because ARG0 is an integer constant and ARG1
7377 isn't. */
7378
7379 bool
7380 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7381 {
7382 if (CONSTANT_CLASS_P (arg1))
7383 return 0;
7384 if (CONSTANT_CLASS_P (arg0))
7385 return 1;
7386
7387 STRIP_NOPS (arg0);
7388 STRIP_NOPS (arg1);
7389
7390 if (TREE_CONSTANT (arg1))
7391 return 0;
7392 if (TREE_CONSTANT (arg0))
7393 return 1;
7394
7395 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7396 for commutative and comparison operators. Ensuring a canonical
7397 form allows the optimizers to find additional redundancies without
7398 having to explicitly check for both orderings. */
7399 if (TREE_CODE (arg0) == SSA_NAME
7400 && TREE_CODE (arg1) == SSA_NAME
7401 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7402 return 1;
7403
7404 /* Put SSA_NAMEs last. */
7405 if (TREE_CODE (arg1) == SSA_NAME)
7406 return 0;
7407 if (TREE_CODE (arg0) == SSA_NAME)
7408 return 1;
7409
7410 /* Put variables last. */
7411 if (DECL_P (arg1))
7412 return 0;
7413 if (DECL_P (arg0))
7414 return 1;
7415
7416 return 0;
7417 }
7418
7419
7420 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7421 means A >= Y && A != MAX, but in this case we know that
7422 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7423
7424 static tree
7425 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7426 {
7427 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7428
7429 if (TREE_CODE (bound) == LT_EXPR)
7430 a = TREE_OPERAND (bound, 0);
7431 else if (TREE_CODE (bound) == GT_EXPR)
7432 a = TREE_OPERAND (bound, 1);
7433 else
7434 return NULL_TREE;
7435
7436 typea = TREE_TYPE (a);
7437 if (!INTEGRAL_TYPE_P (typea)
7438 && !POINTER_TYPE_P (typea))
7439 return NULL_TREE;
7440
7441 if (TREE_CODE (ineq) == LT_EXPR)
7442 {
7443 a1 = TREE_OPERAND (ineq, 1);
7444 y = TREE_OPERAND (ineq, 0);
7445 }
7446 else if (TREE_CODE (ineq) == GT_EXPR)
7447 {
7448 a1 = TREE_OPERAND (ineq, 0);
7449 y = TREE_OPERAND (ineq, 1);
7450 }
7451 else
7452 return NULL_TREE;
7453
7454 if (TREE_TYPE (a1) != typea)
7455 return NULL_TREE;
7456
7457 if (POINTER_TYPE_P (typea))
7458 {
7459 /* Convert the pointer types into integer before taking the difference. */
7460 tree ta = fold_convert_loc (loc, ssizetype, a);
7461 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7462 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7463 }
7464 else
7465 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7466
7467 if (!diff || !integer_onep (diff))
7468 return NULL_TREE;
7469
7470 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7471 }
7472
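/* Editorial aside (not part of fold-const.c): a standalone sketch of the
   fold above, modelling 8-bit unsigned wrapping arithmetic explicitly.
   Without the A < X bound, A + 1 > Y and A >= Y differ when A is the maximum
   value (A + 1 wraps to 0); under the bound the two forms always agree.
   Compile and run separately from GCC.  */

#include <assert.h>

int
main (void)
{
  for (unsigned a = 0; a < 256; a++)
    for (unsigned x = 0; x < 256; x++)
      for (unsigned y = 0; y < 256; y++)
	{
	  unsigned a1 = (a + 1) & 0xff;	/* wrapping A + 1 */
	  /* A < X && A + 1 > Y  ->  A < X && A >= Y.  */
	  assert ((a < x && a1 > y) == (a < x && a >= y));
	}
  return 0;
}
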
7473 /* Fold a sum or difference of at least one multiplication.
7474 Returns the folded tree or NULL if no simplification could be made. */
7475
7476 static tree
7477 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7478 tree arg0, tree arg1)
7479 {
7480 tree arg00, arg01, arg10, arg11;
7481 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7482
7483 /* (A * C) +- (B * C) -> (A+-B) * C.
7484 (A * C) +- A -> A * (C+-1).
7485 We are most concerned about the case where C is a constant,
7486 but other combinations show up during loop reduction. Since
7487 it is not difficult, try all four possibilities. */
7488
7489 if (TREE_CODE (arg0) == MULT_EXPR)
7490 {
7491 arg00 = TREE_OPERAND (arg0, 0);
7492 arg01 = TREE_OPERAND (arg0, 1);
7493 }
7494 else if (TREE_CODE (arg0) == INTEGER_CST)
7495 {
7496 arg00 = build_one_cst (type);
7497 arg01 = arg0;
7498 }
7499 else
7500 {
7501 /* We cannot generate constant 1 for fract. */
7502 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7503 return NULL_TREE;
7504 arg00 = arg0;
7505 arg01 = build_one_cst (type);
7506 }
7507 if (TREE_CODE (arg1) == MULT_EXPR)
7508 {
7509 arg10 = TREE_OPERAND (arg1, 0);
7510 arg11 = TREE_OPERAND (arg1, 1);
7511 }
7512 else if (TREE_CODE (arg1) == INTEGER_CST)
7513 {
7514 arg10 = build_one_cst (type);
7515 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7516 the purpose of this canonicalization. */
7517 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7518 && negate_expr_p (arg1)
7519 && code == PLUS_EXPR)
7520 {
7521 arg11 = negate_expr (arg1);
7522 code = MINUS_EXPR;
7523 }
7524 else
7525 arg11 = arg1;
7526 }
7527 else
7528 {
7529 /* We cannot generate constant 1 for fract. */
7530 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7531 return NULL_TREE;
7532 arg10 = arg1;
7533 arg11 = build_one_cst (type);
7534 }
7535 same = NULL_TREE;
7536
7537 /* Prefer factoring a common non-constant. */
7538 if (operand_equal_p (arg00, arg10, 0))
7539 same = arg00, alt0 = arg01, alt1 = arg11;
7540 else if (operand_equal_p (arg01, arg11, 0))
7541 same = arg01, alt0 = arg00, alt1 = arg10;
7542 else if (operand_equal_p (arg00, arg11, 0))
7543 same = arg00, alt0 = arg01, alt1 = arg10;
7544 else if (operand_equal_p (arg01, arg10, 0))
7545 same = arg01, alt0 = arg00, alt1 = arg11;
7546
7547 /* No identical multiplicands; see if we can find a common
7548 power-of-two factor in non-power-of-two multiplies. This
7549 can help in multi-dimensional array access. */
7550 else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7551 {
7552 HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7553 HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7554 HOST_WIDE_INT tmp;
7555 bool swap = false;
7556 tree maybe_same;
7557
7558 /* Move min of absolute values to int11. */
7559 if (absu_hwi (int01) < absu_hwi (int11))
7560 {
7561 tmp = int01, int01 = int11, int11 = tmp;
7562 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7563 maybe_same = arg01;
7564 swap = true;
7565 }
7566 else
7567 maybe_same = arg11;
7568
7569 const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7570 if (factor > 1
7571 && pow2p_hwi (factor)
7572 && (int01 & (factor - 1)) == 0
7573 /* The remainder should not be a constant, otherwise we
7574 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7575 increased the number of multiplications necessary. */
7576 && TREE_CODE (arg10) != INTEGER_CST)
7577 {
7578 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7579 build_int_cst (TREE_TYPE (arg00),
7580 int01 / int11));
7581 alt1 = arg10;
7582 same = maybe_same;
7583 if (swap)
7584 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7585 }
7586 }
7587
7588 if (!same)
7589 return NULL_TREE;
7590
7591 if (! ANY_INTEGRAL_TYPE_P (type)
7592 || TYPE_OVERFLOW_WRAPS (type)
7593 /* We are neither factoring zero nor minus one. */
7594 || TREE_CODE (same) == INTEGER_CST)
7595 return fold_build2_loc (loc, MULT_EXPR, type,
7596 fold_build2_loc (loc, code, type,
7597 fold_convert_loc (loc, type, alt0),
7598 fold_convert_loc (loc, type, alt1)),
7599 fold_convert_loc (loc, type, same));
7600
7601 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7602 same may be minus one and thus the multiplication may overflow. Perform
7603 the sum operation in an unsigned type. */
7604 tree utype = unsigned_type_for (type);
7605 tree tem = fold_build2_loc (loc, code, utype,
7606 fold_convert_loc (loc, utype, alt0),
7607 fold_convert_loc (loc, utype, alt1));
7608 /* If the sum evaluated to a constant that is not -INF the multiplication
7609 cannot overflow. */
7610 if (TREE_CODE (tem) == INTEGER_CST
7611 && (wi::to_wide (tem)
7612 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7613 return fold_build2_loc (loc, MULT_EXPR, type,
7614 fold_convert (type, tem), same);
7615
7616 /* Do not resort to unsigned multiplication because
7617 we lose the no-overflow property of the expression. */
7618 return NULL_TREE;
7619 }
7620
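/* Editorial aside (not part of fold-const.c): a standalone sketch of the
   factorings above, using unsigned (wrapping) arithmetic where the
   transforms are unconditionally valid.  Compile and run separately
   from GCC.  */

#include <assert.h>

int
main (void)
{
  for (unsigned a = 0; a < 64; a++)
    for (unsigned b = 0; b < 64; b++)
      {
	/* (A * C) + (B * C) -> (A + B) * C, here with C = 12.  */
	assert (a * 12 + b * 12 == (a + b) * 12);
	/* (A * C) + A -> A * (C + 1).  */
	assert (a * 12 + a == a * 13);
	/* Common power-of-two factor: A * 12 + B * 4 -> (A * 3 + B) * 4.  */
	assert (a * 12 + b * 4 == (a * 3 + b) * 4);
      }
  return 0;
}
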
7621 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7622 specified by EXPR into the buffer PTR of length LEN bytes.
7623 Return the number of bytes placed in the buffer, or zero
7624 upon failure. */
7625
7626 static int
7627 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7628 {
7629 tree type = TREE_TYPE (expr);
7630 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7631 int byte, offset, word, words;
7632 unsigned char value;
7633
7634 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7635 return 0;
7636 if (off == -1)
7637 off = 0;
7638
7639 if (ptr == NULL)
7640 /* Dry run. */
7641 return MIN (len, total_bytes - off);
7642
7643 words = total_bytes / UNITS_PER_WORD;
7644
7645 for (byte = 0; byte < total_bytes; byte++)
7646 {
7647 int bitpos = byte * BITS_PER_UNIT;
7648 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7649 number of bytes. */
7650 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7651
7652 if (total_bytes > UNITS_PER_WORD)
7653 {
7654 word = byte / UNITS_PER_WORD;
7655 if (WORDS_BIG_ENDIAN)
7656 word = (words - 1) - word;
7657 offset = word * UNITS_PER_WORD;
7658 if (BYTES_BIG_ENDIAN)
7659 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7660 else
7661 offset += byte % UNITS_PER_WORD;
7662 }
7663 else
7664 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7665 if (offset >= off && offset - off < len)
7666 ptr[offset - off] = value;
7667 }
7668 return MIN (len, total_bytes - off);
7669 }
7670
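/* Editorial aside (not part of fold-const.c): a simplified standalone sketch
   of the per-byte loop above for a little-endian target with 8-bit bytes and
   no host word swapping.  Byte N of the encoding holds bits [8*N, 8*N+8) of
   the value.  The helper name encode_u32_le is made up for this
   illustration; compile and run separately from GCC.  */

#include <assert.h>
#include <stdint.h>

static void
encode_u32_le (uint32_t val, unsigned char buf[4])
{
  for (int byte = 0; byte < 4; byte++)
    buf[byte] = (val >> (byte * 8)) & 0xff;	/* extract one byte */
}

int
main (void)
{
  unsigned char buf[4];
  encode_u32_le (0x12345678u, buf);
  /* Little-endian order: least significant byte first.  */
  assert (buf[0] == 0x78 && buf[1] == 0x56
	  && buf[2] == 0x34 && buf[3] == 0x12);
  return 0;
}
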
7671
7672 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7673 specified by EXPR into the buffer PTR of length LEN bytes.
7674 Return the number of bytes placed in the buffer, or zero
7675 upon failure. */
7676
7677 static int
7678 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7679 {
7680 tree type = TREE_TYPE (expr);
7681 scalar_mode mode = SCALAR_TYPE_MODE (type);
7682 int total_bytes = GET_MODE_SIZE (mode);
7683 FIXED_VALUE_TYPE value;
7684 tree i_value, i_type;
7685
7686 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7687 return 0;
7688
7689 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7690
7691 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7692 return 0;
7693
7694 value = TREE_FIXED_CST (expr);
7695 i_value = double_int_to_tree (i_type, value.data);
7696
7697 return native_encode_int (i_value, ptr, len, off);
7698 }
7699
7700
7701 /* Subroutine of native_encode_expr. Encode the REAL_CST
7702 specified by EXPR into the buffer PTR of length LEN bytes.
7703 Return the number of bytes placed in the buffer, or zero
7704 upon failure. */
7705
7706 static int
7707 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7708 {
7709 tree type = TREE_TYPE (expr);
7710 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7711 int byte, offset, word, words, bitpos;
7712 unsigned char value;
7713
7714 /* There are always 32 bits in each long, no matter the size of
7715 the host's long. We handle floating point representations with
7716 up to 192 bits. */
7717 long tmp[6];
7718
7719 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7720 return 0;
7721 if (off == -1)
7722 off = 0;
7723
7724 if (ptr == NULL)
7725 /* Dry run. */
7726 return MIN (len, total_bytes - off);
7727
7728 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7729
7730 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7731
7732 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7733 bitpos += BITS_PER_UNIT)
7734 {
7735 byte = (bitpos / BITS_PER_UNIT) & 3;
7736 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7737
7738 if (UNITS_PER_WORD < 4)
7739 {
7740 word = byte / UNITS_PER_WORD;
7741 if (WORDS_BIG_ENDIAN)
7742 word = (words - 1) - word;
7743 offset = word * UNITS_PER_WORD;
7744 if (BYTES_BIG_ENDIAN)
7745 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7746 else
7747 offset += byte % UNITS_PER_WORD;
7748 }
7749 else
7750 {
7751 offset = byte;
7752 if (BYTES_BIG_ENDIAN)
7753 {
7754 /* Reverse bytes within each long, or within the entire float
7755 if it's smaller than a long (for HFmode). */
7756 offset = MIN (3, total_bytes - 1) - offset;
7757 gcc_assert (offset >= 0);
7758 }
7759 }
7760 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7761 if (offset >= off
7762 && offset - off < len)
7763 ptr[offset - off] = value;
7764 }
7765 return MIN (len, total_bytes - off);
7766 }
7767
7768 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7769 specified by EXPR into the buffer PTR of length LEN bytes.
7770 Return the number of bytes placed in the buffer, or zero
7771 upon failure. */
7772
7773 static int
7774 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7775 {
7776 int rsize, isize;
7777 tree part;
7778
7779 part = TREE_REALPART (expr);
7780 rsize = native_encode_expr (part, ptr, len, off);
7781 if (off == -1 && rsize == 0)
7782 return 0;
7783 part = TREE_IMAGPART (expr);
7784 if (off != -1)
7785 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7786 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7787 len - rsize, off);
7788 if (off == -1 && isize != rsize)
7789 return 0;
7790 return rsize + isize;
7791 }
7792
7793 /* Like native_encode_vector, but only encode the first COUNT elements.
7794 The other arguments are as for native_encode_vector. */
7795
7796 static int
7797 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
7798 int off, unsigned HOST_WIDE_INT count)
7799 {
7800 tree itype = TREE_TYPE (TREE_TYPE (expr));
7801 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
7802 && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
7803 {
7804 /* This is the only case in which elements can be smaller than a byte.
7805 Element 0 is always in the lsb of the containing byte. */
7806 unsigned int elt_bits = TYPE_PRECISION (itype);
7807 int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
7808 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7809 return 0;
7810
7811 if (off == -1)
7812 off = 0;
7813
7814 /* Zero the buffer and then set bits later where necessary. */
7815 int extract_bytes = MIN (len, total_bytes - off);
7816 if (ptr)
7817 memset (ptr, 0, extract_bytes);
7818
7819 unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
7820 unsigned int first_elt = off * elts_per_byte;
7821 unsigned int extract_elts = extract_bytes * elts_per_byte;
7822 for (unsigned int i = 0; i < extract_elts; ++i)
7823 {
7824 tree elt = VECTOR_CST_ELT (expr, first_elt + i);
7825 if (TREE_CODE (elt) != INTEGER_CST)
7826 return 0;
7827
7828 if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
7829 {
7830 unsigned int bit = i * elt_bits;
7831 ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
7832 }
7833 }
7834 return extract_bytes;
7835 }
7836
7837 int offset = 0;
7838 int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7839 for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
7840 {
7841 if (off >= size)
7842 {
7843 off -= size;
7844 continue;
7845 }
7846 tree elem = VECTOR_CST_ELT (expr, i);
7847 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7848 len - offset, off);
7849 if ((off == -1 && res != size) || res == 0)
7850 return 0;
7851 offset += res;
7852 if (offset >= len)
7853 return (off == -1 && i < count - 1) ? 0 : offset;
7854 if (off != -1)
7855 off = 0;
7856 }
7857 return offset;
7858 }
7859
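/* Editorial aside (not part of fold-const.c): a standalone sketch of the
   sub-byte boolean packing above.  Each element occupies one bit and
   element 0 lands in the least significant bit of its containing byte.
   Compile and run separately from GCC.  */

#include <assert.h>

int
main (void)
{
  const int elts[8] = { 1, 0, 1, 1, 0, 0, 0, 1 };
  unsigned char byte = 0;
  for (unsigned i = 0; i < 8; i++)
    if (elts[i])
      byte |= (unsigned char) (1u << i);	/* element i -> bit i */
  /* Bits 0, 2, 3 and 7 set: 1000 1101 == 0x8d.  */
  assert (byte == 0x8d);
  return 0;
}
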
7860 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7861 specified by EXPR into the buffer PTR of length LEN bytes.
7862 Return the number of bytes placed in the buffer, or zero
7863 upon failure. */
7864
7865 static int
7866 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7867 {
7868 unsigned HOST_WIDE_INT count;
7869 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7870 return 0;
7871 return native_encode_vector_part (expr, ptr, len, off, count);
7872 }
7873
7874
7875 /* Subroutine of native_encode_expr. Encode the STRING_CST
7876 specified by EXPR into the buffer PTR of length LEN bytes.
7877 Return the number of bytes placed in the buffer, or zero
7878 upon failure. */
7879
7880 static int
7881 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7882 {
7883 tree type = TREE_TYPE (expr);
7884
7885 /* Wide-char strings are encoded in target byte order, so natively
7886 encoding them is trivial. */
7887 if (BITS_PER_UNIT != CHAR_BIT
7888 || TREE_CODE (type) != ARRAY_TYPE
7889 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7890 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7891 return 0;
7892
7893 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7894 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7895 return 0;
7896 if (off == -1)
7897 off = 0;
7898 len = MIN (total_bytes - off, len);
7899 if (ptr == NULL)
7900 /* Dry run. */;
7901 else
7902 {
7903 int written = 0;
7904 if (off < TREE_STRING_LENGTH (expr))
7905 {
7906 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7907 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7908 }
7909 memset (ptr + written, 0, len - written);
7910 }
7911 return len;
7912 }
7913
7914
7915 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7916 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7917 buffer PTR of length LEN bytes. If PTR is NULL, don't actually store
7918 anything, just do a dry run. If OFF is not -1 then start
7919 the encoding at byte offset OFF and encode at most LEN bytes.
7920 Return the number of bytes placed in the buffer, or zero upon failure. */
7921
7922 int
7923 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7924 {
7925 /* We don't support starting at negative offset and -1 is special. */
7926 if (off < -1)
7927 return 0;
7928
7929 switch (TREE_CODE (expr))
7930 {
7931 case INTEGER_CST:
7932 return native_encode_int (expr, ptr, len, off);
7933
7934 case REAL_CST:
7935 return native_encode_real (expr, ptr, len, off);
7936
7937 case FIXED_CST:
7938 return native_encode_fixed (expr, ptr, len, off);
7939
7940 case COMPLEX_CST:
7941 return native_encode_complex (expr, ptr, len, off);
7942
7943 case VECTOR_CST:
7944 return native_encode_vector (expr, ptr, len, off);
7945
7946 case STRING_CST:
7947 return native_encode_string (expr, ptr, len, off);
7948
7949 default:
7950 return 0;
7951 }
7952 }
7953
7954 /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
7955 NON_LVALUE_EXPRs and nops. */
7956
7957 int
7958 native_encode_initializer (tree init, unsigned char *ptr, int len,
7959 int off)
7960 {
7961 /* We don't support starting at negative offset and -1 is special. */
7962 if (off < -1 || init == NULL_TREE)
7963 return 0;
7964
7965 STRIP_NOPS (init);
7966 switch (TREE_CODE (init))
7967 {
7968 case VIEW_CONVERT_EXPR:
7969 case NON_LVALUE_EXPR:
7970 return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off);
7971 default:
7972 return native_encode_expr (init, ptr, len, off);
7973 case CONSTRUCTOR:
7974 tree type = TREE_TYPE (init);
7975 HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
7976 if (total_bytes < 0)
7977 return 0;
7978 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7979 return 0;
7980 int o = off == -1 ? 0 : off;
7981 if (TREE_CODE (type) == ARRAY_TYPE)
7982 {
7983 tree min_index;
7984 unsigned HOST_WIDE_INT cnt;
7985 HOST_WIDE_INT curpos = 0, fieldsize;
7986 constructor_elt *ce;
7987
7988 if (!TYPE_DOMAIN (type)
7989 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
7990 return 0;
7991
7992 fieldsize = int_size_in_bytes (TREE_TYPE (type));
7993 if (fieldsize <= 0)
7994 return 0;
7995
7996 min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
7997 if (ptr)
7998 memset (ptr, '\0', MIN (total_bytes - off, len));
7999
8000 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
8001 {
8002 tree val = ce->value;
8003 tree index = ce->index;
8004 HOST_WIDE_INT pos = curpos, count = 0;
8005 bool full = false;
8006 if (index && TREE_CODE (index) == RANGE_EXPR)
8007 {
8008 if (TREE_CODE (TREE_OPERAND (index, 0)) != INTEGER_CST
8009 || TREE_CODE (TREE_OPERAND (index, 1)) != INTEGER_CST)
8010 return 0;
8011 offset_int wpos
8012 = wi::sext (wi::to_offset (TREE_OPERAND (index, 0))
8013 - wi::to_offset (min_index),
8014 TYPE_PRECISION (sizetype));
8015 wpos *= fieldsize;
8016 if (!wi::fits_shwi_p (wpos))
8017 return 0;
8018 pos = wpos.to_shwi ();
8019 offset_int wcount
8020 = wi::sext (wi::to_offset (TREE_OPERAND (index, 1))
8021 - wi::to_offset (TREE_OPERAND (index, 0)),
8022 TYPE_PRECISION (sizetype));
8023 if (!wi::fits_shwi_p (wcount))
8024 return 0;
8025 count = wcount.to_shwi ();
8026 }
8027 else if (index)
8028 {
8029 if (TREE_CODE (index) != INTEGER_CST)
8030 return 0;
8031 offset_int wpos
8032 = wi::sext (wi::to_offset (index)
8033 - wi::to_offset (min_index),
8034 TYPE_PRECISION (sizetype));
8035 wpos *= fieldsize;
8036 if (!wi::fits_shwi_p (wpos))
8037 return 0;
8038 pos = wpos.to_shwi ();
8039 }
8040
8041 curpos = pos;
8042 if (val)
8043 do
8044 {
8045 if (off == -1
8046 || (curpos >= off
8047 && (curpos + fieldsize
8048 <= (HOST_WIDE_INT) off + len)))
8049 {
8050 if (full)
8051 {
8052 if (ptr)
8053 memcpy (ptr + (curpos - o), ptr + (pos - o),
8054 fieldsize);
8055 }
8056 else if (!native_encode_initializer (val,
8057 ptr
8058 ? ptr + curpos - o
8059 : NULL,
8060 fieldsize,
8061 off == -1 ? -1
8062 : 0))
8063 return 0;
8064 else
8065 {
8066 full = true;
8067 pos = curpos;
8068 }
8069 }
8070 else if (curpos + fieldsize > off
8071 && curpos < (HOST_WIDE_INT) off + len)
8072 {
8073 /* Partial overlap. */
8074 unsigned char *p = NULL;
8075 int no = 0;
8076 int l;
8077 if (curpos >= off)
8078 {
8079 if (ptr)
8080 p = ptr + curpos - off;
8081 l = MIN ((HOST_WIDE_INT) off + len - curpos,
8082 fieldsize);
8083 }
8084 else
8085 {
8086 p = ptr;
8087 no = off - curpos;
8088 l = len;
8089 }
8090 if (!native_encode_initializer (val, p, l, no))
8091 return 0;
8092 }
8093 curpos += fieldsize;
8094 }
8095 while (count-- != 0);
8096 }
8097 return MIN (total_bytes - off, len);
8098 }
8099 else if (TREE_CODE (type) == RECORD_TYPE
8100 || TREE_CODE (type) == UNION_TYPE)
8101 {
8102 unsigned HOST_WIDE_INT cnt;
8103 constructor_elt *ce;
8104
8105 if (ptr != NULL)
8106 memset (ptr, '\0', MIN (total_bytes - off, len));
8107 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
8108 {
8109 tree field = ce->index;
8110 tree val = ce->value;
8111 HOST_WIDE_INT pos, fieldsize;
8112
8113 if (field == NULL_TREE)
8114 return 0;
8115
8116 pos = int_byte_position (field);
8117 if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8118 continue;
8119
8120 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8121 && TYPE_DOMAIN (TREE_TYPE (field))
8122 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8123 return 0;
8124 if (DECL_SIZE_UNIT (field) == NULL_TREE
8125 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8126 return 0;
8127 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8128 if (fieldsize == 0)
8129 continue;
8130
8131 if (off != -1 && pos + fieldsize <= off)
8132 continue;
8133
8134 if (DECL_BIT_FIELD (field))
8135 return 0;
8136
8137 if (val == NULL_TREE)
8138 continue;
8139
8140 if (off == -1
8141 || (pos >= off
8142 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8143 {
8144 if (!native_encode_initializer (val, ptr ? ptr + pos - o
8145 : NULL,
8146 fieldsize,
8147 off == -1 ? -1 : 0))
8148 return 0;
8149 }
8150 else
8151 {
8152 /* Partial overlap. */
8153 unsigned char *p = NULL;
8154 int no = 0;
8155 int l;
8156 if (pos >= off)
8157 {
8158 if (ptr)
8159 p = ptr + pos - off;
8160 l = MIN ((HOST_WIDE_INT) off + len - pos,
8161 fieldsize);
8162 }
8163 else
8164 {
8165 p = ptr;
8166 no = off - pos;
8167 l = len;
8168 }
8169 if (!native_encode_initializer (val, p, l, no))
8170 return 0;
8171 }
8172 }
8173 return MIN (total_bytes - off, len);
8174 }
8175 return 0;
8176 }
8177 }
8178
8179
8180 /* Subroutine of native_interpret_expr. Interpret the contents of
8181 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8182 If the buffer cannot be interpreted, return NULL_TREE. */
8183
8184 static tree
8185 native_interpret_int (tree type, const unsigned char *ptr, int len)
8186 {
8187 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8188
8189 if (total_bytes > len
8190 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8191 return NULL_TREE;
8192
8193 wide_int result = wi::from_buffer (ptr, total_bytes);
8194
8195 return wide_int_to_tree (type, result);
8196 }
8197
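/* Editorial aside (not part of fold-const.c): the inverse of the
   little-endian encoding sketch given after native_encode_int -- read a
   32-bit value back out of a native-order byte buffer.  The helper name
   interpret_u32_le is made up for this illustration; compile and run
   separately from GCC.  */

#include <assert.h>
#include <stdint.h>

static uint32_t
interpret_u32_le (const unsigned char buf[4])
{
  uint32_t val = 0;
  for (int byte = 3; byte >= 0; byte--)
    val = (val << 8) | buf[byte];	/* most significant byte first */
  return val;
}

int
main (void)
{
  const unsigned char buf[4] = { 0x78, 0x56, 0x34, 0x12 };
  assert (interpret_u32_le (buf) == 0x12345678u);
  return 0;
}
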
8198
8199 /* Subroutine of native_interpret_expr. Interpret the contents of
8200 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8201 If the buffer cannot be interpreted, return NULL_TREE. */
8202
8203 static tree
8204 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8205 {
8206 scalar_mode mode = SCALAR_TYPE_MODE (type);
8207 int total_bytes = GET_MODE_SIZE (mode);
8208 double_int result;
8209 FIXED_VALUE_TYPE fixed_value;
8210
8211 if (total_bytes > len
8212 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8213 return NULL_TREE;
8214
8215 result = double_int::from_buffer (ptr, total_bytes);
8216 fixed_value = fixed_from_double_int (result, mode);
8217
8218 return build_fixed (type, fixed_value);
8219 }
8220
8221
8222 /* Subroutine of native_interpret_expr. Interpret the contents of
8223 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8224 If the buffer cannot be interpreted, return NULL_TREE. */
8225
8226 static tree
8227 native_interpret_real (tree type, const unsigned char *ptr, int len)
8228 {
8229 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8230 int total_bytes = GET_MODE_SIZE (mode);
8231 unsigned char value;
8232 /* There are always 32 bits in each long, no matter the size of
8233 the host's long. We handle floating point representations with
8234 up to 192 bits. */
8235 REAL_VALUE_TYPE r;
8236 long tmp[6];
8237
8238 if (total_bytes > len || total_bytes > 24)
8239 return NULL_TREE;
8240 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8241
8242 memset (tmp, 0, sizeof (tmp));
8243 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8244 bitpos += BITS_PER_UNIT)
8245 {
8246 /* Both OFFSET and BYTE index within a long;
8247 bitpos indexes the whole float. */
8248 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8249 if (UNITS_PER_WORD < 4)
8250 {
8251 int word = byte / UNITS_PER_WORD;
8252 if (WORDS_BIG_ENDIAN)
8253 word = (words - 1) - word;
8254 offset = word * UNITS_PER_WORD;
8255 if (BYTES_BIG_ENDIAN)
8256 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8257 else
8258 offset += byte % UNITS_PER_WORD;
8259 }
8260 else
8261 {
8262 offset = byte;
8263 if (BYTES_BIG_ENDIAN)
8264 {
8265 /* Reverse bytes within each long, or within the entire float
8266 if it's smaller than a long (for HFmode). */
8267 offset = MIN (3, total_bytes - 1) - offset;
8268 gcc_assert (offset >= 0);
8269 }
8270 }
8271 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8272
8273 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8274 }
8275
8276 real_from_target (&r, tmp, mode);
8277 tree ret = build_real (type, r);
8278 if (MODE_COMPOSITE_P (mode))
8279 {
8280 /* For floating point values in composite modes, punt if this folding
8281 doesn't preserve bit representation. As the mode doesn't have fixed
8282 precision while GCC pretends it does, there could be valid values that
8283 GCC can't really represent accurately. See PR95450. */
8284 unsigned char buf[24];
8285 if (native_encode_expr (ret, buf, total_bytes, 0) != total_bytes
8286 || memcmp (ptr, buf, total_bytes) != 0)
8287 ret = NULL_TREE;
8288 }
8289 return ret;
8290 }
8291
8292
8293 /* Subroutine of native_interpret_expr. Interpret the contents of
8294 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8295 If the buffer cannot be interpreted, return NULL_TREE. */
8296
8297 static tree
8298 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8299 {
8300 tree etype, rpart, ipart;
8301 int size;
8302
8303 etype = TREE_TYPE (type);
8304 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8305 if (size * 2 > len)
8306 return NULL_TREE;
8307 rpart = native_interpret_expr (etype, ptr, size);
8308 if (!rpart)
8309 return NULL_TREE;
8310 ipart = native_interpret_expr (etype, ptr+size, size);
8311 if (!ipart)
8312 return NULL_TREE;
8313 return build_complex (type, rpart, ipart);
8314 }
8315
8316 /* Read a vector of type TYPE from the target memory image given by BYTES,
8317 which contains LEN bytes. The vector is known to be encodable using
8318 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8319
8320 Return the vector on success, otherwise return null. */
8321
8322 static tree
8323 native_interpret_vector_part (tree type, const unsigned char *bytes,
8324 unsigned int len, unsigned int npatterns,
8325 unsigned int nelts_per_pattern)
8326 {
8327 tree elt_type = TREE_TYPE (type);
8328 if (VECTOR_BOOLEAN_TYPE_P (type)
8329 && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8330 {
8331 /* This is the only case in which elements can be smaller than a byte.
8332 Element 0 is always in the lsb of the containing byte. */
8333 unsigned int elt_bits = TYPE_PRECISION (elt_type);
8334 if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8335 return NULL_TREE;
8336
8337 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8338 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8339 {
8340 unsigned int bit_index = i * elt_bits;
8341 unsigned int byte_index = bit_index / BITS_PER_UNIT;
8342 unsigned int lsb = bit_index % BITS_PER_UNIT;
8343 builder.quick_push (bytes[byte_index] & (1 << lsb)
8344 ? build_all_ones_cst (elt_type)
8345 : build_zero_cst (elt_type));
8346 }
8347 return builder.build ();
8348 }
8349
8350 unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8351 if (elt_bytes * npatterns * nelts_per_pattern > len)
8352 return NULL_TREE;
8353
8354 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8355 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8356 {
8357 tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8358 if (!elt)
8359 return NULL_TREE;
8360 builder.quick_push (elt);
8361 bytes += elt_bytes;
8362 }
8363 return builder.build ();
8364 }
8365
8366 /* Subroutine of native_interpret_expr. Interpret the contents of
8367 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8368 If the buffer cannot be interpreted, return NULL_TREE. */
8369
8370 static tree
8371 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8372 {
8373 tree etype;
8374 unsigned int size;
8375 unsigned HOST_WIDE_INT count;
8376
8377 etype = TREE_TYPE (type);
8378 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8379 if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
8380 || size * count > len)
8381 return NULL_TREE;
8382
8383 return native_interpret_vector_part (type, ptr, len, count, 1);
8384 }
8385
8386
8387 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8388 the buffer PTR of length LEN as a constant of type TYPE. For
8389 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8390 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8391 return NULL_TREE. */
8392
8393 tree
8394 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8395 {
8396 switch (TREE_CODE (type))
8397 {
8398 case INTEGER_TYPE:
8399 case ENUMERAL_TYPE:
8400 case BOOLEAN_TYPE:
8401 case POINTER_TYPE:
8402 case REFERENCE_TYPE:
8403 return native_interpret_int (type, ptr, len);
8404
8405 case REAL_TYPE:
8406 return native_interpret_real (type, ptr, len);
8407
8408 case FIXED_POINT_TYPE:
8409 return native_interpret_fixed (type, ptr, len);
8410
8411 case COMPLEX_TYPE:
8412 return native_interpret_complex (type, ptr, len);
8413
8414 case VECTOR_TYPE:
8415 return native_interpret_vector (type, ptr, len);
8416
8417 default:
8418 return NULL_TREE;
8419 }
8420 }
8421
8422 /* Returns true if we can interpret the contents of a native encoding
8423 as TYPE. */
8424
8425 bool
8426 can_native_interpret_type_p (tree type)
8427 {
8428 switch (TREE_CODE (type))
8429 {
8430 case INTEGER_TYPE:
8431 case ENUMERAL_TYPE:
8432 case BOOLEAN_TYPE:
8433 case POINTER_TYPE:
8434 case REFERENCE_TYPE:
8435 case FIXED_POINT_TYPE:
8436 case REAL_TYPE:
8437 case COMPLEX_TYPE:
8438 case VECTOR_TYPE:
8439 return true;
8440 default:
8441 return false;
8442 }
8443 }
8444
8445 /* Routines for manipulation of native_encode_expr encoded data if the encoded
8446 or extracted constant positions and/or sizes aren't byte aligned. */
8447
8448 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
8449 bits between adjacent elements. AMNT should be within
8450 [0, BITS_PER_UNIT).
8451 Example, AMNT = 2:
8452 00011111|11100000 << 2 = 01111111|10000000
8453 PTR[1] | PTR[0] PTR[1] | PTR[0]. */
8454
8455 void
8456 shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
8457 unsigned int amnt)
8458 {
8459 if (amnt == 0)
8460 return;
8461
8462 unsigned char carry_over = 0U;
8463 unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
8464 unsigned char clear_mask = (~0U) << amnt;
8465
8466 for (unsigned int i = 0; i < sz; i++)
8467 {
8468 unsigned prev_carry_over = carry_over;
8469 carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
8470
8471 ptr[i] <<= amnt;
8472 if (i != 0)
8473 {
8474 ptr[i] &= clear_mask;
8475 ptr[i] |= prev_carry_over;
8476 }
8477 }
8478 }
8479
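/* Editorial aside (not part of fold-const.c): a standalone 8-bit-byte
   restatement of the shift above, reproducing the worked example from the
   comment.  The helper name shl_bytes is made up for this illustration;
   compile and run separately from GCC.  */

#include <assert.h>

static void
shl_bytes (unsigned char *ptr, unsigned int sz, unsigned int amnt)
{
  unsigned char carry = 0;
  for (unsigned int i = 0; i < sz; i++)
    {
      /* Bits shifted out of ptr[i] become the low bits of ptr[i + 1].  */
      unsigned char next_carry = (unsigned char) (ptr[i] >> (8 - amnt));
      ptr[i] = (unsigned char) ((ptr[i] << amnt) | carry);
      carry = next_carry;
    }
}

int
main (void)
{
  /* 00011111|11100000 << 2 == 01111111|10000000 (PTR[1]|PTR[0]).  */
  unsigned char buf[2] = { 0xe0, 0x1f };
  shl_bytes (buf, 2, 2);
  assert (buf[0] == 0x80 && buf[1] == 0x7f);
  return 0;
}
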
8480 /* Like shift_bytes_in_array_left but for big-endian.
8481 Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
8482 bits between adjacent elements. AMNT should be within
8483 [0, BITS_PER_UNIT).
8484 Example, AMNT = 2:
8485 00011111|11100000 >> 2 = 00000111|11111000
8486 PTR[0] | PTR[1] PTR[0] | PTR[1]. */
8487
8488 void
8489 shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
8490 unsigned int amnt)
8491 {
8492 if (amnt == 0)
8493 return;
8494
8495 unsigned char carry_over = 0U;
8496 unsigned char carry_mask = ~(~0U << amnt);
8497
8498 for (unsigned int i = 0; i < sz; i++)
8499 {
8500 unsigned prev_carry_over = carry_over;
8501 carry_over = ptr[i] & carry_mask;
8502
8503 carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
8504 ptr[i] >>= amnt;
8505 ptr[i] |= prev_carry_over;
8506 }
8507 }
8508
8509 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
8510 directly on the VECTOR_CST encoding, in a way that works for variable-
8511 length vectors. Return the resulting VECTOR_CST on success or null
8512 on failure. */
8513
8514 static tree
8515 fold_view_convert_vector_encoding (tree type, tree expr)
8516 {
8517 tree expr_type = TREE_TYPE (expr);
8518 poly_uint64 type_bits, expr_bits;
8519 if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
8520 || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
8521 return NULL_TREE;
8522
8523 poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
8524 poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
8525 unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
8526 unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
8527
8528 /* We can only preserve the semantics of a stepped pattern if the new
8529 vector element is an integer of the same size. */
8530 if (VECTOR_CST_STEPPED_P (expr)
8531 && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
8532 return NULL_TREE;
8533
8534 /* The number of bits needed to encode one element from every pattern
8535 of the original vector. */
8536 unsigned int expr_sequence_bits
8537 = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
8538
8539 /* The number of bits needed to encode one element from every pattern
8540 of the result. */
8541 unsigned int type_sequence_bits
8542 = least_common_multiple (expr_sequence_bits, type_elt_bits);
8543
8544 /* Don't try to read more bytes than are available, which can happen
8545 for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
8546 The general VIEW_CONVERT handling can cope with that case, so there's
8547 no point complicating things here. */
8548 unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
8549 unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
8550 BITS_PER_UNIT);
8551 unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
8552 if (known_gt (buffer_bits, expr_bits))
8553 return NULL_TREE;
8554
8555 /* Get enough bytes of EXPR to form the new encoding. */
8556 auto_vec<unsigned char, 128> buffer (buffer_bytes);
8557 buffer.quick_grow (buffer_bytes);
8558 if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
8559 buffer_bits / expr_elt_bits)
8560 != (int) buffer_bytes)
8561 return NULL_TREE;
8562
8563 /* Reencode the bytes as TYPE. */
8564 unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
8565 return native_interpret_vector_part (type, &buffer[0], buffer.length (),
8566 type_npatterns, nelts_per_pattern);
8567 }
8568
8569 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
8570 TYPE at compile-time. If we're unable to perform the conversion
8571 return NULL_TREE. */
8572
8573 static tree
8574 fold_view_convert_expr (tree type, tree expr)
8575 {
8576 /* We support up to 512-bit values (for V8DFmode). */
8577 unsigned char buffer[64];
8578 int len;
8579
8580 /* Check that the host and target are sane. */
8581 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
8582 return NULL_TREE;
8583
8584 if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
8585 if (tree res = fold_view_convert_vector_encoding (type, expr))
8586 return res;
8587
8588 len = native_encode_expr (expr, buffer, sizeof (buffer));
8589 if (len == 0)
8590 return NULL_TREE;
8591
8592 return native_interpret_expr (type, buffer, len);
8593 }
8594
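/* Editorial aside (not part of fold-const.c): the run-time analogue of the
   compile-time VIEW_CONVERT fold above is reinterpreting the bytes of one
   type as another.  Assumes 32-bit IEEE float; compile and run separately
   from GCC.  */

#include <assert.h>
#include <stdint.h>
#include <string.h>

int
main (void)
{
  float f = 1.0f;
  uint32_t bits;
  /* Copy the object representation; 1.0f is 0x3f800000 in IEEE binary32,
     independent of byte order since both objects use the native order.  */
  memcpy (&bits, &f, sizeof bits);
  assert (bits == 0x3f800000u);
  return 0;
}
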
8595 /* Build an expression for the address of T. Folds away INDIRECT_REF
8596 to avoid confusing the gimplify process. */
8597
8598 tree
8599 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
8600 {
8601 /* The size of the object is not relevant when talking about its address. */
8602 if (TREE_CODE (t) == WITH_SIZE_EXPR)
8603 t = TREE_OPERAND (t, 0);
8604
8605 if (TREE_CODE (t) == INDIRECT_REF)
8606 {
8607 t = TREE_OPERAND (t, 0);
8608
8609 if (TREE_TYPE (t) != ptrtype)
8610 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
8611 }
8612 else if (TREE_CODE (t) == MEM_REF
8613 && integer_zerop (TREE_OPERAND (t, 1)))
8614 {
8615 t = TREE_OPERAND (t, 0);
8616
8617 if (TREE_TYPE (t) != ptrtype)
8618 t = fold_convert_loc (loc, ptrtype, t);
8619 }
8620 else if (TREE_CODE (t) == MEM_REF
8621 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
8622 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
8623 TREE_OPERAND (t, 0),
8624 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
8625 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
8626 {
8627 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
8628
8629 if (TREE_TYPE (t) != ptrtype)
8630 t = fold_convert_loc (loc, ptrtype, t);
8631 }
8632 else
8633 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
8634
8635 return t;
8636 }
8637
8638 /* Build an expression for the address of T. */
8639
8640 tree
8641 build_fold_addr_expr_loc (location_t loc, tree t)
8642 {
8643 tree ptrtype = build_pointer_type (TREE_TYPE (t));
8644
8645 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
8646 }
8647
8648 /* Fold a unary expression of code CODE and type TYPE with operand
8649 OP0. Return the folded expression if folding is successful.
8650 Otherwise, return NULL_TREE. */
8651
8652 tree
8653 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
8654 {
8655 tree tem;
8656 tree arg0;
8657 enum tree_code_class kind = TREE_CODE_CLASS (code);
8658
8659 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8660 && TREE_CODE_LENGTH (code) == 1);
8661
8662 arg0 = op0;
8663 if (arg0)
8664 {
8665 if (CONVERT_EXPR_CODE_P (code)
8666 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
8667 {
8668 /* Don't use STRIP_NOPS, because signedness of argument type
8669 matters. */
8670 STRIP_SIGN_NOPS (arg0);
8671 }
8672 else
8673 {
8674 /* Strip any conversions that don't change the mode. This
8675 is safe for every expression, except for a comparison
8676 expression because its signedness is derived from its
8677 operands.
8678
8679 Note that this is done as an internal manipulation within
8680 the constant folder, in order to find the simplest
8681 representation of the arguments so that their form can be
8682 studied. In any cases, the appropriate type conversions
8683 should be put back in the tree that will get out of the
8684 constant folder. */
8685 STRIP_NOPS (arg0);
8686 }
8687
8688 if (CONSTANT_CLASS_P (arg0))
8689 {
8690 tree tem = const_unop (code, type, arg0);
8691 if (tem)
8692 {
8693 if (TREE_TYPE (tem) != type)
8694 tem = fold_convert_loc (loc, type, tem);
8695 return tem;
8696 }
8697 }
8698 }
8699
8700 tem = generic_simplify (loc, code, type, op0);
8701 if (tem)
8702 return tem;
8703
8704 if (TREE_CODE_CLASS (code) == tcc_unary)
8705 {
8706 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8707 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8708 fold_build1_loc (loc, code, type,
8709 fold_convert_loc (loc, TREE_TYPE (op0),
8710 TREE_OPERAND (arg0, 1))));
8711 else if (TREE_CODE (arg0) == COND_EXPR)
8712 {
8713 tree arg01 = TREE_OPERAND (arg0, 1);
8714 tree arg02 = TREE_OPERAND (arg0, 2);
8715 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8716 arg01 = fold_build1_loc (loc, code, type,
8717 fold_convert_loc (loc,
8718 TREE_TYPE (op0), arg01));
8719 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8720 arg02 = fold_build1_loc (loc, code, type,
8721 fold_convert_loc (loc,
8722 TREE_TYPE (op0), arg02));
8723 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
8724 arg01, arg02);
8725
8726 /* If this was a conversion, and all we did was to move into
8727 inside the COND_EXPR, bring it back out. But leave it if
8728 it is a conversion from integer to integer and the
8729 result precision is no wider than a word since such a
8730 conversion is cheap and may be optimized away by combine,
8731 while it couldn't if it were outside the COND_EXPR. Then return
8732 so we don't get into an infinite recursion loop taking the
8733 conversion out and then back in. */
8734
8735 if ((CONVERT_EXPR_CODE_P (code)
8736 || code == NON_LVALUE_EXPR)
8737 && TREE_CODE (tem) == COND_EXPR
8738 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8739 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8740 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8741 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8742 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8743 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8744 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8745 && (INTEGRAL_TYPE_P
8746 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8747 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8748 || flag_syntax_only))
8749 tem = build1_loc (loc, code, type,
8750 build3 (COND_EXPR,
8751 TREE_TYPE (TREE_OPERAND
8752 (TREE_OPERAND (tem, 1), 0)),
8753 TREE_OPERAND (tem, 0),
8754 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8755 TREE_OPERAND (TREE_OPERAND (tem, 2),
8756 0)));
8757 return tem;
8758 }
8759 }
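/* Editor's illustration, not part of the original source: for a conversion
   distributed over a conditional, something like (long) (c ? i : j) is first
   rewritten above as c ? (long) i : (long) j, and the conversion is then
   hoisted back out again unless it is an integer-to-integer conversion no
   wider than a word, which is cheap enough to leave on the arms.  */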
8760
8761 switch (code)
8762 {
8763 case NON_LVALUE_EXPR:
8764 if (!maybe_lvalue_p (op0))
8765 return fold_convert_loc (loc, type, op0);
8766 return NULL_TREE;
8767
8768 CASE_CONVERT:
8769 case FLOAT_EXPR:
8770 case FIX_TRUNC_EXPR:
8771 if (COMPARISON_CLASS_P (op0))
8772 {
8773 /* If we have (type) (a CMP b) and type is an integral type, return
8774 new expression involving the new type. Canonicalize
8775 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
8776 non-integral type.
8777 Do not fold the result, as that would not simplify further;
8778 folding it again would also result in recursion. */
8779 if (TREE_CODE (type) == BOOLEAN_TYPE)
8780 return build2_loc (loc, TREE_CODE (op0), type,
8781 TREE_OPERAND (op0, 0),
8782 TREE_OPERAND (op0, 1));
8783 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
8784 && TREE_CODE (type) != VECTOR_TYPE)
8785 return build3_loc (loc, COND_EXPR, type, op0,
8786 constant_boolean_node (true, type),
8787 constant_boolean_node (false, type));
8788 }
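/* Editor's illustration, not part of the original source: with int a, b,
   a cast such as (double) (a < b) is canonicalized here to
   (a < b) ? 1.0 : 0.0, while a cast to a boolean type simply rebuilds the
   comparison with the boolean result type.  */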
8789
8790 /* Handle (T *)&A.B.C for A being of type T and B and C
8791 living at offset zero. This occurs frequently in
8792 C++ upcasting and then accessing the base. */
8793 if (TREE_CODE (op0) == ADDR_EXPR
8794 && POINTER_TYPE_P (type)
8795 && handled_component_p (TREE_OPERAND (op0, 0)))
8796 {
8797 poly_int64 bitsize, bitpos;
8798 tree offset;
8799 machine_mode mode;
8800 int unsignedp, reversep, volatilep;
8801 tree base
8802 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
8803 &offset, &mode, &unsignedp, &reversep,
8804 &volatilep);
8805 /* If the reference was to a (constant) zero offset, we can use
8806 the address of the base if it has the same base type
8807 as the result type and the pointer type is unqualified. */
8808 if (!offset
8809 && known_eq (bitpos, 0)
8810 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8811 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8812 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8813 return fold_convert_loc (loc, type,
8814 build_fold_addr_expr_loc (loc, base));
8815 }
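/* Editor's illustration, not part of the original source: given
       struct B { int i; };
       struct D { struct B b; } d;
   the cast (struct D *) &d.b refers to offset zero of d, so it folds to
   &d converted to the (unqualified) pointer type.  */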
8816
8817 if (TREE_CODE (op0) == MODIFY_EXPR
8818 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8819 /* Detect assigning a bitfield. */
8820 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8821 && DECL_BIT_FIELD
8822 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8823 {
8824 /* Don't leave an assignment inside a conversion
8825 unless assigning a bitfield. */
8826 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8827 /* First do the assignment, then return converted constant. */
8828 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8829 TREE_NO_WARNING (tem) = 1;
8830 TREE_USED (tem) = 1;
8831 return tem;
8832 }
8833
8834 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8835 constant (if x has signed type, the sign bit cannot be set
8836 in c). This folds extension into the BIT_AND_EXPR.
8837 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8838 very likely don't have maximal range for their precision and this
8839 transformation effectively doesn't preserve non-maximal ranges. */
8840 if (TREE_CODE (type) == INTEGER_TYPE
8841 && TREE_CODE (op0) == BIT_AND_EXPR
8842 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8843 {
8844 tree and_expr = op0;
8845 tree and0 = TREE_OPERAND (and_expr, 0);
8846 tree and1 = TREE_OPERAND (and_expr, 1);
8847 int change = 0;
8848
8849 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8850 || (TYPE_PRECISION (type)
8851 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8852 change = 1;
8853 else if (TYPE_PRECISION (TREE_TYPE (and1))
8854 <= HOST_BITS_PER_WIDE_INT
8855 && tree_fits_uhwi_p (and1))
8856 {
8857 unsigned HOST_WIDE_INT cst;
8858
8859 cst = tree_to_uhwi (and1);
8860 cst &= HOST_WIDE_INT_M1U
8861 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8862 change = (cst == 0);
8863 if (change
8864 && !flag_syntax_only
8865 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
8866 == ZERO_EXTEND))
8867 {
8868 tree uns = unsigned_type_for (TREE_TYPE (and0));
8869 and0 = fold_convert_loc (loc, uns, and0);
8870 and1 = fold_convert_loc (loc, uns, and1);
8871 }
8872 }
8873 if (change)
8874 {
8875 tem = force_fit_type (type, wi::to_widest (and1), 0,
8876 TREE_OVERFLOW (and1));
8877 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8878 fold_convert_loc (loc, type, and0), tem);
8879 }
8880 }
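/* Editor's illustration, not part of the original source: with
   unsigned short s, the widening cast in (unsigned int) (s & 0x7f) can be
   folded to (unsigned int) s & 0x7f; the conversion is pushed onto the
   operand of the AND and the mask constant is refit to the wider type.  */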
8881
8882 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
8883 cast (T1)X will fold away. We assume that this happens when X itself
8884 is a cast. */
8885 if (POINTER_TYPE_P (type)
8886 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8887 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
8888 {
8889 tree arg00 = TREE_OPERAND (arg0, 0);
8890 tree arg01 = TREE_OPERAND (arg0, 1);
8891
8892 return fold_build_pointer_plus_loc
8893 (loc, fold_convert_loc (loc, type, arg00), arg01);
8894 }
8895
8896 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8897 of the same precision, and X has an integer type not narrower than
8898 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8899 if (INTEGRAL_TYPE_P (type)
8900 && TREE_CODE (op0) == BIT_NOT_EXPR
8901 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8902 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8903 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8904 {
8905 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8906 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8907 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8908 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8909 fold_convert_loc (loc, type, tem));
8910 }
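/* Editor's illustration, not part of the original source: for int i, the
   expression (int) ~(unsigned) i involves two integral types of equal
   precision, so it folds to ~i and the intermediate cast to unsigned is
   dropped.  */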
8911
8912 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8913 type of X and Y (integer types only). */
8914 if (INTEGRAL_TYPE_P (type)
8915 && TREE_CODE (op0) == MULT_EXPR
8916 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8917 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8918 {
8919 /* Be careful not to introduce new overflows. */
8920 tree mult_type;
8921 if (TYPE_OVERFLOW_WRAPS (type))
8922 mult_type = type;
8923 else
8924 mult_type = unsigned_type_for (type);
8925
8926 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8927 {
8928 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8929 fold_convert_loc (loc, mult_type,
8930 TREE_OPERAND (op0, 0)),
8931 fold_convert_loc (loc, mult_type,
8932 TREE_OPERAND (op0, 1)));
8933 return fold_convert_loc (loc, type, tem);
8934 }
8935 }
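/* Editor's illustration, not part of the original source: for
   long long a, b, the narrowing cast (int) (a * b) can be rewritten as
   (int) ((unsigned int) a * (unsigned int) b); when int does not wrap,
   the product is formed in the unsigned variant of the narrow type so
   that no new signed overflow is introduced.  */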
8936
8937 return NULL_TREE;
8938
8939 case VIEW_CONVERT_EXPR:
8940 if (TREE_CODE (op0) == MEM_REF)
8941 {
8942 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
8943 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
8944 tem = fold_build2_loc (loc, MEM_REF, type,
8945 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8946 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
8947 return tem;
8948 }
8949
8950 return NULL_TREE;
8951
8952 case NEGATE_EXPR:
8953 tem = fold_negate_expr (loc, arg0);
8954 if (tem)
8955 return fold_convert_loc (loc, type, tem);
8956 return NULL_TREE;
8957
8958 case ABS_EXPR:
8959 /* Convert fabs((double)float) into (double)fabsf(float). */
8960 if (TREE_CODE (arg0) == NOP_EXPR
8961 && TREE_CODE (type) == REAL_TYPE)
8962 {
8963 tree targ0 = strip_float_extensions (arg0);
8964 if (targ0 != arg0)
8965 return fold_convert_loc (loc, type,
8966 fold_build1_loc (loc, ABS_EXPR,
8967 TREE_TYPE (targ0),
8968 targ0));
8969 }
8970 return NULL_TREE;
8971
8972 case BIT_NOT_EXPR:
8973 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8974 if (TREE_CODE (arg0) == BIT_XOR_EXPR
8975 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8976 fold_convert_loc (loc, type,
8977 TREE_OPERAND (arg0, 0)))))
8978 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8979 fold_convert_loc (loc, type,
8980 TREE_OPERAND (arg0, 1)));
8981 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8982 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8983 fold_convert_loc (loc, type,
8984 TREE_OPERAND (arg0, 1)))))
8985 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8986 fold_convert_loc (loc, type,
8987 TREE_OPERAND (arg0, 0)), tem);
8988
8989 return NULL_TREE;
8990
8991 case TRUTH_NOT_EXPR:
8992 /* Note that the operand of this must be an int
8993 and its values must be 0 or 1.
8994 ("true" is a fixed value perhaps depending on the language,
8995 but we don't handle values other than 1 correctly yet.) */
8996 tem = fold_truth_not_expr (loc, arg0);
8997 if (!tem)
8998 return NULL_TREE;
8999 return fold_convert_loc (loc, type, tem);
9000
9001 case INDIRECT_REF:
9002 /* Fold *&X to X if X is an lvalue. */
9003 if (TREE_CODE (op0) == ADDR_EXPR)
9004 {
9005 tree op00 = TREE_OPERAND (op0, 0);
9006 if ((VAR_P (op00)
9007 || TREE_CODE (op00) == PARM_DECL
9008 || TREE_CODE (op00) == RESULT_DECL)
9009 && !TREE_READONLY (op00))
9010 return op00;
9011 }
9012 return NULL_TREE;
9013
9014 default:
9015 return NULL_TREE;
9016 } /* switch (code) */
9017 }
9018
9019
9020 /* If the operation was a conversion do _not_ mark a resulting constant
9021 with TREE_OVERFLOW if the original constant was not. These conversions
9022 have implementation defined behavior and retaining the TREE_OVERFLOW
9023 flag here would confuse later passes such as VRP. */
9024 tree
9025 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
9026 tree type, tree op0)
9027 {
9028 tree res = fold_unary_loc (loc, code, type, op0);
9029 if (res
9030 && TREE_CODE (res) == INTEGER_CST
9031 && TREE_CODE (op0) == INTEGER_CST
9032 && CONVERT_EXPR_CODE_P (code))
9033 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
9034
9035 return res;
9036 }
9037
9038 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9039 operands OP0 and OP1. LOC is the location of the resulting expression.
9040 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
9041 Return the folded expression if folding is successful. Otherwise,
9042 return NULL_TREE. */
9043 static tree
9044 fold_truth_andor (location_t loc, enum tree_code code, tree type,
9045 tree arg0, tree arg1, tree op0, tree op1)
9046 {
9047 tree tem;
9048
9049 /* We only do these simplifications if we are optimizing. */
9050 if (!optimize)
9051 return NULL_TREE;
9052
9053 /* Check for things like (A || B) && (A || C). We can convert this
9054 to A || (B && C). Note that either operator can be any of the four
9055 truth and/or operations and the transformation will still be
9056 valid. Also note that we only care about order for the
9057 ANDIF and ORIF operators. If B contains side effects, this
9058 might change the truth-value of A. */
9059 if (TREE_CODE (arg0) == TREE_CODE (arg1)
9060 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9061 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9062 || TREE_CODE (arg0) == TRUTH_AND_EXPR
9063 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9064 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9065 {
9066 tree a00 = TREE_OPERAND (arg0, 0);
9067 tree a01 = TREE_OPERAND (arg0, 1);
9068 tree a10 = TREE_OPERAND (arg1, 0);
9069 tree a11 = TREE_OPERAND (arg1, 1);
9070 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9071 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9072 && (code == TRUTH_AND_EXPR
9073 || code == TRUTH_OR_EXPR));
9074
9075 if (operand_equal_p (a00, a10, 0))
9076 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9077 fold_build2_loc (loc, code, type, a01, a11));
9078 else if (commutative && operand_equal_p (a00, a11, 0))
9079 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9080 fold_build2_loc (loc, code, type, a01, a10));
9081 else if (commutative && operand_equal_p (a01, a10, 0))
9082 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
9083 fold_build2_loc (loc, code, type, a00, a11));
9084
9085 /* This case is tricky because we must either have commutative
9086 operators or else A10 must not have side-effects. */
9087
9088 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9089 && operand_equal_p (a01, a11, 0))
9090 return fold_build2_loc (loc, TREE_CODE (arg0), type,
9091 fold_build2_loc (loc, code, type, a00, a10),
9092 a01);
9093 }
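/* Editor's illustration, not part of the original source: with
   side-effect-free operands, something like
       (p != 0 || x > 5) && (p != 0 || y > 5)
   shares its left-hand operand and is rewritten above as
       p != 0 || (x > 5 && y > 5).  */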
9094
9095 /* See if we can build a range comparison. */
9096 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9097 return tem;
9098
9099 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9100 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9101 {
9102 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
9103 if (tem)
9104 return fold_build2_loc (loc, code, type, tem, arg1);
9105 }
9106
9107 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9108 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9109 {
9110 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
9111 if (tem)
9112 return fold_build2_loc (loc, code, type, arg0, tem);
9113 }
9114
9115 /* Check for the possibility of merging component references. If our
9116 lhs is another similar operation, try to merge its rhs with our
9117 rhs. Then try to merge our lhs and rhs. */
9118 if (TREE_CODE (arg0) == code
9119 && (tem = fold_truth_andor_1 (loc, code, type,
9120 TREE_OPERAND (arg0, 1), arg1)) != 0)
9121 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9122
9123 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
9124 return tem;
9125
9126 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9127 if (param_logical_op_non_short_circuit != -1)
9128 logical_op_non_short_circuit
9129 = param_logical_op_non_short_circuit;
9130 if (logical_op_non_short_circuit
9131 && !flag_sanitize_coverage
9132 && (code == TRUTH_AND_EXPR
9133 || code == TRUTH_ANDIF_EXPR
9134 || code == TRUTH_OR_EXPR
9135 || code == TRUTH_ORIF_EXPR))
9136 {
9137 enum tree_code ncode, icode;
9138
9139 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9140 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9141 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9142
9143 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9144 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
9145 We don't want to pack more than two leaves into a non-IF AND/OR
9146 expression.
9147 If the tree code of the left-hand operand isn't an AND/OR-IF code,
9148 or isn't equal to IF-CODE, then we don't want to add the right-hand
9149 operand. If the inner right-hand side of the left-hand operand has
9150 side effects, or isn't simple, then we can't add to it, as otherwise
9151 we might destroy the if-sequence. */
9152 if (TREE_CODE (arg0) == icode
9153 && simple_operand_p_2 (arg1)
9154 /* Needed for sequence points to handle trapping and
9155 side effects. */
9156 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
9157 {
9158 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
9159 arg1);
9160 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
9161 tem);
9162 }
9163 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9164 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
9165 else if (TREE_CODE (arg1) == icode
9166 && simple_operand_p_2 (arg0)
9167 /* Needed for sequence points to handle trapping and
9168 side effects. */
9169 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
9170 {
9171 tem = fold_build2_loc (loc, ncode, type,
9172 arg0, TREE_OPERAND (arg1, 0));
9173 return fold_build2_loc (loc, icode, type, tem,
9174 TREE_OPERAND (arg1, 1));
9175 }
9176 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
9177 into (A OR B).
9178 For sequence point consistency, we need to check for trapping,
9179 and side-effects. */
9180 else if (code == icode && simple_operand_p_2 (arg0)
9181 && simple_operand_p_2 (arg1))
9182 return fold_build2_loc (loc, ncode, type, arg0, arg1);
9183 }
9184
9185 return NULL_TREE;
9186 }
9187
9188 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9189 by changing CODE to reduce the magnitude of constants involved in
9190 ARG0 of the comparison.
9191 Returns a canonicalized comparison tree if a simplification was
9192 possible, otherwise returns NULL_TREE.
9193 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9194 valid if signed overflow is undefined. */
9195
9196 static tree
9197 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9198 tree arg0, tree arg1,
9199 bool *strict_overflow_p)
9200 {
9201 enum tree_code code0 = TREE_CODE (arg0);
9202 tree t, cst0 = NULL_TREE;
9203 int sgn0;
9204
9205 /* Match A +- CST code arg1. We can change this only if overflow
9206 is undefined. */
9207 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9208 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
9209 /* In principle pointers also have undefined overflow behavior,
9210 but that causes problems elsewhere. */
9211 && !POINTER_TYPE_P (TREE_TYPE (arg0))
9212 && (code0 == MINUS_EXPR
9213 || code0 == PLUS_EXPR)
9214 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
9215 return NULL_TREE;
9216
9217 /* Identify the constant in arg0 and its sign. */
9218 cst0 = TREE_OPERAND (arg0, 1);
9219 sgn0 = tree_int_cst_sgn (cst0);
9220
9221 /* Overflowed constants and zero will cause problems. */
9222 if (integer_zerop (cst0)
9223 || TREE_OVERFLOW (cst0))
9224 return NULL_TREE;
9225
9226 /* See if we can reduce the magnitude of the constant in
9227 arg0 by changing the comparison code. */
9228 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9229 if (code == LT_EXPR
9230 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9231 code = LE_EXPR;
9232 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9233 else if (code == GT_EXPR
9234 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9235 code = GE_EXPR;
9236 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9237 else if (code == LE_EXPR
9238 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9239 code = LT_EXPR;
9240 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9241 else if (code == GE_EXPR
9242 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9243 code = GT_EXPR;
9244 else
9245 return NULL_TREE;
9246 *strict_overflow_p = true;
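/* Editor's illustration, not part of the original source: with signed x
   whose overflow is undefined, x - 10 < y becomes x - 9 <= y here: the
   comparison code is relaxed from < to <= above, and the constant's
   magnitude is reduced by one below.  */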
9247
9248 /* Now build the constant reduced in magnitude. But not if that
9249 would produce one outside of its type's range. */
9250 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9251 && ((sgn0 == 1
9252 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9253 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9254 || (sgn0 == -1
9255 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9256 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9257 return NULL_TREE;
9258
9259 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9260 cst0, build_int_cst (TREE_TYPE (cst0), 1));
9261 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9262 t = fold_convert (TREE_TYPE (arg1), t);
9263
9264 return fold_build2_loc (loc, code, type, t, arg1);
9265 }
9266
9267 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9268 overflow further. Try to decrease the magnitude of constants involved
9269 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9270 and put sole constants at the second argument position.
9271 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9272
9273 static tree
9274 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9275 tree arg0, tree arg1)
9276 {
9277 tree t;
9278 bool strict_overflow_p;
9279 const char * const warnmsg = G_("assuming signed overflow does not occur "
9280 "when reducing constant in comparison");
9281
9282 /* Try canonicalization by simplifying arg0. */
9283 strict_overflow_p = false;
9284 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9285 &strict_overflow_p);
9286 if (t)
9287 {
9288 if (strict_overflow_p)
9289 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9290 return t;
9291 }
9292
9293 /* Try canonicalization by simplifying arg1 using the swapped
9294 comparison. */
9295 code = swap_tree_comparison (code);
9296 strict_overflow_p = false;
9297 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9298 &strict_overflow_p);
9299 if (t && strict_overflow_p)
9300 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9301 return t;
9302 }
9303
9304 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9305 space. This is used to avoid issuing overflow warnings for
9306 expressions like &p->x which cannot wrap. */
9307
9308 static bool
9309 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
9310 {
9311 if (!POINTER_TYPE_P (TREE_TYPE (base)))
9312 return true;
9313
9314 if (maybe_lt (bitpos, 0))
9315 return true;
9316
9317 poly_wide_int wi_offset;
9318 int precision = TYPE_PRECISION (TREE_TYPE (base));
9319 if (offset == NULL_TREE)
9320 wi_offset = wi::zero (precision);
9321 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
9322 return true;
9323 else
9324 wi_offset = wi::to_poly_wide (offset);
9325
9326 wi::overflow_type overflow;
9327 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
9328 precision);
9329 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
9330 if (overflow)
9331 return true;
9332
9333 poly_uint64 total_hwi, size;
9334 if (!total.to_uhwi (&total_hwi)
9335 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
9336 &size)
9337 || known_eq (size, 0U))
9338 return true;
9339
9340 if (known_le (total_hwi, size))
9341 return false;
9342
9343 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9344 array. */
9345 if (TREE_CODE (base) == ADDR_EXPR
9346 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
9347 &size)
9348 && maybe_ne (size, 0U)
9349 && known_le (total_hwi, size))
9350 return false;
9351
9352 return true;
9353 }
9354
9355 /* Return a positive integer when the symbol DECL is known to have
9356 a nonzero address, zero when it's known not to (e.g., it's a weak
9357 symbol), and a negative integer when the symbol is not yet in the
9358 symbol table and so whether or not its address is zero is unknown.
9359 For function-local objects, always return a positive integer. */
9360 static int
9361 maybe_nonzero_address (tree decl)
9362 {
9363 if (DECL_P (decl) && decl_in_symtab_p (decl))
9364 if (struct symtab_node *symbol = symtab_node::get_create (decl))
9365 return symbol->nonzero_address ();
9366
9367 /* Function local objects are never NULL. */
9368 if (DECL_P (decl)
9369 && (DECL_CONTEXT (decl)
9370 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
9371 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
9372 return 1;
9373
9374 return -1;
9375 }
9376
9377 /* Subroutine of fold_binary. This routine performs all of the
9378 transformations that are common to the equality/inequality
9379 operators (EQ_EXPR and NE_EXPR) and the ordering operators
9380 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
9381 fold_binary should call fold_binary. Fold a comparison with
9382 tree code CODE and type TYPE with operands OP0 and OP1. Return
9383 the folded comparison or NULL_TREE. */
9384
9385 static tree
9386 fold_comparison (location_t loc, enum tree_code code, tree type,
9387 tree op0, tree op1)
9388 {
9389 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
9390 tree arg0, arg1, tem;
9391
9392 arg0 = op0;
9393 arg1 = op1;
9394
9395 STRIP_SIGN_NOPS (arg0);
9396 STRIP_SIGN_NOPS (arg1);
9397
9398 /* For comparisons of pointers we can decompose it to a compile time
9399 comparison of the base objects and the offsets into the object.
9400 This requires at least one operand being an ADDR_EXPR or a
9401 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9402 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9403 && (TREE_CODE (arg0) == ADDR_EXPR
9404 || TREE_CODE (arg1) == ADDR_EXPR
9405 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9406 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9407 {
9408 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9409 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
9410 machine_mode mode;
9411 int volatilep, reversep, unsignedp;
9412 bool indirect_base0 = false, indirect_base1 = false;
9413
9414 /* Get base and offset for the access. Strip ADDR_EXPR for
9415 get_inner_reference, but put it back by stripping INDIRECT_REF
9416 off the base object if possible. indirect_baseN will be true
9417 if baseN is not an address but refers to the object itself. */
9418 base0 = arg0;
9419 if (TREE_CODE (arg0) == ADDR_EXPR)
9420 {
9421 base0
9422 = get_inner_reference (TREE_OPERAND (arg0, 0),
9423 &bitsize, &bitpos0, &offset0, &mode,
9424 &unsignedp, &reversep, &volatilep);
9425 if (TREE_CODE (base0) == INDIRECT_REF)
9426 base0 = TREE_OPERAND (base0, 0);
9427 else
9428 indirect_base0 = true;
9429 }
9430 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9431 {
9432 base0 = TREE_OPERAND (arg0, 0);
9433 STRIP_SIGN_NOPS (base0);
9434 if (TREE_CODE (base0) == ADDR_EXPR)
9435 {
9436 base0
9437 = get_inner_reference (TREE_OPERAND (base0, 0),
9438 &bitsize, &bitpos0, &offset0, &mode,
9439 &unsignedp, &reversep, &volatilep);
9440 if (TREE_CODE (base0) == INDIRECT_REF)
9441 base0 = TREE_OPERAND (base0, 0);
9442 else
9443 indirect_base0 = true;
9444 }
9445 if (offset0 == NULL_TREE || integer_zerop (offset0))
9446 offset0 = TREE_OPERAND (arg0, 1);
9447 else
9448 offset0 = size_binop (PLUS_EXPR, offset0,
9449 TREE_OPERAND (arg0, 1));
9450 if (poly_int_tree_p (offset0))
9451 {
9452 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
9453 TYPE_PRECISION (sizetype));
9454 tem <<= LOG2_BITS_PER_UNIT;
9455 tem += bitpos0;
9456 if (tem.to_shwi (&bitpos0))
9457 offset0 = NULL_TREE;
9458 }
9459 }
9460
9461 base1 = arg1;
9462 if (TREE_CODE (arg1) == ADDR_EXPR)
9463 {
9464 base1
9465 = get_inner_reference (TREE_OPERAND (arg1, 0),
9466 &bitsize, &bitpos1, &offset1, &mode,
9467 &unsignedp, &reversep, &volatilep);
9468 if (TREE_CODE (base1) == INDIRECT_REF)
9469 base1 = TREE_OPERAND (base1, 0);
9470 else
9471 indirect_base1 = true;
9472 }
9473 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9474 {
9475 base1 = TREE_OPERAND (arg1, 0);
9476 STRIP_SIGN_NOPS (base1);
9477 if (TREE_CODE (base1) == ADDR_EXPR)
9478 {
9479 base1
9480 = get_inner_reference (TREE_OPERAND (base1, 0),
9481 &bitsize, &bitpos1, &offset1, &mode,
9482 &unsignedp, &reversep, &volatilep);
9483 if (TREE_CODE (base1) == INDIRECT_REF)
9484 base1 = TREE_OPERAND (base1, 0);
9485 else
9486 indirect_base1 = true;
9487 }
9488 if (offset1 == NULL_TREE || integer_zerop (offset1))
9489 offset1 = TREE_OPERAND (arg1, 1);
9490 else
9491 offset1 = size_binop (PLUS_EXPR, offset1,
9492 TREE_OPERAND (arg1, 1));
9493 if (poly_int_tree_p (offset1))
9494 {
9495 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
9496 TYPE_PRECISION (sizetype));
9497 tem <<= LOG2_BITS_PER_UNIT;
9498 tem += bitpos1;
9499 if (tem.to_shwi (&bitpos1))
9500 offset1 = NULL_TREE;
9501 }
9502 }
9503
9504 /* If we have equivalent bases we might be able to simplify. */
9505 if (indirect_base0 == indirect_base1
9506 && operand_equal_p (base0, base1,
9507 indirect_base0 ? OEP_ADDRESS_OF : 0))
9508 {
9509 /* We can fold this expression to a constant if the non-constant
9510 offset parts are equal. */
9511 if ((offset0 == offset1
9512 || (offset0 && offset1
9513 && operand_equal_p (offset0, offset1, 0)))
9514 && (equality_code
9515 || (indirect_base0
9516 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
9517 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9518 {
9519 if (!equality_code
9520 && maybe_ne (bitpos0, bitpos1)
9521 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9522 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9523 fold_overflow_warning (("assuming pointer wraparound does not "
9524 "occur when comparing P +- C1 with "
9525 "P +- C2"),
9526 WARN_STRICT_OVERFLOW_CONDITIONAL);
9527
9528 switch (code)
9529 {
9530 case EQ_EXPR:
9531 if (known_eq (bitpos0, bitpos1))
9532 return constant_boolean_node (true, type);
9533 if (known_ne (bitpos0, bitpos1))
9534 return constant_boolean_node (false, type);
9535 break;
9536 case NE_EXPR:
9537 if (known_ne (bitpos0, bitpos1))
9538 return constant_boolean_node (true, type);
9539 if (known_eq (bitpos0, bitpos1))
9540 return constant_boolean_node (false, type);
9541 break;
9542 case LT_EXPR:
9543 if (known_lt (bitpos0, bitpos1))
9544 return constant_boolean_node (true, type);
9545 if (known_ge (bitpos0, bitpos1))
9546 return constant_boolean_node (false, type);
9547 break;
9548 case LE_EXPR:
9549 if (known_le (bitpos0, bitpos1))
9550 return constant_boolean_node (true, type);
9551 if (known_gt (bitpos0, bitpos1))
9552 return constant_boolean_node (false, type);
9553 break;
9554 case GE_EXPR:
9555 if (known_ge (bitpos0, bitpos1))
9556 return constant_boolean_node (true, type);
9557 if (known_lt (bitpos0, bitpos1))
9558 return constant_boolean_node (false, type);
9559 break;
9560 case GT_EXPR:
9561 if (known_gt (bitpos0, bitpos1))
9562 return constant_boolean_node (true, type);
9563 if (known_le (bitpos0, bitpos1))
9564 return constant_boolean_node (false, type);
9565 break;
9566 default:;
9567 }
9568 }
9569 /* We can simplify the comparison to a comparison of the variable
9570 offset parts if the constant offset parts are equal.
9571 Be careful to use signed sizetype here because otherwise we
9572 mess with array offsets in the wrong way. This is possible
9573 because pointer arithmetic is restricted to remain within an
9574 object and overflow on pointer differences is undefined as of
9575 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9576 else if (known_eq (bitpos0, bitpos1)
9577 && (equality_code
9578 || (indirect_base0
9579 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
9580 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9581 {
9582 /* By converting to signed sizetype we cover middle-end pointer
9583 arithmetic which operates on unsigned pointer types of size
9584 type size and ARRAY_REF offsets which are properly sign or
9585 zero extended from their type in case it is narrower than
9586 sizetype. */
9587 if (offset0 == NULL_TREE)
9588 offset0 = build_int_cst (ssizetype, 0);
9589 else
9590 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9591 if (offset1 == NULL_TREE)
9592 offset1 = build_int_cst (ssizetype, 0);
9593 else
9594 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9595
9596 if (!equality_code
9597 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9598 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9599 fold_overflow_warning (("assuming pointer wraparound does not "
9600 "occur when comparing P +- C1 with "
9601 "P +- C2"),
9602 WARN_STRICT_OVERFLOW_COMPARISON);
9603
9604 return fold_build2_loc (loc, code, type, offset0, offset1);
9605 }
9606 }
9607 /* For equal offsets we can simplify to a comparison of the
9608 base addresses. */
9609 else if (known_eq (bitpos0, bitpos1)
9610 && (indirect_base0
9611 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9612 && (indirect_base1
9613 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9614 && ((offset0 == offset1)
9615 || (offset0 && offset1
9616 && operand_equal_p (offset0, offset1, 0))))
9617 {
9618 if (indirect_base0)
9619 base0 = build_fold_addr_expr_loc (loc, base0);
9620 if (indirect_base1)
9621 base1 = build_fold_addr_expr_loc (loc, base1);
9622 return fold_build2_loc (loc, code, type, base0, base1);
9623 }
9624 /* Comparison between an ordinary (non-weak) symbol and a null
9625 pointer can be eliminated since such symbols must have a non
9626 null address. In C, relational expressions between pointers
9627 to objects and null pointers are undefined. The results
9628 below follow the C++ rules with the additional property that
9629 every object pointer compares greater than a null pointer.
9630 */
9631 else if (((DECL_P (base0)
9632 && maybe_nonzero_address (base0) > 0
9633 /* Avoid folding references to struct members at offset 0 to
9634 prevent tests like '&ptr->firstmember == 0' from getting
9635 eliminated. When ptr is null, although the -> expression
9636 is strictly speaking invalid, GCC retains it as a matter
9637 of QoI. See PR c/44555. */
9638 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
9639 || CONSTANT_CLASS_P (base0))
9640 && indirect_base0
9641 /* The caller guarantees that when one of the arguments is
9642 constant (i.e., null in this case) it is second. */
9643 && integer_zerop (arg1))
9644 {
9645 switch (code)
9646 {
9647 case EQ_EXPR:
9648 case LE_EXPR:
9649 case LT_EXPR:
9650 return constant_boolean_node (false, type);
9651 case GE_EXPR:
9652 case GT_EXPR:
9653 case NE_EXPR:
9654 return constant_boolean_node (true, type);
9655 default:
9656 gcc_unreachable ();
9657 }
9658 }
9659 }
9660
9661 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9662 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9663 the resulting offset is smaller in absolute value than the
9664 original one and has the same sign. */
9665 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9666 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9667 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9668 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9669 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9670 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9671 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9672 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9673 {
9674 tree const1 = TREE_OPERAND (arg0, 1);
9675 tree const2 = TREE_OPERAND (arg1, 1);
9676 tree variable1 = TREE_OPERAND (arg0, 0);
9677 tree variable2 = TREE_OPERAND (arg1, 0);
9678 tree cst;
9679 const char * const warnmsg = G_("assuming signed overflow does not "
9680 "occur when combining constants around "
9681 "a comparison");
9682
9683 /* Put the constant on the side where it doesn't overflow and is
9684 of lower absolute value and of the same sign as before. */
9685 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9686 ? MINUS_EXPR : PLUS_EXPR,
9687 const2, const1);
9688 if (!TREE_OVERFLOW (cst)
9689 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9690 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9691 {
9692 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9693 return fold_build2_loc (loc, code, type,
9694 variable1,
9695 fold_build2_loc (loc, TREE_CODE (arg1),
9696 TREE_TYPE (arg1),
9697 variable2, cst));
9698 }
9699
9700 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9701 ? MINUS_EXPR : PLUS_EXPR,
9702 const1, const2);
9703 if (!TREE_OVERFLOW (cst)
9704 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9705 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9706 {
9707 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9708 return fold_build2_loc (loc, code, type,
9709 fold_build2_loc (loc, TREE_CODE (arg0),
9710 TREE_TYPE (arg0),
9711 variable1, cst),
9712 variable2);
9713 }
9714 }
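/* Editor's illustration, not part of the original source: for signed x
   and y with undefined overflow, x + 5 < y + 2 combines the constants
   onto the side where the result keeps the same sign and a smaller
   magnitude, yielding x + 3 < y.  */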
9715
9716 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9717 if (tem)
9718 return tem;
9719
9720 /* If we are comparing an expression that just has comparisons
9721 of two integer values, arithmetic expressions of those comparisons,
9722 and constants, we can simplify it. There are only three cases
9723 to check: the two values can either be equal, the first can be
9724 greater, or the second can be greater. Fold the expression for
9725 those three values. Since each value must be 0 or 1, we have
9726 eight possibilities, each of which corresponds to the constant 0
9727 or 1 or one of the six possible comparisons.
9728
9729 This handles common cases like (a > b) == 0 but also handles
9730 expressions like ((x > y) - (y > x)) > 0, which supposedly
9731 occur in macroized code. */
9732
9733 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9734 {
9735 tree cval1 = 0, cval2 = 0;
9736
9737 if (twoval_comparison_p (arg0, &cval1, &cval2)
9738 /* Don't handle degenerate cases here; they should already
9739 have been handled anyway. */
9740 && cval1 != 0 && cval2 != 0
9741 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9742 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9743 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9744 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9745 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9746 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9747 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9748 {
9749 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9750 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9751
9752 /* We can't just pass T to eval_subst in case cval1 or cval2
9753 was the same as ARG1. */
9754
9755 tree high_result
9756 = fold_build2_loc (loc, code, type,
9757 eval_subst (loc, arg0, cval1, maxval,
9758 cval2, minval),
9759 arg1);
9760 tree equal_result
9761 = fold_build2_loc (loc, code, type,
9762 eval_subst (loc, arg0, cval1, maxval,
9763 cval2, maxval),
9764 arg1);
9765 tree low_result
9766 = fold_build2_loc (loc, code, type,
9767 eval_subst (loc, arg0, cval1, minval,
9768 cval2, maxval),
9769 arg1);
9770
9771 /* All three of these results should be 0 or 1. Confirm they are.
9772 Then use those values to select the proper code to use. */
9773
9774 if (TREE_CODE (high_result) == INTEGER_CST
9775 && TREE_CODE (equal_result) == INTEGER_CST
9776 && TREE_CODE (low_result) == INTEGER_CST)
9777 {
9778 /* Make a 3-bit mask with the high-order bit being the
9779 value for `>', the next for '=', and the low for '<'. */
9780 switch ((integer_onep (high_result) * 4)
9781 + (integer_onep (equal_result) * 2)
9782 + integer_onep (low_result))
9783 {
9784 case 0:
9785 /* Always false. */
9786 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9787 case 1:
9788 code = LT_EXPR;
9789 break;
9790 case 2:
9791 code = EQ_EXPR;
9792 break;
9793 case 3:
9794 code = LE_EXPR;
9795 break;
9796 case 4:
9797 code = GT_EXPR;
9798 break;
9799 case 5:
9800 code = NE_EXPR;
9801 break;
9802 case 6:
9803 code = GE_EXPR;
9804 break;
9805 case 7:
9806 /* Always true. */
9807 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9808 }
9809
9810 return fold_build2_loc (loc, code, type, cval1, cval2);
9811 }
9812 }
9813 }
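/* Editor's illustration, not part of the original source: for
   (a > b) == 0, substituting the three possible orderings of a and b
   gives high_result = 0, equal_result = 1 and low_result = 1, i.e.
   mask value 3, so the whole expression collapses to a <= b.  */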
9814
9815 return NULL_TREE;
9816 }
9817
9818
9819 /* Subroutine of fold_binary. Optimize complex multiplications of the
9820 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9821 argument EXPR represents the expression "z" of type TYPE. */
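/* Editor's note, an illustrative expansion not in the original comment:
   for z = a + b*i we have conj(z) = a - b*i, so z * conj(z) is
   (a*a + b*b) + 0*i, which is exactly the REALPART/IMAGPART expansion
   built below.  */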
9822
9823 static tree
9824 fold_mult_zconjz (location_t loc, tree type, tree expr)
9825 {
9826 tree itype = TREE_TYPE (type);
9827 tree rpart, ipart, tem;
9828
9829 if (TREE_CODE (expr) == COMPLEX_EXPR)
9830 {
9831 rpart = TREE_OPERAND (expr, 0);
9832 ipart = TREE_OPERAND (expr, 1);
9833 }
9834 else if (TREE_CODE (expr) == COMPLEX_CST)
9835 {
9836 rpart = TREE_REALPART (expr);
9837 ipart = TREE_IMAGPART (expr);
9838 }
9839 else
9840 {
9841 expr = save_expr (expr);
9842 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9843 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9844 }
9845
9846 rpart = save_expr (rpart);
9847 ipart = save_expr (ipart);
9848 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9849 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9850 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9851 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9852 build_zero_cst (itype));
9853 }
9854
9855
9856 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9857 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
9858 true if successful. */
9859
9860 static bool
9861 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
9862 {
9863 unsigned HOST_WIDE_INT i, nunits;
9864
9865 if (TREE_CODE (arg) == VECTOR_CST
9866 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
9867 {
9868 for (i = 0; i < nunits; ++i)
9869 elts[i] = VECTOR_CST_ELT (arg, i);
9870 }
9871 else if (TREE_CODE (arg) == CONSTRUCTOR)
9872 {
9873 constructor_elt *elt;
9874
9875 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9876 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9877 return false;
9878 else
9879 elts[i] = elt->value;
9880 }
9881 else
9882 return false;
9883 for (; i < nelts; i++)
9884 elts[i]
9885 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9886 return true;
9887 }
9888
9889 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9890 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9891 NULL_TREE otherwise. */
9892
9893 tree
9894 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
9895 {
9896 unsigned int i;
9897 unsigned HOST_WIDE_INT nelts;
9898 bool need_ctor = false;
9899
9900 if (!sel.length ().is_constant (&nelts))
9901 return NULL_TREE;
9902 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
9903 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
9904 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
9905 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9906 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9907 return NULL_TREE;
9908
9909 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
9910 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
9911 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
9912 return NULL_TREE;
9913
9914 tree_vector_builder out_elts (type, nelts, 1);
9915 for (i = 0; i < nelts; i++)
9916 {
9917 HOST_WIDE_INT index;
9918 if (!sel[i].is_constant (&index))
9919 return NULL_TREE;
9920 if (!CONSTANT_CLASS_P (in_elts[index]))
9921 need_ctor = true;
9922 out_elts.quick_push (unshare_expr (in_elts[index]));
9923 }
9924
9925 if (need_ctor)
9926 {
9927 vec<constructor_elt, va_gc> *v;
9928 vec_alloc (v, nelts);
9929 for (i = 0; i < nelts; i++)
9930 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
9931 return build_constructor (type, v);
9932 }
9933 else
9934 return out_elts.build ();
9935 }
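/* Editor's illustration, not part of the original source: with nelts == 4,
   a selector of { 0, 4, 1, 5 } interleaves the low halves of the two
   inputs, producing { arg0[0], arg1[0], arg0[1], arg1[1] }, since selector
   indices of nelts and above pick elements of ARG1.  */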
9936
9937 /* Try to fold a pointer difference of type TYPE two address expressions of
9938 array references AREF0 and AREF1 using location LOC. Return a
9939 simplified expression for the difference or NULL_TREE. */
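/* Editor's illustration, not part of the original source: for int a[16],
   the difference of &a[i] and &a[j] folds to (i - j) times the element
   size in bytes (sizeof (int) here), and when the bases are themselves
   ARRAY_REFs or pointer indirections the routine recurses to fold the
   base offset first.  */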
9940
9941 static tree
9942 fold_addr_of_array_ref_difference (location_t loc, tree type,
9943 tree aref0, tree aref1,
9944 bool use_pointer_diff)
9945 {
9946 tree base0 = TREE_OPERAND (aref0, 0);
9947 tree base1 = TREE_OPERAND (aref1, 0);
9948 tree base_offset = build_int_cst (type, 0);
9949
9950 /* If the bases are array references as well, recurse. If the bases
9951 are pointer indirections compute the difference of the pointers.
9952 If the bases are equal, we are set. */
9953 if ((TREE_CODE (base0) == ARRAY_REF
9954 && TREE_CODE (base1) == ARRAY_REF
9955 && (base_offset
9956 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
9957 use_pointer_diff)))
9958 || (INDIRECT_REF_P (base0)
9959 && INDIRECT_REF_P (base1)
9960 && (base_offset
9961 = use_pointer_diff
9962 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
9963 TREE_OPERAND (base0, 0),
9964 TREE_OPERAND (base1, 0))
9965 : fold_binary_loc (loc, MINUS_EXPR, type,
9966 fold_convert (type,
9967 TREE_OPERAND (base0, 0)),
9968 fold_convert (type,
9969 TREE_OPERAND (base1, 0)))))
9970 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
9971 {
9972 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9973 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9974 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9975 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
9976 return fold_build2_loc (loc, PLUS_EXPR, type,
9977 base_offset,
9978 fold_build2_loc (loc, MULT_EXPR, type,
9979 diff, esz));
9980 }
9981 return NULL_TREE;
9982 }
9983
9984 /* If the real or vector real constant CST of type TYPE has an exact
9985 inverse, return it, else return NULL. */
9986
9987 tree
9988 exact_inverse (tree type, tree cst)
9989 {
9990 REAL_VALUE_TYPE r;
9991 tree unit_type;
9992 machine_mode mode;
9993
9994 switch (TREE_CODE (cst))
9995 {
9996 case REAL_CST:
9997 r = TREE_REAL_CST (cst);
9998
9999 if (exact_real_inverse (TYPE_MODE (type), &r))
10000 return build_real (type, r);
10001
10002 return NULL_TREE;
10003
10004 case VECTOR_CST:
10005 {
10006 unit_type = TREE_TYPE (type);
10007 mode = TYPE_MODE (unit_type);
10008
10009 tree_vector_builder elts;
10010 if (!elts.new_unary_operation (type, cst, false))
10011 return NULL_TREE;
10012 unsigned int count = elts.encoded_nelts ();
10013 for (unsigned int i = 0; i < count; ++i)
10014 {
10015 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
10016 if (!exact_real_inverse (mode, &r))
10017 return NULL_TREE;
10018 elts.quick_push (build_real (unit_type, r));
10019 }
10020
10021 return elts.build ();
10022 }
10023
10024 default:
10025 return NULL_TREE;
10026 }
10027 }
10028
10029 /* Mask out the tz least significant bits of X of type TYPE where
10030 tz is the number of trailing zeroes in Y. */
10031 static wide_int
10032 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
10033 {
10034 int tz = wi::ctz (y);
10035 if (tz > 0)
10036 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
10037 return x;
10038 }
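/* Editor's illustration, not part of the original source: if Y is 24
   (binary 11000), it has three trailing zeroes, so X is ANDed with a mask
   whose low three bits are clear, i.e. the result is X & ~7.  */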
10039
10040 /* Return true when T is an address and is known to be nonzero.
10041 For floating point we further ensure that T is not denormal.
10042 Similar logic is present in nonzero_address in rtlanal.h.
10043
10044 If the return value is based on the assumption that signed overflow
10045 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10046 change *STRICT_OVERFLOW_P. */
10047
10048 static bool
10049 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
10050 {
10051 tree type = TREE_TYPE (t);
10052 enum tree_code code;
10053
10054 /* Doing something useful for floating point would need more work. */
10055 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10056 return false;
10057
10058 code = TREE_CODE (t);
10059 switch (TREE_CODE_CLASS (code))
10060 {
10061 case tcc_unary:
10062 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10063 strict_overflow_p);
10064 case tcc_binary:
10065 case tcc_comparison:
10066 return tree_binary_nonzero_warnv_p (code, type,
10067 TREE_OPERAND (t, 0),
10068 TREE_OPERAND (t, 1),
10069 strict_overflow_p);
10070 case tcc_constant:
10071 case tcc_declaration:
10072 case tcc_reference:
10073 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10074
10075 default:
10076 break;
10077 }
10078
10079 switch (code)
10080 {
10081 case TRUTH_NOT_EXPR:
10082 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10083 strict_overflow_p);
10084
10085 case TRUTH_AND_EXPR:
10086 case TRUTH_OR_EXPR:
10087 case TRUTH_XOR_EXPR:
10088 return tree_binary_nonzero_warnv_p (code, type,
10089 TREE_OPERAND (t, 0),
10090 TREE_OPERAND (t, 1),
10091 strict_overflow_p);
10092
10093 case COND_EXPR:
10094 case CONSTRUCTOR:
10095 case OBJ_TYPE_REF:
10096 case ASSERT_EXPR:
10097 case ADDR_EXPR:
10098 case WITH_SIZE_EXPR:
10099 case SSA_NAME:
10100 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10101
10102 case COMPOUND_EXPR:
10103 case MODIFY_EXPR:
10104 case BIND_EXPR:
10105 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10106 strict_overflow_p);
10107
10108 case SAVE_EXPR:
10109 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10110 strict_overflow_p);
10111
10112 case CALL_EXPR:
10113 {
10114 tree fndecl = get_callee_fndecl (t);
10115 if (!fndecl) return false;
10116 if (flag_delete_null_pointer_checks && !flag_check_new
10117 && DECL_IS_OPERATOR_NEW_P (fndecl)
10118 && !TREE_NOTHROW (fndecl))
10119 return true;
10120 if (flag_delete_null_pointer_checks
10121 && lookup_attribute ("returns_nonnull",
10122 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10123 return true;
10124 return alloca_call_p (t);
10125 }
10126
10127 default:
10128 break;
10129 }
10130 return false;
10131 }
10132
10133 /* Return true when T is an address and is known to be nonzero.
10134 Handle warnings about undefined signed overflow. */
10135
10136 bool
10137 tree_expr_nonzero_p (tree t)
10138 {
10139 bool ret, strict_overflow_p;
10140
10141 strict_overflow_p = false;
10142 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10143 if (strict_overflow_p)
10144 fold_overflow_warning (("assuming signed overflow does not occur when "
10145 "determining that expression is always "
10146 "non-zero"),
10147 WARN_STRICT_OVERFLOW_MISC);
10148 return ret;
10149 }
10150
10151 /* Return true if T is known not to be equal to an integer W. */
10152
10153 bool
10154 expr_not_equal_to (tree t, const wide_int &w)
10155 {
10156 wide_int min, max, nz;
10157 value_range_kind rtype;
10158 switch (TREE_CODE (t))
10159 {
10160 case INTEGER_CST:
10161 return wi::to_wide (t) != w;
10162
10163 case SSA_NAME:
10164 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
10165 return false;
10166 rtype = get_range_info (t, &min, &max);
10167 if (rtype == VR_RANGE)
10168 {
10169 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
10170 return true;
10171 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
10172 return true;
10173 }
10174 else if (rtype == VR_ANTI_RANGE
10175 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
10176 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
10177 return true;
10178 /* If T has some known zero bits and W has any of those bits set,
10179 then T is known not to be equal to W. */
10180 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
10181 TYPE_PRECISION (TREE_TYPE (t))), 0))
10182 return true;
10183 return false;
10184
10185 default:
10186 return false;
10187 }
10188 }
10189
10190 /* Fold a binary expression of code CODE and type TYPE with operands
10191 OP0 and OP1. LOC is the location of the resulting expression.
10192 Return the folded expression if folding is successful. Otherwise,
10193 return NULL_TREE. */
10194
10195 tree
10196 fold_binary_loc (location_t loc, enum tree_code code, tree type,
10197 tree op0, tree op1)
10198 {
10199 enum tree_code_class kind = TREE_CODE_CLASS (code);
10200 tree arg0, arg1, tem;
10201 tree t1 = NULL_TREE;
10202 bool strict_overflow_p;
10203 unsigned int prec;
10204
10205 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10206 && TREE_CODE_LENGTH (code) == 2
10207 && op0 != NULL_TREE
10208 && op1 != NULL_TREE);
10209
10210 arg0 = op0;
10211 arg1 = op1;
10212
10213 /* Strip any conversions that don't change the mode. This is
10214 safe for every expression, except for a comparison expression
10215 because its signedness is derived from its operands. So, in
10216 the latter case, only strip conversions that don't change the
10217 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10218 preserved.
10219
10220 Note that this is done as an internal manipulation within the
10221 constant folder, in order to find the simplest representation
10222 of the arguments so that their form can be studied. In any
10223 cases, the appropriate type conversions should be put back in
10224 the tree that will get out of the constant folder. */
10225
10226 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10227 {
10228 STRIP_SIGN_NOPS (arg0);
10229 STRIP_SIGN_NOPS (arg1);
10230 }
10231 else
10232 {
10233 STRIP_NOPS (arg0);
10234 STRIP_NOPS (arg1);
10235 }
10236
10237 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10238 constant but we can't do arithmetic on them. */
10239 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
10240 {
10241 tem = const_binop (code, type, arg0, arg1);
10242 if (tem != NULL_TREE)
10243 {
10244 if (TREE_TYPE (tem) != type)
10245 tem = fold_convert_loc (loc, type, tem);
10246 return tem;
10247 }
10248 }
10249
10250 /* If this is a commutative operation, and ARG0 is a constant, move it
10251 to ARG1 to reduce the number of tests below. */
10252 if (commutative_tree_code (code)
10253 && tree_swap_operands_p (arg0, arg1))
10254 return fold_build2_loc (loc, code, type, op1, op0);
10255
10256 /* Likewise if this is a comparison, and ARG0 is a constant, move it
10257 to ARG1 to reduce the number of tests below. */
10258 if (kind == tcc_comparison
10259 && tree_swap_operands_p (arg0, arg1))
10260 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
10261
10262 tem = generic_simplify (loc, code, type, op0, op1);
10263 if (tem)
10264 return tem;
10265
10266 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10267
10268 First check for cases where an arithmetic operation is applied to a
10269 compound, conditional, or comparison operation. Push the arithmetic
10270 operation inside the compound or conditional to see if any folding
10271 can then be done. Convert comparison to conditional for this purpose.
10272 The also optimizes non-constant cases that used to be done in
10273 expand_expr.
10274
10275 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10276 one of the operands is a comparison and the other is a comparison, a
10277 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10278 code below would make the expression more complex. Change it to a
10279 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10280 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10281
10282 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10283 || code == EQ_EXPR || code == NE_EXPR)
10284 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
10285 && ((truth_value_p (TREE_CODE (arg0))
10286 && (truth_value_p (TREE_CODE (arg1))
10287 || (TREE_CODE (arg1) == BIT_AND_EXPR
10288 && integer_onep (TREE_OPERAND (arg1, 1)))))
10289 || (truth_value_p (TREE_CODE (arg1))
10290 && (truth_value_p (TREE_CODE (arg0))
10291 || (TREE_CODE (arg0) == BIT_AND_EXPR
10292 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10293 {
10294 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10295 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10296 : TRUTH_XOR_EXPR,
10297 boolean_type_node,
10298 fold_convert_loc (loc, boolean_type_node, arg0),
10299 fold_convert_loc (loc, boolean_type_node, arg1));
10300
10301 if (code == EQ_EXPR)
10302 tem = invert_truthvalue_loc (loc, tem);
10303
10304 return fold_convert_loc (loc, type, tem);
10305 }
10306
10307 if (TREE_CODE_CLASS (code) == tcc_binary
10308 || TREE_CODE_CLASS (code) == tcc_comparison)
10309 {
10310 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10311 {
10312 tem = fold_build2_loc (loc, code, type,
10313 fold_convert_loc (loc, TREE_TYPE (op0),
10314 TREE_OPERAND (arg0, 1)), op1);
10315 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10316 tem);
10317 }
10318 if (TREE_CODE (arg1) == COMPOUND_EXPR)
10319 {
10320 tem = fold_build2_loc (loc, code, type, op0,
10321 fold_convert_loc (loc, TREE_TYPE (op1),
10322 TREE_OPERAND (arg1, 1)));
10323 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10324 tem);
10325 }
10326
10327 if (TREE_CODE (arg0) == COND_EXPR
10328 || TREE_CODE (arg0) == VEC_COND_EXPR
10329 || COMPARISON_CLASS_P (arg0))
10330 {
10331 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10332 arg0, arg1,
10333 /*cond_first_p=*/1);
10334 if (tem != NULL_TREE)
10335 return tem;
10336 }
10337
10338 if (TREE_CODE (arg1) == COND_EXPR
10339 || TREE_CODE (arg1) == VEC_COND_EXPR
10340 || COMPARISON_CLASS_P (arg1))
10341 {
10342 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10343 arg1, arg0,
10344 /*cond_first_p=*/0);
10345 if (tem != NULL_TREE)
10346 return tem;
10347 }
10348 }
10349
10350 switch (code)
10351 {
10352 case MEM_REF:
10353 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10354 if (TREE_CODE (arg0) == ADDR_EXPR
10355 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10356 {
10357 tree iref = TREE_OPERAND (arg0, 0);
10358 return fold_build2 (MEM_REF, type,
10359 TREE_OPERAND (iref, 0),
10360 int_const_binop (PLUS_EXPR, arg1,
10361 TREE_OPERAND (iref, 1)));
10362 }
10363
10364 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10365 if (TREE_CODE (arg0) == ADDR_EXPR
10366 && handled_component_p (TREE_OPERAND (arg0, 0)))
10367 {
10368 tree base;
10369 poly_int64 coffset;
10370 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10371 &coffset);
10372 if (!base)
10373 return NULL_TREE;
10374 return fold_build2 (MEM_REF, type,
10375 build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
10376 int_const_binop (PLUS_EXPR, arg1,
10377 size_int (coffset)));
10378 }
10379
10380 return NULL_TREE;
10381
10382 case POINTER_PLUS_EXPR:
10383 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10384 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10385 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10386 return fold_convert_loc (loc, type,
10387 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10388 fold_convert_loc (loc, sizetype,
10389 arg1),
10390 fold_convert_loc (loc, sizetype,
10391 arg0)));
10392
10393 return NULL_TREE;
10394
10395 case PLUS_EXPR:
10396 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10397 {
10398 /* X + (X / CST) * -CST is X % CST. */
10399 if (TREE_CODE (arg1) == MULT_EXPR
10400 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10401 && operand_equal_p (arg0,
10402 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10403 {
10404 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10405 tree cst1 = TREE_OPERAND (arg1, 1);
10406 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10407 cst1, cst0);
10408 if (sum && integer_zerop (sum))
10409 return fold_convert_loc (loc, type,
10410 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10411 TREE_TYPE (arg0), arg0,
10412 cst0));
10413 }
10414 }
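	  /* Worked example: with CST == 8, the expression
	         x + (x / 8) * -8
	     matches the pattern above (since -8 + 8 == 0) and is folded
	     to x % 8.  */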
10415
10416 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10417 one. Make sure the type is not saturating and has the signedness of
10418 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10419 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10420 if ((TREE_CODE (arg0) == MULT_EXPR
10421 || TREE_CODE (arg1) == MULT_EXPR)
10422 && !TYPE_SATURATING (type)
10423 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10424 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10425 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10426 {
10427 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10428 if (tem)
10429 return tem;
10430 }
10431
10432 if (! FLOAT_TYPE_P (type))
10433 {
10434 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10435 (plus (plus (mult) (mult)) (foo)) so that we can
10436 take advantage of the factoring cases below. */
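	  /* Illustrative example, assuming a wrapping integer type:
	         (a*b + c) + d*e
	     is rewritten below as
	         (a*b + d*e) + c
	     so that the two MULT_EXPRs become adjacent for the factoring
	     cases.  */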
10437 if (ANY_INTEGRAL_TYPE_P (type)
10438 && TYPE_OVERFLOW_WRAPS (type)
10439 && (((TREE_CODE (arg0) == PLUS_EXPR
10440 || TREE_CODE (arg0) == MINUS_EXPR)
10441 && TREE_CODE (arg1) == MULT_EXPR)
10442 || ((TREE_CODE (arg1) == PLUS_EXPR
10443 || TREE_CODE (arg1) == MINUS_EXPR)
10444 && TREE_CODE (arg0) == MULT_EXPR)))
10445 {
10446 tree parg0, parg1, parg, marg;
10447 enum tree_code pcode;
10448
10449 if (TREE_CODE (arg1) == MULT_EXPR)
10450 parg = arg0, marg = arg1;
10451 else
10452 parg = arg1, marg = arg0;
10453 pcode = TREE_CODE (parg);
10454 parg0 = TREE_OPERAND (parg, 0);
10455 parg1 = TREE_OPERAND (parg, 1);
10456 STRIP_NOPS (parg0);
10457 STRIP_NOPS (parg1);
10458
10459 if (TREE_CODE (parg0) == MULT_EXPR
10460 && TREE_CODE (parg1) != MULT_EXPR)
10461 return fold_build2_loc (loc, pcode, type,
10462 fold_build2_loc (loc, PLUS_EXPR, type,
10463 fold_convert_loc (loc, type,
10464 parg0),
10465 fold_convert_loc (loc, type,
10466 marg)),
10467 fold_convert_loc (loc, type, parg1));
10468 if (TREE_CODE (parg0) != MULT_EXPR
10469 && TREE_CODE (parg1) == MULT_EXPR)
10470 return
10471 fold_build2_loc (loc, PLUS_EXPR, type,
10472 fold_convert_loc (loc, type, parg0),
10473 fold_build2_loc (loc, pcode, type,
10474 fold_convert_loc (loc, type, marg),
10475 fold_convert_loc (loc, type,
10476 parg1)));
10477 }
10478 }
10479 else
10480 {
10481 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10482 to __complex__ ( x, y ). This is not the same for SNaNs or
10483 if signed zeros are involved. */
10484 if (!HONOR_SNANS (element_mode (arg0))
10485 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10486 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10487 {
10488 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10489 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10490 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10491 bool arg0rz = false, arg0iz = false;
10492 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10493 || (arg0i && (arg0iz = real_zerop (arg0i))))
10494 {
10495 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10496 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10497 if (arg0rz && arg1i && real_zerop (arg1i))
10498 {
10499 tree rp = arg1r ? arg1r
10500 : build1 (REALPART_EXPR, rtype, arg1);
10501 tree ip = arg0i ? arg0i
10502 : build1 (IMAGPART_EXPR, rtype, arg0);
10503 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10504 }
10505 else if (arg0iz && arg1r && real_zerop (arg1r))
10506 {
10507 tree rp = arg0r ? arg0r
10508 : build1 (REALPART_EXPR, rtype, arg0);
10509 tree ip = arg1i ? arg1i
10510 : build1 (IMAGPART_EXPR, rtype, arg1);
10511 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10512 }
10513 }
10514 }
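	      /* For example, under these conditions
	             __complex__ (x, 0.0) + __complex__ (0.0, y)
	         folds to __complex__ (x, y): the real part comes from the
	         first operand and the imaginary part from the second
	         (illustrative example).  */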
10515
10516 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10517 We associate floats only if the user has specified
10518 -fassociative-math. */
10519 if (flag_associative_math
10520 && TREE_CODE (arg1) == PLUS_EXPR
10521 && TREE_CODE (arg0) != MULT_EXPR)
10522 {
10523 tree tree10 = TREE_OPERAND (arg1, 0);
10524 tree tree11 = TREE_OPERAND (arg1, 1);
10525 if (TREE_CODE (tree11) == MULT_EXPR
10526 && TREE_CODE (tree10) == MULT_EXPR)
10527 {
10528 tree tree0;
10529 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10530 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10531 }
10532 }
10533 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10534 We associate floats only if the user has specified
10535 -fassociative-math. */
10536 if (flag_associative_math
10537 && TREE_CODE (arg0) == PLUS_EXPR
10538 && TREE_CODE (arg1) != MULT_EXPR)
10539 {
10540 tree tree00 = TREE_OPERAND (arg0, 0);
10541 tree tree01 = TREE_OPERAND (arg0, 1);
10542 if (TREE_CODE (tree01) == MULT_EXPR
10543 && TREE_CODE (tree00) == MULT_EXPR)
10544 {
10545 tree tree0;
10546 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10547 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10548 }
10549 }
10550 }
10551
10552 bit_rotate:
10553 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10554 is a rotate of A by C1 bits. */
10555 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10556 is a rotate of A by B bits.
10557 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
10558 though in this case CODE must be | and not + or ^, otherwise
10559 it doesn't return A when B is 0. */
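  /* Worked example, assuming a 32-bit unsigned A:
         (A << 3) + (A >> 29)
     satisfies 3 + 29 == 32 and is therefore turned into an
     LROTATE_EXPR of A by 3 bits below.  */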
10560 {
10561 enum tree_code code0, code1;
10562 tree rtype;
10563 code0 = TREE_CODE (arg0);
10564 code1 = TREE_CODE (arg1);
10565 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10566 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10567 && operand_equal_p (TREE_OPERAND (arg0, 0),
10568 TREE_OPERAND (arg1, 0), 0)
10569 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10570 TYPE_UNSIGNED (rtype))
10571 /* Only create rotates in complete modes. Other cases are not
10572 expanded properly. */
10573 && (element_precision (rtype)
10574 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
10575 {
10576 tree tree01, tree11;
10577 tree orig_tree01, orig_tree11;
10578 enum tree_code code01, code11;
10579
10580 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
10581 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
10582 STRIP_NOPS (tree01);
10583 STRIP_NOPS (tree11);
10584 code01 = TREE_CODE (tree01);
10585 code11 = TREE_CODE (tree11);
10586 if (code11 != MINUS_EXPR
10587 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
10588 {
10589 std::swap (code0, code1);
10590 std::swap (code01, code11);
10591 std::swap (tree01, tree11);
10592 std::swap (orig_tree01, orig_tree11);
10593 }
10594 if (code01 == INTEGER_CST
10595 && code11 == INTEGER_CST
10596 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10597 == element_precision (rtype)))
10598 {
10599 tem = build2_loc (loc, LROTATE_EXPR,
10600 rtype, TREE_OPERAND (arg0, 0),
10601 code0 == LSHIFT_EXPR
10602 ? orig_tree01 : orig_tree11);
10603 return fold_convert_loc (loc, type, tem);
10604 }
10605 else if (code11 == MINUS_EXPR)
10606 {
10607 tree tree110, tree111;
10608 tree110 = TREE_OPERAND (tree11, 0);
10609 tree111 = TREE_OPERAND (tree11, 1);
10610 STRIP_NOPS (tree110);
10611 STRIP_NOPS (tree111);
10612 if (TREE_CODE (tree110) == INTEGER_CST
10613 && compare_tree_int (tree110,
10614 element_precision (rtype)) == 0
10615 && operand_equal_p (tree01, tree111, 0))
10616 {
10617 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
10618 ? LROTATE_EXPR : RROTATE_EXPR),
10619 rtype, TREE_OPERAND (arg0, 0),
10620 orig_tree01);
10621 return fold_convert_loc (loc, type, tem);
10622 }
10623 }
10624 else if (code == BIT_IOR_EXPR
10625 && code11 == BIT_AND_EXPR
10626 && pow2p_hwi (element_precision (rtype)))
10627 {
10628 tree tree110, tree111;
10629 tree110 = TREE_OPERAND (tree11, 0);
10630 tree111 = TREE_OPERAND (tree11, 1);
10631 STRIP_NOPS (tree110);
10632 STRIP_NOPS (tree111);
10633 if (TREE_CODE (tree110) == NEGATE_EXPR
10634 && TREE_CODE (tree111) == INTEGER_CST
10635 && compare_tree_int (tree111,
10636 element_precision (rtype) - 1) == 0
10637 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
10638 {
10639 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
10640 ? LROTATE_EXPR : RROTATE_EXPR),
10641 rtype, TREE_OPERAND (arg0, 0),
10642 orig_tree01);
10643 return fold_convert_loc (loc, type, tem);
10644 }
10645 }
10646 }
10647 }
10648
10649 associate:
10650 /* In most languages, we can't associate operations on floats through
10651 parentheses. Rather than remember where the parentheses were, we
10652 don't associate floats at all, unless the user has specified
10653 -fassociative-math.
10654 And, we need to make sure type is not saturating. */
10655
10656 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10657 && !TYPE_SATURATING (type))
10658 {
10659 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
10660 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
10661 tree atype = type;
10662 bool ok = true;
10663
10664 /* Split both trees into variables, constants, and literals. Then
10665 associate each group together, the constants with literals,
10666 then the result with variables. This increases the chances of
10667 literals being recombined later and of generating relocatable
10668 expressions for the sum of a constant and literal. */
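	  /* Illustrative example, assuming a wrapping type such as
	     unsigned int: for (x + 3) + (y + 5) the calls below yield
	     var0 = x, lit0 = 3, var1 = y, lit1 = 5; the literals are
	     combined first, so the whole expression is folded to
	     (x + y) + 8.  */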
10669 var0 = split_tree (arg0, type, code,
10670 &minus_var0, &con0, &minus_con0,
10671 &lit0, &minus_lit0, 0);
10672 var1 = split_tree (arg1, type, code,
10673 &minus_var1, &con1, &minus_con1,
10674 &lit1, &minus_lit1, code == MINUS_EXPR);
10675
10676 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10677 if (code == MINUS_EXPR)
10678 code = PLUS_EXPR;
10679
10680 /* With undefined overflow prefer doing association in a type
10681 which wraps on overflow, if that is one of the operand types. */
10682 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
10683 && !TYPE_OVERFLOW_WRAPS (type))
10684 {
10685 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10686 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10687 atype = TREE_TYPE (arg0);
10688 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10689 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10690 atype = TREE_TYPE (arg1);
10691 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10692 }
10693
10694 /* With undefined overflow we can only associate constants with one
10695 variable, and constants whose association doesn't overflow. */
10696 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
10697 && !TYPE_OVERFLOW_WRAPS (atype))
10698 {
10699 if ((var0 && var1) || (minus_var0 && minus_var1))
10700 {
10701 /* ??? If split_tree would handle NEGATE_EXPR we could
10702 simply reject these cases and the allowed cases would
10703 be the var0/minus_var1 ones. */
10704 tree tmp0 = var0 ? var0 : minus_var0;
10705 tree tmp1 = var1 ? var1 : minus_var1;
10706 bool one_neg = false;
10707
10708 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10709 {
10710 tmp0 = TREE_OPERAND (tmp0, 0);
10711 one_neg = !one_neg;
10712 }
10713 if (CONVERT_EXPR_P (tmp0)
10714 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10715 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10716 <= TYPE_PRECISION (atype)))
10717 tmp0 = TREE_OPERAND (tmp0, 0);
10718 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10719 {
10720 tmp1 = TREE_OPERAND (tmp1, 0);
10721 one_neg = !one_neg;
10722 }
10723 if (CONVERT_EXPR_P (tmp1)
10724 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10725 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10726 <= TYPE_PRECISION (atype)))
10727 tmp1 = TREE_OPERAND (tmp1, 0);
10728 /* The only case we can still associate with two variables
10729 is if they cancel out. */
10730 if (!one_neg
10731 || !operand_equal_p (tmp0, tmp1, 0))
10732 ok = false;
10733 }
10734 else if ((var0 && minus_var1
10735 && ! operand_equal_p (var0, minus_var1, 0))
10736 || (minus_var0 && var1
10737 && ! operand_equal_p (minus_var0, var1, 0)))
10738 ok = false;
10739 }
10740
10741 /* Only do something if we found more than two objects. Otherwise,
10742 nothing has changed and we risk infinite recursion. */
10743 if (ok
10744 && ((var0 != 0) + (var1 != 0)
10745 + (minus_var0 != 0) + (minus_var1 != 0)
10746 + (con0 != 0) + (con1 != 0)
10747 + (minus_con0 != 0) + (minus_con1 != 0)
10748 + (lit0 != 0) + (lit1 != 0)
10749 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
10750 {
10751 var0 = associate_trees (loc, var0, var1, code, atype);
10752 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
10753 code, atype);
10754 con0 = associate_trees (loc, con0, con1, code, atype);
10755 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
10756 code, atype);
10757 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10758 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10759 code, atype);
10760
10761 if (minus_var0 && var0)
10762 {
10763 var0 = associate_trees (loc, var0, minus_var0,
10764 MINUS_EXPR, atype);
10765 minus_var0 = 0;
10766 }
10767 if (minus_con0 && con0)
10768 {
10769 con0 = associate_trees (loc, con0, minus_con0,
10770 MINUS_EXPR, atype);
10771 minus_con0 = 0;
10772 }
10773
10774 /* Preserve the MINUS_EXPR if the negative part of the literal is
10775 greater than the positive part. Otherwise, the multiplicative
10776 folding code (i.e extract_muldiv) may be fooled in case
10777 unsigned constants are subtracted, like in the following
10778 example: ((X*2 + 4) - 8U)/2. */
10779 if (minus_lit0 && lit0)
10780 {
10781 if (TREE_CODE (lit0) == INTEGER_CST
10782 && TREE_CODE (minus_lit0) == INTEGER_CST
10783 && tree_int_cst_lt (lit0, minus_lit0)
10784 /* But avoid ending up with only negated parts. */
10785 && (var0 || con0))
10786 {
10787 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10788 MINUS_EXPR, atype);
10789 lit0 = 0;
10790 }
10791 else
10792 {
10793 lit0 = associate_trees (loc, lit0, minus_lit0,
10794 MINUS_EXPR, atype);
10795 minus_lit0 = 0;
10796 }
10797 }
10798
10799 /* Don't introduce overflows through reassociation. */
10800 if ((lit0 && TREE_OVERFLOW_P (lit0))
10801 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
10802 return NULL_TREE;
10803
10804 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
10805 con0 = associate_trees (loc, con0, lit0, code, atype);
10806 lit0 = 0;
10807 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
10808 code, atype);
10809 minus_lit0 = 0;
10810
10811 /* Eliminate minus_con0. */
10812 if (minus_con0)
10813 {
10814 if (con0)
10815 con0 = associate_trees (loc, con0, minus_con0,
10816 MINUS_EXPR, atype);
10817 else if (var0)
10818 var0 = associate_trees (loc, var0, minus_con0,
10819 MINUS_EXPR, atype);
10820 else
10821 gcc_unreachable ();
10822 minus_con0 = 0;
10823 }
10824
10825 /* Eliminate minus_var0. */
10826 if (minus_var0)
10827 {
10828 if (con0)
10829 con0 = associate_trees (loc, con0, minus_var0,
10830 MINUS_EXPR, atype);
10831 else
10832 gcc_unreachable ();
10833 minus_var0 = 0;
10834 }
10835
10836 return
10837 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10838 code, atype));
10839 }
10840 }
10841
10842 return NULL_TREE;
10843
10844 case POINTER_DIFF_EXPR:
10845 case MINUS_EXPR:
10846 /* Fold &a[i] - &a[j] to i-j. */
10847 if (TREE_CODE (arg0) == ADDR_EXPR
10848 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10849 && TREE_CODE (arg1) == ADDR_EXPR
10850 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10851 {
10852 tree tem = fold_addr_of_array_ref_difference (loc, type,
10853 TREE_OPERAND (arg0, 0),
10854 TREE_OPERAND (arg1, 0),
10855 code
10856 == POINTER_DIFF_EXPR);
10857 if (tem)
10858 return tem;
10859 }
10860
10861 /* Further transformations are not for pointers. */
10862 if (code == POINTER_DIFF_EXPR)
10863 return NULL_TREE;
10864
10865 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10866 if (TREE_CODE (arg0) == NEGATE_EXPR
10867 && negate_expr_p (op1)
10868 /* If arg0 is e.g. unsigned int and type is int, then this could
10869 introduce UB, because if A is INT_MIN at runtime, the original
10870 expression can be well defined while the latter is not.
10871 See PR83269. */
10872 && !(ANY_INTEGRAL_TYPE_P (type)
10873 && TYPE_OVERFLOW_UNDEFINED (type)
10874 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10875 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10876 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
10877 fold_convert_loc (loc, type,
10878 TREE_OPERAND (arg0, 0)));
10879
10880 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10881 __complex__ ( x, -y ). This is not the same for SNaNs or if
10882 signed zeros are involved. */
10883 if (!HONOR_SNANS (element_mode (arg0))
10884 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10885 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10886 {
10887 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10888 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10889 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10890 bool arg0rz = false, arg0iz = false;
10891 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10892 || (arg0i && (arg0iz = real_zerop (arg0i))))
10893 {
10894 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10895 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10896 if (arg0rz && arg1i && real_zerop (arg1i))
10897 {
10898 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10899 arg1r ? arg1r
10900 : build1 (REALPART_EXPR, rtype, arg1));
10901 tree ip = arg0i ? arg0i
10902 : build1 (IMAGPART_EXPR, rtype, arg0);
10903 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10904 }
10905 else if (arg0iz && arg1r && real_zerop (arg1r))
10906 {
10907 tree rp = arg0r ? arg0r
10908 : build1 (REALPART_EXPR, rtype, arg0);
10909 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10910 arg1i ? arg1i
10911 : build1 (IMAGPART_EXPR, rtype, arg1));
10912 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10913 }
10914 }
10915 }
10916
10917 /* A - B -> A + (-B) if B is easily negatable. */
10918 if (negate_expr_p (op1)
10919 && ! TYPE_OVERFLOW_SANITIZED (type)
10920 && ((FLOAT_TYPE_P (type)
10921 /* Avoid this transformation if B is a positive REAL_CST. */
10922 && (TREE_CODE (op1) != REAL_CST
10923 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
10924 || INTEGRAL_TYPE_P (type)))
10925 return fold_build2_loc (loc, PLUS_EXPR, type,
10926 fold_convert_loc (loc, type, arg0),
10927 negate_expr (op1));
10928
10929 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10930 one. Make sure the type is not saturating and has the signedness of
10931 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10932 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10933 if ((TREE_CODE (arg0) == MULT_EXPR
10934 || TREE_CODE (arg1) == MULT_EXPR)
10935 && !TYPE_SATURATING (type)
10936 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10937 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10938 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10939 {
10940 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10941 if (tem)
10942 return tem;
10943 }
10944
10945 goto associate;
10946
10947 case MULT_EXPR:
10948 if (! FLOAT_TYPE_P (type))
10949 {
10950 /* Transform x * -C into -x * C if x is easily negatable. */
10951 if (TREE_CODE (op1) == INTEGER_CST
10952 && tree_int_cst_sgn (op1) == -1
10953 && negate_expr_p (op0)
10954 && negate_expr_p (op1)
10955 && (tem = negate_expr (op1)) != op1
10956 && ! TREE_OVERFLOW (tem))
10957 return fold_build2_loc (loc, MULT_EXPR, type,
10958 fold_convert_loc (loc, type,
10959 negate_expr (op0)), tem);
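	  /* For example, (-y) * -5 matches here (op0 = -y is easily
	     negated, op1 = -5 is a negative constant) and is rewritten
	     as y * 5 (illustrative example).  */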
10960
10961 strict_overflow_p = false;
10962 if (TREE_CODE (arg1) == INTEGER_CST
10963 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10964 &strict_overflow_p)) != 0)
10965 {
10966 if (strict_overflow_p)
10967 fold_overflow_warning (("assuming signed overflow does not "
10968 "occur when simplifying "
10969 "multiplication"),
10970 WARN_STRICT_OVERFLOW_MISC);
10971 return fold_convert_loc (loc, type, tem);
10972 }
10973
10974 /* Optimize z * conj(z) for integer complex numbers. */
10975 if (TREE_CODE (arg0) == CONJ_EXPR
10976 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10977 return fold_mult_zconjz (loc, type, arg1);
10978 if (TREE_CODE (arg1) == CONJ_EXPR
10979 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10980 return fold_mult_zconjz (loc, type, arg0);
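	  /* Roughly, fold_mult_zconjz rewrites z * conj(z) as
	     __complex__ (__real z * __real z + __imag z * __imag z, 0),
	     i.e. the |z|^2 identity (illustrative note).  */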
10981 }
10982 else
10983 {
10984 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10985 This is not the same for NaNs or if signed zeros are
10986 involved. */
10987 if (!HONOR_NANS (arg0)
10988 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10989 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10990 && TREE_CODE (arg1) == COMPLEX_CST
10991 && real_zerop (TREE_REALPART (arg1)))
10992 {
10993 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10994 if (real_onep (TREE_IMAGPART (arg1)))
10995 return
10996 fold_build2_loc (loc, COMPLEX_EXPR, type,
10997 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10998 rtype, arg0)),
10999 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11000 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11001 return
11002 fold_build2_loc (loc, COMPLEX_EXPR, type,
11003 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11004 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11005 rtype, arg0)));
11006 }
11007
11008 /* Optimize z * conj(z) for floating point complex numbers.
11009 Guarded by flag_unsafe_math_optimizations as non-finite
11010 imaginary components don't produce scalar results. */
11011 if (flag_unsafe_math_optimizations
11012 && TREE_CODE (arg0) == CONJ_EXPR
11013 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11014 return fold_mult_zconjz (loc, type, arg1);
11015 if (flag_unsafe_math_optimizations
11016 && TREE_CODE (arg1) == CONJ_EXPR
11017 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11018 return fold_mult_zconjz (loc, type, arg0);
11019 }
11020 goto associate;
11021
11022 case BIT_IOR_EXPR:
11023 /* Canonicalize (X & C1) | C2. */
11024 if (TREE_CODE (arg0) == BIT_AND_EXPR
11025 && TREE_CODE (arg1) == INTEGER_CST
11026 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11027 {
11028 int width = TYPE_PRECISION (type), w;
11029 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
11030 wide_int c2 = wi::to_wide (arg1);
11031
11032 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11033 if ((c1 & c2) == c1)
11034 return omit_one_operand_loc (loc, type, arg1,
11035 TREE_OPERAND (arg0, 0));
11036
11037 wide_int msk = wi::mask (width, false,
11038 TYPE_PRECISION (TREE_TYPE (arg1)));
11039
11040 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11041 if (wi::bit_and_not (msk, c1 | c2) == 0)
11042 {
11043 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11044 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11045 }
11046
11047 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11048 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11049 mode which allows further optimizations. */
11050 c1 &= msk;
11051 c2 &= msk;
11052 wide_int c3 = wi::bit_and_not (c1, c2);
11053 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11054 {
11055 wide_int mask = wi::mask (w, false,
11056 TYPE_PRECISION (type));
11057 if (((c1 | c2) & mask) == mask
11058 && wi::bit_and_not (c1, mask) == 0)
11059 {
11060 c3 = mask;
11061 break;
11062 }
11063 }
11064
11065 if (c3 != c1)
11066 {
11067 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11068 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
11069 wide_int_to_tree (type, c3));
11070 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11071 }
11072 }
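	  /* Worked examples, assuming an 8-bit unsigned type:
	         (X & 0x0f) | 0xff -> 0xff                  (C1 & C2 == C1)
	         (X & 0xf0) | 0x0f -> X | 0x0f              (C1 | C2 == ~0)
	         (X & 0x3f) | 0x0f -> (X & 0x30) | 0x0f     (C1 &= ~C2).  */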
11073
11074 /* See if this can be simplified into a rotate first. If that
11075 is unsuccessful continue in the association code. */
11076 goto bit_rotate;
11077
11078 case BIT_XOR_EXPR:
11079 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11080 if (TREE_CODE (arg0) == BIT_AND_EXPR
11081 && INTEGRAL_TYPE_P (type)
11082 && integer_onep (TREE_OPERAND (arg0, 1))
11083 && integer_onep (arg1))
11084 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11085 build_zero_cst (TREE_TYPE (arg0)));
11086
11087 /* See if this can be simplified into a rotate first. If that
11088 is unsuccessful continue in the association code. */
11089 goto bit_rotate;
11090
11091 case BIT_AND_EXPR:
11092 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11093 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11094 && INTEGRAL_TYPE_P (type)
11095 && integer_onep (TREE_OPERAND (arg0, 1))
11096 && integer_onep (arg1))
11097 {
11098 tree tem2;
11099 tem = TREE_OPERAND (arg0, 0);
11100 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11101 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11102 tem, tem2);
11103 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11104 build_zero_cst (TREE_TYPE (tem)));
11105 }
11106 /* Fold ~X & 1 as (X & 1) == 0. */
11107 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11108 && INTEGRAL_TYPE_P (type)
11109 && integer_onep (arg1))
11110 {
11111 tree tem2;
11112 tem = TREE_OPERAND (arg0, 0);
11113 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11114 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11115 tem, tem2);
11116 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11117 build_zero_cst (TREE_TYPE (tem)));
11118 }
11119 /* Fold !X & 1 as X == 0. */
11120 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11121 && integer_onep (arg1))
11122 {
11123 tem = TREE_OPERAND (arg0, 0);
11124 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11125 build_zero_cst (TREE_TYPE (tem)));
11126 }
11127
11128 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11129 multiple of 1 << CST. */
11130 if (TREE_CODE (arg1) == INTEGER_CST)
11131 {
11132 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11133 wide_int ncst1 = -cst1;
11134 if ((cst1 & ncst1) == ncst1
11135 && multiple_of_p (type, arg0,
11136 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11137 return fold_convert_loc (loc, type, arg0);
11138 }
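	  /* For example, (x * 8) & -4 folds to x * 8: x * 8 is always a
	     multiple of 4, so the mask -4 only clears bits that are
	     already zero (illustrative example).  */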
11139
11140 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11141 bits from CST2. */
11142 if (TREE_CODE (arg1) == INTEGER_CST
11143 && TREE_CODE (arg0) == MULT_EXPR
11144 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11145 {
11146 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
11147 wide_int masked
11148 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
11149
11150 if (masked == 0)
11151 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11152 arg0, arg1);
11153 else if (masked != warg1)
11154 {
11155 /* Avoid the transform if arg1 is a mask of some
11156 mode which allows further optimizations. */
11157 int pop = wi::popcount (warg1);
11158 if (!(pop >= BITS_PER_UNIT
11159 && pow2p_hwi (pop)
11160 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11161 return fold_build2_loc (loc, code, type, op0,
11162 wide_int_to_tree (type, masked));
11163 }
11164 }
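	  /* For example, (x * 16) & 7 folds to 0 outright, while
	     (x * 4) & 0xd drops the known-zero low bits and becomes
	     (x * 4) & 0xc (illustrative examples).  */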
11165
11166 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11167 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11168 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11169 {
11170 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11171
11172 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
11173 if (mask == -1)
11174 return
11175 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11176 }
11177
11178 goto associate;
11179
11180 case RDIV_EXPR:
11181 /* Don't touch a floating-point divide by zero unless the mode
11182 of the constant can represent infinity. */
11183 if (TREE_CODE (arg1) == REAL_CST
11184 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11185 && real_zerop (arg1))
11186 return NULL_TREE;
11187
11188 /* (-A) / (-B) -> A / B */
11189 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11190 return fold_build2_loc (loc, RDIV_EXPR, type,
11191 TREE_OPERAND (arg0, 0),
11192 negate_expr (arg1));
11193 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11194 return fold_build2_loc (loc, RDIV_EXPR, type,
11195 negate_expr (arg0),
11196 TREE_OPERAND (arg1, 0));
11197 return NULL_TREE;
11198
11199 case TRUNC_DIV_EXPR:
11200 /* Fall through */
11201
11202 case FLOOR_DIV_EXPR:
11203 /* Simplify A / (B << N) where A and B are positive and B is
11204 a power of 2, to A >> (N + log2(B)). */
11205 strict_overflow_p = false;
11206 if (TREE_CODE (arg1) == LSHIFT_EXPR
11207 && (TYPE_UNSIGNED (type)
11208 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11209 {
11210 tree sval = TREE_OPERAND (arg1, 0);
11211 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11212 {
11213 tree sh_cnt = TREE_OPERAND (arg1, 1);
11214 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11215 wi::exact_log2 (wi::to_wide (sval)));
11216
11217 if (strict_overflow_p)
11218 fold_overflow_warning (("assuming signed overflow does not "
11219 "occur when simplifying A / (B << N)"),
11220 WARN_STRICT_OVERFLOW_MISC);
11221
11222 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11223 sh_cnt, pow2);
11224 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11225 fold_convert_loc (loc, type, arg0), sh_cnt);
11226 }
11227 }
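	  /* For example, for unsigned x, x / (4U << n) matches here with
	     B == 4 and is rewritten as x >> (n + 2), since log2 (4) == 2
	     (illustrative example).  */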
11228
11229 /* Fall through */
11230
11231 case ROUND_DIV_EXPR:
11232 case CEIL_DIV_EXPR:
11233 case EXACT_DIV_EXPR:
11234 if (integer_zerop (arg1))
11235 return NULL_TREE;
11236
11237 /* Convert -A / -B to A / B when the type is signed and overflow is
11238 undefined. */
11239 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11240 && TREE_CODE (op0) == NEGATE_EXPR
11241 && negate_expr_p (op1))
11242 {
11243 if (ANY_INTEGRAL_TYPE_P (type))
11244 fold_overflow_warning (("assuming signed overflow does not occur "
11245 "when distributing negation across "
11246 "division"),
11247 WARN_STRICT_OVERFLOW_MISC);
11248 return fold_build2_loc (loc, code, type,
11249 fold_convert_loc (loc, type,
11250 TREE_OPERAND (arg0, 0)),
11251 negate_expr (op1));
11252 }
11253 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11254 && TREE_CODE (arg1) == NEGATE_EXPR
11255 && negate_expr_p (op0))
11256 {
11257 if (ANY_INTEGRAL_TYPE_P (type))
11258 fold_overflow_warning (("assuming signed overflow does not occur "
11259 "when distributing negation across "
11260 "division"),
11261 WARN_STRICT_OVERFLOW_MISC);
11262 return fold_build2_loc (loc, code, type,
11263 negate_expr (op0),
11264 fold_convert_loc (loc, type,
11265 TREE_OPERAND (arg1, 0)));
11266 }
11267
11268 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11269 operation, EXACT_DIV_EXPR.
11270
11271 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11272 At one time others generated faster code; it's not clear whether they
11273 still do after the last round of changes to the DIV code in expmed.c. */
11274 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11275 && multiple_of_p (type, arg0, arg1))
11276 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
11277 fold_convert (type, arg0),
11278 fold_convert (type, arg1));
11279
11280 strict_overflow_p = false;
11281 if (TREE_CODE (arg1) == INTEGER_CST
11282 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11283 &strict_overflow_p)) != 0)
11284 {
11285 if (strict_overflow_p)
11286 fold_overflow_warning (("assuming signed overflow does not occur "
11287 "when simplifying division"),
11288 WARN_STRICT_OVERFLOW_MISC);
11289 return fold_convert_loc (loc, type, tem);
11290 }
11291
11292 return NULL_TREE;
11293
11294 case CEIL_MOD_EXPR:
11295 case FLOOR_MOD_EXPR:
11296 case ROUND_MOD_EXPR:
11297 case TRUNC_MOD_EXPR:
11298 strict_overflow_p = false;
11299 if (TREE_CODE (arg1) == INTEGER_CST
11300 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11301 &strict_overflow_p)) != 0)
11302 {
11303 if (strict_overflow_p)
11304 fold_overflow_warning (("assuming signed overflow does not occur "
11305 "when simplifying modulus"),
11306 WARN_STRICT_OVERFLOW_MISC);
11307 return fold_convert_loc (loc, type, tem);
11308 }
11309
11310 return NULL_TREE;
11311
11312 case LROTATE_EXPR:
11313 case RROTATE_EXPR:
11314 case RSHIFT_EXPR:
11315 case LSHIFT_EXPR:
11316 /* Since a negative shift count is not well-defined,
11317 don't try to compute it in the compiler. */
11318 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11319 return NULL_TREE;
11320
11321 prec = element_precision (type);
11322
11323 /* If we have a rotate of a bit operation with the rotate count and
11324 the second operand of the bit operation both constant,
11325 permute the two operations. */
11326 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11327 && (TREE_CODE (arg0) == BIT_AND_EXPR
11328 || TREE_CODE (arg0) == BIT_IOR_EXPR
11329 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11330 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11331 {
11332 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11333 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11334 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11335 fold_build2_loc (loc, code, type,
11336 arg00, arg1),
11337 fold_build2_loc (loc, code, type,
11338 arg01, arg1));
11339 }
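	  /* For example, on an 8-bit unsigned type, (X | 0xf0) rrotate 4
	     is rewritten as (X rrotate 4) | 0x0f: the constant operand is
	     rotated at compile time (illustrative example).  */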
11340
11341 /* Two consecutive rotates adding up to some integer
11342 multiple of the precision of the type can be ignored. */
11343 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11344 && TREE_CODE (arg0) == RROTATE_EXPR
11345 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11346 && wi::umod_trunc (wi::to_wide (arg1)
11347 + wi::to_wide (TREE_OPERAND (arg0, 1)),
11348 prec) == 0)
11349 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
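      /* For example, on a 32-bit type, (X rrotate 5) rrotate 27 is simply
	 X, because 5 + 27 is a multiple of the precision (illustrative
	 example).  */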
11350
11351 return NULL_TREE;
11352
11353 case MIN_EXPR:
11354 case MAX_EXPR:
11355 goto associate;
11356
11357 case TRUTH_ANDIF_EXPR:
11358 /* Note that the operands of this must be ints
11359 and their values must be 0 or 1.
11360 ("true" is a fixed value perhaps depending on the language.) */
11361 /* If first arg is constant zero, return it. */
11362 if (integer_zerop (arg0))
11363 return fold_convert_loc (loc, type, arg0);
11364 /* FALLTHRU */
11365 case TRUTH_AND_EXPR:
11366 /* If either arg is constant true, drop it. */
11367 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11368 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11369 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11370 /* Preserve sequence points. */
11371 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11372 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11373 /* If second arg is constant zero, result is zero, but first arg
11374 must be evaluated. */
11375 if (integer_zerop (arg1))
11376 return omit_one_operand_loc (loc, type, arg1, arg0);
11377 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11378 case will be handled here. */
11379 if (integer_zerop (arg0))
11380 return omit_one_operand_loc (loc, type, arg0, arg1);
11381
11382 /* !X && X is always false. */
11383 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11384 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11385 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11386 /* X && !X is always false. */
11387 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11388 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11389 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11390
11391 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11392 means A >= Y && A != MAX, but in this case we know that
11393 A < X <= MAX. */
11394
11395 if (!TREE_SIDE_EFFECTS (arg0)
11396 && !TREE_SIDE_EFFECTS (arg1))
11397 {
11398 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11399 if (tem && !operand_equal_p (tem, arg0, 0))
11400 return fold_build2_loc (loc, code, type, tem, arg1);
11401
11402 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11403 if (tem && !operand_equal_p (tem, arg1, 0))
11404 return fold_build2_loc (loc, code, type, arg0, tem);
11405 }
11406
11407 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11408 != NULL_TREE)
11409 return tem;
11410
11411 return NULL_TREE;
11412
11413 case TRUTH_ORIF_EXPR:
11414 /* Note that the operands of this must be ints
11415 and their values must be 0 or true.
11416 ("true" is a fixed value perhaps depending on the language.) */
11417 /* If first arg is constant true, return it. */
11418 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11419 return fold_convert_loc (loc, type, arg0);
11420 /* FALLTHRU */
11421 case TRUTH_OR_EXPR:
11422 /* If either arg is constant zero, drop it. */
11423 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11424 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11425 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11426 /* Preserve sequence points. */
11427 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11428 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11429 /* If second arg is constant true, result is true, but we must
11430 evaluate first arg. */
11431 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11432 return omit_one_operand_loc (loc, type, arg1, arg0);
11433 /* Likewise for first arg, but note this only occurs here for
11434 TRUTH_OR_EXPR. */
11435 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11436 return omit_one_operand_loc (loc, type, arg0, arg1);
11437
11438 /* !X || X is always true. */
11439 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11440 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11441 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11442 /* X || !X is always true. */
11443 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11444 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11445 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11446
11447 /* (X && !Y) || (!X && Y) is X ^ Y */
11448 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
11449 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
11450 {
11451 tree a0, a1, l0, l1, n0, n1;
11452
11453 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11454 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11455
11456 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11457 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11458
11459 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
11460 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
11461
11462 if ((operand_equal_p (n0, a0, 0)
11463 && operand_equal_p (n1, a1, 0))
11464 || (operand_equal_p (n0, a1, 0)
11465 && operand_equal_p (n1, a0, 0)))
11466 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
11467 }
11468
11469 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11470 != NULL_TREE)
11471 return tem;
11472
11473 return NULL_TREE;
11474
11475 case TRUTH_XOR_EXPR:
11476 /* If the second arg is constant zero, drop it. */
11477 if (integer_zerop (arg1))
11478 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11479 /* If the second arg is constant true, this is a logical inversion. */
11480 if (integer_onep (arg1))
11481 {
11482 tem = invert_truthvalue_loc (loc, arg0);
11483 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
11484 }
11485 /* Identical arguments cancel to zero. */
11486 if (operand_equal_p (arg0, arg1, 0))
11487 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11488
11489 /* !X ^ X is always true. */
11490 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11491 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11492 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11493
11494 /* X ^ !X is always true. */
11495 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11496 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11497 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11498
11499 return NULL_TREE;
11500
11501 case EQ_EXPR:
11502 case NE_EXPR:
11503 STRIP_NOPS (arg0);
11504 STRIP_NOPS (arg1);
11505
11506 tem = fold_comparison (loc, code, type, op0, op1);
11507 if (tem != NULL_TREE)
11508 return tem;
11509
11510 /* bool_var != 1 becomes !bool_var. */
11511 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11512 && code == NE_EXPR)
11513 return fold_convert_loc (loc, type,
11514 fold_build1_loc (loc, TRUTH_NOT_EXPR,
11515 TREE_TYPE (arg0), arg0));
11516
11517 /* bool_var == 0 becomes !bool_var. */
11518 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11519 && code == EQ_EXPR)
11520 return fold_convert_loc (loc, type,
11521 fold_build1_loc (loc, TRUTH_NOT_EXPR,
11522 TREE_TYPE (arg0), arg0));
11523
11524 /* !exp != 0 becomes !exp */
11525 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
11526 && code == NE_EXPR)
11527 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11528
11529 /* If this is an EQ or NE comparison with zero and ARG0 is
11530 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11531 two operations, but the latter can be done in one less insn
11532 on machines that have only two-operand insns or on which a
11533 constant cannot be the first operand. */
11534 if (TREE_CODE (arg0) == BIT_AND_EXPR
11535 && integer_zerop (arg1))
11536 {
11537 tree arg00 = TREE_OPERAND (arg0, 0);
11538 tree arg01 = TREE_OPERAND (arg0, 1);
11539 if (TREE_CODE (arg00) == LSHIFT_EXPR
11540 && integer_onep (TREE_OPERAND (arg00, 0)))
11541 {
11542 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
11543 arg01, TREE_OPERAND (arg00, 1));
11544 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11545 build_one_cst (TREE_TYPE (arg0)));
11546 return fold_build2_loc (loc, code, type,
11547 fold_convert_loc (loc, TREE_TYPE (arg1),
11548 tem), arg1);
11549 }
11550 else if (TREE_CODE (arg01) == LSHIFT_EXPR
11551 && integer_onep (TREE_OPERAND (arg01, 0)))
11552 {
11553 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
11554 arg00, TREE_OPERAND (arg01, 1));
11555 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11556 build_one_cst (TREE_TYPE (arg0)));
11557 return fold_build2_loc (loc, code, type,
11558 fold_convert_loc (loc, TREE_TYPE (arg1),
11559 tem), arg1);
11560 }
11561 }
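	  /* For example, ((1 << n) & flags) == 0 is rewritten here as
	     ((flags >> n) & 1) == 0 (illustrative example).  */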
11562
11563 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11564 C1 is a valid shift constant, and C2 is a power of two, i.e.
11565 a single bit. */
11566 if (TREE_CODE (arg0) == BIT_AND_EXPR
11567 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11568 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11569 == INTEGER_CST
11570 && integer_pow2p (TREE_OPERAND (arg0, 1))
11571 && integer_zerop (arg1))
11572 {
11573 tree itype = TREE_TYPE (arg0);
11574 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11575 prec = TYPE_PRECISION (itype);
11576
11577 /* Check for a valid shift count. */
11578 if (wi::ltu_p (wi::to_wide (arg001), prec))
11579 {
11580 tree arg01 = TREE_OPERAND (arg0, 1);
11581 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11582 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11583 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11584 can be rewritten as (X & (C2 << C1)) != 0. */
11585 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11586 {
11587 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
11588 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
11589 return fold_build2_loc (loc, code, type, tem,
11590 fold_convert_loc (loc, itype, arg1));
11591 }
11592 /* Otherwise, for signed (arithmetic) shifts,
11593 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11594 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11595 else if (!TYPE_UNSIGNED (itype))
11596 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11597 arg000, build_int_cst (itype, 0));
11598 /* Otherwise, for unsigned (logical) shifts,
11599 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11600 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11601 else
11602 return omit_one_operand_loc (loc, type,
11603 code == EQ_EXPR ? integer_one_node
11604 : integer_zero_node,
11605 arg000);
11606 }
11607 }
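	  /* Worked examples for a 32-bit int x: ((x >> 3) & 4) != 0
	     becomes (x & 32) != 0, while ((x >> 31) & 2) != 0 cannot use
	     a mask test (2 << 31 would overflow) and, x being signed, is
	     reduced to x < 0 (illustrative examples).  */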
11608
11609 /* If this is a comparison of a field, we may be able to simplify it. */
11610 if ((TREE_CODE (arg0) == COMPONENT_REF
11611 || TREE_CODE (arg0) == BIT_FIELD_REF)
11612 /* Handle the constant case even without -O
11613 to make sure the warnings are given. */
11614 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11615 {
11616 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
11617 if (t1)
11618 return t1;
11619 }
11620
11621 /* Optimize comparisons of strlen vs zero to a compare of the
11622 first character of the string vs zero. To wit,
11623 strlen(ptr) == 0 => *ptr == 0
11624 strlen(ptr) != 0 => *ptr != 0
11625 Other cases should reduce to one of these two (or a constant)
11626 due to the return value of strlen being unsigned. */
11627 if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
11628 {
11629 tree fndecl = get_callee_fndecl (arg0);
11630
11631 if (fndecl
11632 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
11633 && call_expr_nargs (arg0) == 1
11634 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
11635 == POINTER_TYPE))
11636 {
11637 tree ptrtype
11638 = build_pointer_type (build_qualified_type (char_type_node,
11639 TYPE_QUAL_CONST));
11640 tree ptr = fold_convert_loc (loc, ptrtype,
11641 CALL_EXPR_ARG (arg0, 0));
11642 tree iref = build_fold_indirect_ref_loc (loc, ptr);
11643 return fold_build2_loc (loc, code, type, iref,
11644 build_int_cst (TREE_TYPE (iref), 0));
11645 }
11646 }
11647
11648 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11649 of X. Similarly fold (X >> C) == 0 into X >= 0. */
11650 if (TREE_CODE (arg0) == RSHIFT_EXPR
11651 && integer_zerop (arg1)
11652 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11653 {
11654 tree arg00 = TREE_OPERAND (arg0, 0);
11655 tree arg01 = TREE_OPERAND (arg0, 1);
11656 tree itype = TREE_TYPE (arg00);
11657 if (wi::to_wide (arg01) == element_precision (itype) - 1)
11658 {
11659 if (TYPE_UNSIGNED (itype))
11660 {
11661 itype = signed_type_for (itype);
11662 arg00 = fold_convert_loc (loc, itype, arg00);
11663 }
11664 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11665 type, arg00, build_zero_cst (itype));
11666 }
11667 }
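	  /* For example, for a 32-bit unsigned u, (u >> 31) != 0 becomes
	     (int) u < 0, i.e. a plain sign test (illustrative example).  */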
11668
11669 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11670 (X & C) == 0 when C is a single bit. */
11671 if (TREE_CODE (arg0) == BIT_AND_EXPR
11672 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11673 && integer_zerop (arg1)
11674 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11675 {
11676 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
11677 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11678 TREE_OPERAND (arg0, 1));
11679 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11680 type, tem,
11681 fold_convert_loc (loc, TREE_TYPE (arg0),
11682 arg1));
11683 }
11684
11685 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11686 constant C is a power of two, i.e. a single bit. */
11687 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11688 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11689 && integer_zerop (arg1)
11690 && integer_pow2p (TREE_OPERAND (arg0, 1))
11691 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11692 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11693 {
11694 tree arg00 = TREE_OPERAND (arg0, 0);
11695 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11696 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11697 }
11698
11699 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11700 when C is a power of two, i.e. a single bit. */
11701 if (TREE_CODE (arg0) == BIT_AND_EXPR
11702 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11703 && integer_zerop (arg1)
11704 && integer_pow2p (TREE_OPERAND (arg0, 1))
11705 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11706 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11707 {
11708 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11709 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
11710 arg000, TREE_OPERAND (arg0, 1));
11711 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11712 tem, build_int_cst (TREE_TYPE (tem), 0));
11713 }
11714
11715 if (integer_zerop (arg1)
11716 && tree_expr_nonzero_p (arg0))
11717 {
11718 tree res = constant_boolean_node (code==NE_EXPR, type);
11719 return omit_one_operand_loc (loc, type, res, arg0);
11720 }
11721
11722 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
11723 if (TREE_CODE (arg0) == BIT_AND_EXPR
11724 && TREE_CODE (arg1) == BIT_AND_EXPR)
11725 {
11726 tree arg00 = TREE_OPERAND (arg0, 0);
11727 tree arg01 = TREE_OPERAND (arg0, 1);
11728 tree arg10 = TREE_OPERAND (arg1, 0);
11729 tree arg11 = TREE_OPERAND (arg1, 1);
11730 tree itype = TREE_TYPE (arg0);
11731
11732 if (operand_equal_p (arg01, arg11, 0))
11733 {
11734 tem = fold_convert_loc (loc, itype, arg10);
11735 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11736 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
11737 return fold_build2_loc (loc, code, type, tem,
11738 build_zero_cst (itype));
11739 }
11740 if (operand_equal_p (arg01, arg10, 0))
11741 {
11742 tem = fold_convert_loc (loc, itype, arg11);
11743 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11744 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
11745 return fold_build2_loc (loc, code, type, tem,
11746 build_zero_cst (itype));
11747 }
11748 if (operand_equal_p (arg00, arg11, 0))
11749 {
11750 tem = fold_convert_loc (loc, itype, arg10);
11751 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
11752 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
11753 return fold_build2_loc (loc, code, type, tem,
11754 build_zero_cst (itype));
11755 }
11756 if (operand_equal_p (arg00, arg10, 0))
11757 {
11758 tem = fold_convert_loc (loc, itype, arg11);
11759 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
11760 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
11761 return fold_build2_loc (loc, code, type, tem,
11762 build_zero_cst (itype));
11763 }
11764 }
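	  /* For example, (x & 0xff) == (y & 0xff) is rewritten as
	     ((x ^ y) & 0xff) == 0, and symmetrically for the other
	     operand pairings (illustrative example).  */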
11765
11766 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11767 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11768 {
11769 tree arg00 = TREE_OPERAND (arg0, 0);
11770 tree arg01 = TREE_OPERAND (arg0, 1);
11771 tree arg10 = TREE_OPERAND (arg1, 0);
11772 tree arg11 = TREE_OPERAND (arg1, 1);
11773 tree itype = TREE_TYPE (arg0);
11774
11775 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11776 operand_equal_p guarantees no side-effects so we don't need
11777 to use omit_one_operand on Z. */
11778 if (operand_equal_p (arg01, arg11, 0))
11779 return fold_build2_loc (loc, code, type, arg00,
11780 fold_convert_loc (loc, TREE_TYPE (arg00),
11781 arg10));
11782 if (operand_equal_p (arg01, arg10, 0))
11783 return fold_build2_loc (loc, code, type, arg00,
11784 fold_convert_loc (loc, TREE_TYPE (arg00),
11785 arg11));
11786 if (operand_equal_p (arg00, arg11, 0))
11787 return fold_build2_loc (loc, code, type, arg01,
11788 fold_convert_loc (loc, TREE_TYPE (arg01),
11789 arg10));
11790 if (operand_equal_p (arg00, arg10, 0))
11791 return fold_build2_loc (loc, code, type, arg01,
11792 fold_convert_loc (loc, TREE_TYPE (arg01),
11793 arg11));
11794
11795 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11796 if (TREE_CODE (arg01) == INTEGER_CST
11797 && TREE_CODE (arg11) == INTEGER_CST)
11798 {
11799 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
11800 fold_convert_loc (loc, itype, arg11));
11801 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11802 return fold_build2_loc (loc, code, type, tem,
11803 fold_convert_loc (loc, itype, arg10));
11804 }
11805 }
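	  /* For example, (x ^ 5) == (y ^ 3) becomes (x ^ 6) == y, since
	     5 ^ 3 == 6 and XOR-ing both sides by a common value cancels
	     (illustrative example).  */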
11806
11807 /* Attempt to simplify equality/inequality comparisons of complex
11808 values. Only lower the comparison if the result is known or
11809 can be simplified to a single scalar comparison. */
11810 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11811 || TREE_CODE (arg0) == COMPLEX_CST)
11812 && (TREE_CODE (arg1) == COMPLEX_EXPR
11813 || TREE_CODE (arg1) == COMPLEX_CST))
11814 {
11815 tree real0, imag0, real1, imag1;
11816 tree rcond, icond;
11817
11818 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11819 {
11820 real0 = TREE_OPERAND (arg0, 0);
11821 imag0 = TREE_OPERAND (arg0, 1);
11822 }
11823 else
11824 {
11825 real0 = TREE_REALPART (arg0);
11826 imag0 = TREE_IMAGPART (arg0);
11827 }
11828
11829 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11830 {
11831 real1 = TREE_OPERAND (arg1, 0);
11832 imag1 = TREE_OPERAND (arg1, 1);
11833 }
11834 else
11835 {
11836 real1 = TREE_REALPART (arg1);
11837 imag1 = TREE_IMAGPART (arg1);
11838 }
11839
11840 rcond = fold_binary_loc (loc, code, type, real0, real1);
11841 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11842 {
11843 if (integer_zerop (rcond))
11844 {
11845 if (code == EQ_EXPR)
11846 return omit_two_operands_loc (loc, type, boolean_false_node,
11847 imag0, imag1);
11848 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11849 }
11850 else
11851 {
11852 if (code == NE_EXPR)
11853 return omit_two_operands_loc (loc, type, boolean_true_node,
11854 imag0, imag1);
11855 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11856 }
11857 }
11858
11859 icond = fold_binary_loc (loc, code, type, imag0, imag1);
11860 if (icond && TREE_CODE (icond) == INTEGER_CST)
11861 {
11862 if (integer_zerop (icond))
11863 {
11864 if (code == EQ_EXPR)
11865 return omit_two_operands_loc (loc, type, boolean_false_node,
11866 real0, real1);
11867 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11868 }
11869 else
11870 {
11871 if (code == NE_EXPR)
11872 return omit_two_operands_loc (loc, type, boolean_true_node,
11873 real0, real1);
11874 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11875 }
11876 }
11877 }
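	  /* For example, __complex__ (x, 0.0) == __complex__ (y, 0.0)
	     lowers to x == y, because the imaginary-part comparison folds
	     to a constant (illustrative example).  */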
11878
11879 return NULL_TREE;
11880
11881 case LT_EXPR:
11882 case GT_EXPR:
11883 case LE_EXPR:
11884 case GE_EXPR:
11885 tem = fold_comparison (loc, code, type, op0, op1);
11886 if (tem != NULL_TREE)
11887 return tem;
11888
11889 /* Transform comparisons of the form X +- C CMP X. */
11890 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11891 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11892 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11893 && !HONOR_SNANS (arg0))
11894 {
11895 tree arg01 = TREE_OPERAND (arg0, 1);
11896 enum tree_code code0 = TREE_CODE (arg0);
11897 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11898
11899 /* (X - c) > X becomes false. */
11900 if (code == GT_EXPR
11901 && ((code0 == MINUS_EXPR && is_positive >= 0)
11902 || (code0 == PLUS_EXPR && is_positive <= 0)))
11903 return constant_boolean_node (0, type);
11904
11905 /* Likewise (X + c) < X becomes false. */
11906 if (code == LT_EXPR
11907 && ((code0 == PLUS_EXPR && is_positive >= 0)
11908 || (code0 == MINUS_EXPR && is_positive <= 0)))
11909 return constant_boolean_node (0, type);
11910
11911 /* Convert (X - c) <= X to true. */
11912 if (!HONOR_NANS (arg1)
11913 && code == LE_EXPR
11914 && ((code0 == MINUS_EXPR && is_positive >= 0)
11915 || (code0 == PLUS_EXPR && is_positive <= 0)))
11916 return constant_boolean_node (1, type);
11917
11918 /* Convert (X + c) >= X to true. */
11919 if (!HONOR_NANS (arg1)
11920 && code == GE_EXPR
11921 && ((code0 == PLUS_EXPR && is_positive >= 0)
11922 || (code0 == MINUS_EXPR && is_positive <= 0)))
11923 return constant_boolean_node (1, type);
11924 }
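	  /* For example, for floating-point x, (x - 1.0) > x folds to
	     false here, and (x - 1.0) <= x folds to true when NaNs need
	     not be honored (illustrative examples).  */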
11925
11926 /* If we are comparing an ABS_EXPR with a constant, we can
11927 convert all the cases into explicit comparisons, but they may
11928 well not be faster than doing the ABS and one comparison.
11929 But ABS (X) <= C is a range comparison, which becomes a subtraction
11930 and a comparison, and is probably faster. */
11931 if (code == LE_EXPR
11932 && TREE_CODE (arg1) == INTEGER_CST
11933 && TREE_CODE (arg0) == ABS_EXPR
11934 && ! TREE_SIDE_EFFECTS (arg0)
11935 && (tem = negate_expr (arg1)) != 0
11936 && TREE_CODE (tem) == INTEGER_CST
11937 && !TREE_OVERFLOW (tem))
11938 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11939 build2 (GE_EXPR, type,
11940 TREE_OPERAND (arg0, 0), tem),
11941 build2 (LE_EXPR, type,
11942 TREE_OPERAND (arg0, 0), arg1));
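/* For instance (illustrative constant), ABS_EXPR<x> <= 5 is rewritten here
   as x >= -5 && x <= 5, built as a TRUTH_ANDIF_EXPR of the two explicit
   comparisons.  */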
11943
11944 /* Convert ABS_EXPR<x> >= 0 to true. */
11945 strict_overflow_p = false;
11946 if (code == GE_EXPR
11947 && (integer_zerop (arg1)
11948 || (! HONOR_NANS (arg0)
11949 && real_zerop (arg1)))
11950 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11951 {
11952 if (strict_overflow_p)
11953 fold_overflow_warning (("assuming signed overflow does not occur "
11954 "when simplifying comparison of "
11955 "absolute value and zero"),
11956 WARN_STRICT_OVERFLOW_CONDITIONAL);
11957 return omit_one_operand_loc (loc, type,
11958 constant_boolean_node (true, type),
11959 arg0);
11960 }
11961
11962 /* Convert ABS_EXPR<x> < 0 to false. */
11963 strict_overflow_p = false;
11964 if (code == LT_EXPR
11965 && (integer_zerop (arg1) || real_zerop (arg1))
11966 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11967 {
11968 if (strict_overflow_p)
11969 fold_overflow_warning (("assuming signed overflow does not occur "
11970 "when simplifying comparison of "
11971 "absolute value and zero"),
11972 WARN_STRICT_OVERFLOW_CONDITIONAL);
11973 return omit_one_operand_loc (loc, type,
11974 constant_boolean_node (false, type),
11975 arg0);
11976 }
11977
11978 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11979 and similarly for >= into !=. */
11980 if ((code == LT_EXPR || code == GE_EXPR)
11981 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11982 && TREE_CODE (arg1) == LSHIFT_EXPR
11983 && integer_onep (TREE_OPERAND (arg1, 0)))
11984 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11985 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11986 TREE_OPERAND (arg1, 1)),
11987 build_zero_cst (TREE_TYPE (arg0)));
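/* E.g. for unsigned x and shift count y (names illustrative),
   x < (1U << y) becomes (x >> y) == 0 and x >= (1U << y) becomes
   (x >> y) != 0.  */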
11988
11989 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
11990 otherwise Y might be >= # of bits in X's type and thus e.g.
11991 (unsigned char) (1 << Y) for Y 15 might be 0.
11992 If the cast is widening, then 1 << Y should have unsigned type,
11993 otherwise if Y is number of bits in the signed shift type minus 1,
11994 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
11995 31 might be 0xffffffff80000000. */
11996 if ((code == LT_EXPR || code == GE_EXPR)
11997 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11998 && CONVERT_EXPR_P (arg1)
11999 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12000 && (element_precision (TREE_TYPE (arg1))
12001 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12002 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12003 || (element_precision (TREE_TYPE (arg1))
12004 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12005 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12006 {
12007 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12008 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12009 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12010 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12011 build_zero_cst (TREE_TYPE (arg0)));
12012 }
12013
12014 return NULL_TREE;
12015
12016 case UNORDERED_EXPR:
12017 case ORDERED_EXPR:
12018 case UNLT_EXPR:
12019 case UNLE_EXPR:
12020 case UNGT_EXPR:
12021 case UNGE_EXPR:
12022 case UNEQ_EXPR:
12023 case LTGT_EXPR:
12024 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12025 {
12026 tree targ0 = strip_float_extensions (arg0);
12027 tree targ1 = strip_float_extensions (arg1);
12028 tree newtype = TREE_TYPE (targ0);
12029
12030 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12031 newtype = TREE_TYPE (targ1);
12032
12033 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12034 return fold_build2_loc (loc, code, type,
12035 fold_convert_loc (loc, newtype, targ0),
12036 fold_convert_loc (loc, newtype, targ1));
12037 }
12038
12039 return NULL_TREE;
12040
12041 case COMPOUND_EXPR:
12042 /* When pedantic, a compound expression can be neither an lvalue
12043 nor an integer constant expression. */
12044 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12045 return NULL_TREE;
12046 /* Don't let (0, 0) be null pointer constant. */
12047 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12048 : fold_convert_loc (loc, type, arg1);
12049 return pedantic_non_lvalue_loc (loc, tem);
12050
12051 case ASSERT_EXPR:
12052 /* An ASSERT_EXPR should never be passed to fold_binary. */
12053 gcc_unreachable ();
12054
12055 default:
12056 return NULL_TREE;
12057 } /* switch (code) */
12058 }
12059
12060 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
12061 ((A & N) + B) & M -> (A + B) & M
12062 Similarly if (N & M) == 0,
12063 ((A | N) + B) & M -> (A + B) & M
12064 and for - instead of + (or unary - instead of +)
12065 and/or ^ instead of |.
12066 If B is constant and (B & M) == 0, fold into A & M.
12067
12068 This function is a helper for match.pd patterns. It returns the
12069 non-NULL type in which the simplified operation should be performed,
12070 but only if some optimization is possible.
12071
12072 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
12073 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
12074 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
12075 +/-. */
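/* Illustrative instances (values chosen for exposition): with M == 7,
   ((A & 0xff) + B) & 7 -> (A + B) & 7 because (0xff & 7) == 7,
   ((A | 8) + B) & 7 -> (A + B) & 7 because (8 & 7) == 0, and
   (A + 16) & 7 -> A & 7 because the constant addend 16 has no bits in M.  */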
12076 tree
12077 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
12078 tree arg00, enum tree_code code00, tree arg000, tree arg001,
12079 tree arg01, enum tree_code code01, tree arg010, tree arg011,
12080 tree *pmop)
12081 {
12082 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
12083 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
12084 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
12085 if (~cst1 == 0
12086 || (cst1 & (cst1 + 1)) != 0
12087 || !INTEGRAL_TYPE_P (type)
12088 || (!TYPE_OVERFLOW_WRAPS (type)
12089 && TREE_CODE (type) != INTEGER_TYPE)
12090 || (wi::max_value (type) & cst1) != cst1)
12091 return NULL_TREE;
12092
12093 enum tree_code codes[2] = { code00, code01 };
12094 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
12095 int which = 0;
12096 wide_int cst0;
12097
12098 /* Now we know that arg0 is (C + D) or (C - D) or -C and
12099 arg1 (M) is == (1LL << cst) - 1.
12100 Store C into PMOP[0] and D into PMOP[1]. */
12101 pmop[0] = arg00;
12102 pmop[1] = arg01;
12103 which = code != NEGATE_EXPR;
12104
12105 for (; which >= 0; which--)
12106 switch (codes[which])
12107 {
12108 case BIT_AND_EXPR:
12109 case BIT_IOR_EXPR:
12110 case BIT_XOR_EXPR:
12111 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
12112 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
12113 if (codes[which] == BIT_AND_EXPR)
12114 {
12115 if (cst0 != cst1)
12116 break;
12117 }
12118 else if (cst0 != 0)
12119 break;
12120 /* If C or D is of the form (A & N) where
12121 (N & M) == M, or of the form (A | N) or
12122 (A ^ N) where (N & M) == 0, replace it with A. */
12123 pmop[which] = arg0xx[2 * which];
12124 break;
12125 case ERROR_MARK:
12126 if (TREE_CODE (pmop[which]) != INTEGER_CST)
12127 break;
12128 /* If C or D is a constant N where (N & M) == 0, it can be
12129 omitted (replaced with 0). */
12130 if ((code == PLUS_EXPR
12131 || (code == MINUS_EXPR && which == 0))
12132 && (cst1 & wi::to_wide (pmop[which])) == 0)
12133 pmop[which] = build_int_cst (type, 0);
12134 /* Similarly, with C - N where (-N & M) == 0. */
12135 if (code == MINUS_EXPR
12136 && which == 1
12137 && (cst1 & -wi::to_wide (pmop[which])) == 0)
12138 pmop[which] = build_int_cst (type, 0);
12139 break;
12140 default:
12141 gcc_unreachable ();
12142 }
12143
12144 /* Only build anything new if we optimized one or both arguments above. */
12145 if (pmop[0] == arg00 && pmop[1] == arg01)
12146 return NULL_TREE;
12147
12148 if (TYPE_OVERFLOW_WRAPS (type))
12149 return type;
12150 else
12151 return unsigned_type_for (type);
12152 }
12153
12154 /* Used by contains_label_[p1]. */
12155
12156 struct contains_label_data
12157 {
12158 hash_set<tree> *pset;
12159 bool inside_switch_p;
12160 };
12161
12162 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12163 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12164 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
12165
12166 static tree
12167 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
12168 {
12169 contains_label_data *d = (contains_label_data *) data;
12170 switch (TREE_CODE (*tp))
12171 {
12172 case LABEL_EXPR:
12173 return *tp;
12174
12175 case CASE_LABEL_EXPR:
12176 if (!d->inside_switch_p)
12177 return *tp;
12178 return NULL_TREE;
12179
12180 case SWITCH_EXPR:
12181 if (!d->inside_switch_p)
12182 {
12183 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
12184 return *tp;
12185 d->inside_switch_p = true;
12186 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
12187 return *tp;
12188 d->inside_switch_p = false;
12189 *walk_subtrees = 0;
12190 }
12191 return NULL_TREE;
12192
12193 case GOTO_EXPR:
12194 *walk_subtrees = 0;
12195 return NULL_TREE;
12196
12197 default:
12198 return NULL_TREE;
12199 }
12200 }
12201
12202 /* Return whether the sub-tree ST contains a label which is accessible from
12203 outside the sub-tree. */
12204
12205 static bool
12206 contains_label_p (tree st)
12207 {
12208 hash_set<tree> pset;
12209 contains_label_data data = { &pset, false };
12210 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
12211 }
12212
12213 /* Fold a ternary expression of code CODE and type TYPE with operands
12214 OP0, OP1, and OP2. Return the folded expression if folding is
12215 successful. Otherwise, return NULL_TREE. */
12216
12217 tree
12218 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12219 tree op0, tree op1, tree op2)
12220 {
12221 tree tem;
12222 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12223 enum tree_code_class kind = TREE_CODE_CLASS (code);
12224
12225 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12226 && TREE_CODE_LENGTH (code) == 3);
12227
12228 /* If this is a commutative operation, and OP0 is a constant, move it
12229 to OP1 to reduce the number of tests below. */
12230 if (commutative_ternary_tree_code (code)
12231 && tree_swap_operands_p (op0, op1))
12232 return fold_build3_loc (loc, code, type, op1, op0, op2);
12233
12234 tem = generic_simplify (loc, code, type, op0, op1, op2);
12235 if (tem)
12236 return tem;
12237
12238 /* Strip any conversions that don't change the mode. This is safe
12239 for every expression, except for a comparison expression because
12240 its signedness is derived from its operands. So, in the latter
12241 case, only strip conversions that don't change the signedness.
12242
12243 Note that this is done as an internal manipulation within the
12244 constant folder, in order to find the simplest representation of
12245 the arguments so that their form can be studied. In any cases,
12246 the appropriate type conversions should be put back in the tree
12247 that will get out of the constant folder. */
12248 if (op0)
12249 {
12250 arg0 = op0;
12251 STRIP_NOPS (arg0);
12252 }
12253
12254 if (op1)
12255 {
12256 arg1 = op1;
12257 STRIP_NOPS (arg1);
12258 }
12259
12260 if (op2)
12261 {
12262 arg2 = op2;
12263 STRIP_NOPS (arg2);
12264 }
12265
12266 switch (code)
12267 {
12268 case COMPONENT_REF:
12269 if (TREE_CODE (arg0) == CONSTRUCTOR
12270 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12271 {
12272 unsigned HOST_WIDE_INT idx;
12273 tree field, value;
12274 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12275 if (field == arg1)
12276 return value;
12277 }
12278 return NULL_TREE;
12279
12280 case COND_EXPR:
12281 case VEC_COND_EXPR:
12282 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12283 so all simple results must be passed through pedantic_non_lvalue. */
12284 if (TREE_CODE (arg0) == INTEGER_CST)
12285 {
12286 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12287 tem = integer_zerop (arg0) ? op2 : op1;
12288 /* Only optimize constant conditions when the selected branch
12289 has the same type as the COND_EXPR. This avoids optimizing
12290 away "c ? x : throw", where the throw has a void type.
12291 Avoid throwing away the operand which contains a label. */
12292 if ((!TREE_SIDE_EFFECTS (unused_op)
12293 || !contains_label_p (unused_op))
12294 && (! VOID_TYPE_P (TREE_TYPE (tem))
12295 || VOID_TYPE_P (type)))
12296 return pedantic_non_lvalue_loc (loc, tem);
12297 return NULL_TREE;
12298 }
12299 else if (TREE_CODE (arg0) == VECTOR_CST)
12300 {
12301 unsigned HOST_WIDE_INT nelts;
12302 if ((TREE_CODE (arg1) == VECTOR_CST
12303 || TREE_CODE (arg1) == CONSTRUCTOR)
12304 && (TREE_CODE (arg2) == VECTOR_CST
12305 || TREE_CODE (arg2) == CONSTRUCTOR)
12306 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
12307 {
12308 vec_perm_builder sel (nelts, nelts, 1);
12309 for (unsigned int i = 0; i < nelts; i++)
12310 {
12311 tree val = VECTOR_CST_ELT (arg0, i);
12312 if (integer_all_onesp (val))
12313 sel.quick_push (i);
12314 else if (integer_zerop (val))
12315 sel.quick_push (nelts + i);
12316 else /* Currently unreachable. */
12317 return NULL_TREE;
12318 }
12319 vec_perm_indices indices (sel, 2, nelts);
12320 tree t = fold_vec_perm (type, arg1, arg2, indices);
12321 if (t != NULL_TREE)
12322 return t;
12323 }
12324 }
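/* For instance (illustrative 4-element vectors a and b), a constant mask
   of {-1, 0, -1, 0} yields the selector {0, 5, 2, 7} above, so the
   VEC_COND_EXPR folds to a constant permutation taking elements 0 and 2
   from the "true" operand and elements 1 and 3 from the "false" operand.  */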
12325
12326 /* If we have A op B ? A : C, we may be able to convert this to a
12327 simpler expression, depending on the operation and the values
12328 of B and C. Signed zeros prevent all of these transformations,
12329 for reasons given above each one.
12330
12331 Also try swapping the arguments and inverting the conditional. */
12332 if (COMPARISON_CLASS_P (arg0)
12333 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
12334 && !HONOR_SIGNED_ZEROS (element_mode (op1)))
12335 {
12336 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
12337 if (tem)
12338 return tem;
12339 }
12340
12341 if (COMPARISON_CLASS_P (arg0)
12342 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
12343 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
12344 {
12345 location_t loc0 = expr_location_or (arg0, loc);
12346 tem = fold_invert_truthvalue (loc0, arg0);
12347 if (tem && COMPARISON_CLASS_P (tem))
12348 {
12349 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
12350 if (tem)
12351 return tem;
12352 }
12353 }
12354
12355 /* If the second operand is simpler than the third, swap them
12356 since that produces better jump optimization results. */
12357 if (truth_value_p (TREE_CODE (arg0))
12358 && tree_swap_operands_p (op1, op2))
12359 {
12360 location_t loc0 = expr_location_or (arg0, loc);
12361 /* See if this can be inverted. If it can't, possibly because
12362 it was a floating-point inequality comparison, don't do
12363 anything. */
12364 tem = fold_invert_truthvalue (loc0, arg0);
12365 if (tem)
12366 return fold_build3_loc (loc, code, type, tem, op2, op1);
12367 }
12368
12369 /* Convert A ? 1 : 0 to simply A. */
12370 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
12371 : (integer_onep (op1)
12372 && !VECTOR_TYPE_P (type)))
12373 && integer_zerop (op2)
12374 /* If we try to convert OP0 to our type, the
12375 call to fold will try to move the conversion inside
12376 a COND, which will recurse. In that case, the COND_EXPR
12377 is probably the best choice, so leave it alone. */
12378 && type == TREE_TYPE (arg0))
12379 return pedantic_non_lvalue_loc (loc, arg0);
12380
12381 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12382 over COND_EXPR in cases such as floating point comparisons. */
12383 if (integer_zerop (op1)
12384 && code == COND_EXPR
12385 && integer_onep (op2)
12386 && !VECTOR_TYPE_P (type)
12387 && truth_value_p (TREE_CODE (arg0)))
12388 return pedantic_non_lvalue_loc (loc,
12389 fold_convert_loc (loc, type,
12390 invert_truthvalue_loc (loc,
12391 arg0)));
12392
12393 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12394 if (TREE_CODE (arg0) == LT_EXPR
12395 && integer_zerop (TREE_OPERAND (arg0, 1))
12396 && integer_zerop (op2)
12397 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12398 {
12399 /* sign_bit_p looks through both zero and sign extensions,
12400 but for this optimization only sign extensions are
12401 usable. */
12402 tree tem2 = TREE_OPERAND (arg0, 0);
12403 while (tem != tem2)
12404 {
12405 if (TREE_CODE (tem2) != NOP_EXPR
12406 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
12407 {
12408 tem = NULL_TREE;
12409 break;
12410 }
12411 tem2 = TREE_OPERAND (tem2, 0);
12412 }
12413 /* sign_bit_p only checks ARG1 bits within A's precision.
12414 If <sign bit of A> has a wider type than A, the bits outside
12415 of A's precision in <sign bit of A> need to be checked.
12416 If they are all 0, this optimization needs to be done
12417 in unsigned A's type; if they are all 1, in signed A's type;
12418 otherwise this can't be done. */
12419 if (tem
12420 && TYPE_PRECISION (TREE_TYPE (tem))
12421 < TYPE_PRECISION (TREE_TYPE (arg1))
12422 && TYPE_PRECISION (TREE_TYPE (tem))
12423 < TYPE_PRECISION (type))
12424 {
12425 int inner_width, outer_width;
12426 tree tem_type;
12427
12428 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12429 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12430 if (outer_width > TYPE_PRECISION (type))
12431 outer_width = TYPE_PRECISION (type);
12432
12433 wide_int mask = wi::shifted_mask
12434 (inner_width, outer_width - inner_width, false,
12435 TYPE_PRECISION (TREE_TYPE (arg1)));
12436
12437 wide_int common = mask & wi::to_wide (arg1);
12438 if (common == mask)
12439 {
12440 tem_type = signed_type_for (TREE_TYPE (tem));
12441 tem = fold_convert_loc (loc, tem_type, tem);
12442 }
12443 else if (common == 0)
12444 {
12445 tem_type = unsigned_type_for (TREE_TYPE (tem));
12446 tem = fold_convert_loc (loc, tem_type, tem);
12447 }
12448 else
12449 tem = NULL;
12450 }
12451
12452 if (tem)
12453 return
12454 fold_convert_loc (loc, type,
12455 fold_build2_loc (loc, BIT_AND_EXPR,
12456 TREE_TYPE (tem), tem,
12457 fold_convert_loc (loc,
12458 TREE_TYPE (tem),
12459 arg1)));
12460 }
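/* E.g. for a 32-bit int A (illustrative), A < 0 ? INT_MIN : 0 folds to
   A & INT_MIN (converted to the result type), which is nonzero exactly
   when A is negative.  */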
12461
12462 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12463 already handled above. */
12464 if (TREE_CODE (arg0) == BIT_AND_EXPR
12465 && integer_onep (TREE_OPERAND (arg0, 1))
12466 && integer_zerop (op2)
12467 && integer_pow2p (arg1))
12468 {
12469 tree tem = TREE_OPERAND (arg0, 0);
12470 STRIP_NOPS (tem);
12471 if (TREE_CODE (tem) == RSHIFT_EXPR
12472 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
12473 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
12474 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
12475 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12476 fold_convert_loc (loc, type,
12477 TREE_OPERAND (tem, 0)),
12478 op1);
12479 }
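/* For instance (illustrative names), ((a >> 3) & 1) ? 8 : 0 folds to
   a & 8, since tree_log2 (8) == 3 matches the shift count.  */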
12480
12481 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12482 is probably obsolete because the first operand should be a
12483 truth value (that's why we have the two cases above), but let's
12484 leave it in until we can confirm this for all front-ends. */
12485 if (integer_zerop (op2)
12486 && TREE_CODE (arg0) == NE_EXPR
12487 && integer_zerop (TREE_OPERAND (arg0, 1))
12488 && integer_pow2p (arg1)
12489 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12490 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12491 arg1, OEP_ONLY_CONST)
12492 /* operand_equal_p compares just the value, not the precision, so e.g.
12493 arg1 could be an 8-bit -128 and be a power of two, but the BIT_AND_EXPR
12494 second operand a 32-bit -128, which is not a power of two (or vice
12495 versa). */
12496 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
12497 return pedantic_non_lvalue_loc (loc,
12498 fold_convert_loc (loc, type,
12499 TREE_OPERAND (arg0,
12500 0)));
12501
12502 /* Disable the transformations below for vectors, since
12503 fold_binary_op_with_conditional_arg may undo them immediately,
12504 yielding an infinite loop. */
12505 if (code == VEC_COND_EXPR)
12506 return NULL_TREE;
12507
12508 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12509 if (integer_zerop (op2)
12510 && truth_value_p (TREE_CODE (arg0))
12511 && truth_value_p (TREE_CODE (arg1))
12512 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12513 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
12514 : TRUTH_ANDIF_EXPR,
12515 type, fold_convert_loc (loc, type, arg0), op1);
12516
12517 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12518 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
12519 && truth_value_p (TREE_CODE (arg0))
12520 && truth_value_p (TREE_CODE (arg1))
12521 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12522 {
12523 location_t loc0 = expr_location_or (arg0, loc);
12524 /* Only perform transformation if ARG0 is easily inverted. */
12525 tem = fold_invert_truthvalue (loc0, arg0);
12526 if (tem)
12527 return fold_build2_loc (loc, code == VEC_COND_EXPR
12528 ? BIT_IOR_EXPR
12529 : TRUTH_ORIF_EXPR,
12530 type, fold_convert_loc (loc, type, tem),
12531 op1);
12532 }
12533
12534 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12535 if (integer_zerop (arg1)
12536 && truth_value_p (TREE_CODE (arg0))
12537 && truth_value_p (TREE_CODE (op2))
12538 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12539 {
12540 location_t loc0 = expr_location_or (arg0, loc);
12541 /* Only perform transformation if ARG0 is easily inverted. */
12542 tem = fold_invert_truthvalue (loc0, arg0);
12543 if (tem)
12544 return fold_build2_loc (loc, code == VEC_COND_EXPR
12545 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
12546 type, fold_convert_loc (loc, type, tem),
12547 op2);
12548 }
12549
12550 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12551 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
12552 && truth_value_p (TREE_CODE (arg0))
12553 && truth_value_p (TREE_CODE (op2))
12554 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12555 return fold_build2_loc (loc, code == VEC_COND_EXPR
12556 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
12557 type, fold_convert_loc (loc, type, arg0), op2);
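/* Taken together, the four rewrites above map, for scalar truth values
   a and b (names illustrative): a ? b : 0 => a && b, a ? b : 1 => !a || b,
   a ? 0 : b => !a && b, and a ? 1 : b => a || b; the VEC_COND_EXPR forms
   use BIT_AND_EXPR/BIT_IOR_EXPR instead of the short-circuit codes.  */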
12558
12559 return NULL_TREE;
12560
12561 case CALL_EXPR:
12562 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12563 of fold_ternary on them. */
12564 gcc_unreachable ();
12565
12566 case BIT_FIELD_REF:
12567 if (TREE_CODE (arg0) == VECTOR_CST
12568 && (type == TREE_TYPE (TREE_TYPE (arg0))
12569 || (VECTOR_TYPE_P (type)
12570 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
12571 && tree_fits_uhwi_p (op1)
12572 && tree_fits_uhwi_p (op2))
12573 {
12574 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
12575 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
12576 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
12577 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
12578
12579 if (n != 0
12580 && (idx % width) == 0
12581 && (n % width) == 0
12582 && known_le ((idx + n) / width,
12583 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
12584 {
12585 idx = idx / width;
12586 n = n / width;
12587
12588 if (TREE_CODE (arg0) == VECTOR_CST)
12589 {
12590 if (n == 1)
12591 {
12592 tem = VECTOR_CST_ELT (arg0, idx);
12593 if (VECTOR_TYPE_P (type))
12594 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
12595 return tem;
12596 }
12597
12598 tree_vector_builder vals (type, n, 1);
12599 for (unsigned i = 0; i < n; ++i)
12600 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
12601 return vals.build ();
12602 }
12603 }
12604 }
12605
12606 /* On constants we can use native encode/interpret to constant
12607 fold (nearly) all BIT_FIELD_REFs. */
12608 if (CONSTANT_CLASS_P (arg0)
12609 && can_native_interpret_type_p (type)
12610 && BITS_PER_UNIT == 8
12611 && tree_fits_uhwi_p (op1)
12612 && tree_fits_uhwi_p (op2))
12613 {
12614 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12615 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
12616 /* Limit us to a reasonable amount of work. To relax the
12617 other limitations we need bit-shifting of the buffer
12618 and rounding up the size. */
12619 if (bitpos % BITS_PER_UNIT == 0
12620 && bitsize % BITS_PER_UNIT == 0
12621 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
12622 {
12623 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
12624 unsigned HOST_WIDE_INT len
12625 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
12626 bitpos / BITS_PER_UNIT);
12627 if (len > 0
12628 && len * BITS_PER_UNIT >= bitsize)
12629 {
12630 tree v = native_interpret_expr (type, b,
12631 bitsize / BITS_PER_UNIT);
12632 if (v)
12633 return v;
12634 }
12635 }
12636 }
12637
12638 return NULL_TREE;
12639
12640 case VEC_PERM_EXPR:
12641 /* Perform constant folding of VEC_PERM_EXPR. */
12642 if (TREE_CODE (arg2) == VECTOR_CST
12643 && TREE_CODE (op0) == VECTOR_CST
12644 && TREE_CODE (op1) == VECTOR_CST)
12645 {
12646 /* Build a vector of integers from the tree mask. */
12647 vec_perm_builder builder;
12648 if (!tree_to_vec_perm_builder (&builder, arg2))
12649 return NULL_TREE;
12650
12651 /* Create a vec_perm_indices for the integer vector. */
12652 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
12653 bool single_arg = (op0 == op1);
12654 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
12655 return fold_vec_perm (type, op0, op1, sel);
12656 }
12657 return NULL_TREE;
12658
12659 case BIT_INSERT_EXPR:
12660 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
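/* Worked example (illustrative constants): inserting the 8-bit value 0xab
   at bit position 8 into the 32-bit constant 0x12345678 yields 0x1234ab78;
   the INTEGER_CST case below masks off the old bits in [8, 16) and ORs in
   the zero-extended value shifted into place.  */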
12661 if (TREE_CODE (arg0) == INTEGER_CST
12662 && TREE_CODE (arg1) == INTEGER_CST)
12663 {
12664 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12665 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
12666 wide_int tem = (wi::to_wide (arg0)
12667 & wi::shifted_mask (bitpos, bitsize, true,
12668 TYPE_PRECISION (type)));
12669 wide_int tem2
12670 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
12671 bitsize), bitpos);
12672 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
12673 }
12674 else if (TREE_CODE (arg0) == VECTOR_CST
12675 && CONSTANT_CLASS_P (arg1)
12676 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
12677 TREE_TYPE (arg1)))
12678 {
12679 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12680 unsigned HOST_WIDE_INT elsize
12681 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
12682 if (bitpos % elsize == 0)
12683 {
12684 unsigned k = bitpos / elsize;
12685 unsigned HOST_WIDE_INT nelts;
12686 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
12687 return arg0;
12688 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
12689 {
12690 tree_vector_builder elts (type, nelts, 1);
12691 elts.quick_grow (nelts);
12692 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
12693 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
12694 return elts.build ();
12695 }
12696 }
12697 }
12698 return NULL_TREE;
12699
12700 default:
12701 return NULL_TREE;
12702 } /* switch (code) */
12703 }
12704
12705 /* Get the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
12706 of an array (or vector). If CTOR_IDX is non-NULL, *CTOR_IDX is updated
12707 with the constructor element index of the value returned. If the element
12708 is not found, NULL_TREE is returned and *CTOR_IDX is updated to
12709 the index of the element after the ACCESS_INDEX position (which
12710 may be outside of the CTOR array). */
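/* For example (hypothetical constructor), for {[0] = 10, [2 ... 4] = 20}
   an ACCESS_INDEX of 3 falls within the RANGE_EXPR entry, so 20 is
   returned and *CTOR_IDX is set to 1; an ACCESS_INDEX not covered by any
   entry returns NULL_TREE.  */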
12711
12712 tree
12713 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
12714 unsigned *ctor_idx)
12715 {
12716 tree index_type = NULL_TREE;
12717 signop index_sgn = UNSIGNED;
12718 offset_int low_bound = 0;
12719
12720 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
12721 {
12722 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
12723 if (domain_type && TYPE_MIN_VALUE (domain_type))
12724 {
12725 /* Static constructors for variably sized objects make no sense. */
12726 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
12727 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
12728 /* ??? When it is obvious that the range is signed, treat it so. */
12729 if (TYPE_UNSIGNED (index_type)
12730 && TYPE_MAX_VALUE (domain_type)
12731 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
12732 TYPE_MIN_VALUE (domain_type)))
12733 {
12734 index_sgn = SIGNED;
12735 low_bound
12736 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
12737 SIGNED);
12738 }
12739 else
12740 {
12741 index_sgn = TYPE_SIGN (index_type);
12742 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
12743 }
12744 }
12745 }
12746
12747 if (index_type)
12748 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
12749 index_sgn);
12750
12751 offset_int index = low_bound;
12752 if (index_type)
12753 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
12754
12755 offset_int max_index = index;
12756 unsigned cnt;
12757 tree cfield, cval;
12758 bool first_p = true;
12759
12760 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
12761 {
12762 /* An array constructor might explicitly set the index, or specify a range,
12763 or leave the index NULL, meaning that it is the next index after the
12764 previous one. */
12765 if (cfield)
12766 {
12767 if (TREE_CODE (cfield) == INTEGER_CST)
12768 max_index = index
12769 = offset_int::from (wi::to_wide (cfield), index_sgn);
12770 else
12771 {
12772 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
12773 index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
12774 index_sgn);
12775 max_index
12776 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
12777 index_sgn);
12778 gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
12779 }
12780 }
12781 else if (!first_p)
12782 {
12783 index = max_index + 1;
12784 if (index_type)
12785 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
12786 gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
12787 max_index = index;
12788 }
12789 else
12790 first_p = false;
12791
12792 /* Do we have a match? */
12793 if (wi::cmp (access_index, index, index_sgn) >= 0)
12794 {
12795 if (wi::cmp (access_index, max_index, index_sgn) <= 0)
12796 {
12797 if (ctor_idx)
12798 *ctor_idx = cnt;
12799 return cval;
12800 }
12801 }
12802 else if (in_gimple_form)
12803 /* We're past the element we search for. Note during parsing
12804 the elements might not be sorted.
12805 ??? We should use a binary search and a flag on the
12806 CONSTRUCTOR as to whether elements are sorted in declaration
12807 order. */
12808 break;
12809 }
12810 if (ctor_idx)
12811 *ctor_idx = cnt;
12812 return NULL_TREE;
12813 }
12814
12815 /* Perform constant folding and related simplification of EXPR.
12816 The related simplifications include x*1 => x, x*0 => 0, etc.,
12817 and application of the associative law.
12818 NOP_EXPR conversions may be removed freely (as long as we
12819 are careful not to change the type of the overall expression).
12820 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12821 but we can constant-fold them if they have constant operands. */
12822
12823 #ifdef ENABLE_FOLD_CHECKING
12824 # define fold(x) fold_1 (x)
12825 static tree fold_1 (tree);
12826 static
12827 #endif
12828 tree
12829 fold (tree expr)
12830 {
12831 const tree t = expr;
12832 enum tree_code code = TREE_CODE (t);
12833 enum tree_code_class kind = TREE_CODE_CLASS (code);
12834 tree tem;
12835 location_t loc = EXPR_LOCATION (expr);
12836
12837 /* Return right away if a constant. */
12838 if (kind == tcc_constant)
12839 return t;
12840
12841 /* CALL_EXPR-like objects with variable numbers of operands are
12842 treated specially. */
12843 if (kind == tcc_vl_exp)
12844 {
12845 if (code == CALL_EXPR)
12846 {
12847 tem = fold_call_expr (loc, expr, false);
12848 return tem ? tem : expr;
12849 }
12850 return expr;
12851 }
12852
12853 if (IS_EXPR_CODE_CLASS (kind))
12854 {
12855 tree type = TREE_TYPE (t);
12856 tree op0, op1, op2;
12857
12858 switch (TREE_CODE_LENGTH (code))
12859 {
12860 case 1:
12861 op0 = TREE_OPERAND (t, 0);
12862 tem = fold_unary_loc (loc, code, type, op0);
12863 return tem ? tem : expr;
12864 case 2:
12865 op0 = TREE_OPERAND (t, 0);
12866 op1 = TREE_OPERAND (t, 1);
12867 tem = fold_binary_loc (loc, code, type, op0, op1);
12868 return tem ? tem : expr;
12869 case 3:
12870 op0 = TREE_OPERAND (t, 0);
12871 op1 = TREE_OPERAND (t, 1);
12872 op2 = TREE_OPERAND (t, 2);
12873 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12874 return tem ? tem : expr;
12875 default:
12876 break;
12877 }
12878 }
12879
12880 switch (code)
12881 {
12882 case ARRAY_REF:
12883 {
12884 tree op0 = TREE_OPERAND (t, 0);
12885 tree op1 = TREE_OPERAND (t, 1);
12886
12887 if (TREE_CODE (op1) == INTEGER_CST
12888 && TREE_CODE (op0) == CONSTRUCTOR
12889 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12890 {
12891 tree val = get_array_ctor_element_at_index (op0,
12892 wi::to_offset (op1));
12893 if (val)
12894 return val;
12895 }
12896
12897 return t;
12898 }
12899
12900 /* Return a VECTOR_CST if possible. */
12901 case CONSTRUCTOR:
12902 {
12903 tree type = TREE_TYPE (t);
12904 if (TREE_CODE (type) != VECTOR_TYPE)
12905 return t;
12906
12907 unsigned i;
12908 tree val;
12909 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12910 if (! CONSTANT_CLASS_P (val))
12911 return t;
12912
12913 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12914 }
12915
12916 case CONST_DECL:
12917 return fold (DECL_INITIAL (t));
12918
12919 default:
12920 return t;
12921 } /* switch (code) */
12922 }
12923
12924 #ifdef ENABLE_FOLD_CHECKING
12925 #undef fold
12926
12927 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12928 hash_table<nofree_ptr_hash<const tree_node> > *);
12929 static void fold_check_failed (const_tree, const_tree);
12930 void print_fold_checksum (const_tree);
12931
12932 /* When --enable-checking=fold, compute a digest of expr before
12933 and after the actual fold call to verify that fold did not
12934 accidentally change the original expr. */
12935
12936 tree
12937 fold (tree expr)
12938 {
12939 tree ret;
12940 struct md5_ctx ctx;
12941 unsigned char checksum_before[16], checksum_after[16];
12942 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12943
12944 md5_init_ctx (&ctx);
12945 fold_checksum_tree (expr, &ctx, &ht);
12946 md5_finish_ctx (&ctx, checksum_before);
12947 ht.empty ();
12948
12949 ret = fold_1 (expr);
12950
12951 md5_init_ctx (&ctx);
12952 fold_checksum_tree (expr, &ctx, &ht);
12953 md5_finish_ctx (&ctx, checksum_after);
12954
12955 if (memcmp (checksum_before, checksum_after, 16))
12956 fold_check_failed (expr, ret);
12957
12958 return ret;
12959 }
12960
12961 void
12962 print_fold_checksum (const_tree expr)
12963 {
12964 struct md5_ctx ctx;
12965 unsigned char checksum[16], cnt;
12966 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12967
12968 md5_init_ctx (&ctx);
12969 fold_checksum_tree (expr, &ctx, &ht);
12970 md5_finish_ctx (&ctx, checksum);
12971 for (cnt = 0; cnt < 16; ++cnt)
12972 fprintf (stderr, "%02x", checksum[cnt]);
12973 putc ('\n', stderr);
12974 }
12975
12976 static void
12977 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12978 {
12979 internal_error ("fold check: original tree changed by fold");
12980 }
12981
12982 static void
12983 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12984 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12985 {
12986 const tree_node **slot;
12987 enum tree_code code;
12988 union tree_node *buf;
12989 int i, len;
12990
12991 recursive_label:
12992 if (expr == NULL)
12993 return;
12994 slot = ht->find_slot (expr, INSERT);
12995 if (*slot != NULL)
12996 return;
12997 *slot = expr;
12998 code = TREE_CODE (expr);
12999 if (TREE_CODE_CLASS (code) == tcc_declaration
13000 && HAS_DECL_ASSEMBLER_NAME_P (expr))
13001 {
13002 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13003 size_t sz = tree_size (expr);
13004 buf = XALLOCAVAR (union tree_node, sz);
13005 memcpy ((char *) buf, expr, sz);
13006 SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
13007 buf->decl_with_vis.symtab_node = NULL;
13008 buf->base.nowarning_flag = 0;
13009 expr = (tree) buf;
13010 }
13011 else if (TREE_CODE_CLASS (code) == tcc_type
13012 && (TYPE_POINTER_TO (expr)
13013 || TYPE_REFERENCE_TO (expr)
13014 || TYPE_CACHED_VALUES_P (expr)
13015 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13016 || TYPE_NEXT_VARIANT (expr)
13017 || TYPE_ALIAS_SET_KNOWN_P (expr)))
13018 {
13019 /* Allow these fields to be modified. */
13020 tree tmp;
13021 size_t sz = tree_size (expr);
13022 buf = XALLOCAVAR (union tree_node, sz);
13023 memcpy ((char *) buf, expr, sz);
13024 expr = tmp = (tree) buf;
13025 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13026 TYPE_POINTER_TO (tmp) = NULL;
13027 TYPE_REFERENCE_TO (tmp) = NULL;
13028 TYPE_NEXT_VARIANT (tmp) = NULL;
13029 TYPE_ALIAS_SET (tmp) = -1;
13030 if (TYPE_CACHED_VALUES_P (tmp))
13031 {
13032 TYPE_CACHED_VALUES_P (tmp) = 0;
13033 TYPE_CACHED_VALUES (tmp) = NULL;
13034 }
13035 }
13036 else if (TREE_NO_WARNING (expr) && (DECL_P (expr) || EXPR_P (expr)))
13037 {
13038 /* Allow TREE_NO_WARNING to be set. Perhaps we shouldn't allow that
13039 and change builtins.c etc. instead - see PR89543. */
13040 size_t sz = tree_size (expr);
13041 buf = XALLOCAVAR (union tree_node, sz);
13042 memcpy ((char *) buf, expr, sz);
13043 buf->base.nowarning_flag = 0;
13044 expr = (tree) buf;
13045 }
13046 md5_process_bytes (expr, tree_size (expr), ctx);
13047 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
13048 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13049 if (TREE_CODE_CLASS (code) != tcc_type
13050 && TREE_CODE_CLASS (code) != tcc_declaration
13051 && code != TREE_LIST
13052 && code != SSA_NAME
13053 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13054 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13055 switch (TREE_CODE_CLASS (code))
13056 {
13057 case tcc_constant:
13058 switch (code)
13059 {
13060 case STRING_CST:
13061 md5_process_bytes (TREE_STRING_POINTER (expr),
13062 TREE_STRING_LENGTH (expr), ctx);
13063 break;
13064 case COMPLEX_CST:
13065 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13066 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13067 break;
13068 case VECTOR_CST:
13069 len = vector_cst_encoded_nelts (expr);
13070 for (i = 0; i < len; ++i)
13071 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
13072 break;
13073 default:
13074 break;
13075 }
13076 break;
13077 case tcc_exceptional:
13078 switch (code)
13079 {
13080 case TREE_LIST:
13081 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13082 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13083 expr = TREE_CHAIN (expr);
13084 goto recursive_label;
13085 break;
13086 case TREE_VEC:
13087 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13088 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13089 break;
13090 default:
13091 break;
13092 }
13093 break;
13094 case tcc_expression:
13095 case tcc_reference:
13096 case tcc_comparison:
13097 case tcc_unary:
13098 case tcc_binary:
13099 case tcc_statement:
13100 case tcc_vl_exp:
13101 len = TREE_OPERAND_LENGTH (expr);
13102 for (i = 0; i < len; ++i)
13103 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13104 break;
13105 case tcc_declaration:
13106 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13107 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13108 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13109 {
13110 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13111 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13112 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13113 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13114 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13115 }
13116
13117 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13118 {
13119 if (TREE_CODE (expr) == FUNCTION_DECL)
13120 {
13121 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13122 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
13123 }
13124 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13125 }
13126 break;
13127 case tcc_type:
13128 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13129 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13130 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13131 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13132 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13133 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13134 if (INTEGRAL_TYPE_P (expr)
13135 || SCALAR_FLOAT_TYPE_P (expr))
13136 {
13137 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13138 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13139 }
13140 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13141 if (TREE_CODE (expr) == RECORD_TYPE
13142 || TREE_CODE (expr) == UNION_TYPE
13143 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13144 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13145 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13146 break;
13147 default:
13148 break;
13149 }
13150 }
13151
13152 /* Helper function for outputting the checksum of a tree T. When
13153 debugging with gdb, you can "define mynext" to be "next" followed
13154 by "call debug_fold_checksum (op0)", then just trace down till the
13155 outputs differ. */
13156
13157 DEBUG_FUNCTION void
13158 debug_fold_checksum (const_tree t)
13159 {
13160 int i;
13161 unsigned char checksum[16];
13162 struct md5_ctx ctx;
13163 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13164
13165 md5_init_ctx (&ctx);
13166 fold_checksum_tree (t, &ctx, &ht);
13167 md5_finish_ctx (&ctx, checksum);
13168 ht.empty ();
13169
13170 for (i = 0; i < 16; i++)
13171 fprintf (stderr, "%d ", checksum[i]);
13172
13173 fprintf (stderr, "\n");
13174 }
13175
13176 #endif
13177
13178 /* Fold a unary tree expression with code CODE of type TYPE with an
13179 operand OP0. LOC is the location of the resulting expression.
13180 Return a folded expression if successful. Otherwise, return a tree
13181 expression with code CODE of type TYPE with an operand OP0. */
13182
13183 tree
13184 fold_build1_loc (location_t loc,
13185 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13186 {
13187 tree tem;
13188 #ifdef ENABLE_FOLD_CHECKING
13189 unsigned char checksum_before[16], checksum_after[16];
13190 struct md5_ctx ctx;
13191 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13192
13193 md5_init_ctx (&ctx);
13194 fold_checksum_tree (op0, &ctx, &ht);
13195 md5_finish_ctx (&ctx, checksum_before);
13196 ht.empty ();
13197 #endif
13198
13199 tem = fold_unary_loc (loc, code, type, op0);
13200 if (!tem)
13201 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
13202
13203 #ifdef ENABLE_FOLD_CHECKING
13204 md5_init_ctx (&ctx);
13205 fold_checksum_tree (op0, &ctx, &ht);
13206 md5_finish_ctx (&ctx, checksum_after);
13207
13208 if (memcmp (checksum_before, checksum_after, 16))
13209 fold_check_failed (op0, tem);
13210 #endif
13211 return tem;
13212 }
13213
13214 /* Fold a binary tree expression with code CODE of type TYPE with
13215 operands OP0 and OP1. LOC is the location of the resulting
13216 expression. Return a folded expression if successful. Otherwise,
13217 return a tree expression with code CODE of type TYPE with operands
13218 OP0 and OP1. */
13219
13220 tree
13221 fold_build2_loc (location_t loc,
13222 enum tree_code code, tree type, tree op0, tree op1
13223 MEM_STAT_DECL)
13224 {
13225 tree tem;
13226 #ifdef ENABLE_FOLD_CHECKING
13227 unsigned char checksum_before_op0[16],
13228 checksum_before_op1[16],
13229 checksum_after_op0[16],
13230 checksum_after_op1[16];
13231 struct md5_ctx ctx;
13232 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13233
13234 md5_init_ctx (&ctx);
13235 fold_checksum_tree (op0, &ctx, &ht);
13236 md5_finish_ctx (&ctx, checksum_before_op0);
13237 ht.empty ();
13238
13239 md5_init_ctx (&ctx);
13240 fold_checksum_tree (op1, &ctx, &ht);
13241 md5_finish_ctx (&ctx, checksum_before_op1);
13242 ht.empty ();
13243 #endif
13244
13245 tem = fold_binary_loc (loc, code, type, op0, op1);
13246 if (!tem)
13247 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13248
13249 #ifdef ENABLE_FOLD_CHECKING
13250 md5_init_ctx (&ctx);
13251 fold_checksum_tree (op0, &ctx, &ht);
13252 md5_finish_ctx (&ctx, checksum_after_op0);
13253 ht.empty ();
13254
13255 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13256 fold_check_failed (op0, tem);
13257
13258 md5_init_ctx (&ctx);
13259 fold_checksum_tree (op1, &ctx, &ht);
13260 md5_finish_ctx (&ctx, checksum_after_op1);
13261
13262 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13263 fold_check_failed (op1, tem);
13264 #endif
13265 return tem;
13266 }
13267
13268 /* Fold a ternary tree expression with code CODE of type TYPE with
13269 operands OP0, OP1, and OP2. Return a folded expression if
13270 successful. Otherwise, return a tree expression with code CODE of
13271 type TYPE with operands OP0, OP1, and OP2. */
13272
13273 tree
13274 fold_build3_loc (location_t loc, enum tree_code code, tree type,
13275 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13276 {
13277 tree tem;
13278 #ifdef ENABLE_FOLD_CHECKING
13279 unsigned char checksum_before_op0[16],
13280 checksum_before_op1[16],
13281 checksum_before_op2[16],
13282 checksum_after_op0[16],
13283 checksum_after_op1[16],
13284 checksum_after_op2[16];
13285 struct md5_ctx ctx;
13286 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13287
13288 md5_init_ctx (&ctx);
13289 fold_checksum_tree (op0, &ctx, &ht);
13290 md5_finish_ctx (&ctx, checksum_before_op0);
13291 ht.empty ();
13292
13293 md5_init_ctx (&ctx);
13294 fold_checksum_tree (op1, &ctx, &ht);
13295 md5_finish_ctx (&ctx, checksum_before_op1);
13296 ht.empty ();
13297
13298 md5_init_ctx (&ctx);
13299 fold_checksum_tree (op2, &ctx, &ht);
13300 md5_finish_ctx (&ctx, checksum_before_op2);
13301 ht.empty ();
13302 #endif
13303
13304 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13305 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13306 if (!tem)
13307 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
13308
13309 #ifdef ENABLE_FOLD_CHECKING
13310 md5_init_ctx (&ctx);
13311 fold_checksum_tree (op0, &ctx, &ht);
13312 md5_finish_ctx (&ctx, checksum_after_op0);
13313 ht.empty ();
13314
13315 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13316 fold_check_failed (op0, tem);
13317
13318 md5_init_ctx (&ctx);
13319 fold_checksum_tree (op1, &ctx, &ht);
13320 md5_finish_ctx (&ctx, checksum_after_op1);
13321 ht.empty ();
13322
13323 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13324 fold_check_failed (op1, tem);
13325
13326 md5_init_ctx (&ctx);
13327 fold_checksum_tree (op2, &ctx, &ht);
13328 md5_finish_ctx (&ctx, checksum_after_op2);
13329
13330 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13331 fold_check_failed (op2, tem);
13332 #endif
13333 return tem;
13334 }
13335
13336 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13337 arguments in ARGARRAY, and a null static chain.
13338 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13339 of type TYPE from the given operands as constructed by build_call_array. */
13340
13341 tree
13342 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13343 int nargs, tree *argarray)
13344 {
13345 tree tem;
13346 #ifdef ENABLE_FOLD_CHECKING
13347 unsigned char checksum_before_fn[16],
13348 checksum_before_arglist[16],
13349 checksum_after_fn[16],
13350 checksum_after_arglist[16];
13351 struct md5_ctx ctx;
13352 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13353 int i;
13354
13355 md5_init_ctx (&ctx);
13356 fold_checksum_tree (fn, &ctx, &ht);
13357 md5_finish_ctx (&ctx, checksum_before_fn);
13358 ht.empty ();
13359
13360 md5_init_ctx (&ctx);
13361 for (i = 0; i < nargs; i++)
13362 fold_checksum_tree (argarray[i], &ctx, &ht);
13363 md5_finish_ctx (&ctx, checksum_before_arglist);
13364 ht.empty ();
13365 #endif
13366
13367 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13368 if (!tem)
13369 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
13370
13371 #ifdef ENABLE_FOLD_CHECKING
13372 md5_init_ctx (&ctx);
13373 fold_checksum_tree (fn, &ctx, &ht);
13374 md5_finish_ctx (&ctx, checksum_after_fn);
13375 ht.empty ();
13376
13377 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13378 fold_check_failed (fn, tem);
13379
13380 md5_init_ctx (&ctx);
13381 for (i = 0; i < nargs; i++)
13382 fold_checksum_tree (argarray[i], &ctx, &ht);
13383 md5_finish_ctx (&ctx, checksum_after_arglist);
13384
13385 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13386 fold_check_failed (NULL_TREE, tem);
13387 #endif
13388 return tem;
13389 }
13390
13391 /* Perform constant folding and related simplification of initializer
13392 expression EXPR. These behave identically to "fold_buildN" but ignore
13393 potential run-time traps and exceptions that fold must preserve. */
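/* For instance (illustrative), with the flags cleared below a static
   initializer such as 1.0 / 0.0 may be folded to +Inf, and a signed
   overflow need not be kept as a potentially trapping operation, whereas
   the plain fold_buildN routines must preserve such run-time effects.  */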
13394
13395 #define START_FOLD_INIT \
13396 int saved_signaling_nans = flag_signaling_nans;\
13397 int saved_trapping_math = flag_trapping_math;\
13398 int saved_rounding_math = flag_rounding_math;\
13399 int saved_trapv = flag_trapv;\
13400 int saved_folding_initializer = folding_initializer;\
13401 flag_signaling_nans = 0;\
13402 flag_trapping_math = 0;\
13403 flag_rounding_math = 0;\
13404 flag_trapv = 0;\
13405 folding_initializer = 1;
13406
13407 #define END_FOLD_INIT \
13408 flag_signaling_nans = saved_signaling_nans;\
13409 flag_trapping_math = saved_trapping_math;\
13410 flag_rounding_math = saved_rounding_math;\
13411 flag_trapv = saved_trapv;\
13412 folding_initializer = saved_folding_initializer;
13413
13414 tree
13415 fold_build1_initializer_loc (location_t loc, enum tree_code code,
13416 tree type, tree op)
13417 {
13418 tree result;
13419 START_FOLD_INIT;
13420
13421 result = fold_build1_loc (loc, code, type, op);
13422
13423 END_FOLD_INIT;
13424 return result;
13425 }
13426
13427 tree
13428 fold_build2_initializer_loc (location_t loc, enum tree_code code,
13429 tree type, tree op0, tree op1)
13430 {
13431 tree result;
13432 START_FOLD_INIT;
13433
13434 result = fold_build2_loc (loc, code, type, op0, op1);
13435
13436 END_FOLD_INIT;
13437 return result;
13438 }
13439
13440 tree
13441 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
13442 int nargs, tree *argarray)
13443 {
13444 tree result;
13445 START_FOLD_INIT;
13446
13447 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
13448
13449 END_FOLD_INIT;
13450 return result;
13451 }
13452
13453 #undef START_FOLD_INIT
13454 #undef END_FOLD_INIT
13455
13456 /* Determine whether the first argument is a multiple of the second argument.
13457 Return 0 if it is not, or if we cannot easily determine that it is.
13458
13459 An example of the sort of thing we care about (at this point; this routine
13460 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13461 fold cases do now) is discovering that
13462
13463 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13464
13465 is a multiple of
13466
13467 SAVE_EXPR (J * 8)
13468
13469 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13470
13471 This code also handles discovering that
13472
13473 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13474
13475 is a multiple of 8 so we don't have to worry about dealing with a
13476 possible remainder.
13477
13478 Note that we *look* inside a SAVE_EXPR only to determine how it was
13479 calculated; it is not safe for fold to do much of anything else with the
13480 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13481 at run time. For example, the latter example above *cannot* be implemented
13482 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13483 evaluation time of the original SAVE_EXPR is not necessarily the same at
13484 the time the new expression is evaluated. The only optimization of this
13485 sort that would be valid is changing
13486
13487 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13488
13489 divided by 8 to
13490
13491 SAVE_EXPR (I) * SAVE_EXPR (J)
13492
13493 (where the same SAVE_EXPR (J) is used in the original and the
13494 transformed version). */
13495
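/* A few illustrative cases (X is an arbitrary integer expression):
   multiple_of_p returns 1 for TOP = X * 12 with BOTTOM = 4 (MULT_EXPR with
   a constant operand divisible by BOTTOM), for TOP = X << 3 with BOTTOM = 8
   (LSHIFT_EXPR), and for TOP = X & ~7 with BOTTOM = 8 (BIT_AND_EXPR whose
   mask is a multiple of BOTTOM).  */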
13496 int
13497 multiple_of_p (tree type, const_tree top, const_tree bottom)
13498 {
13499 gimple *stmt;
13500 tree t1, op1, op2;
13501
13502 if (operand_equal_p (top, bottom, 0))
13503 return 1;
13504
13505 if (TREE_CODE (type) != INTEGER_TYPE)
13506 return 0;
13507
13508 switch (TREE_CODE (top))
13509 {
13510 case BIT_AND_EXPR:
13511 /* Bitwise and provides a power of two multiple. If the mask is
13512 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13513 if (!integer_pow2p (bottom))
13514 return 0;
13515 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13516 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13517
13518 case MULT_EXPR:
13519 if (TREE_CODE (bottom) == INTEGER_CST)
13520 {
13521 op1 = TREE_OPERAND (top, 0);
13522 op2 = TREE_OPERAND (top, 1);
13523 if (TREE_CODE (op1) == INTEGER_CST)
13524 std::swap (op1, op2);
13525 if (TREE_CODE (op2) == INTEGER_CST)
13526 {
13527 if (multiple_of_p (type, op2, bottom))
13528 return 1;
13529 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
13530 if (multiple_of_p (type, bottom, op2))
13531 {
13532 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
13533 wi::to_widest (op2));
13534 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
13535 {
13536 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
13537 return multiple_of_p (type, op1, op2);
13538 }
13539 }
13540 return multiple_of_p (type, op1, bottom);
13541 }
13542 }
13543 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13544 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13545
13546 case MINUS_EXPR:
13547 /* It is impossible to prove precisely whether op0 - op1 is a multiple
13548 of bottom, so be conservative here and check whether both op0 and op1
13549 are multiples of bottom. Note we check the second operand first
13550 since it's usually simpler. */
13551 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13552 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13553
13554 case PLUS_EXPR:
13555 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
13556 as op0 - 3 if the expression has unsigned type. For example,
13557 (X / 3) + 0xfffffffd is multiple of 3, but 0xfffffffd is not. */
13558 op1 = TREE_OPERAND (top, 1);
13559 if (TYPE_UNSIGNED (type)
13560 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
13561 op1 = fold_build1 (NEGATE_EXPR, type, op1);
13562 return (multiple_of_p (type, op1, bottom)
13563 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13564
13565 case LSHIFT_EXPR:
13566 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13567 {
13568 op1 = TREE_OPERAND (top, 1);
13569 /* const_binop may not detect overflow correctly,
13570 so check for it explicitly here. */
13571 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
13572 wi::to_wide (op1))
13573 && (t1 = fold_convert (type,
13574 const_binop (LSHIFT_EXPR, size_one_node,
13575 op1))) != 0
13576 && !TREE_OVERFLOW (t1))
13577 return multiple_of_p (type, t1, bottom);
13578 }
13579 return 0;
13580
13581 case NOP_EXPR:
13582 /* Can't handle conversions from non-integral or wider integral type. */
13583 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13584 || (TYPE_PRECISION (type)
13585 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13586 return 0;
13587
13588 /* fall through */
13589
13590 case SAVE_EXPR:
13591 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13592
13593 case COND_EXPR:
13594 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13595 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
13596
13597 case INTEGER_CST:
13598 if (TREE_CODE (bottom) != INTEGER_CST
13599 || integer_zerop (bottom)
13600 || (TYPE_UNSIGNED (type)
13601 && (tree_int_cst_sgn (top) < 0
13602 || tree_int_cst_sgn (bottom) < 0)))
13603 return 0;
13604 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
13605 SIGNED);
13606
13607 case SSA_NAME:
13608 if (TREE_CODE (bottom) == INTEGER_CST
13609 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
13610 && gimple_code (stmt) == GIMPLE_ASSIGN)
13611 {
13612 enum tree_code code = gimple_assign_rhs_code (stmt);
13613
13614 /* Check for special cases to see if top is defined as multiple
13615 of bottom:
13616
13617 top = X & ~(bottom - 1) ; bottom is a power of 2
13618
13619 or
13620
13621 Y = X % bottom
13622 top = X - Y. */
13623 if (code == BIT_AND_EXPR
13624 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
13625 && TREE_CODE (op2) == INTEGER_CST
13626 && integer_pow2p (bottom)
13627 && wi::multiple_of_p (wi::to_widest (op2),
13628 wi::to_widest (bottom), UNSIGNED))
13629 return 1;
13630
13631 op1 = gimple_assign_rhs1 (stmt);
13632 if (code == MINUS_EXPR
13633 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
13634 && TREE_CODE (op2) == SSA_NAME
13635 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
13636 && gimple_code (stmt) == GIMPLE_ASSIGN
13637 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
13638 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
13639 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
13640 return 1;
13641 }
13642
13643 /* fall through */
13644
13645 default:
13646 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
13647 return multiple_p (wi::to_poly_widest (top),
13648 wi::to_poly_widest (bottom));
13649
13650 return 0;
13651 }
13652 }
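
/* As an illustration: for an unsigned expression of the form X * 16 + 24,
   multiple_of_p returns 1 when BOTTOM is 8, because both the factor 16 and
   the addend 24 are themselves multiples of 8, but 0 when BOTTOM is 16,
   because 24 is not a multiple of 16.  */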
13653
13654 #define tree_expr_nonnegative_warnv_p(X, Y) \
13655 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13656
13657 #define RECURSE(X) \
13658 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
13659
13660 /* Return true if CODE or TYPE is known to be non-negative. */
13661
13662 static bool
13663 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
13664 {
13665 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13666 && truth_value_p (code))
13667 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13668 have a signed:1 type (where the values are -1 and 0). */
13669 return true;
13670 return false;
13671 }
13672
13673 /* Return true if (CODE OP0) is known to be non-negative. If the return
13674 value is based on the assumption that signed overflow is undefined,
13675 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13676 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13677
13678 bool
13679 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13680 bool *strict_overflow_p, int depth)
13681 {
13682 if (TYPE_UNSIGNED (type))
13683 return true;
13684
13685 switch (code)
13686 {
13687 case ABS_EXPR:
13688 /* We can't return 1 if flag_wrapv is set because
13689 ABS_EXPR<INT_MIN> = INT_MIN. */
13690 if (!ANY_INTEGRAL_TYPE_P (type))
13691 return true;
13692 if (TYPE_OVERFLOW_UNDEFINED (type))
13693 {
13694 *strict_overflow_p = true;
13695 return true;
13696 }
13697 break;
13698
13699 case NON_LVALUE_EXPR:
13700 case FLOAT_EXPR:
13701 case FIX_TRUNC_EXPR:
13702 return RECURSE (op0);
13703
13704 CASE_CONVERT:
13705 {
13706 tree inner_type = TREE_TYPE (op0);
13707 tree outer_type = type;
13708
13709 if (TREE_CODE (outer_type) == REAL_TYPE)
13710 {
13711 if (TREE_CODE (inner_type) == REAL_TYPE)
13712 return RECURSE (op0);
13713 if (INTEGRAL_TYPE_P (inner_type))
13714 {
13715 if (TYPE_UNSIGNED (inner_type))
13716 return true;
13717 return RECURSE (op0);
13718 }
13719 }
13720 else if (INTEGRAL_TYPE_P (outer_type))
13721 {
13722 if (TREE_CODE (inner_type) == REAL_TYPE)
13723 return RECURSE (op0);
13724 if (INTEGRAL_TYPE_P (inner_type))
13725 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13726 && TYPE_UNSIGNED (inner_type);
13727 }
13728 }
13729 break;
13730
13731 default:
13732 return tree_simple_nonnegative_warnv_p (code, type);
13733 }
13734
13735 /* We don't know sign of `t', so be conservative and return false. */
13736 return false;
13737 }
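
/* For instance, a widening conversion such as (int) c, where c has type
   unsigned char, is recognized as non-negative here: the inner type is
   unsigned and strictly narrower than the outer type.  A same-width
   conversion like (int) u with u of type unsigned int is not, since the
   result may have its sign bit set.  */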
13738
13739 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
13740 value is based on the assumption that signed overflow is undefined,
13741 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13742 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13743
13744 bool
13745 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13746 tree op1, bool *strict_overflow_p,
13747 int depth)
13748 {
13749 if (TYPE_UNSIGNED (type))
13750 return true;
13751
13752 switch (code)
13753 {
13754 case POINTER_PLUS_EXPR:
13755 case PLUS_EXPR:
13756 if (FLOAT_TYPE_P (type))
13757 return RECURSE (op0) && RECURSE (op1);
13758
13759 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13760 both unsigned and at least 2 bits shorter than the result. */
13761 if (TREE_CODE (type) == INTEGER_TYPE
13762 && TREE_CODE (op0) == NOP_EXPR
13763 && TREE_CODE (op1) == NOP_EXPR)
13764 {
13765 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
13766 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
13767 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13768 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13769 {
13770 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13771 TYPE_PRECISION (inner2)) + 1;
13772 return prec < TYPE_PRECISION (type);
13773 }
13774 }
13775 break;
13776
13777 case MULT_EXPR:
13778 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
13779 {
13780 /* x * x is always non-negative for floating point x
13781 or when signed overflow is undefined. */
13782 if (operand_equal_p (op0, op1, 0)
13783 || (RECURSE (op0) && RECURSE (op1)))
13784 {
13785 if (ANY_INTEGRAL_TYPE_P (type)
13786 && TYPE_OVERFLOW_UNDEFINED (type))
13787 *strict_overflow_p = true;
13788 return true;
13789 }
13790 }
13791
13792 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13793 both unsigned and their combined precision is less than that of the result. */
13794 if (TREE_CODE (type) == INTEGER_TYPE
13795 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
13796 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
13797 {
13798 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
13799 ? TREE_TYPE (TREE_OPERAND (op0, 0))
13800 : TREE_TYPE (op0);
13801 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
13802 ? TREE_TYPE (TREE_OPERAND (op1, 0))
13803 : TREE_TYPE (op1);
13804
13805 bool unsigned0 = TYPE_UNSIGNED (inner0);
13806 bool unsigned1 = TYPE_UNSIGNED (inner1);
13807
13808 if (TREE_CODE (op0) == INTEGER_CST)
13809 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
13810
13811 if (TREE_CODE (op1) == INTEGER_CST)
13812 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
13813
13814 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
13815 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
13816 {
13817 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
13818 ? tree_int_cst_min_precision (op0, UNSIGNED)
13819 : TYPE_PRECISION (inner0);
13820
13821 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
13822 ? tree_int_cst_min_precision (op1, UNSIGNED)
13823 : TYPE_PRECISION (inner1);
13824
13825 return precision0 + precision1 < TYPE_PRECISION (type);
13826 }
13827 }
13828 return false;
13829
13830 case BIT_AND_EXPR:
13831 case MAX_EXPR:
13832 return RECURSE (op0) || RECURSE (op1);
13833
13834 case BIT_IOR_EXPR:
13835 case BIT_XOR_EXPR:
13836 case MIN_EXPR:
13837 case RDIV_EXPR:
13838 case TRUNC_DIV_EXPR:
13839 case CEIL_DIV_EXPR:
13840 case FLOOR_DIV_EXPR:
13841 case ROUND_DIV_EXPR:
13842 return RECURSE (op0) && RECURSE (op1);
13843
13844 case TRUNC_MOD_EXPR:
13845 return RECURSE (op0);
13846
13847 case FLOOR_MOD_EXPR:
13848 return RECURSE (op1);
13849
13850 case CEIL_MOD_EXPR:
13851 case ROUND_MOD_EXPR:
13852 default:
13853 return tree_simple_nonnegative_warnv_p (code, type);
13854 }
13855
13856 /* We don't know sign of `t', so be conservative and return false. */
13857 return false;
13858 }
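
/* For example, for a signed int x the product x * x is recognized as
   non-negative only when signed overflow is undefined; *strict_overflow_p
   is then set so callers can warn about relying on that assumption.  With
   wrapping overflow (-fwrapv) the product may wrap to a negative value,
   so false is returned.  */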
13859
13860 /* Return true if T is known to be non-negative. If the return
13861 value is based on the assumption that signed overflow is undefined,
13862 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13863 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13864
13865 bool
13866 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13867 {
13868 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13869 return true;
13870
13871 switch (TREE_CODE (t))
13872 {
13873 case INTEGER_CST:
13874 return tree_int_cst_sgn (t) >= 0;
13875
13876 case REAL_CST:
13877 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13878
13879 case FIXED_CST:
13880 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
13881
13882 case COND_EXPR:
13883 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13884
13885 case SSA_NAME:
13886 /* Limit the depth of recursion to avoid quadratic behavior.
13887 This is expected to catch almost all occurrences in practice.
13888 If this code misses important cases that unbounded recursion
13889 would not, passes that need this information could be revised
13890 to provide it through dataflow propagation. */
13891 return (!name_registered_for_update_p (t)
13892 && depth < param_max_ssa_name_query_depth
13893 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
13894 strict_overflow_p, depth));
13895
13896 default:
13897 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13898 }
13899 }
13900
13901 /* Return true if T is known to be non-negative. If the return
13902 value is based on the assumption that signed overflow is undefined,
13903 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13904 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13905
13906 bool
13907 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
13908 bool *strict_overflow_p, int depth)
13909 {
13910 switch (fn)
13911 {
13912 CASE_CFN_ACOS:
13913 CASE_CFN_ACOSH:
13914 CASE_CFN_CABS:
13915 CASE_CFN_COSH:
13916 CASE_CFN_ERFC:
13917 CASE_CFN_EXP:
13918 CASE_CFN_EXP10:
13919 CASE_CFN_EXP2:
13920 CASE_CFN_FABS:
13921 CASE_CFN_FDIM:
13922 CASE_CFN_HYPOT:
13923 CASE_CFN_POW10:
13924 CASE_CFN_FFS:
13925 CASE_CFN_PARITY:
13926 CASE_CFN_POPCOUNT:
13927 CASE_CFN_CLZ:
13928 CASE_CFN_CLRSB:
13929 case CFN_BUILT_IN_BSWAP32:
13930 case CFN_BUILT_IN_BSWAP64:
13931 /* Always true. */
13932 return true;
13933
13934 CASE_CFN_SQRT:
13935 CASE_CFN_SQRT_FN:
13936 /* sqrt(-0.0) is -0.0. */
13937 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13938 return true;
13939 return RECURSE (arg0);
13940
13941 CASE_CFN_ASINH:
13942 CASE_CFN_ATAN:
13943 CASE_CFN_ATANH:
13944 CASE_CFN_CBRT:
13945 CASE_CFN_CEIL:
13946 CASE_CFN_CEIL_FN:
13947 CASE_CFN_ERF:
13948 CASE_CFN_EXPM1:
13949 CASE_CFN_FLOOR:
13950 CASE_CFN_FLOOR_FN:
13951 CASE_CFN_FMOD:
13952 CASE_CFN_FREXP:
13953 CASE_CFN_ICEIL:
13954 CASE_CFN_IFLOOR:
13955 CASE_CFN_IRINT:
13956 CASE_CFN_IROUND:
13957 CASE_CFN_LCEIL:
13958 CASE_CFN_LDEXP:
13959 CASE_CFN_LFLOOR:
13960 CASE_CFN_LLCEIL:
13961 CASE_CFN_LLFLOOR:
13962 CASE_CFN_LLRINT:
13963 CASE_CFN_LLROUND:
13964 CASE_CFN_LRINT:
13965 CASE_CFN_LROUND:
13966 CASE_CFN_MODF:
13967 CASE_CFN_NEARBYINT:
13968 CASE_CFN_NEARBYINT_FN:
13969 CASE_CFN_RINT:
13970 CASE_CFN_RINT_FN:
13971 CASE_CFN_ROUND:
13972 CASE_CFN_ROUND_FN:
13973 CASE_CFN_ROUNDEVEN:
13974 CASE_CFN_ROUNDEVEN_FN:
13975 CASE_CFN_SCALB:
13976 CASE_CFN_SCALBLN:
13977 CASE_CFN_SCALBN:
13978 CASE_CFN_SIGNBIT:
13979 CASE_CFN_SIGNIFICAND:
13980 CASE_CFN_SINH:
13981 CASE_CFN_TANH:
13982 CASE_CFN_TRUNC:
13983 CASE_CFN_TRUNC_FN:
13984 /* True if the 1st argument is nonnegative. */
13985 return RECURSE (arg0);
13986
13987 CASE_CFN_FMAX:
13988 CASE_CFN_FMAX_FN:
13989 /* True if the 1st OR 2nd arguments are nonnegative. */
13990 return RECURSE (arg0) || RECURSE (arg1);
13991
13992 CASE_CFN_FMIN:
13993 CASE_CFN_FMIN_FN:
13994 /* True if the 1st AND 2nd arguments are nonnegative. */
13995 return RECURSE (arg0) && RECURSE (arg1);
13996
13997 CASE_CFN_COPYSIGN:
13998 CASE_CFN_COPYSIGN_FN:
13999 /* True if the 2nd argument is nonnegative. */
14000 return RECURSE (arg1);
14001
14002 CASE_CFN_POWI:
14003 /* True if the 1st argument is nonnegative or the second
14004 argument is an even integer. */
14005 if (TREE_CODE (arg1) == INTEGER_CST
14006 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14007 return true;
14008 return RECURSE (arg0);
14009
14010 CASE_CFN_POW:
14011 /* True if the 1st argument is nonnegative or the second
14012 argument is an even integer valued real. */
14013 if (TREE_CODE (arg1) == REAL_CST)
14014 {
14015 REAL_VALUE_TYPE c;
14016 HOST_WIDE_INT n;
14017
14018 c = TREE_REAL_CST (arg1);
14019 n = real_to_integer (&c);
14020 if ((n & 1) == 0)
14021 {
14022 REAL_VALUE_TYPE cint;
14023 real_from_integer (&cint, VOIDmode, n, SIGNED);
14024 if (real_identical (&c, &cint))
14025 return true;
14026 }
14027 }
14028 return RECURSE (arg0);
14029
14030 default:
14031 break;
14032 }
14033 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
14034 }
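
/* For example, pow (x, 2.0) is considered non-negative regardless of x,
   because 2.0 is an even integer valued real, and fmax (x, y) is
   non-negative as soon as either argument is.  */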
14035
14036 /* Return true if T is known to be non-negative. If the return
14037 value is based on the assumption that signed overflow is undefined,
14038 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14039 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14040
14041 static bool
14042 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14043 {
14044 enum tree_code code = TREE_CODE (t);
14045 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14046 return true;
14047
14048 switch (code)
14049 {
14050 case TARGET_EXPR:
14051 {
14052 tree temp = TARGET_EXPR_SLOT (t);
14053 t = TARGET_EXPR_INITIAL (t);
14054
14055 /* If the initializer is non-void, then it's a normal expression
14056 that will be assigned to the slot. */
14057 if (!VOID_TYPE_P (t))
14058 return RECURSE (t);
14059
14060 /* Otherwise, the initializer sets the slot in some way. One common
14061 way is an assignment statement at the end of the initializer. */
14062 while (1)
14063 {
14064 if (TREE_CODE (t) == BIND_EXPR)
14065 t = expr_last (BIND_EXPR_BODY (t));
14066 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14067 || TREE_CODE (t) == TRY_CATCH_EXPR)
14068 t = expr_last (TREE_OPERAND (t, 0));
14069 else if (TREE_CODE (t) == STATEMENT_LIST)
14070 t = expr_last (t);
14071 else
14072 break;
14073 }
14074 if (TREE_CODE (t) == MODIFY_EXPR
14075 && TREE_OPERAND (t, 0) == temp)
14076 return RECURSE (TREE_OPERAND (t, 1));
14077
14078 return false;
14079 }
14080
14081 case CALL_EXPR:
14082 {
14083 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14084 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14085
14086 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14087 get_call_combined_fn (t),
14088 arg0,
14089 arg1,
14090 strict_overflow_p, depth);
14091 }
14092 case COMPOUND_EXPR:
14093 case MODIFY_EXPR:
14094 return RECURSE (TREE_OPERAND (t, 1));
14095
14096 case BIND_EXPR:
14097 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
14098
14099 case SAVE_EXPR:
14100 return RECURSE (TREE_OPERAND (t, 0));
14101
14102 default:
14103 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
14104 }
14105 }
14106
14107 #undef RECURSE
14108 #undef tree_expr_nonnegative_warnv_p
14109
14110 /* Return true if T is known to be non-negative. If the return
14111 value is based on the assumption that signed overflow is undefined,
14112 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14113 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14114
14115 bool
14116 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14117 {
14118 enum tree_code code;
14119 if (t == error_mark_node)
14120 return false;
14121
14122 code = TREE_CODE (t);
14123 switch (TREE_CODE_CLASS (code))
14124 {
14125 case tcc_binary:
14126 case tcc_comparison:
14127 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14128 TREE_TYPE (t),
14129 TREE_OPERAND (t, 0),
14130 TREE_OPERAND (t, 1),
14131 strict_overflow_p, depth);
14132
14133 case tcc_unary:
14134 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14135 TREE_TYPE (t),
14136 TREE_OPERAND (t, 0),
14137 strict_overflow_p, depth);
14138
14139 case tcc_constant:
14140 case tcc_declaration:
14141 case tcc_reference:
14142 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
14143
14144 default:
14145 break;
14146 }
14147
14148 switch (code)
14149 {
14150 case TRUTH_AND_EXPR:
14151 case TRUTH_OR_EXPR:
14152 case TRUTH_XOR_EXPR:
14153 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14154 TREE_TYPE (t),
14155 TREE_OPERAND (t, 0),
14156 TREE_OPERAND (t, 1),
14157 strict_overflow_p, depth);
14158 case TRUTH_NOT_EXPR:
14159 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14160 TREE_TYPE (t),
14161 TREE_OPERAND (t, 0),
14162 strict_overflow_p, depth);
14163
14164 case COND_EXPR:
14165 case CONSTRUCTOR:
14166 case OBJ_TYPE_REF:
14167 case ASSERT_EXPR:
14168 case ADDR_EXPR:
14169 case WITH_SIZE_EXPR:
14170 case SSA_NAME:
14171 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
14172
14173 default:
14174 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
14175 }
14176 }
14177
14178 /* Return true if `t' is known to be non-negative. Handle warnings
14179 about undefined signed overflow. */
14180
14181 bool
14182 tree_expr_nonnegative_p (tree t)
14183 {
14184 bool ret, strict_overflow_p;
14185
14186 strict_overflow_p = false;
14187 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14188 if (strict_overflow_p)
14189 fold_overflow_warning (("assuming signed overflow does not occur when "
14190 "determining that expression is always "
14191 "non-negative"),
14192 WARN_STRICT_OVERFLOW_MISC);
14193 return ret;
14194 }
14195
14196
14197 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14198 For floating point we further ensure that T is not denormal.
14199 Similar logic is present in nonzero_address in rtlanal.h.
14200
14201 If the return value is based on the assumption that signed overflow
14202 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14203 change *STRICT_OVERFLOW_P. */
14204
14205 bool
14206 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14207 bool *strict_overflow_p)
14208 {
14209 switch (code)
14210 {
14211 case ABS_EXPR:
14212 return tree_expr_nonzero_warnv_p (op0,
14213 strict_overflow_p);
14214
14215 case NOP_EXPR:
14216 {
14217 tree inner_type = TREE_TYPE (op0);
14218 tree outer_type = type;
14219
14220 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14221 && tree_expr_nonzero_warnv_p (op0,
14222 strict_overflow_p));
14223 }
14224 break;
14225
14226 case NON_LVALUE_EXPR:
14227 return tree_expr_nonzero_warnv_p (op0,
14228 strict_overflow_p);
14229
14230 default:
14231 break;
14232 }
14233
14234 return false;
14235 }
14236
14237 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14238 For floating point we further ensure that T is not denormal.
14239 Similar logic is present in nonzero_address in rtlanal.h.
14240
14241 If the return value is based on the assumption that signed overflow
14242 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14243 change *STRICT_OVERFLOW_P. */
14244
14245 bool
14246 tree_binary_nonzero_warnv_p (enum tree_code code,
14247 tree type,
14248 tree op0,
14249 tree op1, bool *strict_overflow_p)
14250 {
14251 bool sub_strict_overflow_p;
14252 switch (code)
14253 {
14254 case POINTER_PLUS_EXPR:
14255 case PLUS_EXPR:
14256 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
14257 {
14258 /* In the presence of negative values it is hard
14259 to say anything. */
14260 sub_strict_overflow_p = false;
14261 if (!tree_expr_nonnegative_warnv_p (op0,
14262 &sub_strict_overflow_p)
14263 || !tree_expr_nonnegative_warnv_p (op1,
14264 &sub_strict_overflow_p))
14265 return false;
14266 /* One of the operands must be positive and the other non-negative. */
14267 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14268 overflows, on a twos-complement machine the sum of two
14269 nonnegative numbers can never be zero. */
14270 return (tree_expr_nonzero_warnv_p (op0,
14271 strict_overflow_p)
14272 || tree_expr_nonzero_warnv_p (op1,
14273 strict_overflow_p));
14274 }
14275 break;
14276
14277 case MULT_EXPR:
14278 if (TYPE_OVERFLOW_UNDEFINED (type))
14279 {
14280 if (tree_expr_nonzero_warnv_p (op0,
14281 strict_overflow_p)
14282 && tree_expr_nonzero_warnv_p (op1,
14283 strict_overflow_p))
14284 {
14285 *strict_overflow_p = true;
14286 return true;
14287 }
14288 }
14289 break;
14290
14291 case MIN_EXPR:
14292 sub_strict_overflow_p = false;
14293 if (tree_expr_nonzero_warnv_p (op0,
14294 &sub_strict_overflow_p)
14295 && tree_expr_nonzero_warnv_p (op1,
14296 &sub_strict_overflow_p))
14297 {
14298 if (sub_strict_overflow_p)
14299 *strict_overflow_p = true;
14300 }
14301 break;
14302
14303 case MAX_EXPR:
14304 sub_strict_overflow_p = false;
14305 if (tree_expr_nonzero_warnv_p (op0,
14306 &sub_strict_overflow_p))
14307 {
14308 if (sub_strict_overflow_p)
14309 *strict_overflow_p = true;
14310
14311 /* When both operands are nonzero, then MAX must be too. */
14312 if (tree_expr_nonzero_warnv_p (op1,
14313 strict_overflow_p))
14314 return true;
14315
14316 /* MAX where operand 0 is positive is positive. */
14317 return tree_expr_nonnegative_warnv_p (op0,
14318 strict_overflow_p);
14319 }
14320 /* MAX where operand 1 is positive is positive. */
14321 else if (tree_expr_nonzero_warnv_p (op1,
14322 &sub_strict_overflow_p)
14323 && tree_expr_nonnegative_warnv_p (op1,
14324 &sub_strict_overflow_p))
14325 {
14326 if (sub_strict_overflow_p)
14327 *strict_overflow_p = true;
14328 return true;
14329 }
14330 break;
14331
14332 case BIT_IOR_EXPR:
14333 return (tree_expr_nonzero_warnv_p (op1,
14334 strict_overflow_p)
14335 || tree_expr_nonzero_warnv_p (op0,
14336 strict_overflow_p));
14337
14338 default:
14339 break;
14340 }
14341
14342 return false;
14343 }
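
/* For instance, when signed overflow is undefined, x + 1 is known to be
   nonzero if x is known to be non-negative: both operands are then
   non-negative, one of them (the constant 1) is nonzero, and two
   non-negative values can only sum to zero if both are zero.  */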
14344
14345 /* Return true when T is an address and is known to be nonzero.
14346 For floating point we further ensure that T is not denormal.
14347 Similar logic is present in nonzero_address in rtlanal.h.
14348
14349 If the return value is based on the assumption that signed overflow
14350 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14351 change *STRICT_OVERFLOW_P. */
14352
14353 bool
14354 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14355 {
14356 bool sub_strict_overflow_p;
14357 switch (TREE_CODE (t))
14358 {
14359 case INTEGER_CST:
14360 return !integer_zerop (t);
14361
14362 case ADDR_EXPR:
14363 {
14364 tree base = TREE_OPERAND (t, 0);
14365
14366 if (!DECL_P (base))
14367 base = get_base_address (base);
14368
14369 if (base && TREE_CODE (base) == TARGET_EXPR)
14370 base = TARGET_EXPR_SLOT (base);
14371
14372 if (!base)
14373 return false;
14374
14375 /* For objects in symbol table check if we know they are non-zero.
14376 Don't do anything for variables and functions before symtab is built;
14377 it is quite possible that they will be declared weak later. */
14378 int nonzero_addr = maybe_nonzero_address (base);
14379 if (nonzero_addr >= 0)
14380 return nonzero_addr;
14381
14382 /* Constants are never weak. */
14383 if (CONSTANT_CLASS_P (base))
14384 return true;
14385
14386 return false;
14387 }
14388
14389 case COND_EXPR:
14390 sub_strict_overflow_p = false;
14391 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14392 &sub_strict_overflow_p)
14393 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14394 &sub_strict_overflow_p))
14395 {
14396 if (sub_strict_overflow_p)
14397 *strict_overflow_p = true;
14398 return true;
14399 }
14400 break;
14401
14402 case SSA_NAME:
14403 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
14404 break;
14405 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
14406
14407 default:
14408 break;
14409 }
14410 return false;
14411 }
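
/* For example, the address of a string literal or other constant is known
   to be nonzero, whereas the address of an ordinary global variable is
   only known to be nonzero once the symbol table can rule out that it
   will later be declared weak.  */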
14412
14413 #define integer_valued_real_p(X) \
14414 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14415
14416 #define RECURSE(X) \
14417 ((integer_valued_real_p) (X, depth + 1))
14418
14419 /* Return true if the floating point result of (CODE OP0) has an
14420 integer value. We also allow +Inf, -Inf and NaN to be considered
14421 integer values. Return false for signaling NaN.
14422
14423 DEPTH is the current nesting depth of the query. */
14424
14425 bool
14426 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
14427 {
14428 switch (code)
14429 {
14430 case FLOAT_EXPR:
14431 return true;
14432
14433 case ABS_EXPR:
14434 return RECURSE (op0);
14435
14436 CASE_CONVERT:
14437 {
14438 tree type = TREE_TYPE (op0);
14439 if (TREE_CODE (type) == INTEGER_TYPE)
14440 return true;
14441 if (TREE_CODE (type) == REAL_TYPE)
14442 return RECURSE (op0);
14443 break;
14444 }
14445
14446 default:
14447 break;
14448 }
14449 return false;
14450 }
14451
14452 /* Return true if the floating point result of (CODE OP0 OP1) has an
14453 integer value. We also allow +Inf, -Inf and NaN to be considered
14454 integer values. Return false for signaling NaN.
14455
14456 DEPTH is the current nesting depth of the query. */
14457
14458 bool
14459 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
14460 {
14461 switch (code)
14462 {
14463 case PLUS_EXPR:
14464 case MINUS_EXPR:
14465 case MULT_EXPR:
14466 case MIN_EXPR:
14467 case MAX_EXPR:
14468 return RECURSE (op0) && RECURSE (op1);
14469
14470 default:
14471 break;
14472 }
14473 return false;
14474 }
14475
14476 /* Return true if the floating point result of calling the function FN with
14477 arguments ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and
14478 NaN to be considered integer values. Return false for signaling NaN. If FN
14479 takes fewer than 2 arguments, the remaining ARGn are null.
14480
14481 DEPTH is the current nesting depth of the query. */
14482
14483 bool
14484 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
14485 {
14486 switch (fn)
14487 {
14488 CASE_CFN_CEIL:
14489 CASE_CFN_CEIL_FN:
14490 CASE_CFN_FLOOR:
14491 CASE_CFN_FLOOR_FN:
14492 CASE_CFN_NEARBYINT:
14493 CASE_CFN_NEARBYINT_FN:
14494 CASE_CFN_RINT:
14495 CASE_CFN_RINT_FN:
14496 CASE_CFN_ROUND:
14497 CASE_CFN_ROUND_FN:
14498 CASE_CFN_ROUNDEVEN:
14499 CASE_CFN_ROUNDEVEN_FN:
14500 CASE_CFN_TRUNC:
14501 CASE_CFN_TRUNC_FN:
14502 return true;
14503
14504 CASE_CFN_FMIN:
14505 CASE_CFN_FMIN_FN:
14506 CASE_CFN_FMAX:
14507 CASE_CFN_FMAX_FN:
14508 return RECURSE (arg0) && RECURSE (arg1);
14509
14510 default:
14511 break;
14512 }
14513 return false;
14514 }
14515
14516 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
14517 has an integer value. We also allow +Inf, -Inf and NaN to be
14518 considered integer values. Return false for signaling NaN.
14519
14520 DEPTH is the current nesting depth of the query. */
14521
14522 bool
14523 integer_valued_real_single_p (tree t, int depth)
14524 {
14525 switch (TREE_CODE (t))
14526 {
14527 case REAL_CST:
14528 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
14529
14530 case COND_EXPR:
14531 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
14532
14533 case SSA_NAME:
14534 /* Limit the depth of recursion to avoid quadratic behavior.
14535 This is expected to catch almost all occurrences in practice.
14536 If this code misses important cases that unbounded recursion
14537 would not, passes that need this information could be revised
14538 to provide it through dataflow propagation. */
14539 return (!name_registered_for_update_p (t)
14540 && depth < param_max_ssa_name_query_depth
14541 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
14542 depth));
14543
14544 default:
14545 break;
14546 }
14547 return false;
14548 }
14549
14550 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
14551 has an integer value. We also allow +Inf, -Inf and NaN to be
14552 considered integer values. Return false for signaling NaN.
14553
14554 DEPTH is the current nesting depth of the query. */
14555
14556 static bool
14557 integer_valued_real_invalid_p (tree t, int depth)
14558 {
14559 switch (TREE_CODE (t))
14560 {
14561 case COMPOUND_EXPR:
14562 case MODIFY_EXPR:
14563 case BIND_EXPR:
14564 return RECURSE (TREE_OPERAND (t, 1));
14565
14566 case SAVE_EXPR:
14567 return RECURSE (TREE_OPERAND (t, 0));
14568
14569 default:
14570 break;
14571 }
14572 return false;
14573 }
14574
14575 #undef RECURSE
14576 #undef integer_valued_real_p
14577
14578 /* Return true if the floating point expression T has an integer value.
14579 We also allow +Inf, -Inf and NaN to be considered integer values.
14580 Return false for signaling NaN.
14581
14582 DEPTH is the current nesting depth of the query. */
14583
14584 bool
14585 integer_valued_real_p (tree t, int depth)
14586 {
14587 if (t == error_mark_node)
14588 return false;
14589
14590 STRIP_ANY_LOCATION_WRAPPER (t);
14591
14592 tree_code code = TREE_CODE (t);
14593 switch (TREE_CODE_CLASS (code))
14594 {
14595 case tcc_binary:
14596 case tcc_comparison:
14597 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
14598 TREE_OPERAND (t, 1), depth);
14599
14600 case tcc_unary:
14601 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
14602
14603 case tcc_constant:
14604 case tcc_declaration:
14605 case tcc_reference:
14606 return integer_valued_real_single_p (t, depth);
14607
14608 default:
14609 break;
14610 }
14611
14612 switch (code)
14613 {
14614 case COND_EXPR:
14615 case SSA_NAME:
14616 return integer_valued_real_single_p (t, depth);
14617
14618 case CALL_EXPR:
14619 {
14620 tree arg0 = (call_expr_nargs (t) > 0
14621 ? CALL_EXPR_ARG (t, 0)
14622 : NULL_TREE);
14623 tree arg1 = (call_expr_nargs (t) > 1
14624 ? CALL_EXPR_ARG (t, 1)
14625 : NULL_TREE);
14626 return integer_valued_real_call_p (get_call_combined_fn (t),
14627 arg0, arg1, depth);
14628 }
14629
14630 default:
14631 return integer_valued_real_invalid_p (t, depth);
14632 }
14633 }
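
/* For example, an expression such as trunc (d) + (double) i is integer
   valued: the call is one of the rounding functions handled in
   integer_valued_real_call_p, the conversion from an integer type is a
   FLOAT_EXPR, and a PLUS_EXPR of two integer valued operands is itself
   integer valued.  */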
14634
14635 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14636 attempt to fold the expression to a constant without modifying TYPE,
14637 OP0 or OP1.
14638
14639 If the expression could be simplified to a constant, then return
14640 the constant. If the expression would not be simplified to a
14641 constant, then return NULL_TREE. */
14642
14643 tree
14644 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14645 {
14646 tree tem = fold_binary (code, type, op0, op1);
14647 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14648 }
14649
14650 /* Given the components of a unary expression CODE, TYPE and OP0,
14651 attempt to fold the expression to a constant without modifying
14652 TYPE or OP0.
14653
14654 If the expression could be simplified to a constant, then return
14655 the constant. If the expression would not be simplified to a
14656 constant, then return NULL_TREE. */
14657
14658 tree
14659 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14660 {
14661 tree tem = fold_unary (code, type, op0);
14662 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14663 }
14664
14665 /* If EXP represents referencing an element in a constant string
14666 (either via pointer arithmetic or array indexing), return the
14667 tree representing the value accessed, otherwise return NULL. */
14668
14669 tree
14670 fold_read_from_constant_string (tree exp)
14671 {
14672 if ((TREE_CODE (exp) == INDIRECT_REF
14673 || TREE_CODE (exp) == ARRAY_REF)
14674 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14675 {
14676 tree exp1 = TREE_OPERAND (exp, 0);
14677 tree index;
14678 tree string;
14679 location_t loc = EXPR_LOCATION (exp);
14680
14681 if (TREE_CODE (exp) == INDIRECT_REF)
14682 string = string_constant (exp1, &index, NULL, NULL);
14683 else
14684 {
14685 tree low_bound = array_ref_low_bound (exp);
14686 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
14687
14688 /* Optimize the special-case of a zero lower bound.
14689
14690 We convert the low_bound to sizetype to avoid some problems
14691 with constant folding. (E.g. suppose the lower bound is 1,
14692 and its mode is QI. Without the conversion, (ARRAY
14693 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14694 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
14695 if (! integer_zerop (low_bound))
14696 index = size_diffop_loc (loc, index,
14697 fold_convert_loc (loc, sizetype, low_bound));
14698
14699 string = exp1;
14700 }
14701
14702 scalar_int_mode char_mode;
14703 if (string
14704 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14705 && TREE_CODE (string) == STRING_CST
14706 && tree_fits_uhwi_p (index)
14707 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14708 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
14709 &char_mode)
14710 && GET_MODE_SIZE (char_mode) == 1)
14711 return build_int_cst_type (TREE_TYPE (exp),
14712 (TREE_STRING_POINTER (string)
14713 [TREE_INT_CST_LOW (index)]));
14714 }
14715 return NULL;
14716 }
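
/* For example, an access such as "abc"[1] (an ARRAY_REF into a STRING_CST
   with a constant index) folds to the character constant 'b', provided the
   element type is a single-byte integer type and the index lies within the
   string.  */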
14717
14718 /* Folds a read from vector element at IDX of vector ARG. */
14719
14720 tree
14721 fold_read_from_vector (tree arg, poly_uint64 idx)
14722 {
14723 unsigned HOST_WIDE_INT i;
14724 if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
14725 && known_ge (idx, 0u)
14726 && idx.is_constant (&i))
14727 {
14728 if (TREE_CODE (arg) == VECTOR_CST)
14729 return VECTOR_CST_ELT (arg, i);
14730 else if (TREE_CODE (arg) == CONSTRUCTOR)
14731 {
14732 if (CONSTRUCTOR_NELTS (arg)
14733 && VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (arg, 0)->value)))
14734 return NULL_TREE;
14735 if (i >= CONSTRUCTOR_NELTS (arg))
14736 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
14737 return CONSTRUCTOR_ELT (arg, i)->value;
14738 }
14739 }
14740 return NULL_TREE;
14741 }
14742
14743 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14744 an integer constant, real, or fixed-point constant.
14745
14746 TYPE is the type of the result. */
14747
14748 static tree
14749 fold_negate_const (tree arg0, tree type)
14750 {
14751 tree t = NULL_TREE;
14752
14753 switch (TREE_CODE (arg0))
14754 {
14755 case REAL_CST:
14756 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14757 break;
14758
14759 case FIXED_CST:
14760 {
14761 FIXED_VALUE_TYPE f;
14762 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14763 &(TREE_FIXED_CST (arg0)), NULL,
14764 TYPE_SATURATING (type));
14765 t = build_fixed (type, f);
14766 /* Propagate overflow flags. */
14767 if (overflow_p | TREE_OVERFLOW (arg0))
14768 TREE_OVERFLOW (t) = 1;
14769 break;
14770 }
14771
14772 default:
14773 if (poly_int_tree_p (arg0))
14774 {
14775 wi::overflow_type overflow;
14776 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
14777 t = force_fit_type (type, res, 1,
14778 (overflow && ! TYPE_UNSIGNED (type))
14779 || TREE_OVERFLOW (arg0));
14780 break;
14781 }
14782
14783 gcc_unreachable ();
14784 }
14785
14786 return t;
14787 }
14788
14789 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14790 an integer constant or real constant.
14791
14792 TYPE is the type of the result. */
14793
14794 tree
14795 fold_abs_const (tree arg0, tree type)
14796 {
14797 tree t = NULL_TREE;
14798
14799 switch (TREE_CODE (arg0))
14800 {
14801 case INTEGER_CST:
14802 {
14803 /* If the value is unsigned or non-negative, then the absolute value
14804 is the same as the ordinary value. */
14805 wide_int val = wi::to_wide (arg0);
14806 wi::overflow_type overflow = wi::OVF_NONE;
14807 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
14808 ;
14809
14810 /* If the value is negative, then the absolute value is
14811 its negation. */
14812 else
14813 val = wi::neg (val, &overflow);
14814
14815 /* Force to the destination type, set TREE_OVERFLOW for signed
14816 TYPE only. */
14817 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
14818 }
14819 break;
14820
14821 case REAL_CST:
14822 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14823 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14824 else
14825 t = arg0;
14826 break;
14827
14828 default:
14829 gcc_unreachable ();
14830 }
14831
14832 return t;
14833 }
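
/* Note that for the minimum value of a signed type, the negation does not
   fit in the type, so fold_abs_const returns that same minimum value with
   TREE_OVERFLOW set.  */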
14834
14835 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14836 constant. TYPE is the type of the result. */
14837
14838 static tree
14839 fold_not_const (const_tree arg0, tree type)
14840 {
14841 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14842
14843 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
14844 }
14845
14846 /* Given CODE, a relational operator, the target type, TYPE and two
14847 constant operands OP0 and OP1, return the result of the
14848 relational operation. If the result is not a compile time
14849 constant, then return NULL_TREE. */
14850
14851 static tree
14852 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14853 {
14854 int result, invert;
14855
14856 /* From here on, the only cases we handle are when the result is
14857 known to be a constant. */
14858
14859 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14860 {
14861 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14862 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14863
14864 /* Handle the cases where either operand is a NaN. */
14865 if (real_isnan (c0) || real_isnan (c1))
14866 {
14867 switch (code)
14868 {
14869 case EQ_EXPR:
14870 case ORDERED_EXPR:
14871 result = 0;
14872 break;
14873
14874 case NE_EXPR:
14875 case UNORDERED_EXPR:
14876 case UNLT_EXPR:
14877 case UNLE_EXPR:
14878 case UNGT_EXPR:
14879 case UNGE_EXPR:
14880 case UNEQ_EXPR:
14881 result = 1;
14882 break;
14883
14884 case LT_EXPR:
14885 case LE_EXPR:
14886 case GT_EXPR:
14887 case GE_EXPR:
14888 case LTGT_EXPR:
14889 if (flag_trapping_math)
14890 return NULL_TREE;
14891 result = 0;
14892 break;
14893
14894 default:
14895 gcc_unreachable ();
14896 }
14897
14898 return constant_boolean_node (result, type);
14899 }
14900
14901 return constant_boolean_node (real_compare (code, c0, c1), type);
14902 }
14903
14904 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14905 {
14906 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14907 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14908 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14909 }
14910
14911 /* Handle equality/inequality of complex constants. */
14912 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14913 {
14914 tree rcond = fold_relational_const (code, type,
14915 TREE_REALPART (op0),
14916 TREE_REALPART (op1));
14917 tree icond = fold_relational_const (code, type,
14918 TREE_IMAGPART (op0),
14919 TREE_IMAGPART (op1));
14920 if (code == EQ_EXPR)
14921 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14922 else if (code == NE_EXPR)
14923 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14924 else
14925 return NULL_TREE;
14926 }
14927
14928 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
14929 {
14930 if (!VECTOR_TYPE_P (type))
14931 {
14932 /* Have vector comparison with scalar boolean result. */
14933 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
14934 && known_eq (VECTOR_CST_NELTS (op0),
14935 VECTOR_CST_NELTS (op1)));
14936 unsigned HOST_WIDE_INT nunits;
14937 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
14938 return NULL_TREE;
14939 for (unsigned i = 0; i < nunits; i++)
14940 {
14941 tree elem0 = VECTOR_CST_ELT (op0, i);
14942 tree elem1 = VECTOR_CST_ELT (op1, i);
14943 tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
14944 if (tmp == NULL_TREE)
14945 return NULL_TREE;
14946 if (integer_zerop (tmp))
14947 return constant_boolean_node (code == NE_EXPR, type);
14948 }
14949 return constant_boolean_node (code == EQ_EXPR, type);
14950 }
14951 tree_vector_builder elts;
14952 if (!elts.new_binary_operation (type, op0, op1, false))
14953 return NULL_TREE;
14954 unsigned int count = elts.encoded_nelts ();
14955 for (unsigned i = 0; i < count; i++)
14956 {
14957 tree elem_type = TREE_TYPE (type);
14958 tree elem0 = VECTOR_CST_ELT (op0, i);
14959 tree elem1 = VECTOR_CST_ELT (op1, i);
14960
14961 tree tem = fold_relational_const (code, elem_type,
14962 elem0, elem1);
14963
14964 if (tem == NULL_TREE)
14965 return NULL_TREE;
14966
14967 elts.quick_push (build_int_cst (elem_type,
14968 integer_zerop (tem) ? 0 : -1));
14969 }
14970
14971 return elts.build ();
14972 }
14973
14974 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14975
14976 To compute GT, swap the arguments and do LT.
14977 To compute GE, do LT and invert the result.
14978 To compute LE, swap the arguments, do LT and invert the result.
14979 To compute NE, do EQ and invert the result.
14980
14981 Therefore, the code below must handle only EQ and LT. */
14982
14983 if (code == LE_EXPR || code == GT_EXPR)
14984 {
14985 std::swap (op0, op1);
14986 code = swap_tree_comparison (code);
14987 }
14988
14989 /* Note that it is safe to invert for real values here because we
14990 have already handled the one case where it matters. */
14991
14992 invert = 0;
14993 if (code == NE_EXPR || code == GE_EXPR)
14994 {
14995 invert = 1;
14996 code = invert_tree_comparison (code, false);
14997 }
14998
14999 /* Compute a result for LT or EQ if args permit;
15000 otherwise return NULL_TREE. */
15001 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15002 {
15003 if (code == EQ_EXPR)
15004 result = tree_int_cst_equal (op0, op1);
15005 else
15006 result = tree_int_cst_lt (op0, op1);
15007 }
15008 else
15009 return NULL_TREE;
15010
15011 if (invert)
15012 result ^= 1;
15013 return constant_boolean_node (result, type);
15014 }
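
/* For example, with a NaN operand 1.0 < NAN folds to false and
   1.0 != NAN folds to true, but the ordered comparisons are left unfolded
   (NULL_TREE is returned) when -ftrapping-math is in effect, since folding
   them away would lose the invalid-operand exception.  */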
15015
15016 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15017 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15018 itself. */
15019
15020 tree
15021 fold_build_cleanup_point_expr (tree type, tree expr)
15022 {
15023 /* If the expression does not have side effects then we don't have to wrap
15024 it with a cleanup point expression. */
15025 if (!TREE_SIDE_EFFECTS (expr))
15026 return expr;
15027
15028 /* If the expression is a return, check whether the expression inside the
15029 return, or the right hand side of the modify expression inside the return,
15030 has no side effects. If either of them has no side effects, we don't need
15031 to wrap the expression in a cleanup point expression. Note we don't check
15032 the left hand side of the modify because it should always be a return decl. */
15033 if (TREE_CODE (expr) == RETURN_EXPR)
15034 {
15035 tree op = TREE_OPERAND (expr, 0);
15036 if (!op || !TREE_SIDE_EFFECTS (op))
15037 return expr;
15038 op = TREE_OPERAND (op, 1);
15039 if (!TREE_SIDE_EFFECTS (op))
15040 return expr;
15041 }
15042
15043 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
15044 }
15045
15046 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15047 of an indirection through OP0, or NULL_TREE if no simplification is
15048 possible. */
15049
15050 tree
15051 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15052 {
15053 tree sub = op0;
15054 tree subtype;
15055 poly_uint64 const_op01;
15056
15057 STRIP_NOPS (sub);
15058 subtype = TREE_TYPE (sub);
15059 if (!POINTER_TYPE_P (subtype)
15060 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
15061 return NULL_TREE;
15062
15063 if (TREE_CODE (sub) == ADDR_EXPR)
15064 {
15065 tree op = TREE_OPERAND (sub, 0);
15066 tree optype = TREE_TYPE (op);
15067
15068 /* *&CONST_DECL -> the value of the const decl. */
15069 if (TREE_CODE (op) == CONST_DECL)
15070 return DECL_INITIAL (op);
15071 /* *&p => p; make sure to handle *&"str"[cst] here. */
15072 if (type == optype)
15073 {
15074 tree fop = fold_read_from_constant_string (op);
15075 if (fop)
15076 return fop;
15077 else
15078 return op;
15079 }
15080 /* *(foo *)&fooarray => fooarray[0] */
15081 else if (TREE_CODE (optype) == ARRAY_TYPE
15082 && type == TREE_TYPE (optype)
15083 && (!in_gimple_form
15084 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15085 {
15086 tree type_domain = TYPE_DOMAIN (optype);
15087 tree min_val = size_zero_node;
15088 if (type_domain && TYPE_MIN_VALUE (type_domain))
15089 min_val = TYPE_MIN_VALUE (type_domain);
15090 if (in_gimple_form
15091 && TREE_CODE (min_val) != INTEGER_CST)
15092 return NULL_TREE;
15093 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15094 NULL_TREE, NULL_TREE);
15095 }
15096 /* *(foo *)&complexfoo => __real__ complexfoo */
15097 else if (TREE_CODE (optype) == COMPLEX_TYPE
15098 && type == TREE_TYPE (optype))
15099 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15100 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15101 else if (VECTOR_TYPE_P (optype)
15102 && type == TREE_TYPE (optype))
15103 {
15104 tree part_width = TYPE_SIZE (type);
15105 tree index = bitsize_int (0);
15106 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
15107 index);
15108 }
15109 }
15110
15111 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15112 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
15113 {
15114 tree op00 = TREE_OPERAND (sub, 0);
15115 tree op01 = TREE_OPERAND (sub, 1);
15116
15117 STRIP_NOPS (op00);
15118 if (TREE_CODE (op00) == ADDR_EXPR)
15119 {
15120 tree op00type;
15121 op00 = TREE_OPERAND (op00, 0);
15122 op00type = TREE_TYPE (op00);
15123
15124 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15125 if (VECTOR_TYPE_P (op00type)
15126 && type == TREE_TYPE (op00type)
15127 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
15128 but we want to treat offsets with MSB set as negative.
15129 For the code below negative offsets are invalid and
15130 TYPE_SIZE of the element is something unsigned, so
15131 check whether op01 fits into poly_int64, which implies
15132 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
15133 then just use poly_uint64 because we want to treat the
15134 value as unsigned. */
15135 && tree_fits_poly_int64_p (op01))
15136 {
15137 tree part_width = TYPE_SIZE (type);
15138 poly_uint64 max_offset
15139 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
15140 * TYPE_VECTOR_SUBPARTS (op00type));
15141 if (known_lt (const_op01, max_offset))
15142 {
15143 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
15144 return fold_build3_loc (loc,
15145 BIT_FIELD_REF, type, op00,
15146 part_width, index);
15147 }
15148 }
15149 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15150 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15151 && type == TREE_TYPE (op00type))
15152 {
15153 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
15154 const_op01))
15155 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15156 }
15157 /* ((foo *)&fooarray)[1] => fooarray[1] */
15158 else if (TREE_CODE (op00type) == ARRAY_TYPE
15159 && type == TREE_TYPE (op00type))
15160 {
15161 tree type_domain = TYPE_DOMAIN (op00type);
15162 tree min_val = size_zero_node;
15163 if (type_domain && TYPE_MIN_VALUE (type_domain))
15164 min_val = TYPE_MIN_VALUE (type_domain);
15165 poly_uint64 type_size, index;
15166 if (poly_int_tree_p (min_val)
15167 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
15168 && multiple_p (const_op01, type_size, &index))
15169 {
15170 poly_offset_int off = index + wi::to_poly_offset (min_val);
15171 op01 = wide_int_to_tree (sizetype, off);
15172 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15173 NULL_TREE, NULL_TREE);
15174 }
15175 }
15176 }
15177 }
15178
15179 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15180 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15181 && type == TREE_TYPE (TREE_TYPE (subtype))
15182 && (!in_gimple_form
15183 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15184 {
15185 tree type_domain;
15186 tree min_val = size_zero_node;
15187 sub = build_fold_indirect_ref_loc (loc, sub);
15188 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15189 if (type_domain && TYPE_MIN_VALUE (type_domain))
15190 min_val = TYPE_MIN_VALUE (type_domain);
15191 if (in_gimple_form
15192 && TREE_CODE (min_val) != INTEGER_CST)
15193 return NULL_TREE;
15194 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15195 NULL_TREE);
15196 }
15197
15198 return NULL_TREE;
15199 }
15200
15201 /* Builds an expression for an indirection through T, simplifying some
15202 cases. */
15203
15204 tree
15205 build_fold_indirect_ref_loc (location_t loc, tree t)
15206 {
15207 tree type = TREE_TYPE (TREE_TYPE (t));
15208 tree sub = fold_indirect_ref_1 (loc, type, t);
15209
15210 if (sub)
15211 return sub;
15212
15213 return build1_loc (loc, INDIRECT_REF, type, t);
15214 }
15215
15216 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15217
15218 tree
15219 fold_indirect_ref_loc (location_t loc, tree t)
15220 {
15221 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15222
15223 if (sub)
15224 return sub;
15225 else
15226 return t;
15227 }
15228
15229 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15230 whose result is ignored. The type of the returned tree need not be
15231 the same as the original expression. */
15232
15233 tree
15234 fold_ignored_result (tree t)
15235 {
15236 if (!TREE_SIDE_EFFECTS (t))
15237 return integer_zero_node;
15238
15239 for (;;)
15240 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15241 {
15242 case tcc_unary:
15243 t = TREE_OPERAND (t, 0);
15244 break;
15245
15246 case tcc_binary:
15247 case tcc_comparison:
15248 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15249 t = TREE_OPERAND (t, 0);
15250 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15251 t = TREE_OPERAND (t, 1);
15252 else
15253 return t;
15254 break;
15255
15256 case tcc_expression:
15257 switch (TREE_CODE (t))
15258 {
15259 case COMPOUND_EXPR:
15260 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15261 return t;
15262 t = TREE_OPERAND (t, 0);
15263 break;
15264
15265 case COND_EXPR:
15266 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15267 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15268 return t;
15269 t = TREE_OPERAND (t, 0);
15270 break;
15271
15272 default:
15273 return t;
15274 }
15275 break;
15276
15277 default:
15278 return t;
15279 }
15280 }
15281
15282 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
15283
15284 tree
15285 round_up_loc (location_t loc, tree value, unsigned int divisor)
15286 {
15287 tree div = NULL_TREE;
15288
15289 if (divisor == 1)
15290 return value;
15291
15292 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15293 have to do anything. Only do this when we are not given a const,
15294 because in that case, this check is more expensive than just
15295 doing it. */
15296 if (TREE_CODE (value) != INTEGER_CST)
15297 {
15298 div = build_int_cst (TREE_TYPE (value), divisor);
15299
15300 if (multiple_of_p (TREE_TYPE (value), value, div))
15301 return value;
15302 }
15303
15304 /* If divisor is a power of two, simplify this to bit manipulation. */
15305 if (pow2_or_zerop (divisor))
15306 {
15307 if (TREE_CODE (value) == INTEGER_CST)
15308 {
15309 wide_int val = wi::to_wide (value);
15310 bool overflow_p;
15311
15312 if ((val & (divisor - 1)) == 0)
15313 return value;
15314
15315 overflow_p = TREE_OVERFLOW (value);
15316 val += divisor - 1;
15317 val &= (int) -divisor;
15318 if (val == 0)
15319 overflow_p = true;
15320
15321 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
15322 }
15323 else
15324 {
15325 tree t;
15326
15327 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15328 value = size_binop_loc (loc, PLUS_EXPR, value, t);
15329 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
15330 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15331 }
15332 }
15333 else
15334 {
15335 if (!div)
15336 div = build_int_cst (TREE_TYPE (value), divisor);
15337 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15338 value = size_binop_loc (loc, MULT_EXPR, value, div);
15339 }
15340
15341 return value;
15342 }
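
/* With a power-of-two DIVISOR this reduces to bit twiddling: rounding a
   non-constant VALUE up to a multiple of 8 produces (VALUE + 7) & -8, and
   rounding the constant 13 the same way yields 16.  */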
15343
15344 /* Likewise, but round down. */
15345
15346 tree
15347 round_down_loc (location_t loc, tree value, int divisor)
15348 {
15349 tree div = NULL_TREE;
15350
15351 gcc_assert (divisor > 0);
15352 if (divisor == 1)
15353 return value;
15354
15355 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15356 have to do anything. Only do this when we are not given a const,
15357 because in that case, this check is more expensive than just
15358 doing it. */
15359 if (TREE_CODE (value) != INTEGER_CST)
15360 {
15361 div = build_int_cst (TREE_TYPE (value), divisor);
15362
15363 if (multiple_of_p (TREE_TYPE (value), value, div))
15364 return value;
15365 }
15366
15367 /* If divisor is a power of two, simplify this to bit manipulation. */
15368 if (pow2_or_zerop (divisor))
15369 {
15370 tree t;
15371
15372 t = build_int_cst (TREE_TYPE (value), -divisor);
15373 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15374 }
15375 else
15376 {
15377 if (!div)
15378 div = build_int_cst (TREE_TYPE (value), divisor);
15379 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
15380 value = size_binop_loc (loc, MULT_EXPR, value, div);
15381 }
15382
15383 return value;
15384 }
15385
15386 /* Returns a pointer to the base of the object addressed by EXP and
15387 extracts the information about the offset of the access, storing it
15388 in PBITPOS and POFFSET. */
15389
15390 static tree
15391 split_address_to_core_and_offset (tree exp,
15392 poly_int64_pod *pbitpos, tree *poffset)
15393 {
15394 tree core;
15395 machine_mode mode;
15396 int unsignedp, reversep, volatilep;
15397 poly_int64 bitsize;
15398 location_t loc = EXPR_LOCATION (exp);
15399
15400 if (TREE_CODE (exp) == ADDR_EXPR)
15401 {
15402 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15403 poffset, &mode, &unsignedp, &reversep,
15404 &volatilep);
15405 core = build_fold_addr_expr_loc (loc, core);
15406 }
15407 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
15408 {
15409 core = TREE_OPERAND (exp, 0);
15410 STRIP_NOPS (core);
15411 *pbitpos = 0;
15412 *poffset = TREE_OPERAND (exp, 1);
15413 if (poly_int_tree_p (*poffset))
15414 {
15415 poly_offset_int tem
15416 = wi::sext (wi::to_poly_offset (*poffset),
15417 TYPE_PRECISION (TREE_TYPE (*poffset)));
15418 tem <<= LOG2_BITS_PER_UNIT;
15419 if (tem.to_shwi (pbitpos))
15420 *poffset = NULL_TREE;
15421 }
15422 }
15423 else
15424 {
15425 core = exp;
15426 *pbitpos = 0;
15427 *poffset = NULL_TREE;
15428 }
15429
15430 return core;
15431 }
15432
15433 /* Returns true if addresses of E1 and E2 differ by a constant, false
15434 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15435
15436 bool
15437 ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
15438 {
15439 tree core1, core2;
15440 poly_int64 bitpos1, bitpos2;
15441 tree toffset1, toffset2, tdiff, type;
15442
15443 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15444 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15445
15446 poly_int64 bytepos1, bytepos2;
15447 if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
15448 || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
15449 || !operand_equal_p (core1, core2, 0))
15450 return false;
15451
15452 if (toffset1 && toffset2)
15453 {
15454 type = TREE_TYPE (toffset1);
15455 if (type != TREE_TYPE (toffset2))
15456 toffset2 = fold_convert (type, toffset2);
15457
15458 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15459 if (!cst_and_fits_in_hwi (tdiff))
15460 return false;
15461
15462 *diff = int_cst_value (tdiff);
15463 }
15464 else if (toffset1 || toffset2)
15465 {
15466 /* If only one of the offsets is non-constant, the difference cannot
15467 be a constant. */
15468 return false;
15469 }
15470 else
15471 *diff = 0;
15472
15473 *diff += bytepos1 - bytepos2;
15474 return true;
15475 }
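
/* For example, given char a[10], the difference between &a[7] and &a[2]
   is known at compile time, so *DIFF is set to 5 and true is returned.
   When the offsets do not reduce to a constant difference (say &a[i]
   versus &a[2]), false is returned instead.  */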
15476
15477 /* Return OFF converted to a pointer offset type suitable as offset for
15478 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
15479 tree
15480 convert_to_ptrofftype_loc (location_t loc, tree off)
15481 {
15482 return fold_convert_loc (loc, sizetype, off);
15483 }
15484
15485 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
15486 tree
15487 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
15488 {
15489 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
15490 ptr, convert_to_ptrofftype_loc (loc, off));
15491 }
15492
15493 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
15494 tree
15495 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
15496 {
15497 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
15498 ptr, size_int (off));
15499 }
15500
15501 /* Return a pointer P to a NUL-terminated string containing the sequence
15502 of bytes corresponding to the representation of the object referred to
15503 by SRC (or a subsequence of such bytes within it if SRC is a reference
15504 to an initialized constant array plus some constant offset).
15505 If STRSIZE is non-null, store the number of bytes in the constant
15506 sequence including the terminating NUL byte. *STRSIZE is equal to
15507 sizeof(A) - OFFSET where A is the array that stores the constant
15508 sequence that SRC points to and OFFSET is the byte offset of SRC from
15509 the beginning of A. SRC need not point to a string or even an array
15510 of characters but may point to an object of any type. */
15511
15512 const char *
15513 c_getstr (tree src, unsigned HOST_WIDE_INT *strsize /* = NULL */)
15514 {
15515 /* The offset into the array A storing the string, and A's byte size. */
15516 tree offset_node;
15517 tree mem_size;
15518
15519 if (strsize)
15520 *strsize = 0;
15521
15522 src = string_constant (src, &offset_node, &mem_size, NULL);
15523 if (!src)
15524 return NULL;
15525
15526 unsigned HOST_WIDE_INT offset = 0;
15527 if (offset_node != NULL_TREE)
15528 {
15529 if (!tree_fits_uhwi_p (offset_node))
15530 return NULL;
15531 else
15532 offset = tree_to_uhwi (offset_node);
15533 }
15534
15535 if (!tree_fits_uhwi_p (mem_size))
15536 return NULL;
15537
15538 /* ARRAY_SIZE is the byte size of the array the constant sequence
15539 is stored in and equal to sizeof A. INIT_BYTES is the number
15540 of bytes in the constant sequence used to initialize the array,
15541 including any embedded NULs as well as the terminating NUL (for
15542 strings), but not including any trailing zeros/NULs past
15543 the terminating one appended implicitly to a string literal to
15544 zero out the remainder of the array it's stored in. For example,
15545 given:
15546 const char a[7] = "abc\0d";
15547 n = strlen (a + 1);
15548 ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1. For a valid
15549 (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
15550 is equal to strlen (A) + 1. */
15551 const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
15552 unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
15553
15554 /* Ideally this would turn into a gcc_checking_assert over time. */
15555 if (init_bytes > array_size)
15556 init_bytes = array_size;
15557
15558 const char *string = TREE_STRING_POINTER (src);
15559
15564 if (init_bytes == 0 || offset >= array_size)
15565 return NULL;
15566
15567 if (strsize)
15568 {
15569 /* Compute and store the number of characters from the beginning
15570 of the substring at OFFSET to the end, including the terminating
15571 nul. Offsets past the initial length refer to the empty string. */
15572 if (offset < init_bytes)
15573 *strsize = init_bytes - offset;
15574 else
15575 *strsize = 1;
15576 }
15577 else
15578 {
15579 tree eltype = TREE_TYPE (TREE_TYPE (src));
15580 /* Support only properly NUL-terminated single byte strings. */
15581 if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
15582 return NULL;
15583 if (string[init_bytes - 1] != '\0')
15584 return NULL;
15585 }
15586
15587 return offset < init_bytes ? string + offset : "";
15588 }
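
/* Editor's usage sketch for c_getstr, reusing the example from the
   comment above (illustrative only).  Given
     const char a[7] = "abc\0d";
   and SRC referring to &a[1], the call c_getstr (src, &n) returns a
   pointer into the STRING_CST data at "bc" and stores 5 in N (the six
   initializer bytes minus the offset of one, counting the terminating
   NUL).  For an offset at or past the initializer but still inside A,
   the result is "" and N is 1.  */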
15589
15590 /* Given a tree T, compute which bits in T may be nonzero. */
15591
15592 wide_int
15593 tree_nonzero_bits (const_tree t)
15594 {
15595 switch (TREE_CODE (t))
15596 {
15597 case INTEGER_CST:
15598 return wi::to_wide (t);
15599 case SSA_NAME:
15600 return get_nonzero_bits (t);
15601 case NON_LVALUE_EXPR:
15602 case SAVE_EXPR:
15603 return tree_nonzero_bits (TREE_OPERAND (t, 0));
15604 case BIT_AND_EXPR:
15605 return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15606 tree_nonzero_bits (TREE_OPERAND (t, 1)));
15607 case BIT_IOR_EXPR:
15608 case BIT_XOR_EXPR:
15609 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15610 tree_nonzero_bits (TREE_OPERAND (t, 1)));
15611 case COND_EXPR:
15612 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
15613 tree_nonzero_bits (TREE_OPERAND (t, 2)));
15614 CASE_CONVERT:
15615 return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15616 TYPE_PRECISION (TREE_TYPE (t)),
15617 TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
15618 case PLUS_EXPR:
15619 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
15620 {
15621 wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
15622 wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
15623 if (wi::bit_and (nzbits1, nzbits2) == 0)
15624 return wi::bit_or (nzbits1, nzbits2);
15625 }
15626 break;
15627 case LSHIFT_EXPR:
15628 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
15629 {
15630 tree type = TREE_TYPE (t);
15631 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
15632 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
15633 TYPE_PRECISION (type));
15634 return wi::neg_p (arg1)
15635 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
15636 : wi::lshift (nzbits, arg1);
15637 }
15638 break;
15639 case RSHIFT_EXPR:
15640 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
15641 {
15642 tree type = TREE_TYPE (t);
15643 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
15644 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
15645 TYPE_PRECISION (type));
15646 return wi::neg_p (arg1)
15647 ? wi::lshift (nzbits, -arg1)
15648 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
15649 }
15650 break;
15651 default:
15652 break;
15653 }
15654
15655 return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
15656 }
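
/* Editor's illustrative sketch for tree_nonzero_bits (hypothetical trees,
   restating the cases above).  For T == (x & 0xff) the BIT_AND_EXPR case
   yields 0xff whatever X is; for T == ((x & 0xf0) | 3) the BIT_IOR_EXPR
   case yields 0xf3; for T == ((x & 0xf0) + 1) the PLUS_EXPR case sees
   disjoint masks 0xf0 and 1 and yields 0xf1; for an unanalyzed tree such
   as a plain VAR_DECL the default case returns an all-ones mask, meaning
   every bit may be nonzero.  */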
15657
15658 #if CHECKING_P
15659
15660 namespace selftest {
15661
15662 /* Helper functions for writing tests of folding trees. */
15663
15664 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
15665
15666 static void
15667 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
15668 tree constant)
15669 {
15670 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
15671 }
15672
15673 /* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
15674 wrapping WRAPPED_EXPR. */
15675
15676 static void
15677 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
15678 tree wrapped_expr)
15679 {
15680 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
15681 ASSERT_NE (wrapped_expr, result);
15682 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
15683 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
15684 }
15685
15686 /* Verify that various arithmetic binary operations are folded
15687 correctly. */
15688
15689 static void
15690 test_arithmetic_folding ()
15691 {
15692 tree type = integer_type_node;
15693 tree x = create_tmp_var_raw (type, "x");
15694 tree zero = build_zero_cst (type);
15695 tree one = build_int_cst (type, 1);
15696
15697 /* Addition. */
15698 /* 1 <-- (0 + 1) */
15699 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
15700 one);
15701 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
15702 one);
15703
15704 /* (nonlvalue)x <-- (x + 0) */
15705 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
15706 x);
15707
15708 /* Subtraction. */
15709 /* 0 <-- (x - x) */
15710 assert_binop_folds_to_const (x, MINUS_EXPR, x,
15711 zero);
15712 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
15713 x);
15714
15715 /* Multiplication. */
15716 /* 0 <-- (x * 0) */
15717 assert_binop_folds_to_const (x, MULT_EXPR, zero,
15718 zero);
15719
15720 /* (nonlvalue)x <-- (x * 1) */
15721 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
15722 x);
15723 }
15724
15725 /* Verify that various binary operations on vectors are folded
15726 correctly. */
15727
15728 static void
15729 test_vector_folding ()
15730 {
15731 tree inner_type = integer_type_node;
15732 tree type = build_vector_type (inner_type, 4);
15733 tree zero = build_zero_cst (type);
15734 tree one = build_one_cst (type);
15735 tree index = build_index_vector (type, 0, 1);
15736
15737 /* Verify equality tests that return a scalar boolean result. */
15738 tree res_type = boolean_type_node;
15739 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
15740 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
15741 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
15742 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
15743 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
15744 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
15745 index, one)));
15746 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
15747 index, index)));
15748 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
15749 index, index)));
15750 }
15751
15752 /* Verify folding of VEC_DUPLICATE_EXPRs. */
15753
15754 static void
15755 test_vec_duplicate_folding ()
15756 {
15757 scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
15758 machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
15759 /* This will be 1 if VEC_MODE isn't a vector mode. */
15760 poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
15761
15762 tree type = build_vector_type (ssizetype, nunits);
15763 tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
15764 tree dup5_cst = build_vector_from_val (type, ssize_int (5));
15765 ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
15766 }
15767
15768 /* Run all of the selftests within this file. */
15769
15770 void
15771 fold_const_c_tests ()
15772 {
15773 test_arithmetic_folding ();
15774 test_vector_folding ();
15775 test_vec_duplicate_folding ();
15776 }
15777
15778 } // namespace selftest
15779
15780 #endif /* CHECKING_P */