/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"

/* Non-zero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *, bool *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static int native_encode_expr (tree, unsigned char *, int);
static tree native_interpret_expr (tree, unsigned char *, int);


/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
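
/* A minimal illustrative sketch, kept out of the build with #if 0 and
   using a hypothetical helper name: adding two large positive values
   wraps to a negative sum, which OVERFLOW_SUM_SIGN detects.  Assumes
   the usual wrapping behavior of the host's two's complement
   arithmetic.  */
#if 0
static int
overflow_sum_sign_example (void)
{
  HOST_WIDE_INT a = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 2);
  HOST_WIDE_INT b = a;
  HOST_WIDE_INT sum = a + b;    /* Wraps to the minimum value.  */

  /* A and B agree in sign but SUM differs, so this is nonzero.  */
  return OVERFLOW_SUM_SIGN (a, b, sum);
}
#endif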
\f
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
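
/* A minimal illustrative sketch, kept out of the build with #if 0:
   the invariant behind this representation is that any word X equals
   LOWPART (X) + HIGHPART (X) * BASE, which is what the 4-word routines
   below rely on.  The helper name is hypothetical.  */
#if 0
static int
halfword_split_example (unsigned HOST_WIDE_INT x)
{
  /* Always nonzero: the two halves reassemble into X.  */
  return x == LOWPART (x) + HIGHPART (x) * BASE;
}
#endif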

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
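
/* A minimal illustrative sketch, kept out of the build with #if 0:
   encode and decode are inverses, so a doubleword survives a round
   trip through the 4-word representation (assuming the usual wrapping
   conversions the file itself relies on).  The helper name is
   hypothetical.  */
#if 0
static int
encode_decode_roundtrip_example (unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT low2;
  HOST_WIDE_INT hi2;

  encode (words, low, hi);
  decode (words, &low2, &hi2);
  return low2 == low && hi2 == hi;      /* Always nonzero.  */
}
#endif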
\f
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec;
  int sign_extended_type;

  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT) 1
                << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT) l1 < 0)
        h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))
        {
          h1 = -1;
          l1 |= (HOST_WIDE_INT) (-1) << prec;
        }
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}
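
/* A minimal illustrative sketch, kept out of the build with #if 0:
   forcing 0x1ff into an 8-bit unsigned type keeps only the low eight
   bits and reports overflow.  Assumes unsigned_char_type_node is an
   8-bit type on the target; the helper name is hypothetical.  */
#if 0
static int
fit_double_type_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  int overflowed
    = fit_double_type (0x1ff, 0, &lv, &hv, unsigned_char_type_node);

  return overflowed && lv == 0xff && hv == 0;   /* Nonzero.  */
}
#endif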

/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.
   OVERFLOWABLE indicates if we are interested in overflow of the value:
   when >0 we are only interested in signed overflow, for <0 we are
   interested in any overflow.  OVERFLOWED indicates whether overflow
   has already occurred.  We force the value to be within the range of
   the type (by setting to 0 or 1 all the bits outside the type's
   range).  We set TREE_OVERFLOW if
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs,
        or OVERFLOWABLE is <0 and any overflow occurs.
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
                       HOST_WIDE_INT high, int overflowable,
                       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          tree t = make_node (INTEGER_CST);
          TREE_INT_CST_LOW (t) = low;
          TREE_INT_CST_HIGH (t) = high;
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t) = 1;
          return t;
        }
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}
\f
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
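
/* A minimal illustrative sketch, kept out of the build with #if 0:
   adding 1 to a doubleword whose low word is all ones carries into
   the high word.  The helper name is hypothetical.  */
#if 0
static int
add_double_carry_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  /* low all-ones, high 0, plus low 1, high 0, gives low 0, high 1,
     with no doubleword overflow either way.  */
  int ovf = add_double_with_sign (~(unsigned HOST_WIDE_INT) 0, 0, 1, 0,
                                  &lv, &hv, false);

  return !ovf && lv == 0 && hv == 1;    /* Nonzero.  */
}
#endif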

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
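
/* A minimal illustrative sketch, kept out of the build with #if 0:
   the only doubleword whose negation overflows is the most negative
   one, with a zero low word and only the sign bit set in the high
   word.  The helper name is hypothetical.  */
#if 0
static int
neg_double_overflow_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  HOST_WIDE_INT min_hi
    = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) 1
                       << (HOST_BITS_PER_WIDE_INT - 1));

  return neg_double (0, min_hi, &lv, &hv);      /* Nonzero: overflow.  */
}
#endif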
\f
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
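
/* A minimal illustrative sketch, kept out of the build with #if 0:
   doubling a doubleword whose low word is all ones spills one bit
   into the high word; the top half of the 4-word product stays zero,
   so no unsigned overflow is reported.  The helper name is
   hypothetical.  */
#if 0
static int
mul_double_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  int ovf = mul_double_with_sign (~(unsigned HOST_WIDE_INT) 0, 0, 2, 0,
                                  &lv, &hv, true);

  return !ovf && lv == ~(unsigned HOST_WIDE_INT) 1 && hv == 1;
}
#endif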
\f
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
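
/* A minimal illustrative sketch, kept out of the build with #if 0:
   shifting left by a full host word moves bit 0 of the low word to
   bit 0 of the high word.  The helper name is hypothetical.  */
#if 0
static int
lshift_double_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  lshift_double (1, 0, HOST_BITS_PER_WIDE_INT, 2 * HOST_BITS_PER_WIDE_INT,
                 &lv, &hv, 0);
  return lv == 0 && hv == 1;    /* Nonzero.  */
}
#endif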

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
\f
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
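
/* A minimal illustrative sketch, kept out of the build with #if 0:
   a rotation is just the OR of two opposite shifts, so rotating the
   doubleword left by one moves the top bit of the high word into
   bit 0 of the low word.  The helper name is hypothetical.  */
#if 0
static int
lrotate_double_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  HOST_WIDE_INT top
    = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) 1
                       << (HOST_BITS_PER_WIDE_INT - 1));

  lrotate_double (0, top, 1, 2 * HOST_BITS_PER_WIDE_INT, &lv, &hv);
  return lv == 1 && hv == 0;    /* Nonzero.  */
}
#endif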
\f
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero the extra scaling element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
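
/* A minimal illustrative sketch, kept out of the build with #if 0:
   the rounding codes only differ when the division is inexact.  For
   -7 / 2 the quotient truncates to -3 (remainder -1) but floors to
   -4 (remainder 1).  The helper name is hypothetical.  */
#if 0
static int
div_and_round_double_example (void)
{
  unsigned HOST_WIDE_INT lquo, lrem;
  HOST_WIDE_INT hquo, hrem;

  div_and_round_double (TRUNC_DIV_EXPR, 0, -7, -1, 2, 0,
                        &lquo, &hquo, &lrem, &hrem);
  if (lquo != (unsigned HOST_WIDE_INT) -3 || hquo != -1)
    return 0;

  div_and_round_double (FLOOR_DIV_EXPR, 0, -7, -1, 2, 0,
                        &lquo, &hquo, &lrem, &hrem);
  return lquo == (unsigned HOST_WIDE_INT) -4 && hquo == -1;
}
#endif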

/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
\f
/* This is non-zero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, tree stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL_TREE || !expr_has_location (stmt))
    locus = input_location;
  else
    locus = expr_location (stmt);
  warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL_TREE, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
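
/* A minimal illustrative sketch, kept out of the build with #if 0, of
   the calling pattern this machinery assumes: bracket a speculative
   fold with defer/undefer, and only let a warning out if the folded
   result is actually going to be used.  The helper name and the
   "folded != expr" usefulness test are hypothetical.  */
#if 0
static tree
fold_speculatively (tree expr, tree stmt)
{
  tree folded;

  fold_defer_overflow_warnings ();
  folded = fold (expr);
  fold_undefer_overflow_warnings (folded != expr, stmt, 0);
  return folded;
}
#endif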

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  gcc_assert (!flag_wrapv && !flag_trapv);
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
\f
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
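
/* A minimal illustrative sketch, kept out of the build with #if 0:
   for a signed type, the minimum value is the one constant whose
   negation would overflow; every other value, such as -1, is safe.
   The helper name is hypothetical.  */
#if 0
static int
may_negate_example (void)
{
  tree int_min = TYPE_MIN_VALUE (integer_type_node);
  tree minus_one = build_int_cst (integer_type_node, -1);

  return (!may_negate_without_overflow_p (int_min)
          && may_negate_without_overflow_p (minus_one));
}
#endif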

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2 (COMPLEX_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)),
                            fold_negate_expr (TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1 (CONJ_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2 (MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return negate_expr (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr (fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
\f
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In
   that case, we negate an operand that was subtracted, except when it
   is a literal, for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
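
/* A minimal illustrative sketch, kept out of the build with #if 0:
   splitting a tree for X + 5 under PLUS_EXPR leaves the literal 5 in
   *LITP, no constant part, and returns the variable X; the invariant
   is IN == VAR + *CONP + *LITP - *MINUS_LITP with absent parts null.
   The helper name is hypothetical.  */
#if 0
static tree
split_tree_example (tree x_plus_5)
{
  tree con, lit, minus_lit;

  return split_tree (x_plus_5, PLUS_EXPR, &con, &lit, &minus_lit, 0);
}
#endif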

/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
\f
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
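
/* A minimal illustrative sketch, kept out of the build with #if 0:
   folding 2 + 3 into the INTEGER_CST 5 at compile time.  The helper
   name is hypothetical.  */
#if 0
static tree
int_const_binop_example (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);

  /* Returns an INTEGER_CST of value 5 with TREE_OVERFLOW clear.  */
  return int_const_binop (PLUS_EXPR, two, three, 0);
}
#endif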
1782
1783 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1784 constant. We assume ARG1 and ARG2 have the same data type, or at least
1785 are the same kind of constant and the same machine mode. Return zero if
1786 combining the constants is not allowed in the current operating mode.
1787
1788 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1789
1790 static tree
1791 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1792 {
1793 /* Sanity check for the recursive cases. */
1794 if (!arg1 || !arg2)
1795 return NULL_TREE;
1796
1797 STRIP_NOPS (arg1);
1798 STRIP_NOPS (arg2);
1799
1800 if (TREE_CODE (arg1) == INTEGER_CST)
1801 return int_const_binop (code, arg1, arg2, notrunc);
1802
1803 if (TREE_CODE (arg1) == REAL_CST)
1804 {
1805 enum machine_mode mode;
1806 REAL_VALUE_TYPE d1;
1807 REAL_VALUE_TYPE d2;
1808 REAL_VALUE_TYPE value;
1809 REAL_VALUE_TYPE result;
1810 bool inexact;
1811 tree t, type;
1812
1813 /* The following codes are handled by real_arithmetic. */
1814 switch (code)
1815 {
1816 case PLUS_EXPR:
1817 case MINUS_EXPR:
1818 case MULT_EXPR:
1819 case RDIV_EXPR:
1820 case MIN_EXPR:
1821 case MAX_EXPR:
1822 break;
1823
1824 default:
1825 return NULL_TREE;
1826 }
1827
1828 d1 = TREE_REAL_CST (arg1);
1829 d2 = TREE_REAL_CST (arg2);
1830
1831 type = TREE_TYPE (arg1);
1832 mode = TYPE_MODE (type);
1833
1834 /* Don't perform operation if we honor signaling NaNs and
1835 either operand is a NaN. */
1836 if (HONOR_SNANS (mode)
1837 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1838 return NULL_TREE;
1839
1840 /* Don't perform the operation if it would raise a division
1841 by zero exception. */
1842 if (code == RDIV_EXPR
1843 && REAL_VALUES_EQUAL (d2, dconst0)
1844 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1845 return NULL_TREE;
1846
1847 /* If either operand is a NaN, just return it. Otherwise, set up
1848 for floating-point trap; we return an overflow. */
1849 if (REAL_VALUE_ISNAN (d1))
1850 return arg1;
1851 else if (REAL_VALUE_ISNAN (d2))
1852 return arg2;
1853
1854 inexact = real_arithmetic (&value, code, &d1, &d2);
1855 real_convert (&result, mode, &value);
1856
1857 /* Don't constant fold this floating point operation if
1858 the result has overflowed and flag_trapping_math is set. */
1859 if (flag_trapping_math
1860 && MODE_HAS_INFINITIES (mode)
1861 && REAL_VALUE_ISINF (result)
1862 && !REAL_VALUE_ISINF (d1)
1863 && !REAL_VALUE_ISINF (d2))
1864 return NULL_TREE;
1865
1866 /* Don't constant fold this floating point operation if the
1867 result may depend upon the run-time rounding mode and
1868 flag_rounding_math is set, or if GCC's software emulation
1869 is unable to accurately represent the result. */
1870 if ((flag_rounding_math
1871 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1872 && !flag_unsafe_math_optimizations))
1873 && (inexact || !real_identical (&result, &value)))
1874 return NULL_TREE;
1875
1876 t = build_real (type, result);
1877
1878 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1879 return t;
1880 }
1881
1882 if (TREE_CODE (arg1) == COMPLEX_CST)
1883 {
1884 tree type = TREE_TYPE (arg1);
1885 tree r1 = TREE_REALPART (arg1);
1886 tree i1 = TREE_IMAGPART (arg1);
1887 tree r2 = TREE_REALPART (arg2);
1888 tree i2 = TREE_IMAGPART (arg2);
1889 tree real, imag;
1890
1891 switch (code)
1892 {
1893 case PLUS_EXPR:
1894 case MINUS_EXPR:
1895 real = const_binop (code, r1, r2, notrunc);
1896 imag = const_binop (code, i1, i2, notrunc);
1897 break;
1898
1899 case MULT_EXPR:
1900 real = const_binop (MINUS_EXPR,
1901 const_binop (MULT_EXPR, r1, r2, notrunc),
1902 const_binop (MULT_EXPR, i1, i2, notrunc),
1903 notrunc);
1904 imag = const_binop (PLUS_EXPR,
1905 const_binop (MULT_EXPR, r1, i2, notrunc),
1906 const_binop (MULT_EXPR, i1, r2, notrunc),
1907 notrunc);
1908 break;
1909
1910 case RDIV_EXPR:
1911 {
1912 tree magsquared
1913 = const_binop (PLUS_EXPR,
1914 const_binop (MULT_EXPR, r2, r2, notrunc),
1915 const_binop (MULT_EXPR, i2, i2, notrunc),
1916 notrunc);
1917 tree t1
1918 = const_binop (PLUS_EXPR,
1919 const_binop (MULT_EXPR, r1, r2, notrunc),
1920 const_binop (MULT_EXPR, i1, i2, notrunc),
1921 notrunc);
1922 tree t2
1923 = const_binop (MINUS_EXPR,
1924 const_binop (MULT_EXPR, i1, r2, notrunc),
1925 const_binop (MULT_EXPR, r1, i2, notrunc),
1926 notrunc);
1927
1928 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1929 code = TRUNC_DIV_EXPR;
1930
1931 real = const_binop (code, t1, magsquared, notrunc);
1932 imag = const_binop (code, t2, magsquared, notrunc);
1933 }
1934 break;
1935
1936 default:
1937 return NULL_TREE;
1938 }
1939
1940 if (real && imag)
1941 return build_complex (type, real, imag);
1942 }
1943
1944 return NULL_TREE;
1945 }
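
/* Aside for readers tracing the COMPLEX_CST case: the RDIV_EXPR branch
   above folds (r1 + i1*i) / (r2 + i2*i) by the textbook
   magnitude-squared method, t1 = r1*r2 + i1*i2 and t2 = i1*r2 - r1*i2,
   each divided by |arg2|^2.  A minimal standalone sketch of the same
   arithmetic on host doubles (illustrative only, not part of
   fold-const.c; all names are hypothetical):  */

#include <stdio.h>

static void
complex_div (double r1, double i1, double r2, double i2,
             double *re, double *im)
{
  double magsquared = r2 * r2 + i2 * i2;  /* |r2 + i2*i|^2 */
  double t1 = r1 * r2 + i1 * i2;          /* real-part numerator */
  double t2 = i1 * r2 - r1 * i2;          /* imag-part numerator */
  *re = t1 / magsquared;
  *im = t2 / magsquared;
}

int
main (void)
{
  double re, im;
  complex_div (1.0, 2.0, 3.0, 4.0, &re, &im);  /* (1+2i) / (3+4i) */
  printf ("%g %+gi\n", re, im);                /* prints 0.44 +0.08i */
  return 0;
}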
1946
1947 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1948 indicates which particular sizetype to create. */
1949
1950 tree
1951 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1952 {
1953 return build_int_cst (sizetype_tab[(int) kind], number);
1954 }
1955 \f
1956 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1957 is a tree code. The type of the result is taken from the operands.
1958 Both must be equivalent integer types, ala int_binop_types_match_p.
1959 If the operands are constant, so is the result. */
1960
1961 tree
1962 size_binop (enum tree_code code, tree arg0, tree arg1)
1963 {
1964 tree type = TREE_TYPE (arg0);
1965
1966 if (arg0 == error_mark_node || arg1 == error_mark_node)
1967 return error_mark_node;
1968
1969 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1970 TREE_TYPE (arg1)));
1971
1972 /* Handle the special case of two integer constants faster. */
1973 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1974 {
1975 /* And some specific cases even faster than that. */
1976 if (code == PLUS_EXPR)
1977 {
1978 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1979 return arg1;
1980 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1981 return arg0;
1982 }
1983 else if (code == MINUS_EXPR)
1984 {
1985 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1986 return arg0;
1987 }
1988 else if (code == MULT_EXPR)
1989 {
1990 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1991 return arg1;
1992 }
1993
1994 /* Handle general case of two integer constants. */
1995 return int_const_binop (code, arg0, arg1, 0);
1996 }
1997
1998 return fold_build2 (code, type, arg0, arg1);
1999 }
2000
2001 /* Given two values, either both of sizetype or both of bitsizetype,
2002 compute the difference between the two values. Return the value
2003 in signed type corresponding to the type of the operands. */
2004
2005 tree
2006 size_diffop (tree arg0, tree arg1)
2007 {
2008 tree type = TREE_TYPE (arg0);
2009 tree ctype;
2010
2011 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2012 TREE_TYPE (arg1)));
2013
2014 /* If the type is already signed, just do the simple thing. */
2015 if (!TYPE_UNSIGNED (type))
2016 return size_binop (MINUS_EXPR, arg0, arg1);
2017
2018 if (type == sizetype)
2019 ctype = ssizetype;
2020 else if (type == bitsizetype)
2021 ctype = sbitsizetype;
2022 else
2023 ctype = lang_hooks.types.signed_type (type);
2024
2025 /* If either operand is not a constant, do the conversions to the signed
2026 type and subtract. The hardware will do the right thing with any
2027 overflow in the subtraction. */
2028 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2029 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2030 fold_convert (ctype, arg1));
2031
2032 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2033 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2034 overflow) and negate (which can't either). Special-case a result
2035 of zero while we're here. */
2036 if (tree_int_cst_equal (arg0, arg1))
2037 return build_int_cst (ctype, 0);
2038 else if (tree_int_cst_lt (arg1, arg0))
2039 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2040 else
2041 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2042 fold_convert (ctype, size_binop (MINUS_EXPR,
2043 arg1, arg0)));
2044 }
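
/* Aside: the INTEGER_CST branch above orders the operands before
   subtracting, so the value handed to the signed type is always a
   representable non-negative difference.  An illustrative sketch of the
   same idea on host integers (not part of fold-const.c; assumes the
   difference fits in the signed type, as it does for the tree constants
   handled above):  */

#include <stdio.h>

static long
size_diff (unsigned long a, unsigned long b)
{
  if (a == b)
    return 0;                /* special-case a zero result */
  else if (b < a)
    return (long) (a - b);   /* known non-negative, can't overflow */
  else
    return -(long) (b - a);  /* negate the positive difference */
}

int
main (void)
{
  printf ("%ld %ld\n", size_diff (10, 3), size_diff (3, 10));  /* 7 -7 */
  return 0;
}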
2045 \f
2046 /* A subroutine of fold_convert_const handling conversions of an
2047 INTEGER_CST to another integer type. */
2048
2049 static tree
2050 fold_convert_const_int_from_int (tree type, tree arg1)
2051 {
2052 tree t;
2053
2054 /* Given an integer constant, make a new constant with the new type,
2055 appropriately sign-extended or truncated. */
2056 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2057 TREE_INT_CST_HIGH (arg1),
2058 /* Don't set the overflow when
2059 converting a pointer. */
2060 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2061 (TREE_INT_CST_HIGH (arg1) < 0
2062 && (TYPE_UNSIGNED (type)
2063 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2064 | TREE_OVERFLOW (arg1));
2065
2066 return t;
2067 }
2068
2069 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2070 to an integer type. */
2071
2072 static tree
2073 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
2074 {
2075 int overflow = 0;
2076 tree t;
2077
2078 /* The following code implements the floating point to integer
2079 conversion rules required by the Java Language Specification,
2080 that IEEE NaNs are mapped to zero and values that overflow
2081 the target precision saturate, i.e. values greater than
2082 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2083 are mapped to INT_MIN. These semantics are allowed by the
2084 C and C++ standards that simply state that the behavior of
2085 FP-to-integer conversion is unspecified upon overflow. */
2086
2087 HOST_WIDE_INT high, low;
2088 REAL_VALUE_TYPE r;
2089 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2090
2091 switch (code)
2092 {
2093 case FIX_TRUNC_EXPR:
2094 real_trunc (&r, VOIDmode, &x);
2095 break;
2096
2097 default:
2098 gcc_unreachable ();
2099 }
2100
2101 /* If R is NaN, return zero and show we have an overflow. */
2102 if (REAL_VALUE_ISNAN (r))
2103 {
2104 overflow = 1;
2105 high = 0;
2106 low = 0;
2107 }
2108
2109 /* See if R is less than the lower bound or greater than the
2110 upper bound. */
2111
2112 if (! overflow)
2113 {
2114 tree lt = TYPE_MIN_VALUE (type);
2115 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2116 if (REAL_VALUES_LESS (r, l))
2117 {
2118 overflow = 1;
2119 high = TREE_INT_CST_HIGH (lt);
2120 low = TREE_INT_CST_LOW (lt);
2121 }
2122 }
2123
2124 if (! overflow)
2125 {
2126 tree ut = TYPE_MAX_VALUE (type);
2127 if (ut)
2128 {
2129 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2130 if (REAL_VALUES_LESS (u, r))
2131 {
2132 overflow = 1;
2133 high = TREE_INT_CST_HIGH (ut);
2134 low = TREE_INT_CST_LOW (ut);
2135 }
2136 }
2137 }
2138
2139 if (! overflow)
2140 REAL_VALUE_TO_INT (&low, &high, r);
2141
2142 t = force_fit_type_double (type, low, high, -1,
2143 overflow | TREE_OVERFLOW (arg1));
2144 return t;
2145 }
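
/* Aside: the saturating semantics implemented above, restated on host
   doubles (illustrative only, not part of fold-const.c; sat_ftoi is a
   hypothetical name): NaN maps to zero, and out-of-range values clamp
   to the target type's extremes.  */

#include <limits.h>
#include <math.h>
#include <stdio.h>

static int
sat_ftoi (double r)
{
  r = trunc (r);                 /* FIX_TRUNC_EXPR rounds toward zero */
  if (isnan (r))
    return 0;                    /* NaN maps to zero (overflow is set) */
  if (r < (double) INT_MIN)
    return INT_MIN;              /* saturate below */
  if (r > (double) INT_MAX)
    return INT_MAX;              /* saturate above */
  return (int) r;
}

int
main (void)
{
  /* Prints 2147483647 -2147483648 0.  */
  printf ("%d %d %d\n", sat_ftoi (1e99), sat_ftoi (-1e99), sat_ftoi (NAN));
  return 0;
}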
2146
2147 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2148 to another floating point type. */
2149
2150 static tree
2151 fold_convert_const_real_from_real (tree type, tree arg1)
2152 {
2153 REAL_VALUE_TYPE value;
2154 tree t;
2155
2156 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2157 t = build_real (type, value);
2158
2159 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2160 return t;
2161 }
2162
2163 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2164 type TYPE. If no simplification can be done return NULL_TREE. */
2165
2166 static tree
2167 fold_convert_const (enum tree_code code, tree type, tree arg1)
2168 {
2169 if (TREE_TYPE (arg1) == type)
2170 return arg1;
2171
2172 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2173 {
2174 if (TREE_CODE (arg1) == INTEGER_CST)
2175 return fold_convert_const_int_from_int (type, arg1);
2176 else if (TREE_CODE (arg1) == REAL_CST)
2177 return fold_convert_const_int_from_real (code, type, arg1);
2178 }
2179 else if (TREE_CODE (type) == REAL_TYPE)
2180 {
2181 if (TREE_CODE (arg1) == INTEGER_CST)
2182 return build_real_from_int_cst (type, arg1);
2183 if (TREE_CODE (arg1) == REAL_CST)
2184 return fold_convert_const_real_from_real (type, arg1);
2185 }
2186 return NULL_TREE;
2187 }
2188
2189 /* Construct a vector of zero elements of vector type TYPE. */
2190
2191 static tree
2192 build_zero_vector (tree type)
2193 {
2194 tree elem, list;
2195 int i, units;
2196
2197 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2198 units = TYPE_VECTOR_SUBPARTS (type);
2199
2200 list = NULL_TREE;
2201 for (i = 0; i < units; i++)
2202 list = tree_cons (NULL_TREE, elem, list);
2203 return build_vector (type, list);
2204 }
2205
2206 /* Convert expression ARG to type TYPE. Used by the middle-end for
2207 simple conversions in preference to calling the front-end's convert. */
2208
2209 tree
2210 fold_convert (tree type, tree arg)
2211 {
2212 tree orig = TREE_TYPE (arg);
2213 tree tem;
2214
2215 if (type == orig)
2216 return arg;
2217
2218 if (TREE_CODE (arg) == ERROR_MARK
2219 || TREE_CODE (type) == ERROR_MARK
2220 || TREE_CODE (orig) == ERROR_MARK)
2221 return error_mark_node;
2222
2223 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2224 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2225 TYPE_MAIN_VARIANT (orig)))
2226 return fold_build1 (NOP_EXPR, type, arg);
2227
2228 switch (TREE_CODE (type))
2229 {
2230 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2231 case POINTER_TYPE: case REFERENCE_TYPE:
2232 case OFFSET_TYPE:
2233 if (TREE_CODE (arg) == INTEGER_CST)
2234 {
2235 tem = fold_convert_const (NOP_EXPR, type, arg);
2236 if (tem != NULL_TREE)
2237 return tem;
2238 }
2239 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2240 || TREE_CODE (orig) == OFFSET_TYPE)
2241 return fold_build1 (NOP_EXPR, type, arg);
2242 if (TREE_CODE (orig) == COMPLEX_TYPE)
2243 {
2244 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2245 return fold_convert (type, tem);
2246 }
2247 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2248 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2249 return fold_build1 (NOP_EXPR, type, arg);
2250
2251 case REAL_TYPE:
2252 if (TREE_CODE (arg) == INTEGER_CST)
2253 {
2254 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2255 if (tem != NULL_TREE)
2256 return tem;
2257 }
2258 else if (TREE_CODE (arg) == REAL_CST)
2259 {
2260 tem = fold_convert_const (NOP_EXPR, type, arg);
2261 if (tem != NULL_TREE)
2262 return tem;
2263 }
2264
2265 switch (TREE_CODE (orig))
2266 {
2267 case INTEGER_TYPE:
2268 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2269 case POINTER_TYPE: case REFERENCE_TYPE:
2270 return fold_build1 (FLOAT_EXPR, type, arg);
2271
2272 case REAL_TYPE:
2273 return fold_build1 (NOP_EXPR, type, arg);
2274
2275 case COMPLEX_TYPE:
2276 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2277 return fold_convert (type, tem);
2278
2279 default:
2280 gcc_unreachable ();
2281 }
2282
2283 case COMPLEX_TYPE:
2284 switch (TREE_CODE (orig))
2285 {
2286 case INTEGER_TYPE:
2287 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2288 case POINTER_TYPE: case REFERENCE_TYPE:
2289 case REAL_TYPE:
2290 return build2 (COMPLEX_EXPR, type,
2291 fold_convert (TREE_TYPE (type), arg),
2292 fold_convert (TREE_TYPE (type), integer_zero_node));
2293 case COMPLEX_TYPE:
2294 {
2295 tree rpart, ipart;
2296
2297 if (TREE_CODE (arg) == COMPLEX_EXPR)
2298 {
2299 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2300 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2301 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2302 }
2303
2304 arg = save_expr (arg);
2305 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2306 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2307 rpart = fold_convert (TREE_TYPE (type), rpart);
2308 ipart = fold_convert (TREE_TYPE (type), ipart);
2309 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2310 }
2311
2312 default:
2313 gcc_unreachable ();
2314 }
2315
2316 case VECTOR_TYPE:
2317 if (integer_zerop (arg))
2318 return build_zero_vector (type);
2319 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2320 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2321 || TREE_CODE (orig) == VECTOR_TYPE);
2322 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2323
2324 case VOID_TYPE:
2325 tem = fold_ignored_result (arg);
2326 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2327 return tem;
2328 return fold_build1 (NOP_EXPR, type, tem);
2329
2330 default:
2331 gcc_unreachable ();
2332 }
2333 }
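
/* For concreteness: the COMPLEX_TYPE-to-scalar paths above keep only
   the real part (REALPART_EXPR), matching C99's conversion rule.  A
   tiny host-side check (illustrative only, not part of fold-const.c):  */

#include <complex.h>
#include <stdio.h>

int
main (void)
{
  double _Complex z = 3.0 + 4.0 * I;
  double d = (double) z;     /* conversion discards the imaginary part */
  printf ("%g\n", d);        /* prints 3 */
  return 0;
}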
2334 \f
2335 /* Return false if expr can be assumed not to be an lvalue, true
2336 otherwise. */
2337
2338 static bool
2339 maybe_lvalue_p (tree x)
2340 {
2341 /* We only need to wrap lvalue tree codes. */
2342 switch (TREE_CODE (x))
2343 {
2344 case VAR_DECL:
2345 case PARM_DECL:
2346 case RESULT_DECL:
2347 case LABEL_DECL:
2348 case FUNCTION_DECL:
2349 case SSA_NAME:
2350
2351 case COMPONENT_REF:
2352 case INDIRECT_REF:
2353 case ALIGN_INDIRECT_REF:
2354 case MISALIGNED_INDIRECT_REF:
2355 case ARRAY_REF:
2356 case ARRAY_RANGE_REF:
2357 case BIT_FIELD_REF:
2358 case OBJ_TYPE_REF:
2359
2360 case REALPART_EXPR:
2361 case IMAGPART_EXPR:
2362 case PREINCREMENT_EXPR:
2363 case PREDECREMENT_EXPR:
2364 case SAVE_EXPR:
2365 case TRY_CATCH_EXPR:
2366 case WITH_CLEANUP_EXPR:
2367 case COMPOUND_EXPR:
2368 case MODIFY_EXPR:
2369 case GIMPLE_MODIFY_STMT:
2370 case TARGET_EXPR:
2371 case COND_EXPR:
2372 case BIND_EXPR:
2373 case MIN_EXPR:
2374 case MAX_EXPR:
2375 break;
2376
2377 default:
2378 /* Assume the worst for front-end tree codes. */
2379 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2380 break;
2381 return false;
2382 }
2383
2384 return true;
2385 }
2386
2387 /* Return an expr equal to X but certainly not valid as an lvalue. */
2388
2389 tree
2390 non_lvalue (tree x)
2391 {
2392 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2393 us. */
2394 if (in_gimple_form)
2395 return x;
2396
2397 if (! maybe_lvalue_p (x))
2398 return x;
2399 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2400 }
2401
2402 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2403 Zero means allow extended lvalues. */
2404
2405 int pedantic_lvalues;
2406
2407 /* When pedantic, return an expr equal to X but certainly not valid as a
2408 pedantic lvalue. Otherwise, return X. */
2409
2410 static tree
2411 pedantic_non_lvalue (tree x)
2412 {
2413 if (pedantic_lvalues)
2414 return non_lvalue (x);
2415 else
2416 return x;
2417 }
2418 \f
2419 /* Given a tree comparison code, return the code that is the logical inverse
2420 of the given code. It is not safe to do this for floating-point
2421 comparisons, except for NE_EXPR and EQ_EXPR, so we take a NaN-honoring
2422 flag as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2423
2424 enum tree_code
2425 invert_tree_comparison (enum tree_code code, bool honor_nans)
2426 {
2427 if (honor_nans && flag_trapping_math)
2428 return ERROR_MARK;
2429
2430 switch (code)
2431 {
2432 case EQ_EXPR:
2433 return NE_EXPR;
2434 case NE_EXPR:
2435 return EQ_EXPR;
2436 case GT_EXPR:
2437 return honor_nans ? UNLE_EXPR : LE_EXPR;
2438 case GE_EXPR:
2439 return honor_nans ? UNLT_EXPR : LT_EXPR;
2440 case LT_EXPR:
2441 return honor_nans ? UNGE_EXPR : GE_EXPR;
2442 case LE_EXPR:
2443 return honor_nans ? UNGT_EXPR : GT_EXPR;
2444 case LTGT_EXPR:
2445 return UNEQ_EXPR;
2446 case UNEQ_EXPR:
2447 return LTGT_EXPR;
2448 case UNGT_EXPR:
2449 return LE_EXPR;
2450 case UNGE_EXPR:
2451 return LT_EXPR;
2452 case UNLT_EXPR:
2453 return GE_EXPR;
2454 case UNLE_EXPR:
2455 return GT_EXPR;
2456 case ORDERED_EXPR:
2457 return UNORDERED_EXPR;
2458 case UNORDERED_EXPR:
2459 return ORDERED_EXPR;
2460 default:
2461 gcc_unreachable ();
2462 }
2463 }
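
/* Aside: the honor_nans distinction above matters because the logical
   inverse of "x < y" is "x unordered-or->= y", not "x >= y", once a NaN
   can appear.  A quick host-side check (illustrative only, not part of
   fold-const.c):  */

#include <math.h>
#include <stdio.h>

int
main (void)
{
  double x = NAN, y = 1.0;
  int not_lt = !(x < y);                        /* true: LT is false */
  int ge = (x >= y);                            /* GE_EXPR: false */
  int unge = isnan (x) || isnan (y) || x >= y;  /* UNGE_EXPR: true */
  printf ("%d %d %d\n", not_lt, ge, unge);      /* prints 1 0 1 */
  return 0;
}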
2464
2465 /* Similar, but return the comparison that results if the operands are
2466 swapped. This is safe for floating-point. */
2467
2468 enum tree_code
2469 swap_tree_comparison (enum tree_code code)
2470 {
2471 switch (code)
2472 {
2473 case EQ_EXPR:
2474 case NE_EXPR:
2475 case ORDERED_EXPR:
2476 case UNORDERED_EXPR:
2477 case LTGT_EXPR:
2478 case UNEQ_EXPR:
2479 return code;
2480 case GT_EXPR:
2481 return LT_EXPR;
2482 case GE_EXPR:
2483 return LE_EXPR;
2484 case LT_EXPR:
2485 return GT_EXPR;
2486 case LE_EXPR:
2487 return GE_EXPR;
2488 case UNGT_EXPR:
2489 return UNLT_EXPR;
2490 case UNGE_EXPR:
2491 return UNLE_EXPR;
2492 case UNLT_EXPR:
2493 return UNGT_EXPR;
2494 case UNLE_EXPR:
2495 return UNGE_EXPR;
2496 default:
2497 gcc_unreachable ();
2498 }
2499 }
2500
2501
2502 /* Convert a comparison tree code from an enum tree_code representation
2503 into a compcode bit-based encoding. This function is the inverse of
2504 compcode_to_comparison. */
2505
2506 static enum comparison_code
2507 comparison_to_compcode (enum tree_code code)
2508 {
2509 switch (code)
2510 {
2511 case LT_EXPR:
2512 return COMPCODE_LT;
2513 case EQ_EXPR:
2514 return COMPCODE_EQ;
2515 case LE_EXPR:
2516 return COMPCODE_LE;
2517 case GT_EXPR:
2518 return COMPCODE_GT;
2519 case NE_EXPR:
2520 return COMPCODE_NE;
2521 case GE_EXPR:
2522 return COMPCODE_GE;
2523 case ORDERED_EXPR:
2524 return COMPCODE_ORD;
2525 case UNORDERED_EXPR:
2526 return COMPCODE_UNORD;
2527 case UNLT_EXPR:
2528 return COMPCODE_UNLT;
2529 case UNEQ_EXPR:
2530 return COMPCODE_UNEQ;
2531 case UNLE_EXPR:
2532 return COMPCODE_UNLE;
2533 case UNGT_EXPR:
2534 return COMPCODE_UNGT;
2535 case LTGT_EXPR:
2536 return COMPCODE_LTGT;
2537 case UNGE_EXPR:
2538 return COMPCODE_UNGE;
2539 default:
2540 gcc_unreachable ();
2541 }
2542 }
2543
2544 /* Convert a compcode bit-based encoding of a comparison operator back
2545 to GCC's enum tree_code representation. This function is the
2546 inverse of comparison_to_compcode. */
2547
2548 static enum tree_code
2549 compcode_to_comparison (enum comparison_code code)
2550 {
2551 switch (code)
2552 {
2553 case COMPCODE_LT:
2554 return LT_EXPR;
2555 case COMPCODE_EQ:
2556 return EQ_EXPR;
2557 case COMPCODE_LE:
2558 return LE_EXPR;
2559 case COMPCODE_GT:
2560 return GT_EXPR;
2561 case COMPCODE_NE:
2562 return NE_EXPR;
2563 case COMPCODE_GE:
2564 return GE_EXPR;
2565 case COMPCODE_ORD:
2566 return ORDERED_EXPR;
2567 case COMPCODE_UNORD:
2568 return UNORDERED_EXPR;
2569 case COMPCODE_UNLT:
2570 return UNLT_EXPR;
2571 case COMPCODE_UNEQ:
2572 return UNEQ_EXPR;
2573 case COMPCODE_UNLE:
2574 return UNLE_EXPR;
2575 case COMPCODE_UNGT:
2576 return UNGT_EXPR;
2577 case COMPCODE_LTGT:
2578 return LTGT_EXPR;
2579 case COMPCODE_UNGE:
2580 return UNGE_EXPR;
2581 default:
2582 gcc_unreachable ();
2583 }
2584 }
2585
2586 /* Return a tree for the comparison which is the combination of
2587 doing the AND or OR (depending on CODE) of the two operations LCODE
2588 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2589 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2590 if this makes the transformation invalid. */
2591
2592 tree
2593 combine_comparisons (enum tree_code code, enum tree_code lcode,
2594 enum tree_code rcode, tree truth_type,
2595 tree ll_arg, tree lr_arg)
2596 {
2597 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2598 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2599 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2600 enum comparison_code compcode;
2601
2602 switch (code)
2603 {
2604 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2605 compcode = lcompcode & rcompcode;
2606 break;
2607
2608 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2609 compcode = lcompcode | rcompcode;
2610 break;
2611
2612 default:
2613 return NULL_TREE;
2614 }
2615
2616 if (!honor_nans)
2617 {
2618 /* Eliminate unordered comparisons, as well as LTGT and ORD
2619 which are not used unless the mode has NaNs. */
2620 compcode &= ~COMPCODE_UNORD;
2621 if (compcode == COMPCODE_LTGT)
2622 compcode = COMPCODE_NE;
2623 else if (compcode == COMPCODE_ORD)
2624 compcode = COMPCODE_TRUE;
2625 }
2626 else if (flag_trapping_math)
2627 {
2628 /* Check that the original operation and the optimized ones will trap
2629 under the same condition. */
2630 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2631 && (lcompcode != COMPCODE_EQ)
2632 && (lcompcode != COMPCODE_ORD);
2633 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2634 && (rcompcode != COMPCODE_EQ)
2635 && (rcompcode != COMPCODE_ORD);
2636 bool trap = (compcode & COMPCODE_UNORD) == 0
2637 && (compcode != COMPCODE_EQ)
2638 && (compcode != COMPCODE_ORD);
2639
2640 /* In a short-circuited boolean expression the LHS might be
2641 such that the RHS, if evaluated, will never trap. For
2642 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2643 if neither x nor y is NaN. (This is a mixed blessing: for
2644 example, the expression above will never trap, hence
2645 optimizing it to x < y would be invalid). */
2646 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2647 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2648 rtrap = false;
2649
2650 /* If the comparison was short-circuited, and only the RHS
2651 trapped, we may now generate a spurious trap. */
2652 if (rtrap && !ltrap
2653 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2654 return NULL_TREE;
2655
2656 /* If we changed the conditions that cause a trap, we lose. */
2657 if ((ltrap || rtrap) != trap)
2658 return NULL_TREE;
2659 }
2660
2661 if (compcode == COMPCODE_TRUE)
2662 return constant_boolean_node (true, truth_type);
2663 else if (compcode == COMPCODE_FALSE)
2664 return constant_boolean_node (false, truth_type);
2665 else
2666 return fold_build2 (compcode_to_comparison (compcode),
2667 truth_type, ll_arg, lr_arg);
2668 }
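
/* Aside: the payoff of the compcode bit encoding used above is that
   ANDing or ORing two comparison codes yields the code of the combined
   predicate, e.g. COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE, so
   "a < b || a == b" collapses to "a <= b".  A sketch over the NaN-free
   subset (illustrative only, not part of fold-const.c; holds is a
   hypothetical name):  */

#include <stdio.h>

enum { LT = 1, EQ = 2, GT = 4 };  /* the COMPCODE_LT/EQ/GT bits */

/* Evaluate the predicate encoded by CODE on integers A and B.  */
static int
holds (int code, int a, int b)
{
  return ((code & LT) && a < b)
         || ((code & EQ) && a == b)
         || ((code & GT) && a > b);
}

int
main (void)
{
  int le = LT | EQ;  /* == COMPCODE_LE */
  /* Agrees with a <= b for every ordering of the operands: 1 1 0.  */
  printf ("%d %d %d\n", holds (le, 1, 2), holds (le, 2, 2), holds (le, 3, 2));
  return 0;
}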
2669
2670 /* Return nonzero if CODE is a tree code that represents a truth value. */
2671
2672 static int
2673 truth_value_p (enum tree_code code)
2674 {
2675 return (TREE_CODE_CLASS (code) == tcc_comparison
2676 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2677 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2678 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2679 }
2680 \f
2681 /* Return nonzero if two operands (typically of the same tree node)
2682 are necessarily equal. If either argument has side-effects this
2683 function returns zero. FLAGS modifies behavior as follows:
2684
2685 If OEP_ONLY_CONST is set, only return nonzero for constants.
2686 This function tests whether the operands are indistinguishable;
2687 it does not test whether they are equal using C's == operation.
2688 The distinction is important for IEEE floating point, because
2689 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2690 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2691
2692 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2693 even though it may hold multiple values during a function.
2694 This is because a GCC tree node guarantees that nothing else is
2695 executed between the evaluation of its "operands" (which may often
2696 be evaluated in arbitrary order). Hence if the operands themselves
2697 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2698 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2699 unset means assuming isochronic (or instantaneous) tree equivalence.
2700 Unless comparing arbitrary expression trees, such as from different
2701 statements, this flag can usually be left unset.
2702
2703 If OEP_PURE_SAME is set, then pure functions with identical arguments
2704 are considered the same. It is used when the caller has other ways
2705 to ensure that global memory is unchanged in between. */
2706
2707 int
2708 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2709 {
2710 /* If either is ERROR_MARK, they aren't equal. */
2711 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2712 return 0;
2713
2714 /* If the two types don't have the same signedness, then we can't consider
2715 them equal. We must check this before the STRIP_NOPS calls
2716 because they may change the signedness of the arguments. */
2717 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2718 return 0;
2719
2720 /* If the two types don't have the same precision, then it is not safe
2721 to strip NOPs. */
2722 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2723 return 0;
2724
2725 STRIP_NOPS (arg0);
2726 STRIP_NOPS (arg1);
2727
2728 /* In case both args are comparisons but with different comparison
2729 code, try to swap the comparison operands of one arg to produce
2730 a match and compare that variant. */
2731 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2732 && COMPARISON_CLASS_P (arg0)
2733 && COMPARISON_CLASS_P (arg1))
2734 {
2735 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2736
2737 if (TREE_CODE (arg0) == swap_code)
2738 return operand_equal_p (TREE_OPERAND (arg0, 0),
2739 TREE_OPERAND (arg1, 1), flags)
2740 && operand_equal_p (TREE_OPERAND (arg0, 1),
2741 TREE_OPERAND (arg1, 0), flags);
2742 }
2743
2744 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2745 /* This is needed for conversions and for COMPONENT_REF.
2746 Might as well play it safe and always test this. */
2747 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2748 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2749 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2750 return 0;
2751
2752 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2753 We don't care about side effects in that case because the SAVE_EXPR
2754 takes care of that for us. In all other cases, two expressions are
2755 equal if they have no side effects. If we have two identical
2756 expressions with side effects that should be treated the same due
2757 to the only side effects being identical SAVE_EXPR's, that will
2758 be detected in the recursive calls below. */
2759 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2760 && (TREE_CODE (arg0) == SAVE_EXPR
2761 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2762 return 1;
2763
2764 /* Next handle constant cases, those for which we can return 1 even
2765 if ONLY_CONST is set. */
2766 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2767 switch (TREE_CODE (arg0))
2768 {
2769 case INTEGER_CST:
2770 return tree_int_cst_equal (arg0, arg1);
2771
2772 case REAL_CST:
2773 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2774 TREE_REAL_CST (arg1)))
2775 return 1;
2776
2777
2778 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2779 {
2780 /* If we do not distinguish between signed and unsigned zero,
2781 consider them equal. */
2782 if (real_zerop (arg0) && real_zerop (arg1))
2783 return 1;
2784 }
2785 return 0;
2786
2787 case VECTOR_CST:
2788 {
2789 tree v1, v2;
2790
2791 v1 = TREE_VECTOR_CST_ELTS (arg0);
2792 v2 = TREE_VECTOR_CST_ELTS (arg1);
2793 while (v1 && v2)
2794 {
2795 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2796 flags))
2797 return 0;
2798 v1 = TREE_CHAIN (v1);
2799 v2 = TREE_CHAIN (v2);
2800 }
2801
2802 return v1 == v2;
2803 }
2804
2805 case COMPLEX_CST:
2806 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2807 flags)
2808 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2809 flags));
2810
2811 case STRING_CST:
2812 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2813 && ! memcmp (TREE_STRING_POINTER (arg0),
2814 TREE_STRING_POINTER (arg1),
2815 TREE_STRING_LENGTH (arg0)));
2816
2817 case ADDR_EXPR:
2818 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2819 0);
2820 default:
2821 break;
2822 }
2823
2824 if (flags & OEP_ONLY_CONST)
2825 return 0;
2826
2827 /* Define macros to test an operand from arg0 and arg1 for equality and a
2828 variant that allows null and views null as being different from any
2829 non-null value. In the latter case, if either is null, they both
2830 must be; otherwise, do the normal comparison. */
2831 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2832 TREE_OPERAND (arg1, N), flags)
2833
2834 #define OP_SAME_WITH_NULL(N) \
2835 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2836 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2837
2838 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2839 {
2840 case tcc_unary:
2841 /* Two conversions are equal only if signedness and modes match. */
2842 switch (TREE_CODE (arg0))
2843 {
2844 case NOP_EXPR:
2845 case CONVERT_EXPR:
2846 case FIX_TRUNC_EXPR:
2847 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2848 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2849 return 0;
2850 break;
2851 default:
2852 break;
2853 }
2854
2855 return OP_SAME (0);
2856
2857
2858 case tcc_comparison:
2859 case tcc_binary:
2860 if (OP_SAME (0) && OP_SAME (1))
2861 return 1;
2862
2863 /* For commutative ops, allow the other order. */
2864 return (commutative_tree_code (TREE_CODE (arg0))
2865 && operand_equal_p (TREE_OPERAND (arg0, 0),
2866 TREE_OPERAND (arg1, 1), flags)
2867 && operand_equal_p (TREE_OPERAND (arg0, 1),
2868 TREE_OPERAND (arg1, 0), flags));
2869
2870 case tcc_reference:
2871 /* If either of the pointer (or reference) expressions we are
2872 dereferencing contain a side effect, these cannot be equal. */
2873 if (TREE_SIDE_EFFECTS (arg0)
2874 || TREE_SIDE_EFFECTS (arg1))
2875 return 0;
2876
2877 switch (TREE_CODE (arg0))
2878 {
2879 case INDIRECT_REF:
2880 case ALIGN_INDIRECT_REF:
2881 case MISALIGNED_INDIRECT_REF:
2882 case REALPART_EXPR:
2883 case IMAGPART_EXPR:
2884 return OP_SAME (0);
2885
2886 case ARRAY_REF:
2887 case ARRAY_RANGE_REF:
2888 /* Operands 2 and 3 may be null. */
2889 return (OP_SAME (0)
2890 && OP_SAME (1)
2891 && OP_SAME_WITH_NULL (2)
2892 && OP_SAME_WITH_NULL (3));
2893
2894 case COMPONENT_REF:
2895 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2896 may be NULL when we're called to compare MEM_EXPRs. */
2897 return OP_SAME_WITH_NULL (0)
2898 && OP_SAME (1)
2899 && OP_SAME_WITH_NULL (2);
2900
2901 case BIT_FIELD_REF:
2902 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2903
2904 default:
2905 return 0;
2906 }
2907
2908 case tcc_expression:
2909 switch (TREE_CODE (arg0))
2910 {
2911 case ADDR_EXPR:
2912 case TRUTH_NOT_EXPR:
2913 return OP_SAME (0);
2914
2915 case TRUTH_ANDIF_EXPR:
2916 case TRUTH_ORIF_EXPR:
2917 return OP_SAME (0) && OP_SAME (1);
2918
2919 case TRUTH_AND_EXPR:
2920 case TRUTH_OR_EXPR:
2921 case TRUTH_XOR_EXPR:
2922 if (OP_SAME (0) && OP_SAME (1))
2923 return 1;
2924
2925 /* Otherwise take into account this is a commutative operation. */
2926 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2927 TREE_OPERAND (arg1, 1), flags)
2928 && operand_equal_p (TREE_OPERAND (arg0, 1),
2929 TREE_OPERAND (arg1, 0), flags));
2930
2931 default:
2932 return 0;
2933 }
2934
2935 case tcc_vl_exp:
2936 switch (TREE_CODE (arg0))
2937 {
2938 case CALL_EXPR:
2939 /* If the CALL_EXPRs call different functions, then they
2940 clearly cannot be equal. */
2941 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2942 flags))
2943 return 0;
2944
2945 {
2946 unsigned int cef = call_expr_flags (arg0);
2947 if (flags & OEP_PURE_SAME)
2948 cef &= ECF_CONST | ECF_PURE;
2949 else
2950 cef &= ECF_CONST;
2951 if (!cef)
2952 return 0;
2953 }
2954
2955 /* Now see if all the arguments are the same. */
2956 {
2957 call_expr_arg_iterator iter0, iter1;
2958 tree a0, a1;
2959 for (a0 = first_call_expr_arg (arg0, &iter0),
2960 a1 = first_call_expr_arg (arg1, &iter1);
2961 a0 && a1;
2962 a0 = next_call_expr_arg (&iter0),
2963 a1 = next_call_expr_arg (&iter1))
2964 if (! operand_equal_p (a0, a1, flags))
2965 return 0;
2966
2967 /* If we get here and both argument lists are exhausted
2968 then the CALL_EXPRs are equal. */
2969 return ! (a0 || a1);
2970 }
2971 default:
2972 return 0;
2973 }
2974
2975 case tcc_declaration:
2976 /* Consider __builtin_sqrt equal to sqrt. */
2977 return (TREE_CODE (arg0) == FUNCTION_DECL
2978 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2979 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2980 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2981
2982 default:
2983 return 0;
2984 }
2985
2986 #undef OP_SAME
2987 #undef OP_SAME_WITH_NULL
2988 }
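
/* Aside: the IEEE subtleties called out in the comment before
   operand_equal_p are easy to see on the host (illustrative only, not
   part of fold-const.c): -0.0 and 0.0 compare equal yet are
   distinguishable, while a NaN compares unequal to a bit-identical copy
   of itself.  */

#include <math.h>
#include <stdio.h>

int
main (void)
{
  double pz = 0.0, nz = -0.0, n = NAN;

  printf ("%d\n", pz == nz);                        /* 1: equal under == */
  printf ("%d\n", !signbit (pz) == !signbit (nz));  /* 0: distinguishable */
  printf ("%d\n", n != n);                          /* 1: NaN != NaN */
  return 0;
}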
2989 \f
2990 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2991 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2992
2993 When in doubt, return 0. */
2994
2995 static int
2996 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2997 {
2998 int unsignedp1, unsignedpo;
2999 tree primarg0, primarg1, primother;
3000 unsigned int correct_width;
3001
3002 if (operand_equal_p (arg0, arg1, 0))
3003 return 1;
3004
3005 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3006 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3007 return 0;
3008
3009 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3010 and see if the inner values are the same. This removes any
3011 signedness comparison, which doesn't matter here. */
3012 primarg0 = arg0, primarg1 = arg1;
3013 STRIP_NOPS (primarg0);
3014 STRIP_NOPS (primarg1);
3015 if (operand_equal_p (primarg0, primarg1, 0))
3016 return 1;
3017
3018 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3019 actual comparison operand, ARG0.
3020
3021 First throw away any conversions to wider types
3022 already present in the operands. */
3023
3024 primarg1 = get_narrower (arg1, &unsignedp1);
3025 primother = get_narrower (other, &unsignedpo);
3026
3027 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3028 if (unsignedp1 == unsignedpo
3029 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3030 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3031 {
3032 tree type = TREE_TYPE (arg0);
3033
3034 /* Make sure shorter operand is extended the right way
3035 to match the longer operand. */
3036 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
3037 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3038
3039 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3040 return 1;
3041 }
3042
3043 return 0;
3044 }
3045 \f
3046 /* See if ARG is an expression that is either a comparison or is performing
3047 arithmetic on comparisons. The comparisons must only be comparing
3048 two different values, which will be stored in *CVAL1 and *CVAL2; if
3049 they are nonzero it means that some operands have already been found.
3050 No variables may be used anywhere else in the expression except in the
3051 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3052 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3053
3054 If this is true, return 1. Otherwise, return zero. */
3055
3056 static int
3057 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3058 {
3059 enum tree_code code = TREE_CODE (arg);
3060 enum tree_code_class class = TREE_CODE_CLASS (code);
3061
3062 /* We can handle some of the tcc_expression cases here. */
3063 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3064 class = tcc_unary;
3065 else if (class == tcc_expression
3066 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3067 || code == COMPOUND_EXPR))
3068 class = tcc_binary;
3069
3070 else if (class == tcc_expression && code == SAVE_EXPR
3071 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3072 {
3073 /* If we've already found a CVAL1 or CVAL2, this expression is
3074 too complex to handle. */
3075 if (*cval1 || *cval2)
3076 return 0;
3077
3078 class = tcc_unary;
3079 *save_p = 1;
3080 }
3081
3082 switch (class)
3083 {
3084 case tcc_unary:
3085 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3086
3087 case tcc_binary:
3088 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3089 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3090 cval1, cval2, save_p));
3091
3092 case tcc_constant:
3093 return 1;
3094
3095 case tcc_expression:
3096 if (code == COND_EXPR)
3097 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3098 cval1, cval2, save_p)
3099 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3100 cval1, cval2, save_p)
3101 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3102 cval1, cval2, save_p));
3103 return 0;
3104
3105 case tcc_comparison:
3106 /* First see if we can handle the first operand, then the second. For
3107 the second operand, we know *CVAL1 can't be zero. It must be that
3108 one side of the comparison is each of the values; test for the
3109 case where this isn't true by failing if the two operands
3110 are the same. */
3111
3112 if (operand_equal_p (TREE_OPERAND (arg, 0),
3113 TREE_OPERAND (arg, 1), 0))
3114 return 0;
3115
3116 if (*cval1 == 0)
3117 *cval1 = TREE_OPERAND (arg, 0);
3118 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3119 ;
3120 else if (*cval2 == 0)
3121 *cval2 = TREE_OPERAND (arg, 0);
3122 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3123 ;
3124 else
3125 return 0;
3126
3127 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3128 ;
3129 else if (*cval2 == 0)
3130 *cval2 = TREE_OPERAND (arg, 1);
3131 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3132 ;
3133 else
3134 return 0;
3135
3136 return 1;
3137
3138 default:
3139 return 0;
3140 }
3141 }
3142 \f
3143 /* ARG is a tree that is known to contain just arithmetic operations and
3144 comparisons. Evaluate the operations in the tree substituting NEW0 for
3145 any occurrence of OLD0 as an operand of a comparison and likewise for
3146 NEW1 and OLD1. */
3147
3148 static tree
3149 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3150 {
3151 tree type = TREE_TYPE (arg);
3152 enum tree_code code = TREE_CODE (arg);
3153 enum tree_code_class class = TREE_CODE_CLASS (code);
3154
3155 /* We can handle some of the tcc_expression cases here. */
3156 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3157 class = tcc_unary;
3158 else if (class == tcc_expression
3159 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3160 class = tcc_binary;
3161
3162 switch (class)
3163 {
3164 case tcc_unary:
3165 return fold_build1 (code, type,
3166 eval_subst (TREE_OPERAND (arg, 0),
3167 old0, new0, old1, new1));
3168
3169 case tcc_binary:
3170 return fold_build2 (code, type,
3171 eval_subst (TREE_OPERAND (arg, 0),
3172 old0, new0, old1, new1),
3173 eval_subst (TREE_OPERAND (arg, 1),
3174 old0, new0, old1, new1));
3175
3176 case tcc_expression:
3177 switch (code)
3178 {
3179 case SAVE_EXPR:
3180 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3181
3182 case COMPOUND_EXPR:
3183 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3184
3185 case COND_EXPR:
3186 return fold_build3 (code, type,
3187 eval_subst (TREE_OPERAND (arg, 0),
3188 old0, new0, old1, new1),
3189 eval_subst (TREE_OPERAND (arg, 1),
3190 old0, new0, old1, new1),
3191 eval_subst (TREE_OPERAND (arg, 2),
3192 old0, new0, old1, new1));
3193 default:
3194 break;
3195 }
3196 /* Fall through - ??? */
3197
3198 case tcc_comparison:
3199 {
3200 tree arg0 = TREE_OPERAND (arg, 0);
3201 tree arg1 = TREE_OPERAND (arg, 1);
3202
3203 /* We need to check both for exact equality and tree equality. The
3204 former will be true if the operand has a side-effect. In that
3205 case, we know the operand occurred exactly once. */
3206
3207 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3208 arg0 = new0;
3209 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3210 arg0 = new1;
3211
3212 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3213 arg1 = new0;
3214 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3215 arg1 = new1;
3216
3217 return fold_build2 (code, type, arg0, arg1);
3218 }
3219
3220 default:
3221 return arg;
3222 }
3223 }
3224 \f
3225 /* Return a tree for the case when the result of an expression is RESULT
3226 converted to TYPE and OMITTED was previously an operand of the expression
3227 but is now not needed (e.g., we folded OMITTED * 0).
3228
3229 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3230 the conversion of RESULT to TYPE. */
3231
3232 tree
3233 omit_one_operand (tree type, tree result, tree omitted)
3234 {
3235 tree t = fold_convert (type, result);
3236
3237 if (TREE_SIDE_EFFECTS (omitted))
3238 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3239
3240 return non_lvalue (t);
3241 }
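
/* Illustration: when OMITTED has side effects, the COMPOUND_EXPR built
   above is the tree analogue of C's comma operator.  E.g. folding
   f () * 0 must still call f (illustrative only, not part of
   fold-const.c; the names below are hypothetical):  */

#include <stdio.h>

static int
f (void)
{
  puts ("side effect");
  return 7;
}

int
main (void)
{
  int r = (f (), 0);   /* evaluate f for its effect, yield 0 */
  printf ("%d\n", r);  /* prints "side effect", then 0 */
  return 0;
}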
3242
3243 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3244
3245 static tree
3246 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3247 {
3248 tree t = fold_convert (type, result);
3249
3250 if (TREE_SIDE_EFFECTS (omitted))
3251 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3252
3253 return pedantic_non_lvalue (t);
3254 }
3255
3256 /* Return a tree for the case when the result of an expression is RESULT
3257 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3258 of the expression but are now not needed.
3259
3260 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3261 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3262 evaluated before OMITTED2. Otherwise, if neither has side effects,
3263 just do the conversion of RESULT to TYPE. */
3264
3265 tree
3266 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3267 {
3268 tree t = fold_convert (type, result);
3269
3270 if (TREE_SIDE_EFFECTS (omitted2))
3271 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3272 if (TREE_SIDE_EFFECTS (omitted1))
3273 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3274
3275 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3276 }
3277
3278 \f
3279 /* Return a simplified tree node for the truth-negation of ARG. This
3280 never alters ARG itself. We assume that ARG is an operation that
3281 returns a truth value (0 or 1).
3282
3283 FIXME: one would think we would fold the result, but it causes
3284 problems with the dominator optimizer. */
3285
3286 tree
3287 fold_truth_not_expr (tree arg)
3288 {
3289 tree type = TREE_TYPE (arg);
3290 enum tree_code code = TREE_CODE (arg);
3291
3292 /* If this is a comparison, we can simply invert it, except for
3293 floating-point non-equality comparisons, in which case we just
3294 enclose a TRUTH_NOT_EXPR around what we have. */
3295
3296 if (TREE_CODE_CLASS (code) == tcc_comparison)
3297 {
3298 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3299 if (FLOAT_TYPE_P (op_type)
3300 && flag_trapping_math
3301 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3302 && code != NE_EXPR && code != EQ_EXPR)
3303 return NULL_TREE;
3304 else
3305 {
3306 code = invert_tree_comparison (code,
3307 HONOR_NANS (TYPE_MODE (op_type)));
3308 if (code == ERROR_MARK)
3309 return NULL_TREE;
3310 else
3311 return build2 (code, type,
3312 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3313 }
3314 }
3315
3316 switch (code)
3317 {
3318 case INTEGER_CST:
3319 return constant_boolean_node (integer_zerop (arg), type);
3320
3321 case TRUTH_AND_EXPR:
3322 return build2 (TRUTH_OR_EXPR, type,
3323 invert_truthvalue (TREE_OPERAND (arg, 0)),
3324 invert_truthvalue (TREE_OPERAND (arg, 1)));
3325
3326 case TRUTH_OR_EXPR:
3327 return build2 (TRUTH_AND_EXPR, type,
3328 invert_truthvalue (TREE_OPERAND (arg, 0)),
3329 invert_truthvalue (TREE_OPERAND (arg, 1)));
3330
3331 case TRUTH_XOR_EXPR:
3332 /* Here we can invert either operand. We invert the first operand
3333 unless the second operand is a TRUTH_NOT_EXPR in which case our
3334 result is the XOR of the first operand with the inside of the
3335 negation of the second operand. */
3336
3337 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3338 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3339 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3340 else
3341 return build2 (TRUTH_XOR_EXPR, type,
3342 invert_truthvalue (TREE_OPERAND (arg, 0)),
3343 TREE_OPERAND (arg, 1));
3344
3345 case TRUTH_ANDIF_EXPR:
3346 return build2 (TRUTH_ORIF_EXPR, type,
3347 invert_truthvalue (TREE_OPERAND (arg, 0)),
3348 invert_truthvalue (TREE_OPERAND (arg, 1)));
3349
3350 case TRUTH_ORIF_EXPR:
3351 return build2 (TRUTH_ANDIF_EXPR, type,
3352 invert_truthvalue (TREE_OPERAND (arg, 0)),
3353 invert_truthvalue (TREE_OPERAND (arg, 1)));
3354
3355 case TRUTH_NOT_EXPR:
3356 return TREE_OPERAND (arg, 0);
3357
3358 case COND_EXPR:
3359 {
3360 tree arg1 = TREE_OPERAND (arg, 1);
3361 tree arg2 = TREE_OPERAND (arg, 2);
3362 /* A COND_EXPR may have a throw as one operand, which
3363 then has void type. Just leave void operands
3364 as they are. */
3365 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3366 VOID_TYPE_P (TREE_TYPE (arg1))
3367 ? arg1 : invert_truthvalue (arg1),
3368 VOID_TYPE_P (TREE_TYPE (arg2))
3369 ? arg2 : invert_truthvalue (arg2));
3370 }
3371
3372 case COMPOUND_EXPR:
3373 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3374 invert_truthvalue (TREE_OPERAND (arg, 1)));
3375
3376 case NON_LVALUE_EXPR:
3377 return invert_truthvalue (TREE_OPERAND (arg, 0));
3378
3379 case NOP_EXPR:
3380 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3381 return build1 (TRUTH_NOT_EXPR, type, arg);
3382
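/* ... fall through ... */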
3383 case CONVERT_EXPR:
3384 case FLOAT_EXPR:
3385 return build1 (TREE_CODE (arg), type,
3386 invert_truthvalue (TREE_OPERAND (arg, 0)));
3387
3388 case BIT_AND_EXPR:
3389 if (!integer_onep (TREE_OPERAND (arg, 1)))
3390 break;
3391 return build2 (EQ_EXPR, type, arg,
3392 build_int_cst (type, 0));
3393
3394 case SAVE_EXPR:
3395 return build1 (TRUTH_NOT_EXPR, type, arg);
3396
3397 case CLEANUP_POINT_EXPR:
3398 return build1 (CLEANUP_POINT_EXPR, type,
3399 invert_truthvalue (TREE_OPERAND (arg, 0)));
3400
3401 default:
3402 break;
3403 }
3404
3405 return NULL_TREE;
3406 }
3407
3408 /* Return a simplified tree node for the truth-negation of ARG. This
3409 never alters ARG itself. We assume that ARG is an operation that
3410 returns a truth value (0 or 1).
3411
3412 FIXME: one would think we would fold the result, but it causes
3413 problems with the dominator optimizer. */
3414
3415 tree
3416 invert_truthvalue (tree arg)
3417 {
3418 tree tem;
3419
3420 if (TREE_CODE (arg) == ERROR_MARK)
3421 return arg;
3422
3423 tem = fold_truth_not_expr (arg);
3424 if (!tem)
3425 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3426
3427 return tem;
3428 }
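
/* Aside: the TRUTH_AND_EXPR and TRUTH_OR_EXPR cases in
   fold_truth_not_expr above are De Morgan's laws.  A quick exhaustive
   check over boolean inputs (illustrative only, not part of
   fold-const.c):  */

#include <stdio.h>

int
main (void)
{
  int a, b, ok = 1;
  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      {
        if (!(a && b) != (!a || !b))   /* NOT (A AND B) == !A OR !B */
          ok = 0;
        if (!(a || b) != (!a && !b))   /* NOT (A OR B) == !A AND !B */
          ok = 0;
      }
  printf ("%s\n", ok ? "ok" : "mismatch");  /* prints ok */
  return 0;
}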
3429
3430 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3431 operands are another bit-wise operation with a common input. If so,
3432 distribute the bit operations to save an operation and possibly two if
3433 constants are involved. For example, convert
3434 (A | B) & (A | C) into A | (B & C)
3435 Further simplification will occur if B and C are constants.
3436
3437 If this optimization cannot be done, 0 will be returned. */
3438
3439 static tree
3440 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3441 {
3442 tree common;
3443 tree left, right;
3444
3445 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3446 || TREE_CODE (arg0) == code
3447 || (TREE_CODE (arg0) != BIT_AND_EXPR
3448 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3449 return 0;
3450
3451 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3452 {
3453 common = TREE_OPERAND (arg0, 0);
3454 left = TREE_OPERAND (arg0, 1);
3455 right = TREE_OPERAND (arg1, 1);
3456 }
3457 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3458 {
3459 common = TREE_OPERAND (arg0, 0);
3460 left = TREE_OPERAND (arg0, 1);
3461 right = TREE_OPERAND (arg1, 0);
3462 }
3463 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3464 {
3465 common = TREE_OPERAND (arg0, 1);
3466 left = TREE_OPERAND (arg0, 0);
3467 right = TREE_OPERAND (arg1, 1);
3468 }
3469 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3470 {
3471 common = TREE_OPERAND (arg0, 1);
3472 left = TREE_OPERAND (arg0, 0);
3473 right = TREE_OPERAND (arg1, 0);
3474 }
3475 else
3476 return 0;
3477
3478 return fold_build2 (TREE_CODE (arg0), type, common,
3479 fold_build2 (code, type, left, right));
3480 }
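
/* Aside: the distribution law applied above, (A | B) & (A | C) ==
   A | (B & C), and its dual with the operators exchanged, checked
   exhaustively on small values (illustrative only, not part of
   fold-const.c):  */

#include <stdio.h>

int
main (void)
{
  unsigned a, b, c, ok = 1;
  for (a = 0; a < 8; a++)
    for (b = 0; b < 8; b++)
      for (c = 0; c < 8; c++)
        {
          if (((a | b) & (a | c)) != (a | (b & c)))
            ok = 0;
          if (((a & b) | (a & c)) != (a & (b | c)))
            ok = 0;
        }
  printf ("%s\n", ok ? "ok" : "mismatch");  /* prints ok */
  return 0;
}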
3481
3482 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3483 with code CODE. This optimization is unsafe. */
3484 static tree
3485 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3486 {
3487 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3488 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3489
3490 /* (A / C) +- (B / C) -> (A +- B) / C. */
3491 if (mul0 == mul1
3492 && operand_equal_p (TREE_OPERAND (arg0, 1),
3493 TREE_OPERAND (arg1, 1), 0))
3494 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3495 fold_build2 (code, type,
3496 TREE_OPERAND (arg0, 0),
3497 TREE_OPERAND (arg1, 0)),
3498 TREE_OPERAND (arg0, 1));
3499
3500 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3501 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3502 TREE_OPERAND (arg1, 0), 0)
3503 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3504 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3505 {
3506 REAL_VALUE_TYPE r0, r1;
3507 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3508 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3509 if (!mul0)
3510 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3511 if (!mul1)
3512 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3513 real_arithmetic (&r0, code, &r0, &r1);
3514 return fold_build2 (MULT_EXPR, type,
3515 TREE_OPERAND (arg0, 0),
3516 build_real (type, r0));
3517 }
3518
3519 return NULL_TREE;
3520 }
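
/* Aside: why distribute_real_division is "unsafe": rewriting
   (a/c) + (b/c) as (a+b)/c changes where rounding happens, so the two
   forms can differ in the last bit.  A host-side example (illustrative
   only, not part of fold-const.c):  */

#include <stdio.h>

int
main (void)
{
  double a = 1.0, b = 2.0, c = 10.0;
  /* 0.1 + 0.2 rounds twice; 3.0/10.0 rounds once.  They differ.  */
  printf ("%d\n", a / c + b / c == (a + b) / c);  /* prints 0 */
  printf ("%.17g\n%.17g\n", a / c + b / c, (a + b) / c);
  return 0;
}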
3521 \f
3522 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3523 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3524
3525 static tree
3526 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3527 int unsignedp)
3528 {
3529 tree result;
3530
3531 if (bitpos == 0)
3532 {
3533 tree size = TYPE_SIZE (TREE_TYPE (inner));
3534 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3535 || POINTER_TYPE_P (TREE_TYPE (inner)))
3536 && host_integerp (size, 0)
3537 && tree_low_cst (size, 0) == bitsize)
3538 return fold_convert (type, inner);
3539 }
3540
3541 result = build3 (BIT_FIELD_REF, type, inner,
3542 size_int (bitsize), bitsize_int (bitpos));
3543
3544 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3545
3546 return result;
3547 }
3548
3549 /* Optimize a bit-field compare.
3550
3551 There are two cases: First is a compare against a constant and the
3552 second is a comparison of two items where the fields are at the same
3553 bit position relative to the start of a chunk (byte, halfword, word)
3554 large enough to contain it. In these cases we can avoid the shift
3555 implicit in bitfield extractions.
3556
3557 For constants, we emit a compare of the shifted constant with the
3558 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3559 compared. For two fields at the same position, we do the ANDs with the
3560 similar mask and compare the result of the ANDs.
3561
3562 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3563 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3564 are the left and right operands of the comparison, respectively.
3565
3566 If the optimization described above can be done, we return the resulting
3567 tree. Otherwise we return zero. */
3568
3569 static tree
3570 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3571 tree lhs, tree rhs)
3572 {
3573 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3574 tree type = TREE_TYPE (lhs);
3575 tree signed_type, unsigned_type;
3576 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3577 enum machine_mode lmode, rmode, nmode;
3578 int lunsignedp, runsignedp;
3579 int lvolatilep = 0, rvolatilep = 0;
3580 tree linner, rinner = NULL_TREE;
3581 tree mask;
3582 tree offset;
3583
3584 /* Get all the information about the extractions being done. If the bit size
3585 is the same as the size of the underlying object, we aren't doing an
3586 extraction at all and so can do nothing. We also don't want to
3587 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3588 then will no longer be able to replace it. */
3589 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3590 &lunsignedp, &lvolatilep, false);
3591 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3592 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3593 return 0;
3594
3595 if (!const_p)
3596 {
3597 /* If this is not a constant, we can only do something if bit positions,
3598 sizes, and signedness are the same. */
3599 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3600 &runsignedp, &rvolatilep, false);
3601
3602 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3603 || lunsignedp != runsignedp || offset != 0
3604 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3605 return 0;
3606 }
3607
3608 /* See if we can find a mode to refer to this field. We should be able to,
3609 but fail if we can't. */
3610 nmode = get_best_mode (lbitsize, lbitpos,
3611 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3612 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3613 TYPE_ALIGN (TREE_TYPE (rinner))),
3614 word_mode, lvolatilep || rvolatilep);
3615 if (nmode == VOIDmode)
3616 return 0;
3617
3618 /* Set signed and unsigned types of the precision of this mode for the
3619 shifts below. */
3620 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3621 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3622
3623 /* Compute the bit position and size for the new reference and our offset
3624 within it. If the new reference is the same size as the original, we
3625 won't optimize anything, so return zero. */
3626 nbitsize = GET_MODE_BITSIZE (nmode);
3627 nbitpos = lbitpos & ~ (nbitsize - 1);
3628 lbitpos -= nbitpos;
3629 if (nbitsize == lbitsize)
3630 return 0;
3631
3632 if (BYTES_BIG_ENDIAN)
3633 lbitpos = nbitsize - lbitsize - lbitpos;
3634
3635 /* Make the mask to be used against the extracted field. */
3636 mask = build_int_cst_type (unsigned_type, -1);
3637 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3638 mask = const_binop (RSHIFT_EXPR, mask,
3639 size_int (nbitsize - lbitsize - lbitpos), 0);
3640
3641 if (! const_p)
3642 /* If not comparing with constant, just rework the comparison
3643 and return. */
3644 return fold_build2 (code, compare_type,
3645 fold_build2 (BIT_AND_EXPR, unsigned_type,
3646 make_bit_field_ref (linner,
3647 unsigned_type,
3648 nbitsize, nbitpos,
3649 1),
3650 mask),
3651 fold_build2 (BIT_AND_EXPR, unsigned_type,
3652 make_bit_field_ref (rinner,
3653 unsigned_type,
3654 nbitsize, nbitpos,
3655 1),
3656 mask));
3657
3658 /* Otherwise, we are handling the constant case. See if the constant is too
3659 big for the field.  Warn and return a tree for 0 (false) if so.  We do
3660 this not only for its own sake, but to avoid having to test for this
3661 error case below. If we didn't, we might generate wrong code.
3662
3663 For unsigned fields, the constant shifted right by the field length should
3664 be all zero. For signed fields, the high-order bits should agree with
3665 the sign bit. */
3666
3667 if (lunsignedp)
3668 {
3669 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3670 fold_convert (unsigned_type, rhs),
3671 size_int (lbitsize), 0)))
3672 {
3673 warning (0, "comparison is always %d due to width of bit-field",
3674 code == NE_EXPR);
3675 return constant_boolean_node (code == NE_EXPR, compare_type);
3676 }
3677 }
3678 else
3679 {
3680 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3681 size_int (lbitsize - 1), 0);
3682 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3683 {
3684 warning (0, "comparison is always %d due to width of bit-field",
3685 code == NE_EXPR);
3686 return constant_boolean_node (code == NE_EXPR, compare_type);
3687 }
3688 }
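/* E.g. for a 3-bit unsigned field F, "x.f == 9" is caught by the check
above: 9 >> 3 is nonzero, so we warn that the comparison is always 0
and fold it away.  */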
3689
3690 /* Single-bit compares should always be against zero. */
3691 if (lbitsize == 1 && ! integer_zerop (rhs))
3692 {
3693 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3694 rhs = build_int_cst (type, 0);
3695 }
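/* This turns, e.g., "x.b == 1" for a one-bit field into "x.b != 0", so
the extracted bit is always compared against zero below.  */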
3696
3697 /* Make a new bitfield reference, shift the constant over the
3698 appropriate number of bits and mask it with the computed mask
3699 (in case this was a signed field), and mark the new reference volatile
if the original was. */
3700 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3701 if (lvolatilep)
3702 {
3703 TREE_SIDE_EFFECTS (lhs) = 1;
3704 TREE_THIS_VOLATILE (lhs) = 1;
3705 }
3706
3707 rhs = const_binop (BIT_AND_EXPR,
3708 const_binop (LSHIFT_EXPR,
3709 fold_convert (unsigned_type, rhs),
3710 size_int (lbitpos), 0),
3711 mask, 0);
3712
3713 return build2 (code, compare_type,
3714 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3715 rhs);
3716 }
3717 \f
3718 /* Subroutine for fold_truthop: decode a field reference.
3719
3720 If EXP is a comparison reference, we return the innermost reference.
3721
3722 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3723 set to the starting bit number.
3724
3725 If the innermost field can be completely contained in a mode-sized
3726 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3727
3728 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3729 otherwise it is not changed.
3730
3731 *PUNSIGNEDP is set to the signedness of the field.
3732
3733 *PMASK is set to the mask used. This is either contained in a
3734 BIT_AND_EXPR or derived from the width of the field.
3735
3736 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3737
3738 Return 0 if this is not a component reference or is one that we can't
3739 do anything with. */
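/* Illustrative example (the exact trees depend on the front end): for
EXP = "x.f & 3", where F is an 8-bit unsigned bit-field, we return the
underlying object X, set *PBITSIZE to 8, *PAND_MASK to 3, and *PMASK
to the field mask 0xff merged with 3, i.e. 3.  */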
3740
3741 static tree
3742 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3743 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3744 int *punsignedp, int *pvolatilep,
3745 tree *pmask, tree *pand_mask)
3746 {
3747 tree outer_type = 0;
3748 tree and_mask = 0;
3749 tree mask, inner, offset;
3750 tree unsigned_type;
3751 unsigned int precision;
3752
3753 /* All the optimizations using this function assume integer fields.
3754 There are problems with FP fields since the type_for_size call
3755 below can fail for, e.g., XFmode. */
3756 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3757 return 0;
3758
3759 /* We are interested in the bare arrangement of bits, so strip everything
3760 that doesn't affect the machine mode. However, record the type of the
3761 outermost expression if it may matter below. */
3762 if (TREE_CODE (exp) == NOP_EXPR
3763 || TREE_CODE (exp) == CONVERT_EXPR
3764 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3765 outer_type = TREE_TYPE (exp);
3766 STRIP_NOPS (exp);
3767
3768 if (TREE_CODE (exp) == BIT_AND_EXPR)
3769 {
3770 and_mask = TREE_OPERAND (exp, 1);
3771 exp = TREE_OPERAND (exp, 0);
3772 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3773 if (TREE_CODE (and_mask) != INTEGER_CST)
3774 return 0;
3775 }
3776
3777 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3778 punsignedp, pvolatilep, false);
3779 if ((inner == exp && and_mask == 0)
3780 || *pbitsize < 0 || offset != 0
3781 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3782 return 0;
3783
3784 /* If the number of bits in the reference is the same as the bitsize of
3785 the outer type, then the outer type gives the signedness. Otherwise
3786 (in case of a small bitfield) the signedness is unchanged. */
3787 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3788 *punsignedp = TYPE_UNSIGNED (outer_type);
3789
3790 /* Compute the mask to access the bitfield. */
3791 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3792 precision = TYPE_PRECISION (unsigned_type);
3793
3794 mask = build_int_cst_type (unsigned_type, -1);
3795
3796 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3797 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3798
3799 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3800 if (and_mask != 0)
3801 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3802 fold_convert (unsigned_type, and_mask), mask);
3803
3804 *pmask = mask;
3805 *pand_mask = and_mask;
3806 return inner;
3807 }
3808
3809 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3810 bit positions. */
3811
3812 static int
3813 all_ones_mask_p (tree mask, int size)
3814 {
3815 tree type = TREE_TYPE (mask);
3816 unsigned int precision = TYPE_PRECISION (type);
3817 tree tmask;
3818
3819 tmask = build_int_cst_type (lang_hooks.types.signed_type (type), -1);
3820
3821 return
3822 tree_int_cst_equal (mask,
3823 const_binop (RSHIFT_EXPR,
3824 const_binop (LSHIFT_EXPR, tmask,
3825 size_int (precision - size),
3826 0),
3827 size_int (precision - size), 0));
3828 }
3829
3830 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3831 represents the sign bit of EXP's type. If EXP represents a sign
3832 or zero extension, also test VAL against the unextended type.
3833 The return value is the (sub)expression whose sign bit is VAL,
3834 or NULL_TREE otherwise. */
3835
3836 static tree
3837 sign_bit_p (tree exp, tree val)
3838 {
3839 unsigned HOST_WIDE_INT mask_lo, lo;
3840 HOST_WIDE_INT mask_hi, hi;
3841 int width;
3842 tree t;
3843
3844 /* Tree EXP must have an integral type. */
3845 t = TREE_TYPE (exp);
3846 if (! INTEGRAL_TYPE_P (t))
3847 return NULL_TREE;
3848
3849 /* Tree VAL must be an integer constant. */
3850 if (TREE_CODE (val) != INTEGER_CST
3851 || TREE_OVERFLOW (val))
3852 return NULL_TREE;
3853
3854 width = TYPE_PRECISION (t);
3855 if (width > HOST_BITS_PER_WIDE_INT)
3856 {
3857 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3858 lo = 0;
3859
3860 mask_hi = ((unsigned HOST_WIDE_INT) -1
3861 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3862 mask_lo = -1;
3863 }
3864 else
3865 {
3866 hi = 0;
3867 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3868
3869 mask_hi = 0;
3870 mask_lo = ((unsigned HOST_WIDE_INT) -1
3871 >> (HOST_BITS_PER_WIDE_INT - width));
3872 }
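/* E.g. for an 8-bit type on a host with 64-bit HOST_WIDE_INT this gives
hi == 0, lo == 0x80, mask_hi == 0 and mask_lo == 0xff, so a
(signed char) -128 constant is recognized by the test below.  */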
3873
3874 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3875 treat VAL as if it were unsigned. */
3876 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3877 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3878 return exp;
3879
3880 /* Handle extension from a narrower type. */
3881 if (TREE_CODE (exp) == NOP_EXPR
3882 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3883 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3884
3885 return NULL_TREE;
3886 }
3887
3888 /* Subroutine for fold_truthop: determine if an operand is simple enough
3889 to be evaluated unconditionally. */
3890
3891 static int
3892 simple_operand_p (tree exp)
3893 {
3894 /* Strip any conversions that don't change the machine mode. */
3895 STRIP_NOPS (exp);
3896
3897 return (CONSTANT_CLASS_P (exp)
3898 || TREE_CODE (exp) == SSA_NAME
3899 || (DECL_P (exp)
3900 && ! TREE_ADDRESSABLE (exp)
3901 && ! TREE_THIS_VOLATILE (exp)
3902 && ! DECL_NONLOCAL (exp)
3903 /* Don't regard global variables as simple. They may be
3904 allocated in ways unknown to the compiler (shared memory,
3905 #pragma weak, etc). */
3906 && ! TREE_PUBLIC (exp)
3907 && ! DECL_EXTERNAL (exp)
3908 /* Loading a static variable is unduly expensive, but global
3909 registers aren't expensive. */
3910 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3911 }
3912 \f
3913 /* The following functions are subroutines to fold_range_test and allow it to
3914 try to change a logical combination of comparisons into a range test.
3915
3916 For example, both
3917 X == 2 || X == 3 || X == 4 || X == 5
3918 and
3919 X >= 2 && X <= 5
3920 are converted to
3921 (unsigned) (X - 2) <= 3
3922
3923 We describe each set of comparisons as being either inside or outside
3924 a range, using a variable named like IN_P, and then describe the
3925 range with a lower and upper bound. If one of the bounds is omitted,
3926 it represents either the highest or lowest value of the type.
3927
3928 In the comments below, we represent a range by two numbers in brackets
3929 preceded by a "+" to designate being inside that range, or a "-" to
3930 designate being outside that range, so the condition can be inverted by
3931 flipping the prefix. An omitted bound is represented by a "-". For
3932 example, "- [-, 10]" means being outside the range starting at the lowest
3933 possible value and ending at 10, in other words, being greater than 10.
3934 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3935 always false.
3936
3937 We set up things so that the missing bounds are handled in a consistent
3938 manner so neither a missing bound nor "true" and "false" need to be
3939 handled using a special case. */
3940
3941 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3942 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3943 and UPPER1_P are nonzero if the respective argument is an upper bound
3944 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3945 must be specified for a comparison. ARG1 will be converted to ARG0's
3946 type if both are specified. */
3947
3948 static tree
3949 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3950 tree arg1, int upper1_p)
3951 {
3952 tree tem;
3953 int result;
3954 int sgn0, sgn1;
3955
3956 /* If neither arg represents infinity, do the normal operation.
3957 Else, if not a comparison, return infinity. Else handle the special
3958 comparison rules. Note that most of the cases below won't occur, but
3959 are handled for consistency. */
3960
3961 if (arg0 != 0 && arg1 != 0)
3962 {
3963 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3964 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3965 STRIP_NOPS (tem);
3966 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3967 }
3968
3969 if (TREE_CODE_CLASS (code) != tcc_comparison)
3970 return 0;
3971
3972 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3973 for neither.  In real mathematics we could not assume that open-ended
3974 ranges are comparable.  But this is computer arithmetic, where numbers
3975 are finite, so any missing bound can be modeled as a value Z greater
3976 than every representable number, which permits
3977 us to treat unbounded ranges as equal. */
3978 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3979 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
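/* For instance, with ARG0 an omitted upper bound (sgn0 == 1) and ARG1
the constant 5 (sgn1 == 0), LT_EXPR below computes 1 < 0, i.e. false:
"+infinity < 5" indeed does not hold.  */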
3980 switch (code)
3981 {
3982 case EQ_EXPR:
3983 result = sgn0 == sgn1;
3984 break;
3985 case NE_EXPR:
3986 result = sgn0 != sgn1;
3987 break;
3988 case LT_EXPR:
3989 result = sgn0 < sgn1;
3990 break;
3991 case LE_EXPR:
3992 result = sgn0 <= sgn1;
3993 break;
3994 case GT_EXPR:
3995 result = sgn0 > sgn1;
3996 break;
3997 case GE_EXPR:
3998 result = sgn0 >= sgn1;
3999 break;
4000 default:
4001 gcc_unreachable ();
4002 }
4003
4004 return constant_boolean_node (result, type);
4005 }
4006 \f
4007 /* Given EXP, a logical expression, set the range it is testing into
4008 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4009 actually being tested. *PLOW and *PHIGH will be made of the same
4010 type as the returned expression. If EXP is not a comparison, we
4011 will most likely not be returning a useful value and range. Set
4012 *STRICT_OVERFLOW_P to true if the return value is only valid
4013 because signed overflow is undefined; otherwise, do not change
4014 *STRICT_OVERFLOW_P. */
4015
4016 static tree
4017 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4018 bool *strict_overflow_p)
4019 {
4020 enum tree_code code;
4021 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4022 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4023 int in_p, n_in_p;
4024 tree low, high, n_low, n_high;
4025
4026 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4027 and see if we can refine the range. Some of the cases below may not
4028 happen, but it doesn't seem worth worrying about this. We "continue"
4029 the outer loop when we've changed something; otherwise we "break"
4030 the switch, which will "break" the while. */
4031
4032 in_p = 0;
4033 low = high = build_int_cst (TREE_TYPE (exp), 0);
4034
4035 while (1)
4036 {
4037 code = TREE_CODE (exp);
4038 exp_type = TREE_TYPE (exp);
4039
4040 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4041 {
4042 if (TREE_OPERAND_LENGTH (exp) > 0)
4043 arg0 = TREE_OPERAND (exp, 0);
4044 if (TREE_CODE_CLASS (code) == tcc_comparison
4045 || TREE_CODE_CLASS (code) == tcc_unary
4046 || TREE_CODE_CLASS (code) == tcc_binary)
4047 arg0_type = TREE_TYPE (arg0);
4048 if (TREE_CODE_CLASS (code) == tcc_binary
4049 || TREE_CODE_CLASS (code) == tcc_comparison
4050 || (TREE_CODE_CLASS (code) == tcc_expression
4051 && TREE_OPERAND_LENGTH (exp) > 1))
4052 arg1 = TREE_OPERAND (exp, 1);
4053 }
4054
4055 switch (code)
4056 {
4057 case TRUTH_NOT_EXPR:
4058 in_p = ! in_p, exp = arg0;
4059 continue;
4060
4061 case EQ_EXPR: case NE_EXPR:
4062 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4063 /* We can only do something if the range is testing for zero
4064 and if the second operand is an integer constant. Note that
4065 saying something is "in" the range we make is done by
4066 complementing IN_P, since it is initially set up for the case of
4067 being not equal to zero; "out" means leaving it alone. */
4068 if (low == 0 || high == 0
4069 || ! integer_zerop (low) || ! integer_zerop (high)
4070 || TREE_CODE (arg1) != INTEGER_CST)
4071 break;
4072
4073 switch (code)
4074 {
4075 case NE_EXPR: /* - [c, c] */
4076 low = high = arg1;
4077 break;
4078 case EQ_EXPR: /* + [c, c] */
4079 in_p = ! in_p, low = high = arg1;
4080 break;
4081 case GT_EXPR: /* - [-, c] */
4082 low = 0, high = arg1;
4083 break;
4084 case GE_EXPR: /* + [c, -] */
4085 in_p = ! in_p, low = arg1, high = 0;
4086 break;
4087 case LT_EXPR: /* - [c, -] */
4088 low = arg1, high = 0;
4089 break;
4090 case LE_EXPR: /* + [-, c] */
4091 in_p = ! in_p, low = 0, high = arg1;
4092 break;
4093 default:
4094 gcc_unreachable ();
4095 }
4096
4097 /* If this is an unsigned comparison, we also know that EXP is
4098 greater than or equal to zero. We base the range tests we make
4099 on that fact, so we record it here so we can parse existing
4100 range tests. We test arg0_type since often the return type
4101 of, e.g., EQ_EXPR, is boolean. */
4102 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4103 {
4104 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4105 in_p, low, high, 1,
4106 build_int_cst (arg0_type, 0),
4107 NULL_TREE))
4108 break;
4109
4110 in_p = n_in_p, low = n_low, high = n_high;
4111
4112 /* If the high bound is missing, but we have a nonzero low
4113 bound, reverse the range so it goes from zero to the low bound
4114 minus 1. */
4115 if (high == 0 && low && ! integer_zerop (low))
4116 {
4117 in_p = ! in_p;
4118 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4119 integer_one_node, 0);
4120 low = build_int_cst (arg0_type, 0);
4121 }
4122 }
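/* E.g. for unsigned X, "X >= 6" first becomes + [6, -]; since the high
bound is missing and the low bound is nonzero, the code above rewrites
it as the equivalent range - [0, 5].  */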
4123
4124 exp = arg0;
4125 continue;
4126
4127 case NEGATE_EXPR:
4128 /* (-x) IN [a,b] -> x in [-b, -a] */
4129 n_low = range_binop (MINUS_EXPR, exp_type,
4130 build_int_cst (exp_type, 0),
4131 0, high, 1);
4132 n_high = range_binop (MINUS_EXPR, exp_type,
4133 build_int_cst (exp_type, 0),
4134 0, low, 0);
4135 low = n_low, high = n_high;
4136 exp = arg0;
4137 continue;
4138
4139 case BIT_NOT_EXPR:
4140 /* ~ X -> -X - 1 */
4141 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4142 build_int_cst (exp_type, 1));
4143 continue;
4144
4145 case PLUS_EXPR: case MINUS_EXPR:
4146 if (TREE_CODE (arg1) != INTEGER_CST)
4147 break;
4148
4149 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4150 move a constant to the other side. */
4151 if (!TYPE_UNSIGNED (arg0_type)
4152 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4153 break;
4154
4155 /* If EXP is signed, any overflow in the computation is undefined,
4156 so we don't worry about it so long as our computations on
4157 the bounds don't overflow. For unsigned, overflow is defined
4158 and this is exactly the right thing. */
4159 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4160 arg0_type, low, 0, arg1, 0);
4161 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4162 arg0_type, high, 1, arg1, 0);
4163 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4164 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4165 break;
4166
4167 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4168 *strict_overflow_p = true;
4169
4170 /* Check for an unsigned range which has wrapped around the maximum
4171 value thus making n_high < n_low, and normalize it. */
4172 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4173 {
4174 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4175 integer_one_node, 0);
4176 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4177 integer_one_node, 0);
4178
4179 /* If the range is of the form +/- [ x+1, x ], we won't
4180 be able to normalize it. But then, it represents the
4181 whole range or the empty set, so make it
4182 +/- [ -, - ]. */
4183 if (tree_int_cst_equal (n_low, low)
4184 && tree_int_cst_equal (n_high, high))
4185 low = high = 0;
4186 else
4187 in_p = ! in_p;
4188 }
4189 else
4190 low = n_low, high = n_high;
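/* Worked example: for unsigned char X, "X + 10 <= 30" arrives here as
+ [0, 30] on X + 10; subtracting 10 gives n_low == 246 and n_high == 20,
which has wrapped around, so the code above normalizes it to
- [21, 245].  */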
4191
4192 exp = arg0;
4193 continue;
4194
4195 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4196 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4197 break;
4198
4199 if (! INTEGRAL_TYPE_P (arg0_type)
4200 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4201 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4202 break;
4203
4204 n_low = low, n_high = high;
4205
4206 if (n_low != 0)
4207 n_low = fold_convert (arg0_type, n_low);
4208
4209 if (n_high != 0)
4210 n_high = fold_convert (arg0_type, n_high);
4211
4212
4213 /* If we're converting arg0, which has an unsigned type, to the
4214 signed type of exp, we will be doing the comparison as unsigned.
4215 The tests above have already verified that LOW and HIGH
4216 are both positive.
4217
4218 So we have to ensure that we will handle large unsigned
4219 values the same way that the current signed bounds treat
4220 negative values. */
4221
4222 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4223 {
4224 tree high_positive;
4225 tree equiv_type = lang_hooks.types.type_for_mode
4226 (TYPE_MODE (arg0_type), 1);
4227
4228 /* A range without an upper bound is, naturally, unbounded.
4229 Since convert would have cropped a very large value, use
4230 the max value for the destination type. */
4231 high_positive
4232 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4233 : TYPE_MAX_VALUE (arg0_type);
4234
4235 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4236 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4237 fold_convert (arg0_type,
4238 high_positive),
4239 build_int_cst (arg0_type, 1));
4240
4241 /* If the low bound is specified, "and" the range with the
4242 range for which the original unsigned value will be
4243 positive. */
4244 if (low != 0)
4245 {
4246 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4247 1, n_low, n_high, 1,
4248 fold_convert (arg0_type,
4249 integer_zero_node),
4250 high_positive))
4251 break;
4252
4253 in_p = (n_in_p == in_p);
4254 }
4255 else
4256 {
4257 /* Otherwise, "or" the range with the range of the input
4258 that will be interpreted as negative. */
4259 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4260 0, n_low, n_high, 1,
4261 fold_convert (arg0_type,
4262 integer_zero_node),
4263 high_positive))
4264 break;
4265
4266 in_p = (in_p != n_in_p);
4267 }
4268 }
4269
4270 exp = arg0;
4271 low = n_low, high = n_high;
4272 continue;
4273
4274 default:
4275 break;
4276 }
4277
4278 break;
4279 }
4280
4281 /* If EXP is a constant, we can evaluate whether this is true or false. */
4282 if (TREE_CODE (exp) == INTEGER_CST)
4283 {
4284 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4285 exp, 0, low, 0))
4286 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4287 exp, 1, high, 1)));
4288 low = high = 0;
4289 exp = 0;
4290 }
4291
4292 *pin_p = in_p, *plow = low, *phigh = high;
4293 return exp;
4294 }
4295 \f
4296 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4297 type, TYPE, return an expression to test if EXP is in (or out of, depending
4298 on IN_P) the range. Return 0 if the test couldn't be created. */
4299
4300 static tree
4301 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4302 {
4303 tree etype = TREE_TYPE (exp);
4304 tree value;
4305
4306 #ifdef HAVE_canonicalize_funcptr_for_compare
4307 /* Disable this optimization for function pointer expressions
4308 on targets that require function pointer canonicalization. */
4309 if (HAVE_canonicalize_funcptr_for_compare
4310 && TREE_CODE (etype) == POINTER_TYPE
4311 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4312 return NULL_TREE;
4313 #endif
4314
4315 if (! in_p)
4316 {
4317 value = build_range_check (type, exp, 1, low, high);
4318 if (value != 0)
4319 return invert_truthvalue (value);
4320
4321 return 0;
4322 }
4323
4324 if (low == 0 && high == 0)
4325 return build_int_cst (type, 1);
4326
4327 if (low == 0)
4328 return fold_build2 (LE_EXPR, type, exp,
4329 fold_convert (etype, high));
4330
4331 if (high == 0)
4332 return fold_build2 (GE_EXPR, type, exp,
4333 fold_convert (etype, low));
4334
4335 if (operand_equal_p (low, high, 0))
4336 return fold_build2 (EQ_EXPR, type, exp,
4337 fold_convert (etype, low));
4338
4339 if (integer_zerop (low))
4340 {
4341 if (! TYPE_UNSIGNED (etype))
4342 {
4343 etype = lang_hooks.types.unsigned_type (etype);
4344 high = fold_convert (etype, high);
4345 exp = fold_convert (etype, exp);
4346 }
4347 return build_range_check (type, exp, 1, 0, high);
4348 }
4349
4350 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4351 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4352 {
4353 unsigned HOST_WIDE_INT lo;
4354 HOST_WIDE_INT hi;
4355 int prec;
4356
4357 prec = TYPE_PRECISION (etype);
4358 if (prec <= HOST_BITS_PER_WIDE_INT)
4359 {
4360 hi = 0;
4361 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4362 }
4363 else
4364 {
4365 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4366 lo = (unsigned HOST_WIDE_INT) -1;
4367 }
4368
4369 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4370 {
4371 if (TYPE_UNSIGNED (etype))
4372 {
4373 etype = lang_hooks.types.signed_type (etype);
4374 exp = fold_convert (etype, exp);
4375 }
4376 return fold_build2 (GT_EXPR, type, exp,
4377 build_int_cst (etype, 0));
4378 }
4379 }
4380
4381 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4382 This requires wrap-around arithmetic for the type of the expression. */
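/* For instance, a test for C in [2, 5] becomes "(unsigned) (C - 2) <= 3"
via the subtraction performed at the end of this function.  */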
4383 switch (TREE_CODE (etype))
4384 {
4385 case INTEGER_TYPE:
4386 /* There is no requirement that LOW be within the range of ETYPE
4387 if the latter is a subtype. It must, however, be within the base
4388 type of ETYPE. So be sure we do the subtraction in that type. */
4389 if (TREE_TYPE (etype))
4390 etype = TREE_TYPE (etype);
4391 break;
4392
4393 case ENUMERAL_TYPE:
4394 case BOOLEAN_TYPE:
4395 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4396 TYPE_UNSIGNED (etype));
4397 break;
4398
4399 default:
4400 break;
4401 }
4402
4403 /* If we don't have wrap-around arithmetic upfront, try to force it. */
4404 if (TREE_CODE (etype) == INTEGER_TYPE
4405 && !TYPE_OVERFLOW_WRAPS (etype))
4406 {
4407 tree utype, minv, maxv;
4408
4409 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4410 for the type in question, as we rely on this here. */
4411 utype = lang_hooks.types.unsigned_type (etype);
4412 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4413 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4414 integer_one_node, 1);
4415 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4416
4417 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4418 minv, 1, maxv, 1)))
4419 etype = utype;
4420 else
4421 return 0;
4422 }
4423
4424 high = fold_convert (etype, high);
4425 low = fold_convert (etype, low);
4426 exp = fold_convert (etype, exp);
4427
4428 value = const_binop (MINUS_EXPR, high, low, 0);
4429
4430 if (value != 0 && !TREE_OVERFLOW (value))
4431 return build_range_check (type,
4432 fold_build2 (MINUS_EXPR, etype, exp, low),
4433 1, build_int_cst (etype, 0), value);
4434
4435 return 0;
4436 }
4437 \f
4438 /* Return the predecessor of VAL in its type, handling the infinite case. */
4439
4440 static tree
4441 range_predecessor (tree val)
4442 {
4443 tree type = TREE_TYPE (val);
4444
4445 if (INTEGRAL_TYPE_P (type)
4446 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4447 return 0;
4448 else
4449 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4450 }
4451
4452 /* Return the successor of VAL in its type, handling the infinite case. */
4453
4454 static tree
4455 range_successor (tree val)
4456 {
4457 tree type = TREE_TYPE (val);
4458
4459 if (INTEGRAL_TYPE_P (type)
4460 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4461 return 0;
4462 else
4463 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4464 }
4465
4466 /* Given two ranges, see if we can merge them into one. Return 1 if we
4467 can, 0 if we can't. Set the output range into the specified parameters. */
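/* E.g. merging + [2, 10] with + [5, 20] (the AND of two "in" tests)
yields + [5, 10], the intersection of the two ranges.  */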
4468
4469 static int
4470 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4471 tree high0, int in1_p, tree low1, tree high1)
4472 {
4473 int no_overlap;
4474 int subset;
4475 int temp;
4476 tree tem;
4477 int in_p;
4478 tree low, high;
4479 int lowequal = ((low0 == 0 && low1 == 0)
4480 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4481 low0, 0, low1, 0)));
4482 int highequal = ((high0 == 0 && high1 == 0)
4483 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4484 high0, 1, high1, 1)));
4485
4486 /* Make range 0 be the range that starts first, or ends last if they
4487 start at the same value.  Swap them if that is not already the case. */
4488 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4489 low0, 0, low1, 0))
4490 || (lowequal
4491 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4492 high1, 1, high0, 1))))
4493 {
4494 temp = in0_p, in0_p = in1_p, in1_p = temp;
4495 tem = low0, low0 = low1, low1 = tem;
4496 tem = high0, high0 = high1, high1 = tem;
4497 }
4498
4499 /* Now flag two cases, whether the ranges are disjoint or whether the
4500 second range is totally subsumed in the first. Note that the tests
4501 below are simplified by the ones above. */
4502 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4503 high0, 1, low1, 0));
4504 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4505 high1, 1, high0, 1));
4506
4507 /* We now have four cases, depending on whether we are including or
4508 excluding the two ranges. */
4509 if (in0_p && in1_p)
4510 {
4511 /* If they don't overlap, the result is false. If the second range
4512 is a subset it is the result. Otherwise, the range is from the start
4513 of the second to the end of the first. */
4514 if (no_overlap)
4515 in_p = 0, low = high = 0;
4516 else if (subset)
4517 in_p = 1, low = low1, high = high1;
4518 else
4519 in_p = 1, low = low1, high = high0;
4520 }
4521
4522 else if (in0_p && ! in1_p)
4523 {
4524 /* If they don't overlap, the result is the first range. If they are
4525 equal, the result is false. If the second range is a subset of the
4526 first, and the ranges begin at the same place, we go from just after
4527 the end of the second range to the end of the first. If the second
4528 range is not a subset of the first, or if it is a subset and both
4529 ranges end at the same place, the range starts at the start of the
4530 first range and ends just before the second range.
4531 Otherwise, we can't describe this as a single range. */
4532 if (no_overlap)
4533 in_p = 1, low = low0, high = high0;
4534 else if (lowequal && highequal)
4535 in_p = 0, low = high = 0;
4536 else if (subset && lowequal)
4537 {
4538 low = range_successor (high1);
4539 high = high0;
4540 in_p = (low != 0);
4541 }
4542 else if (! subset || highequal)
4543 {
4544 low = low0;
4545 high = range_predecessor (low1);
4546 in_p = (high != 0);
4547 }
4548 else
4549 return 0;
4550 }
4551
4552 else if (! in0_p && in1_p)
4553 {
4554 /* If they don't overlap, the result is the second range. If the second
4555 is a subset of the first, the result is false. Otherwise,
4556 the range starts just after the first range and ends at the
4557 end of the second. */
4558 if (no_overlap)
4559 in_p = 1, low = low1, high = high1;
4560 else if (subset || highequal)
4561 in_p = 0, low = high = 0;
4562 else
4563 {
4564 low = range_successor (high0);
4565 high = high1;
4566 in_p = (low != 0);
4567 }
4568 }
4569
4570 else
4571 {
4572 /* The case where we are excluding both ranges. Here the complex case
4573 is if they don't overlap. In that case, the only time we have a
4574 range is if they are adjacent. If the second is a subset of the
4575 first, the result is the first. Otherwise, the range to exclude
4576 starts at the beginning of the first range and ends at the end of the
4577 second. */
4578 if (no_overlap)
4579 {
4580 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4581 range_successor (high0),
4582 1, low1, 0)))
4583 in_p = 0, low = low0, high = high1;
4584 else
4585 {
4586 /* Canonicalize - [min, x] into - [-, x]. */
4587 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4588 switch (TREE_CODE (TREE_TYPE (low0)))
4589 {
4590 case ENUMERAL_TYPE:
4591 if (TYPE_PRECISION (TREE_TYPE (low0))
4592 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4593 break;
4594 /* FALLTHROUGH */
4595 case INTEGER_TYPE:
4596 if (tree_int_cst_equal (low0,
4597 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4598 low0 = 0;
4599 break;
4600 case POINTER_TYPE:
4601 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4602 && integer_zerop (low0))
4603 low0 = 0;
4604 break;
4605 default:
4606 break;
4607 }
4608
4609 /* Canonicalize - [x, max] into - [x, -]. */
4610 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4611 switch (TREE_CODE (TREE_TYPE (high1)))
4612 {
4613 case ENUMERAL_TYPE:
4614 if (TYPE_PRECISION (TREE_TYPE (high1))
4615 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4616 break;
4617 /* FALLTHROUGH */
4618 case INTEGER_TYPE:
4619 if (tree_int_cst_equal (high1,
4620 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4621 high1 = 0;
4622 break;
4623 case POINTER_TYPE:
4624 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4625 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4626 high1, 1,
4627 integer_one_node, 1)))
4628 high1 = 0;
4629 break;
4630 default:
4631 break;
4632 }
4633
4634 /* The ranges might also be adjacent between the maximum and
4635 minimum values of the given type. For
4636 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4637 return + [x + 1, y - 1]. */
4638 if (low0 == 0 && high1 == 0)
4639 {
4640 low = range_successor (high0);
4641 high = range_predecessor (low1);
4642 if (low == 0 || high == 0)
4643 return 0;
4644
4645 in_p = 1;
4646 }
4647 else
4648 return 0;
4649 }
4650 }
4651 else if (subset)
4652 in_p = 0, low = low0, high = high0;
4653 else
4654 in_p = 0, low = low0, high = high1;
4655 }
4656
4657 *pin_p = in_p, *plow = low, *phigh = high;
4658 return 1;
4659 }
4660 \f
4661
4662 /* Subroutine of fold, looking inside expressions of the form
4663 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4664 of the COND_EXPR. This function is being used also to optimize
4665 A op B ? C : A, by reversing the comparison first.
4666
4667 Return a folded expression whose code is not a COND_EXPR
4668 anymore, or NULL_TREE if no folding opportunity is found. */
4669
4670 static tree
4671 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4672 {
4673 enum tree_code comp_code = TREE_CODE (arg0);
4674 tree arg00 = TREE_OPERAND (arg0, 0);
4675 tree arg01 = TREE_OPERAND (arg0, 1);
4676 tree arg1_type = TREE_TYPE (arg1);
4677 tree tem;
4678
4679 STRIP_NOPS (arg1);
4680 STRIP_NOPS (arg2);
4681
4682 /* If we have A op 0 ? A : -A, consider applying the following
4683 transformations:
4684
4685 A == 0? A : -A same as -A
4686 A != 0? A : -A same as A
4687 A >= 0? A : -A same as abs (A)
4688 A > 0? A : -A same as abs (A)
4689 A <= 0? A : -A same as -abs (A)
4690 A < 0? A : -A same as -abs (A)
4691
4692 None of these transformations work for modes with signed
4693 zeros. If A is +/-0, the first two transformations will
4694 change the sign of the result (from +0 to -0, or vice
4695 versa). The last four will fix the sign of the result,
4696 even though the original expressions could be positive or
4697 negative, depending on the sign of A.
4698
4699 Note that all these transformations are correct if A is
4700 NaN, since the two alternatives (A and -A) are also NaNs. */
4701 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4702 ? real_zerop (arg01)
4703 : integer_zerop (arg01))
4704 && ((TREE_CODE (arg2) == NEGATE_EXPR
4705 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4706 /* In the case that A is of the form X-Y, '-A' (arg2) may
4707 have already been folded to Y-X, check for that. */
4708 || (TREE_CODE (arg1) == MINUS_EXPR
4709 && TREE_CODE (arg2) == MINUS_EXPR
4710 && operand_equal_p (TREE_OPERAND (arg1, 0),
4711 TREE_OPERAND (arg2, 1), 0)
4712 && operand_equal_p (TREE_OPERAND (arg1, 1),
4713 TREE_OPERAND (arg2, 0), 0))))
4714 switch (comp_code)
4715 {
4716 case EQ_EXPR:
4717 case UNEQ_EXPR:
4718 tem = fold_convert (arg1_type, arg1);
4719 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4720 case NE_EXPR:
4721 case LTGT_EXPR:
4722 return pedantic_non_lvalue (fold_convert (type, arg1));
4723 case UNGE_EXPR:
4724 case UNGT_EXPR:
4725 if (flag_trapping_math)
4726 break;
4727 /* Fall through. */
4728 case GE_EXPR:
4729 case GT_EXPR:
4730 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4731 arg1 = fold_convert (lang_hooks.types.signed_type
4732 (TREE_TYPE (arg1)), arg1);
4733 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4734 return pedantic_non_lvalue (fold_convert (type, tem));
4735 case UNLE_EXPR:
4736 case UNLT_EXPR:
4737 if (flag_trapping_math)
4738 break;
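/* Fall through. */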
4739 case LE_EXPR:
4740 case LT_EXPR:
4741 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4742 arg1 = fold_convert (lang_hooks.types.signed_type
4743 (TREE_TYPE (arg1)), arg1);
4744 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4745 return negate_expr (fold_convert (type, tem));
4746 default:
4747 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4748 break;
4749 }
4750
4751 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4752 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4753 both transformations are correct when A is NaN: A != 0
4754 is then true, and A == 0 is false. */
4755
4756 if (integer_zerop (arg01) && integer_zerop (arg2))
4757 {
4758 if (comp_code == NE_EXPR)
4759 return pedantic_non_lvalue (fold_convert (type, arg1));
4760 else if (comp_code == EQ_EXPR)
4761 return build_int_cst (type, 0);
4762 }
4763
4764 /* Try some transformations of A op B ? A : B.
4765
4766 A == B? A : B same as B
4767 A != B? A : B same as A
4768 A >= B? A : B same as max (A, B)
4769 A > B? A : B same as max (B, A)
4770 A <= B? A : B same as min (A, B)
4771 A < B? A : B same as min (B, A)
4772
4773 As above, these transformations don't work in the presence
4774 of signed zeros. For example, if A and B are zeros of
4775 opposite sign, the first two transformations will change
4776 the sign of the result. In the last four, the original
4777 expressions give different results for (A=+0, B=-0) and
4778 (A=-0, B=+0), but the transformed expressions do not.
4779
4780 The first two transformations are correct if either A or B
4781 is a NaN. In the first transformation, the condition will
4782 be false, and B will indeed be chosen. In the case of the
4783 second transformation, the condition A != B will be true,
4784 and A will be chosen.
4785
4786 The conversions to max() and min() are not correct if B is
4787 a number and A is not. The conditions in the original
4788 expressions will be false, so all four give B. The min()
4789 and max() versions would give a NaN instead. */
4790 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4791 /* Avoid these transformations if the COND_EXPR may be used
4792 as an lvalue in the C++ front-end. PR c++/19199. */
4793 && (in_gimple_form
4794 || (strcmp (lang_hooks.name, "GNU C++") != 0
4795 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4796 || ! maybe_lvalue_p (arg1)
4797 || ! maybe_lvalue_p (arg2)))
4798 {
4799 tree comp_op0 = arg00;
4800 tree comp_op1 = arg01;
4801 tree comp_type = TREE_TYPE (comp_op0);
4802
4803 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4804 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4805 {
4806 comp_type = type;
4807 comp_op0 = arg1;
4808 comp_op1 = arg2;
4809 }
4810
4811 switch (comp_code)
4812 {
4813 case EQ_EXPR:
4814 return pedantic_non_lvalue (fold_convert (type, arg2));
4815 case NE_EXPR:
4816 return pedantic_non_lvalue (fold_convert (type, arg1));
4817 case LE_EXPR:
4818 case LT_EXPR:
4819 case UNLE_EXPR:
4820 case UNLT_EXPR:
4821 /* In C++ a ?: expression can be an lvalue, so put the
4822 operand which will be used if they are equal first
4823 so that we can convert this back to the
4824 corresponding COND_EXPR. */
4825 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4826 {
4827 comp_op0 = fold_convert (comp_type, comp_op0);
4828 comp_op1 = fold_convert (comp_type, comp_op1);
4829 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4830 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4831 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4832 return pedantic_non_lvalue (fold_convert (type, tem));
4833 }
4834 break;
4835 case GE_EXPR:
4836 case GT_EXPR:
4837 case UNGE_EXPR:
4838 case UNGT_EXPR:
4839 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4840 {
4841 comp_op0 = fold_convert (comp_type, comp_op0);
4842 comp_op1 = fold_convert (comp_type, comp_op1);
4843 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4844 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4845 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4846 return pedantic_non_lvalue (fold_convert (type, tem));
4847 }
4848 break;
4849 case UNEQ_EXPR:
4850 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4851 return pedantic_non_lvalue (fold_convert (type, arg2));
4852 break;
4853 case LTGT_EXPR:
4854 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4855 return pedantic_non_lvalue (fold_convert (type, arg1));
4856 break;
4857 default:
4858 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4859 break;
4860 }
4861 }
4862
4863 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4864 we might still be able to simplify this. For example,
4865 if C1 is one less or one more than C2, this might have started
4866 out as a MIN or MAX and been transformed by this function.
4867 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
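/* For instance, "A < 3 ? A : 2" has C1 == C2 + 1 and is turned back
into MIN_EXPR <A, 2> by the LT_EXPR case below.  */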
4868
4869 if (INTEGRAL_TYPE_P (type)
4870 && TREE_CODE (arg01) == INTEGER_CST
4871 && TREE_CODE (arg2) == INTEGER_CST)
4872 switch (comp_code)
4873 {
4874 case EQ_EXPR:
4875 /* We can replace A with C1 in this case. */
4876 arg1 = fold_convert (type, arg01);
4877 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4878
4879 case LT_EXPR:
4880 /* If C1 is C2 + 1, this is min(A, C2). */
4881 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4882 OEP_ONLY_CONST)
4883 && operand_equal_p (arg01,
4884 const_binop (PLUS_EXPR, arg2,
4885 build_int_cst (type, 1), 0),
4886 OEP_ONLY_CONST))
4887 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4888 type, arg1, arg2));
4889 break;
4890
4891 case LE_EXPR:
4892 /* If C1 is C2 - 1, this is min(A, C2). */
4893 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4894 OEP_ONLY_CONST)
4895 && operand_equal_p (arg01,
4896 const_binop (MINUS_EXPR, arg2,
4897 build_int_cst (type, 1), 0),
4898 OEP_ONLY_CONST))
4899 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4900 type, arg1, arg2));
4901 break;
4902
4903 case GT_EXPR:
4904 /* If C1 is C2 - 1, this is max(A, C2). */
4905 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4906 OEP_ONLY_CONST)
4907 && operand_equal_p (arg01,
4908 const_binop (MINUS_EXPR, arg2,
4909 build_int_cst (type, 1), 0),
4910 OEP_ONLY_CONST))
4911 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4912 type, arg1, arg2));
4913 break;
4914
4915 case GE_EXPR:
4916 /* If C1 is C2 + 1, this is max(A, C2). */
4917 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4918 OEP_ONLY_CONST)
4919 && operand_equal_p (arg01,
4920 const_binop (PLUS_EXPR, arg2,
4921 build_int_cst (type, 1), 0),
4922 OEP_ONLY_CONST))
4923 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4924 type, arg1, arg2));
4925 break;
4926 case NE_EXPR:
4927 break;
4928 default:
4929 gcc_unreachable ();
4930 }
4931
4932 return NULL_TREE;
4933 }
4934
4935
4936 \f
4937 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4938 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4939 #endif
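/* LOGICAL_OP_NON_SHORT_CIRCUIT is nonzero if it is worthwhile to rewrite
a short-circuiting TRUTH_ANDIF/TRUTH_ORIF into an unconditional
TRUTH_AND/TRUTH_OR when both operands are simple to evaluate; by default
this is done whenever branches are expensive (BRANCH_COST >= 2).
Targets may define it to override the default.  */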
4940
4941 /* EXP is some logical combination of boolean tests. See if we can
4942 merge it into some range test. Return the new tree if so. */
4943
4944 static tree
4945 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4946 {
4947 int or_op = (code == TRUTH_ORIF_EXPR
4948 || code == TRUTH_OR_EXPR);
4949 int in0_p, in1_p, in_p;
4950 tree low0, low1, low, high0, high1, high;
4951 bool strict_overflow_p = false;
4952 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4953 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4954 tree tem;
4955 const char * const warnmsg = G_("assuming signed overflow does not occur "
4956 "when simplifying range test");
4957
4958 /* If this is an OR operation, invert both sides; we will invert
4959 again at the end. */
4960 if (or_op)
4961 in0_p = ! in0_p, in1_p = ! in1_p;
4962
4963 /* If both expressions are the same, if we can merge the ranges, and we
4964 can build the range test, return it or it inverted. If one of the
4965 ranges is always true or always false, consider it to be the same
4966 expression as the other. */
4967 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4968 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4969 in1_p, low1, high1)
4970 && 0 != (tem = (build_range_check (type,
4971 lhs != 0 ? lhs
4972 : rhs != 0 ? rhs : integer_zero_node,
4973 in_p, low, high))))
4974 {
4975 if (strict_overflow_p)
4976 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4977 return or_op ? invert_truthvalue (tem) : tem;
4978 }
4979
4980 /* On machines where the branch cost is expensive, if this is a
4981 short-circuited branch and the underlying object on both sides
4982 is the same, make a non-short-circuit operation. */
4983 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4984 && lhs != 0 && rhs != 0
4985 && (code == TRUTH_ANDIF_EXPR
4986 || code == TRUTH_ORIF_EXPR)
4987 && operand_equal_p (lhs, rhs, 0))
4988 {
4989 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4990 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4991 which cases we can't do this. */
4992 if (simple_operand_p (lhs))
4993 return build2 (code == TRUTH_ANDIF_EXPR
4994 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4995 type, op0, op1);
4996
4997 else if (lang_hooks.decls.global_bindings_p () == 0
4998 && ! CONTAINS_PLACEHOLDER_P (lhs))
4999 {
5000 tree common = save_expr (lhs);
5001
5002 if (0 != (lhs = build_range_check (type, common,
5003 or_op ? ! in0_p : in0_p,
5004 low0, high0))
5005 && (0 != (rhs = build_range_check (type, common,
5006 or_op ? ! in1_p : in1_p,
5007 low1, high1))))
5008 {
5009 if (strict_overflow_p)
5010 fold_overflow_warning (warnmsg,
5011 WARN_STRICT_OVERFLOW_COMPARISON);
5012 return build2 (code == TRUTH_ANDIF_EXPR
5013 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5014 type, lhs, rhs);
5015 }
5016 }
5017 }
5018
5019 return 0;
5020 }
5021 \f
5022 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P-bit
5023 value.  Arrange things so the extra bits will be set to zero if and
5024 only if C is sign-extended to its full width.  If MASK is nonzero,
5025 it is an INTEGER_CST that should be AND'ed with the extra bits. */
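/* Worked example (assuming a 32-bit mode and no MASK): with P == 4,
C == 0xfffffffa (-6 sign-extended from 4 bits) maps to 0x0000000a, while
C == 0x0000000a, which is not sign-extended, maps to 0xfffffffa.  */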
5026
5027 static tree
5028 unextend (tree c, int p, int unsignedp, tree mask)
5029 {
5030 tree type = TREE_TYPE (c);
5031 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5032 tree temp;
5033
5034 if (p == modesize || unsignedp)
5035 return c;
5036
5037 /* We work by getting just the sign bit into the low-order bit, then
5038 into the high-order bit, then sign-extend. We then XOR that value
5039 with C. */
5040 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5041 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5042
5043 /* We must use a signed type in order to get an arithmetic right shift.
5044 However, we must also avoid introducing accidental overflows, so that
5045 a subsequent call to integer_zerop will work. Hence we must
5046 do the type conversion here. At this point, the constant is either
5047 zero or one, and the conversion to a signed type can never overflow.
5048 We could get an overflow if this conversion is done anywhere else. */
5049 if (TYPE_UNSIGNED (type))
5050 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
5051
5052 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5053 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5054 if (mask != 0)
5055 temp = const_binop (BIT_AND_EXPR, temp,
5056 fold_convert (TREE_TYPE (c), mask), 0);
5057 /* If necessary, convert the type back to match the type of C. */
5058 if (TYPE_UNSIGNED (type))
5059 temp = fold_convert (type, temp);
5060
5061 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
5062 }
5063 \f
5064 /* Find ways of folding logical expressions of LHS and RHS:
5065 Try to merge two comparisons to the same innermost item.
5066 Look for range tests like "ch >= '0' && ch <= '9'".
5067 Look for combinations of simple terms on machines with expensive branches
5068 and evaluate the RHS unconditionally.
5069
5070 For example, if we have p->a == 2 && p->b == 4 and we can make an
5071 object large enough to span both A and B, we can do this with a comparison
5072 against the object ANDed with the a mask.
5073
5074 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5075 operations to do this with one comparison.
5076
5077 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5078 function and the one above.
5079
5080 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5081 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5082
5083 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5084 two operands.
5085
5086 We return the simplified tree or 0 if no optimization is possible. */
5087
5088 static tree
5089 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5090 {
5091 /* If this is the "or" of two comparisons, we can do something if
5092 the comparisons are NE_EXPR. If this is the "and", we can do something
5093 if the comparisons are EQ_EXPR. I.e.,
5094 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5095
5096 WANTED_CODE is this operation code. For single bit fields, we can
5097 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5098 comparison for one-bit fields. */
5099
5100 enum tree_code wanted_code;
5101 enum tree_code lcode, rcode;
5102 tree ll_arg, lr_arg, rl_arg, rr_arg;
5103 tree ll_inner, lr_inner, rl_inner, rr_inner;
5104 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5105 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5106 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5107 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5108 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5109 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5110 enum machine_mode lnmode, rnmode;
5111 tree ll_mask, lr_mask, rl_mask, rr_mask;
5112 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5113 tree l_const, r_const;
5114 tree lntype, rntype, result;
5115 int first_bit, end_bit;
5116 int volatilep;
5117 tree orig_lhs = lhs, orig_rhs = rhs;
5118 enum tree_code orig_code = code;
5119
5120 /* Start by getting the comparison codes. Fail if anything is volatile.
5121 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5122 it were surrounded with a NE_EXPR. */
5123
5124 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5125 return 0;
5126
5127 lcode = TREE_CODE (lhs);
5128 rcode = TREE_CODE (rhs);
5129
5130 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5131 {
5132 lhs = build2 (NE_EXPR, truth_type, lhs,
5133 build_int_cst (TREE_TYPE (lhs), 0));
5134 lcode = NE_EXPR;
5135 }
5136
5137 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5138 {
5139 rhs = build2 (NE_EXPR, truth_type, rhs,
5140 build_int_cst (TREE_TYPE (rhs), 0));
5141 rcode = NE_EXPR;
5142 }
5143
5144 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5145 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5146 return 0;
5147
5148 ll_arg = TREE_OPERAND (lhs, 0);
5149 lr_arg = TREE_OPERAND (lhs, 1);
5150 rl_arg = TREE_OPERAND (rhs, 0);
5151 rr_arg = TREE_OPERAND (rhs, 1);
5152
5153 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5154 if (simple_operand_p (ll_arg)
5155 && simple_operand_p (lr_arg))
5156 {
5157 tree result;
5158 if (operand_equal_p (ll_arg, rl_arg, 0)
5159 && operand_equal_p (lr_arg, rr_arg, 0))
5160 {
5161 result = combine_comparisons (code, lcode, rcode,
5162 truth_type, ll_arg, lr_arg);
5163 if (result)
5164 return result;
5165 }
5166 else if (operand_equal_p (ll_arg, rr_arg, 0)
5167 && operand_equal_p (lr_arg, rl_arg, 0))
5168 {
5169 result = combine_comparisons (code, lcode,
5170 swap_tree_comparison (rcode),
5171 truth_type, ll_arg, lr_arg);
5172 if (result)
5173 return result;
5174 }
5175 }
5176
5177 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5178 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5179
5180 /* If the RHS can be evaluated unconditionally and its operands are
5181 simple, it wins to evaluate the RHS unconditionally on machines
5182 with expensive branches. In this case, this isn't a comparison
5183 that can be merged. Avoid doing this if the RHS is a floating-point
5184 comparison since those can trap. */
5185
5186 if (BRANCH_COST >= 2
5187 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5188 && simple_operand_p (rl_arg)
5189 && simple_operand_p (rr_arg))
5190 {
5191 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5192 if (code == TRUTH_OR_EXPR
5193 && lcode == NE_EXPR && integer_zerop (lr_arg)
5194 && rcode == NE_EXPR && integer_zerop (rr_arg)
5195 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5196 return build2 (NE_EXPR, truth_type,
5197 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5198 ll_arg, rl_arg),
5199 build_int_cst (TREE_TYPE (ll_arg), 0));
5200
5201 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5202 if (code == TRUTH_AND_EXPR
5203 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5204 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5205 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5206 return build2 (EQ_EXPR, truth_type,
5207 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5208 ll_arg, rl_arg),
5209 build_int_cst (TREE_TYPE (ll_arg), 0));
5210
5211 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5212 {
5213 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5214 return build2 (code, truth_type, lhs, rhs);
5215 return NULL_TREE;
5216 }
5217 }
5218
5219 /* See if the comparisons can be merged. Then get all the parameters for
5220 each side. */
5221
5222 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5223 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5224 return 0;
5225
5226 volatilep = 0;
5227 ll_inner = decode_field_reference (ll_arg,
5228 &ll_bitsize, &ll_bitpos, &ll_mode,
5229 &ll_unsignedp, &volatilep, &ll_mask,
5230 &ll_and_mask);
5231 lr_inner = decode_field_reference (lr_arg,
5232 &lr_bitsize, &lr_bitpos, &lr_mode,
5233 &lr_unsignedp, &volatilep, &lr_mask,
5234 &lr_and_mask);
5235 rl_inner = decode_field_reference (rl_arg,
5236 &rl_bitsize, &rl_bitpos, &rl_mode,
5237 &rl_unsignedp, &volatilep, &rl_mask,
5238 &rl_and_mask);
5239 rr_inner = decode_field_reference (rr_arg,
5240 &rr_bitsize, &rr_bitpos, &rr_mode,
5241 &rr_unsignedp, &volatilep, &rr_mask,
5242 &rr_and_mask);
5243
5244 /* The inner operation on the lhs of each
5245 comparison must be the same if we are to be able to do anything.
5246 Then see if we have constants. If not, the same must be true for
5247 the rhs's. */
5248 if (volatilep || ll_inner == 0 || rl_inner == 0
5249 || ! operand_equal_p (ll_inner, rl_inner, 0))
5250 return 0;
5251
5252 if (TREE_CODE (lr_arg) == INTEGER_CST
5253 && TREE_CODE (rr_arg) == INTEGER_CST)
5254 l_const = lr_arg, r_const = rr_arg;
5255 else if (lr_inner == 0 || rr_inner == 0
5256 || ! operand_equal_p (lr_inner, rr_inner, 0))
5257 return 0;
5258 else
5259 l_const = r_const = 0;
5260
5261 /* If either comparison code is not correct for our logical operation,
5262 fail. However, we can convert a one-bit comparison against zero into
5263 the opposite comparison against that bit being set in the field. */
5264
5265 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5266 if (lcode != wanted_code)
5267 {
5268 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5269 {
5270 /* Make the left operand unsigned, since we are only interested
5271 in the value of one bit. Otherwise we are doing the wrong
5272 thing below. */
5273 ll_unsignedp = 1;
5274 l_const = ll_mask;
5275 }
5276 else
5277 return 0;
5278 }
5279
5280 /* This is analogous to the code for l_const above. */
5281 if (rcode != wanted_code)
5282 {
5283 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5284 {
5285 rl_unsignedp = 1;
5286 r_const = rl_mask;
5287 }
5288 else
5289 return 0;
5290 }
5291
5292 /* See if we can find a mode that contains both fields being compared on
5293 the left. If we can't, fail. Otherwise, update all constants and masks
5294 to be relative to a field of that size. */
5295 first_bit = MIN (ll_bitpos, rl_bitpos);
5296 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5297 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5298 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5299 volatilep);
5300 if (lnmode == VOIDmode)
5301 return 0;
5302
5303 lnbitsize = GET_MODE_BITSIZE (lnmode);
5304 lnbitpos = first_bit & ~ (lnbitsize - 1);
5305 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5306 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5307
5308 if (BYTES_BIG_ENDIAN)
5309 {
5310 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5311 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5312 }
5313
5314 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5315 size_int (xll_bitpos), 0);
5316 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5317 size_int (xrl_bitpos), 0);
5318
5319 if (l_const)
5320 {
5321 l_const = fold_convert (lntype, l_const);
5322 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5323 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5324 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5325 fold_build1 (BIT_NOT_EXPR,
5326 lntype, ll_mask),
5327 0)))
5328 {
5329 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5330
5331 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5332 }
5333 }
5334 if (r_const)
5335 {
5336 r_const = fold_convert (lntype, r_const);
5337 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5338 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5339 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5340 fold_build1 (BIT_NOT_EXPR,
5341 lntype, rl_mask),
5342 0)))
5343 {
5344 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5345
5346 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5347 }
5348 }
5349
5350 /* If the right sides are not constant, do the same for them. Also,
5351 disallow this optimization if a size or signedness mismatch occurs
5352 between the left and right sides. */
5353 if (l_const == 0)
5354 {
5355 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5356 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5357 /* Make sure the two fields on the right
5358 correspond to the left without being swapped. */
5359 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5360 return 0;
5361
5362 first_bit = MIN (lr_bitpos, rr_bitpos);
5363 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5364 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5365 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5366 volatilep);
5367 if (rnmode == VOIDmode)
5368 return 0;
5369
5370 rnbitsize = GET_MODE_BITSIZE (rnmode);
5371 rnbitpos = first_bit & ~ (rnbitsize - 1);
5372 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5373 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5374
5375 if (BYTES_BIG_ENDIAN)
5376 {
5377 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5378 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5379 }
5380
5381 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5382 size_int (xlr_bitpos), 0);
5383 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5384 size_int (xrr_bitpos), 0);
5385
5386 /* Make a mask that corresponds to both fields being compared.
5387 Do this for both items being compared. If the operands are the
5388 same size and the bits being compared are in the same position
5389 then we can do this by masking both and comparing the masked
5390 results. */
5391 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5392 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5393 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5394 {
5395 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5396 ll_unsignedp || rl_unsignedp);
5397 if (! all_ones_mask_p (ll_mask, lnbitsize))
5398 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5399
5400 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5401 lr_unsignedp || rr_unsignedp);
5402 if (! all_ones_mask_p (lr_mask, rnbitsize))
5403 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5404
5405 return build2 (wanted_code, truth_type, lhs, rhs);
5406 }
5407
5408 /* There is still another way we can do something: If both pairs of
5409 fields being compared are adjacent, we may be able to make a wider
5410 field containing them both.
5411
5412 Note that we still must mask the lhs/rhs expressions. Furthermore,
5413 the mask must be shifted to account for the shift done by
5414 make_bit_field_ref. */
5415 if ((ll_bitsize + ll_bitpos == rl_bitpos
5416 && lr_bitsize + lr_bitpos == rr_bitpos)
5417 || (ll_bitpos == rl_bitpos + rl_bitsize
5418 && lr_bitpos == rr_bitpos + rr_bitsize))
5419 {
5420 tree type;
5421
5422 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5423 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5424 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5425 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5426
5427 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5428 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5429 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5430 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5431
5432 /* Convert to the smaller type before masking out unwanted bits. */
5433 type = lntype;
5434 if (lntype != rntype)
5435 {
5436 if (lnbitsize > rnbitsize)
5437 {
5438 lhs = fold_convert (rntype, lhs);
5439 ll_mask = fold_convert (rntype, ll_mask);
5440 type = rntype;
5441 }
5442 else if (lnbitsize < rnbitsize)
5443 {
5444 rhs = fold_convert (lntype, rhs);
5445 lr_mask = fold_convert (lntype, lr_mask);
5446 type = lntype;
5447 }
5448 }
5449
5450 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5451 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5452
5453 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5454 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5455
5456 return build2 (wanted_code, truth_type, lhs, rhs);
5457 }
5458
5459 return 0;
5460 }
5461
5462 /* Handle the case of comparisons with constants. If there is something in
5463 common between the masks, those bits of the constants must be the same.
5464 If not, the condition is always false. Test for this to avoid generating
5465 incorrect code below. */
5466 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5467 if (! integer_zerop (result)
5468 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5469 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5470 {
5471 if (wanted_code == NE_EXPR)
5472 {
5473 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5474 return constant_boolean_node (true, truth_type);
5475 }
5476 else
5477 {
5478 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5479 return constant_boolean_node (false, truth_type);
5480 }
5481 }
5482
5483 /* Construct the expression we will return. First get the component
5484 reference we will make. Unless the mask is all ones the width of
5485 that field, perform the mask operation. Then compare with the
5486 merged constant. */
5487 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5488 ll_unsignedp || rl_unsignedp);
5489
5490 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5491 if (! all_ones_mask_p (ll_mask, lnbitsize))
5492 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5493
5494 return build2 (wanted_code, truth_type, result,
5495 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5496 }
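
/* Editorial illustration (not part of GCC; the function name and masks
   are hypothetical): a minimal sketch of the bit-field merging done
   above, written at the C source level. Two tests of disjoint fields
   of the same word against constants fold into one test using the IOR
   of the masks and the IOR of the (shifted) constants. */

static int
example_merged_bitfield_test (unsigned word)
{
  int separate = (word & 0x00ff) == 0x12 && (word & 0xff00) == 0x3400;
  int merged = (word & 0xffff) == 0x3412;	/* what the folder builds */
  return separate == merged;			/* always 1 */
}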
5497 \f
5498 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5499 constant. */
5500
5501 static tree
5502 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5503 {
5504 tree arg0 = op0;
5505 enum tree_code op_code;
5506 tree comp_const = op1;
5507 tree minmax_const;
5508 int consts_equal, consts_lt;
5509 tree inner;
5510
5511 STRIP_SIGN_NOPS (arg0);
5512
5513 op_code = TREE_CODE (arg0);
5514 minmax_const = TREE_OPERAND (arg0, 1);
5515 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5516 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5517 inner = TREE_OPERAND (arg0, 0);
5518
5519 /* If something does not permit us to optimize, return NULL_TREE. */
5520 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5521 || TREE_CODE (comp_const) != INTEGER_CST
5522 || TREE_OVERFLOW (comp_const)
5523 || TREE_CODE (minmax_const) != INTEGER_CST
5524 || TREE_OVERFLOW (minmax_const))
5525 return NULL_TREE;
5526
5527 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5528 and GT_EXPR, doing the rest with recursive calls using logical
5529 simplifications. */
5530 switch (code)
5531 {
5532 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5533 {
5534 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5535 type, op0, op1);
5536 if (tem)
5537 return invert_truthvalue (tem);
5538 return NULL_TREE;
5539 }
5540
5541 case GE_EXPR:
5542 return
5543 fold_build2 (TRUTH_ORIF_EXPR, type,
5544 optimize_minmax_comparison
5545 (EQ_EXPR, type, arg0, comp_const),
5546 optimize_minmax_comparison
5547 (GT_EXPR, type, arg0, comp_const));
5548
5549 case EQ_EXPR:
5550 if (op_code == MAX_EXPR && consts_equal)
5551 /* MAX (X, 0) == 0 -> X <= 0 */
5552 return fold_build2 (LE_EXPR, type, inner, comp_const);
5553
5554 else if (op_code == MAX_EXPR && consts_lt)
5555 /* MAX (X, 0) == 5 -> X == 5 */
5556 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5557
5558 else if (op_code == MAX_EXPR)
5559 /* MAX (X, 0) == -1 -> false */
5560 return omit_one_operand (type, integer_zero_node, inner);
5561
5562 else if (consts_equal)
5563 /* MIN (X, 0) == 0 -> X >= 0 */
5564 return fold_build2 (GE_EXPR, type, inner, comp_const);
5565
5566 else if (consts_lt)
5567 /* MIN (X, 0) == 5 -> false */
5568 return omit_one_operand (type, integer_zero_node, inner);
5569
5570 else
5571 /* MIN (X, 0) == -1 -> X == -1 */
5572 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5573
5574 case GT_EXPR:
5575 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5576 /* MAX (X, 0) > 0 -> X > 0
5577 MAX (X, 0) > 5 -> X > 5 */
5578 return fold_build2 (GT_EXPR, type, inner, comp_const);
5579
5580 else if (op_code == MAX_EXPR)
5581 /* MAX (X, 0) > -1 -> true */
5582 return omit_one_operand (type, integer_one_node, inner);
5583
5584 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5585 /* MIN (X, 0) > 0 -> false
5586 MIN (X, 0) > 5 -> false */
5587 return omit_one_operand (type, integer_zero_node, inner);
5588
5589 else
5590 /* MIN (X, 0) > -1 -> X > -1 */
5591 return fold_build2 (GT_EXPR, type, inner, comp_const);
5592
5593 default:
5594 return NULL_TREE;
5595 }
5596 }
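
/* Editorial illustration (not part of GCC; name hypothetical): the
   EQ_EXPR rewrites above, checked at the C source level. */

static int
example_minmax_compare (int x)
{
  int max_x_0 = x > 0 ? x : 0;		/* MAX (x, 0) */
  /* MAX (x, 0) == 0 -> x <= 0, and MAX (x, 0) == -1 is always false. */
  return (max_x_0 == 0) == (x <= 0)
	 && (max_x_0 == -1) == 0;
}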
5597 \f
5598 /* T is an integer expression that is being multiplied, divided, or taken a
5599 modulus (CODE says which and what kind of divide or modulus) by a
5600 constant C. See if we can eliminate that operation by folding it with
5601 other operations already in T. WIDE_TYPE, if non-null, is a type that
5602 should be used for the computation if wider than our type.
5603
5604 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5605 (X * 2) + (Y * 4). We must, however, be assured that either the original
5606 expression would not overflow or that overflow is undefined for the type
5607 in the language in question.
5608
5609 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5610 the machine has a multiply-accumulate insn or that this is part of an
5611 addressing calculation.
5612
5613 If we return a non-null expression, it is an equivalent form of the
5614 original computation, but need not be in the original type.
5615
5616 We set *STRICT_OVERFLOW_P to true if the return value depends on
5617 signed overflow being undefined. Otherwise we do not change
5618 *STRICT_OVERFLOW_P. */
5619
5620 static tree
5621 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5622 bool *strict_overflow_p)
5623 {
5624 /* To avoid exponential search depth, refuse to allow recursion past
5625 three levels. Beyond that (1) it's highly unlikely that we'll find
5626 something interesting and (2) we've probably processed it before
5627 when we built the inner expression. */
5628
5629 static int depth;
5630 tree ret;
5631
5632 if (depth > 3)
5633 return NULL;
5634
5635 depth++;
5636 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5637 depth--;
5638
5639 return ret;
5640 }
5641
5642 static tree
5643 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5644 bool *strict_overflow_p)
5645 {
5646 tree type = TREE_TYPE (t);
5647 enum tree_code tcode = TREE_CODE (t);
5648 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5649 > GET_MODE_SIZE (TYPE_MODE (type)))
5650 ? wide_type : type);
5651 tree t1, t2;
5652 int same_p = tcode == code;
5653 tree op0 = NULL_TREE, op1 = NULL_TREE;
5654 bool sub_strict_overflow_p;
5655
5656 /* Don't deal with constants of zero here; they confuse the code below. */
5657 if (integer_zerop (c))
5658 return NULL_TREE;
5659
5660 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5661 op0 = TREE_OPERAND (t, 0);
5662
5663 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5664 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5665
5666 /* Note that we need not handle conditional operations here since fold
5667 already handles those cases. So just do arithmetic here. */
5668 switch (tcode)
5669 {
5670 case INTEGER_CST:
5671 /* For a constant, we can always simplify if we are a multiply
5672 or (for divide and modulus) if it is a multiple of our constant. */
5673 if (code == MULT_EXPR
5674 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5675 return const_binop (code, fold_convert (ctype, t),
5676 fold_convert (ctype, c), 0);
5677 break;
5678
5679 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5680 /* If op0 is an expression ... */
5681 if ((COMPARISON_CLASS_P (op0)
5682 || UNARY_CLASS_P (op0)
5683 || BINARY_CLASS_P (op0)
5684 || VL_EXP_CLASS_P (op0)
5685 || EXPRESSION_CLASS_P (op0))
5686 /* ... and is unsigned, and its type is smaller than ctype,
5687 then we cannot pass through as widening. */
5688 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5689 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5690 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5691 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5692 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5693 /* ... or this is a truncation (t is narrower than op0),
5694 then we cannot pass through this narrowing. */
5695 || (GET_MODE_SIZE (TYPE_MODE (type))
5696 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5697 /* ... or signedness changes for division or modulus,
5698 then we cannot pass through this conversion. */
5699 || (code != MULT_EXPR
5700 && (TYPE_UNSIGNED (ctype)
5701 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5702 break;
5703
5704 /* Pass the constant down and see if we can make a simplification. If
5705 we can, replace this expression with the inner simplification for
5706 possible later conversion to our or some other type. */
5707 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5708 && TREE_CODE (t2) == INTEGER_CST
5709 && !TREE_OVERFLOW (t2)
5710 && (0 != (t1 = extract_muldiv (op0, t2, code,
5711 code == MULT_EXPR
5712 ? ctype : NULL_TREE,
5713 strict_overflow_p))))
5714 return t1;
5715 break;
5716
5717 case ABS_EXPR:
5718 /* If widening the type changes it from signed to unsigned, then we
5719 must avoid building ABS_EXPR itself as unsigned. */
5720 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5721 {
5722 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5723 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5724 != 0)
5725 {
5726 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5727 return fold_convert (ctype, t1);
5728 }
5729 break;
5730 }
5731 /* FALLTHROUGH */
5732 case NEGATE_EXPR:
5733 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5734 != 0)
5735 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5736 break;
5737
5738 case MIN_EXPR: case MAX_EXPR:
5739 /* If widening the type changes the signedness, then we can't perform
5740 this optimization as that changes the result. */
5741 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5742 break;
5743
5744 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5745 sub_strict_overflow_p = false;
5746 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5747 &sub_strict_overflow_p)) != 0
5748 && (t2 = extract_muldiv (op1, c, code, wide_type,
5749 &sub_strict_overflow_p)) != 0)
5750 {
5751 if (tree_int_cst_sgn (c) < 0)
5752 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5753 if (sub_strict_overflow_p)
5754 *strict_overflow_p = true;
5755 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5756 fold_convert (ctype, t2));
5757 }
5758 break;
5759
5760 case LSHIFT_EXPR: case RSHIFT_EXPR:
5761 /* If the second operand is constant, this is a multiplication
5762 or floor division by a power of two, so we can treat it that
5763 way unless the multiplier or divisor overflows. Signed
5764 left-shift overflow is implementation-defined rather than
5765 undefined in C90, so do not convert signed left shift into
5766 multiplication. */
5767 if (TREE_CODE (op1) == INTEGER_CST
5768 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5769 /* const_binop may not detect overflow correctly,
5770 so check for it explicitly here. */
5771 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5772 && TREE_INT_CST_HIGH (op1) == 0
5773 && 0 != (t1 = fold_convert (ctype,
5774 const_binop (LSHIFT_EXPR,
5775 size_one_node,
5776 op1, 0)))
5777 && !TREE_OVERFLOW (t1))
5778 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5779 ? MULT_EXPR : FLOOR_DIV_EXPR,
5780 ctype, fold_convert (ctype, op0), t1),
5781 c, code, wide_type, strict_overflow_p);
5782 break;
5783
5784 case PLUS_EXPR: case MINUS_EXPR:
5785 /* See if we can eliminate the operation on both sides. If we can, we
5786 can return a new PLUS or MINUS. If we can't, the only remaining
5787 cases where we can do anything are if the second operand is a
5788 constant. */
5789 sub_strict_overflow_p = false;
5790 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5791 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5792 if (t1 != 0 && t2 != 0
5793 && (code == MULT_EXPR
5794 /* If not multiplication, we can only do this if both operands
5795 are divisible by c. */
5796 || (multiple_of_p (ctype, op0, c)
5797 && multiple_of_p (ctype, op1, c))))
5798 {
5799 if (sub_strict_overflow_p)
5800 *strict_overflow_p = true;
5801 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5802 fold_convert (ctype, t2));
5803 }
5804
5805 /* If this was a subtraction, negate OP1 and set it to be an addition.
5806 This simplifies the logic below. */
5807 if (tcode == MINUS_EXPR)
5808 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5809
5810 if (TREE_CODE (op1) != INTEGER_CST)
5811 break;
5812
5813 /* If either OP1 or C are negative, this optimization is not safe for
5814 some of the division and remainder types while for others we need
5815 to change the code. */
5816 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5817 {
5818 if (code == CEIL_DIV_EXPR)
5819 code = FLOOR_DIV_EXPR;
5820 else if (code == FLOOR_DIV_EXPR)
5821 code = CEIL_DIV_EXPR;
5822 else if (code != MULT_EXPR
5823 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5824 break;
5825 }
5826
5827 /* If it's a multiply or a division/modulus operation of a multiple
5828 of our constant, do the operation and verify it doesn't overflow. */
5829 if (code == MULT_EXPR
5830 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5831 {
5832 op1 = const_binop (code, fold_convert (ctype, op1),
5833 fold_convert (ctype, c), 0);
5834 /* We allow the constant to overflow with wrapping semantics. */
5835 if (op1 == 0
5836 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5837 break;
5838 }
5839 else
5840 break;
5841
5842 /* If we have an unsigned type that is not a sizetype, we cannot widen
5843 the operation since it will change the result if the original
5844 computation overflowed. */
5845 if (TYPE_UNSIGNED (ctype)
5846 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5847 && ctype != type)
5848 break;
5849
5850 /* If we were able to eliminate our operation from the first side,
5851 apply our operation to the second side and reform the PLUS. */
5852 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5853 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5854
5855 /* The last case is if we are a multiply. In that case, we can
5856 apply the distributive law to commute the multiply and addition
5857 if the multiplication of the constants doesn't overflow. */
5858 if (code == MULT_EXPR)
5859 return fold_build2 (tcode, ctype,
5860 fold_build2 (code, ctype,
5861 fold_convert (ctype, op0),
5862 fold_convert (ctype, c)),
5863 op1);
5864
5865 break;
5866
5867 case MULT_EXPR:
5868 /* We have a special case here if we are doing something like
5869 (C * 8) % 4 since we know that's zero. */
5870 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5871 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5872 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5873 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5874 return omit_one_operand (type, integer_zero_node, op0);
5875
5876 /* ... fall through ... */
5877
5878 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5879 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5880 /* If we can extract our operation from the LHS, do so and return a
5881 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5882 do something only if the second operand is a constant. */
5883 if (same_p
5884 && (t1 = extract_muldiv (op0, c, code, wide_type,
5885 strict_overflow_p)) != 0)
5886 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5887 fold_convert (ctype, op1));
5888 else if (tcode == MULT_EXPR && code == MULT_EXPR
5889 && (t1 = extract_muldiv (op1, c, code, wide_type,
5890 strict_overflow_p)) != 0)
5891 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5892 fold_convert (ctype, t1));
5893 else if (TREE_CODE (op1) != INTEGER_CST)
5894 return 0;
5895
5896 /* If these are the same operation types, we can associate them
5897 assuming no overflow. */
5898 if (tcode == code
5899 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5900 fold_convert (ctype, c), 0))
5901 && !TREE_OVERFLOW (t1))
5902 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5903
5904 /* If these operations "cancel" each other, we have the main
5905 optimizations of this pass, which occur when either constant is a
5906 multiple of the other, in which case we replace this with either an
5907 operation of CODE or TCODE.
5908
5909 If we have an unsigned type that is not a sizetype, we cannot do
5910 this since it will change the result if the original computation
5911 overflowed. */
5912 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5913 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5914 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5915 || (tcode == MULT_EXPR
5916 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5917 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5918 {
5919 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5920 {
5921 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5922 *strict_overflow_p = true;
5923 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5924 fold_convert (ctype,
5925 const_binop (TRUNC_DIV_EXPR,
5926 op1, c, 0)));
5927 }
5928 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5929 {
5930 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5931 *strict_overflow_p = true;
5932 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5933 fold_convert (ctype,
5934 const_binop (TRUNC_DIV_EXPR,
5935 c, op1, 0)));
5936 }
5937 }
5938 break;
5939
5940 default:
5941 break;
5942 }
5943
5944 return 0;
5945 }
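
/* Editorial illustration (not part of GCC; name hypothetical): the kind
   of source-level identity extract_muldiv exploits, assuming none of
   the intermediate computations overflows (signed overflow being
   undefined). */

static int
example_extract_muldiv (int x, int y)
{
  /* (x*8 + y*16) / 4 folds to x*2 + y*4, eliminating the division. */
  return (x * 8 + y * 16) / 4 == x * 2 + y * 4;
}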
5946 \f
5947 /* Return a node which has the indicated constant VALUE (either 0 or
5948 1), and is of the indicated TYPE. */
5949
5950 tree
5951 constant_boolean_node (int value, tree type)
5952 {
5953 if (type == integer_type_node)
5954 return value ? integer_one_node : integer_zero_node;
5955 else if (type == boolean_type_node)
5956 return value ? boolean_true_node : boolean_false_node;
5957 else
5958 return build_int_cst (type, value);
5959 }
5960
5961
5962 /* Return true if expr looks like an ARRAY_REF and set base and
5963 offset to the appropriate trees. If there is no offset,
5964 offset is set to NULL_TREE. Base will be canonicalized to
5965 something you can get the element type from using
5966 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5967 in bytes to the base. */
5968
5969 static bool
5970 extract_array_ref (tree expr, tree *base, tree *offset)
5971 {
5972 /* One canonical form is a PLUS_EXPR with the first
5973 argument being an ADDR_EXPR with a possible NOP_EXPR
5974 attached. */
5975 if (TREE_CODE (expr) == PLUS_EXPR)
5976 {
5977 tree op0 = TREE_OPERAND (expr, 0);
5978 tree inner_base, dummy1;
5979 /* Strip NOP_EXPRs here because the C frontends and/or
5980 folders may present us with (int *)&x.a + 4B. */
5981 STRIP_NOPS (op0);
5982 if (extract_array_ref (op0, &inner_base, &dummy1))
5983 {
5984 *base = inner_base;
5985 if (dummy1 == NULL_TREE)
5986 *offset = TREE_OPERAND (expr, 1);
5987 else
5988 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5989 dummy1, TREE_OPERAND (expr, 1));
5990 return true;
5991 }
5992 }
5993 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5994 which we transform into an ADDR_EXPR with appropriate
5995 offset. For other arguments to the ADDR_EXPR we assume
5996 zero offset and as such do not care about the ADDR_EXPR
5997 type and strip possible nops from it. */
5998 else if (TREE_CODE (expr) == ADDR_EXPR)
5999 {
6000 tree op0 = TREE_OPERAND (expr, 0);
6001 if (TREE_CODE (op0) == ARRAY_REF)
6002 {
6003 tree idx = TREE_OPERAND (op0, 1);
6004 *base = TREE_OPERAND (op0, 0);
6005 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
6006 array_ref_element_size (op0));
6007 }
6008 else
6009 {
6010 /* Handle array-to-pointer decay as &a. */
6011 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
6012 *base = TREE_OPERAND (expr, 0);
6013 else
6014 *base = expr;
6015 *offset = NULL_TREE;
6016 }
6017 return true;
6018 }
6019 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
6020 else if (SSA_VAR_P (expr)
6021 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
6022 {
6023 *base = expr;
6024 *offset = NULL_TREE;
6025 return true;
6026 }
6027
6028 return false;
6029 }
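
/* Editorial illustration (not part of GCC; name hypothetical): the
   base/offset decomposition computed above. For &a[i] the base is 'a'
   and the byte offset is i times the element size. */

static int
example_array_ref_decomposition (int *a, long i)
{
  return (char *) &a[i] == (char *) a + i * (long) sizeof *a;
}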
6030
6031
6032 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6033 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6034 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6035 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6036 COND is the first argument to CODE; otherwise (as in the example
6037 given here), it is the second argument. TYPE is the type of the
6038 original expression. Return NULL_TREE if no simplification is
6039 possible. */
6040
6041 static tree
6042 fold_binary_op_with_conditional_arg (enum tree_code code,
6043 tree type, tree op0, tree op1,
6044 tree cond, tree arg, int cond_first_p)
6045 {
6046 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6047 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6048 tree test, true_value, false_value;
6049 tree lhs = NULL_TREE;
6050 tree rhs = NULL_TREE;
6051
6052 /* This transformation is only worthwhile if we don't have to wrap
6053 arg in a SAVE_EXPR, and the operation can be simplified on at least
6054 one of the branches once it's pushed inside the COND_EXPR. */
6055 if (!TREE_CONSTANT (arg))
6056 return NULL_TREE;
6057
6058 if (TREE_CODE (cond) == COND_EXPR)
6059 {
6060 test = TREE_OPERAND (cond, 0);
6061 true_value = TREE_OPERAND (cond, 1);
6062 false_value = TREE_OPERAND (cond, 2);
6063 /* If this operand throws an exception, then it does not make
6064 sense to try to perform a logical or arithmetic operation
6065 involving it. */
6066 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6067 lhs = true_value;
6068 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6069 rhs = false_value;
6070 }
6071 else
6072 {
6073 tree testtype = TREE_TYPE (cond);
6074 test = cond;
6075 true_value = constant_boolean_node (true, testtype);
6076 false_value = constant_boolean_node (false, testtype);
6077 }
6078
6079 arg = fold_convert (arg_type, arg);
6080 if (lhs == 0)
6081 {
6082 true_value = fold_convert (cond_type, true_value);
6083 if (cond_first_p)
6084 lhs = fold_build2 (code, type, true_value, arg);
6085 else
6086 lhs = fold_build2 (code, type, arg, true_value);
6087 }
6088 if (rhs == 0)
6089 {
6090 false_value = fold_convert (cond_type, false_value);
6091 if (cond_first_p)
6092 rhs = fold_build2 (code, type, false_value, arg);
6093 else
6094 rhs = fold_build2 (code, type, arg, false_value);
6095 }
6096
6097 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6098 return fold_convert (type, test);
6099 }
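
/* Editorial illustration (not part of GCC; name hypothetical): the
   distribution performed above, with a constant ARG as required by the
   TREE_CONSTANT check. */

static int
example_cond_arg_distribution (int b, int x, int y)
{
  /* 1 + (b ? x : y) becomes b ? (1 + x) : (1 + y). */
  return (1 + (b ? x : y)) == (b ? 1 + x : 1 + y);
}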
6100
6101 \f
6102 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6103
6104 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6105 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6106 ADDEND is the same as X.
6107
6108 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6109 and finite. The problematic cases are when X is zero, and its mode
6110 has signed zeros. In the case of rounding towards -infinity,
6111 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6112 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6113
6114 static bool
6115 fold_real_zero_addition_p (tree type, tree addend, int negate)
6116 {
6117 if (!real_zerop (addend))
6118 return false;
6119
6120 /* Don't allow the fold with -fsignaling-nans. */
6121 if (HONOR_SNANS (TYPE_MODE (type)))
6122 return false;
6123
6124 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6125 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6126 return true;
6127
6128 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6129 if (TREE_CODE (addend) == REAL_CST
6130 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6131 negate = !negate;
6132
6133 /* The mode has signed zeros, and we have to honor their sign.
6134 In this situation, there is only one case we can return true for.
6135 X - 0 is the same as X unless rounding towards -infinity is
6136 supported. */
6137 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6138 }
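
/* Editorial illustration (not part of GCC; name hypothetical): why only
   X - 0.0 is foldable when signed zeros are honored. In the default
   rounding mode, -0.0 - 0.0 is -0.0 but -0.0 + 0.0 is +0.0, so X + 0.0
   does not preserve a negative zero. */

static double
example_zero_addition (double x)
{
  return x - 0.0;	/* same value as x, even for x == -0.0 */
}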
6139
6140 /* Subroutine of fold() that checks comparisons of built-in math
6141 functions against real constants.
6142
6143 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6144 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6145 is the type of the result and ARG0 and ARG1 are the operands of the
6146 comparison. ARG1 must be a TREE_REAL_CST.
6147
6148 The function returns the constant folded tree if a simplification
6149 can be made, and NULL_TREE otherwise. */
6150
6151 static tree
6152 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6153 tree type, tree arg0, tree arg1)
6154 {
6155 REAL_VALUE_TYPE c;
6156
6157 if (BUILTIN_SQRT_P (fcode))
6158 {
6159 tree arg = CALL_EXPR_ARG (arg0, 0);
6160 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6161
6162 c = TREE_REAL_CST (arg1);
6163 if (REAL_VALUE_NEGATIVE (c))
6164 {
6165 /* sqrt(x) < y is always false, if y is negative. */
6166 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6167 return omit_one_operand (type, integer_zero_node, arg);
6168
6169 /* sqrt(x) > y is always true, if y is negative and we
6170 don't care about NaNs, i.e. negative values of x. */
6171 if (code == NE_EXPR || !HONOR_NANS (mode))
6172 return omit_one_operand (type, integer_one_node, arg);
6173
6174 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6175 return fold_build2 (GE_EXPR, type, arg,
6176 build_real (TREE_TYPE (arg), dconst0));
6177 }
6178 else if (code == GT_EXPR || code == GE_EXPR)
6179 {
6180 REAL_VALUE_TYPE c2;
6181
6182 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6183 real_convert (&c2, mode, &c2);
6184
6185 if (REAL_VALUE_ISINF (c2))
6186 {
6187 /* sqrt(x) > y is x == +Inf, when y is very large. */
6188 if (HONOR_INFINITIES (mode))
6189 return fold_build2 (EQ_EXPR, type, arg,
6190 build_real (TREE_TYPE (arg), c2));
6191
6192 /* sqrt(x) > y is always false, when y is very large
6193 and we don't care about infinities. */
6194 return omit_one_operand (type, integer_zero_node, arg);
6195 }
6196
6197 /* sqrt(x) > c is the same as x > c*c. */
6198 return fold_build2 (code, type, arg,
6199 build_real (TREE_TYPE (arg), c2));
6200 }
6201 else if (code == LT_EXPR || code == LE_EXPR)
6202 {
6203 REAL_VALUE_TYPE c2;
6204
6205 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6206 real_convert (&c2, mode, &c2);
6207
6208 if (REAL_VALUE_ISINF (c2))
6209 {
6210 /* sqrt(x) < y is always true, when y is a very large
6211 value and we don't care about NaNs or Infinities. */
6212 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6213 return omit_one_operand (type, integer_one_node, arg);
6214
6215 /* sqrt(x) < y is x != +Inf when y is very large and we
6216 don't care about NaNs. */
6217 if (! HONOR_NANS (mode))
6218 return fold_build2 (NE_EXPR, type, arg,
6219 build_real (TREE_TYPE (arg), c2));
6220
6221 /* sqrt(x) < y is x >= 0 when y is very large and we
6222 don't care about Infinities. */
6223 if (! HONOR_INFINITIES (mode))
6224 return fold_build2 (GE_EXPR, type, arg,
6225 build_real (TREE_TYPE (arg), dconst0));
6226
6227 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6228 if (lang_hooks.decls.global_bindings_p () != 0
6229 || CONTAINS_PLACEHOLDER_P (arg))
6230 return NULL_TREE;
6231
6232 arg = save_expr (arg);
6233 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6234 fold_build2 (GE_EXPR, type, arg,
6235 build_real (TREE_TYPE (arg),
6236 dconst0)),
6237 fold_build2 (NE_EXPR, type, arg,
6238 build_real (TREE_TYPE (arg),
6239 c2)));
6240 }
6241
6242 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6243 if (! HONOR_NANS (mode))
6244 return fold_build2 (code, type, arg,
6245 build_real (TREE_TYPE (arg), c2));
6246
6247 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6248 if (lang_hooks.decls.global_bindings_p () == 0
6249 && ! CONTAINS_PLACEHOLDER_P (arg))
6250 {
6251 arg = save_expr (arg);
6252 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6253 fold_build2 (GE_EXPR, type, arg,
6254 build_real (TREE_TYPE (arg),
6255 dconst0)),
6256 fold_build2 (code, type, arg,
6257 build_real (TREE_TYPE (arg),
6258 c2)));
6259 }
6260 }
6261 }
6262
6263 return NULL_TREE;
6264 }
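
/* Editorial note: these sqrt rewrites treat sqrt as an exact
   real-valued function, so they are only valid under unsafe-math
   assumptions. A worked instance of the GT_EXPR case, with c = 3.0:

     sqrt (x) > 3.0    becomes  x > 9.0       (c2 = c*c)
     sqrt (x) > -1.0   becomes  x >= 0.0      (negative bound)
     sqrt (x) > 1e300  becomes  x == +Inf     (c*c overflows to +Inf)

   always reading sqrt mathematically. */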
6265
6266 /* Subroutine of fold() that optimizes comparisons against Infinities,
6267 either +Inf or -Inf.
6268
6269 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6270 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6271 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6272
6273 The function returns the constant folded tree if a simplification
6274 can be made, and NULL_TREE otherwise. */
6275
6276 static tree
6277 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6278 {
6279 enum machine_mode mode;
6280 REAL_VALUE_TYPE max;
6281 tree temp;
6282 bool neg;
6283
6284 mode = TYPE_MODE (TREE_TYPE (arg0));
6285
6286 /* For negative infinity swap the sense of the comparison. */
6287 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6288 if (neg)
6289 code = swap_tree_comparison (code);
6290
6291 switch (code)
6292 {
6293 case GT_EXPR:
6294 /* x > +Inf is always false, if we ignore sNaNs. */
6295 if (HONOR_SNANS (mode))
6296 return NULL_TREE;
6297 return omit_one_operand (type, integer_zero_node, arg0);
6298
6299 case LE_EXPR:
6300 /* x <= +Inf is always true, if we don't care about NaNs. */
6301 if (! HONOR_NANS (mode))
6302 return omit_one_operand (type, integer_one_node, arg0);
6303
6304 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6305 if (lang_hooks.decls.global_bindings_p () == 0
6306 && ! CONTAINS_PLACEHOLDER_P (arg0))
6307 {
6308 arg0 = save_expr (arg0);
6309 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6310 }
6311 break;
6312
6313 case EQ_EXPR:
6314 case GE_EXPR:
6315 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6316 real_maxval (&max, neg, mode);
6317 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6318 arg0, build_real (TREE_TYPE (arg0), max));
6319
6320 case LT_EXPR:
6321 /* x < +Inf is always equal to x <= DBL_MAX. */
6322 real_maxval (&max, neg, mode);
6323 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6324 arg0, build_real (TREE_TYPE (arg0), max));
6325
6326 case NE_EXPR:
6327 /* x != +Inf is always equal to !(x > DBL_MAX). */
6328 real_maxval (&max, neg, mode);
6329 if (! HONOR_NANS (mode))
6330 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6331 arg0, build_real (TREE_TYPE (arg0), max));
6332
6333 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6334 arg0, build_real (TREE_TYPE (arg0), max));
6335 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6336
6337 default:
6338 break;
6339 }
6340
6341 return NULL_TREE;
6342 }
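
/* Editorial note, a worked instance of the rewrites above on IEEE
   doubles, where DBL_MAX is the largest finite value:

     x <  +Inf   becomes  x <= DBL_MAX    (LT_EXPR)
     x == +Inf   becomes  x >  DBL_MAX    (EQ_EXPR / GE_EXPR)
     x <= +Inf   becomes  x == x          (LE_EXPR, i.e. !isnan (x))

   Each pair agrees for every input, including NaN, for which both
   sides are false. */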
6343
6344 /* Subroutine of fold() that optimizes comparisons of a division by
6345 a nonzero integer constant against an integer constant, i.e.
6346 X/C1 op C2.
6347
6348 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6349 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6350 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6351
6352 The function returns the constant folded tree if a simplification
6353 can be made, and NULL_TREE otherwise. */
6354
6355 static tree
6356 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6357 {
6358 tree prod, tmp, hi, lo;
6359 tree arg00 = TREE_OPERAND (arg0, 0);
6360 tree arg01 = TREE_OPERAND (arg0, 1);
6361 unsigned HOST_WIDE_INT lpart;
6362 HOST_WIDE_INT hpart;
6363 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6364 bool neg_overflow;
6365 int overflow;
6366
6367 /* We have to do this the hard way to detect unsigned overflow.
6368 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6369 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6370 TREE_INT_CST_HIGH (arg01),
6371 TREE_INT_CST_LOW (arg1),
6372 TREE_INT_CST_HIGH (arg1),
6373 &lpart, &hpart, unsigned_p);
6374 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6375 -1, overflow);
6376 neg_overflow = false;
6377
6378 if (unsigned_p)
6379 {
6380 tmp = int_const_binop (MINUS_EXPR, arg01,
6381 build_int_cst (TREE_TYPE (arg01), 1), 0);
6382 lo = prod;
6383
6384 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6385 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6386 TREE_INT_CST_HIGH (prod),
6387 TREE_INT_CST_LOW (tmp),
6388 TREE_INT_CST_HIGH (tmp),
6389 &lpart, &hpart, unsigned_p);
6390 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6391 -1, overflow | TREE_OVERFLOW (prod));
6392 }
6393 else if (tree_int_cst_sgn (arg01) >= 0)
6394 {
6395 tmp = int_const_binop (MINUS_EXPR, arg01,
6396 build_int_cst (TREE_TYPE (arg01), 1), 0);
6397 switch (tree_int_cst_sgn (arg1))
6398 {
6399 case -1:
6400 neg_overflow = true;
6401 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6402 hi = prod;
6403 break;
6404
6405 case 0:
6406 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6407 hi = tmp;
6408 break;
6409
6410 case 1:
6411 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6412 lo = prod;
6413 break;
6414
6415 default:
6416 gcc_unreachable ();
6417 }
6418 }
6419 else
6420 {
6421 /* A negative divisor reverses the relational operators. */
6422 code = swap_tree_comparison (code);
6423
6424 tmp = int_const_binop (PLUS_EXPR, arg01,
6425 build_int_cst (TREE_TYPE (arg01), 1), 0);
6426 switch (tree_int_cst_sgn (arg1))
6427 {
6428 case -1:
6429 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6430 lo = prod;
6431 break;
6432
6433 case 0:
6434 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6435 lo = tmp;
6436 break;
6437
6438 case 1:
6439 neg_overflow = true;
6440 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6441 hi = prod;
6442 break;
6443
6444 default:
6445 gcc_unreachable ();
6446 }
6447 }
6448
6449 switch (code)
6450 {
6451 case EQ_EXPR:
6452 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6453 return omit_one_operand (type, integer_zero_node, arg00);
6454 if (TREE_OVERFLOW (hi))
6455 return fold_build2 (GE_EXPR, type, arg00, lo);
6456 if (TREE_OVERFLOW (lo))
6457 return fold_build2 (LE_EXPR, type, arg00, hi);
6458 return build_range_check (type, arg00, 1, lo, hi);
6459
6460 case NE_EXPR:
6461 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6462 return omit_one_operand (type, integer_one_node, arg00);
6463 if (TREE_OVERFLOW (hi))
6464 return fold_build2 (LT_EXPR, type, arg00, lo);
6465 if (TREE_OVERFLOW (lo))
6466 return fold_build2 (GT_EXPR, type, arg00, hi);
6467 return build_range_check (type, arg00, 0, lo, hi);
6468
6469 case LT_EXPR:
6470 if (TREE_OVERFLOW (lo))
6471 {
6472 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6473 return omit_one_operand (type, tmp, arg00);
6474 }
6475 return fold_build2 (LT_EXPR, type, arg00, lo);
6476
6477 case LE_EXPR:
6478 if (TREE_OVERFLOW (hi))
6479 {
6480 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6481 return omit_one_operand (type, tmp, arg00);
6482 }
6483 return fold_build2 (LE_EXPR, type, arg00, hi);
6484
6485 case GT_EXPR:
6486 if (TREE_OVERFLOW (hi))
6487 {
6488 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6489 return omit_one_operand (type, tmp, arg00);
6490 }
6491 return fold_build2 (GT_EXPR, type, arg00, hi);
6492
6493 case GE_EXPR:
6494 if (TREE_OVERFLOW (lo))
6495 {
6496 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6497 return omit_one_operand (type, tmp, arg00);
6498 }
6499 return fold_build2 (GE_EXPR, type, arg00, lo);
6500
6501 default:
6502 break;
6503 }
6504
6505 return NULL_TREE;
6506 }
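
/* Editorial illustration (not part of GCC; name hypothetical): the
   unsigned EQ_EXPR case above with C1 = 4 and C2 = 3. Then lo = 3*4
   and hi = lo + (4-1), so the division becomes a range check. */

static int
example_div_compare (unsigned x)
{
  return (x / 4 == 3) == (x >= 12 && x <= 15);
}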
6507
6508
6509 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6510 equality/inequality test, then return a simplified form of the test
6511 using a sign test. Otherwise return NULL. TYPE is the desired
6512 result type. */
6513
6514 static tree
6515 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6516 tree result_type)
6517 {
6518 /* If this is testing a single bit, we can optimize the test. */
6519 if ((code == NE_EXPR || code == EQ_EXPR)
6520 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6521 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6522 {
6523 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6524 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6525 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6526
6527 if (arg00 != NULL_TREE
6528 /* This is only a win if casting to a signed type is cheap,
6529 i.e. when arg00's type is not a partial mode. */
6530 && TYPE_PRECISION (TREE_TYPE (arg00))
6531 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6532 {
6533 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6534 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6535 result_type, fold_convert (stype, arg00),
6536 build_int_cst (stype, 0));
6537 }
6538 }
6539
6540 return NULL_TREE;
6541 }
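
/* Editorial illustration (not part of GCC; name hypothetical, assuming
   a 32-bit int): when C is the sign bit, (A & C) != 0 is the same test
   as A < 0. */

static int
example_sign_bit_test (int a)
{
  return (((unsigned) a & 0x80000000u) != 0) == (a < 0);
}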
6542
6543 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6544 equality/inequality test, then return a simplified form of
6545 the test using shifts and logical operations. Otherwise return
6546 NULL. TYPE is the desired result type. */
6547
6548 tree
6549 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6550 tree result_type)
6551 {
6552 /* If this is testing a single bit, we can optimize the test. */
6553 if ((code == NE_EXPR || code == EQ_EXPR)
6554 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6555 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6556 {
6557 tree inner = TREE_OPERAND (arg0, 0);
6558 tree type = TREE_TYPE (arg0);
6559 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6560 enum machine_mode operand_mode = TYPE_MODE (type);
6561 int ops_unsigned;
6562 tree signed_type, unsigned_type, intermediate_type;
6563 tree tem, one;
6564
6565 /* First, see if we can fold the single bit test into a sign-bit
6566 test. */
6567 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6568 result_type);
6569 if (tem)
6570 return tem;
6571
6572 /* Otherwise we have (A & C) != 0 where C is a single bit,
6573 convert that into ((A >> C2) & 1), where C2 = log2(C).
6574 Similarly for (A & C) == 0. */
6575
6576 /* If INNER is a right shift of a constant and it plus BITNUM does
6577 not overflow, adjust BITNUM and INNER. */
6578 if (TREE_CODE (inner) == RSHIFT_EXPR
6579 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6580 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6581 && bitnum < TYPE_PRECISION (type)
6582 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6583 bitnum - TYPE_PRECISION (type)))
6584 {
6585 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6586 inner = TREE_OPERAND (inner, 0);
6587 }
6588
6589 /* If we are going to be able to omit the AND below, we must do our
6590 operations as unsigned. If we must use the AND, we have a choice.
6591 Normally unsigned is faster, but for some machines signed is. */
6592 #ifdef LOAD_EXTEND_OP
6593 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6594 && !flag_syntax_only) ? 0 : 1;
6595 #else
6596 ops_unsigned = 1;
6597 #endif
6598
6599 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6600 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6601 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6602 inner = fold_convert (intermediate_type, inner);
6603
6604 if (bitnum != 0)
6605 inner = build2 (RSHIFT_EXPR, intermediate_type,
6606 inner, size_int (bitnum));
6607
6608 one = build_int_cst (intermediate_type, 1);
6609
6610 if (code == EQ_EXPR)
6611 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6612
6613 /* Put the AND last so it can combine with more things. */
6614 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6615
6616 /* Make sure to return the proper type. */
6617 inner = fold_convert (result_type, inner);
6618
6619 return inner;
6620 }
6621 return NULL_TREE;
6622 }
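
/* Editorial illustration (not part of GCC; name hypothetical): the
   shift form produced above for a non-sign bit, here C = 8 so
   C2 = log2 (8) = 3; the EQ_EXPR variant XORs in an extra 1. */

static unsigned
example_single_bit_test (unsigned a)
{
  return (unsigned) ((a & 8) != 0) == ((a >> 3) & 1);
}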
6623
6624 /* Check whether we are allowed to reorder operands arg0 and arg1,
6625 such that the evaluation of arg1 occurs before arg0. */
6626
6627 static bool
6628 reorder_operands_p (tree arg0, tree arg1)
6629 {
6630 if (! flag_evaluation_order)
6631 return true;
6632 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6633 return true;
6634 return ! TREE_SIDE_EFFECTS (arg0)
6635 && ! TREE_SIDE_EFFECTS (arg1);
6636 }
6637
6638 /* Test whether it is preferable to swap two operands, ARG0 and
6639 ARG1, for example because ARG0 is an integer constant and ARG1
6640 isn't. If REORDER is true, only recommend swapping if we can
6641 evaluate the operands in reverse order. */
6642
6643 bool
6644 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6645 {
6646 STRIP_SIGN_NOPS (arg0);
6647 STRIP_SIGN_NOPS (arg1);
6648
6649 if (TREE_CODE (arg1) == INTEGER_CST)
6650 return 0;
6651 if (TREE_CODE (arg0) == INTEGER_CST)
6652 return 1;
6653
6654 if (TREE_CODE (arg1) == REAL_CST)
6655 return 0;
6656 if (TREE_CODE (arg0) == REAL_CST)
6657 return 1;
6658
6659 if (TREE_CODE (arg1) == COMPLEX_CST)
6660 return 0;
6661 if (TREE_CODE (arg0) == COMPLEX_CST)
6662 return 1;
6663
6664 if (TREE_CONSTANT (arg1))
6665 return 0;
6666 if (TREE_CONSTANT (arg0))
6667 return 1;
6668
6669 if (optimize_size)
6670 return 0;
6671
6672 if (reorder && flag_evaluation_order
6673 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6674 return 0;
6675
6676 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6677 for commutative and comparison operators. Ensuring a canonical
6678 form allows the optimizers to find additional redundancies without
6679 having to explicitly check for both orderings. */
6680 if (TREE_CODE (arg0) == SSA_NAME
6681 && TREE_CODE (arg1) == SSA_NAME
6682 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6683 return 1;
6684
6685 /* Put SSA_NAMEs last. */
6686 if (TREE_CODE (arg1) == SSA_NAME)
6687 return 0;
6688 if (TREE_CODE (arg0) == SSA_NAME)
6689 return 1;
6690
6691 /* Put variables last. */
6692 if (DECL_P (arg1))
6693 return 0;
6694 if (DECL_P (arg0))
6695 return 1;
6696
6697 return 0;
6698 }
6699
6700 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6701 ARG0 is extended to a wider type. */
6702
6703 static tree
6704 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6705 {
6706 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6707 tree arg1_unw;
6708 tree shorter_type, outer_type;
6709 tree min, max;
6710 bool above, below;
6711
6712 if (arg0_unw == arg0)
6713 return NULL_TREE;
6714 shorter_type = TREE_TYPE (arg0_unw);
6715
6716 #ifdef HAVE_canonicalize_funcptr_for_compare
6717 /* Disable this optimization if we're casting a function pointer
6718 type on targets that require function pointer canonicalization. */
6719 if (HAVE_canonicalize_funcptr_for_compare
6720 && TREE_CODE (shorter_type) == POINTER_TYPE
6721 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6722 return NULL_TREE;
6723 #endif
6724
6725 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6726 return NULL_TREE;
6727
6728 arg1_unw = get_unwidened (arg1, shorter_type);
6729
6730 /* If possible, express the comparison in the shorter mode. */
6731 if ((code == EQ_EXPR || code == NE_EXPR
6732 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6733 && (TREE_TYPE (arg1_unw) == shorter_type
6734 || (TREE_CODE (arg1_unw) == INTEGER_CST
6735 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6736 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6737 && int_fits_type_p (arg1_unw, shorter_type))))
6738 return fold_build2 (code, type, arg0_unw,
6739 fold_convert (shorter_type, arg1_unw));
6740
6741 if (TREE_CODE (arg1_unw) != INTEGER_CST
6742 || TREE_CODE (shorter_type) != INTEGER_TYPE
6743 || !int_fits_type_p (arg1_unw, shorter_type))
6744 return NULL_TREE;
6745
6746 /* If we are comparing with an integer that does not fit into the range
6747 of the shorter type, the result is known. */
6748 outer_type = TREE_TYPE (arg1_unw);
6749 min = lower_bound_in_type (outer_type, shorter_type);
6750 max = upper_bound_in_type (outer_type, shorter_type);
6751
6752 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6753 max, arg1_unw));
6754 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6755 arg1_unw, min));
6756
6757 switch (code)
6758 {
6759 case EQ_EXPR:
6760 if (above || below)
6761 return omit_one_operand (type, integer_zero_node, arg0);
6762 break;
6763
6764 case NE_EXPR:
6765 if (above || below)
6766 return omit_one_operand (type, integer_one_node, arg0);
6767 break;
6768
6769 case LT_EXPR:
6770 case LE_EXPR:
6771 if (above)
6772 return omit_one_operand (type, integer_one_node, arg0);
6773 else if (below)
6774 return omit_one_operand (type, integer_zero_node, arg0);
break;
6775
6776 case GT_EXPR:
6777 case GE_EXPR:
6778 if (above)
6779 return omit_one_operand (type, integer_zero_node, arg0);
6780 else if (below)
6781 return omit_one_operand (type, integer_one_node, arg0);
break;
6782
6783 default:
6784 break;
6785 }
6786
6787 return NULL_TREE;
6788 }
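
/* Editorial illustration (not part of GCC; name hypothetical): the
   "constant outside the shorter range" case above. */

static int
example_widened_compare (signed char c)
{
  /* (int) c == 1000 can never hold, since 1000 exceeds the upper
     bound of signed char; the folder yields constant false. */
  return ((int) c == 1000) == 0;
}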
6789
6790 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6791 ARG0 just the signedness is changed. */
6792
6793 static tree
6794 fold_sign_changed_comparison (enum tree_code code, tree type,
6795 tree arg0, tree arg1)
6796 {
6797 tree arg0_inner;
6798 tree inner_type, outer_type;
6799
6800 if (TREE_CODE (arg0) != NOP_EXPR
6801 && TREE_CODE (arg0) != CONVERT_EXPR)
6802 return NULL_TREE;
6803
6804 outer_type = TREE_TYPE (arg0);
6805 arg0_inner = TREE_OPERAND (arg0, 0);
6806 inner_type = TREE_TYPE (arg0_inner);
6807
6808 #ifdef HAVE_canonicalize_funcptr_for_compare
6809 /* Disable this optimization if we're casting a function pointer
6810 type on targets that require function pointer canonicalization. */
6811 if (HAVE_canonicalize_funcptr_for_compare
6812 && TREE_CODE (inner_type) == POINTER_TYPE
6813 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6814 return NULL_TREE;
6815 #endif
6816
6817 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6818 return NULL_TREE;
6819
6820 if (TREE_CODE (arg1) != INTEGER_CST
6821 && !((TREE_CODE (arg1) == NOP_EXPR
6822 || TREE_CODE (arg1) == CONVERT_EXPR)
6823 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6824 return NULL_TREE;
6825
6826 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6827 && code != NE_EXPR
6828 && code != EQ_EXPR)
6829 return NULL_TREE;
6830
6831 if (TREE_CODE (arg1) == INTEGER_CST)
6832 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6833 TREE_INT_CST_HIGH (arg1), 0,
6834 TREE_OVERFLOW (arg1));
6835 else
6836 arg1 = fold_convert (inner_type, arg1);
6837
6838 return fold_build2 (code, type, arg0_inner, arg1);
6839 }
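
/* Editorial illustration (not part of GCC; name hypothetical): for
   equality, a conversion that changes only the signedness of a
   same-precision type can be stripped. */

static int
example_sign_changed_compare (int x)
{
  return ((unsigned) x == 5u) == (x == 5);
}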
6840
6841 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is the
6842 step of the array. Reconstructs s and delta in the case of s * delta
6843 being an integer constant (and thus already folded).
6844 ADDR is the address. MULT is the multiplicative expression.
6845 If the function succeeds, the new address expression is returned. Otherwise
6846 NULL_TREE is returned. */
6847
6848 static tree
6849 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6850 {
6851 tree s, delta, step;
6852 tree ref = TREE_OPERAND (addr, 0), pref;
6853 tree ret, pos;
6854 tree itype;
6855 bool mdim = false;
6856
6857 /* Canonicalize op1 into a possibly non-constant delta
6858 and an INTEGER_CST s. */
6859 if (TREE_CODE (op1) == MULT_EXPR)
6860 {
6861 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6862
6863 STRIP_NOPS (arg0);
6864 STRIP_NOPS (arg1);
6865
6866 if (TREE_CODE (arg0) == INTEGER_CST)
6867 {
6868 s = arg0;
6869 delta = arg1;
6870 }
6871 else if (TREE_CODE (arg1) == INTEGER_CST)
6872 {
6873 s = arg1;
6874 delta = arg0;
6875 }
6876 else
6877 return NULL_TREE;
6878 }
6879 else if (TREE_CODE (op1) == INTEGER_CST)
6880 {
6881 delta = op1;
6882 s = NULL_TREE;
6883 }
6884 else
6885 {
6886 /* Treat op1 as delta * 1. */
6887 delta = op1;
6888 s = integer_one_node;
6889 }
6890
6891 for (;; ref = TREE_OPERAND (ref, 0))
6892 {
6893 if (TREE_CODE (ref) == ARRAY_REF)
6894 {
6895 /* Remember if this was a multi-dimensional array. */
6896 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6897 mdim = true;
6898
6899 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6900 if (! itype)
6901 continue;
6902
6903 step = array_ref_element_size (ref);
6904 if (TREE_CODE (step) != INTEGER_CST)
6905 continue;
6906
6907 if (s)
6908 {
6909 if (! tree_int_cst_equal (step, s))
6910 continue;
6911 }
6912 else
6913 {
6914 /* See whether delta is a multiple of step. */
6915 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6916 if (! tmp)
6917 continue;
6918 delta = tmp;
6919 }
6920
6921 /* Only fold here if we can verify we do not overflow one
6922 dimension of a multi-dimensional array. */
6923 if (mdim)
6924 {
6925 tree tmp;
6926
6927 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6928 || !INTEGRAL_TYPE_P (itype)
6929 || !TYPE_MAX_VALUE (itype)
6930 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
6931 continue;
6932
6933 tmp = fold_binary (code, itype,
6934 fold_convert (itype,
6935 TREE_OPERAND (ref, 1)),
6936 fold_convert (itype, delta));
6937 if (!tmp
6938 || TREE_CODE (tmp) != INTEGER_CST
6939 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
6940 continue;
6941 }
6942
6943 break;
6944 }
6945 else
6946 mdim = false;
6947
6948 if (!handled_component_p (ref))
6949 return NULL_TREE;
6950 }
6951
6952 /* We found a suitable array reference. Copy everything up to it,
6953 and replace the index. */
6954
6955 pref = TREE_OPERAND (addr, 0);
6956 ret = copy_node (pref);
6957 pos = ret;
6958
6959 while (pref != ref)
6960 {
6961 pref = TREE_OPERAND (pref, 0);
6962 TREE_OPERAND (pos, 0) = copy_node (pref);
6963 pos = TREE_OPERAND (pos, 0);
6964 }
6965
6966 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6967 fold_convert (itype,
6968 TREE_OPERAND (pos, 1)),
6969 fold_convert (itype, delta));
6970
6971 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6972 }
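
/* Editorial illustration (not part of GCC; name hypothetical): with s
   equal to the element size, &a[idx] + s * delta scaled into element
   units is &a[idx + delta]; in C source terms, pointer arithmetic
   already works in elements (assume idx and idx + 2 stay in range). */

static int
example_move_mult_to_index (int a[], int idx)
{
  return &a[idx] + 2 == &a[idx + 2];	/* delta = 2 */
}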
6973
6974
6975 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6976 means A >= Y && A != MAX, but in this case we know that
6977 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6978
6979 static tree
6980 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6981 {
6982 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6983
6984 if (TREE_CODE (bound) == LT_EXPR)
6985 a = TREE_OPERAND (bound, 0);
6986 else if (TREE_CODE (bound) == GT_EXPR)
6987 a = TREE_OPERAND (bound, 1);
6988 else
6989 return NULL_TREE;
6990
6991 typea = TREE_TYPE (a);
6992 if (!INTEGRAL_TYPE_P (typea)
6993 && !POINTER_TYPE_P (typea))
6994 return NULL_TREE;
6995
6996 if (TREE_CODE (ineq) == LT_EXPR)
6997 {
6998 a1 = TREE_OPERAND (ineq, 1);
6999 y = TREE_OPERAND (ineq, 0);
7000 }
7001 else if (TREE_CODE (ineq) == GT_EXPR)
7002 {
7003 a1 = TREE_OPERAND (ineq, 0);
7004 y = TREE_OPERAND (ineq, 1);
7005 }
7006 else
7007 return NULL_TREE;
7008
7009 if (TREE_TYPE (a1) != typea)
7010 return NULL_TREE;
7011
7012 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
7013 if (!integer_onep (diff))
7014 return NULL_TREE;
7015
7016 return fold_build2 (GE_EXPR, type, a, y);
7017 }
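
/* Editorial illustration (not part of GCC; name hypothetical): given
   a < x, a cannot be the maximum value of its type, so a + 1 does not
   wrap and a + 1 > y collapses to a >= y. */

static int
example_nonsharp_ineq (int a, int x, int y)
{
  return (a < x && a + 1 > y) == (a < x && a >= y);
}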
7018
7019 /* Fold a sum or difference of at least one multiplication.
7020 Returns the folded tree or NULL if no simplification could be made. */
7021
7022 static tree
7023 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7024 {
7025 tree arg00, arg01, arg10, arg11;
7026 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7027
7028 /* (A * C) +- (B * C) -> (A+-B) * C.
7029 (A * C) +- A -> A * (C+-1).
7030 We are most concerned about the case where C is a constant,
7031 but other combinations show up during loop reduction. Since
7032 it is not difficult, try all four possibilities. */
7033
7034 if (TREE_CODE (arg0) == MULT_EXPR)
7035 {
7036 arg00 = TREE_OPERAND (arg0, 0);
7037 arg01 = TREE_OPERAND (arg0, 1);
7038 }
7039 else
7040 {
7041 arg00 = arg0;
7042 arg01 = build_one_cst (type);
7043 }
7044 if (TREE_CODE (arg1) == MULT_EXPR)
7045 {
7046 arg10 = TREE_OPERAND (arg1, 0);
7047 arg11 = TREE_OPERAND (arg1, 1);
7048 }
7049 else
7050 {
7051 arg10 = arg1;
7052 arg11 = build_one_cst (type);
7053 }
7054 same = NULL_TREE;
7055
7056 if (operand_equal_p (arg01, arg11, 0))
7057 same = arg01, alt0 = arg00, alt1 = arg10;
7058 else if (operand_equal_p (arg00, arg10, 0))
7059 same = arg00, alt0 = arg01, alt1 = arg11;
7060 else if (operand_equal_p (arg00, arg11, 0))
7061 same = arg00, alt0 = arg01, alt1 = arg10;
7062 else if (operand_equal_p (arg01, arg10, 0))
7063 same = arg01, alt0 = arg00, alt1 = arg11;
7064
7065 /* No identical multiplicands; see if we can find a common
7066 power-of-two factor in non-power-of-two multiplies. This
7067 can help in multi-dimensional array access. */
7068 else if (host_integerp (arg01, 0)
7069 && host_integerp (arg11, 0))
7070 {
7071 HOST_WIDE_INT int01, int11, tmp;
7072 bool swap = false;
7073 tree maybe_same;
7074 int01 = TREE_INT_CST_LOW (arg01);
7075 int11 = TREE_INT_CST_LOW (arg11);
7076
7077 /* Move min of absolute values to int11. */
7078 if ((int01 >= 0 ? int01 : -int01)
7079 < (int11 >= 0 ? int11 : -int11))
7080 {
7081 tmp = int01, int01 = int11, int11 = tmp;
7082 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7083 maybe_same = arg01;
7084 swap = true;
7085 }
7086 else
7087 maybe_same = arg11;
7088
7089 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7090 {
7091 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7092 build_int_cst (TREE_TYPE (arg00),
7093 int01 / int11));
7094 alt1 = arg10;
7095 same = maybe_same;
7096 if (swap)
7097 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7098 }
7099 }
7100
7101 if (same)
7102 return fold_build2 (MULT_EXPR, type,
7103 fold_build2 (code, type,
7104 fold_convert (type, alt0),
7105 fold_convert (type, alt1)),
7106 fold_convert (type, same));
7107
7108 return NULL_TREE;
7109 }
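
/* Some sketches of what the routine above produces:

     a * c + b * c    becomes   (a + b) * c      identical multiplicands
     a * c + a        becomes   a * (c + 1)      implicit factor of 1
     i * 12 + j * 4   becomes   (i * 3 + j) * 4  common power-of-two factor

   The last form arises in multi-dimensional array indexing, where row
   strides are multiples of the element size. */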
7110
7111 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7112 specified by EXPR into the buffer PTR of length LEN bytes.
7113 Return the number of bytes placed in the buffer, or zero
7114 upon failure. */
7115
7116 static int
7117 native_encode_int (tree expr, unsigned char *ptr, int len)
7118 {
7119 tree type = TREE_TYPE (expr);
7120 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7121 int byte, offset, word, words;
7122 unsigned char value;
7123
7124 if (total_bytes > len)
7125 return 0;
7126 words = total_bytes / UNITS_PER_WORD;
7127
7128 for (byte = 0; byte < total_bytes; byte++)
7129 {
7130 int bitpos = byte * BITS_PER_UNIT;
7131 if (bitpos < HOST_BITS_PER_WIDE_INT)
7132 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7133 else
7134 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7135 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7136
7137 if (total_bytes > UNITS_PER_WORD)
7138 {
7139 word = byte / UNITS_PER_WORD;
7140 if (WORDS_BIG_ENDIAN)
7141 word = (words - 1) - word;
7142 offset = word * UNITS_PER_WORD;
7143 if (BYTES_BIG_ENDIAN)
7144 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7145 else
7146 offset += byte % UNITS_PER_WORD;
7147 }
7148 else
7149 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7150 ptr[offset] = value;
7151 }
7152 return total_bytes;
7153 }
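
/* For example, assuming a 32-bit int on a little-endian target whose
   words are not byte-swapped (a sketch only): encoding the INTEGER_CST
   0x01020304 stores

     ptr[0] = 0x04, ptr[1] = 0x03, ptr[2] = 0x02, ptr[3] = 0x01

   and returns 4; a big-endian target stores the bytes in the opposite
   order. */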
7154
7155
7156 /* Subroutine of native_encode_expr. Encode the REAL_CST
7157 specified by EXPR into the buffer PTR of length LEN bytes.
7158 Return the number of bytes placed in the buffer, or zero
7159 upon failure. */
7160
7161 static int
7162 native_encode_real (tree expr, unsigned char *ptr, int len)
7163 {
7164 tree type = TREE_TYPE (expr);
7165 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7166 int byte, offset, word, words;
7167 unsigned char value;
7168
7169 /* There are always 32 bits in each long, no matter the size of
7170 the host's long. We handle floating point representations with
7171 up to 192 bits. */
7172 long tmp[6];
7173
7174 if (total_bytes > len)
7175 return 0;
7176 words = total_bytes / UNITS_PER_WORD;
7177
7178 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7179
7180 for (byte = 0; byte < total_bytes; byte++)
7181 {
7182 int bitpos = byte * BITS_PER_UNIT;
7183 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7184
7185 if (total_bytes > UNITS_PER_WORD)
7186 {
7187 word = byte / UNITS_PER_WORD;
7188 if (FLOAT_WORDS_BIG_ENDIAN)
7189 word = (words - 1) - word;
7190 offset = word * UNITS_PER_WORD;
7191 if (BYTES_BIG_ENDIAN)
7192 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7193 else
7194 offset += byte % UNITS_PER_WORD;
7195 }
7196 else
7197 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7198 ptr[offset] = value;
7199 }
7200 return total_bytes;
7201 }
7202
7203 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7204 specified by EXPR into the buffer PTR of length LEN bytes.
7205 Return the number of bytes placed in the buffer, or zero
7206 upon failure. */
7207
7208 static int
7209 native_encode_complex (tree expr, unsigned char *ptr, int len)
7210 {
7211 int rsize, isize;
7212 tree part;
7213
7214 part = TREE_REALPART (expr);
7215 rsize = native_encode_expr (part, ptr, len);
7216 if (rsize == 0)
7217 return 0;
7218 part = TREE_IMAGPART (expr);
7219 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7220 if (isize != rsize)
7221 return 0;
7222 return rsize + isize;
7223 }
7224
7225
7226 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7227 specified by EXPR into the buffer PTR of length LEN bytes.
7228 Return the number of bytes placed in the buffer, or zero
7229 upon failure. */
7230
7231 static int
7232 native_encode_vector (tree expr, unsigned char *ptr, int len)
7233 {
7234 int i, size, offset, count;
7235 tree itype, elem, elements;
7236
7237 offset = 0;
7238 elements = TREE_VECTOR_CST_ELTS (expr);
7239 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7240 itype = TREE_TYPE (TREE_TYPE (expr));
7241 size = GET_MODE_SIZE (TYPE_MODE (itype));
7242 for (i = 0; i < count; i++)
7243 {
7244 if (elements)
7245 {
7246 elem = TREE_VALUE (elements);
7247 elements = TREE_CHAIN (elements);
7248 }
7249 else
7250 elem = NULL_TREE;
7251
7252 if (elem)
7253 {
7254 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7255 return 0;
7256 }
7257 else
7258 {
7259 if (offset + size > len)
7260 return 0;
7261 memset (ptr+offset, 0, size);
7262 }
7263 offset += size;
7264 }
7265 return offset;
7266 }
7267
7268
7269 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7270 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7271 buffer PTR of length LEN bytes. Return the number of bytes
7272 placed in the buffer, or zero upon failure. */
7273
7274 static int
7275 native_encode_expr (tree expr, unsigned char *ptr, int len)
7276 {
7277 switch (TREE_CODE (expr))
7278 {
7279 case INTEGER_CST:
7280 return native_encode_int (expr, ptr, len);
7281
7282 case REAL_CST:
7283 return native_encode_real (expr, ptr, len);
7284
7285 case COMPLEX_CST:
7286 return native_encode_complex (expr, ptr, len);
7287
7288 case VECTOR_CST:
7289 return native_encode_vector (expr, ptr, len);
7290
7291 default:
7292 return 0;
7293 }
7294 }
7295
7296
7297 /* Subroutine of native_interpret_expr. Interpret the contents of
7298 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7299 If the buffer cannot be interpreted, return NULL_TREE. */
7300
7301 static tree
7302 native_interpret_int (tree type, unsigned char *ptr, int len)
7303 {
7304 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7305 int byte, offset, word, words;
7306 unsigned char value;
7307 unsigned HOST_WIDE_INT lo = 0;
7308 HOST_WIDE_INT hi = 0;
7309
7310 if (total_bytes > len)
7311 return NULL_TREE;
7312 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7313 return NULL_TREE;
7314 words = total_bytes / UNITS_PER_WORD;
7315
7316 for (byte = 0; byte < total_bytes; byte++)
7317 {
7318 int bitpos = byte * BITS_PER_UNIT;
7319 if (total_bytes > UNITS_PER_WORD)
7320 {
7321 word = byte / UNITS_PER_WORD;
7322 if (WORDS_BIG_ENDIAN)
7323 word = (words - 1) - word;
7324 offset = word * UNITS_PER_WORD;
7325 if (BYTES_BIG_ENDIAN)
7326 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7327 else
7328 offset += byte % UNITS_PER_WORD;
7329 }
7330 else
7331 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7332 value = ptr[offset];
7333
7334 if (bitpos < HOST_BITS_PER_WIDE_INT)
7335 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7336 else
7337 hi |= (unsigned HOST_WIDE_INT) value
7338 << (bitpos - HOST_BITS_PER_WIDE_INT);
7339 }
7340
7341 return build_int_cst_wide_type (type, lo, hi);
7342 }
7343
7344
7345 /* Subroutine of native_interpret_expr. Interpret the contents of
7346 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7347 If the buffer cannot be interpreted, return NULL_TREE. */
7348
7349 static tree
7350 native_interpret_real (tree type, unsigned char *ptr, int len)
7351 {
7352 enum machine_mode mode = TYPE_MODE (type);
7353 int total_bytes = GET_MODE_SIZE (mode);
7354 int byte, offset, word, words;
7355 unsigned char value;
7356 /* There are always 32 bits in each long, no matter the size of
7357 the host's long. We handle floating point representations with
7358 up to 192 bits. */
7359 REAL_VALUE_TYPE r;
7360 long tmp[6];
7361
7363 if (total_bytes > len || total_bytes > 24)
7364 return NULL_TREE;
7365 words = total_bytes / UNITS_PER_WORD;
7366
7367 memset (tmp, 0, sizeof (tmp));
7368 for (byte = 0; byte < total_bytes; byte++)
7369 {
7370 int bitpos = byte * BITS_PER_UNIT;
7371 if (total_bytes > UNITS_PER_WORD)
7372 {
7373 word = byte / UNITS_PER_WORD;
7374 if (FLOAT_WORDS_BIG_ENDIAN)
7375 word = (words - 1) - word;
7376 offset = word * UNITS_PER_WORD;
7377 if (BYTES_BIG_ENDIAN)
7378 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7379 else
7380 offset += byte % UNITS_PER_WORD;
7381 }
7382 else
7383 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7384 value = ptr[offset];
7385
7386 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7387 }
7388
7389 real_from_target (&r, tmp, mode);
7390 return build_real (type, r);
7391 }
7392
7393
7394 /* Subroutine of native_interpret_expr. Interpret the contents of
7395 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7396 If the buffer cannot be interpreted, return NULL_TREE. */
7397
7398 static tree
7399 native_interpret_complex (tree type, unsigned char *ptr, int len)
7400 {
7401 tree etype, rpart, ipart;
7402 int size;
7403
7404 etype = TREE_TYPE (type);
7405 size = GET_MODE_SIZE (TYPE_MODE (etype));
7406 if (size * 2 > len)
7407 return NULL_TREE;
7408 rpart = native_interpret_expr (etype, ptr, size);
7409 if (!rpart)
7410 return NULL_TREE;
7411 ipart = native_interpret_expr (etype, ptr+size, size);
7412 if (!ipart)
7413 return NULL_TREE;
7414 return build_complex (type, rpart, ipart);
7415 }
7416
7417
7418 /* Subroutine of native_interpret_expr. Interpret the contents of
7419 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7420 If the buffer cannot be interpreted, return NULL_TREE. */
7421
7422 static tree
7423 native_interpret_vector (tree type, unsigned char *ptr, int len)
7424 {
7425 tree etype, elem, elements;
7426 int i, size, count;
7427
7428 etype = TREE_TYPE (type);
7429 size = GET_MODE_SIZE (TYPE_MODE (etype));
7430 count = TYPE_VECTOR_SUBPARTS (type);
7431 if (size * count > len)
7432 return NULL_TREE;
7433
7434 elements = NULL_TREE;
7435 for (i = count - 1; i >= 0; i--)
7436 {
7437 elem = native_interpret_expr (etype, ptr+(i*size), size);
7438 if (!elem)
7439 return NULL_TREE;
7440 elements = tree_cons (NULL_TREE, elem, elements);
7441 }
7442 return build_vector (type, elements);
7443 }
7444
7445
7446 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7447 the buffer PTR of length LEN as a constant of type TYPE. For
7448 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7449 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7450 return NULL_TREE. */
7451
7452 static tree
7453 native_interpret_expr (tree type, unsigned char *ptr, int len)
7454 {
7455 switch (TREE_CODE (type))
7456 {
7457 case INTEGER_TYPE:
7458 case ENUMERAL_TYPE:
7459 case BOOLEAN_TYPE:
7460 return native_interpret_int (type, ptr, len);
7461
7462 case REAL_TYPE:
7463 return native_interpret_real (type, ptr, len);
7464
7465 case COMPLEX_TYPE:
7466 return native_interpret_complex (type, ptr, len);
7467
7468 case VECTOR_TYPE:
7469 return native_interpret_vector (type, ptr, len);
7470
7471 default:
7472 return NULL_TREE;
7473 }
7474 }
7475
7476
7477 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7478 TYPE at compile-time. If we're unable to perform the conversion
7479 return NULL_TREE. */
7480
7481 static tree
7482 fold_view_convert_expr (tree type, tree expr)
7483 {
7484 /* We support up to 512-bit values (for V8DFmode). */
7485 unsigned char buffer[64];
7486 int len;
7487
7488 /* Check that the host and target are sane. */
7489 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7490 return NULL_TREE;
7491
7492 len = native_encode_expr (expr, buffer, sizeof (buffer));
7493 if (len == 0)
7494 return NULL_TREE;
7495
7496 return native_interpret_expr (type, buffer, len);
7497 }
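
/* The source-level analogue of this folding is a type pun through a
   union (a sketch; the folder itself only sees constant trees):

     union { float f; int i; } u = { 1.0f };
     int bits = u.i;    (folds to 0x3f800000 for IEEE single)

   The constant is encoded into a buffer in target byte order and then
   reinterpreted as the destination type, so host endianness does not
   leak into the result. */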
7498
7499
7500 /* Fold a unary expression of code CODE and type TYPE with operand
7501 OP0. Return the folded expression if folding is successful.
7502 Otherwise, return NULL_TREE. */
7503
7504 tree
7505 fold_unary (enum tree_code code, tree type, tree op0)
7506 {
7507 tree tem;
7508 tree arg0;
7509 enum tree_code_class kind = TREE_CODE_CLASS (code);
7510
7511 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7512 && TREE_CODE_LENGTH (code) == 1);
7513
7514 arg0 = op0;
7515 if (arg0)
7516 {
7517 if (code == NOP_EXPR || code == CONVERT_EXPR
7518 || code == FLOAT_EXPR || code == ABS_EXPR)
7519 {
7520 /* Don't use STRIP_NOPS, because signedness of argument type
7521 matters. */
7522 STRIP_SIGN_NOPS (arg0);
7523 }
7524 else
7525 {
7526 /* Strip any conversions that don't change the mode. This
7527 is safe for every expression, except for a comparison
7528 expression because its signedness is derived from its
7529 operands.
7530
7531 Note that this is done as an internal manipulation within
7532 the constant folder, in order to find the simplest
7533 representation of the arguments so that their form can be
7534 studied. In any case, the appropriate type conversions
7535 should be put back in the tree that will get out of the
7536 constant folder. */
7537 STRIP_NOPS (arg0);
7538 }
7539 }
7540
7541 if (TREE_CODE_CLASS (code) == tcc_unary)
7542 {
7543 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7544 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7545 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7546 else if (TREE_CODE (arg0) == COND_EXPR)
7547 {
7548 tree arg01 = TREE_OPERAND (arg0, 1);
7549 tree arg02 = TREE_OPERAND (arg0, 2);
7550 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7551 arg01 = fold_build1 (code, type, arg01);
7552 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7553 arg02 = fold_build1 (code, type, arg02);
7554 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7555 arg01, arg02);
7556
7557 /* If this was a conversion, and all we did was to move it
7558 inside the COND_EXPR, bring it back out. But leave it if
7559 it is a conversion from integer to integer and the
7560 result precision is no wider than a word since such a
7561 conversion is cheap and may be optimized away by combine,
7562 while it couldn't if it were outside the COND_EXPR. Then return
7563 so we don't get into an infinite recursion loop taking the
7564 conversion out and then back in. */
7565
7566 if ((code == NOP_EXPR || code == CONVERT_EXPR
7567 || code == NON_LVALUE_EXPR)
7568 && TREE_CODE (tem) == COND_EXPR
7569 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7570 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7571 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7572 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7573 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7574 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7575 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7576 && (INTEGRAL_TYPE_P
7577 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7578 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7579 || flag_syntax_only))
7580 tem = build1 (code, type,
7581 build3 (COND_EXPR,
7582 TREE_TYPE (TREE_OPERAND
7583 (TREE_OPERAND (tem, 1), 0)),
7584 TREE_OPERAND (tem, 0),
7585 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7586 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7587 return tem;
7588 }
7589 else if (COMPARISON_CLASS_P (arg0))
7590 {
7591 if (TREE_CODE (type) == BOOLEAN_TYPE)
7592 {
7593 arg0 = copy_node (arg0);
7594 TREE_TYPE (arg0) = type;
7595 return arg0;
7596 }
7597 else if (TREE_CODE (type) != INTEGER_TYPE)
7598 return fold_build3 (COND_EXPR, type, arg0,
7599 fold_build1 (code, type,
7600 integer_one_node),
7601 fold_build1 (code, type,
7602 integer_zero_node));
7603 }
7604 }
7605
7606 switch (code)
7607 {
7608 case NOP_EXPR:
7609 case FLOAT_EXPR:
7610 case CONVERT_EXPR:
7611 case FIX_TRUNC_EXPR:
7612 if (TREE_TYPE (op0) == type)
7613 return op0;
7614
7615 /* If we have (type) (a CMP b) and type is an integral type, return
7616 a new expression involving the new type. */
7617 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7618 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7619 TREE_OPERAND (op0, 1));
7620
7621 /* Handle cases of two conversions in a row. */
7622 if (TREE_CODE (op0) == NOP_EXPR
7623 || TREE_CODE (op0) == CONVERT_EXPR)
7624 {
7625 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7626 tree inter_type = TREE_TYPE (op0);
7627 int inside_int = INTEGRAL_TYPE_P (inside_type);
7628 int inside_ptr = POINTER_TYPE_P (inside_type);
7629 int inside_float = FLOAT_TYPE_P (inside_type);
7630 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7631 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7632 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7633 int inter_int = INTEGRAL_TYPE_P (inter_type);
7634 int inter_ptr = POINTER_TYPE_P (inter_type);
7635 int inter_float = FLOAT_TYPE_P (inter_type);
7636 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7637 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7638 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7639 int final_int = INTEGRAL_TYPE_P (type);
7640 int final_ptr = POINTER_TYPE_P (type);
7641 int final_float = FLOAT_TYPE_P (type);
7642 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7643 unsigned int final_prec = TYPE_PRECISION (type);
7644 int final_unsignedp = TYPE_UNSIGNED (type);
7645
7646 /* In addition to the cases of two conversions in a row
7647 handled below, if we are converting something to its own
7648 type via an object of identical or wider precision, neither
7649 conversion is needed. */
7650 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7651 && (((inter_int || inter_ptr) && final_int)
7652 || (inter_float && final_float))
7653 && inter_prec >= final_prec)
7654 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7655
7656 /* Likewise, if the intermediate and final types are either both
7657 float or both integer, we don't need the middle conversion if
7658 it is wider than the final type and doesn't change the signedness
7659 (for integers). Avoid this if the final type is a pointer
7660 since then we sometimes need the inner conversion. Likewise if
7661 the outer has a precision not equal to the size of its mode. */
7662 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7663 || (inter_float && inside_float)
7664 || (inter_vec && inside_vec))
7665 && inter_prec >= inside_prec
7666 && (inter_float || inter_vec
7667 || inter_unsignedp == inside_unsignedp)
7668 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7669 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7670 && ! final_ptr
7671 && (! final_vec || inter_prec == inside_prec))
7672 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7673
7674 /* If we have a sign-extension of a zero-extended value, we can
7675 replace that by a single zero-extension. */
7676 if (inside_int && inter_int && final_int
7677 && inside_prec < inter_prec && inter_prec < final_prec
7678 && inside_unsignedp && !inter_unsignedp)
7679 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7680
7681 /* Two conversions in a row are not needed unless:
7682 - some conversion is floating-point (overstrict for now), or
7683 - some conversion is a vector (overstrict for now), or
7684 - the intermediate type is narrower than both initial and
7685 final, or
7686 - the intermediate type and innermost type differ in signedness,
7687 and the outermost type is wider than the intermediate, or
7688 - the initial type is a pointer type and the precisions of the
7689 intermediate and final types differ, or
7690 - the final type is a pointer type and the precisions of the
7691 initial and intermediate types differ, or
7692 - the final type is a pointer type and the initial type is not, or
7693 - the initial type is a pointer to an array and the final type
7694 is not. */
7695 if (! inside_float && ! inter_float && ! final_float
7696 && ! inside_vec && ! inter_vec && ! final_vec
7697 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7698 && ! (inside_int && inter_int
7699 && inter_unsignedp != inside_unsignedp
7700 && inter_prec < final_prec)
7701 && ((inter_unsignedp && inter_prec > inside_prec)
7702 == (final_unsignedp && final_prec > inter_prec))
7703 && ! (inside_ptr && inter_prec != final_prec)
7704 && ! (final_ptr && inside_prec != inter_prec)
7705 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7706 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7707 && final_ptr == inside_ptr
7708 && ! (inside_ptr
7709 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7710 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7711 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7712 }
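
      /* Two sketches of the rules above, assuming 16-bit short, 32-bit
         int and 64-bit long long:

           (short) (long long) i       becomes (short) i for int i,
                                       since the wider middle conversion
                                       is useless;
           (long long) (int) us        becomes (long long) us for
                                       unsigned short us, since a
                                       sign-extension of a zero-extended
                                       value is itself a zero-extension. */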
7713
7714 /* Handle (T *)&A.B.C for A being of type T and B and C
7715 living at offset zero. This occurs frequently in
7716 C++ upcasting and then accessing the base. */
7717 if (TREE_CODE (op0) == ADDR_EXPR
7718 && POINTER_TYPE_P (type)
7719 && handled_component_p (TREE_OPERAND (op0, 0)))
7720 {
7721 HOST_WIDE_INT bitsize, bitpos;
7722 tree offset;
7723 enum machine_mode mode;
7724 int unsignedp, volatilep;
7725 tree base = TREE_OPERAND (op0, 0);
7726 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7727 &mode, &unsignedp, &volatilep, false);
7728 /* If the reference was to a (constant) zero offset, we can use
7729 the address of the base if it has the same base type
7730 as the result type. */
7731 if (! offset && bitpos == 0
7732 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7733 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7734 return fold_convert (type, build_fold_addr_expr (base));
7735 }
7736
7737 if ((TREE_CODE (op0) == MODIFY_EXPR
7738 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7739 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7740 /* Detect assigning a bitfield. */
7741 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7742 && DECL_BIT_FIELD
7743 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7744 {
7745 /* Don't leave an assignment inside a conversion
7746 unless assigning a bitfield. */
7747 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7748 /* First do the assignment, then return converted constant. */
7749 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7750 TREE_NO_WARNING (tem) = 1;
7751 TREE_USED (tem) = 1;
7752 return tem;
7753 }
7754
7755 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7756 constant (if x has signed type, the sign bit cannot be set
7757 in c). This folds extension into the BIT_AND_EXPR. */
7758 if (INTEGRAL_TYPE_P (type)
7759 && TREE_CODE (type) != BOOLEAN_TYPE
7760 && TREE_CODE (op0) == BIT_AND_EXPR
7761 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7762 {
7763 tree and = op0;
7764 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7765 int change = 0;
7766
7767 if (TYPE_UNSIGNED (TREE_TYPE (and))
7768 || (TYPE_PRECISION (type)
7769 <= TYPE_PRECISION (TREE_TYPE (and))))
7770 change = 1;
7771 else if (TYPE_PRECISION (TREE_TYPE (and1))
7772 <= HOST_BITS_PER_WIDE_INT
7773 && host_integerp (and1, 1))
7774 {
7775 unsigned HOST_WIDE_INT cst;
7776
7777 cst = tree_low_cst (and1, 1);
7778 cst &= (HOST_WIDE_INT) -1
7779 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7780 change = (cst == 0);
7781 #ifdef LOAD_EXTEND_OP
7782 if (change
7783 && !flag_syntax_only
7784 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7785 == ZERO_EXTEND))
7786 {
7787 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7788 and0 = fold_convert (uns, and0);
7789 and1 = fold_convert (uns, and1);
7790 }
7791 #endif
7792 }
7793 if (change)
7794 {
7795 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7796 TREE_INT_CST_HIGH (and1), 0,
7797 TREE_OVERFLOW (and1));
7798 return fold_build2 (BIT_AND_EXPR, type,
7799 fold_convert (type, and0), tem);
7800 }
7801 }
7802
7803 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7804 T2 being pointers to types of the same size. */
7805 if (POINTER_TYPE_P (type)
7806 && BINARY_CLASS_P (arg0)
7807 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7808 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7809 {
7810 tree arg00 = TREE_OPERAND (arg0, 0);
7811 tree t0 = type;
7812 tree t1 = TREE_TYPE (arg00);
7813 tree tt0 = TREE_TYPE (t0);
7814 tree tt1 = TREE_TYPE (t1);
7815 tree s0 = TYPE_SIZE (tt0);
7816 tree s1 = TYPE_SIZE (tt1);
7817
7818 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7819 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7820 TREE_OPERAND (arg0, 1));
7821 }
7822
7823 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7824 of the same precision, and X has an integer type not narrower than
7825 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7826 if (INTEGRAL_TYPE_P (type)
7827 && TREE_CODE (op0) == BIT_NOT_EXPR
7828 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7829 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7830 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7831 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7832 {
7833 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7834 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7835 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7836 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7837 }
7838
7839 tem = fold_convert_const (code, type, arg0);
7840 return tem ? tem : NULL_TREE;
7841
7842 case VIEW_CONVERT_EXPR:
7843 if (TREE_TYPE (op0) == type)
7844 return op0;
7845 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7846 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7847 return fold_view_convert_expr (type, op0);
7848
7849 case NEGATE_EXPR:
7850 tem = fold_negate_expr (arg0);
7851 if (tem)
7852 return fold_convert (type, tem);
7853 return NULL_TREE;
7854
7855 case ABS_EXPR:
7856 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7857 return fold_abs_const (arg0, type);
7858 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7859 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7860 /* Convert fabs((double)float) into (double)fabsf(float). */
7861 else if (TREE_CODE (arg0) == NOP_EXPR
7862 && TREE_CODE (type) == REAL_TYPE)
7863 {
7864 tree targ0 = strip_float_extensions (arg0);
7865 if (targ0 != arg0)
7866 return fold_convert (type, fold_build1 (ABS_EXPR,
7867 TREE_TYPE (targ0),
7868 targ0));
7869 }
7870 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7871 else if (TREE_CODE (arg0) == ABS_EXPR)
7872 return arg0;
7873 else if (tree_expr_nonnegative_p (arg0))
7874 return arg0;
7875
7876 /* Strip sign ops from argument. */
7877 if (TREE_CODE (type) == REAL_TYPE)
7878 {
7879 tem = fold_strip_sign_ops (arg0);
7880 if (tem)
7881 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7882 }
7883 return NULL_TREE;
7884
7885 case CONJ_EXPR:
7886 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7887 return fold_convert (type, arg0);
7888 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7889 {
7890 tree itype = TREE_TYPE (type);
7891 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7892 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7893 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7894 }
7895 if (TREE_CODE (arg0) == COMPLEX_CST)
7896 {
7897 tree itype = TREE_TYPE (type);
7898 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7899 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7900 return build_complex (type, rpart, negate_expr (ipart));
7901 }
7902 if (TREE_CODE (arg0) == CONJ_EXPR)
7903 return fold_convert (type, TREE_OPERAND (arg0, 0));
7904 return NULL_TREE;
7905
7906 case BIT_NOT_EXPR:
7907 if (TREE_CODE (arg0) == INTEGER_CST)
7908 return fold_not_const (arg0, type);
7909 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7910 return TREE_OPERAND (arg0, 0);
7911 /* Convert ~ (-A) to A - 1. */
7912 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7913 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7914 build_int_cst (type, 1));
7915 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7916 else if (INTEGRAL_TYPE_P (type)
7917 && ((TREE_CODE (arg0) == MINUS_EXPR
7918 && integer_onep (TREE_OPERAND (arg0, 1)))
7919 || (TREE_CODE (arg0) == PLUS_EXPR
7920 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7921 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7922 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7923 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7924 && (tem = fold_unary (BIT_NOT_EXPR, type,
7925 fold_convert (type,
7926 TREE_OPERAND (arg0, 0)))))
7927 return fold_build2 (BIT_XOR_EXPR, type, tem,
7928 fold_convert (type, TREE_OPERAND (arg0, 1)));
7929 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7930 && (tem = fold_unary (BIT_NOT_EXPR, type,
7931 fold_convert (type,
7932 TREE_OPERAND (arg0, 1)))))
7933 return fold_build2 (BIT_XOR_EXPR, type,
7934 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7935
7936 return NULL_TREE;
7937
7938 case TRUTH_NOT_EXPR:
7939 /* The argument to invert_truthvalue must have Boolean type. */
7940 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7941 arg0 = fold_convert (boolean_type_node, arg0);
7942
7943 /* Note that the operand of this must be an int
7944 and its values must be 0 or 1.
7945 ("true" is a fixed value perhaps depending on the language,
7946 but we don't handle values other than 1 correctly yet.) */
7947 tem = fold_truth_not_expr (arg0);
7948 if (!tem)
7949 return NULL_TREE;
7950 return fold_convert (type, tem);
7951
7952 case REALPART_EXPR:
7953 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7954 return fold_convert (type, arg0);
7955 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7956 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7957 TREE_OPERAND (arg0, 1));
7958 if (TREE_CODE (arg0) == COMPLEX_CST)
7959 return fold_convert (type, TREE_REALPART (arg0));
7960 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7961 {
7962 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7963 tem = fold_build2 (TREE_CODE (arg0), itype,
7964 fold_build1 (REALPART_EXPR, itype,
7965 TREE_OPERAND (arg0, 0)),
7966 fold_build1 (REALPART_EXPR, itype,
7967 TREE_OPERAND (arg0, 1)));
7968 return fold_convert (type, tem);
7969 }
7970 if (TREE_CODE (arg0) == CONJ_EXPR)
7971 {
7972 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7973 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7974 return fold_convert (type, tem);
7975 }
7976 if (TREE_CODE (arg0) == CALL_EXPR)
7977 {
7978 tree fn = get_callee_fndecl (arg0);
7979 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7980 switch (DECL_FUNCTION_CODE (fn))
7981 {
7982 CASE_FLT_FN (BUILT_IN_CEXPI):
7983 fn = mathfn_built_in (type, BUILT_IN_COS);
7984 if (fn)
7985 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
7986 break;
7987
7988 default:
7989 break;
7990 }
7991 }
7992 return NULL_TREE;
7993
7994 case IMAGPART_EXPR:
7995 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7996 return fold_convert (type, integer_zero_node);
7997 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7998 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7999 TREE_OPERAND (arg0, 0));
8000 if (TREE_CODE (arg0) == COMPLEX_CST)
8001 return fold_convert (type, TREE_IMAGPART (arg0));
8002 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8003 {
8004 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8005 tem = fold_build2 (TREE_CODE (arg0), itype,
8006 fold_build1 (IMAGPART_EXPR, itype,
8007 TREE_OPERAND (arg0, 0)),
8008 fold_build1 (IMAGPART_EXPR, itype,
8009 TREE_OPERAND (arg0, 1)));
8010 return fold_convert (type, tem);
8011 }
8012 if (TREE_CODE (arg0) == CONJ_EXPR)
8013 {
8014 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8015 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8016 return fold_convert (type, negate_expr (tem));
8017 }
8018 if (TREE_CODE (arg0) == CALL_EXPR)
8019 {
8020 tree fn = get_callee_fndecl (arg0);
8021 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8022 switch (DECL_FUNCTION_CODE (fn))
8023 {
8024 CASE_FLT_FN (BUILT_IN_CEXPI):
8025 fn = mathfn_built_in (type, BUILT_IN_SIN);
8026 if (fn)
8027 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8028 break;
8029
8030 default:
8031 break;
8032 }
8033 }
8034 return NULL_TREE;
8035
8036 default:
8037 return NULL_TREE;
8038 } /* switch (code) */
8039 }
8040
8041 /* Fold a binary expression of code CODE and type TYPE with operands
8042 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8043 Return the folded expression if folding is successful. Otherwise,
8044 return NULL_TREE. */
8045
8046 static tree
8047 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8048 {
8049 enum tree_code compl_code;
8050
8051 if (code == MIN_EXPR)
8052 compl_code = MAX_EXPR;
8053 else if (code == MAX_EXPR)
8054 compl_code = MIN_EXPR;
8055 else
8056 gcc_unreachable ();
8057
8058 /* MIN (MAX (a, b), b) == b. */
8059 if (TREE_CODE (op0) == compl_code
8060 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8061 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8062
8063 /* MIN (MAX (b, a), b) == b. */
8064 if (TREE_CODE (op0) == compl_code
8065 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8066 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8067 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8068
8069 /* MIN (a, MAX (a, b)) == a. */
8070 if (TREE_CODE (op1) == compl_code
8071 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8072 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8073 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8074
8075 /* MIN (a, MAX (b, a)) == a. */
8076 if (TREE_CODE (op1) == compl_code
8077 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8078 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8079 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8080
8081 return NULL_TREE;
8082 }
8083
8084 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8085 by changing CODE to reduce the magnitude of constants involved in
8086 ARG0 of the comparison.
8087 Returns a canonicalized comparison tree if a simplification was
8088 possible, otherwise returns NULL_TREE.
8089 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8090 valid if signed overflow is undefined. */
8091
8092 static tree
8093 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8094 tree arg0, tree arg1,
8095 bool *strict_overflow_p)
8096 {
8097 enum tree_code code0 = TREE_CODE (arg0);
8098 tree t, cst0 = NULL_TREE;
8099 int sgn0;
8100 bool swap = false;
8101
8102 /* Match A +- CST code arg1 and CST code arg1. */
8103 if (!(((code0 == MINUS_EXPR
8104 || code0 == PLUS_EXPR)
8105 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8106 || code0 == INTEGER_CST))
8107 return NULL_TREE;
8108
8109 /* Identify the constant in arg0 and its sign. */
8110 if (code0 == INTEGER_CST)
8111 cst0 = arg0;
8112 else
8113 cst0 = TREE_OPERAND (arg0, 1);
8114 sgn0 = tree_int_cst_sgn (cst0);
8115
8116 /* Overflowed constants and zero will cause problems. */
8117 if (integer_zerop (cst0)
8118 || TREE_OVERFLOW (cst0))
8119 return NULL_TREE;
8120
8121 /* See if we can reduce the magnitude of the constant in
8122 arg0 by changing the comparison code. */
8123 if (code0 == INTEGER_CST)
8124 {
8125 /* CST <= arg1 -> CST-1 < arg1. */
8126 if (code == LE_EXPR && sgn0 == 1)
8127 code = LT_EXPR;
8128 /* -CST < arg1 -> -CST-1 <= arg1. */
8129 else if (code == LT_EXPR && sgn0 == -1)
8130 code = LE_EXPR;
8131 /* CST > arg1 -> CST-1 >= arg1. */
8132 else if (code == GT_EXPR && sgn0 == 1)
8133 code = GE_EXPR;
8134 /* -CST >= arg1 -> -CST-1 > arg1. */
8135 else if (code == GE_EXPR && sgn0 == -1)
8136 code = GT_EXPR;
8137 else
8138 return NULL_TREE;
8139 /* arg1 code' CST' might be more canonical. */
8140 swap = true;
8141 }
8142 else
8143 {
8144 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8145 if (code == LT_EXPR
8146 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8147 code = LE_EXPR;
8148 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8149 else if (code == GT_EXPR
8150 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8151 code = GE_EXPR;
8152 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8153 else if (code == LE_EXPR
8154 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8155 code = LT_EXPR;
8156 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8157 else if (code == GE_EXPR
8158 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8159 code = GT_EXPR;
8160 else
8161 return NULL_TREE;
8162 *strict_overflow_p = true;
8163 }
8164
8165 /* Now build the constant reduced in magnitude. */
8166 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8167 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8168 if (code0 != INTEGER_CST)
8169 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8170
8171 /* If swapping might yield a more canonical form, do so. */
8172 if (swap)
8173 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8174 else
8175 return fold_build2 (code, type, t, arg1);
8176 }
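
/* Two instances of the canonicalization above, for a signed variable X
   (sketches; the first rewrite is only valid because signed overflow
   is undefined):

     X + 2 > Y   becomes   X + 1 >= Y   constant reduced in magnitude
     3 <= Y      becomes   Y > 2        constant reduced and operands
                                        swapped into canonical order */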
8177
8178 /* Further canonicalize the comparison ARG0 CODE ARG1, whose type TYPE has
8179 undefined overflow. Try to decrease the magnitude of constants involved
8180 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8181 and put sole constants at the second argument position.
8182 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8183
8184 static tree
8185 maybe_canonicalize_comparison (enum tree_code code, tree type,
8186 tree arg0, tree arg1)
8187 {
8188 tree t;
8189 bool strict_overflow_p;
8190 const char * const warnmsg = G_("assuming signed overflow does not occur "
8191 "when reducing constant in comparison");
8192
8193 /* In principle pointers also have undefined overflow behavior,
8194 but that causes problems elsewhere. */
8195 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8196 || POINTER_TYPE_P (TREE_TYPE (arg0)))
8197 return NULL_TREE;
8198
8199 /* Try canonicalization by simplifying arg0. */
8200 strict_overflow_p = false;
8201 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8202 &strict_overflow_p);
8203 if (t)
8204 {
8205 if (strict_overflow_p)
8206 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8207 return t;
8208 }
8209
8210 /* Try canonicalization by simplifying arg1 using the swapped
8211 comparison. */
8212 code = swap_tree_comparison (code);
8213 strict_overflow_p = false;
8214 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8215 &strict_overflow_p);
8216 if (t && strict_overflow_p)
8217 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8218 return t;
8219 }
8220
8221 /* Subroutine of fold_binary. This routine performs all of the
8222 transformations that are common to the equality/inequality
8223 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8224 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8225 fold_binary itself should call fold_binary instead of this routine.
8226 Fold a comparison with tree code CODE and type TYPE with operands
8227 OP0 and OP1. Return the folded comparison or NULL_TREE. */
8228
8229 static tree
8230 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8231 {
8232 tree arg0, arg1, tem;
8233
8234 arg0 = op0;
8235 arg1 = op1;
8236
8237 STRIP_SIGN_NOPS (arg0);
8238 STRIP_SIGN_NOPS (arg1);
8239
8240 tem = fold_relational_const (code, type, arg0, arg1);
8241 if (tem != NULL_TREE)
8242 return tem;
8243
8244 /* If one arg is a real or integer constant, put it last. */
8245 if (tree_swap_operands_p (arg0, arg1, true))
8246 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8247
8248 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8249 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8250 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8251 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8252 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8253 && (TREE_CODE (arg1) == INTEGER_CST
8254 && !TREE_OVERFLOW (arg1)))
8255 {
8256 tree const1 = TREE_OPERAND (arg0, 1);
8257 tree const2 = arg1;
8258 tree variable = TREE_OPERAND (arg0, 0);
8259 tree lhs;
8260 int lhs_add;
8261 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8262
8263 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8264 TREE_TYPE (arg1), const2, const1);
8265
8266 /* If the constant operation overflowed this can be
8267 simplified as a comparison against INT_MAX/INT_MIN. */
8268 if (TREE_CODE (lhs) == INTEGER_CST
8269 && TREE_OVERFLOW (lhs))
8270 {
8271 int const1_sgn = tree_int_cst_sgn (const1);
8272 enum tree_code code2 = code;
8273
8274 /* Get the sign of the constant on the lhs if the
8275 operation were VARIABLE + CONST1. */
8276 if (TREE_CODE (arg0) == MINUS_EXPR)
8277 const1_sgn = -const1_sgn;
8278
8279 /* The sign of the constant determines if we overflowed
8280 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8281 Canonicalize to the INT_MIN overflow by swapping the comparison
8282 if necessary. */
8283 if (const1_sgn == -1)
8284 code2 = swap_tree_comparison (code);
8285
8286 /* We now can look at the canonicalized case
8287 VARIABLE + 1 CODE2 INT_MIN
8288 and decide on the result. */
8289 if (code2 == LT_EXPR
8290 || code2 == LE_EXPR
8291 || code2 == EQ_EXPR)
8292 return omit_one_operand (type, boolean_false_node, variable);
8293 else if (code2 == NE_EXPR
8294 || code2 == GE_EXPR
8295 || code2 == GT_EXPR)
8296 return omit_one_operand (type, boolean_true_node, variable);
8297 }
8298
8299 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8300 && (TREE_CODE (lhs) != INTEGER_CST
8301 || !TREE_OVERFLOW (lhs)))
8302 {
8303 fold_overflow_warning (("assuming signed overflow does not occur "
8304 "when changing X +- C1 cmp C2 to "
8305 "X cmp C1 +- C2"),
8306 WARN_STRICT_OVERFLOW_COMPARISON);
8307 return fold_build2 (code, type, variable, lhs);
8308 }
8309 }
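
  /* For example, with a signed int X (a sketch; the overflow warning
     above applies):

       X + 1 < 3        becomes   X < 2
       X - 1 > INT_MAX  becomes   0, since with undefined overflow
                                  X - 1 can never exceed INT_MAX. */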
8310
8311 /* For comparisons of pointers we can decompose it to a compile time
8312 comparison of the base objects and the offsets into the object.
8313 This requires at least one operand being an ADDR_EXPR to do more
8314 than the operand_equal_p test below. */
8315 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8316 && (TREE_CODE (arg0) == ADDR_EXPR
8317 || TREE_CODE (arg1) == ADDR_EXPR))
8318 {
8319 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8320 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8321 enum machine_mode mode;
8322 int volatilep, unsignedp;
8323 bool indirect_base0 = false;
8324
8325 /* Get base and offset for the access. Strip ADDR_EXPR for
8326 get_inner_reference, but put it back by stripping INDIRECT_REF
8327 off the base object if possible. */
8328 base0 = arg0;
8329 if (TREE_CODE (arg0) == ADDR_EXPR)
8330 {
8331 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8332 &bitsize, &bitpos0, &offset0, &mode,
8333 &unsignedp, &volatilep, false);
8334 if (TREE_CODE (base0) == INDIRECT_REF)
8335 base0 = TREE_OPERAND (base0, 0);
8336 else
8337 indirect_base0 = true;
8338 }
8339
8340 base1 = arg1;
8341 if (TREE_CODE (arg1) == ADDR_EXPR)
8342 {
8343 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8344 &bitsize, &bitpos1, &offset1, &mode,
8345 &unsignedp, &volatilep, false);
8346 /* Make base1 indirect or non-indirect to match what we
8347 did for base0. */
8348 if (TREE_CODE (base1) == INDIRECT_REF
8349 && !indirect_base0)
8350 base1 = TREE_OPERAND (base1, 0);
8351 else if (!indirect_base0)
8352 base1 = NULL_TREE;
8353 }
8354 else if (indirect_base0)
8355 base1 = NULL_TREE;
8356
8357 /* If we have equivalent bases we might be able to simplify. */
8358 if (base0 && base1
8359 && operand_equal_p (base0, base1, 0))
8360 {
8361 /* We can fold this expression to a constant if the non-constant
8362 offset parts are equal. */
8363 if (offset0 == offset1
8364 || (offset0 && offset1
8365 && operand_equal_p (offset0, offset1, 0)))
8366 {
8367 switch (code)
8368 {
8369 case EQ_EXPR:
8370 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8371 case NE_EXPR:
8372 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8373 case LT_EXPR:
8374 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8375 case LE_EXPR:
8376 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8377 case GE_EXPR:
8378 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8379 case GT_EXPR:
8380 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8381 default:;
8382 }
8383 }
8384 /* We can simplify the comparison to a comparison of the variable
8385 offset parts if the constant offset parts are equal.
8386 Be careful to use signed size type here because otherwise we
8387 mess with array offsets in the wrong way. This is possible
8388 because pointer arithmetic is restricted to remain within an
8389 object and overflow on pointer differences is undefined as of
8390 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8391 else if (bitpos0 == bitpos1)
8392 {
8393 tree signed_size_type_node;
8394 signed_size_type_node = signed_type_for (size_type_node);
8395
8396 /* By converting to the signed size type we cover middle-end pointer
8397 arithmetic, which operates on unsigned types as wide as the size
8398 type, as well as ARRAY_REF offsets, which are properly sign- or
8399 zero-extended from their own type when that is narrower than
8400 the size type. */
8401 if (offset0 == NULL_TREE)
8402 offset0 = build_int_cst (signed_size_type_node, 0);
8403 else
8404 offset0 = fold_convert (signed_size_type_node, offset0);
8405 if (offset1 == NULL_TREE)
8406 offset1 = build_int_cst (signed_size_type_node, 0);
8407 else
8408 offset1 = fold_convert (signed_size_type_node, offset1);
8409
8410 return fold_build2 (code, type, offset0, offset1);
8411 }
8412 }
8413 }
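
  /* For example (a sketch, assuming 32-bit int): given

       struct S { int a; int b; } s;

     the comparison &s.a < &s.b finds equal bases with bit positions 0
     and 32, and folds to 1 at compile time. */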
8414
8415 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8416 same object, then we can fold this to a comparison of the two offsets in
8417 signed size type. This is possible because pointer arithmetic is
8418 restricted to retain within an object and overflow on pointer differences
8419 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t.
8420
8421 We check flag_wrapv directly because pointer types are unsigned,
8422 and therefore TYPE_OVERFLOW_WRAPS returns true for them. That is
8423 normally what we want, to avoid certain odd overflow cases, but
8424 not here. */
8425 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8426 && !flag_wrapv
8427 && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0)))
8428 {
8429 tree base0, offset0, base1, offset1;
8430
8431 if (extract_array_ref (arg0, &base0, &offset0)
8432 && extract_array_ref (arg1, &base1, &offset1)
8433 && operand_equal_p (base0, base1, 0))
8434 {
8435 tree signed_size_type_node;
8436 signed_size_type_node = signed_type_for (size_type_node);
8437
8438 /* By converting to the signed size type we cover middle-end pointer
8439 arithmetic, which operates on unsigned types as wide as the size
8440 type, as well as ARRAY_REF offsets, which are properly sign- or
8441 zero-extended from their own type when that is narrower than
8442 the size type. */
8443 if (offset0 == NULL_TREE)
8444 offset0 = build_int_cst (signed_size_type_node, 0);
8445 else
8446 offset0 = fold_convert (signed_size_type_node, offset0);
8447 if (offset1 == NULL_TREE)
8448 offset1 = build_int_cst (signed_size_type_node, 0);
8449 else
8450 offset1 = fold_convert (signed_size_type_node, offset1);
8451
8452 return fold_build2 (code, type, offset0, offset1);
8453 }
8454 }
8455
8456 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8457 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8458 the resulting offset is smaller in absolute value than the
8459 original one. */
8460 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8461 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8462 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8463 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8464 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8465 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8466 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8467 {
8468 tree const1 = TREE_OPERAND (arg0, 1);
8469 tree const2 = TREE_OPERAND (arg1, 1);
8470 tree variable1 = TREE_OPERAND (arg0, 0);
8471 tree variable2 = TREE_OPERAND (arg1, 0);
8472 tree cst;
8473 const char * const warnmsg = G_("assuming signed overflow does not "
8474 "occur when combining constants around "
8475 "a comparison");
8476
8477 /* Put the constant on the side where it doesn't overflow and is
8478 of lower absolute value than before. */
8479 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8480 ? MINUS_EXPR : PLUS_EXPR,
8481 const2, const1, 0);
8482 if (!TREE_OVERFLOW (cst)
8483 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8484 {
8485 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8486 return fold_build2 (code, type,
8487 variable1,
8488 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8489 variable2, cst));
8490 }
8491
8492 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8493 ? MINUS_EXPR : PLUS_EXPR,
8494 const1, const2, 0);
8495 if (!TREE_OVERFLOW (cst)
8496 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8497 {
8498 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8499 return fold_build2 (code, type,
8500 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8501 variable1, cst),
8502 variable2);
8503 }
8504 }
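
  /* A concrete instance of the transformation above, for signed X and
     Y (a sketch):

       X + 10 < Y + 2   becomes   X < Y - 8

     where the combined constant 2 - 10 = -8 replaces the original pair
     of constants. */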
8505
8506 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8507 signed arithmetic case. That form is created by the compiler
8508 often enough for folding it to be of value. One example is in
8509 computing loop trip counts after Operator Strength Reduction. */
8510 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8511 && TREE_CODE (arg0) == MULT_EXPR
8512 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8513 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8514 && integer_zerop (arg1))
8515 {
8516 tree const1 = TREE_OPERAND (arg0, 1);
8517 tree const2 = arg1; /* zero */
8518 tree variable1 = TREE_OPERAND (arg0, 0);
8519 enum tree_code cmp_code = code;
8520
8521 gcc_assert (!integer_zerop (const1));
8522
8523 fold_overflow_warning (("assuming signed overflow does not occur when "
8524 "eliminating multiplication in comparison "
8525 "with zero"),
8526 WARN_STRICT_OVERFLOW_COMPARISON);
8527
8528 /* If const1 is negative we swap the sense of the comparison. */
8529 if (tree_int_cst_sgn (const1) < 0)
8530 cmp_code = swap_tree_comparison (cmp_code);
8531
8532 return fold_build2 (cmp_code, type, variable1, const2);
8533 }
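
  /* For example, with signed X (sketches; overflow is assumed
     undefined):

       X * 4 > 0    becomes   X > 0
       X * -4 > 0   becomes   X < 0   the negative factor flips the
                                      sense of the comparison. */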
8534
8535 tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
8536 if (tem)
8537 return tem;
8538
8539 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8540 {
8541 tree targ0 = strip_float_extensions (arg0);
8542 tree targ1 = strip_float_extensions (arg1);
8543 tree newtype = TREE_TYPE (targ0);
8544
8545 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8546 newtype = TREE_TYPE (targ1);
8547
8548 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8549 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8550 return fold_build2 (code, type, fold_convert (newtype, targ0),
8551 fold_convert (newtype, targ1));
8552
8553 /* (-a) CMP (-b) -> b CMP a */
8554 if (TREE_CODE (arg0) == NEGATE_EXPR
8555 && TREE_CODE (arg1) == NEGATE_EXPR)
8556 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8557 TREE_OPERAND (arg0, 0));
8558
8559 if (TREE_CODE (arg1) == REAL_CST)
8560 {
8561 REAL_VALUE_TYPE cst;
8562 cst = TREE_REAL_CST (arg1);
8563
8564 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8565 if (TREE_CODE (arg0) == NEGATE_EXPR)
8566 return fold_build2 (swap_tree_comparison (code), type,
8567 TREE_OPERAND (arg0, 0),
8568 build_real (TREE_TYPE (arg1),
8569 REAL_VALUE_NEGATE (cst)));
8570
8571 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8572 /* a CMP (-0) -> a CMP 0 */
8573 if (REAL_VALUE_MINUS_ZERO (cst))
8574 return fold_build2 (code, type, arg0,
8575 build_real (TREE_TYPE (arg1), dconst0));
8576
8577 /* x != NaN is always true, other ops are always false. */
8578 if (REAL_VALUE_ISNAN (cst)
8579 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8580 {
8581 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8582 return omit_one_operand (type, tem, arg0);
8583 }
8584
8585 /* Fold comparisons against infinity. */
8586 if (REAL_VALUE_ISINF (cst))
8587 {
8588 tem = fold_inf_compare (code, type, arg0, arg1);
8589 if (tem != NULL_TREE)
8590 return tem;
8591 }
8592 }
8593
8594 /* If this is a comparison of a real constant with a PLUS_EXPR
8595 or a MINUS_EXPR of a real constant, we can convert it into a
8596 comparison with a revised real constant, provided that no overflow
8597 occurs and unsafe_math_optimizations is enabled. */
8598 if (flag_unsafe_math_optimizations
8599 && TREE_CODE (arg1) == REAL_CST
8600 && (TREE_CODE (arg0) == PLUS_EXPR
8601 || TREE_CODE (arg0) == MINUS_EXPR)
8602 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8603 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8604 ? MINUS_EXPR : PLUS_EXPR,
8605 arg1, TREE_OPERAND (arg0, 1), 0))
8606 && !TREE_OVERFLOW (tem))
8607 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8608
8609 /* Likewise, we can simplify a comparison of a real constant with
8610 a MINUS_EXPR whose first operand is also a real constant, i.e.
8611 (c1 - x) < c2 becomes x > c1-c2. */
8612 if (flag_unsafe_math_optimizations
8613 && TREE_CODE (arg1) == REAL_CST
8614 && TREE_CODE (arg0) == MINUS_EXPR
8615 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8616 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8617 arg1, 0))
8618 && !TREE_OVERFLOW (tem))
8619 return fold_build2 (swap_tree_comparison (code), type,
8620 TREE_OPERAND (arg0, 1), tem);
8621
8622 /* Fold comparisons against built-in math functions. */
8623 if (TREE_CODE (arg1) == REAL_CST
8624 && flag_unsafe_math_optimizations
8625 && ! flag_errno_math)
8626 {
8627 enum built_in_function fcode = builtin_mathfn_code (arg0);
8628
8629 if (fcode != END_BUILTINS)
8630 {
8631 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8632 if (tem != NULL_TREE)
8633 return tem;
8634 }
8635 }
8636 }
8637
8638 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8639 if (TREE_CONSTANT (arg1)
8640 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8641 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8642 /* This optimization is invalid for ordered comparisons
8643 if CONST+INCR overflows or if foo+incr might overflow.
8644 This optimization is invalid for floating point due to rounding.
8645 For pointer types we assume overflow doesn't happen. */
8646 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8647 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8648 && (code == EQ_EXPR || code == NE_EXPR))))
8649 {
8650 tree varop, newconst;
8651
8652 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8653 {
8654 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8655 arg1, TREE_OPERAND (arg0, 1));
8656 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8657 TREE_OPERAND (arg0, 0),
8658 TREE_OPERAND (arg0, 1));
8659 }
8660 else
8661 {
8662 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8663 arg1, TREE_OPERAND (arg0, 1));
8664 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8665 TREE_OPERAND (arg0, 0),
8666 TREE_OPERAND (arg0, 1));
8667 }
8668
8669
8670 /* If VAROP is a reference to a bitfield, we must mask
8671 the constant by the width of the field. */
8672 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8673 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8674 && host_integerp (DECL_SIZE (TREE_OPERAND
8675 (TREE_OPERAND (varop, 0), 1)), 1))
8676 {
8677 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8678 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8679 tree folded_compare, shift;
8680
8681 /* First check whether the comparison would come out
8682 always the same. If we don't do that we would
8683 change the meaning with the masking. */
8684 folded_compare = fold_build2 (code, type,
8685 TREE_OPERAND (varop, 0), arg1);
8686 if (TREE_CODE (folded_compare) == INTEGER_CST)
8687 return omit_one_operand (type, folded_compare, varop);
8688
8689 shift = build_int_cst (NULL_TREE,
8690 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8691 shift = fold_convert (TREE_TYPE (varop), shift);
8692 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8693 newconst, shift);
8694 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8695 newconst, shift);
8696 }
8697
8698 return fold_build2 (code, type, varop, newconst);
8699 }
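  /* As an example of the transformation above: for an integral foo,
     foo++ == 5 becomes ++foo == 6, so the comparison sees the already
     incremented value; for a bitfield the new constant is first masked
     to the width of the field.  */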
8700
8701 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8702 && (TREE_CODE (arg0) == NOP_EXPR
8703 || TREE_CODE (arg0) == CONVERT_EXPR))
8704 {
8705 /* If we are widening one operand of an integer comparison,
8706 see if the other operand is similarly being widened. Perhaps we
8707 can do the comparison in the narrower type. */
8708 tem = fold_widened_comparison (code, type, arg0, arg1);
8709 if (tem)
8710 return tem;
8711
8712 /* Or if we are changing signedness. */
8713 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8714 if (tem)
8715 return tem;
8716 }
8717
8718 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8719 constant, we can simplify it. */
8720 if (TREE_CODE (arg1) == INTEGER_CST
8721 && (TREE_CODE (arg0) == MIN_EXPR
8722 || TREE_CODE (arg0) == MAX_EXPR)
8723 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8724 {
8725 tem = optimize_minmax_comparison (code, type, op0, op1);
8726 if (tem)
8727 return tem;
8728 }
8729
8730 /* Simplify comparison of something with itself. (For IEEE
8731 floating-point, we can only do some of these simplifications.) */
8732 if (operand_equal_p (arg0, arg1, 0))
8733 {
8734 switch (code)
8735 {
8736 case EQ_EXPR:
8737 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8738 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8739 return constant_boolean_node (1, type);
8740 break;
8741
8742 case GE_EXPR:
8743 case LE_EXPR:
8744 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8745 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8746 return constant_boolean_node (1, type);
8747 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8748
8749 case NE_EXPR:
8750 /* For NE, we can only do this simplification if integer
8751 or we don't honor IEEE floating point NaNs. */
8752 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8753 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8754 break;
8755 /* ... fall through ... */
8756 case GT_EXPR:
8757 case LT_EXPR:
8758 return constant_boolean_node (0, type);
8759 default:
8760 gcc_unreachable ();
8761 }
8762 }
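  /* E.g. x == x folds to 1 outright for integral x, while for IEEE
     floats honoring NaNs, x >= x is only reduced to x == x (both are 1
     exactly when x is not a NaN).  */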
8763
8764 /* If we are comparing an expression that just has comparisons
8765 of two integer values, arithmetic expressions of those comparisons,
8766 and constants, we can simplify it. There are only three cases
8767 to check: the two values can either be equal, the first can be
8768 greater, or the second can be greater. Fold the expression for
8769 those three values. Since each value must be 0 or 1, we have
8770 eight possibilities, each of which corresponds to the constant 0
8771 or 1 or one of the six possible comparisons.
8772
8773 This handles common cases like (a > b) == 0 but also handles
8774 expressions like ((x > y) - (y > x)) > 0, which supposedly
8775 occur in macroized code. */
8776
8777 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8778 {
8779 tree cval1 = 0, cval2 = 0;
8780 int save_p = 0;
8781
8782 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8783 /* Don't handle degenerate cases here; they should already
8784 have been handled anyway. */
8785 && cval1 != 0 && cval2 != 0
8786 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8787 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8788 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8789 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8790 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8791 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8792 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8793 {
8794 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8795 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8796
8797 /* We can't just pass T to eval_subst in case cval1 or cval2
8798 was the same as ARG1. */
8799
8800 tree high_result
8801 = fold_build2 (code, type,
8802 eval_subst (arg0, cval1, maxval,
8803 cval2, minval),
8804 arg1);
8805 tree equal_result
8806 = fold_build2 (code, type,
8807 eval_subst (arg0, cval1, maxval,
8808 cval2, maxval),
8809 arg1);
8810 tree low_result
8811 = fold_build2 (code, type,
8812 eval_subst (arg0, cval1, minval,
8813 cval2, maxval),
8814 arg1);
8815
8816 /* All three of these results should be 0 or 1. Confirm they are.
8817 Then use those values to select the proper code to use. */
8818
8819 if (TREE_CODE (high_result) == INTEGER_CST
8820 && TREE_CODE (equal_result) == INTEGER_CST
8821 && TREE_CODE (low_result) == INTEGER_CST)
8822 {
8823 /* Make a 3-bit mask with the high-order bit being the
8824 value for `>', the next for `=', and the low for `<'. */
8825 switch ((integer_onep (high_result) * 4)
8826 + (integer_onep (equal_result) * 2)
8827 + integer_onep (low_result))
8828 {
8829 case 0:
8830 /* Always false. */
8831 return omit_one_operand (type, integer_zero_node, arg0);
8832 case 1:
8833 code = LT_EXPR;
8834 break;
8835 case 2:
8836 code = EQ_EXPR;
8837 break;
8838 case 3:
8839 code = LE_EXPR;
8840 break;
8841 case 4:
8842 code = GT_EXPR;
8843 break;
8844 case 5:
8845 code = NE_EXPR;
8846 break;
8847 case 6:
8848 code = GE_EXPR;
8849 break;
8850 case 7:
8851 /* Always true. */
8852 return omit_one_operand (type, integer_one_node, arg0);
8853 }
8854
8855 if (save_p)
8856 return save_expr (build2 (code, type, cval1, cval2));
8857 return fold_build2 (code, type, cval1, cval2);
8858 }
8859 }
8860 }
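  /* A worked instance of the scheme above: for ((x > y) - (y > x)) > 0
     the three trial foldings yield high_result = 1, equal_result = 0
     and low_result = 0, i.e. mask 4, so the expression reduces to
     x > y.  */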
8861
8862 /* Fold a comparison of the address of COMPONENT_REFs with the same
8863 type and component to a comparison of the address of the base
8864 object. In short, &x->a OP &y->a to x OP y and
8865 &x->a OP &y.a to x OP &y. */
8866 if (TREE_CODE (arg0) == ADDR_EXPR
8867 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8868 && TREE_CODE (arg1) == ADDR_EXPR
8869 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8870 {
8871 tree cref0 = TREE_OPERAND (arg0, 0);
8872 tree cref1 = TREE_OPERAND (arg1, 0);
8873 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8874 {
8875 tree op0 = TREE_OPERAND (cref0, 0);
8876 tree op1 = TREE_OPERAND (cref1, 0);
8877 return fold_build2 (code, type,
8878 build_fold_addr_expr (op0),
8879 build_fold_addr_expr (op1));
8880 }
8881 }
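  /* For example, &p->f == &q->f becomes p == q, and &p->f == &s.f
     becomes p == &s, provided both references name the same
     FIELD_DECL.  */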
8882
8883 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8884 into a single range test. */
8885 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8886 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8887 && TREE_CODE (arg1) == INTEGER_CST
8888 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8889 && !integer_zerop (TREE_OPERAND (arg0, 1))
8890 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8891 && !TREE_OVERFLOW (arg1))
8892 {
8893 tem = fold_div_compare (code, type, arg0, arg1);
8894 if (tem != NULL_TREE)
8895 return tem;
8896 }
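  /* E.g. for unsigned x, x / 4 == 3 is turned by fold_div_compare into
     a range test equivalent to 12 <= x && x <= 15.  */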
8897
8898 /* Fold ~X op ~Y as Y op X. */
8899 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8900 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8901 {
8902 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8903 return fold_build2 (code, type,
8904 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
8905 TREE_OPERAND (arg0, 0));
8906 }
8907
8908 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
8909 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8910 && TREE_CODE (arg1) == INTEGER_CST)
8911 {
8912 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8913 return fold_build2 (swap_tree_comparison (code), type,
8914 TREE_OPERAND (arg0, 0),
8915 fold_build1 (BIT_NOT_EXPR, cmp_type,
8916 fold_convert (cmp_type, arg1)));
8917 }
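  /* E.g. ~x < 3 becomes x > ~3, i.e. x > -4 for two's complement
     signed x, because BIT_NOT_EXPR reverses the ordering.  */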
8918
8919 return NULL_TREE;
8920 }
8921
8922
8923 /* Subroutine of fold_binary. Optimize complex multiplications of the
8924 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8925 argument EXPR represents the expression "z" of type TYPE. */
8926
8927 static tree
8928 fold_mult_zconjz (tree type, tree expr)
8929 {
8930 tree itype = TREE_TYPE (type);
8931 tree rpart, ipart, tem;
8932
8933 if (TREE_CODE (expr) == COMPLEX_EXPR)
8934 {
8935 rpart = TREE_OPERAND (expr, 0);
8936 ipart = TREE_OPERAND (expr, 1);
8937 }
8938 else if (TREE_CODE (expr) == COMPLEX_CST)
8939 {
8940 rpart = TREE_REALPART (expr);
8941 ipart = TREE_IMAGPART (expr);
8942 }
8943 else
8944 {
8945 expr = save_expr (expr);
8946 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8947 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8948 }
8949
8950 rpart = save_expr (rpart);
8951 ipart = save_expr (ipart);
8952 tem = fold_build2 (PLUS_EXPR, itype,
8953 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8954 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8955 return fold_build2 (COMPLEX_EXPR, type, tem,
8956 fold_convert (itype, integer_zero_node));
8957 }
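/* For instance, with z = a + b*i the routine above rewrites z * conj(z)
   as (a*a + b*b) + 0*i, wrapping the parts in save_expr so that a and b
   are only evaluated once.  */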
8958
8959
8960 /* Fold a binary expression of code CODE and type TYPE with operands
8961 OP0 and OP1. Return the folded expression if folding is
8962 successful. Otherwise, return NULL_TREE. */
8963
8964 tree
8965 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8966 {
8967 enum tree_code_class kind = TREE_CODE_CLASS (code);
8968 tree arg0, arg1, tem;
8969 tree t1 = NULL_TREE;
8970 bool strict_overflow_p;
8971
8972 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
8973 || IS_GIMPLE_STMT_CODE_CLASS (kind))
8974 && TREE_CODE_LENGTH (code) == 2
8975 && op0 != NULL_TREE
8976 && op1 != NULL_TREE);
8977
8978 arg0 = op0;
8979 arg1 = op1;
8980
8981 /* Strip any conversions that don't change the mode. This is
8982 safe for every expression, except for a comparison expression
8983 because its signedness is derived from its operands. So, in
8984 the latter case, only strip conversions that don't change the
8985 signedness.
8986
8987 Note that this is done as an internal manipulation within the
8988 constant folder, in order to find the simplest representation
8989 of the arguments so that their form can be studied. In any
8990 cases, the appropriate type conversions should be put back in
8991 the tree that will get out of the constant folder. */
8992
8993 if (kind == tcc_comparison)
8994 {
8995 STRIP_SIGN_NOPS (arg0);
8996 STRIP_SIGN_NOPS (arg1);
8997 }
8998 else
8999 {
9000 STRIP_NOPS (arg0);
9001 STRIP_NOPS (arg1);
9002 }
9003
9004 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9005 constant but we can't do arithmetic on them. */
9006 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9007 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9008 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9009 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9010 {
9011 if (kind == tcc_binary)
9012 tem = const_binop (code, arg0, arg1, 0);
9013 else if (kind == tcc_comparison)
9014 tem = fold_relational_const (code, type, arg0, arg1);
9015 else
9016 tem = NULL_TREE;
9017
9018 if (tem != NULL_TREE)
9019 {
9020 if (TREE_TYPE (tem) != type)
9021 tem = fold_convert (type, tem);
9022 return tem;
9023 }
9024 }
9025
9026 /* If this is a commutative operation, and ARG0 is a constant, move it
9027 to ARG1 to reduce the number of tests below. */
9028 if (commutative_tree_code (code)
9029 && tree_swap_operands_p (arg0, arg1, true))
9030 return fold_build2 (code, type, op1, op0);
9031
9032 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9033
9034 First check for cases where an arithmetic operation is applied to a
9035 compound, conditional, or comparison operation. Push the arithmetic
9036 operation inside the compound or conditional to see if any folding
9037 can then be done. Convert comparison to conditional for this purpose.
9038 This also optimizes non-constant cases that used to be done in
9039 expand_expr.
9040
9041 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9042 one of the operands is a comparison and the other is a comparison, a
9043 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9044 code below would make the expression more complex. Change it to a
9045 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9046 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9047
9048 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9049 || code == EQ_EXPR || code == NE_EXPR)
9050 && ((truth_value_p (TREE_CODE (arg0))
9051 && (truth_value_p (TREE_CODE (arg1))
9052 || (TREE_CODE (arg1) == BIT_AND_EXPR
9053 && integer_onep (TREE_OPERAND (arg1, 1)))))
9054 || (truth_value_p (TREE_CODE (arg1))
9055 && (truth_value_p (TREE_CODE (arg0))
9056 || (TREE_CODE (arg0) == BIT_AND_EXPR
9057 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9058 {
9059 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9060 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9061 : TRUTH_XOR_EXPR,
9062 boolean_type_node,
9063 fold_convert (boolean_type_node, arg0),
9064 fold_convert (boolean_type_node, arg1));
9065
9066 if (code == EQ_EXPR)
9067 tem = invert_truthvalue (tem);
9068
9069 return fold_convert (type, tem);
9070 }
9071
9072 if (TREE_CODE_CLASS (code) == tcc_binary
9073 || TREE_CODE_CLASS (code) == tcc_comparison)
9074 {
9075 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9076 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9077 fold_build2 (code, type,
9078 TREE_OPERAND (arg0, 1), op1));
9079 if (TREE_CODE (arg1) == COMPOUND_EXPR
9080 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9081 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9082 fold_build2 (code, type,
9083 op0, TREE_OPERAND (arg1, 1)));
9084
9085 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9086 {
9087 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9088 arg0, arg1,
9089 /*cond_first_p=*/1);
9090 if (tem != NULL_TREE)
9091 return tem;
9092 }
9093
9094 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9095 {
9096 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9097 arg1, arg0,
9098 /*cond_first_p=*/0);
9099 if (tem != NULL_TREE)
9100 return tem;
9101 }
9102 }
9103
9104 switch (code)
9105 {
9106 case PLUS_EXPR:
9107 /* A + (-B) -> A - B */
9108 if (TREE_CODE (arg1) == NEGATE_EXPR)
9109 return fold_build2 (MINUS_EXPR, type,
9110 fold_convert (type, arg0),
9111 fold_convert (type, TREE_OPERAND (arg1, 0)));
9112 /* (-A) + B -> B - A */
9113 if (TREE_CODE (arg0) == NEGATE_EXPR
9114 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9115 return fold_build2 (MINUS_EXPR, type,
9116 fold_convert (type, arg1),
9117 fold_convert (type, TREE_OPERAND (arg0, 0)));
9118 /* Convert ~A + 1 to -A. */
9119 if (INTEGRAL_TYPE_P (type)
9120 && TREE_CODE (arg0) == BIT_NOT_EXPR
9121 && integer_onep (arg1))
9122 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
9123
9124 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9125 same or one. */
9126 if ((TREE_CODE (arg0) == MULT_EXPR
9127 || TREE_CODE (arg1) == MULT_EXPR)
9128 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9129 {
9130 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9131 if (tem)
9132 return tem;
9133 }
9134
9135 if (! FLOAT_TYPE_P (type))
9136 {
9137 if (integer_zerop (arg1))
9138 return non_lvalue (fold_convert (type, arg0));
9139
9140 /* ~X + X is -1. */
9141 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9142 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9143 && !TYPE_OVERFLOW_TRAPS (type))
9144 {
9145 t1 = build_int_cst_type (type, -1);
9146 return omit_one_operand (type, t1, arg1);
9147 }
9148
9149 /* X + ~X is -1. */
9150 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9151 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9152 && !TYPE_OVERFLOW_TRAPS (type))
9153 {
9154 t1 = build_int_cst_type (type, -1);
9155 return omit_one_operand (type, t1, arg0);
9156 }
9157
9158 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9159 with a constant, and the two constants have no bits in common,
9160 we should treat this as a BIT_IOR_EXPR since this may produce more
9161 simplifications. */
9162 if (TREE_CODE (arg0) == BIT_AND_EXPR
9163 && TREE_CODE (arg1) == BIT_AND_EXPR
9164 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9165 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9166 && integer_zerop (const_binop (BIT_AND_EXPR,
9167 TREE_OPERAND (arg0, 1),
9168 TREE_OPERAND (arg1, 1), 0)))
9169 {
9170 code = BIT_IOR_EXPR;
9171 goto bit_ior;
9172 }
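	  /* E.g. (x & 0xF0) + (y & 0x0F) cannot carry between the two
	     terms, so it is handled as (x & 0xF0) | (y & 0x0F).  */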
9173
9174 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9175 (plus (plus (mult) (mult)) (foo)) so that we can
9176 take advantage of the factoring cases below. */
9177 if (((TREE_CODE (arg0) == PLUS_EXPR
9178 || TREE_CODE (arg0) == MINUS_EXPR)
9179 && TREE_CODE (arg1) == MULT_EXPR)
9180 || ((TREE_CODE (arg1) == PLUS_EXPR
9181 || TREE_CODE (arg1) == MINUS_EXPR)
9182 && TREE_CODE (arg0) == MULT_EXPR))
9183 {
9184 tree parg0, parg1, parg, marg;
9185 enum tree_code pcode;
9186
9187 if (TREE_CODE (arg1) == MULT_EXPR)
9188 parg = arg0, marg = arg1;
9189 else
9190 parg = arg1, marg = arg0;
9191 pcode = TREE_CODE (parg);
9192 parg0 = TREE_OPERAND (parg, 0);
9193 parg1 = TREE_OPERAND (parg, 1);
9194 STRIP_NOPS (parg0);
9195 STRIP_NOPS (parg1);
9196
9197 if (TREE_CODE (parg0) == MULT_EXPR
9198 && TREE_CODE (parg1) != MULT_EXPR)
9199 return fold_build2 (pcode, type,
9200 fold_build2 (PLUS_EXPR, type,
9201 fold_convert (type, parg0),
9202 fold_convert (type, marg)),
9203 fold_convert (type, parg1));
9204 if (TREE_CODE (parg0) != MULT_EXPR
9205 && TREE_CODE (parg1) == MULT_EXPR)
9206 return fold_build2 (PLUS_EXPR, type,
9207 fold_convert (type, parg0),
9208 fold_build2 (pcode, type,
9209 fold_convert (type, marg),
9210 fold_convert (type,
9211 parg1)));
9212 }
9213
9214 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
9215 of the array. The loop optimizer sometimes produces this type of
9216 expression. */
9217 if (TREE_CODE (arg0) == ADDR_EXPR)
9218 {
9219 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
9220 if (tem)
9221 return fold_convert (type, tem);
9222 }
9223 else if (TREE_CODE (arg1) == ADDR_EXPR)
9224 {
9225 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
9226 if (tem)
9227 return fold_convert (type, tem);
9228 }
9229 }
9230 else
9231 {
9232 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9233 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9234 return non_lvalue (fold_convert (type, arg0));
9235
9236 /* Likewise if the operands are reversed. */
9237 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9238 return non_lvalue (fold_convert (type, arg1));
9239
9240 /* Convert X + -C into X - C. */
9241 if (TREE_CODE (arg1) == REAL_CST
9242 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9243 {
9244 tem = fold_negate_const (arg1, type);
9245 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9246 return fold_build2 (MINUS_EXPR, type,
9247 fold_convert (type, arg0),
9248 fold_convert (type, tem));
9249 }
9250
9251 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9252 to __complex__ ( x, y ). This is not the same for SNaNs or
9253 if signed zeros are involved. */
9254 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9255 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9256 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9257 {
9258 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9259 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9260 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9261 bool arg0rz = false, arg0iz = false;
9262 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9263 || (arg0i && (arg0iz = real_zerop (arg0i))))
9264 {
9265 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9266 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9267 if (arg0rz && arg1i && real_zerop (arg1i))
9268 {
9269 tree rp = arg1r ? arg1r
9270 : build1 (REALPART_EXPR, rtype, arg1);
9271 tree ip = arg0i ? arg0i
9272 : build1 (IMAGPART_EXPR, rtype, arg0);
9273 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9274 }
9275 else if (arg0iz && arg1r && real_zerop (arg1r))
9276 {
9277 tree rp = arg0r ? arg0r
9278 : build1 (REALPART_EXPR, rtype, arg0);
9279 tree ip = arg1i ? arg1i
9280 : build1 (IMAGPART_EXPR, rtype, arg1);
9281 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9282 }
9283 }
9284 }
9285
9286 if (flag_unsafe_math_optimizations
9287 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9288 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9289 && (tem = distribute_real_division (code, type, arg0, arg1)))
9290 return tem;
9291
9292 /* Convert x+x into x*2.0. */
9293 if (operand_equal_p (arg0, arg1, 0)
9294 && SCALAR_FLOAT_TYPE_P (type))
9295 return fold_build2 (MULT_EXPR, type, arg0,
9296 build_real (type, dconst2));
9297
9298 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
9299 if (flag_unsafe_math_optimizations
9300 && TREE_CODE (arg1) == PLUS_EXPR
9301 && TREE_CODE (arg0) != MULT_EXPR)
9302 {
9303 tree tree10 = TREE_OPERAND (arg1, 0);
9304 tree tree11 = TREE_OPERAND (arg1, 1);
9305 if (TREE_CODE (tree11) == MULT_EXPR
9306 && TREE_CODE (tree10) == MULT_EXPR)
9307 {
9308 tree tree0;
9309 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9310 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9311 }
9312 }
9313 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
9314 if (flag_unsafe_math_optimizations
9315 && TREE_CODE (arg0) == PLUS_EXPR
9316 && TREE_CODE (arg1) != MULT_EXPR)
9317 {
9318 tree tree00 = TREE_OPERAND (arg0, 0);
9319 tree tree01 = TREE_OPERAND (arg0, 1);
9320 if (TREE_CODE (tree01) == MULT_EXPR
9321 && TREE_CODE (tree00) == MULT_EXPR)
9322 {
9323 tree tree0;
9324 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9325 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9326 }
9327 }
9328 }
9329
9330 bit_rotate:
9331 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9332 is a rotate of A by C1 bits. */
9333 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9334 is a rotate of A by B bits. */
9335 {
9336 enum tree_code code0, code1;
9337 code0 = TREE_CODE (arg0);
9338 code1 = TREE_CODE (arg1);
9339 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9340 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9341 && operand_equal_p (TREE_OPERAND (arg0, 0),
9342 TREE_OPERAND (arg1, 0), 0)
9343 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9344 {
9345 tree tree01, tree11;
9346 enum tree_code code01, code11;
9347
9348 tree01 = TREE_OPERAND (arg0, 1);
9349 tree11 = TREE_OPERAND (arg1, 1);
9350 STRIP_NOPS (tree01);
9351 STRIP_NOPS (tree11);
9352 code01 = TREE_CODE (tree01);
9353 code11 = TREE_CODE (tree11);
9354 if (code01 == INTEGER_CST
9355 && code11 == INTEGER_CST
9356 && TREE_INT_CST_HIGH (tree01) == 0
9357 && TREE_INT_CST_HIGH (tree11) == 0
9358 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9359 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9360 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9361 code0 == LSHIFT_EXPR ? tree01 : tree11);
9362 else if (code11 == MINUS_EXPR)
9363 {
9364 tree tree110, tree111;
9365 tree110 = TREE_OPERAND (tree11, 0);
9366 tree111 = TREE_OPERAND (tree11, 1);
9367 STRIP_NOPS (tree110);
9368 STRIP_NOPS (tree111);
9369 if (TREE_CODE (tree110) == INTEGER_CST
9370 && 0 == compare_tree_int (tree110,
9371 TYPE_PRECISION
9372 (TREE_TYPE (TREE_OPERAND
9373 (arg0, 0))))
9374 && operand_equal_p (tree01, tree111, 0))
9375 return build2 ((code0 == LSHIFT_EXPR
9376 ? LROTATE_EXPR
9377 : RROTATE_EXPR),
9378 type, TREE_OPERAND (arg0, 0), tree01);
9379 }
9380 else if (code01 == MINUS_EXPR)
9381 {
9382 tree tree010, tree011;
9383 tree010 = TREE_OPERAND (tree01, 0);
9384 tree011 = TREE_OPERAND (tree01, 1);
9385 STRIP_NOPS (tree010);
9386 STRIP_NOPS (tree011);
9387 if (TREE_CODE (tree010) == INTEGER_CST
9388 && 0 == compare_tree_int (tree010,
9389 TYPE_PRECISION
9390 (TREE_TYPE (TREE_OPERAND
9391 (arg0, 0))))
9392 && operand_equal_p (tree11, tree011, 0))
9393 return build2 ((code0 != LSHIFT_EXPR
9394 ? LROTATE_EXPR
9395 : RROTATE_EXPR),
9396 type, TREE_OPERAND (arg0, 0), tree11);
9397 }
9398 }
9399 }
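    /* E.g. for 32-bit unsigned x, (x << 3) + (x >> 29) matches the
       first pattern (3 + 29 == 32) and becomes a left rotate of x by 3,
       while (x << n) + (x >> (32 - n)) matches the second.  */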
9400
9401 associate:
9402 /* In most languages, we can't associate operations on floats through
9403 parentheses. Rather than remember where the parentheses were, we
9404 don't associate floats at all, unless the user has specified
9405 -funsafe-math-optimizations. */
9406
9407 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9408 {
9409 tree var0, con0, lit0, minus_lit0;
9410 tree var1, con1, lit1, minus_lit1;
9411 bool ok = true;
9412
9413 /* Split both trees into variables, constants, and literals. Then
9414 associate each group together, the constants with literals,
9415 then the result with variables. This increases the chances of
9416 literals being recombined later and of generating relocatable
9417 expressions for the sum of a constant and literal. */
9418 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9419 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9420 code == MINUS_EXPR);
9421
9422 /* With undefined overflow we can only associate constants
9423 with one variable. */
9424 if ((POINTER_TYPE_P (type)
9425 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9426 && var0 && var1)
9427 {
9428 tree tmp0 = var0;
9429 tree tmp1 = var1;
9430
9431 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9432 tmp0 = TREE_OPERAND (tmp0, 0);
9433 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9434 tmp1 = TREE_OPERAND (tmp1, 0);
9435 /* The only case we can still associate with two variables
9436 is if they are the same, modulo negation. */
9437 if (!operand_equal_p (tmp0, tmp1, 0))
9438 ok = false;
9439 }
9440
9441 /* Only do something if we found more than two objects. Otherwise,
9442 nothing has changed and we risk infinite recursion. */
9443 if (ok
9444 && (2 < ((var0 != 0) + (var1 != 0)
9445 + (con0 != 0) + (con1 != 0)
9446 + (lit0 != 0) + (lit1 != 0)
9447 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9448 {
9449 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9450 if (code == MINUS_EXPR)
9451 code = PLUS_EXPR;
9452
9453 var0 = associate_trees (var0, var1, code, type);
9454 con0 = associate_trees (con0, con1, code, type);
9455 lit0 = associate_trees (lit0, lit1, code, type);
9456 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9457
9458 /* Preserve the MINUS_EXPR if the negative part of the literal is
9459 greater than the positive part. Otherwise, the multiplicative
9460 folding code (i.e. extract_muldiv) may be fooled in case
9461 unsigned constants are subtracted, like in the following
9462 example: ((X*2 + 4) - 8U)/2. */
9463 if (minus_lit0 && lit0)
9464 {
9465 if (TREE_CODE (lit0) == INTEGER_CST
9466 && TREE_CODE (minus_lit0) == INTEGER_CST
9467 && tree_int_cst_lt (lit0, minus_lit0))
9468 {
9469 minus_lit0 = associate_trees (minus_lit0, lit0,
9470 MINUS_EXPR, type);
9471 lit0 = 0;
9472 }
9473 else
9474 {
9475 lit0 = associate_trees (lit0, minus_lit0,
9476 MINUS_EXPR, type);
9477 minus_lit0 = 0;
9478 }
9479 }
9480 if (minus_lit0)
9481 {
9482 if (con0 == 0)
9483 return fold_convert (type,
9484 associate_trees (var0, minus_lit0,
9485 MINUS_EXPR, type));
9486 else
9487 {
9488 con0 = associate_trees (con0, minus_lit0,
9489 MINUS_EXPR, type);
9490 return fold_convert (type,
9491 associate_trees (var0, con0,
9492 PLUS_EXPR, type));
9493 }
9494 }
9495
9496 con0 = associate_trees (con0, lit0, code, type);
9497 return fold_convert (type, associate_trees (var0, con0,
9498 code, type));
9499 }
9500 }
9501
9502 return NULL_TREE;
9503
9504 case MINUS_EXPR:
9505 /* A - (-B) -> A + B */
9506 if (TREE_CODE (arg1) == NEGATE_EXPR)
9507 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
9508 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9509 if (TREE_CODE (arg0) == NEGATE_EXPR
9510 && (FLOAT_TYPE_P (type)
9511 || INTEGRAL_TYPE_P (type))
9512 && negate_expr_p (arg1)
9513 && reorder_operands_p (arg0, arg1))
9514 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
9515 TREE_OPERAND (arg0, 0));
9516 /* Convert -A - 1 to ~A. */
9517 if (INTEGRAL_TYPE_P (type)
9518 && TREE_CODE (arg0) == NEGATE_EXPR
9519 && integer_onep (arg1)
9520 && !TYPE_OVERFLOW_TRAPS (type))
9521 return fold_build1 (BIT_NOT_EXPR, type,
9522 fold_convert (type, TREE_OPERAND (arg0, 0)));
9523
9524 /* Convert -1 - A to ~A. */
9525 if (INTEGRAL_TYPE_P (type)
9526 && integer_all_onesp (arg0))
9527 return fold_build1 (BIT_NOT_EXPR, type, op1);
9528
9529 if (! FLOAT_TYPE_P (type))
9530 {
9531 if (integer_zerop (arg0))
9532 return negate_expr (fold_convert (type, arg1));
9533 if (integer_zerop (arg1))
9534 return non_lvalue (fold_convert (type, arg0));
9535
9536 /* Fold A - (A & B) into ~B & A. */
9537 if (!TREE_SIDE_EFFECTS (arg0)
9538 && TREE_CODE (arg1) == BIT_AND_EXPR)
9539 {
9540 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9541 return fold_build2 (BIT_AND_EXPR, type,
9542 fold_build1 (BIT_NOT_EXPR, type,
9543 TREE_OPERAND (arg1, 0)),
9544 arg0);
9545 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9546 return fold_build2 (BIT_AND_EXPR, type,
9547 fold_build1 (BIT_NOT_EXPR, type,
9548 TREE_OPERAND (arg1, 1)),
9549 arg0);
9550 }
9551
9552 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9553 any power of 2 minus 1. */
9554 if (TREE_CODE (arg0) == BIT_AND_EXPR
9555 && TREE_CODE (arg1) == BIT_AND_EXPR
9556 && operand_equal_p (TREE_OPERAND (arg0, 0),
9557 TREE_OPERAND (arg1, 0), 0))
9558 {
9559 tree mask0 = TREE_OPERAND (arg0, 1);
9560 tree mask1 = TREE_OPERAND (arg1, 1);
9561 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9562
9563 if (operand_equal_p (tem, mask1, 0))
9564 {
9565 tem = fold_build2 (BIT_XOR_EXPR, type,
9566 TREE_OPERAND (arg0, 0), mask1);
9567 return fold_build2 (MINUS_EXPR, type, tem, mask1);
9568 }
9569 }
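	  /* E.g. with B == 7, a power of 2 minus 1, (x & ~7) - (x & 7)
	     becomes (x ^ 7) - 7; both compute (x & ~7) minus the low
	     three bits of x.  */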
9570 }
9571
9572 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9573 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9574 return non_lvalue (fold_convert (type, arg0));
9575
9576 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9577 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9578 (-ARG1 + ARG0) reduces to -ARG1. */
9579 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9580 return negate_expr (fold_convert (type, arg1));
9581
9582 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9583 __complex__ ( x, -y ). This is not the same for SNaNs or if
9584 signed zeros are involved. */
9585 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9586 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9587 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9588 {
9589 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9590 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9591 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9592 bool arg0rz = false, arg0iz = false;
9593 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9594 || (arg0i && (arg0iz = real_zerop (arg0i))))
9595 {
9596 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9597 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9598 if (arg0rz && arg1i && real_zerop (arg1i))
9599 {
9600 tree rp = fold_build1 (NEGATE_EXPR, rtype,
9601 arg1r ? arg1r
9602 : build1 (REALPART_EXPR, rtype, arg1));
9603 tree ip = arg0i ? arg0i
9604 : build1 (IMAGPART_EXPR, rtype, arg0);
9605 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9606 }
9607 else if (arg0iz && arg1r && real_zerop (arg1r))
9608 {
9609 tree rp = arg0r ? arg0r
9610 : build1 (REALPART_EXPR, rtype, arg0);
9611 tree ip = fold_build1 (NEGATE_EXPR, rtype,
9612 arg1i ? arg1i
9613 : build1 (IMAGPART_EXPR, rtype, arg1));
9614 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9615 }
9616 }
9617 }
9618
9619 /* Fold &x - &x. This can happen from &x.foo - &x.
9620 This is unsafe for certain floats even in non-IEEE formats.
9621 In IEEE, it is unsafe because it does wrong for NaNs.
9622 Also note that operand_equal_p is always false if an operand
9623 is volatile. */
9624
9625 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9626 && operand_equal_p (arg0, arg1, 0))
9627 return fold_convert (type, integer_zero_node);
9628
9629 /* A - B -> A + (-B) if B is easily negatable. */
9630 if (negate_expr_p (arg1)
9631 && ((FLOAT_TYPE_P (type)
9632 /* Avoid this transformation if B is a positive REAL_CST. */
9633 && (TREE_CODE (arg1) != REAL_CST
9634 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9635 || INTEGRAL_TYPE_P (type)))
9636 return fold_build2 (PLUS_EXPR, type,
9637 fold_convert (type, arg0),
9638 fold_convert (type, negate_expr (arg1)));
9639
9640 /* Try folding difference of addresses. */
9641 {
9642 HOST_WIDE_INT diff;
9643
9644 if ((TREE_CODE (arg0) == ADDR_EXPR
9645 || TREE_CODE (arg1) == ADDR_EXPR)
9646 && ptr_difference_const (arg0, arg1, &diff))
9647 return build_int_cst_type (type, diff);
9648 }
9649
9650 /* Fold &a[i] - &a[j] to i-j. */
9651 if (TREE_CODE (arg0) == ADDR_EXPR
9652 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9653 && TREE_CODE (arg1) == ADDR_EXPR
9654 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9655 {
9656 tree aref0 = TREE_OPERAND (arg0, 0);
9657 tree aref1 = TREE_OPERAND (arg1, 0);
9658 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9659 TREE_OPERAND (aref1, 0), 0))
9660 {
9661 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9662 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9663 tree esz = array_ref_element_size (aref0);
9664 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9665 return fold_build2 (MULT_EXPR, type, diff,
9666 fold_convert (type, esz));
9667
9668 }
9669 }
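      /* E.g. &a[i] - &a[j] becomes (i - j) multiplied by the element
	 size obtained from array_ref_element_size, i.e. the byte
	 difference of the two addresses.  */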
9670
9671 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
9672 of the array. The loop optimizer sometimes produces this type of
9673 expression. */
9674 if (TREE_CODE (arg0) == ADDR_EXPR)
9675 {
9676 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9677 if (tem)
9678 return fold_convert (type, tem);
9679 }
9680
9681 if (flag_unsafe_math_optimizations
9682 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9683 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9684 && (tem = distribute_real_division (code, type, arg0, arg1)))
9685 return tem;
9686
9687 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9688 same or one. */
9689 if ((TREE_CODE (arg0) == MULT_EXPR
9690 || TREE_CODE (arg1) == MULT_EXPR)
9691 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9692 {
9693 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9694 if (tem)
9695 return tem;
9696 }
9697
9698 goto associate;
9699
9700 case MULT_EXPR:
9701 /* (-A) * (-B) -> A * B */
9702 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9703 return fold_build2 (MULT_EXPR, type,
9704 fold_convert (type, TREE_OPERAND (arg0, 0)),
9705 fold_convert (type, negate_expr (arg1)));
9706 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9707 return fold_build2 (MULT_EXPR, type,
9708 fold_convert (type, negate_expr (arg0)),
9709 fold_convert (type, TREE_OPERAND (arg1, 0)));
9710
9711 if (! FLOAT_TYPE_P (type))
9712 {
9713 if (integer_zerop (arg1))
9714 return omit_one_operand (type, arg1, arg0);
9715 if (integer_onep (arg1))
9716 return non_lvalue (fold_convert (type, arg0));
9717 /* Transform x * -1 into -x. */
9718 if (integer_all_onesp (arg1))
9719 return fold_convert (type, negate_expr (arg0));
9720 /* Transform x * -C into -x * C if x is easily negatable. */
9721 if (TREE_CODE (arg1) == INTEGER_CST
9722 && tree_int_cst_sgn (arg1) == -1
9723 && negate_expr_p (arg0)
9724 && (tem = negate_expr (arg1)) != arg1
9725 && !TREE_OVERFLOW (tem))
9726 return fold_build2 (MULT_EXPR, type,
9727 negate_expr (arg0), tem);
9728
9729 /* (a * (1 << b)) is (a << b) */
9730 if (TREE_CODE (arg1) == LSHIFT_EXPR
9731 && integer_onep (TREE_OPERAND (arg1, 0)))
9732 return fold_build2 (LSHIFT_EXPR, type, arg0,
9733 TREE_OPERAND (arg1, 1));
9734 if (TREE_CODE (arg0) == LSHIFT_EXPR
9735 && integer_onep (TREE_OPERAND (arg0, 0)))
9736 return fold_build2 (LSHIFT_EXPR, type, arg1,
9737 TREE_OPERAND (arg0, 1));
9738
9739 strict_overflow_p = false;
9740 if (TREE_CODE (arg1) == INTEGER_CST
9741 && 0 != (tem = extract_muldiv (op0,
9742 fold_convert (type, arg1),
9743 code, NULL_TREE,
9744 &strict_overflow_p)))
9745 {
9746 if (strict_overflow_p)
9747 fold_overflow_warning (("assuming signed overflow does not "
9748 "occur when simplifying "
9749 "multiplication"),
9750 WARN_STRICT_OVERFLOW_MISC);
9751 return fold_convert (type, tem);
9752 }
9753
9754 /* Optimize z * conj(z) for integer complex numbers. */
9755 if (TREE_CODE (arg0) == CONJ_EXPR
9756 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9757 return fold_mult_zconjz (type, arg1);
9758 if (TREE_CODE (arg1) == CONJ_EXPR
9759 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9760 return fold_mult_zconjz (type, arg0);
9761 }
9762 else
9763 {
9764 /* Maybe fold x * 0 to 0. The expressions aren't the same
9765 when x is NaN, since x * 0 is also NaN. Nor are they the
9766 same in modes with signed zeros, since multiplying a
9767 negative value by 0 gives -0, not +0. */
9768 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9769 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9770 && real_zerop (arg1))
9771 return omit_one_operand (type, arg1, arg0);
9772 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9773 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9774 && real_onep (arg1))
9775 return non_lvalue (fold_convert (type, arg0));
9776
9777 /* Transform x * -1.0 into -x. */
9778 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9779 && real_minus_onep (arg1))
9780 return fold_convert (type, negate_expr (arg0));
9781
9782 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9783 if (flag_unsafe_math_optimizations
9784 && TREE_CODE (arg0) == RDIV_EXPR
9785 && TREE_CODE (arg1) == REAL_CST
9786 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9787 {
9788 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9789 arg1, 0);
9790 if (tem)
9791 return fold_build2 (RDIV_EXPR, type, tem,
9792 TREE_OPERAND (arg0, 1));
9793 }
9794
9795 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9796 if (operand_equal_p (arg0, arg1, 0))
9797 {
9798 tree tem = fold_strip_sign_ops (arg0);
9799 if (tem != NULL_TREE)
9800 {
9801 tem = fold_convert (type, tem);
9802 return fold_build2 (MULT_EXPR, type, tem, tem);
9803 }
9804 }
9805
9806 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9807 This is not the same for NaNs or if signed zeros are
9808 involved. */
9809 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9810 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9811 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9812 && TREE_CODE (arg1) == COMPLEX_CST
9813 && real_zerop (TREE_REALPART (arg1)))
9814 {
9815 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9816 if (real_onep (TREE_IMAGPART (arg1)))
9817 return fold_build2 (COMPLEX_EXPR, type,
9818 negate_expr (fold_build1 (IMAGPART_EXPR,
9819 rtype, arg0)),
9820 fold_build1 (REALPART_EXPR, rtype, arg0));
9821 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9822 return fold_build2 (COMPLEX_EXPR, type,
9823 fold_build1 (IMAGPART_EXPR, rtype, arg0),
9824 negate_expr (fold_build1 (REALPART_EXPR,
9825 rtype, arg0)));
9826 }
9827
9828 /* Optimize z * conj(z) for floating point complex numbers.
9829 Guarded by flag_unsafe_math_optimizations as non-finite
9830 imaginary components don't produce scalar results. */
9831 if (flag_unsafe_math_optimizations
9832 && TREE_CODE (arg0) == CONJ_EXPR
9833 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9834 return fold_mult_zconjz (type, arg1);
9835 if (flag_unsafe_math_optimizations
9836 && TREE_CODE (arg1) == CONJ_EXPR
9837 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9838 return fold_mult_zconjz (type, arg0);
9839
9840 if (flag_unsafe_math_optimizations)
9841 {
9842 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9843 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9844
9845 /* Optimizations of root(...)*root(...). */
9846 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9847 {
9848 tree rootfn, arg;
9849 tree arg00 = CALL_EXPR_ARG (arg0, 0);
9850 tree arg10 = CALL_EXPR_ARG (arg1, 0);
9851
9852 /* Optimize sqrt(x)*sqrt(x) as x. */
9853 if (BUILTIN_SQRT_P (fcode0)
9854 && operand_equal_p (arg00, arg10, 0)
9855 && ! HONOR_SNANS (TYPE_MODE (type)))
9856 return arg00;
9857
9858 /* Optimize root(x)*root(y) as root(x*y). */
9859 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9860 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9861 return build_call_expr (rootfn, 1, arg);
9862 }
9863
9864 /* Optimize expN(x)*expN(y) as expN(x+y). */
9865 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9866 {
9867 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9868 tree arg = fold_build2 (PLUS_EXPR, type,
9869 CALL_EXPR_ARG (arg0, 0),
9870 CALL_EXPR_ARG (arg1, 0));
9871 return build_call_expr (expfn, 1, arg);
9872 }
9873
9874 /* Optimizations of pow(...)*pow(...). */
9875 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9876 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9877 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9878 {
9879 tree arg00 = CALL_EXPR_ARG (arg0, 0);
9880 tree arg01 = CALL_EXPR_ARG (arg0, 1);
9881 tree arg10 = CALL_EXPR_ARG (arg1, 0);
9882 tree arg11 = CALL_EXPR_ARG (arg1, 1);
9883
9884 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9885 if (operand_equal_p (arg01, arg11, 0))
9886 {
9887 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9888 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9889 return build_call_expr (powfn, 2, arg, arg01);
9890 }
9891
9892 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9893 if (operand_equal_p (arg00, arg10, 0))
9894 {
9895 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9896 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9897 return build_call_expr (powfn, 2, arg00, arg);
9898 }
9899 }
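	  /* E.g. pow (x, 2.0) * pow (y, 2.0) becomes pow (x*y, 2.0),
	     and pow (x, 2.0) * pow (x, 3.0) becomes pow (x, 5.0).  */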
9900
9901 /* Optimize tan(x)*cos(x) as sin(x). */
9902 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9903 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9904 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9905 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9906 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9907 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9908 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
9909 CALL_EXPR_ARG (arg1, 0), 0))
9910 {
9911 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9912
9913 if (sinfn != NULL_TREE)
9914 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
9915 }
9916
9917 /* Optimize x*pow(x,c) as pow(x,c+1). */
9918 if (fcode1 == BUILT_IN_POW
9919 || fcode1 == BUILT_IN_POWF
9920 || fcode1 == BUILT_IN_POWL)
9921 {
9922 tree arg10 = CALL_EXPR_ARG (arg1, 0);
9923 tree arg11 = CALL_EXPR_ARG (arg1, 1);
9924 if (TREE_CODE (arg11) == REAL_CST
9925 && !TREE_OVERFLOW (arg11)
9926 && operand_equal_p (arg0, arg10, 0))
9927 {
9928 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
9929 REAL_VALUE_TYPE c;
9930 tree arg;
9931
9932 c = TREE_REAL_CST (arg11);
9933 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9934 arg = build_real (type, c);
9935 return build_call_expr (powfn, 2, arg0, arg);
9936 }
9937 }
9938
9939 /* Optimize pow(x,c)*x as pow(x,c+1). */
9940 if (fcode0 == BUILT_IN_POW
9941 || fcode0 == BUILT_IN_POWF
9942 || fcode0 == BUILT_IN_POWL)
9943 {
9944 tree arg00 = CALL_EXPR_ARG (arg0, 0);
9945 tree arg01 = CALL_EXPR_ARG (arg0, 1);
9946 if (TREE_CODE (arg01) == REAL_CST
9947 && !TREE_OVERFLOW (arg01)
9948 && operand_equal_p (arg1, arg00, 0))
9949 {
9950 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9951 REAL_VALUE_TYPE c;
9952 tree arg;
9953
9954 c = TREE_REAL_CST (arg01);
9955 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9956 arg = build_real (type, c);
9957 return build_call_expr (powfn, 2, arg1, arg);
9958 }
9959 }
9960
9961 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9962 if (! optimize_size
9963 && operand_equal_p (arg0, arg1, 0))
9964 {
9965 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9966
9967 if (powfn)
9968 {
9969 tree arg = build_real (type, dconst2);
9970 return build_call_expr (powfn, 2, arg0, arg);
9971 }
9972 }
9973 }
9974 }
9975 goto associate;
9976
9977 case BIT_IOR_EXPR:
9978 bit_ior:
9979 if (integer_all_onesp (arg1))
9980 return omit_one_operand (type, arg1, arg0);
9981 if (integer_zerop (arg1))
9982 return non_lvalue (fold_convert (type, arg0));
9983 if (operand_equal_p (arg0, arg1, 0))
9984 return non_lvalue (fold_convert (type, arg0));
9985
9986 /* ~X | X is -1. */
9987 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9988 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9989 {
9990 t1 = build_int_cst_type (type, -1);
9991 return omit_one_operand (type, t1, arg1);
9992 }
9993
9994 /* X | ~X is -1. */
9995 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9996 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9997 {
9998 t1 = build_int_cst_type (type, -1);
9999 return omit_one_operand (type, t1, arg0);
10000 }
10001
10002 /* Canonicalize (X & C1) | C2. */
10003 if (TREE_CODE (arg0) == BIT_AND_EXPR
10004 && TREE_CODE (arg1) == INTEGER_CST
10005 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10006 {
10007 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
10008 int width = TYPE_PRECISION (type);
10009 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10010 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10011 hi2 = TREE_INT_CST_HIGH (arg1);
10012 lo2 = TREE_INT_CST_LOW (arg1);
10013
10014 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10015 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10016 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10017
10018 if (width > HOST_BITS_PER_WIDE_INT)
10019 {
10020 mhi = (unsigned HOST_WIDE_INT) -1
10021 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10022 mlo = -1;
10023 }
10024 else
10025 {
10026 mhi = 0;
10027 mlo = (unsigned HOST_WIDE_INT) -1
10028 >> (HOST_BITS_PER_WIDE_INT - width);
10029 }
10030
10031 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10032 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10033 return fold_build2 (BIT_IOR_EXPR, type,
10034 TREE_OPERAND (arg0, 0), arg1);
10035
10036 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
10037 hi1 &= mhi;
10038 lo1 &= mlo;
10039 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
10040 return fold_build2 (BIT_IOR_EXPR, type,
10041 fold_build2 (BIT_AND_EXPR, type,
10042 TREE_OPERAND (arg0, 0),
10043 build_int_cst_wide (type,
10044 lo1 & ~lo2,
10045 hi1 & ~hi2)),
10046 arg1);
10047 }
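      /* E.g. (x & 0x3F) | 0x0F drops the C1 bits covered by C2 and
	 becomes (x & 0x30) | 0x0F; had C1 | C2 covered every bit of the
	 type, the BIT_AND would have been dropped entirely.  */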
10048
10049 /* (X & Y) | Y is (X, Y). */
10050 if (TREE_CODE (arg0) == BIT_AND_EXPR
10051 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10052 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10053 /* (X & Y) | X is (Y, X). */
10054 if (TREE_CODE (arg0) == BIT_AND_EXPR
10055 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10056 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10057 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10058 /* X | (X & Y) is (Y, X). */
10059 if (TREE_CODE (arg1) == BIT_AND_EXPR
10060 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10061 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10062 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10063 /* X | (Y & X) is (Y, X). */
10064 if (TREE_CODE (arg1) == BIT_AND_EXPR
10065 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10066 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10067 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10068
10069 t1 = distribute_bit_expr (code, type, arg0, arg1);
10070 if (t1 != NULL_TREE)
10071 return t1;
10072
10073 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10074
10075 This results in more efficient code for machines without a NAND
10076 instruction. Combine will canonicalize to the first form
10077 which will allow use of NAND instructions provided by the
10078 backend if they exist. */
10079 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10080 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10081 {
10082 return fold_build1 (BIT_NOT_EXPR, type,
10083 build2 (BIT_AND_EXPR, type,
10084 TREE_OPERAND (arg0, 0),
10085 TREE_OPERAND (arg1, 0)));
10086 }
10087
10088 /* See if this can be simplified into a rotate first. If that
10089 is unsuccessful continue in the association code. */
10090 goto bit_rotate;
10091
10092 case BIT_XOR_EXPR:
10093 if (integer_zerop (arg1))
10094 return non_lvalue (fold_convert (type, arg0));
10095 if (integer_all_onesp (arg1))
10096 return fold_build1 (BIT_NOT_EXPR, type, arg0);
10097 if (operand_equal_p (arg0, arg1, 0))
10098 return omit_one_operand (type, integer_zero_node, arg0);
10099
10100 /* ~X ^ X is -1. */
10101 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10102 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10103 {
10104 t1 = build_int_cst_type (type, -1);
10105 return omit_one_operand (type, t1, arg1);
10106 }
10107
10108 /* X ^ ~X is -1. */
10109 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10110 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10111 {
10112 t1 = build_int_cst_type (type, -1);
10113 return omit_one_operand (type, t1, arg0);
10114 }
10115
10116 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10117 with a constant, and the two constants have no bits in common,
10118 we should treat this as a BIT_IOR_EXPR since this may produce more
10119 simplifications. */
10120 if (TREE_CODE (arg0) == BIT_AND_EXPR
10121 && TREE_CODE (arg1) == BIT_AND_EXPR
10122 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10123 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10124 && integer_zerop (const_binop (BIT_AND_EXPR,
10125 TREE_OPERAND (arg0, 1),
10126 TREE_OPERAND (arg1, 1), 0)))
10127 {
10128 code = BIT_IOR_EXPR;
10129 goto bit_ior;
10130 }
10131
10132 /* (X | Y) ^ X -> Y & ~X. */
10133 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10134 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10135 {
10136 tree t2 = TREE_OPERAND (arg0, 1);
10137 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10138 arg1);
10139 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10140 fold_convert (type, t1));
10141 return t1;
10142 }
10143
10144 /* (Y | X) ^ X -> Y & ~X. */
10145 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10146 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10147 {
10148 tree t2 = TREE_OPERAND (arg0, 0);
10149 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10150 arg1);
10151 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10152 fold_convert (type, t1));
10153 return t1;
10154 }
10155
10156 /* X ^ (X | Y) -> Y & ~X. */
10157 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10158 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10159 {
10160 tree t2 = TREE_OPERAND (arg1, 1);
10161 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10162 arg0);
10163 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10164 fold_convert (type, t1));
10165 return t1;
10166 }
10167
10168 /* X ^ (Y | X) -> Y & ~X. */
10169 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10170 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10171 {
10172 tree t2 = TREE_OPERAND (arg1, 0);
10173 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10174 arg0);
10175 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10176 fold_convert (type, t1));
10177 return t1;
10178 }
10179
10180 /* Convert ~X ^ ~Y to X ^ Y. */
10181 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10182 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10183 return fold_build2 (code, type,
10184 fold_convert (type, TREE_OPERAND (arg0, 0)),
10185 fold_convert (type, TREE_OPERAND (arg1, 0)));
10186
10187 /* Convert ~X ^ C to X ^ ~C. */
10188 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10189 && TREE_CODE (arg1) == INTEGER_CST)
10190 return fold_build2 (code, type,
10191 fold_convert (type, TREE_OPERAND (arg0, 0)),
10192 fold_build1 (BIT_NOT_EXPR, type, arg1));
10193
10194 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10195 if (TREE_CODE (arg0) == BIT_AND_EXPR
10196 && integer_onep (TREE_OPERAND (arg0, 1))
10197 && integer_onep (arg1))
10198 return fold_build2 (EQ_EXPR, type, arg0,
10199 build_int_cst (TREE_TYPE (arg0), 0));
10200
10201 /* Fold (X & Y) ^ Y as ~X & Y. */
10202 if (TREE_CODE (arg0) == BIT_AND_EXPR
10203 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10204 {
10205 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10206 return fold_build2 (BIT_AND_EXPR, type,
10207 fold_build1 (BIT_NOT_EXPR, type, tem),
10208 fold_convert (type, arg1));
10209 }
10210 /* Fold (X & Y) ^ X as ~Y & X. */
10211 if (TREE_CODE (arg0) == BIT_AND_EXPR
10212 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10213 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10214 {
10215 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10216 return fold_build2 (BIT_AND_EXPR, type,
10217 fold_build1 (BIT_NOT_EXPR, type, tem),
10218 fold_convert (type, arg1));
10219 }
10220 /* Fold X ^ (X & Y) as X & ~Y. */
10221 if (TREE_CODE (arg1) == BIT_AND_EXPR
10222 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10223 {
10224 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10225 return fold_build2 (BIT_AND_EXPR, type,
10226 fold_convert (type, arg0),
10227 fold_build1 (BIT_NOT_EXPR, type, tem));
10228 }
10229 /* Fold X ^ (Y & X) as ~Y & X. */
10230 if (TREE_CODE (arg1) == BIT_AND_EXPR
10231 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10232 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10233 {
10234 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10235 return fold_build2 (BIT_AND_EXPR, type,
10236 fold_build1 (BIT_NOT_EXPR, type, tem),
10237 fold_convert (type, arg0));
10238 }
10239
10240 /* See if this can be simplified into a rotate first. If that
10241 is unsuccessful continue in the association code. */
10242 goto bit_rotate;
10243
10244 case BIT_AND_EXPR:
10245 if (integer_all_onesp (arg1))
10246 return non_lvalue (fold_convert (type, arg0));
10247 if (integer_zerop (arg1))
10248 return omit_one_operand (type, arg1, arg0);
10249 if (operand_equal_p (arg0, arg1, 0))
10250 return non_lvalue (fold_convert (type, arg0));
10251
10252 /* ~X & X is always zero. */
10253 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10254 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10255 return omit_one_operand (type, integer_zero_node, arg1);
10256
10257 /* X & ~X is always zero. */
10258 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10259 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10260 return omit_one_operand (type, integer_zero_node, arg0);
10261
10262 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10263 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10264 && TREE_CODE (arg1) == INTEGER_CST
10265 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10266 return fold_build2 (BIT_IOR_EXPR, type,
10267 fold_build2 (BIT_AND_EXPR, type,
10268 TREE_OPERAND (arg0, 0), arg1),
10269 fold_build2 (BIT_AND_EXPR, type,
10270 TREE_OPERAND (arg0, 1), arg1));
10271
10272 /* (X | Y) & Y is (X, Y). */
10273 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10274 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10275 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10276 /* (X | Y) & X is (Y, X). */
10277 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10278 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10279 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10280 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10281 /* X & (X | Y) is (Y, X). */
10282 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10283 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10284 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10285 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10286 /* X & (Y | X) is (Y, X). */
10287 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10288 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10289 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10290 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10291
10292 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10293 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10294 && integer_onep (TREE_OPERAND (arg0, 1))
10295 && integer_onep (arg1))
10296 {
10297 tem = TREE_OPERAND (arg0, 0);
10298 return fold_build2 (EQ_EXPR, type,
10299 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10300 build_int_cst (TREE_TYPE (tem), 1)),
10301 build_int_cst (TREE_TYPE (tem), 0));
10302 }
10303 /* Fold ~X & 1 as (X & 1) == 0. */
10304 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10305 && integer_onep (arg1))
10306 {
10307 tem = TREE_OPERAND (arg0, 0);
10308 return fold_build2 (EQ_EXPR, type,
10309 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10310 build_int_cst (TREE_TYPE (tem), 1)),
10311 build_int_cst (TREE_TYPE (tem), 0));
10312 }
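      /* E.g. the low bit of ~X is set exactly when the low bit of X is
	 clear, so both folds above turn a NOT or XOR into the single
	 test (X & 1) == 0.  */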
10313
10314 /* Fold (X ^ Y) & Y as ~X & Y. */
10315 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10316 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10317 {
10318 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10319 return fold_build2 (BIT_AND_EXPR, type,
10320 fold_build1 (BIT_NOT_EXPR, type, tem),
10321 fold_convert (type, arg1));
10322 }
10323 /* Fold (X ^ Y) & X as ~Y & X. */
10324 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10325 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10326 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10327 {
10328 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10329 return fold_build2 (BIT_AND_EXPR, type,
10330 fold_build1 (BIT_NOT_EXPR, type, tem),
10331 fold_convert (type, arg1));
10332 }
10333 /* Fold X & (X ^ Y) as X & ~Y. */
10334 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10335 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10336 {
10337 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10338 return fold_build2 (BIT_AND_EXPR, type,
10339 fold_convert (type, arg0),
10340 fold_build1 (BIT_NOT_EXPR, type, tem));
10341 }
10342 /* Fold X & (Y ^ X) as ~Y & X. */
10343 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10344 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10345 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10346 {
10347 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10348 return fold_build2 (BIT_AND_EXPR, type,
10349 fold_build1 (BIT_NOT_EXPR, type, tem),
10350 fold_convert (type, arg0));
10351 }
10352
10353 t1 = distribute_bit_expr (code, type, arg0, arg1);
10354 if (t1 != NULL_TREE)
10355 return t1;
10356 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10357 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10358 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10359 {
10360 unsigned int prec
10361 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10362
10363 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10364 && (~TREE_INT_CST_LOW (arg1)
10365 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10366 return fold_convert (type, TREE_OPERAND (arg0, 0));
10367 }
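	  /* E.g. if c has type unsigned char (precision 8), then
	     (int) c & 0377 masks with 0xFF, which cannot clear any bit
	     of c's value, so the result is just (int) c.  */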
10368
10369 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10370
10371 This results in more efficient code for machines without a NOR
10372 instruction. Combine will canonicalize to the first form
10373 which will allow use of NOR instructions provided by the
10374 backend if they exist. */
10375 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10376 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10377 {
10378 return fold_build1 (BIT_NOT_EXPR, type,
10379 build2 (BIT_IOR_EXPR, type,
10380 TREE_OPERAND (arg0, 0),
10381 TREE_OPERAND (arg1, 0)));
10382 }
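	  /* This is De Morgan's law: ~A & ~B == ~(A | B).  E.g. with
	     A = 0x3 and B = 0x5, both sides equal ~0x7.  */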
10383
10384 goto associate;
10385
10386 case RDIV_EXPR:
10387 /* Don't touch a floating-point divide by zero unless the mode
10388 of the constant can represent infinity. */
10389 if (TREE_CODE (arg1) == REAL_CST
10390 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10391 && real_zerop (arg1))
10392 return NULL_TREE;
10393
10394 /* Optimize A / A to 1.0 if we don't care about
10395 NaNs or Infinities. Skip the transformation
10396 for non-real operands. */
10397 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10398 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10399 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10400 && operand_equal_p (arg0, arg1, 0))
10401 {
10402 tree r = build_real (TREE_TYPE (arg0), dconst1);
10403
10404 return omit_two_operands (type, r, arg0, arg1);
10405 }
10406
10407 /* The complex version of the above A / A optimization. */
10408 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10409 && operand_equal_p (arg0, arg1, 0))
10410 {
10411 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10412 if (! HONOR_NANS (TYPE_MODE (elem_type))
10413 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10414 {
10415 tree r = build_real (elem_type, dconst1);
10416 /* omit_two_operands will call fold_convert for us. */
10417 return omit_two_operands (type, r, arg0, arg1);
10418 }
10419 }
10420
10421 /* (-A) / (-B) -> A / B */
10422 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10423 return fold_build2 (RDIV_EXPR, type,
10424 TREE_OPERAND (arg0, 0),
10425 negate_expr (arg1));
10426 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10427 return fold_build2 (RDIV_EXPR, type,
10428 negate_expr (arg0),
10429 TREE_OPERAND (arg1, 0));
10430
10431 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
10432 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10433 && real_onep (arg1))
10434 return non_lvalue (fold_convert (type, arg0));
10435
10436 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
10437 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10438 && real_minus_onep (arg1))
10439 return non_lvalue (fold_convert (type, negate_expr (arg0)));
10440
10441 /* If ARG1 is a constant, we can convert this to a multiply by the
10442 reciprocal. This does not have the same rounding properties,
10443 so only do this if -funsafe-math-optimizations. We can actually
10444 always safely do it if ARG1 is a power of two, but it's hard to
10445 tell if it is or not in a portable manner. */
10446 if (TREE_CODE (arg1) == REAL_CST)
10447 {
10448 if (flag_unsafe_math_optimizations
10449 && 0 != (tem = const_binop (code, build_real (type, dconst1),
10450 arg1, 0)))
10451 return fold_build2 (MULT_EXPR, type, arg0, tem);
10452 /* Find the reciprocal if optimizing and the result is exact. */
10453 if (optimize)
10454 {
10455 REAL_VALUE_TYPE r;
10456 r = TREE_REAL_CST (arg1);
10457 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
10458 {
10459 tem = build_real (type, r);
10460 return fold_build2 (MULT_EXPR, type,
10461 fold_convert (type, arg0), tem);
10462 }
10463 }
10464 }
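	  /* E.g. X / 4.0 becomes X * 0.25 whenever optimizing, since
	     0.25 is the exact binary reciprocal of 4.0; X / 3.0 is only
	     rewritten under -funsafe-math-optimizations because 1.0/3.0
	     is inexact.  */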
10465 /* Convert A/B/C to A/(B*C). */
10466 if (flag_unsafe_math_optimizations
10467 && TREE_CODE (arg0) == RDIV_EXPR)
10468 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10469 fold_build2 (MULT_EXPR, type,
10470 TREE_OPERAND (arg0, 1), arg1));
10471
10472 /* Convert A/(B/C) to (A/B)*C. */
10473 if (flag_unsafe_math_optimizations
10474 && TREE_CODE (arg1) == RDIV_EXPR)
10475 return fold_build2 (MULT_EXPR, type,
10476 fold_build2 (RDIV_EXPR, type, arg0,
10477 TREE_OPERAND (arg1, 0)),
10478 TREE_OPERAND (arg1, 1));
10479
10480 /* Convert C1/(X*C2) into (C1/C2)/X. */
10481 if (flag_unsafe_math_optimizations
10482 && TREE_CODE (arg1) == MULT_EXPR
10483 && TREE_CODE (arg0) == REAL_CST
10484 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10485 {
10486 tree tem = const_binop (RDIV_EXPR, arg0,
10487 TREE_OPERAND (arg1, 1), 0);
10488 if (tem)
10489 return fold_build2 (RDIV_EXPR, type, tem,
10490 TREE_OPERAND (arg1, 0));
10491 }
10492
10493 if (flag_unsafe_math_optimizations)
10494 {
10495 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10496 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10497
10498 /* Optimize sin(x)/cos(x) as tan(x). */
10499 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
10500 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
10501 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
10502 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10503 CALL_EXPR_ARG (arg1, 0), 0))
10504 {
10505 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10506
10507 if (tanfn != NULL_TREE)
10508 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10509 }
10510
10511 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10512 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
10513 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
10514 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
10515 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10516 CALL_EXPR_ARG (arg1, 0), 0))
10517 {
10518 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10519
10520 if (tanfn != NULL_TREE)
10521 {
10522 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10523 return fold_build2 (RDIV_EXPR, type,
10524 build_real (type, dconst1), tmp);
10525 }
10526 }
10527
10528 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10529 NaNs or Infinities. */
10530 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
10531 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
10532 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
10533 {
10534 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10535 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10536
10537 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10538 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10539 && operand_equal_p (arg00, arg01, 0))
10540 {
10541 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10542
10543 if (cosfn != NULL_TREE)
10544 return build_call_expr (cosfn, 1, arg00);
10545 }
10546 }
10547
10548 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10549 NaNs or Infinities. */
10550 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
10551 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
10552 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
10553 {
10554 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10555 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10556
10557 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10558 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10559 && operand_equal_p (arg00, arg01, 0))
10560 {
10561 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10562
10563 if (cosfn != NULL_TREE)
10564 {
10565 tree tmp = build_call_expr (cosfn, 1, arg00);
10566 return fold_build2 (RDIV_EXPR, type,
10567 build_real (type, dconst1),
10568 tmp);
10569 }
10570 }
10571 }
10572
10573 /* Optimize pow(x,c)/x as pow(x,c-1). */
10574 if (fcode0 == BUILT_IN_POW
10575 || fcode0 == BUILT_IN_POWF
10576 || fcode0 == BUILT_IN_POWL)
10577 {
10578 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10579 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10580 if (TREE_CODE (arg01) == REAL_CST
10581 && !TREE_OVERFLOW (arg01)
10582 && operand_equal_p (arg1, arg00, 0))
10583 {
10584 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10585 REAL_VALUE_TYPE c;
10586 tree arg;
10587
10588 c = TREE_REAL_CST (arg01);
10589 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
10590 arg = build_real (type, c);
10591 return build_call_expr (powfn, 2, arg1, arg);
10592 }
10593 }
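	  /* E.g. pow (x, 3.0) / x becomes pow (x, 2.0) here.  */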
10594
10595 /* Optimize x/expN(y) into x*expN(-y). */
10596 if (BUILTIN_EXPONENT_P (fcode1))
10597 {
10598 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10599 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
10600 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
10601 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10602 }
10603
10604 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10605 if (fcode1 == BUILT_IN_POW
10606 || fcode1 == BUILT_IN_POWF
10607 || fcode1 == BUILT_IN_POWL)
10608 {
10609 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10610 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10611 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10612 tree neg11 = fold_convert (type, negate_expr (arg11));
10613 arg1 = build_call_expr (powfn, 2, arg10, neg11);
10614 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10615 }
10616 }
10617 return NULL_TREE;
10618
10619 case TRUNC_DIV_EXPR:
10620 case FLOOR_DIV_EXPR:
10621 /* Simplify A / (B << N) where A and B are positive and B is
10622 a power of 2, to A >> (N + log2(B)). */
10623 strict_overflow_p = false;
10624 if (TREE_CODE (arg1) == LSHIFT_EXPR
10625 && (TYPE_UNSIGNED (type)
10626 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10627 {
10628 tree sval = TREE_OPERAND (arg1, 0);
10629 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10630 {
10631 tree sh_cnt = TREE_OPERAND (arg1, 1);
10632 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10633
10634 if (strict_overflow_p)
10635 fold_overflow_warning (("assuming signed overflow does not "
10636 "occur when simplifying A / (B << N)"),
10637 WARN_STRICT_OVERFLOW_MISC);
10638
10639 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10640 sh_cnt, build_int_cst (NULL_TREE, pow2));
10641 return fold_build2 (RSHIFT_EXPR, type,
10642 fold_convert (type, arg0), sh_cnt);
10643 }
10644 }
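	  /* E.g. for unsigned A, A / (4 << N) becomes A >> (N + 2),
	     since log2(4) == 2.  */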
10645 /* Fall thru */
10646
10647 case ROUND_DIV_EXPR:
10648 case CEIL_DIV_EXPR:
10649 case EXACT_DIV_EXPR:
10650 if (integer_onep (arg1))
10651 return non_lvalue (fold_convert (type, arg0));
10652 if (integer_zerop (arg1))
10653 return NULL_TREE;
10654 /* X / -1 is -X. */
10655 if (!TYPE_UNSIGNED (type)
10656 && TREE_CODE (arg1) == INTEGER_CST
10657 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10658 && TREE_INT_CST_HIGH (arg1) == -1)
10659 return fold_convert (type, negate_expr (arg0));
10660
10661 /* Convert -A / -B to A / B when the type is signed and overflow is
10662 undefined. */
10663 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10664 && TREE_CODE (arg0) == NEGATE_EXPR
10665 && negate_expr_p (arg1))
10666 {
10667 if (INTEGRAL_TYPE_P (type))
10668 fold_overflow_warning (("assuming signed overflow does not occur "
10669 "when distributing negation across "
10670 "division"),
10671 WARN_STRICT_OVERFLOW_MISC);
10672 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10673 negate_expr (arg1));
10674 }
10675 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10676 && TREE_CODE (arg1) == NEGATE_EXPR
10677 && negate_expr_p (arg0))
10678 {
10679 if (INTEGRAL_TYPE_P (type))
10680 fold_overflow_warning (("assuming signed overflow does not occur "
10681 "when distributing negation across "
10682 "division"),
10683 WARN_STRICT_OVERFLOW_MISC);
10684 return fold_build2 (code, type, negate_expr (arg0),
10685 TREE_OPERAND (arg1, 0));
10686 }
10687
10688 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10689 operation, EXACT_DIV_EXPR.
10690
10691 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10692 At one time others generated faster code; it's not clear whether they
10693 still do after the last round of changes to the DIV code in expmed.c. */
10694 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10695 && multiple_of_p (type, arg0, arg1))
10696 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10697
10698 strict_overflow_p = false;
10699 if (TREE_CODE (arg1) == INTEGER_CST
10700 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10701 &strict_overflow_p)))
10702 {
10703 if (strict_overflow_p)
10704 fold_overflow_warning (("assuming signed overflow does not occur "
10705 "when simplifying division"),
10706 WARN_STRICT_OVERFLOW_MISC);
10707 return fold_convert (type, tem);
10708 }
10709
10710 return NULL_TREE;
10711
10712 case CEIL_MOD_EXPR:
10713 case FLOOR_MOD_EXPR:
10714 case ROUND_MOD_EXPR:
10715 case TRUNC_MOD_EXPR:
10716 /* X % 1 is always zero, but be sure to preserve any side
10717 effects in X. */
10718 if (integer_onep (arg1))
10719 return omit_one_operand (type, integer_zero_node, arg0);
10720
10721 /* X % 0: return X % 0 unchanged so that the proper warnings
10722 and errors can still be emitted. */
10723 if (integer_zerop (arg1))
10724 return NULL_TREE;
10725
10726 /* 0 % X is always zero, but be sure to preserve any side
10727 effects in X. Place this after checking for X == 0. */
10728 if (integer_zerop (arg0))
10729 return omit_one_operand (type, integer_zero_node, arg1);
10730
10731 /* X % -1 is zero. */
10732 if (!TYPE_UNSIGNED (type)
10733 && TREE_CODE (arg1) == INTEGER_CST
10734 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10735 && TREE_INT_CST_HIGH (arg1) == -1)
10736 return omit_one_operand (type, integer_zero_node, arg0);
10737
10738 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10739 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10740 strict_overflow_p = false;
10741 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10742 && (TYPE_UNSIGNED (type)
10743 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10744 {
10745 tree c = arg1;
10746 /* Also optimize A % (C << N) where C is a power of 2,
10747 to A & ((C << N) - 1). */
10748 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10749 c = TREE_OPERAND (arg1, 0);
10750
10751 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10752 {
10753 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
10754 build_int_cst (TREE_TYPE (arg1), 1));
10755 if (strict_overflow_p)
10756 fold_overflow_warning (("assuming signed overflow does not "
10757 "occur when simplifying "
10758 "X % (power of two)"),
10759 WARN_STRICT_OVERFLOW_MISC);
10760 return fold_build2 (BIT_AND_EXPR, type,
10761 fold_convert (type, arg0),
10762 fold_convert (type, mask));
10763 }
10764 }
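      /* E.g. for unsigned X, X % 16 becomes X & 15, and
	 X % (2 << N) becomes X & ((2 << N) - 1).  */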
10765
10766 /* X % -C is the same as X % C. */
10767 if (code == TRUNC_MOD_EXPR
10768 && !TYPE_UNSIGNED (type)
10769 && TREE_CODE (arg1) == INTEGER_CST
10770 && !TREE_OVERFLOW (arg1)
10771 && TREE_INT_CST_HIGH (arg1) < 0
10772 && !TYPE_OVERFLOW_TRAPS (type)
10773 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10774 && !sign_bit_p (arg1, arg1))
10775 return fold_build2 (code, type, fold_convert (type, arg0),
10776 fold_convert (type, negate_expr (arg1)));
10777
10778 /* X % -Y is the same as X % Y. */
10779 if (code == TRUNC_MOD_EXPR
10780 && !TYPE_UNSIGNED (type)
10781 && TREE_CODE (arg1) == NEGATE_EXPR
10782 && !TYPE_OVERFLOW_TRAPS (type))
10783 return fold_build2 (code, type, fold_convert (type, arg0),
10784 fold_convert (type, TREE_OPERAND (arg1, 0)));
10785
10786 if (TREE_CODE (arg1) == INTEGER_CST
10787 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10788 &strict_overflow_p)))
10789 {
10790 if (strict_overflow_p)
10791 fold_overflow_warning (("assuming signed overflow does not occur "
10792 "when simplifying modulos"),
10793 WARN_STRICT_OVERFLOW_MISC);
10794 return fold_convert (type, tem);
10795 }
10796
10797 return NULL_TREE;
10798
10799 case LROTATE_EXPR:
10800 case RROTATE_EXPR:
10801 if (integer_all_onesp (arg0))
10802 return omit_one_operand (type, arg0, arg1);
10803 goto shift;
10804
10805 case RSHIFT_EXPR:
10806 /* Optimize -1 >> x for arithmetic right shifts. */
10807 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10808 return omit_one_operand (type, arg0, arg1);
10809 /* ... fall through ... */
10810
10811 case LSHIFT_EXPR:
10812 shift:
10813 if (integer_zerop (arg1))
10814 return non_lvalue (fold_convert (type, arg0));
10815 if (integer_zerop (arg0))
10816 return omit_one_operand (type, arg0, arg1);
10817
10818 /* Since negative shift count is not well-defined,
10819 don't try to compute it in the compiler. */
10820 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10821 return NULL_TREE;
10822
10823 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10824 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10825 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10826 && host_integerp (TREE_OPERAND (arg0, 1), false)
10827 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10828 {
10829 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10830 + TREE_INT_CST_LOW (arg1));
10831
10832 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10833 being well defined. */
10834 if (low >= TYPE_PRECISION (type))
10835 {
10836 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10837 low = low % TYPE_PRECISION (type);
10838 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10839 return build_int_cst (type, 0);
10840 else
10841 low = TYPE_PRECISION (type) - 1;
10842 }
10843
10844 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10845 build_int_cst (type, low));
10846 }
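      /* E.g. (X >> 3) >> 5 becomes X >> 8 if 8 is within the precision;
	 otherwise rotates wrap modulo the precision, logical shifts fold
	 to zero, and arithmetic right shifts clamp to precision - 1.  */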
10847
10848 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10849 into x & ((unsigned)-1 >> c) for unsigned types. */
10850 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10851 || (TYPE_UNSIGNED (type)
10852 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10853 && host_integerp (arg1, false)
10854 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10855 && host_integerp (TREE_OPERAND (arg0, 1), false)
10856 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10857 {
10858 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10859 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10860 tree lshift;
10861 tree arg00;
10862
10863 if (low0 == low1)
10864 {
10865 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10866
10867 lshift = build_int_cst (type, -1);
10868 lshift = int_const_binop (code, lshift, arg1, 0);
10869
10870 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
10871 }
10872 }
10873
10874 /* Rewrite an LROTATE_EXPR by a constant into an
10875 RROTATE_EXPR by a new constant. */
10876 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10877 {
10878 tree tem = build_int_cst (TREE_TYPE (arg1),
10879 GET_MODE_BITSIZE (TYPE_MODE (type)));
10880 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10881 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
10882 }
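      /* E.g. rotating a 32-bit value left by 8 is the same as rotating
	 it right by 24.  */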
10883
10884 /* If we have a rotate of a bit operation with the rotate count and
10885 the second operand of the bit operation both constant,
10886 permute the two operations. */
10887 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10888 && (TREE_CODE (arg0) == BIT_AND_EXPR
10889 || TREE_CODE (arg0) == BIT_IOR_EXPR
10890 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10891 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10892 return fold_build2 (TREE_CODE (arg0), type,
10893 fold_build2 (code, type,
10894 TREE_OPERAND (arg0, 0), arg1),
10895 fold_build2 (code, type,
10896 TREE_OPERAND (arg0, 1), arg1));
10897
10898 /* Two consecutive rotates adding up to the width of the mode can
10899 be ignored. */
10900 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10901 && TREE_CODE (arg0) == RROTATE_EXPR
10902 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10903 && TREE_INT_CST_HIGH (arg1) == 0
10904 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10905 && ((TREE_INT_CST_LOW (arg1)
10906 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10907 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10908 return TREE_OPERAND (arg0, 0);
10909
10910 return NULL_TREE;
10911
10912 case MIN_EXPR:
10913 if (operand_equal_p (arg0, arg1, 0))
10914 return omit_one_operand (type, arg0, arg1);
10915 if (INTEGRAL_TYPE_P (type)
10916 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10917 return omit_one_operand (type, arg1, arg0);
10918 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10919 if (tem)
10920 return tem;
10921 goto associate;
10922
10923 case MAX_EXPR:
10924 if (operand_equal_p (arg0, arg1, 0))
10925 return omit_one_operand (type, arg0, arg1);
10926 if (INTEGRAL_TYPE_P (type)
10927 && TYPE_MAX_VALUE (type)
10928 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10929 return omit_one_operand (type, arg1, arg0);
10930 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10931 if (tem)
10932 return tem;
10933 goto associate;
10934
10935 case TRUTH_ANDIF_EXPR:
10936 /* Note that the operands of this must be ints
10937 and their values must be 0 or 1.
10938 ("true" is a fixed value perhaps depending on the language.) */
10939 /* If first arg is constant zero, return it. */
10940 if (integer_zerop (arg0))
10941 return fold_convert (type, arg0);
10942 case TRUTH_AND_EXPR:
10943 /* If either arg is constant true, drop it. */
10944 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10945 return non_lvalue (fold_convert (type, arg1));
10946 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10947 /* Preserve sequence points. */
10948 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10949 return non_lvalue (fold_convert (type, arg0));
10950 /* If second arg is constant zero, result is zero, but first arg
10951 must be evaluated. */
10952 if (integer_zerop (arg1))
10953 return omit_one_operand (type, arg1, arg0);
10954 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10955 case will be handled here. */
10956 if (integer_zerop (arg0))
10957 return omit_one_operand (type, arg0, arg1);
10958
10959 /* !X && X is always false. */
10960 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10961 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10962 return omit_one_operand (type, integer_zero_node, arg1);
10963 /* X && !X is always false. */
10964 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10965 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10966 return omit_one_operand (type, integer_zero_node, arg0);
10967
10968 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10969 means A >= Y && A != MAX, but in this case we know that
10970 A < X <= MAX. */
10971
10972 if (!TREE_SIDE_EFFECTS (arg0)
10973 && !TREE_SIDE_EFFECTS (arg1))
10974 {
10975 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10976 if (tem && !operand_equal_p (tem, arg0, 0))
10977 return fold_build2 (code, type, tem, arg1);
10978
10979 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10980 if (tem && !operand_equal_p (tem, arg1, 0))
10981 return fold_build2 (code, type, arg0, tem);
10982 }
10983
10984 truth_andor:
10985 /* We only do these simplifications if we are optimizing. */
10986 if (!optimize)
10987 return NULL_TREE;
10988
10989 /* Check for things like (A || B) && (A || C). We can convert this
10990 to A || (B && C). Note that either operator can be any of the four
10991 truth and/or operations and the transformation will still be
10992 valid. Also note that we only care about order for the
10993 ANDIF and ORIF operators. If B contains side effects, this
10994 might change the truth-value of A. */
10995 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10996 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10997 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10998 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10999 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11000 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11001 {
11002 tree a00 = TREE_OPERAND (arg0, 0);
11003 tree a01 = TREE_OPERAND (arg0, 1);
11004 tree a10 = TREE_OPERAND (arg1, 0);
11005 tree a11 = TREE_OPERAND (arg1, 1);
11006 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11007 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11008 && (code == TRUTH_AND_EXPR
11009 || code == TRUTH_OR_EXPR));
11010
11011 if (operand_equal_p (a00, a10, 0))
11012 return fold_build2 (TREE_CODE (arg0), type, a00,
11013 fold_build2 (code, type, a01, a11));
11014 else if (commutative && operand_equal_p (a00, a11, 0))
11015 return fold_build2 (TREE_CODE (arg0), type, a00,
11016 fold_build2 (code, type, a01, a10));
11017 else if (commutative && operand_equal_p (a01, a10, 0))
11018 return fold_build2 (TREE_CODE (arg0), type, a01,
11019 fold_build2 (code, type, a00, a11));
11020
11021 /* This case is tricky because we must either have commutative
11022 operators or else A10 must not have side-effects. */
11023
11024 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11025 && operand_equal_p (a01, a11, 0))
11026 return fold_build2 (TREE_CODE (arg0), type,
11027 fold_build2 (code, type, a00, a10),
11028 a01);
11029 }
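      /* E.g. (a || b) && (a || c) becomes a || (b && c); the rewrite
	 holds for any mix of the four truth AND/OR codes, subject to
	 the side-effect checks above.  */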
11030
11031 /* See if we can build a range comparison. */
11032 if (0 != (tem = fold_range_test (code, type, op0, op1)))
11033 return tem;
11034
11035 /* Check for the possibility of merging component references. If our
11036 lhs is another similar operation, try to merge its rhs with our
11037 rhs. Then try to merge our lhs and rhs. */
11038 if (TREE_CODE (arg0) == code
11039 && 0 != (tem = fold_truthop (code, type,
11040 TREE_OPERAND (arg0, 1), arg1)))
11041 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11042
11043 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
11044 return tem;
11045
11046 return NULL_TREE;
11047
11048 case TRUTH_ORIF_EXPR:
11049 /* Note that the operands of this must be ints
11050 and their values must be 0 or true.
11051 ("true" is a fixed value perhaps depending on the language.) */
11052 /* If first arg is constant true, return it. */
11053 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11054 return fold_convert (type, arg0);
11055 case TRUTH_OR_EXPR:
11056 /* If either arg is constant zero, drop it. */
11057 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11058 return non_lvalue (fold_convert (type, arg1));
11059 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11060 /* Preserve sequence points. */
11061 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11062 return non_lvalue (fold_convert (type, arg0));
11063 /* If second arg is constant true, result is true, but we must
11064 evaluate first arg. */
11065 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11066 return omit_one_operand (type, arg1, arg0);
11067 /* Likewise for first arg, but note this only occurs here for
11068 TRUTH_OR_EXPR. */
11069 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11070 return omit_one_operand (type, arg0, arg1);
11071
11072 /* !X || X is always true. */
11073 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11074 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11075 return omit_one_operand (type, integer_one_node, arg1);
11076 /* X || !X is always true. */
11077 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11078 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11079 return omit_one_operand (type, integer_one_node, arg0);
11080
11081 goto truth_andor;
11082
11083 case TRUTH_XOR_EXPR:
11084 /* If the second arg is constant zero, drop it. */
11085 if (integer_zerop (arg1))
11086 return non_lvalue (fold_convert (type, arg0));
11087 /* If the second arg is constant true, this is a logical inversion. */
11088 if (integer_onep (arg1))
11089 {
11090 /* Only call invert_truthvalue if operand is a truth value. */
11091 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11092 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11093 else
11094 tem = invert_truthvalue (arg0);
11095 return non_lvalue (fold_convert (type, tem));
11096 }
11097 /* Identical arguments cancel to zero. */
11098 if (operand_equal_p (arg0, arg1, 0))
11099 return omit_one_operand (type, integer_zero_node, arg0);
11100
11101 /* !X ^ X is always true. */
11102 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11103 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11104 return omit_one_operand (type, integer_one_node, arg1);
11105
11106 /* X ^ !X is always true. */
11107 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11108 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11109 return omit_one_operand (type, integer_one_node, arg0);
11110
11111 return NULL_TREE;
11112
11113 case EQ_EXPR:
11114 case NE_EXPR:
11115 tem = fold_comparison (code, type, op0, op1);
11116 if (tem != NULL_TREE)
11117 return tem;
11118
11119 /* bool_var != 0 becomes bool_var. */
11120 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11121 && code == NE_EXPR)
11122 return non_lvalue (fold_convert (type, arg0));
11123
11124 /* bool_var == 1 becomes bool_var. */
11125 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11126 && code == EQ_EXPR)
11127 return non_lvalue (fold_convert (type, arg0));
11128
11129 /* bool_var != 1 becomes !bool_var. */
11130 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11131 && code == NE_EXPR)
11132 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
11133
11134 /* bool_var == 0 becomes !bool_var. */
11135 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11136 && code == EQ_EXPR)
11137 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
11138
11139 /* If this is an equality comparison of the address of two non-weak,
11140 unaliased symbols neither of which are extern (since we do not
11141 have access to attributes for externs), then we know the result. */
11142 if (TREE_CODE (arg0) == ADDR_EXPR
11143 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11144 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11145 && ! lookup_attribute ("alias",
11146 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11147 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11148 && TREE_CODE (arg1) == ADDR_EXPR
11149 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11150 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11151 && ! lookup_attribute ("alias",
11152 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11153 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11154 {
11155 /* We know that we're looking at the address of two
11156 non-weak, unaliased, static _DECL nodes.
11157
11158 It is both wasteful and incorrect to call operand_equal_p
11159 to compare the two ADDR_EXPR nodes. It is wasteful in that
11160 all we need to do is test pointer equality for the arguments
11161 to the two ADDR_EXPR nodes. It is incorrect to use
11162 operand_equal_p as that function is NOT equivalent to a
11163 C equality test. It can in fact return false for two
11164 objects which would test as equal using the C equality
11165 operator. */
11166 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11167 return constant_boolean_node (equal
11168 ? code == EQ_EXPR : code != EQ_EXPR,
11169 type);
11170 }
11171
11172 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11173 a MINUS_EXPR of a constant, we can convert it into a comparison with
11174 a revised constant as long as no overflow occurs. */
11175 if (TREE_CODE (arg1) == INTEGER_CST
11176 && (TREE_CODE (arg0) == PLUS_EXPR
11177 || TREE_CODE (arg0) == MINUS_EXPR)
11178 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11179 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11180 ? MINUS_EXPR : PLUS_EXPR,
11181 fold_convert (TREE_TYPE (arg0), arg1),
11182 TREE_OPERAND (arg0, 1), 0))
11183 && !TREE_OVERFLOW (tem))
11184 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11185
11186 /* Similarly for a NEGATE_EXPR. */
11187 if (TREE_CODE (arg0) == NEGATE_EXPR
11188 && TREE_CODE (arg1) == INTEGER_CST
11189 && 0 != (tem = negate_expr (arg1))
11190 && TREE_CODE (tem) == INTEGER_CST
11191 && !TREE_OVERFLOW (tem))
11192 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11193
11194 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11195 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11196 && TREE_CODE (arg1) == INTEGER_CST
11197 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11198 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11199 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
11200 fold_convert (TREE_TYPE (arg0), arg1),
11201 TREE_OPERAND (arg0, 1)));
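      /* E.g. (X ^ 4) == 6 becomes X == 2, since 4 ^ 6 == 2.  */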
11202
11203 /* Transform comparisons of the form X +- C CMP X. */
11204 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11205 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11206 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11207 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11208 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11209 {
11210 tree cst = TREE_OPERAND (arg0, 1);
11211
11212 if (code == EQ_EXPR
11213 && !integer_zerop (cst))
11214 return omit_two_operands (type, boolean_false_node,
11215 TREE_OPERAND (arg0, 0), arg1);
11216 else
11217 return omit_two_operands (type, boolean_true_node,
11218 TREE_OPERAND (arg0, 0), arg1);
11219 }
11220
11221 /* If we have X - Y == 0, we can convert that to X == Y and similarly
11222 for !=. Don't do this for ordered comparisons due to overflow. */
11223 if (TREE_CODE (arg0) == MINUS_EXPR
11224 && integer_zerop (arg1))
11225 return fold_build2 (code, type,
11226 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
11227
11228 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11229 if (TREE_CODE (arg0) == ABS_EXPR
11230 && (integer_zerop (arg1) || real_zerop (arg1)))
11231 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
11232
11233 /* If this is an EQ or NE comparison with zero and ARG0 is
11234 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11235 two operations, but the latter can be done in one less insn
11236 on machines that have only two-operand insns or on which a
11237 constant cannot be the first operand. */
11238 if (TREE_CODE (arg0) == BIT_AND_EXPR
11239 && integer_zerop (arg1))
11240 {
11241 tree arg00 = TREE_OPERAND (arg0, 0);
11242 tree arg01 = TREE_OPERAND (arg0, 1);
11243 if (TREE_CODE (arg00) == LSHIFT_EXPR
11244 && integer_onep (TREE_OPERAND (arg00, 0)))
11245 return
11246 fold_build2 (code, type,
11247 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11248 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
11249 arg01, TREE_OPERAND (arg00, 1)),
11250 fold_convert (TREE_TYPE (arg0),
11251 integer_one_node)),
11252 arg1);
11253 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
11254 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
11255 return
11256 fold_build2 (code, type,
11257 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11258 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
11259 arg00, TREE_OPERAND (arg01, 1)),
11260 fold_convert (TREE_TYPE (arg0),
11261 integer_one_node)),
11262 arg1);
11263 }
11264
11265 /* If this is an NE or EQ comparison of zero against the result of a
11266 signed MOD operation whose second operand is a power of 2, make
11267 the MOD operation unsigned since it is simpler and equivalent. */
11268 if (integer_zerop (arg1)
11269 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11270 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11271 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11272 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11273 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11274 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11275 {
11276 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
11277 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
11278 fold_convert (newtype,
11279 TREE_OPERAND (arg0, 0)),
11280 fold_convert (newtype,
11281 TREE_OPERAND (arg0, 1)));
11282
11283 return fold_build2 (code, type, newmod,
11284 fold_convert (newtype, arg1));
11285 }
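      /* E.g. for signed int X, X % 4 == 0 becomes
	 (unsigned int) X % 4 == 0; the remainder is zero in one form
	 exactly when it is zero in the other.  */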
11286
11287 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11288 C1 is a valid shift constant, and C2 is a power of two, i.e.
11289 a single bit. */
11290 if (TREE_CODE (arg0) == BIT_AND_EXPR
11291 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11292 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11293 == INTEGER_CST
11294 && integer_pow2p (TREE_OPERAND (arg0, 1))
11295 && integer_zerop (arg1))
11296 {
11297 tree itype = TREE_TYPE (arg0);
11298 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
11299 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11300
11301 /* Check for a valid shift count. */
11302 if (TREE_INT_CST_HIGH (arg001) == 0
11303 && TREE_INT_CST_LOW (arg001) < prec)
11304 {
11305 tree arg01 = TREE_OPERAND (arg0, 1);
11306 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11307 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11308 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11309 can be rewritten as (X & (C2 << C1)) != 0. */
11310 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11311 {
11312 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
11313 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
11314 return fold_build2 (code, type, tem, arg1);
11315 }
11316 /* Otherwise, for signed (arithmetic) shifts,
11317 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11318 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11319 else if (!TYPE_UNSIGNED (itype))
11320 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11321 arg000, build_int_cst (itype, 0));
11322 /* Otherwise, for unsigned (logical) shifts,
11323 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11324 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11325 else
11326 return omit_one_operand (type,
11327 code == EQ_EXPR ? integer_one_node
11328 : integer_zero_node,
11329 arg000);
11330 }
11331 }
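      /* E.g. ((X >> 3) & 4) != 0 becomes (X & (4 << 3)) != 0, i.e.
	 (X & 32) != 0, testing bit 5 of X directly.  */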
11332
11333 /* If this is an NE comparison of zero with an AND of one, remove the
11334 comparison since the AND will give the correct value. */
11335 if (code == NE_EXPR
11336 && integer_zerop (arg1)
11337 && TREE_CODE (arg0) == BIT_AND_EXPR
11338 && integer_onep (TREE_OPERAND (arg0, 1)))
11339 return fold_convert (type, arg0);
11340
11341 /* If we have (A & C) == C where C is a power of 2, convert this into
11342 (A & C) != 0. Similarly for NE_EXPR. */
11343 if (TREE_CODE (arg0) == BIT_AND_EXPR
11344 && integer_pow2p (TREE_OPERAND (arg0, 1))
11345 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11346 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11347 arg0, fold_convert (TREE_TYPE (arg0),
11348 integer_zero_node));
11349
11350 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11351 bit, then fold the expression into A < 0 or A >= 0. */
11352 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
11353 if (tem)
11354 return tem;
11355
11356 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11357 Similarly for NE_EXPR. */
11358 if (TREE_CODE (arg0) == BIT_AND_EXPR
11359 && TREE_CODE (arg1) == INTEGER_CST
11360 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11361 {
11362 tree notc = fold_build1 (BIT_NOT_EXPR,
11363 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11364 TREE_OPERAND (arg0, 1));
11365 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11366 arg1, notc);
11367 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11368 if (integer_nonzerop (dandnotc))
11369 return omit_one_operand (type, rslt, arg0);
11370 }
11371
11372 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
11373 Similarly for NE_EXPR. */
11374 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11375 && TREE_CODE (arg1) == INTEGER_CST
11376 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11377 {
11378 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
11379 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11380 TREE_OPERAND (arg0, 1), notd);
11381 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11382 if (integer_nonzerop (candnotd))
11383 return omit_one_operand (type, rslt, arg0);
11384 }
11385
11386 /* If this is a comparison of a field, we may be able to simplify it. */
11387 if ((TREE_CODE (arg0) == COMPONENT_REF
11388 || TREE_CODE (arg0) == BIT_FIELD_REF)
11389 /* Handle the constant case even without -O
11390 to make sure the warnings are given. */
11391 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11392 {
11393 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
11394 if (t1)
11395 return t1;
11396 }
11397
11398 /* Optimize comparisons of strlen vs zero to a compare of the
11399 first character of the string vs zero. To wit,
11400 strlen(ptr) == 0 => *ptr == 0
11401 strlen(ptr) != 0 => *ptr != 0
11402 Other cases should reduce to one of these two (or a constant)
11403 due to the return value of strlen being unsigned. */
11404 if (TREE_CODE (arg0) == CALL_EXPR
11405 && integer_zerop (arg1))
11406 {
11407 tree fndecl = get_callee_fndecl (arg0);
11408
11409 if (fndecl
11410 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
11411 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
11412 && call_expr_nargs (arg0) == 1
11413 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
11414 {
11415 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
11416 return fold_build2 (code, type, iref,
11417 build_int_cst (TREE_TYPE (iref), 0));
11418 }
11419 }
11420
11421 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11422 of X. Similarly fold (X >> C) == 0 into X >= 0. */
11423 if (TREE_CODE (arg0) == RSHIFT_EXPR
11424 && integer_zerop (arg1)
11425 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11426 {
11427 tree arg00 = TREE_OPERAND (arg0, 0);
11428 tree arg01 = TREE_OPERAND (arg0, 1);
11429 tree itype = TREE_TYPE (arg00);
11430 if (TREE_INT_CST_HIGH (arg01) == 0
11431 && TREE_INT_CST_LOW (arg01)
11432 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
11433 {
11434 if (TYPE_UNSIGNED (itype))
11435 {
11436 itype = lang_hooks.types.signed_type (itype);
11437 arg00 = fold_convert (itype, arg00);
11438 }
11439 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11440 type, arg00, build_int_cst (itype, 0));
11441 }
11442 }
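      /* E.g. for 32-bit int X, (X >> 31) != 0 becomes X < 0 and
	 (X >> 31) == 0 becomes X >= 0, since the shift leaves only
	 the sign bit.  */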
11443
11444 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
11445 if (integer_zerop (arg1)
11446 && TREE_CODE (arg0) == BIT_XOR_EXPR)
11447 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11448 TREE_OPERAND (arg0, 1));
11449
11450 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
11451 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11452 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11453 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11454 build_int_cst (TREE_TYPE (arg1), 0));
11455 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
11456 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11457 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11458 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11459 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
11460 build_int_cst (TREE_TYPE (arg1), 0));
11461
11462 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
11463 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11464 && TREE_CODE (arg1) == INTEGER_CST
11465 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11466 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11467 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
11468 TREE_OPERAND (arg0, 1), arg1));
11469
11470 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11471 (X & C) == 0 when C is a single bit. */
11472 if (TREE_CODE (arg0) == BIT_AND_EXPR
11473 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11474 && integer_zerop (arg1)
11475 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11476 {
11477 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11478 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11479 TREE_OPERAND (arg0, 1));
11480 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11481 type, tem, arg1);
11482 }
11483
11484 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11485 constant C is a power of two, i.e. a single bit. */
11486 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11487 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11488 && integer_zerop (arg1)
11489 && integer_pow2p (TREE_OPERAND (arg0, 1))
11490 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11491 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11492 {
11493 tree arg00 = TREE_OPERAND (arg0, 0);
11494 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11495 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11496 }
11497
11498 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11499 when C is a power of two, i.e. a single bit. */
11500 if (TREE_CODE (arg0) == BIT_AND_EXPR
11501 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11502 && integer_zerop (arg1)
11503 && integer_pow2p (TREE_OPERAND (arg0, 1))
11504 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11505 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11506 {
11507 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11508 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
11509 arg000, TREE_OPERAND (arg0, 1));
11510 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11511 tem, build_int_cst (TREE_TYPE (tem), 0));
11512 }
11513
11514 if (integer_zerop (arg1)
11515 && tree_expr_nonzero_p (arg0))
11516 {
11517 tree res = constant_boolean_node (code == NE_EXPR, type);
11518 return omit_one_operand (type, res, arg0);
11519 }
11520
11521 /* Fold -X op -Y as X op Y, where op is eq/ne. */
11522 if (TREE_CODE (arg0) == NEGATE_EXPR
11523 && TREE_CODE (arg1) == NEGATE_EXPR)
11524 return fold_build2 (code, type,
11525 TREE_OPERAND (arg0, 0),
11526 TREE_OPERAND (arg1, 0));
11527
11528 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
11529 if (TREE_CODE (arg0) == BIT_AND_EXPR
11530 && TREE_CODE (arg1) == BIT_AND_EXPR)
11531 {
11532 tree arg00 = TREE_OPERAND (arg0, 0);
11533 tree arg01 = TREE_OPERAND (arg0, 1);
11534 tree arg10 = TREE_OPERAND (arg1, 0);
11535 tree arg11 = TREE_OPERAND (arg1, 1);
11536 tree itype = TREE_TYPE (arg0);
11537
11538 if (operand_equal_p (arg01, arg11, 0))
11539 return fold_build2 (code, type,
11540 fold_build2 (BIT_AND_EXPR, itype,
11541 fold_build2 (BIT_XOR_EXPR, itype,
11542 arg00, arg10),
11543 arg01),
11544 build_int_cst (itype, 0));
11545
11546 if (operand_equal_p (arg01, arg10, 0))
11547 return fold_build2 (code, type,
11548 fold_build2 (BIT_AND_EXPR, itype,
11549 fold_build2 (BIT_XOR_EXPR, itype,
11550 arg00, arg11),
11551 arg01),
11552 build_int_cst (itype, 0));
11553
11554 if (operand_equal_p (arg00, arg11, 0))
11555 return fold_build2 (code, type,
11556 fold_build2 (BIT_AND_EXPR, itype,
11557 fold_build2 (BIT_XOR_EXPR, itype,
11558 arg01, arg10),
11559 arg00),
11560 build_int_cst (itype, 0));
11561
11562 if (operand_equal_p (arg00, arg10, 0))
11563 return fold_build2 (code, type,
11564 fold_build2 (BIT_AND_EXPR, itype,
11565 fold_build2 (BIT_XOR_EXPR, itype,
11566 arg01, arg11),
11567 arg00),
11568 build_int_cst (itype, 0));
11569 }
11570
11571 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11572 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11573 {
11574 tree arg00 = TREE_OPERAND (arg0, 0);
11575 tree arg01 = TREE_OPERAND (arg0, 1);
11576 tree arg10 = TREE_OPERAND (arg1, 0);
11577 tree arg11 = TREE_OPERAND (arg1, 1);
11578 tree itype = TREE_TYPE (arg0);
11579
11580 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11581 operand_equal_p guarantees no side-effects so we don't need
11582 to use omit_one_operand on Z. */
11583 if (operand_equal_p (arg01, arg11, 0))
11584 return fold_build2 (code, type, arg00, arg10);
11585 if (operand_equal_p (arg01, arg10, 0))
11586 return fold_build2 (code, type, arg00, arg11);
11587 if (operand_equal_p (arg00, arg11, 0))
11588 return fold_build2 (code, type, arg01, arg10);
11589 if (operand_equal_p (arg00, arg10, 0))
11590 return fold_build2 (code, type, arg01, arg11);
11591
11592 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11593 if (TREE_CODE (arg01) == INTEGER_CST
11594 && TREE_CODE (arg11) == INTEGER_CST)
11595 return fold_build2 (code, type,
11596 fold_build2 (BIT_XOR_EXPR, itype, arg00,
11597 fold_build2 (BIT_XOR_EXPR, itype,
11598 arg01, arg11)),
11599 arg10);
11600 }
11601
11602 /* Attempt to simplify equality/inequality comparisons of complex
11603 values. Only lower the comparison if the result is known or
11604 can be simplified to a single scalar comparison. */
11605 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11606 || TREE_CODE (arg0) == COMPLEX_CST)
11607 && (TREE_CODE (arg1) == COMPLEX_EXPR
11608 || TREE_CODE (arg1) == COMPLEX_CST))
11609 {
11610 tree real0, imag0, real1, imag1;
11611 tree rcond, icond;
11612
11613 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11614 {
11615 real0 = TREE_OPERAND (arg0, 0);
11616 imag0 = TREE_OPERAND (arg0, 1);
11617 }
11618 else
11619 {
11620 real0 = TREE_REALPART (arg0);
11621 imag0 = TREE_IMAGPART (arg0);
11622 }
11623
11624 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11625 {
11626 real1 = TREE_OPERAND (arg1, 0);
11627 imag1 = TREE_OPERAND (arg1, 1);
11628 }
11629 else
11630 {
11631 real1 = TREE_REALPART (arg1);
11632 imag1 = TREE_IMAGPART (arg1);
11633 }
11634
11635 rcond = fold_binary (code, type, real0, real1);
11636 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11637 {
11638 if (integer_zerop (rcond))
11639 {
11640 if (code == EQ_EXPR)
11641 return omit_two_operands (type, boolean_false_node,
11642 imag0, imag1);
11643 return fold_build2 (NE_EXPR, type, imag0, imag1);
11644 }
11645 else
11646 {
11647 if (code == NE_EXPR)
11648 return omit_two_operands (type, boolean_true_node,
11649 imag0, imag1);
11650 return fold_build2 (EQ_EXPR, type, imag0, imag1);
11651 }
11652 }
11653
11654 icond = fold_binary (code, type, imag0, imag1);
11655 if (icond && TREE_CODE (icond) == INTEGER_CST)
11656 {
11657 if (integer_zerop (icond))
11658 {
11659 if (code == EQ_EXPR)
11660 return omit_two_operands (type, boolean_false_node,
11661 real0, real1);
11662 return fold_build2 (NE_EXPR, type, real0, real1);
11663 }
11664 else
11665 {
11666 if (code == NE_EXPR)
11667 return omit_two_operands (type, boolean_true_node,
11668 real0, real1);
11669 return fold_build2 (EQ_EXPR, type, real0, real1);
11670 }
11671 }
11672 }
11673
11674 return NULL_TREE;
11675
11676 case LT_EXPR:
11677 case GT_EXPR:
11678 case LE_EXPR:
11679 case GE_EXPR:
11680 tem = fold_comparison (code, type, op0, op1);
11681 if (tem != NULL_TREE)
11682 return tem;
11683
11684 /* Transform comparisons of the form X +- C CMP X. */
11685 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11686 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11687 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11688 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
11689 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11690 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11691 {
11692 tree arg01 = TREE_OPERAND (arg0, 1);
11693 enum tree_code code0 = TREE_CODE (arg0);
11694 int is_positive;
11695
11696 if (TREE_CODE (arg01) == REAL_CST)
11697 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11698 else
11699 is_positive = tree_int_cst_sgn (arg01);
11700
11701 /* (X - c) > X becomes false. */
11702 if (code == GT_EXPR
11703 && ((code0 == MINUS_EXPR && is_positive >= 0)
11704 || (code0 == PLUS_EXPR && is_positive <= 0)))
11705 {
11706 if (TREE_CODE (arg01) == INTEGER_CST
11707 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11708 fold_overflow_warning (("assuming signed overflow does not "
11709 "occur when assuming that (X - c) > X "
11710 "is always false"),
11711 WARN_STRICT_OVERFLOW_ALL);
11712 return constant_boolean_node (0, type);
11713 }
11714
11715 /* Likewise (X + c) < X becomes false. */
11716 if (code == LT_EXPR
11717 && ((code0 == PLUS_EXPR && is_positive >= 0)
11718 || (code0 == MINUS_EXPR && is_positive <= 0)))
11719 {
11720 if (TREE_CODE (arg01) == INTEGER_CST
11721 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11722 fold_overflow_warning (("assuming signed overflow does not "
11723 "occur when assuming that "
11724 "(X + c) < X is always false"),
11725 WARN_STRICT_OVERFLOW_ALL);
11726 return constant_boolean_node (0, type);
11727 }
11728
11729 /* Convert (X - c) <= X to true. */
11730 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11731 && code == LE_EXPR
11732 && ((code0 == MINUS_EXPR && is_positive >= 0)
11733 || (code0 == PLUS_EXPR && is_positive <= 0)))
11734 {
11735 if (TREE_CODE (arg01) == INTEGER_CST
11736 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11737 fold_overflow_warning (("assuming signed overflow does not "
11738 "occur when assuming that "
11739 "(X - c) <= X is always true"),
11740 WARN_STRICT_OVERFLOW_ALL);
11741 return constant_boolean_node (1, type);
11742 }
11743
11744 /* Convert (X + c) >= X to true. */
11745 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11746 && code == GE_EXPR
11747 && ((code0 == PLUS_EXPR && is_positive >= 0)
11748 || (code0 == MINUS_EXPR && is_positive <= 0)))
11749 {
11750 if (TREE_CODE (arg01) == INTEGER_CST
11751 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11752 fold_overflow_warning (("assuming signed overflow does not "
11753 "occur when assuming that "
11754 "(X + c) >= X is always true"),
11755 WARN_STRICT_OVERFLOW_ALL);
11756 return constant_boolean_node (1, type);
11757 }
11758
11759 if (TREE_CODE (arg01) == INTEGER_CST)
11760 {
11761 /* Convert X + c > X and X - c < X to true for integers. */
11762 if (code == GT_EXPR
11763 && ((code0 == PLUS_EXPR && is_positive > 0)
11764 || (code0 == MINUS_EXPR && is_positive < 0)))
11765 {
11766 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11767 fold_overflow_warning (("assuming signed overflow does "
11768 "not occur when assuming that "
11769 "(X + c) > X is always true"),
11770 WARN_STRICT_OVERFLOW_ALL);
11771 return constant_boolean_node (1, type);
11772 }
11773
11774 if (code == LT_EXPR
11775 && ((code0 == MINUS_EXPR && is_positive > 0)
11776 || (code0 == PLUS_EXPR && is_positive < 0)))
11777 {
11778 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11779 fold_overflow_warning (("assuming signed overflow does "
11780 "not occur when assuming that "
11781 "(X - c) < X is always true"),
11782 WARN_STRICT_OVERFLOW_ALL);
11783 return constant_boolean_node (1, type);
11784 }
11785
11786 /* Convert X + c <= X and X - c >= X to false for integers. */
11787 if (code == LE_EXPR
11788 && ((code0 == PLUS_EXPR && is_positive > 0)
11789 || (code0 == MINUS_EXPR && is_positive < 0)))
11790 {
11791 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11792 fold_overflow_warning (("assuming signed overflow does "
11793 "not occur when assuming that "
11794 "(X + c) <= X is always false"),
11795 WARN_STRICT_OVERFLOW_ALL);
11796 return constant_boolean_node (0, type);
11797 }
11798
11799 if (code == GE_EXPR
11800 && ((code0 == MINUS_EXPR && is_positive > 0)
11801 || (code0 == PLUS_EXPR && is_positive < 0)))
11802 {
11803 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11804 fold_overflow_warning (("assuming signed overflow does "
11805 "not occur when assuming that "
11806 "(X - c) >= X is always true"),
11807 WARN_STRICT_OVERFLOW_ALL);
11808 return constant_boolean_node (0, type);
11809 }
11810 }
11811 }
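/* Worked example of the (X +- c) CMP X folds above (an illustrative
   note, not part of the original source): for "int x", where signed
   overflow is treated as undefined, "x + 1 > x" folds to 1 and
   "x + 1 <= x" folds to 0, and each fold may issue a
   -Wstrict-overflow diagnostic through fold_overflow_warning.  */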
11812
11813 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11814 This transformation affects the cases which are handled in later
11815 optimizations involving comparisons with non-negative constants. */
11816 if (TREE_CODE (arg1) == INTEGER_CST
11817 && TREE_CODE (arg0) != INTEGER_CST
11818 && tree_int_cst_sgn (arg1) > 0)
11819 {
11820 if (code == GE_EXPR)
11821 {
11822 arg1 = const_binop (MINUS_EXPR, arg1,
11823 build_int_cst (TREE_TYPE (arg1), 1), 0);
11824 return fold_build2 (GT_EXPR, type, arg0,
11825 fold_convert (TREE_TYPE (arg0), arg1));
11826 }
11827 if (code == LT_EXPR)
11828 {
11829 arg1 = const_binop (MINUS_EXPR, arg1,
11830 build_int_cst (TREE_TYPE (arg1), 1), 0);
11831 return fold_build2 (LE_EXPR, type, arg0,
11832 fold_convert (TREE_TYPE (arg0), arg1));
11833 }
11834 }
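/* For instance (illustration): "x >= 3" is rewritten here as "x > 2"
   and "x < 3" as "x <= 2", so the later code only has to recognize
   the GT_EXPR/LE_EXPR forms against the adjusted constant.  */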
11835
11836 /* Comparisons with the highest or lowest possible integer of
11837 the specified precision will have known values. */
11838 {
11839 tree arg1_type = TREE_TYPE (arg1);
11840 unsigned int width = TYPE_PRECISION (arg1_type);
11841
11842 if (TREE_CODE (arg1) == INTEGER_CST
11843 && !TREE_OVERFLOW (arg1)
11844 && width <= 2 * HOST_BITS_PER_WIDE_INT
11845 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
11846 {
11847 HOST_WIDE_INT signed_max_hi;
11848 unsigned HOST_WIDE_INT signed_max_lo;
11849 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11850
11851 if (width <= HOST_BITS_PER_WIDE_INT)
11852 {
11853 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11854 - 1;
11855 signed_max_hi = 0;
11856 max_hi = 0;
11857
11858 if (TYPE_UNSIGNED (arg1_type))
11859 {
11860 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11861 min_lo = 0;
11862 min_hi = 0;
11863 }
11864 else
11865 {
11866 max_lo = signed_max_lo;
11867 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11868 min_hi = -1;
11869 }
11870 }
11871 else
11872 {
11873 width -= HOST_BITS_PER_WIDE_INT;
11874 signed_max_lo = -1;
11875 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11876 - 1;
11877 max_lo = -1;
11878 min_lo = 0;
11879
11880 if (TYPE_UNSIGNED (arg1_type))
11881 {
11882 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11883 min_hi = 0;
11884 }
11885 else
11886 {
11887 max_hi = signed_max_hi;
11888 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11889 }
11890 }
11891
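/* Illustration of the bounds computed above and the folds below
   (added note): for an 8-bit "unsigned char c", max_lo is 0xff and
   min_lo is 0, so "c > 255" folds to 0, "c <= 255" folds to 1,
   "c >= 255" becomes "c == 255", "c > 254" becomes "c == 255", and
   "c >= 1" becomes "c != 0".  */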
11892 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11893 && TREE_INT_CST_LOW (arg1) == max_lo)
11894 switch (code)
11895 {
11896 case GT_EXPR:
11897 return omit_one_operand (type, integer_zero_node, arg0);
11898
11899 case GE_EXPR:
11900 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11901
11902 case LE_EXPR:
11903 return omit_one_operand (type, integer_one_node, arg0);
11904
11905 case LT_EXPR:
11906 return fold_build2 (NE_EXPR, type, arg0, arg1);
11907
11908 /* The GE_EXPR and LT_EXPR cases above are not normally
11909 reached because of previous transformations. */
11910
11911 default:
11912 break;
11913 }
11914 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11915 == max_hi
11916 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11917 switch (code)
11918 {
11919 case GT_EXPR:
11920 arg1 = const_binop (PLUS_EXPR, arg1,
11921 build_int_cst (TREE_TYPE (arg1), 1), 0);
11922 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11923 case LE_EXPR:
11924 arg1 = const_binop (PLUS_EXPR, arg1,
11925 build_int_cst (TREE_TYPE (arg1), 1), 0);
11926 return fold_build2 (NE_EXPR, type, arg0, arg1);
11927 default:
11928 break;
11929 }
11930 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11931 == min_hi
11932 && TREE_INT_CST_LOW (arg1) == min_lo)
11933 switch (code)
11934 {
11935 case LT_EXPR:
11936 return omit_one_operand (type, integer_zero_node, arg0);
11937
11938 case LE_EXPR:
11939 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11940
11941 case GE_EXPR:
11942 return omit_one_operand (type, integer_one_node, arg0);
11943
11944 case GT_EXPR:
11945 return fold_build2 (NE_EXPR, type, op0, op1);
11946
11947 default:
11948 break;
11949 }
11950 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11951 == min_hi
11952 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11953 switch (code)
11954 {
11955 case GE_EXPR:
11956 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11957 return fold_build2 (NE_EXPR, type, arg0, arg1);
11958 case LT_EXPR:
11959 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11960 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11961 default:
11962 break;
11963 }
11964
11965 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
11966 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11967 && TYPE_UNSIGNED (arg1_type)
11968 /* We will flip the signedness of the comparison operator
11969 associated with the mode of arg1, so the sign bit is
11970 specified by this mode. Check that arg1 is the signed
11971 max associated with this sign bit. */
11972 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
11973 /* signed_type does not work on pointer types. */
11974 && INTEGRAL_TYPE_P (arg1_type))
11975 {
11976 /* The following case also applies to X < signed_max+1
11977 and X >= signed_max+1 because of previous transformations. */
11978 if (code == LE_EXPR || code == GT_EXPR)
11979 {
11980 tree st0, st1;
11981 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
11982 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11983 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
11984 type, fold_convert (st0, arg0),
11985 build_int_cst (st1, 0));
11986 }
11987 }
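/* Illustration: for "unsigned int x" on a 32-bit target,
   "x > 0x7fffffff" is rewritten as "(int) x < 0" and
   "x <= 0x7fffffff" as "(int) x >= 0", i.e. a sign-bit test
   in the corresponding signed type.  */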
11988 }
11989 }
11990
11991 /* If we are comparing an ABS_EXPR with a constant, we can
11992 convert all the cases into explicit comparisons, but they may
11993 well not be faster than doing the ABS and one comparison.
11994 But ABS (X) <= C is a range comparison, which becomes a subtraction
11995 and a comparison, and is probably faster. */
11996 if (code == LE_EXPR
11997 && TREE_CODE (arg1) == INTEGER_CST
11998 && TREE_CODE (arg0) == ABS_EXPR
11999 && ! TREE_SIDE_EFFECTS (arg0)
12000 && (0 != (tem = negate_expr (arg1)))
12001 && TREE_CODE (tem) == INTEGER_CST
12002 && !TREE_OVERFLOW (tem))
12003 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12004 build2 (GE_EXPR, type,
12005 TREE_OPERAND (arg0, 0), tem),
12006 build2 (LE_EXPR, type,
12007 TREE_OPERAND (arg0, 0), arg1));
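/* Sketch of the effect: "abs (x) <= 5" becomes the range test
   "x >= -5 && x <= 5" via the TRUTH_ANDIF_EXPR built above.  */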
12008
12009 /* Convert ABS_EXPR<x> >= 0 to true. */
12010 strict_overflow_p = false;
12011 if (code == GE_EXPR
12012 && (integer_zerop (arg1)
12013 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12014 && real_zerop (arg1)))
12015 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12016 {
12017 if (strict_overflow_p)
12018 fold_overflow_warning (("assuming signed overflow does not occur "
12019 "when simplifying comparison of "
12020 "absolute value and zero"),
12021 WARN_STRICT_OVERFLOW_CONDITIONAL);
12022 return omit_one_operand (type, integer_one_node, arg0);
12023 }
12024
12025 /* Convert ABS_EXPR<x> < 0 to false. */
12026 strict_overflow_p = false;
12027 if (code == LT_EXPR
12028 && (integer_zerop (arg1) || real_zerop (arg1))
12029 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12030 {
12031 if (strict_overflow_p)
12032 fold_overflow_warning (("assuming signed overflow does not occur "
12033 "when simplifying comparison of "
12034 "absolute value and zero"),
12035 WARN_STRICT_OVERFLOW_CONDITIONAL);
12036 return omit_one_operand (type, integer_zero_node, arg0);
12037 }
12038
12039 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12040 and similarly for >= into !=. */
12041 if ((code == LT_EXPR || code == GE_EXPR)
12042 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12043 && TREE_CODE (arg1) == LSHIFT_EXPR
12044 && integer_onep (TREE_OPERAND (arg1, 0)))
12045 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12046 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12047 TREE_OPERAND (arg1, 1)),
12048 build_int_cst (TREE_TYPE (arg0), 0));
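/* E.g. for unsigned x (illustration): "x < (1 << y)" becomes
   "(x >> y) == 0" and "x >= (1 << y)" becomes "(x >> y) != 0".  */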
12049
12050 if ((code == LT_EXPR || code == GE_EXPR)
12051 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12052 && (TREE_CODE (arg1) == NOP_EXPR
12053 || TREE_CODE (arg1) == CONVERT_EXPR)
12054 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12055 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12056 return
12057 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12058 fold_convert (TREE_TYPE (arg0),
12059 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12060 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12061 1))),
12062 build_int_cst (TREE_TYPE (arg0), 0));
12063
12064 return NULL_TREE;
12065
12066 case UNORDERED_EXPR:
12067 case ORDERED_EXPR:
12068 case UNLT_EXPR:
12069 case UNLE_EXPR:
12070 case UNGT_EXPR:
12071 case UNGE_EXPR:
12072 case UNEQ_EXPR:
12073 case LTGT_EXPR:
12074 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12075 {
12076 t1 = fold_relational_const (code, type, arg0, arg1);
12077 if (t1 != NULL_TREE)
12078 return t1;
12079 }
12080
12081 /* If the first operand is NaN, the result is constant. */
12082 if (TREE_CODE (arg0) == REAL_CST
12083 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12084 && (code != LTGT_EXPR || ! flag_trapping_math))
12085 {
12086 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12087 ? integer_zero_node
12088 : integer_one_node;
12089 return omit_one_operand (type, t1, arg1);
12090 }
12091
12092 /* If the second operand is NaN, the result is constant. */
12093 if (TREE_CODE (arg1) == REAL_CST
12094 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12095 && (code != LTGT_EXPR || ! flag_trapping_math))
12096 {
12097 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12098 ? integer_zero_node
12099 : integer_one_node;
12100 return omit_one_operand (type, t1, arg0);
12101 }
12102
12103 /* Simplify unordered comparison of something with itself. */
12104 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12105 && operand_equal_p (arg0, arg1, 0))
12106 return constant_boolean_node (1, type);
12107
12108 if (code == LTGT_EXPR
12109 && !flag_trapping_math
12110 && operand_equal_p (arg0, arg1, 0))
12111 return constant_boolean_node (0, type);
12112
12113 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12114 {
12115 tree targ0 = strip_float_extensions (arg0);
12116 tree targ1 = strip_float_extensions (arg1);
12117 tree newtype = TREE_TYPE (targ0);
12118
12119 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12120 newtype = TREE_TYPE (targ1);
12121
12122 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12123 return fold_build2 (code, type, fold_convert (newtype, targ0),
12124 fold_convert (newtype, targ1));
12125 }
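/* E.g. with "float f, g" (illustration): "(double) f < (double) g"
   is folded back to "f < g", since widening both operands cannot
   change the result of the comparison.  */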
12126
12127 return NULL_TREE;
12128
12129 case COMPOUND_EXPR:
12130 /* When pedantic, a compound expression can be neither an lvalue
12131 nor an integer constant expression. */
12132 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12133 return NULL_TREE;
12134 /* Don't let (0, 0) be a null pointer constant. */
12135 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12136 : fold_convert (type, arg1);
12137 return pedantic_non_lvalue (tem);
12138
12139 case COMPLEX_EXPR:
12140 if ((TREE_CODE (arg0) == REAL_CST
12141 && TREE_CODE (arg1) == REAL_CST)
12142 || (TREE_CODE (arg0) == INTEGER_CST
12143 && TREE_CODE (arg1) == INTEGER_CST))
12144 return build_complex (type, arg0, arg1);
12145 return NULL_TREE;
12146
12147 case ASSERT_EXPR:
12148 /* An ASSERT_EXPR should never be passed to fold_binary. */
12149 gcc_unreachable ();
12150
12151 default:
12152 return NULL_TREE;
12153 } /* switch (code) */
12154 }
12155
12156 /* Callback for walk_tree, looking for LABEL_EXPR.
12157 Returns *TP if it is a LABEL_EXPR; otherwise returns NULL_TREE.
12158 Does not descend into the sub-tree of a GOTO_EXPR. */
12159
12160 static tree
12161 contains_label_1 (tree *tp,
12162 int *walk_subtrees,
12163 void *data ATTRIBUTE_UNUSED)
12164 {
12165 switch (TREE_CODE (*tp))
12166 {
12167 case LABEL_EXPR:
12168 return *tp;
12169 case GOTO_EXPR:
12170 *walk_subtrees = 0;
12171 /* no break */
12172 default:
12173 return NULL_TREE;
12174 }
12175 }
12176
12177 /* Checks whether the sub-tree ST contains a LABEL_EXPR which is
12178 accessible from outside the sub-tree. Returns true if such a
12179 label is found, false otherwise. */
12180
12181 static bool
12182 contains_label_p (tree st)
12183 {
12184 return (walk_tree (&st, contains_label_1, NULL, NULL) != NULL_TREE);
12185 }
12186
12187 /* Fold a ternary expression of code CODE and type TYPE with operands
12188 OP0, OP1, and OP2. Return the folded expression if folding is
12189 successful. Otherwise, return NULL_TREE. */
12190
12191 tree
12192 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
12193 {
12194 tree tem;
12195 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
12196 enum tree_code_class kind = TREE_CODE_CLASS (code);
12197
12198 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12199 && TREE_CODE_LENGTH (code) == 3);
12200
12201 /* Strip any conversions that don't change the mode. This is safe
12202 for every expression, except for a comparison expression because
12203 its signedness is derived from its operands. So, in the latter
12204 case, only strip conversions that don't change the signedness.
12205
12206 Note that this is done as an internal manipulation within the
12207 constant folder, in order to find the simplest representation of
12208 the arguments so that their form can be studied. In any case,
12209 the appropriate type conversions should be put back in the tree
12210 that will get out of the constant folder. */
12211 if (op0)
12212 {
12213 arg0 = op0;
12214 STRIP_NOPS (arg0);
12215 }
12216
12217 if (op1)
12218 {
12219 arg1 = op1;
12220 STRIP_NOPS (arg1);
12221 }
12222
12223 switch (code)
12224 {
12225 case COMPONENT_REF:
12226 if (TREE_CODE (arg0) == CONSTRUCTOR
12227 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12228 {
12229 unsigned HOST_WIDE_INT idx;
12230 tree field, value;
12231 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12232 if (field == arg1)
12233 return value;
12234 }
12235 return NULL_TREE;
12236
12237 case COND_EXPR:
12238 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12239 so all simple results must be passed through pedantic_non_lvalue. */
12240 if (TREE_CODE (arg0) == INTEGER_CST)
12241 {
12242 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12243 tem = integer_zerop (arg0) ? op2 : op1;
12244 /* Only optimize constant conditions when the selected branch
12245 has the same type as the COND_EXPR. This avoids optimizing
12246 away "c ? x : throw", where the throw has a void type.
12247 Also avoid throwing away the unused operand if it contains a label. */
12248 if ((!TREE_SIDE_EFFECTS (unused_op)
12249 || !contains_label_p (unused_op))
12250 && (! VOID_TYPE_P (TREE_TYPE (tem))
12251 || VOID_TYPE_P (type)))
12252 return pedantic_non_lvalue (tem);
12253 return NULL_TREE;
12254 }
12255 if (operand_equal_p (arg1, op2, 0))
12256 return pedantic_omit_one_operand (type, arg1, arg0);
12257
12258 /* If we have A op B ? A : C, we may be able to convert this to a
12259 simpler expression, depending on the operation and the values
12260 of B and C. Signed zeros prevent all of these transformations,
12261 for reasons given above each one.
12262
12263 Also try swapping the arguments and inverting the conditional. */
12264 if (COMPARISON_CLASS_P (arg0)
12265 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12266 arg1, TREE_OPERAND (arg0, 1))
12267 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
12268 {
12269 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
12270 if (tem)
12271 return tem;
12272 }
12273
12274 if (COMPARISON_CLASS_P (arg0)
12275 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12276 op2,
12277 TREE_OPERAND (arg0, 1))
12278 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
12279 {
12280 tem = fold_truth_not_expr (arg0);
12281 if (tem && COMPARISON_CLASS_P (tem))
12282 {
12283 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
12284 if (tem)
12285 return tem;
12286 }
12287 }
12288
12289 /* If the second operand is simpler than the third, swap them
12290 since that produces better jump optimization results. */
12291 if (truth_value_p (TREE_CODE (arg0))
12292 && tree_swap_operands_p (op1, op2, false))
12293 {
12294 /* See if this can be inverted. If it can't, possibly because
12295 it was a floating-point inequality comparison, don't do
12296 anything. */
12297 tem = fold_truth_not_expr (arg0);
12298 if (tem)
12299 return fold_build3 (code, type, tem, op2, op1);
12300 }
12301
12302 /* Convert A ? 1 : 0 to simply A. */
12303 if (integer_onep (op1)
12304 && integer_zerop (op2)
12305 /* If we try to convert OP0 to our type, the
12306 call to fold will try to move the conversion inside
12307 a COND, which will recurse. In that case, the COND_EXPR
12308 is probably the best choice, so leave it alone. */
12309 && type == TREE_TYPE (arg0))
12310 return pedantic_non_lvalue (arg0);
12311
12312 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12313 over COND_EXPR in cases such as floating point comparisons. */
12314 if (integer_zerop (op1)
12315 && integer_onep (op2)
12316 && truth_value_p (TREE_CODE (arg0)))
12317 return pedantic_non_lvalue (fold_convert (type,
12318 invert_truthvalue (arg0)));
12319
12320 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12321 if (TREE_CODE (arg0) == LT_EXPR
12322 && integer_zerop (TREE_OPERAND (arg0, 1))
12323 && integer_zerop (op2)
12324 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12325 {
12326 /* sign_bit_p only checks ARG1 bits within A's precision.
12327 If <sign bit of A> has wider type than A, bits outside
12328 of A's precision in <sign bit of A> need to be checked.
12329 If they are all 0, this optimization must be done
12330 in the unsigned variant of A's type; if they are all 1,
12331 in the signed variant; otherwise it can't be done. */
12332 if (TYPE_PRECISION (TREE_TYPE (tem))
12333 < TYPE_PRECISION (TREE_TYPE (arg1))
12334 && TYPE_PRECISION (TREE_TYPE (tem))
12335 < TYPE_PRECISION (type))
12336 {
12337 unsigned HOST_WIDE_INT mask_lo;
12338 HOST_WIDE_INT mask_hi;
12339 int inner_width, outer_width;
12340 tree tem_type;
12341
12342 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12343 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12344 if (outer_width > TYPE_PRECISION (type))
12345 outer_width = TYPE_PRECISION (type);
12346
12347 if (outer_width > HOST_BITS_PER_WIDE_INT)
12348 {
12349 mask_hi = ((unsigned HOST_WIDE_INT) -1
12350 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
12351 mask_lo = -1;
12352 }
12353 else
12354 {
12355 mask_hi = 0;
12356 mask_lo = ((unsigned HOST_WIDE_INT) -1
12357 >> (HOST_BITS_PER_WIDE_INT - outer_width));
12358 }
12359 if (inner_width > HOST_BITS_PER_WIDE_INT)
12360 {
12361 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
12362 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12363 mask_lo = 0;
12364 }
12365 else
12366 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
12367 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12368
12369 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
12370 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
12371 {
12372 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
12373 tem = fold_convert (tem_type, tem);
12374 }
12375 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
12376 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
12377 {
12378 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
12379 tem = fold_convert (tem_type, tem);
12380 }
12381 else
12382 tem = NULL;
12383 }
12384
12385 if (tem)
12386 return fold_convert (type,
12387 fold_build2 (BIT_AND_EXPR,
12388 TREE_TYPE (tem), tem,
12389 fold_convert (TREE_TYPE (tem),
12390 arg1)));
12391 }
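/* Illustration: for 32-bit "int a", "a < 0 ? 0x80000000 : 0" folds
   to "a & 0x80000000" (converted to the result type), subject to
   the precision checks above.  */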
12392
12393 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12394 already handled above. */
12395 if (TREE_CODE (arg0) == BIT_AND_EXPR
12396 && integer_onep (TREE_OPERAND (arg0, 1))
12397 && integer_zerop (op2)
12398 && integer_pow2p (arg1))
12399 {
12400 tree tem = TREE_OPERAND (arg0, 0);
12401 STRIP_NOPS (tem);
12402 if (TREE_CODE (tem) == RSHIFT_EXPR
12403 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
12404 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
12405 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
12406 return fold_build2 (BIT_AND_EXPR, type,
12407 TREE_OPERAND (tem, 0), arg1);
12408 }
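/* E.g. (sketch): "(a >> 3) & 1 ? 8 : 0" folds to "a & 8", since the
   shift count 3 matches log2 of the selected constant 8.  */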
12409
12410 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12411 is probably obsolete because the first operand should be a
12412 truth value (that's why we have the two cases above), but let's
12413 leave it in until we can confirm this for all front-ends. */
12414 if (integer_zerop (op2)
12415 && TREE_CODE (arg0) == NE_EXPR
12416 && integer_zerop (TREE_OPERAND (arg0, 1))
12417 && integer_pow2p (arg1)
12418 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12419 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12420 arg1, OEP_ONLY_CONST))
12421 return pedantic_non_lvalue (fold_convert (type,
12422 TREE_OPERAND (arg0, 0)));
12423
12424 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12425 if (integer_zerop (op2)
12426 && truth_value_p (TREE_CODE (arg0))
12427 && truth_value_p (TREE_CODE (arg1)))
12428 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12429 fold_convert (type, arg0),
12430 arg1);
12431
12432 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12433 if (integer_onep (op2)
12434 && truth_value_p (TREE_CODE (arg0))
12435 && truth_value_p (TREE_CODE (arg1)))
12436 {
12437 /* Only perform the transformation if ARG0 is easily inverted. */
12438 tem = fold_truth_not_expr (arg0);
12439 if (tem)
12440 return fold_build2 (TRUTH_ORIF_EXPR, type,
12441 fold_convert (type, tem),
12442 arg1);
12443 }
12444
12445 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12446 if (integer_zerop (arg1)
12447 && truth_value_p (TREE_CODE (arg0))
12448 && truth_value_p (TREE_CODE (op2)))
12449 {
12450 /* Only perform the transformation if ARG0 is easily inverted. */
12451 tem = fold_truth_not_expr (arg0);
12452 if (tem)
12453 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12454 fold_convert (type, tem),
12455 op2);
12456 }
12457
12458 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12459 if (integer_onep (arg1)
12460 && truth_value_p (TREE_CODE (arg0))
12461 && truth_value_p (TREE_CODE (op2)))
12462 return fold_build2 (TRUTH_ORIF_EXPR, type,
12463 fold_convert (type, arg0),
12464 op2);
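/* Summary example of the four truth-value folds above (added note):
   for truth values a and b, "a ? b : 0" -> "a && b",
   "a ? b : 1" -> "!a || b", "a ? 0 : b" -> "!a && b", and
   "a ? 1 : b" -> "a || b".  */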
12465
12466 return NULL_TREE;
12467
12468 case CALL_EXPR:
12469 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12470 of fold_ternary on them. */
12471 gcc_unreachable ();
12472
12473 case BIT_FIELD_REF:
12474 if (TREE_CODE (arg0) == VECTOR_CST
12475 && type == TREE_TYPE (TREE_TYPE (arg0))
12476 && host_integerp (arg1, 1)
12477 && host_integerp (op2, 1))
12478 {
12479 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
12480 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
12481
12482 if (width != 0
12483 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
12484 && (idx % width) == 0
12485 && (idx = idx / width)
12486 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
12487 {
12488 tree elements = TREE_VECTOR_CST_ELTS (arg0);
12489 while (idx-- > 0 && elements)
12490 elements = TREE_CHAIN (elements);
12491 if (elements)
12492 return TREE_VALUE (elements);
12493 else
12494 return fold_convert (type, integer_zero_node);
12495 }
12496 }
12497 return NULL_TREE;
12498
12499 default:
12500 return NULL_TREE;
12501 } /* switch (code) */
12502 }
12503
12504 /* Perform constant folding and related simplification of EXPR.
12505 The related simplifications include x*1 => x, x*0 => 0, etc.,
12506 and application of the associative law.
12507 NOP_EXPR conversions may be removed freely (as long as we
12508 are careful not to change the type of the overall expression).
12509 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12510 but we can constant-fold them if they have constant operands. */
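/* For example (illustrative): fold applied to the tree for "x * 1"
   returns the tree for "x", fold on "2 + 3" returns the INTEGER_CST 5,
   and a tree that cannot be simplified is returned unchanged.  */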
12511
12512 #ifdef ENABLE_FOLD_CHECKING
12513 # define fold(x) fold_1 (x)
12514 static tree fold_1 (tree);
12515 static
12516 #endif
12517 tree
12518 fold (tree expr)
12519 {
12520 const tree t = expr;
12521 enum tree_code code = TREE_CODE (t);
12522 enum tree_code_class kind = TREE_CODE_CLASS (code);
12523 tree tem;
12524
12525 /* Return right away if a constant. */
12526 if (kind == tcc_constant)
12527 return t;
12528
12529 /* CALL_EXPR-like objects with variable numbers of operands are
12530 treated specially. */
12531 if (kind == tcc_vl_exp)
12532 {
12533 if (code == CALL_EXPR)
12534 {
12535 tem = fold_call_expr (expr, false);
12536 return tem ? tem : expr;
12537 }
12538 return expr;
12539 }
12540
12541 if (IS_EXPR_CODE_CLASS (kind)
12542 || IS_GIMPLE_STMT_CODE_CLASS (kind))
12543 {
12544 tree type = TREE_TYPE (t);
12545 tree op0, op1, op2;
12546
12547 switch (TREE_CODE_LENGTH (code))
12548 {
12549 case 1:
12550 op0 = TREE_OPERAND (t, 0);
12551 tem = fold_unary (code, type, op0);
12552 return tem ? tem : expr;
12553 case 2:
12554 op0 = TREE_OPERAND (t, 0);
12555 op1 = TREE_OPERAND (t, 1);
12556 tem = fold_binary (code, type, op0, op1);
12557 return tem ? tem : expr;
12558 case 3:
12559 op0 = TREE_OPERAND (t, 0);
12560 op1 = TREE_OPERAND (t, 1);
12561 op2 = TREE_OPERAND (t, 2);
12562 tem = fold_ternary (code, type, op0, op1, op2);
12563 return tem ? tem : expr;
12564 default:
12565 break;
12566 }
12567 }
12568
12569 switch (code)
12570 {
12571 case CONST_DECL:
12572 return fold (DECL_INITIAL (t));
12573
12574 default:
12575 return t;
12576 } /* switch (code) */
12577 }
12578
12579 #ifdef ENABLE_FOLD_CHECKING
12580 #undef fold
12581
12582 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
12583 static void fold_check_failed (tree, tree);
12584 void print_fold_checksum (tree);
12585
12586 /* When --enable-checking=fold is given, compute a digest of EXPR
12587 before and after the actual fold call, to verify that fold did
12588 not accidentally change the original expr. */
12589
12590 tree
12591 fold (tree expr)
12592 {
12593 tree ret;
12594 struct md5_ctx ctx;
12595 unsigned char checksum_before[16], checksum_after[16];
12596 htab_t ht;
12597
12598 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12599 md5_init_ctx (&ctx);
12600 fold_checksum_tree (expr, &ctx, ht);
12601 md5_finish_ctx (&ctx, checksum_before);
12602 htab_empty (ht);
12603
12604 ret = fold_1 (expr);
12605
12606 md5_init_ctx (&ctx);
12607 fold_checksum_tree (expr, &ctx, ht);
12608 md5_finish_ctx (&ctx, checksum_after);
12609 htab_delete (ht);
12610
12611 if (memcmp (checksum_before, checksum_after, 16))
12612 fold_check_failed (expr, ret);
12613
12614 return ret;
12615 }
12616
12617 void
12618 print_fold_checksum (tree expr)
12619 {
12620 struct md5_ctx ctx;
12621 unsigned char checksum[16], cnt;
12622 htab_t ht;
12623
12624 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12625 md5_init_ctx (&ctx);
12626 fold_checksum_tree (expr, &ctx, ht);
12627 md5_finish_ctx (&ctx, checksum);
12628 htab_delete (ht);
12629 for (cnt = 0; cnt < 16; ++cnt)
12630 fprintf (stderr, "%02x", checksum[cnt]);
12631 putc ('\n', stderr);
12632 }
12633
12634 static void
12635 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
12636 {
12637 internal_error ("fold check: original tree changed by fold");
12638 }
12639
12640 static void
12641 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
12642 {
12643 void **slot;
12644 enum tree_code code;
12645 struct tree_function_decl buf;
12646 int i, len;
12647
12648 recursive_label:
12649
12650 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
12651 <= sizeof (struct tree_function_decl))
12652 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
12653 if (expr == NULL)
12654 return;
12655 slot = htab_find_slot (ht, expr, INSERT);
12656 if (*slot != NULL)
12657 return;
12658 *slot = expr;
12659 code = TREE_CODE (expr);
12660 if (TREE_CODE_CLASS (code) == tcc_declaration
12661 && DECL_ASSEMBLER_NAME_SET_P (expr))
12662 {
12663 /* Allow DECL_ASSEMBLER_NAME to be modified. */
12664 memcpy ((char *) &buf, expr, tree_size (expr));
12665 expr = (tree) &buf;
12666 SET_DECL_ASSEMBLER_NAME (expr, NULL);
12667 }
12668 else if (TREE_CODE_CLASS (code) == tcc_type
12669 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
12670 || TYPE_CACHED_VALUES_P (expr)
12671 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
12672 {
12673 /* Allow these fields to be modified. */
12674 memcpy ((char *) &buf, expr, tree_size (expr));
12675 expr = (tree) &buf;
12676 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
12677 TYPE_POINTER_TO (expr) = NULL;
12678 TYPE_REFERENCE_TO (expr) = NULL;
12679 if (TYPE_CACHED_VALUES_P (expr))
12680 {
12681 TYPE_CACHED_VALUES_P (expr) = 0;
12682 TYPE_CACHED_VALUES (expr) = NULL;
12683 }
12684 }
12685 md5_process_bytes (expr, tree_size (expr), ctx);
12686 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12687 if (TREE_CODE_CLASS (code) != tcc_type
12688 && TREE_CODE_CLASS (code) != tcc_declaration
12689 && code != TREE_LIST)
12690 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12691 switch (TREE_CODE_CLASS (code))
12692 {
12693 case tcc_constant:
12694 switch (code)
12695 {
12696 case STRING_CST:
12697 md5_process_bytes (TREE_STRING_POINTER (expr),
12698 TREE_STRING_LENGTH (expr), ctx);
12699 break;
12700 case COMPLEX_CST:
12701 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12702 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12703 break;
12704 case VECTOR_CST:
12705 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
12706 break;
12707 default:
12708 break;
12709 }
12710 break;
12711 case tcc_exceptional:
12712 switch (code)
12713 {
12714 case TREE_LIST:
12715 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12716 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12717 expr = TREE_CHAIN (expr);
12718 goto recursive_label;
12719 break;
12720 case TREE_VEC:
12721 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12722 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12723 break;
12724 default:
12725 break;
12726 }
12727 break;
12728 case tcc_expression:
12729 case tcc_reference:
12730 case tcc_comparison:
12731 case tcc_unary:
12732 case tcc_binary:
12733 case tcc_statement:
12734 case tcc_vl_exp:
12735 len = TREE_OPERAND_LENGTH (expr);
12736 for (i = 0; i < len; ++i)
12737 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12738 break;
12739 case tcc_declaration:
12740 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12741 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12742 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12743 {
12744 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12745 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12746 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12747 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12748 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12749 }
12750 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
12751 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
12752
12753 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12754 {
12755 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12756 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12757 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
12758 }
12759 break;
12760 case tcc_type:
12761 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12762 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12763 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12764 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12765 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12766 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12767 if (INTEGRAL_TYPE_P (expr)
12768 || SCALAR_FLOAT_TYPE_P (expr))
12769 {
12770 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12771 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12772 }
12773 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12774 if (TREE_CODE (expr) == RECORD_TYPE
12775 || TREE_CODE (expr) == UNION_TYPE
12776 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12777 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12778 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12779 break;
12780 default:
12781 break;
12782 }
12783 }
12784
12785 #endif
12786
12787 /* Fold a unary tree expression with code CODE of type TYPE with an
12788 operand OP0. Return a folded expression if successful. Otherwise,
12789 return a tree expression with code CODE of type TYPE with an
12790 operand OP0. */
12791
12792 tree
12793 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12794 {
12795 tree tem;
12796 #ifdef ENABLE_FOLD_CHECKING
12797 unsigned char checksum_before[16], checksum_after[16];
12798 struct md5_ctx ctx;
12799 htab_t ht;
12800
12801 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12802 md5_init_ctx (&ctx);
12803 fold_checksum_tree (op0, &ctx, ht);
12804 md5_finish_ctx (&ctx, checksum_before);
12805 htab_empty (ht);
12806 #endif
12807
12808 tem = fold_unary (code, type, op0);
12809 if (!tem)
12810 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
12811
12812 #ifdef ENABLE_FOLD_CHECKING
12813 md5_init_ctx (&ctx);
12814 fold_checksum_tree (op0, &ctx, ht);
12815 md5_finish_ctx (&ctx, checksum_after);
12816 htab_delete (ht);
12817
12818 if (memcmp (checksum_before, checksum_after, 16))
12819 fold_check_failed (op0, tem);
12820 #endif
12821 return tem;
12822 }
12823
12824 /* Fold a binary tree expression with code CODE of type TYPE with
12825 operands OP0 and OP1. Return a folded expression if successful.
12826 Otherwise, return a tree expression with code CODE of type TYPE
12827 with operands OP0 and OP1. */
12828
12829 tree
12830 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
12831 MEM_STAT_DECL)
12832 {
12833 tree tem;
12834 #ifdef ENABLE_FOLD_CHECKING
12835 unsigned char checksum_before_op0[16],
12836 checksum_before_op1[16],
12837 checksum_after_op0[16],
12838 checksum_after_op1[16];
12839 struct md5_ctx ctx;
12840 htab_t ht;
12841
12842 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12843 md5_init_ctx (&ctx);
12844 fold_checksum_tree (op0, &ctx, ht);
12845 md5_finish_ctx (&ctx, checksum_before_op0);
12846 htab_empty (ht);
12847
12848 md5_init_ctx (&ctx);
12849 fold_checksum_tree (op1, &ctx, ht);
12850 md5_finish_ctx (&ctx, checksum_before_op1);
12851 htab_empty (ht);
12852 #endif
12853
12854 tem = fold_binary (code, type, op0, op1);
12855 if (!tem)
12856 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
12857
12858 #ifdef ENABLE_FOLD_CHECKING
12859 md5_init_ctx (&ctx);
12860 fold_checksum_tree (op0, &ctx, ht);
12861 md5_finish_ctx (&ctx, checksum_after_op0);
12862 htab_empty (ht);
12863
12864 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12865 fold_check_failed (op0, tem);
12866
12867 md5_init_ctx (&ctx);
12868 fold_checksum_tree (op1, &ctx, ht);
12869 md5_finish_ctx (&ctx, checksum_after_op1);
12870 htab_delete (ht);
12871
12872 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12873 fold_check_failed (op1, tem);
12874 #endif
12875 return tem;
12876 }
12877
12878 /* Fold a ternary tree expression with code CODE of type TYPE with
12879 operands OP0, OP1, and OP2. Return a folded expression if
12880 successful. Otherwise, return a tree expression with code CODE of
12881 type TYPE with operands OP0, OP1, and OP2. */
12882
12883 tree
12884 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
12885 MEM_STAT_DECL)
12886 {
12887 tree tem;
12888 #ifdef ENABLE_FOLD_CHECKING
12889 unsigned char checksum_before_op0[16],
12890 checksum_before_op1[16],
12891 checksum_before_op2[16],
12892 checksum_after_op0[16],
12893 checksum_after_op1[16],
12894 checksum_after_op2[16];
12895 struct md5_ctx ctx;
12896 htab_t ht;
12897
12898 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12899 md5_init_ctx (&ctx);
12900 fold_checksum_tree (op0, &ctx, ht);
12901 md5_finish_ctx (&ctx, checksum_before_op0);
12902 htab_empty (ht);
12903
12904 md5_init_ctx (&ctx);
12905 fold_checksum_tree (op1, &ctx, ht);
12906 md5_finish_ctx (&ctx, checksum_before_op1);
12907 htab_empty (ht);
12908
12909 md5_init_ctx (&ctx);
12910 fold_checksum_tree (op2, &ctx, ht);
12911 md5_finish_ctx (&ctx, checksum_before_op2);
12912 htab_empty (ht);
12913 #endif
12914
12915 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12916 tem = fold_ternary (code, type, op0, op1, op2);
12917 if (!tem)
12918 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
12919
12920 #ifdef ENABLE_FOLD_CHECKING
12921 md5_init_ctx (&ctx);
12922 fold_checksum_tree (op0, &ctx, ht);
12923 md5_finish_ctx (&ctx, checksum_after_op0);
12924 htab_empty (ht);
12925
12926 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12927 fold_check_failed (op0, tem);
12928
12929 md5_init_ctx (&ctx);
12930 fold_checksum_tree (op1, &ctx, ht);
12931 md5_finish_ctx (&ctx, checksum_after_op1);
12932 htab_empty (ht);
12933
12934 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12935 fold_check_failed (op1, tem);
12936
12937 md5_init_ctx (&ctx);
12938 fold_checksum_tree (op2, &ctx, ht);
12939 md5_finish_ctx (&ctx, checksum_after_op2);
12940 htab_delete (ht);
12941
12942 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12943 fold_check_failed (op2, tem);
12944 #endif
12945 return tem;
12946 }
12947
12948 /* Fold a CALL_EXPR of type TYPE whose function is FN and whose NARGS
12949 arguments are in ARGARRAY, with a null static chain.
12950 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12951 of type TYPE from the given operands as constructed by build_call_array. */
12952
12953 tree
12954 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
12955 {
12956 tree tem;
12957 #ifdef ENABLE_FOLD_CHECKING
12958 unsigned char checksum_before_fn[16],
12959 checksum_before_arglist[16],
12960 checksum_after_fn[16],
12961 checksum_after_arglist[16];
12962 struct md5_ctx ctx;
12963 htab_t ht;
12964 int i;
12965
12966 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12967 md5_init_ctx (&ctx);
12968 fold_checksum_tree (fn, &ctx, ht);
12969 md5_finish_ctx (&ctx, checksum_before_fn);
12970 htab_empty (ht);
12971
12972 md5_init_ctx (&ctx);
12973 for (i = 0; i < nargs; i++)
12974 fold_checksum_tree (argarray[i], &ctx, ht);
12975 md5_finish_ctx (&ctx, checksum_before_arglist);
12976 htab_empty (ht);
12977 #endif
12978
12979 tem = fold_builtin_call_array (type, fn, nargs, argarray);
12980
12981 #ifdef ENABLE_FOLD_CHECKING
12982 md5_init_ctx (&ctx);
12983 fold_checksum_tree (fn, &ctx, ht);
12984 md5_finish_ctx (&ctx, checksum_after_fn);
12985 htab_empty (ht);
12986
12987 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12988 fold_check_failed (fn, tem);
12989
12990 md5_init_ctx (&ctx);
12991 for (i = 0; i < nargs; i++)
12992 fold_checksum_tree (argarray[i], &ctx, ht);
12993 md5_finish_ctx (&ctx, checksum_after_arglist);
12994 htab_delete (ht);
12995
12996 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12997 fold_check_failed (NULL_TREE, tem);
12998 #endif
12999 return tem;
13000 }
13001
13002 /* Perform constant folding and related simplification of initializer
13003 expression EXPR. These behave identically to "fold_buildN" but ignore
13004 potential run-time traps and exceptions that fold must preserve. */
13005
13006 #define START_FOLD_INIT \
13007 int saved_signaling_nans = flag_signaling_nans;\
13008 int saved_trapping_math = flag_trapping_math;\
13009 int saved_rounding_math = flag_rounding_math;\
13010 int saved_trapv = flag_trapv;\
13011 int saved_folding_initializer = folding_initializer;\
13012 flag_signaling_nans = 0;\
13013 flag_trapping_math = 0;\
13014 flag_rounding_math = 0;\
13015 flag_trapv = 0;\
13016 folding_initializer = 1;
13017
13018 #define END_FOLD_INIT \
13019 flag_signaling_nans = saved_signaling_nans;\
13020 flag_trapping_math = saved_trapping_math;\
13021 flag_rounding_math = saved_rounding_math;\
13022 flag_trapv = saved_trapv;\
13023 folding_initializer = saved_folding_initializer;
13024
13025 tree
13026 fold_build1_initializer (enum tree_code code, tree type, tree op)
13027 {
13028 tree result;
13029 START_FOLD_INIT;
13030
13031 result = fold_build1 (code, type, op);
13032
13033 END_FOLD_INIT;
13034 return result;
13035 }
13036
13037 tree
13038 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
13039 {
13040 tree result;
13041 START_FOLD_INIT;
13042
13043 result = fold_build2 (code, type, op0, op1);
13044
13045 END_FOLD_INIT;
13046 return result;
13047 }
13048
13049 tree
13050 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
13051 tree op2)
13052 {
13053 tree result;
13054 START_FOLD_INIT;
13055
13056 result = fold_build3 (code, type, op0, op1, op2);
13057
13058 END_FOLD_INIT;
13059 return result;
13060 }
13061
13062 tree
13063 fold_build_call_array_initializer (tree type, tree fn,
13064 int nargs, tree *argarray)
13065 {
13066 tree result;
13067 START_FOLD_INIT;
13068
13069 result = fold_build_call_array (type, fn, nargs, argarray);
13070
13071 END_FOLD_INIT;
13072 return result;
13073 }
13074
13075 #undef START_FOLD_INIT
13076 #undef END_FOLD_INIT
13077
13078 /* Determine if the first argument is a multiple of the second argument.
13079 Return 0 if it is not, or if we cannot easily determine it to be.
13080
13081 An example of the sort of thing we care about (at this point; this routine
13082 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13083 fold cases do now) is discovering that
13084
13085 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13086
13087 is a multiple of
13088
13089 SAVE_EXPR (J * 8)
13090
13091 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13092
13093 This code also handles discovering that
13094
13095 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13096
13097 is a multiple of 8 so we don't have to worry about dealing with a
13098 possible remainder.
13099
13100 Note that we *look* inside a SAVE_EXPR only to determine how it was
13101 calculated; it is not safe for fold to do much of anything else with the
13102 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13103 at run time. For example, the latter example above *cannot* be implemented
13104 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13105 evaluation time of the original SAVE_EXPR is not necessarily the same at
13106 the time the new expression is evaluated. The only optimization of this
13107 sort that would be valid is changing
13108
13109 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13110
13111 divided by 8 to
13112
13113 SAVE_EXPR (I) * SAVE_EXPR (J)
13114
13115 (where the same SAVE_EXPR (J) is used in the original and the
13116 transformed version). */
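/* Additional illustration (added note): the cases below let
   multiple_of_p determine, for instance, that "i * 8" and "i << 3"
   are multiples of 8, and that the INTEGER_CST 24 is as well.  */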
13117
13118 static int
13119 multiple_of_p (tree type, tree top, tree bottom)
13120 {
13121 if (operand_equal_p (top, bottom, 0))
13122 return 1;
13123
13124 if (TREE_CODE (type) != INTEGER_TYPE)
13125 return 0;
13126
13127 switch (TREE_CODE (top))
13128 {
13129 case BIT_AND_EXPR:
13130 /* A bitwise AND can only clear bits, so if BOTTOM is a power of two
13131 and either operand of TOP is a multiple of BOTTOM, then so is TOP. */
13132 if (!integer_pow2p (bottom))
13133 return 0;
13134 /* FALLTHRU */
13135
13136 case MULT_EXPR:
13137 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13138 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13139
13140 case PLUS_EXPR:
13141 case MINUS_EXPR:
13142 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13143 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13144
13145 case LSHIFT_EXPR:
13146 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13147 {
13148 tree op1, t1;
13149
13150 op1 = TREE_OPERAND (top, 1);
13151 /* const_binop may not detect overflow correctly,
13152 so check for it explicitly here. */
13153 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
13154 > TREE_INT_CST_LOW (op1)
13155 && TREE_INT_CST_HIGH (op1) == 0
13156 && 0 != (t1 = fold_convert (type,
13157 const_binop (LSHIFT_EXPR,
13158 size_one_node,
13159 op1, 0)))
13160 && !TREE_OVERFLOW (t1))
13161 return multiple_of_p (type, t1, bottom);
13162 }
13163 return 0;
13164
13165 case NOP_EXPR:
13166 /* Can't handle conversions from a non-integral or wider integral type. */
13167 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13168 || (TYPE_PRECISION (type)
13169 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13170 return 0;
13171
13172 /* ... fall through ... */
13173
13174 case SAVE_EXPR:
13175 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13176
13177 case INTEGER_CST:
13178 if (TREE_CODE (bottom) != INTEGER_CST
13179 || (TYPE_UNSIGNED (type)
13180 && (tree_int_cst_sgn (top) < 0
13181 || tree_int_cst_sgn (bottom) < 0)))
13182 return 0;
13183 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
13184 top, bottom, 0));
13185
13186 default:
13187 return 0;
13188 }
13189 }
13190
13191 /* Return true if `t' is known to be non-negative. If the return
13192 value is based on the assumption that signed overflow is undefined,
13193 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13194 *STRICT_OVERFLOW_P. */
13195
13196 bool
13197 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
13198 {
13199 if (t == error_mark_node)
13200 return false;
13201
13202 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13203 return true;
13204
13205 switch (TREE_CODE (t))
13206 {
13207 case SSA_NAME:
13208 /* Query VRP to see if it has recorded any information about
13209 the range of this object. */
13210 return ssa_name_nonnegative_p (t);
13211
13212 case ABS_EXPR:
13213 /* We can't return true when wrapping signed overflow is allowed,
13214 because ABS_EXPR<INT_MIN> == INT_MIN. */
13215 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13216 return true;
13217 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
13218 {
13219 *strict_overflow_p = true;
13220 return true;
13221 }
13222 break;
13223
13224 case INTEGER_CST:
13225 return tree_int_cst_sgn (t) >= 0;
13226
13227 case REAL_CST:
13228 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13229
13230 case PLUS_EXPR:
13231 if (FLOAT_TYPE_P (TREE_TYPE (t)))
13232 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13233 strict_overflow_p)
13234 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13235 strict_overflow_p));
13236
13237 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13238 both unsigned and at least 2 bits shorter than the result. */
13239 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
13240 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
13241 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
13242 {
13243 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
13244 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
13245 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13246 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13247 {
13248 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13249 TYPE_PRECISION (inner2)) + 1;
13250 return prec < TYPE_PRECISION (TREE_TYPE (t));
13251 }
13252 }
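/* Example (sketch): with 32-bit int, "(int) (unsigned char) a
   + (int) (unsigned char) b" is known nonnegative, because
   MAX (8, 8) + 1 = 9 < 32.  */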
13253 break;
13254
13255 case MULT_EXPR:
13256 if (FLOAT_TYPE_P (TREE_TYPE (t)))
13257 {
13258 /* x * x for floating point x is always non-negative. */
13259 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
13260 return true;
13261 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13262 strict_overflow_p)
13263 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13264 strict_overflow_p));
13265 }
13266
13267 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13268 both unsigned and the sum of their precisions is less than the result's. */
13269 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
13270 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
13271 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
13272 {
13273 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
13274 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
13275 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13276 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13277 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
13278 < TYPE_PRECISION (TREE_TYPE (t));
13279 }
13280 return false;
13281
13282 case BIT_AND_EXPR:
13283 case MAX_EXPR:
13284 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13285 strict_overflow_p)
13286 || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13287 strict_overflow_p));
13288
13289 case BIT_IOR_EXPR:
13290 case BIT_XOR_EXPR:
13291 case MIN_EXPR:
13292 case RDIV_EXPR:
13293 case TRUNC_DIV_EXPR:
13294 case CEIL_DIV_EXPR:
13295 case FLOOR_DIV_EXPR:
13296 case ROUND_DIV_EXPR:
13297 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13298 strict_overflow_p)
13299 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13300 strict_overflow_p));
13301
13302 case TRUNC_MOD_EXPR:
13303 case CEIL_MOD_EXPR:
13304 case FLOOR_MOD_EXPR:
13305 case ROUND_MOD_EXPR:
13306 case SAVE_EXPR:
13307 case NON_LVALUE_EXPR:
13308 case FLOAT_EXPR:
13309 case FIX_TRUNC_EXPR:
13310 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13311 strict_overflow_p);
13312
13313 case COMPOUND_EXPR:
13314 case MODIFY_EXPR:
13315 case GIMPLE_MODIFY_STMT:
13316 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13317 strict_overflow_p);
13318
13319 case BIND_EXPR:
13320 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
13321 strict_overflow_p);
13322
13323 case COND_EXPR:
13324 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13325 strict_overflow_p)
13326 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
13327 strict_overflow_p));
13328
13329 case NOP_EXPR:
13330 {
13331 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
13332 tree outer_type = TREE_TYPE (t);
13333
13334 if (TREE_CODE (outer_type) == REAL_TYPE)
13335 {
13336 if (TREE_CODE (inner_type) == REAL_TYPE)
13337 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13338 strict_overflow_p);
13339 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13340 {
13341 if (TYPE_UNSIGNED (inner_type))
13342 return true;
13343 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13344 strict_overflow_p);
13345 }
13346 }
13347 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
13348 {
13349 if (TREE_CODE (inner_type) == REAL_TYPE)
13350 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t,0),
13351 strict_overflow_p);
13352 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13353 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13354 && TYPE_UNSIGNED (inner_type);
13355 }
13356 }
13357 break;
13358
13359 case TARGET_EXPR:
13360 {
13361 tree temp = TARGET_EXPR_SLOT (t);
13362 t = TARGET_EXPR_INITIAL (t);
13363
13364 /* If the initializer is non-void, then it's a normal expression
13365 that will be assigned to the slot. */
13366 if (!VOID_TYPE_P (t))
13367 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
13368
13369 /* Otherwise, the initializer sets the slot in some way. One common
13370 way is an assignment statement at the end of the initializer. */
13371 while (1)
13372 {
13373 if (TREE_CODE (t) == BIND_EXPR)
13374 t = expr_last (BIND_EXPR_BODY (t));
13375 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13376 || TREE_CODE (t) == TRY_CATCH_EXPR)
13377 t = expr_last (TREE_OPERAND (t, 0));
13378 else if (TREE_CODE (t) == STATEMENT_LIST)
13379 t = expr_last (t);
13380 else
13381 break;
13382 }
13383 if ((TREE_CODE (t) == MODIFY_EXPR
13384 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
13385 && GENERIC_TREE_OPERAND (t, 0) == temp)
13386 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13387 strict_overflow_p);
13388
13389 return false;
13390 }
13391
13392 case CALL_EXPR:
13393 {
13394 tree fndecl = get_callee_fndecl (t);
13395 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
13396 switch (DECL_FUNCTION_CODE (fndecl))
13397 {
13398 CASE_FLT_FN (BUILT_IN_ACOS):
13399 CASE_FLT_FN (BUILT_IN_ACOSH):
13400 CASE_FLT_FN (BUILT_IN_CABS):
13401 CASE_FLT_FN (BUILT_IN_COSH):
13402 CASE_FLT_FN (BUILT_IN_ERFC):
13403 CASE_FLT_FN (BUILT_IN_EXP):
13404 CASE_FLT_FN (BUILT_IN_EXP10):
13405 CASE_FLT_FN (BUILT_IN_EXP2):
13406 CASE_FLT_FN (BUILT_IN_FABS):
13407 CASE_FLT_FN (BUILT_IN_FDIM):
13408 CASE_FLT_FN (BUILT_IN_HYPOT):
13409 CASE_FLT_FN (BUILT_IN_POW10):
13410 CASE_INT_FN (BUILT_IN_FFS):
13411 CASE_INT_FN (BUILT_IN_PARITY):
13412 CASE_INT_FN (BUILT_IN_POPCOUNT):
13413 case BUILT_IN_BSWAP32:
13414 case BUILT_IN_BSWAP64:
13415 /* Always true. */
13416 return true;
13417
13418 CASE_FLT_FN (BUILT_IN_SQRT):
13419 /* sqrt(-0.0) is -0.0. */
13420 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
13421 return true;
13422 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13423 strict_overflow_p);
13424
13425 CASE_FLT_FN (BUILT_IN_ASINH):
13426 CASE_FLT_FN (BUILT_IN_ATAN):
13427 CASE_FLT_FN (BUILT_IN_ATANH):
13428 CASE_FLT_FN (BUILT_IN_CBRT):
13429 CASE_FLT_FN (BUILT_IN_CEIL):
13430 CASE_FLT_FN (BUILT_IN_ERF):
13431 CASE_FLT_FN (BUILT_IN_EXPM1):
13432 CASE_FLT_FN (BUILT_IN_FLOOR):
13433 CASE_FLT_FN (BUILT_IN_FMOD):
13434 CASE_FLT_FN (BUILT_IN_FREXP):
13435 CASE_FLT_FN (BUILT_IN_LCEIL):
13436 CASE_FLT_FN (BUILT_IN_LDEXP):
13437 CASE_FLT_FN (BUILT_IN_LFLOOR):
13438 CASE_FLT_FN (BUILT_IN_LLCEIL):
13439 CASE_FLT_FN (BUILT_IN_LLFLOOR):
13440 CASE_FLT_FN (BUILT_IN_LLRINT):
13441 CASE_FLT_FN (BUILT_IN_LLROUND):
13442 CASE_FLT_FN (BUILT_IN_LRINT):
13443 CASE_FLT_FN (BUILT_IN_LROUND):
13444 CASE_FLT_FN (BUILT_IN_MODF):
13445 CASE_FLT_FN (BUILT_IN_NEARBYINT):
13446 CASE_FLT_FN (BUILT_IN_RINT):
13447 CASE_FLT_FN (BUILT_IN_ROUND):
13448 CASE_FLT_FN (BUILT_IN_SCALB):
13449 CASE_FLT_FN (BUILT_IN_SCALBLN):
13450 CASE_FLT_FN (BUILT_IN_SCALBN):
13451 CASE_FLT_FN (BUILT_IN_SIGNBIT):
13452 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
13453 CASE_FLT_FN (BUILT_IN_SINH):
13454 CASE_FLT_FN (BUILT_IN_TANH):
13455 CASE_FLT_FN (BUILT_IN_TRUNC):
13456 /* True if the 1st argument is nonnegative. */
13457 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13458 strict_overflow_p);
13459
13460 CASE_FLT_FN (BUILT_IN_FMAX):
13461 /* True if the 1st OR 2nd arguments are nonnegative. */
13462 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13463 strict_overflow_p)
13464 || (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13465 strict_overflow_p)));
13466
13467 CASE_FLT_FN (BUILT_IN_FMIN):
13468 /* True if the 1st AND 2nd arguments are nonnegative. */
13469 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13470 strict_overflow_p)
13471 && (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13472 strict_overflow_p)));
13473
13474 CASE_FLT_FN (BUILT_IN_COPYSIGN):
13475 /* True if the 2nd argument is nonnegative. */
13476 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13477 strict_overflow_p);
13478
13479 CASE_FLT_FN (BUILT_IN_POWI):
13480 /* True if the 1st argument is nonnegative or the second
13481 argument is an even integer. */
13482 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == INTEGER_CST)
13483 {
13484 tree arg1 = CALL_EXPR_ARG (t, 1);
13485 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
13486 return true;
13487 }
13488 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13489 strict_overflow_p);
13490
13491 CASE_FLT_FN (BUILT_IN_POW):
13492 /* True if the 1st argument is nonnegative or the second
13493 argument is an even integer valued real. */
13494 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == REAL_CST)
13495 {
13496 REAL_VALUE_TYPE c;
13497 HOST_WIDE_INT n;
13498
13499 c = TREE_REAL_CST (CALL_EXPR_ARG (t, 1));
13500 n = real_to_integer (&c);
13501 if ((n & 1) == 0)
13502 {
13503 REAL_VALUE_TYPE cint;
13504 real_from_integer (&cint, VOIDmode, n,
13505 n < 0 ? -1 : 0, 0);
13506 if (real_identical (&c, &cint))
13507 return true;
13508 }
13509 }
13510 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13511 strict_overflow_p);
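/* E.g. (illustration): "pow (x, 2.0)" is known nonnegative because
   2.0 is an even integer-valued constant, whereas "pow (x, 0.5)"
   requires x itself to be nonnegative.  */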
13512
13513 default:
13514 break;
13515 }
13516 }
13517
13518 /* ... fall through ... */
13519
13520 default:
13521 if (truth_value_p (TREE_CODE (t)))
13522 /* Truth values evaluate to 0 or 1, which is nonnegative. */
13523 return true;
13524 }
13525
13526 /* We don't know the sign of `t', so be conservative and return false. */
13527 return false;
13528 }
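
/* Illustrative note (editor's example, not from the GCC sources): the
   BUILT_IN_POW case above means pow (x, 2.0) is known nonnegative for
   any x, because 2.0 is an even integer-valued real, whereas
   pow (x, 2.5) is known nonnegative only when x itself is. */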
13529
13530 /* Return true if `t' is known to be non-negative. Handle warnings
13531 about undefined signed overflow. */
13532
13533 bool
13534 tree_expr_nonnegative_p (tree t)
13535 {
13536 bool ret, strict_overflow_p;
13537
13538 strict_overflow_p = false;
13539 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13540 if (strict_overflow_p)
13541 fold_overflow_warning (("assuming signed overflow does not occur when "
13542 "determining that expression is always "
13543 "non-negative"),
13544 WARN_STRICT_OVERFLOW_MISC);
13545 return ret;
13546 }
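
/* Illustrative sketch (editor's example, not part of GCC): a caller
   might use tree_expr_nonnegative_p to drop a redundant absolute
   value, since ABS of a provably nonnegative value is a no-op:

     if (TREE_CODE (t) == ABS_EXPR
         && tree_expr_nonnegative_p (TREE_OPERAND (t, 0)))
       t = TREE_OPERAND (t, 0);
*/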
13547
13548 /* Return true when T is known to be nonzero.  Floating point and
13549 other non-integral, non-pointer types are handled conservatively
13550 here (see below).  Similar logic is present in nonzero_address in rtlanal.c.
13551
13552 If the return value is based on the assumption that signed overflow
13553 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13554 change *STRICT_OVERFLOW_P. */
13555
13556 bool
13557 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13558 {
13559 tree type = TREE_TYPE (t);
13560 bool sub_strict_overflow_p;
13561
13562 /* Doing something useful for floating point would need more work. */
13563 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
13564 return false;
13565
13566 switch (TREE_CODE (t))
13567 {
13568 case SSA_NAME:
13569 /* Query VRP to see if it has recorded any information about
13570 the range of this object. */
13571 return ssa_name_nonzero_p (t);
13572
13573 case ABS_EXPR:
13574 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13575 strict_overflow_p);
13576
13577 case INTEGER_CST:
13578 return !integer_zerop (t);
13579
13580 case PLUS_EXPR:
13581 if (TYPE_OVERFLOW_UNDEFINED (type))
13582 {
13583 /* In the presence of negative values it is hard to say
13584 anything definite. */
13585 sub_strict_overflow_p = false;
13586 if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13587 &sub_strict_overflow_p)
13588 || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13589 &sub_strict_overflow_p))
13590 return false;
13591 /* One of the operands must be positive and the other non-negative. */
13592 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13593 overflows, on a two's-complement machine the sum of two
13594 nonnegative numbers can never be zero. */
13595 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13596 strict_overflow_p)
13597 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13598 strict_overflow_p));
13599 }
13600 break;
13601
13602 case MULT_EXPR:
13603 if (TYPE_OVERFLOW_UNDEFINED (type))
13604 {
13605 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13606 strict_overflow_p)
13607 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13608 strict_overflow_p))
13609 {
13610 *strict_overflow_p = true;
13611 return true;
13612 }
13613 }
13614 break;
13615
13616 case NOP_EXPR:
13617 {
13618 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
13619 tree outer_type = TREE_TYPE (t);
13620
13621 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13622 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13623 strict_overflow_p));
13624 }
13625 break;
13626
13627 case ADDR_EXPR:
13628 {
13629 tree base = get_base_address (TREE_OPERAND (t, 0));
13630
13631 if (!base)
13632 return false;
13633
13634 /* Weak declarations may link to NULL. */
13635 if (VAR_OR_FUNCTION_DECL_P (base))
13636 return !DECL_WEAK (base);
13637
13638 /* Constants are never weak. */
13639 if (CONSTANT_CLASS_P (base))
13640 return true;
13641
13642 return false;
13643 }
13644
13645 case COND_EXPR:
13646 sub_strict_overflow_p = false;
13647 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13648 &sub_strict_overflow_p)
13649 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13650 &sub_strict_overflow_p))
13651 {
13652 if (sub_strict_overflow_p)
13653 *strict_overflow_p = true;
13654 return true;
13655 }
13656 break;
13657
13658 case MIN_EXPR:
13659 sub_strict_overflow_p = false;
13660 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13661 &sub_strict_overflow_p)
13662 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13663 &sub_strict_overflow_p))
13664 {
13665 if (sub_strict_overflow_p)
13666 *strict_overflow_p = true;
/* The minimum of two nonzero values is one of them, hence nonzero. */
return true;
13667 }
13668 break;
13669
13670 case MAX_EXPR:
13671 sub_strict_overflow_p = false;
13672 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13673 &sub_strict_overflow_p))
13674 {
13675 if (sub_strict_overflow_p)
13676 *strict_overflow_p = true;
13677
13678 /* When both operands are nonzero, then MAX must be too. */
13679 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13680 strict_overflow_p))
13681 return true;
13682
13683 /* MAX where operand 0 is positive is positive. */
13684 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13685 strict_overflow_p);
13686 }
13687 /* MAX where operand 1 is positive is positive. */
13688 else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13689 &sub_strict_overflow_p)
13690 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13691 &sub_strict_overflow_p))
13692 {
13693 if (sub_strict_overflow_p)
13694 *strict_overflow_p = true;
13695 return true;
13696 }
13697 break;
13698
13699 case COMPOUND_EXPR:
13700 case MODIFY_EXPR:
13701 case GIMPLE_MODIFY_STMT:
13702 case BIND_EXPR:
13703 return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13704 strict_overflow_p);
13705
13706 case SAVE_EXPR:
13707 case NON_LVALUE_EXPR:
13708 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13709 strict_overflow_p);
13710
13711 case BIT_IOR_EXPR:
13712 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13713 strict_overflow_p)
13714 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13715 strict_overflow_p));
13716
13717 case CALL_EXPR:
13718 return alloca_call_p (t);
13719
13720 default:
13721 break;
13722 }
13723 return false;
13724 }
13725
13726 /* Return true when T is known to be nonzero.  Handle warnings
13727 about undefined signed overflow. */
13728
13729 bool
13730 tree_expr_nonzero_p (tree t)
13731 {
13732 bool ret, strict_overflow_p;
13733
13734 strict_overflow_p = false;
13735 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
13736 if (strict_overflow_p)
13737 fold_overflow_warning (("assuming signed overflow does not occur when "
13738 "determining that expression is always "
13739 "non-zero"),
13740 WARN_STRICT_OVERFLOW_MISC);
13741 return ret;
13742 }
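
/* Illustrative sketch (editor's example, not part of GCC): a
   transformation that introduces a division is only safe when the
   divisor is provably nonzero, which a caller can check with:

     if (tree_expr_nonzero_p (divisor))
       ... safe to build a TRUNC_DIV_EXPR by DIVISOR ...
*/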
13743
13744 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13745 attempt to fold the expression to a constant without modifying TYPE,
13746 OP0 or OP1.
13747
13748 If the expression could be simplified to a constant, then return
13749 the constant. If the expression would not be simplified to a
13750 constant, then return NULL_TREE. */
13751
13752 tree
13753 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13754 {
13755 tree tem = fold_binary (code, type, op0, op1);
13756 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13757 }
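
/* Illustrative sketch (editor's example, not part of GCC), assuming
   the front end has initialized integer_type_node:

     tree a = build_int_cst (integer_type_node, 2);
     tree b = build_int_cst (integer_type_node, 3);
     tree c = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                       a, b);

   C is now the INTEGER_CST 5; it would be NULL_TREE if the operands
   did not fold to a constant. */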
13758
13759 /* Given the components of a unary expression CODE, TYPE and OP0,
13760 attempt to fold the expression to a constant without modifying
13761 TYPE or OP0.
13762
13763 If the expression could be simplified to a constant, then return
13764 the constant. If the expression would not be simplified to a
13765 constant, then return NULL_TREE. */
13766
13767 tree
13768 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13769 {
13770 tree tem = fold_unary (code, type, op0);
13771 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13772 }
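
/* Illustrative sketch (editor's example, not part of GCC):

     tree a = build_int_cst (integer_type_node, 7);
     tree n = fold_unary_to_constant (NEGATE_EXPR, integer_type_node, a);

   N is now the INTEGER_CST -7, or NULL_TREE had OP0 not folded to a
   constant. */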
13773
13774 /* If EXP represents referencing an element in a constant string
13775 (either via pointer arithmetic or array indexing), return the
13776 tree representing the value accessed, otherwise return NULL. */
13777
13778 tree
13779 fold_read_from_constant_string (tree exp)
13780 {
13781 if ((TREE_CODE (exp) == INDIRECT_REF
13782 || TREE_CODE (exp) == ARRAY_REF)
13783 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13784 {
13785 tree exp1 = TREE_OPERAND (exp, 0);
13786 tree index;
13787 tree string;
13788
13789 if (TREE_CODE (exp) == INDIRECT_REF)
13790 string = string_constant (exp1, &index);
13791 else
13792 {
13793 tree low_bound = array_ref_low_bound (exp);
13794 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
13795
13796 /* Optimize the special-case of a zero lower bound.
13797
13798 We convert the low_bound to sizetype to avoid some problems
13799 with constant folding. (E.g. suppose the lower bound is 1,
13800 and its mode is QI. Without the conversion, (ARRAY
13801 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13802 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13803 if (! integer_zerop (low_bound))
13804 index = size_diffop (index, fold_convert (sizetype, low_bound));
13805
13806 string = exp1;
13807 }
13808
13809 if (string
13810 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13811 && TREE_CODE (string) == STRING_CST
13812 && TREE_CODE (index) == INTEGER_CST
13813 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13814 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13815 == MODE_INT)
13816 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13817 return fold_convert (TREE_TYPE (exp),
13818 build_int_cst (NULL_TREE,
13819 (TREE_STRING_POINTER (string)
13820 [TREE_INT_CST_LOW (index)])));
13821 }
13822 return NULL;
13823 }
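
/* Illustrative note (editor's example, not from the GCC sources):
   given GENERIC for "abc"[1] -- an ARRAY_REF into a STRING_CST whose
   elements have a single-byte integer mode -- the function above
   returns the INTEGER_CST 'b' converted to the type of the reference.
   A non-constant index, an out-of-bounds constant index, or a wide
   string yields NULL instead. */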
13824
13825 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13826 an integer constant or real constant.
13827
13828 TYPE is the type of the result. */
13829
13830 static tree
13831 fold_negate_const (tree arg0, tree type)
13832 {
13833 tree t = NULL_TREE;
13834
13835 switch (TREE_CODE (arg0))
13836 {
13837 case INTEGER_CST:
13838 {
13839 unsigned HOST_WIDE_INT low;
13840 HOST_WIDE_INT high;
13841 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13842 TREE_INT_CST_HIGH (arg0),
13843 &low, &high);
13844 t = force_fit_type_double (type, low, high, 1,
13845 (overflow | TREE_OVERFLOW (arg0))
13846 && !TYPE_UNSIGNED (type));
13847 break;
13848 }
13849
13850 case REAL_CST:
13851 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13852 break;
13853
13854 default:
13855 gcc_unreachable ();
13856 }
13857
13858 return t;
13859 }
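
/* Illustrative note (editor's example): negating the most negative
   value of a signed type overflows.  For a 32-bit int, negating the
   INTEGER_CST -2147483648 wraps back to -2147483648; neg_double
   reports the overflow and force_fit_type_double then sets
   TREE_OVERFLOW on the result. */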
13860
13861 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13862 an integer constant or real constant.
13863
13864 TYPE is the type of the result. */
13865
13866 tree
13867 fold_abs_const (tree arg0, tree type)
13868 {
13869 tree t = NULL_TREE;
13870
13871 switch (TREE_CODE (arg0))
13872 {
13873 case INTEGER_CST:
13874 /* If the type is unsigned, then the absolute value is
13875 the same as the original value. */
13876 if (TYPE_UNSIGNED (type))
13877 t = arg0;
13878 /* Similarly, if the value is non-negative. */
13879 else if (INT_CST_LT (integer_minus_one_node, arg0))
13880 t = arg0;
13881 /* If the value is negative, then the absolute value is
13882 its negation. */
13883 else
13884 {
13885 unsigned HOST_WIDE_INT low;
13886 HOST_WIDE_INT high;
13887 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13888 TREE_INT_CST_HIGH (arg0),
13889 &low, &high);
13890 t = force_fit_type_double (type, low, high, -1,
13891 overflow | TREE_OVERFLOW (arg0));
13892 }
13893 break;
13894
13895 case REAL_CST:
13896 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13897 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13898 else
13899 t = arg0;
13900 break;
13901
13902 default:
13903 gcc_unreachable ();
13904 }
13905
13906 return t;
13907 }
13908
13909 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13910 constant. TYPE is the type of the result. */
13911
13912 static tree
13913 fold_not_const (tree arg0, tree type)
13914 {
13915 tree t = NULL_TREE;
13916
13917 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13918
13919 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
13920 ~TREE_INT_CST_HIGH (arg0), 0,
13921 TREE_OVERFLOW (arg0));
13922
13923 return t;
13924 }
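
/* Illustrative sketch (editor's example, usable only within this
   file since fold_not_const is static):

     tree x = build_int_cst (integer_type_node, 5);
     tree y = fold_not_const (x, integer_type_node);

   Y is now the INTEGER_CST -6, since ~5 is -6 in two's complement. */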
13925
13926 /* Given CODE, a relational operator, the target type TYPE, and two
13927 constant operands OP0 and OP1, return the result of the
13928 relational operation. If the result is not a compile time
13929 constant, then return NULL_TREE. */
13930
13931 static tree
13932 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13933 {
13934 int result, invert;
13935
13936 /* From here on, the only cases we handle are when the result is
13937 known to be a constant. */
13938
13939 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13940 {
13941 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13942 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13943
13944 /* Handle the cases where either operand is a NaN. */
13945 if (real_isnan (c0) || real_isnan (c1))
13946 {
13947 switch (code)
13948 {
13949 case EQ_EXPR:
13950 case ORDERED_EXPR:
13951 result = 0;
13952 break;
13953
13954 case NE_EXPR:
13955 case UNORDERED_EXPR:
13956 case UNLT_EXPR:
13957 case UNLE_EXPR:
13958 case UNGT_EXPR:
13959 case UNGE_EXPR:
13960 case UNEQ_EXPR:
13961 result = 1;
13962 break;
13963
13964 case LT_EXPR:
13965 case LE_EXPR:
13966 case GT_EXPR:
13967 case GE_EXPR:
13968 case LTGT_EXPR:
13969 if (flag_trapping_math)
13970 return NULL_TREE;
13971 result = 0;
13972 break;
13973
13974 default:
13975 gcc_unreachable ();
13976 }
13977
13978 return constant_boolean_node (result, type);
13979 }
13980
13981 return constant_boolean_node (real_compare (code, c0, c1), type);
13982 }
13983
13984 /* Handle equality/inequality of complex constants. */
13985 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13986 {
13987 tree rcond = fold_relational_const (code, type,
13988 TREE_REALPART (op0),
13989 TREE_REALPART (op1));
13990 tree icond = fold_relational_const (code, type,
13991 TREE_IMAGPART (op0),
13992 TREE_IMAGPART (op1));
13993 if (code == EQ_EXPR)
13994 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13995 else if (code == NE_EXPR)
13996 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13997 else
13998 return NULL_TREE;
13999 }
14000
14001 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14002
14003 To compute GT, swap the arguments and do LT.
14004 To compute GE, do LT and invert the result.
14005 To compute LE, swap the arguments, do LT and invert the result.
14006 To compute NE, do EQ and invert the result.
14007
14008 Therefore, the code below must handle only EQ and LT. */
14009
14010 if (code == LE_EXPR || code == GT_EXPR)
14011 {
14012 tree tem = op0;
14013 op0 = op1;
14014 op1 = tem;
14015 code = swap_tree_comparison (code);
14016 }
14017
14018 /* Note that it is safe to invert for real values here because we
14019 have already handled the one case where it matters. */
14020
14021 invert = 0;
14022 if (code == NE_EXPR || code == GE_EXPR)
14023 {
14024 invert = 1;
14025 code = invert_tree_comparison (code, false);
14026 }
14027
14028 /* Compute a result for LT or EQ if args permit;
14029 otherwise return NULL_TREE. */
14030 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14031 {
14032 if (code == EQ_EXPR)
14033 result = tree_int_cst_equal (op0, op1);
14034 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
14035 result = INT_CST_LT_UNSIGNED (op0, op1);
14036 else
14037 result = INT_CST_LT (op0, op1);
14038 }
14039 else
14040 return NULL_TREE;
14041
14042 if (invert)
14043 result ^= 1;
14044 return constant_boolean_node (result, type);
14045 }
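
/* Illustrative note (editor's example): when either operand is a NaN,
   1.0 < NAN folds to false only if -fno-trapping-math is in effect,
   because the ordered comparison may raise an invalid-operand
   exception and must otherwise be preserved; 1.0 != NAN, being an
   unordered-or-unequal test, always folds to true. */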
14046
14047 /* Build an expression for a cleanup point containing EXPR, with type TYPE.
14048 Don't build a cleanup point expression for an EXPR which doesn't have
14049 side effects. */
14050
14051 tree
14052 fold_build_cleanup_point_expr (tree type, tree expr)
14053 {
14054 /* If the expression does not have side effects then we don't have to wrap
14055 it with a cleanup point expression. */
14056 if (!TREE_SIDE_EFFECTS (expr))
14057 return expr;
14058
14059 /* If the expression is a return, check whether the expression inside the
14060 return, or the right-hand side of the modify expression inside the return,
14061 has side effects. If either one has none, we don't need to wrap the
14062 expression in a cleanup point expression. Note we don't check the
14063 left-hand side of the modify because it should always be the return decl. */
14064 if (TREE_CODE (expr) == RETURN_EXPR)
14065 {
14066 tree op = TREE_OPERAND (expr, 0);
14067 if (!op || !TREE_SIDE_EFFECTS (op))
14068 return expr;
14069 op = TREE_OPERAND (op, 1);
14070 if (!TREE_SIDE_EFFECTS (op))
14071 return expr;
14072 }
14073
14074 return build1 (CLEANUP_POINT_EXPR, type, expr);
14075 }
14076
14077 /* Build an expression for the address of T. Folds away INDIRECT_REF to
14078 avoid confusing the gimplify process. */
14079
14080 tree
14081 build_fold_addr_expr_with_type (tree t, tree ptrtype)
14082 {
14083 /* The size of the object is not relevant when talking about its address. */
14084 if (TREE_CODE (t) == WITH_SIZE_EXPR)
14085 t = TREE_OPERAND (t, 0);
14086
14087 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
14088 if (TREE_CODE (t) == INDIRECT_REF
14089 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
14090 {
14091 t = TREE_OPERAND (t, 0);
14092 if (TREE_TYPE (t) != ptrtype)
14093 t = build1 (NOP_EXPR, ptrtype, t);
14094 }
14095 else
14096 {
14097 tree base = t;
14098
14099 while (handled_component_p (base))
14100 base = TREE_OPERAND (base, 0);
14101 if (DECL_P (base))
14102 TREE_ADDRESSABLE (base) = 1;
14103
14104 t = build1 (ADDR_EXPR, ptrtype, t);
14105 }
14106
14107 return t;
14108 }
14109
14110 tree
14111 build_fold_addr_expr (tree t)
14112 {
14113 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
14114 }
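
/* Illustrative note (editor's example, not from the GCC sources):
   because the INDIRECT_REF is folded away, taking the address of an
   indirection through a pointer-valued tree P, as in

     build_fold_addr_expr (build_fold_indirect_ref (p))

   yields P itself (possibly wrapped in a NOP_EXPR to adjust the
   pointer type) rather than the tree &*P. */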
14115
14116 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14117 of an indirection through OP0, or NULL_TREE if no simplification is
14118 possible. */
14119
14120 tree
14121 fold_indirect_ref_1 (tree type, tree op0)
14122 {
14123 tree sub = op0;
14124 tree subtype;
14125
14126 STRIP_NOPS (sub);
14127 subtype = TREE_TYPE (sub);
14128 if (!POINTER_TYPE_P (subtype))
14129 return NULL_TREE;
14130
14131 if (TREE_CODE (sub) == ADDR_EXPR)
14132 {
14133 tree op = TREE_OPERAND (sub, 0);
14134 tree optype = TREE_TYPE (op);
14135 /* *&CONST_DECL -> the value of the const decl. */
14136 if (TREE_CODE (op) == CONST_DECL)
14137 return DECL_INITIAL (op);
14138 /* *&p => p; make sure to handle *&"str"[cst] here. */
14139 if (type == optype)
14140 {
14141 tree fop = fold_read_from_constant_string (op);
14142 if (fop)
14143 return fop;
14144 else
14145 return op;
14146 }
14147 /* *(foo *)&fooarray => fooarray[0] */
14148 else if (TREE_CODE (optype) == ARRAY_TYPE
14149 && type == TREE_TYPE (optype))
14150 {
14151 tree type_domain = TYPE_DOMAIN (optype);
14152 tree min_val = size_zero_node;
14153 if (type_domain && TYPE_MIN_VALUE (type_domain))
14154 min_val = TYPE_MIN_VALUE (type_domain);
14155 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
14156 }
14157 /* *(foo *)&complexfoo => __real__ complexfoo */
14158 else if (TREE_CODE (optype) == COMPLEX_TYPE
14159 && type == TREE_TYPE (optype))
14160 return fold_build1 (REALPART_EXPR, type, op);
14161 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14162 else if (TREE_CODE (optype) == VECTOR_TYPE
14163 && type == TREE_TYPE (optype))
14164 {
14165 tree part_width = TYPE_SIZE (type);
14166 tree index = bitsize_int (0);
14167 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
14168 }
14169 }
14170
14171 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14172 if (TREE_CODE (sub) == PLUS_EXPR
14173 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14174 {
14175 tree op00 = TREE_OPERAND (sub, 0);
14176 tree op01 = TREE_OPERAND (sub, 1);
14177 tree op00type;
14178
14179 STRIP_NOPS (op00);
14180 op00type = TREE_TYPE (op00);
14181 if (TREE_CODE (op00) == ADDR_EXPR
14182 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
14183 && type == TREE_TYPE (TREE_TYPE (op00type)))
14184 {
14185 tree size = TYPE_SIZE_UNIT (type);
14186 if (tree_int_cst_equal (size, op01))
14187 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
14188 }
14189 }
14190
14191 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14192 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14193 && type == TREE_TYPE (TREE_TYPE (subtype)))
14194 {
14195 tree type_domain;
14196 tree min_val = size_zero_node;
14197 sub = build_fold_indirect_ref (sub);
14198 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14199 if (type_domain && TYPE_MIN_VALUE (type_domain))
14200 min_val = TYPE_MIN_VALUE (type_domain);
14201 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
14202 }
14203
14204 return NULL_TREE;
14205 }
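
/* Illustrative examples of the simplifications above (editor's notes,
   not from the GCC sources):

     *(int *)&iarr       becomes  iarr[0]         (ARRAY_TYPE case)
     *(float *)&cf       becomes  __real__ cf     (COMPLEX_TYPE case)
     ((float *)&cf)[1]   becomes  __imag__ cf     (PLUS_EXPR case)

   where iarr is an int array and cf a float _Complex variable. */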
14206
14207 /* Builds an expression for an indirection through T, simplifying some
14208 cases. */
14209
14210 tree
14211 build_fold_indirect_ref (tree t)
14212 {
14213 tree type = TREE_TYPE (TREE_TYPE (t));
14214 tree sub = fold_indirect_ref_1 (type, t);
14215
14216 if (sub)
14217 return sub;
14218 else
14219 return build1 (INDIRECT_REF, type, t);
14220 }
14221
14222 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14223
14224 tree
14225 fold_indirect_ref (tree t)
14226 {
14227 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
14228
14229 if (sub)
14230 return sub;
14231 else
14232 return t;
14233 }
14234
14235 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14236 whose result is ignored. The type of the returned tree need not be
14237 the same as the original expression. */
14238
14239 tree
14240 fold_ignored_result (tree t)
14241 {
14242 if (!TREE_SIDE_EFFECTS (t))
14243 return integer_zero_node;
14244
14245 for (;;)
14246 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14247 {
14248 case tcc_unary:
14249 t = TREE_OPERAND (t, 0);
14250 break;
14251
14252 case tcc_binary:
14253 case tcc_comparison:
14254 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14255 t = TREE_OPERAND (t, 0);
14256 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14257 t = TREE_OPERAND (t, 1);
14258 else
14259 return t;
14260 break;
14261
14262 case tcc_expression:
14263 switch (TREE_CODE (t))
14264 {
14265 case COMPOUND_EXPR:
14266 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14267 return t;
14268 t = TREE_OPERAND (t, 0);
14269 break;
14270
14271 case COND_EXPR:
14272 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14273 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14274 return t;
14275 t = TREE_OPERAND (t, 0);
14276 break;
14277
14278 default:
14279 return t;
14280 }
14281 break;
14282
14283 default:
14284 return t;
14285 }
14286 }
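
/* Illustrative sketch (editor's example, not part of GCC): for a
   COMPOUND_EXPR whose value is ignored, such as (x = f (), y + 1),
   fold_ignored_result keeps the side-effecting half and drops the
   rest, returning the tree for x = f (). */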
14287
14288 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
14289 This can only be applied to objects of a sizetype. */
14290
14291 tree
14292 round_up (tree value, int divisor)
14293 {
14294 tree div = NULL_TREE;
14295
14296 gcc_assert (divisor > 0);
14297 if (divisor == 1)
14298 return value;
14299
14300 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14301 have to do anything. Only do this when we are not given a const,
14302 because in that case, this check is more expensive than just
14303 doing the rounding. */
14304 if (TREE_CODE (value) != INTEGER_CST)
14305 {
14306 div = build_int_cst (TREE_TYPE (value), divisor);
14307
14308 if (multiple_of_p (TREE_TYPE (value), value, div))
14309 return value;
14310 }
14311
14312 /* If divisor is a power of two, simplify this to bit manipulation. */
14313 if (divisor == (divisor & -divisor))
14314 {
14315 if (TREE_CODE (value) == INTEGER_CST)
14316 {
14317 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
14318 unsigned HOST_WIDE_INT high;
14319 bool overflow_p;
14320
14321 if ((low & (divisor - 1)) == 0)
14322 return value;
14323
14324 overflow_p = TREE_OVERFLOW (value);
14325 high = TREE_INT_CST_HIGH (value);
14326 low &= ~(divisor - 1);
14327 low += divisor;
14328 if (low == 0)
14329 {
14330 high++;
14331 if (high == 0)
14332 overflow_p = true;
14333 }
14334
14335 return force_fit_type_double (TREE_TYPE (value), low, high,
14336 -1, overflow_p);
14337 }
14338 else
14339 {
14340 tree t;
14341
14342 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14343 value = size_binop (PLUS_EXPR, value, t);
14344 t = build_int_cst (TREE_TYPE (value), -divisor);
14345 value = size_binop (BIT_AND_EXPR, value, t);
14346 }
14347 }
14348 else
14349 {
14350 if (!div)
14351 div = build_int_cst (TREE_TYPE (value), divisor);
14352 value = size_binop (CEIL_DIV_EXPR, value, div);
14353 value = size_binop (MULT_EXPR, value, div);
14354 }
14355
14356 return value;
14357 }
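
/* Illustrative note (editor's example): for a power-of-two divisor
   the computation above reduces to bit manipulation,

     round_up (value, 8)  ==  (value + 7) & -8

   so 13 rounds up to 16 and 16 stays 16. */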
14358
14359 /* Likewise, but round down. */
14360
14361 tree
14362 round_down (tree value, int divisor)
14363 {
14364 tree div = NULL_TREE;
14365
14366 gcc_assert (divisor > 0);
14367 if (divisor == 1)
14368 return value;
14369
14370 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14371 have to do anything. Only do this when we are not given a const,
14372 because in that case, this check is more expensive than just
14373 doing the rounding. */
14374 if (TREE_CODE (value) != INTEGER_CST)
14375 {
14376 div = build_int_cst (TREE_TYPE (value), divisor);
14377
14378 if (multiple_of_p (TREE_TYPE (value), value, div))
14379 return value;
14380 }
14381
14382 /* If divisor is a power of two, simplify this to bit manipulation. */
14383 if (divisor == (divisor & -divisor))
14384 {
14385 tree t;
14386
14387 t = build_int_cst (TREE_TYPE (value), -divisor);
14388 value = size_binop (BIT_AND_EXPR, value, t);
14389 }
14390 else
14391 {
14392 if (!div)
14393 div = build_int_cst (TREE_TYPE (value), divisor);
14394 value = size_binop (FLOOR_DIV_EXPR, value, div);
14395 value = size_binop (MULT_EXPR, value, div);
14396 }
14397
14398 return value;
14399 }
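
/* Illustrative note (editor's example): likewise,

     round_down (value, 8)  ==  value & -8

   so 13 rounds down to 8 and 16 stays 16. */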
14400
14401 /* Returns a pointer to the base of the object addressed by EXP and
14402 extracts information about the offset of the access, storing it
14403 in *PBITPOS and *POFFSET. */
14404
14405 static tree
14406 split_address_to_core_and_offset (tree exp,
14407 HOST_WIDE_INT *pbitpos, tree *poffset)
14408 {
14409 tree core;
14410 enum machine_mode mode;
14411 int unsignedp, volatilep;
14412 HOST_WIDE_INT bitsize;
14413
14414 if (TREE_CODE (exp) == ADDR_EXPR)
14415 {
14416 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14417 poffset, &mode, &unsignedp, &volatilep,
14418 false);
14419 core = build_fold_addr_expr (core);
14420 }
14421 else
14422 {
14423 core = exp;
14424 *pbitpos = 0;
14425 *poffset = NULL_TREE;
14426 }
14427
14428 return core;
14429 }
14430
14431 /* Returns true if addresses of E1 and E2 differ by a constant, false
14432 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14433
14434 bool
14435 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14436 {
14437 tree core1, core2;
14438 HOST_WIDE_INT bitpos1, bitpos2;
14439 tree toffset1, toffset2, tdiff, type;
14440
14441 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14442 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14443
14444 if (bitpos1 % BITS_PER_UNIT != 0
14445 || bitpos2 % BITS_PER_UNIT != 0
14446 || !operand_equal_p (core1, core2, 0))
14447 return false;
14448
14449 if (toffset1 && toffset2)
14450 {
14451 type = TREE_TYPE (toffset1);
14452 if (type != TREE_TYPE (toffset2))
14453 toffset2 = fold_convert (type, toffset2);
14454
14455 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14456 if (!cst_and_fits_in_hwi (tdiff))
14457 return false;
14458
14459 *diff = int_cst_value (tdiff);
14460 }
14461 else if (toffset1 || toffset2)
14462 {
14463 /* If only one of the offsets is non-constant, the difference cannot
14464 be a constant. */
14465 return false;
14466 }
14467 else
14468 *diff = 0;
14469
14470 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14471 return true;
14472 }
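
/* Illustrative sketch (editor's example, not part of GCC): for
   int a[10], the trees for &a[7] and &a[2] share the core &a and
   carry constant offsets, so

     HOST_WIDE_INT diff;
     ptr_difference_const (e1, e2, &diff)

   returns true and stores 5 * sizeof (int) in DIFF. */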
14473
14474 /* Simplify the floating point expression EXP when the sign of the
14475 result is not significant. Return NULL_TREE if no simplification
14476 is possible. */
14477
14478 tree
14479 fold_strip_sign_ops (tree exp)
14480 {
14481 tree arg0, arg1;
14482
14483 switch (TREE_CODE (exp))
14484 {
14485 case ABS_EXPR:
14486 case NEGATE_EXPR:
14487 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
14488 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
14489
14490 case MULT_EXPR:
14491 case RDIV_EXPR:
14492 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
14493 return NULL_TREE;
14494 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
14495 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14496 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
14497 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
14498 arg0 ? arg0 : TREE_OPERAND (exp, 0),
14499 arg1 ? arg1 : TREE_OPERAND (exp, 1));
14500 break;
14501
14502 case COMPOUND_EXPR:
14503 arg0 = TREE_OPERAND (exp, 0);
14504 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14505 if (arg1)
14506 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
14507 break;
14508
14509 case COND_EXPR:
14510 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14511 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
14512 if (arg0 || arg1)
14513 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
14514 arg0 ? arg0 : TREE_OPERAND (exp, 1),
14515 arg1 ? arg1 : TREE_OPERAND (exp, 2));
14516 break;
14517
14518 case CALL_EXPR:
14519 {
14520 const enum built_in_function fcode = builtin_mathfn_code (exp);
14521 switch (fcode)
14522 {
14523 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14524 /* Strip copysign function call, return the 1st argument. */
14525 arg0 = CALL_EXPR_ARG (exp, 0);
14526 arg1 = CALL_EXPR_ARG (exp, 1);
14527 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
14528
14529 default:
14530 /* Strip sign ops from the argument of "odd" math functions. */
14531 if (negate_mathfn_p (fcode))
14532 {
14533 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
14534 if (arg0)
14535 return build_call_expr (get_callee_fndecl (exp), 1, arg0);
14536 }
14537 break;
14538 }
14539 }
14540 break;
14541
14542 default:
14543 break;
14544 }
14545 return NULL_TREE;
14546 }
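
/* Illustrative examples (editor's notes, not from the GCC sources),
   valid only when the caller knows the sign of the result is
   insignificant, e.g. because it feeds fabs or a test against zero:

     -x * y           strips to  x * y
     sin (-x)         strips to  sin (x)    (sin is odd)
     copysign (x, s)  strips to  x          (keeping any side effects
                                             of S via omit_one_operand)
*/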