/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"

/* Non-zero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
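
/* Each bit above stands for one of the four mutually exclusive outcomes
   of a comparison: LT (bit 0), EQ (bit 1), GT (bit 2) and UNORD (bit 3).
   ORing two codes therefore yields the code of their disjunction and
   ANDing yields their conjunction, e.g.

     COMPCODE_LE | COMPCODE_GT == COMPCODE_ORD   (3 | 4 == 7)
     COMPCODE_LT & COMPCODE_LE == COMPCODE_LT    (1 & 3 == 1)

   which is the property transformations such as combine_comparisons
   rely on.  */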

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *, bool *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);


/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
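
/* For example, with 8-bit quantities, a = b = 0x70 gives sum = 0xe0:
   ~(a ^ b) has the sign bit set (the signs of A and B agree), and
   a ^ sum has it set as well (A and SUM differ in sign), so the macro
   yields nonzero and the signed overflow is detected.  */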
\f
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
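
/* As an illustration, on a host with 32-bit HOST_WIDE_INT (so BASE is
   0x10000), encode (words, 0x12345678, 0x0abcdef0) produces

     words[0] == 0x5678   words[1] == 0x1234
     words[2] == 0xdef0   words[3] == 0x0abc

   and decode reassembles the original LOW/HI pair.  Keeping only half a
   word per element leaves headroom for the carries accumulated by the
   multiplication and division routines below.  */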
\f
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec;
  int sign_extended_type;

  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT)1
                << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)l1 < 0)
        h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          h1 = -1;
          l1 |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}
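
/* For example, forcing the value 511 (L1 == 0x1ff, H1 == 0) into a
   signed 8-bit type first clears the bits beyond bit 7, leaving 0xff,
   and then sign extends, so *LV/*HV hold the double-word value -1.
   Since the result differs from the argument, the return value signals
   overflow.  */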

/* We force the double-word integer HIGH:LOW to the range of the type
   TYPE by sign or zero extending it.  OVERFLOWABLE indicates whether
   we are interested in overflow of the value: when >0 we are only
   interested in signed overflow, for <0 we are interested in any
   overflow.  OVERFLOWED indicates whether overflow has already
   occurred.  We force the value to be within range of TYPE (by
   clearing or setting all the bits outside the type's range).  We set
   TREE_OVERFLOW if
     OVERFLOWED is nonzero,
     or OVERFLOWABLE is >0 and signed overflow occurs,
     or OVERFLOWABLE is <0 and any overflow occurs.
   We return a new tree node for the extended double-word integer.
   The node is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
                       HOST_WIDE_INT high, int overflowable,
                       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          tree t = make_node (INTEGER_CST);
          TREE_INT_CST_LOW (t) = low;
          TREE_INT_CST_HIGH (t) = high;
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t) = 1;
          return t;
        }
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}
\f
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
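
/* Note that the (l < l1) term above detects a carry out of the low
   word: the unsigned addition wraps, so the low-word sum is smaller
   than an addend exactly when a carry occurred.  E.g. l1 == ~0, l2 == 1
   gives l == 0 < l1, and the carry is folded into the high word.  */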

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
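
/* Negation overflows only for the most negative value, i.e. when L1 is
   zero and H1 has just its sign bit set: then -H1 has the same bit
   pattern as H1, both operands of the AND above are negative, and the
   function returns nonzero.  */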
\f
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
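
/* The signed fixup above works because the unsigned product treats a
   negative operand H1:L1 as its value plus 2**(2 * HOST_BITS_PER_WIDE_INT),
   which inflates the upper double word of the product by exactly the
   other operand; adding that operand's negation into TOPLOW:TOPHIGH
   cancels the excess.  Afterward the product fits iff the top double
   word is all zeros (nonnegative result) or all ones (negative result),
   which is precisely what the final test checks.  */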
\f
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
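
/* Note the two-step shifts above and in rshift_double, such as
   l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1: when COUNT is zero a
   single shift by HOST_BITS_PER_WIDE_INT would be undefined behavior on
   the host, so the shift is split so that neither step can reach the
   full word width.  */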

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
\f
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
\f
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero the extra (5th) element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {			/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
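
/* For example, dividing -8 by 3 yields quotient -2 and remainder -2
   under TRUNC_DIV_EXPR, quotient -3 and remainder 1 under
   FLOOR_DIV_EXPR, quotient -2 and remainder -2 under CEIL_DIV_EXPR, and
   quotient -3 and remainder 1 under ROUND_DIV_EXPR, since
   2 * |rem| > |den| makes the final adjustment step the quotient away
   from zero to the closest integer.  */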

/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
     &obj[some_exotic_number].  */
  if (POINTER_TYPE_P (type))
    {
      uns = false;
      type = signed_type_for (type);
      fit_double_type (int1l, int1h, &int1l, &int1h,
                       type);
    }
  else
    fit_double_type (int1l, int1h, &int1l, &int1h, type);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
\f
/* This is non-zero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, tree stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL_TREE || !expr_has_location (stmt))
    locus = input_location;
  else
    locus = expr_location (stmt);
  warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL_TREE, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  gcc_assert (!flag_wrapv && !flag_trapv);
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
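
/* A minimal sketch of the deferral protocol above, as a hypothetical
   caller might use it (the condition shown is illustrative, not taken
   from a real caller):

     fold_defer_overflow_warnings ();
     tem = fold (expr);
     fold_undefer_overflow_warnings (tem != NULL_TREE, stmt, 0);

   so any "assuming signed overflow does not occur" warning raised
   while folding is buffered and only issued if the caller actually
   uses the simplified result.  */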
\f
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
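
/* E.g. for a signed 8-bit type the only constant whose negation
   overflows is -128, whose low word masked to the precision is exactly
   (unsigned HOST_WIDE_INT) 1 << 7, the one value the final comparison
   rejects.  */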

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
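
/* The signed-zero guards above matter because, e.g., rewriting
   -(A - B) as B - A is wrong for A == B in IEEE arithmetic: A - B is
   +0.0 and its negation -0.0, while B - A is +0.0.  */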

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2 (COMPLEX_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)),
                            fold_negate_expr (TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1 (CONJ_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2 (MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return negate_expr (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr (fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
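
/* The RSHIFT_EXPR case above relies on the shift count being
   precision - 1: for a 32-bit int, (int) x >> 31 is 0 or -1, so its
   negation is 0 or 1, which is exactly (unsigned) x >> 31, the sign
   bit of x.  */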

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
\f
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
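
/* For example, splitting IN = a - 5 with CODE == PLUS_EXPR and NEGATE_P
   false finds the literal 5 on the subtracted side: it is stored in
   *MINUS_LITP, *LITP and *CONP are left null, and the variable part a
   is returned.  */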

/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
\f
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
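
/* For instance, folding INT_MAX + 1 in a 32-bit signed type wraps to
   INT_MIN; the overflow is detected either by add_double itself or
   when force_fit_type_double truncates the value back to the type's
   precision, and the result is an unshared INTEGER_CST with
   TREE_OVERFLOW set, so callers can tell the constant overflowed.  */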
1790
1791 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1792 constant. We assume ARG1 and ARG2 have the same data type, or at least
1793 are the same kind of constant and the same machine mode. Return zero if
1794 combining the constants is not allowed in the current operating mode.
1795
1796 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1797
1798 static tree
1799 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1800 {
1801 /* Sanity check for the recursive cases. */
1802 if (!arg1 || !arg2)
1803 return NULL_TREE;
1804
1805 STRIP_NOPS (arg1);
1806 STRIP_NOPS (arg2);
1807
1808 if (TREE_CODE (arg1) == INTEGER_CST)
1809 return int_const_binop (code, arg1, arg2, notrunc);
1810
1811 if (TREE_CODE (arg1) == REAL_CST)
1812 {
1813 enum machine_mode mode;
1814 REAL_VALUE_TYPE d1;
1815 REAL_VALUE_TYPE d2;
1816 REAL_VALUE_TYPE value;
1817 REAL_VALUE_TYPE result;
1818 bool inexact;
1819 tree t, type;
1820
1821 /* The following codes are handled by real_arithmetic. */
1822 switch (code)
1823 {
1824 case PLUS_EXPR:
1825 case MINUS_EXPR:
1826 case MULT_EXPR:
1827 case RDIV_EXPR:
1828 case MIN_EXPR:
1829 case MAX_EXPR:
1830 break;
1831
1832 default:
1833 return NULL_TREE;
1834 }
1835
1836 d1 = TREE_REAL_CST (arg1);
1837 d2 = TREE_REAL_CST (arg2);
1838
1839 type = TREE_TYPE (arg1);
1840 mode = TYPE_MODE (type);
1841
1842 /* Don't perform the operation if we honor signaling NaNs and
1843 either operand is a NaN. */
1844 if (HONOR_SNANS (mode)
1845 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1846 return NULL_TREE;
1847
1848 /* Don't perform the operation if it would raise a division
1849 by zero exception. */
1850 if (code == RDIV_EXPR
1851 && REAL_VALUES_EQUAL (d2, dconst0)
1852 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1853 return NULL_TREE;
1854
1855 /* If either operand is a NaN, just return it rather than
1856 folding the operation. */
1857 if (REAL_VALUE_ISNAN (d1))
1858 return arg1;
1859 else if (REAL_VALUE_ISNAN (d2))
1860 return arg2;
1861
1862 inexact = real_arithmetic (&value, code, &d1, &d2);
1863 real_convert (&result, mode, &value);
1864
1865 /* Don't constant fold this floating point operation if
1866 the result has overflowed and flag_trapping_math is set. */
1867 if (flag_trapping_math
1868 && MODE_HAS_INFINITIES (mode)
1869 && REAL_VALUE_ISINF (result)
1870 && !REAL_VALUE_ISINF (d1)
1871 && !REAL_VALUE_ISINF (d2))
1872 return NULL_TREE;
1873
1874 /* Don't constant fold this floating point operation if the
1875 result may depend upon the run-time rounding mode and
1876 flag_rounding_math is set, or if GCC's software emulation
1877 is unable to accurately represent the result. */
1878 if ((flag_rounding_math
1879 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1880 && !flag_unsafe_math_optimizations))
1881 && (inexact || !real_identical (&result, &value)))
1882 return NULL_TREE;
1883
1884 t = build_real (type, result);
1885
1886 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1887 return t;
1888 }
1889
1890 if (TREE_CODE (arg1) == COMPLEX_CST)
1891 {
1892 tree type = TREE_TYPE (arg1);
1893 tree r1 = TREE_REALPART (arg1);
1894 tree i1 = TREE_IMAGPART (arg1);
1895 tree r2 = TREE_REALPART (arg2);
1896 tree i2 = TREE_IMAGPART (arg2);
1897 tree real, imag;
1898
1899 switch (code)
1900 {
1901 case PLUS_EXPR:
1902 case MINUS_EXPR:
1903 real = const_binop (code, r1, r2, notrunc);
1904 imag = const_binop (code, i1, i2, notrunc);
1905 break;
1906
1907 case MULT_EXPR:
1908 real = const_binop (MINUS_EXPR,
1909 const_binop (MULT_EXPR, r1, r2, notrunc),
1910 const_binop (MULT_EXPR, i1, i2, notrunc),
1911 notrunc);
1912 imag = const_binop (PLUS_EXPR,
1913 const_binop (MULT_EXPR, r1, i2, notrunc),
1914 const_binop (MULT_EXPR, i1, r2, notrunc),
1915 notrunc);
1916 break;
1917
1918 case RDIV_EXPR:
1919 {
1920 tree magsquared
1921 = const_binop (PLUS_EXPR,
1922 const_binop (MULT_EXPR, r2, r2, notrunc),
1923 const_binop (MULT_EXPR, i2, i2, notrunc),
1924 notrunc);
1925 tree t1
1926 = const_binop (PLUS_EXPR,
1927 const_binop (MULT_EXPR, r1, r2, notrunc),
1928 const_binop (MULT_EXPR, i1, i2, notrunc),
1929 notrunc);
1930 tree t2
1931 = const_binop (MINUS_EXPR,
1932 const_binop (MULT_EXPR, i1, r2, notrunc),
1933 const_binop (MULT_EXPR, r1, i2, notrunc),
1934 notrunc);
1935
1936 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1937 code = TRUNC_DIV_EXPR;
1938
1939 real = const_binop (code, t1, magsquared, notrunc);
1940 imag = const_binop (code, t2, magsquared, notrunc);
1941 }
1942 break;
1943
1944 default:
1945 return NULL_TREE;
1946 }
1947
1948 if (real && imag)
1949 return build_complex (type, real, imag);
1950 }
1951
1952 return NULL_TREE;
1953 }
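/* Expository note (not part of the original source): the COMPLEX_CST
   cases above implement the textbook formulas.  For multiplication,

     (a + bi) * (c + di) == (ac - bd) + (ad + bc)i

   and for division,

     (a + bi) / (c + di) == ((ac + bd) + (bc - ad)i) / (c*c + d*d)

   where c*c + d*d is the MAGSQUARED value computed above and each
   part is folded recursively through const_binop.  */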
1954
1955 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1956 indicates which particular sizetype to create. */
1957
1958 tree
1959 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1960 {
1961 return build_int_cst (sizetype_tab[(int) kind], number);
1962 }
1963 \f
1964 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1965 is a tree code. The type of the result is taken from the operands.
1966 Both must be equivalent integer types, ala int_binop_types_match_p.
1967 If the operands are constant, so is the result. */
1968
1969 tree
1970 size_binop (enum tree_code code, tree arg0, tree arg1)
1971 {
1972 tree type = TREE_TYPE (arg0);
1973
1974 if (arg0 == error_mark_node || arg1 == error_mark_node)
1975 return error_mark_node;
1976
1977 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1978 TREE_TYPE (arg1)));
1979
1980 /* Handle the special case of two integer constants faster. */
1981 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1982 {
1983 /* And some specific cases even faster than that. */
1984 if (code == PLUS_EXPR)
1985 {
1986 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1987 return arg1;
1988 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1989 return arg0;
1990 }
1991 else if (code == MINUS_EXPR)
1992 {
1993 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1994 return arg0;
1995 }
1996 else if (code == MULT_EXPR)
1997 {
1998 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1999 return arg1;
2000 }
2001
2002 /* Handle general case of two integer constants. */
2003 return int_const_binop (code, arg0, arg1, 0);
2004 }
2005
2006 return fold_build2 (code, type, arg0, arg1);
2007 }
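/* Usage sketch (an illustrative addition, not in the original
   source): with two INTEGER_CST operands the fast path folds
   immediately, e.g.

     size_binop (PLUS_EXPR, size_int (4), size_int (8))

   yields the sizetype constant 12, while adding size_zero_node simply
   returns the other operand via the special cases above.  */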
2008
2009 /* Given two values, either both of sizetype or both of bitsizetype,
2010 compute the difference between the two values. Return the value
2011 in the signed type corresponding to the type of the operands. */
2012
2013 tree
2014 size_diffop (tree arg0, tree arg1)
2015 {
2016 tree type = TREE_TYPE (arg0);
2017 tree ctype;
2018
2019 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2020 TREE_TYPE (arg1)));
2021
2022 /* If the type is already signed, just do the simple thing. */
2023 if (!TYPE_UNSIGNED (type))
2024 return size_binop (MINUS_EXPR, arg0, arg1);
2025
2026 if (type == sizetype)
2027 ctype = ssizetype;
2028 else if (type == bitsizetype)
2029 ctype = sbitsizetype;
2030 else
2031 ctype = lang_hooks.types.signed_type (type);
2032
2033 /* If either operand is not a constant, do the conversions to the signed
2034 type and subtract. The hardware will do the right thing with any
2035 overflow in the subtraction. */
2036 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2037 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2038 fold_convert (ctype, arg1));
2039
2040 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2041 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2042 overflow) and negate (which can't either). Special-case a result
2043 of zero while we're here. */
2044 if (tree_int_cst_equal (arg0, arg1))
2045 return build_int_cst (ctype, 0);
2046 else if (tree_int_cst_lt (arg1, arg0))
2047 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2048 else
2049 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2050 fold_convert (ctype, size_binop (MINUS_EXPR,
2051 arg1, arg0)));
2052 }
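/* Worked example (expository, not in the original source): sizetype
   is unsigned, so size_diffop (size_int (2), size_int (5)) must not
   compute 2 - 5 directly, which would wrap.  Per the code above it
   instead folds 5 - 2 == 3, converts to ssizetype, and negates,
   yielding -3 with no overflow along the way.  */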
2053 \f
2054 /* A subroutine of fold_convert_const handling conversions of an
2055 INTEGER_CST to another integer type. */
2056
2057 static tree
2058 fold_convert_const_int_from_int (tree type, tree arg1)
2059 {
2060 tree t;
2061
2062 /* Given an integer constant, make a new constant with the new type,
2063 appropriately sign-extended or truncated. */
2064 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2065 TREE_INT_CST_HIGH (arg1),
2066 /* Don't set the overflow when
2067 converting a pointer. */
2068 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2069 (TREE_INT_CST_HIGH (arg1) < 0
2070 && (TYPE_UNSIGNED (type)
2071 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2072 | TREE_OVERFLOW (arg1));
2073
2074 return t;
2075 }
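/* Worked example (expository, not in the original source): converting
   the INTEGER_CST 300 to an 8-bit signed type keeps only the low
   eight bits, 300 & 0xff == 44, which force_fit_type_double then
   sign-extends; 44 has the sign bit clear, so the new constant is
   simply 44.  */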
2076
2077 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2078 to an integer type. */
2079
2080 static tree
2081 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
2082 {
2083 int overflow = 0;
2084 tree t;
2085
2086 /* The following code implements the floating point to integer
2087 conversion rules required by the Java Language Specification,
2088 that IEEE NaNs are mapped to zero and values that overflow
2089 the target precision saturate, i.e. values greater than
2090 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2091 are mapped to INT_MIN. These semantics are allowed by the
2092 C and C++ standards that simply state that the behavior of
2093 FP-to-integer conversion is unspecified upon overflow. */
2094
2095 HOST_WIDE_INT high, low;
2096 REAL_VALUE_TYPE r;
2097 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2098
2099 switch (code)
2100 {
2101 case FIX_TRUNC_EXPR:
2102 real_trunc (&r, VOIDmode, &x);
2103 break;
2104
2105 default:
2106 gcc_unreachable ();
2107 }
2108
2109 /* If R is NaN, return zero and show we have an overflow. */
2110 if (REAL_VALUE_ISNAN (r))
2111 {
2112 overflow = 1;
2113 high = 0;
2114 low = 0;
2115 }
2116
2117 /* See if R is less than the lower bound or greater than the
2118 upper bound. */
2119
2120 if (! overflow)
2121 {
2122 tree lt = TYPE_MIN_VALUE (type);
2123 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2124 if (REAL_VALUES_LESS (r, l))
2125 {
2126 overflow = 1;
2127 high = TREE_INT_CST_HIGH (lt);
2128 low = TREE_INT_CST_LOW (lt);
2129 }
2130 }
2131
2132 if (! overflow)
2133 {
2134 tree ut = TYPE_MAX_VALUE (type);
2135 if (ut)
2136 {
2137 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2138 if (REAL_VALUES_LESS (u, r))
2139 {
2140 overflow = 1;
2141 high = TREE_INT_CST_HIGH (ut);
2142 low = TREE_INT_CST_LOW (ut);
2143 }
2144 }
2145 }
2146
2147 if (! overflow)
2148 REAL_VALUE_TO_INT (&low, &high, r);
2149
2150 t = force_fit_type_double (type, low, high, -1,
2151 overflow | TREE_OVERFLOW (arg1));
2152 return t;
2153 }
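/* Worked example (expository, not in the original source): under the
   saturating semantics described above, folding (int) 1.0e30 yields
   INT_MAX and folding (int) -1.0e30 yields INT_MIN, both with
   TREE_OVERFLOW set on the result, while folding a NaN yields 0, also
   flagged as an overflow.  */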
2154
2155 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2156 to another floating point type. */
2157
2158 static tree
2159 fold_convert_const_real_from_real (tree type, tree arg1)
2160 {
2161 REAL_VALUE_TYPE value;
2162 tree t;
2163
2164 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2165 t = build_real (type, value);
2166
2167 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2168 return t;
2169 }
2170
2171 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2172 type TYPE. If no simplification can be done return NULL_TREE. */
2173
2174 static tree
2175 fold_convert_const (enum tree_code code, tree type, tree arg1)
2176 {
2177 if (TREE_TYPE (arg1) == type)
2178 return arg1;
2179
2180 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2181 {
2182 if (TREE_CODE (arg1) == INTEGER_CST)
2183 return fold_convert_const_int_from_int (type, arg1);
2184 else if (TREE_CODE (arg1) == REAL_CST)
2185 return fold_convert_const_int_from_real (code, type, arg1);
2186 }
2187 else if (TREE_CODE (type) == REAL_TYPE)
2188 {
2189 if (TREE_CODE (arg1) == INTEGER_CST)
2190 return build_real_from_int_cst (type, arg1);
2191 if (TREE_CODE (arg1) == REAL_CST)
2192 return fold_convert_const_real_from_real (type, arg1);
2193 }
2194 return NULL_TREE;
2195 }
2196
2197 /* Construct a vector of zero elements of vector type TYPE. */
2198
2199 static tree
2200 build_zero_vector (tree type)
2201 {
2202 tree elem, list;
2203 int i, units;
2204
2205 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2206 units = TYPE_VECTOR_SUBPARTS (type);
2207
2208 list = NULL_TREE;
2209 for (i = 0; i < units; i++)
2210 list = tree_cons (NULL_TREE, elem, list);
2211 return build_vector (type, list);
2212 }
2213
2214 /* Convert expression ARG to type TYPE. Used by the middle-end for
2215 simple conversions in preference to calling the front-end's convert. */
2216
2217 tree
2218 fold_convert (tree type, tree arg)
2219 {
2220 tree orig = TREE_TYPE (arg);
2221 tree tem;
2222
2223 if (type == orig)
2224 return arg;
2225
2226 if (TREE_CODE (arg) == ERROR_MARK
2227 || TREE_CODE (type) == ERROR_MARK
2228 || TREE_CODE (orig) == ERROR_MARK)
2229 return error_mark_node;
2230
2231 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2232 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2233 TYPE_MAIN_VARIANT (orig)))
2234 return fold_build1 (NOP_EXPR, type, arg);
2235
2236 switch (TREE_CODE (type))
2237 {
2238 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2239 case POINTER_TYPE: case REFERENCE_TYPE:
2240 case OFFSET_TYPE:
2241 if (TREE_CODE (arg) == INTEGER_CST)
2242 {
2243 tem = fold_convert_const (NOP_EXPR, type, arg);
2244 if (tem != NULL_TREE)
2245 return tem;
2246 }
2247 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2248 || TREE_CODE (orig) == OFFSET_TYPE)
2249 return fold_build1 (NOP_EXPR, type, arg);
2250 if (TREE_CODE (orig) == COMPLEX_TYPE)
2251 {
2252 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2253 return fold_convert (type, tem);
2254 }
2255 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2256 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2257 return fold_build1 (NOP_EXPR, type, arg);
2258
2259 case REAL_TYPE:
2260 if (TREE_CODE (arg) == INTEGER_CST)
2261 {
2262 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2263 if (tem != NULL_TREE)
2264 return tem;
2265 }
2266 else if (TREE_CODE (arg) == REAL_CST)
2267 {
2268 tem = fold_convert_const (NOP_EXPR, type, arg);
2269 if (tem != NULL_TREE)
2270 return tem;
2271 }
2272
2273 switch (TREE_CODE (orig))
2274 {
2275 case INTEGER_TYPE:
2276 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2277 case POINTER_TYPE: case REFERENCE_TYPE:
2278 return fold_build1 (FLOAT_EXPR, type, arg);
2279
2280 case REAL_TYPE:
2281 return fold_build1 (NOP_EXPR, type, arg);
2282
2283 case COMPLEX_TYPE:
2284 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2285 return fold_convert (type, tem);
2286
2287 default:
2288 gcc_unreachable ();
2289 }
2290
2291 case COMPLEX_TYPE:
2292 switch (TREE_CODE (orig))
2293 {
2294 case INTEGER_TYPE:
2295 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2296 case POINTER_TYPE: case REFERENCE_TYPE:
2297 case REAL_TYPE:
2298 return build2 (COMPLEX_EXPR, type,
2299 fold_convert (TREE_TYPE (type), arg),
2300 fold_convert (TREE_TYPE (type), integer_zero_node));
2301 case COMPLEX_TYPE:
2302 {
2303 tree rpart, ipart;
2304
2305 if (TREE_CODE (arg) == COMPLEX_EXPR)
2306 {
2307 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2308 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2309 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2310 }
2311
2312 arg = save_expr (arg);
2313 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2314 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2315 rpart = fold_convert (TREE_TYPE (type), rpart);
2316 ipart = fold_convert (TREE_TYPE (type), ipart);
2317 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2318 }
2319
2320 default:
2321 gcc_unreachable ();
2322 }
2323
2324 case VECTOR_TYPE:
2325 if (integer_zerop (arg))
2326 return build_zero_vector (type);
2327 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2328 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2329 || TREE_CODE (orig) == VECTOR_TYPE);
2330 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2331
2332 case VOID_TYPE:
2333 tem = fold_ignored_result (arg);
2334 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2335 return tem;
2336 return fold_build1 (NOP_EXPR, type, tem);
2337
2338 default:
2339 gcc_unreachable ();
2340 }
2341 }
2342 \f
2343 /* Return false if expr can be assumed not to be an lvalue, true
2344 otherwise. */
2345
2346 static bool
2347 maybe_lvalue_p (tree x)
2348 {
2349 /* We only need to wrap lvalue tree codes. */
2350 switch (TREE_CODE (x))
2351 {
2352 case VAR_DECL:
2353 case PARM_DECL:
2354 case RESULT_DECL:
2355 case LABEL_DECL:
2356 case FUNCTION_DECL:
2357 case SSA_NAME:
2358
2359 case COMPONENT_REF:
2360 case INDIRECT_REF:
2361 case ALIGN_INDIRECT_REF:
2362 case MISALIGNED_INDIRECT_REF:
2363 case ARRAY_REF:
2364 case ARRAY_RANGE_REF:
2365 case BIT_FIELD_REF:
2366 case OBJ_TYPE_REF:
2367
2368 case REALPART_EXPR:
2369 case IMAGPART_EXPR:
2370 case PREINCREMENT_EXPR:
2371 case PREDECREMENT_EXPR:
2372 case SAVE_EXPR:
2373 case TRY_CATCH_EXPR:
2374 case WITH_CLEANUP_EXPR:
2375 case COMPOUND_EXPR:
2376 case MODIFY_EXPR:
2377 case GIMPLE_MODIFY_STMT:
2378 case TARGET_EXPR:
2379 case COND_EXPR:
2380 case BIND_EXPR:
2381 case MIN_EXPR:
2382 case MAX_EXPR:
2383 break;
2384
2385 default:
2386 /* Assume the worst for front-end tree codes. */
2387 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2388 break;
2389 return false;
2390 }
2391
2392 return true;
2393 }
2394
2395 /* Return an expr equal to X but certainly not valid as an lvalue. */
2396
2397 tree
2398 non_lvalue (tree x)
2399 {
2400 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2401 us. */
2402 if (in_gimple_form)
2403 return x;
2404
2405 if (! maybe_lvalue_p (x))
2406 return x;
2407 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2408 }
2409
2410 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2411 Zero means allow extended lvalues. */
2412
2413 int pedantic_lvalues;
2414
2415 /* When pedantic, return an expr equal to X but certainly not valid as a
2416 pedantic lvalue. Otherwise, return X. */
2417
2418 static tree
2419 pedantic_non_lvalue (tree x)
2420 {
2421 if (pedantic_lvalues)
2422 return non_lvalue (x);
2423 else
2424 return x;
2425 }
2426 \f
2427 /* Given a tree comparison code, return the code that is the logical inverse
2428 of the given code. It is not safe to do this for floating-point
2429 comparisons, except for NE_EXPR and EQ_EXPR, so we take a HONOR_NANS
2430 flag as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2431
2432 enum tree_code
2433 invert_tree_comparison (enum tree_code code, bool honor_nans)
2434 {
2435 if (honor_nans && flag_trapping_math)
2436 return ERROR_MARK;
2437
2438 switch (code)
2439 {
2440 case EQ_EXPR:
2441 return NE_EXPR;
2442 case NE_EXPR:
2443 return EQ_EXPR;
2444 case GT_EXPR:
2445 return honor_nans ? UNLE_EXPR : LE_EXPR;
2446 case GE_EXPR:
2447 return honor_nans ? UNLT_EXPR : LT_EXPR;
2448 case LT_EXPR:
2449 return honor_nans ? UNGE_EXPR : GE_EXPR;
2450 case LE_EXPR:
2451 return honor_nans ? UNGT_EXPR : GT_EXPR;
2452 case LTGT_EXPR:
2453 return UNEQ_EXPR;
2454 case UNEQ_EXPR:
2455 return LTGT_EXPR;
2456 case UNGT_EXPR:
2457 return LE_EXPR;
2458 case UNGE_EXPR:
2459 return LT_EXPR;
2460 case UNLT_EXPR:
2461 return GE_EXPR;
2462 case UNLE_EXPR:
2463 return GT_EXPR;
2464 case ORDERED_EXPR:
2465 return UNORDERED_EXPR;
2466 case UNORDERED_EXPR:
2467 return ORDERED_EXPR;
2468 default:
2469 gcc_unreachable ();
2470 }
2471 }
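/* Usage sketch (illustrative, not in the original source):

     invert_tree_comparison (LT_EXPR, false) == GE_EXPR
     invert_tree_comparison (LT_EXPR, true)  == UNGE_EXPR

   i.e. !(a < b) is a >= b only when NaNs cannot occur; with NaNs it
   must become the unordered-or-greater-equal test, and if trapping
   math is enabled as well the function gives up with ERROR_MARK.  */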
2472
2473 /* Similar, but return the comparison that results if the operands are
2474 swapped. This is safe for floating-point. */
2475
2476 enum tree_code
2477 swap_tree_comparison (enum tree_code code)
2478 {
2479 switch (code)
2480 {
2481 case EQ_EXPR:
2482 case NE_EXPR:
2483 case ORDERED_EXPR:
2484 case UNORDERED_EXPR:
2485 case LTGT_EXPR:
2486 case UNEQ_EXPR:
2487 return code;
2488 case GT_EXPR:
2489 return LT_EXPR;
2490 case GE_EXPR:
2491 return LE_EXPR;
2492 case LT_EXPR:
2493 return GT_EXPR;
2494 case LE_EXPR:
2495 return GE_EXPR;
2496 case UNGT_EXPR:
2497 return UNLT_EXPR;
2498 case UNGE_EXPR:
2499 return UNLE_EXPR;
2500 case UNLT_EXPR:
2501 return UNGT_EXPR;
2502 case UNLE_EXPR:
2503 return UNGE_EXPR;
2504 default:
2505 gcc_unreachable ();
2506 }
2507 }
2508
2509
2510 /* Convert a comparison tree code from an enum tree_code representation
2511 into a compcode bit-based encoding. This function is the inverse of
2512 compcode_to_comparison. */
2513
2514 static enum comparison_code
2515 comparison_to_compcode (enum tree_code code)
2516 {
2517 switch (code)
2518 {
2519 case LT_EXPR:
2520 return COMPCODE_LT;
2521 case EQ_EXPR:
2522 return COMPCODE_EQ;
2523 case LE_EXPR:
2524 return COMPCODE_LE;
2525 case GT_EXPR:
2526 return COMPCODE_GT;
2527 case NE_EXPR:
2528 return COMPCODE_NE;
2529 case GE_EXPR:
2530 return COMPCODE_GE;
2531 case ORDERED_EXPR:
2532 return COMPCODE_ORD;
2533 case UNORDERED_EXPR:
2534 return COMPCODE_UNORD;
2535 case UNLT_EXPR:
2536 return COMPCODE_UNLT;
2537 case UNEQ_EXPR:
2538 return COMPCODE_UNEQ;
2539 case UNLE_EXPR:
2540 return COMPCODE_UNLE;
2541 case UNGT_EXPR:
2542 return COMPCODE_UNGT;
2543 case LTGT_EXPR:
2544 return COMPCODE_LTGT;
2545 case UNGE_EXPR:
2546 return COMPCODE_UNGE;
2547 default:
2548 gcc_unreachable ();
2549 }
2550 }
2551
2552 /* Convert a compcode bit-based encoding of a comparison operator back
2553 to GCC's enum tree_code representation. This function is the
2554 inverse of comparison_to_compcode. */
2555
2556 static enum tree_code
2557 compcode_to_comparison (enum comparison_code code)
2558 {
2559 switch (code)
2560 {
2561 case COMPCODE_LT:
2562 return LT_EXPR;
2563 case COMPCODE_EQ:
2564 return EQ_EXPR;
2565 case COMPCODE_LE:
2566 return LE_EXPR;
2567 case COMPCODE_GT:
2568 return GT_EXPR;
2569 case COMPCODE_NE:
2570 return NE_EXPR;
2571 case COMPCODE_GE:
2572 return GE_EXPR;
2573 case COMPCODE_ORD:
2574 return ORDERED_EXPR;
2575 case COMPCODE_UNORD:
2576 return UNORDERED_EXPR;
2577 case COMPCODE_UNLT:
2578 return UNLT_EXPR;
2579 case COMPCODE_UNEQ:
2580 return UNEQ_EXPR;
2581 case COMPCODE_UNLE:
2582 return UNLE_EXPR;
2583 case COMPCODE_UNGT:
2584 return UNGT_EXPR;
2585 case COMPCODE_LTGT:
2586 return LTGT_EXPR;
2587 case COMPCODE_UNGE:
2588 return UNGE_EXPR;
2589 default:
2590 gcc_unreachable ();
2591 }
2592 }
2593
2594 /* Return a tree for the comparison which is the combination of
2595 doing the AND or OR (depending on CODE) of the two operations LCODE
2596 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2597 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2598 if this makes the transformation invalid. */
2599
2600 tree
2601 combine_comparisons (enum tree_code code, enum tree_code lcode,
2602 enum tree_code rcode, tree truth_type,
2603 tree ll_arg, tree lr_arg)
2604 {
2605 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2606 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2607 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2608 enum comparison_code compcode;
2609
2610 switch (code)
2611 {
2612 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2613 compcode = lcompcode & rcompcode;
2614 break;
2615
2616 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2617 compcode = lcompcode | rcompcode;
2618 break;
2619
2620 default:
2621 return NULL_TREE;
2622 }
2623
2624 if (!honor_nans)
2625 {
2626 /* Eliminate unordered comparisons, as well as LTGT and ORD
2627 which are not used unless the mode has NaNs. */
2628 compcode &= ~COMPCODE_UNORD;
2629 if (compcode == COMPCODE_LTGT)
2630 compcode = COMPCODE_NE;
2631 else if (compcode == COMPCODE_ORD)
2632 compcode = COMPCODE_TRUE;
2633 }
2634 else if (flag_trapping_math)
2635 {
2636 /* Check that the original operation and the optimized ones will trap
2637 under the same condition. */
2638 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2639 && (lcompcode != COMPCODE_EQ)
2640 && (lcompcode != COMPCODE_ORD);
2641 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2642 && (rcompcode != COMPCODE_EQ)
2643 && (rcompcode != COMPCODE_ORD);
2644 bool trap = (compcode & COMPCODE_UNORD) == 0
2645 && (compcode != COMPCODE_EQ)
2646 && (compcode != COMPCODE_ORD);
2647
2648 /* In a short-circuited boolean expression the LHS might be
2649 such that the RHS, if evaluated, will never trap. For
2650 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2651 if neither x nor y is NaN. (This is a mixed blessing: for
2652 example, the expression above will never trap, hence
2653 optimizing it to x < y would be invalid). */
2654 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2655 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2656 rtrap = false;
2657
2658 /* If the comparison was short-circuited, and only the RHS
2659 trapped, we may now generate a spurious trap. */
2660 if (rtrap && !ltrap
2661 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2662 return NULL_TREE;
2663
2664 /* If we changed the conditions that cause a trap, we lose. */
2665 if ((ltrap || rtrap) != trap)
2666 return NULL_TREE;
2667 }
2668
2669 if (compcode == COMPCODE_TRUE)
2670 return constant_boolean_node (true, truth_type);
2671 else if (compcode == COMPCODE_FALSE)
2672 return constant_boolean_node (false, truth_type);
2673 else
2674 return fold_build2 (compcode_to_comparison (compcode),
2675 truth_type, ll_arg, lr_arg);
2676 }
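/* Worked example (expository, not in the original source): the bit
   encoding turns combining comparisons into bitwise arithmetic on the
   comparison_code values, e.g.

     (a < b) || (a == b):  COMPCODE_LT | COMPCODE_EQ == 1 | 2 == 3
                           == COMPCODE_LE, i.e. a <= b
     (a <= b) && (a >= b): COMPCODE_LE & COMPCODE_GE == 3 & 6 == 2
                           == COMPCODE_EQ, i.e. a == b

   subject to the NaN and trapping-math checks above for floating
   point operands.  */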
2677
2678 /* Return nonzero if CODE is a tree code that represents a truth value. */
2679
2680 static int
2681 truth_value_p (enum tree_code code)
2682 {
2683 return (TREE_CODE_CLASS (code) == tcc_comparison
2684 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2685 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2686 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2687 }
2688 \f
2689 /* Return nonzero if two operands (typically of the same tree node)
2690 are necessarily equal. If either argument has side-effects this
2691 function returns zero. FLAGS modifies behavior as follows:
2692
2693 If OEP_ONLY_CONST is set, only return nonzero for constants.
2694 This function tests whether the operands are indistinguishable;
2695 it does not test whether they are equal using C's == operation.
2696 The distinction is important for IEEE floating point, because
2697 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2698 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2699
2700 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2701 even though it may hold multiple values during a function.
2702 This is because a GCC tree node guarantees that nothing else is
2703 executed between the evaluation of its "operands" (which may often
2704 be evaluated in arbitrary order). Hence if the operands themselves
2705 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2706 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2707 unset means assuming isochronic (or instantaneous) tree equivalence.
2708 Unless comparing arbitrary expression trees, such as from different
2709 statements, this flag can usually be left unset.
2710
2711 If OEP_PURE_SAME is set, then pure functions with identical arguments
2712 are considered the same. It is used when the caller has other ways
2713 to ensure that global memory is unchanged in between. */
2714
2715 int
2716 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2717 {
2718 /* If either is ERROR_MARK, they aren't equal. */
2719 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2720 return 0;
2721
2722 /* If the two types differ in signedness, then we can't consider
2723 them equal. We must check this before the STRIP_NOPS calls
2724 because they may change the signedness of the arguments. */
2725 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2726 return 0;
2727
2728 /* If the two types differ in precision, then it is not safe
2729 to strip NOPs. */
2730 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2731 return 0;
2732
2733 STRIP_NOPS (arg0);
2734 STRIP_NOPS (arg1);
2735
2736 /* In case both args are comparisons but with different comparison
2737 code, try to swap the comparison operands of one arg to produce
2738 a match and compare that variant. */
2739 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2740 && COMPARISON_CLASS_P (arg0)
2741 && COMPARISON_CLASS_P (arg1))
2742 {
2743 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2744
2745 if (TREE_CODE (arg0) == swap_code)
2746 return operand_equal_p (TREE_OPERAND (arg0, 0),
2747 TREE_OPERAND (arg1, 1), flags)
2748 && operand_equal_p (TREE_OPERAND (arg0, 1),
2749 TREE_OPERAND (arg1, 0), flags);
2750 }
2751
2752 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2753 /* This is needed for conversions and for COMPONENT_REF.
2754 Might as well play it safe and always test this. */
2755 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2756 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2757 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2758 return 0;
2759
2760 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2761 We don't care about side effects in that case because the SAVE_EXPR
2762 takes care of that for us. In all other cases, two expressions are
2763 equal if they have no side effects. If we have two identical
2764 expressions with side effects that should be treated the same due
2765 to the only side effects being identical SAVE_EXPR's, that will
2766 be detected in the recursive calls below. */
2767 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2768 && (TREE_CODE (arg0) == SAVE_EXPR
2769 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2770 return 1;
2771
2772 /* Next handle constant cases, those for which we can return 1 even
2773 if ONLY_CONST is set. */
2774 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2775 switch (TREE_CODE (arg0))
2776 {
2777 case INTEGER_CST:
2778 return tree_int_cst_equal (arg0, arg1);
2779
2780 case REAL_CST:
2781 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2782 TREE_REAL_CST (arg1)))
2783 return 1;
2784
2785
2786 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2787 {
2788 /* If we do not distinguish between signed and unsigned zero,
2789 consider them equal. */
2790 if (real_zerop (arg0) && real_zerop (arg1))
2791 return 1;
2792 }
2793 return 0;
2794
2795 case VECTOR_CST:
2796 {
2797 tree v1, v2;
2798
2799 v1 = TREE_VECTOR_CST_ELTS (arg0);
2800 v2 = TREE_VECTOR_CST_ELTS (arg1);
2801 while (v1 && v2)
2802 {
2803 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2804 flags))
2805 return 0;
2806 v1 = TREE_CHAIN (v1);
2807 v2 = TREE_CHAIN (v2);
2808 }
2809
2810 return v1 == v2;
2811 }
2812
2813 case COMPLEX_CST:
2814 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2815 flags)
2816 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2817 flags));
2818
2819 case STRING_CST:
2820 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2821 && ! memcmp (TREE_STRING_POINTER (arg0),
2822 TREE_STRING_POINTER (arg1),
2823 TREE_STRING_LENGTH (arg0)));
2824
2825 case ADDR_EXPR:
2826 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2827 0);
2828 default:
2829 break;
2830 }
2831
2832 if (flags & OEP_ONLY_CONST)
2833 return 0;
2834
2835 /* Define macros to test an operand from arg0 and arg1 for equality and a
2836 variant that allows null and views null as being different from any
2837 non-null value. In the latter case, if either is null, then both
2838 must be; otherwise, do the normal comparison. */
2839 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2840 TREE_OPERAND (arg1, N), flags)
2841
2842 #define OP_SAME_WITH_NULL(N) \
2843 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2844 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2845
2846 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2847 {
2848 case tcc_unary:
2849 /* Two conversions are equal only if signedness and modes match. */
2850 switch (TREE_CODE (arg0))
2851 {
2852 case NOP_EXPR:
2853 case CONVERT_EXPR:
2854 case FIX_TRUNC_EXPR:
2855 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2856 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2857 return 0;
2858 break;
2859 default:
2860 break;
2861 }
2862
2863 return OP_SAME (0);
2864
2865
2866 case tcc_comparison:
2867 case tcc_binary:
2868 if (OP_SAME (0) && OP_SAME (1))
2869 return 1;
2870
2871 /* For commutative ops, allow the other order. */
2872 return (commutative_tree_code (TREE_CODE (arg0))
2873 && operand_equal_p (TREE_OPERAND (arg0, 0),
2874 TREE_OPERAND (arg1, 1), flags)
2875 && operand_equal_p (TREE_OPERAND (arg0, 1),
2876 TREE_OPERAND (arg1, 0), flags));
2877
2878 case tcc_reference:
2879 /* If either of the pointer (or reference) expressions we are
2880 dereferencing contains a side effect, these cannot be equal. */
2881 if (TREE_SIDE_EFFECTS (arg0)
2882 || TREE_SIDE_EFFECTS (arg1))
2883 return 0;
2884
2885 switch (TREE_CODE (arg0))
2886 {
2887 case INDIRECT_REF:
2888 case ALIGN_INDIRECT_REF:
2889 case MISALIGNED_INDIRECT_REF:
2890 case REALPART_EXPR:
2891 case IMAGPART_EXPR:
2892 return OP_SAME (0);
2893
2894 case ARRAY_REF:
2895 case ARRAY_RANGE_REF:
2896 /* Operands 2 and 3 may be null.
2897 Compare the array index by value first if it is constant, as we
2898 may have different types but the same value here. */
2899 return (OP_SAME (0)
2900 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2901 TREE_OPERAND (arg1, 1))
2902 || OP_SAME (1))
2903 && OP_SAME_WITH_NULL (2)
2904 && OP_SAME_WITH_NULL (3));
2905
2906 case COMPONENT_REF:
2907 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2908 may be NULL when we're called to compare MEM_EXPRs. */
2909 return OP_SAME_WITH_NULL (0)
2910 && OP_SAME (1)
2911 && OP_SAME_WITH_NULL (2);
2912
2913 case BIT_FIELD_REF:
2914 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2915
2916 default:
2917 return 0;
2918 }
2919
2920 case tcc_expression:
2921 switch (TREE_CODE (arg0))
2922 {
2923 case ADDR_EXPR:
2924 case TRUTH_NOT_EXPR:
2925 return OP_SAME (0);
2926
2927 case TRUTH_ANDIF_EXPR:
2928 case TRUTH_ORIF_EXPR:
2929 return OP_SAME (0) && OP_SAME (1);
2930
2931 case TRUTH_AND_EXPR:
2932 case TRUTH_OR_EXPR:
2933 case TRUTH_XOR_EXPR:
2934 if (OP_SAME (0) && OP_SAME (1))
2935 return 1;
2936
2937 /* Otherwise take into account that this is a commutative operation. */
2938 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2939 TREE_OPERAND (arg1, 1), flags)
2940 && operand_equal_p (TREE_OPERAND (arg0, 1),
2941 TREE_OPERAND (arg1, 0), flags));
2942
2943 default:
2944 return 0;
2945 }
2946
2947 case tcc_vl_exp:
2948 switch (TREE_CODE (arg0))
2949 {
2950 case CALL_EXPR:
2951 /* If the CALL_EXPRs call different functions, then they
2952 clearly cannot be equal. */
2953 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2954 flags))
2955 return 0;
2956
2957 {
2958 unsigned int cef = call_expr_flags (arg0);
2959 if (flags & OEP_PURE_SAME)
2960 cef &= ECF_CONST | ECF_PURE;
2961 else
2962 cef &= ECF_CONST;
2963 if (!cef)
2964 return 0;
2965 }
2966
2967 /* Now see if all the arguments are the same. */
2968 {
2969 call_expr_arg_iterator iter0, iter1;
2970 tree a0, a1;
2971 for (a0 = first_call_expr_arg (arg0, &iter0),
2972 a1 = first_call_expr_arg (arg1, &iter1);
2973 a0 && a1;
2974 a0 = next_call_expr_arg (&iter0),
2975 a1 = next_call_expr_arg (&iter1))
2976 if (! operand_equal_p (a0, a1, flags))
2977 return 0;
2978
2979 /* If we get here and both argument lists are exhausted
2980 then the CALL_EXPRs are equal. */
2981 return ! (a0 || a1);
2982 }
2983 default:
2984 return 0;
2985 }
2986
2987 case tcc_declaration:
2988 /* Consider __builtin_sqrt equal to sqrt. */
2989 return (TREE_CODE (arg0) == FUNCTION_DECL
2990 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2991 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2992 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2993
2994 default:
2995 return 0;
2996 }
2997
2998 #undef OP_SAME
2999 #undef OP_SAME_WITH_NULL
3000 }
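/* Usage sketch (illustrative, not in the original source): for
   side-effect-free trees A and B, operand_equal_p considers A + B and
   B + A equal through the commutativity handling above, so

     operand_equal_p (a_plus_b, b_plus_a, 0)

   returns 1 for such operands (a_plus_b and b_plus_a being
   hypothetical PLUS_EXPR trees), while passing OEP_ONLY_CONST instead
   of 0 restricts the answer to indistinguishable constants.  */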
3001 \f
3002 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3003 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3004
3005 When in doubt, return 0. */
3006
3007 static int
3008 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3009 {
3010 int unsignedp1, unsignedpo;
3011 tree primarg0, primarg1, primother;
3012 unsigned int correct_width;
3013
3014 if (operand_equal_p (arg0, arg1, 0))
3015 return 1;
3016
3017 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3018 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3019 return 0;
3020
3021 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3022 and see if the inner values are the same. This removes any
3023 signedness comparison, which doesn't matter here. */
3024 primarg0 = arg0, primarg1 = arg1;
3025 STRIP_NOPS (primarg0);
3026 STRIP_NOPS (primarg1);
3027 if (operand_equal_p (primarg0, primarg1, 0))
3028 return 1;
3029
3030 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3031 actual comparison operand, ARG0.
3032
3033 First throw away any conversions to wider types
3034 already present in the operands. */
3035
3036 primarg1 = get_narrower (arg1, &unsignedp1);
3037 primother = get_narrower (other, &unsignedpo);
3038
3039 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3040 if (unsignedp1 == unsignedpo
3041 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3042 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3043 {
3044 tree type = TREE_TYPE (arg0);
3045
3046 /* Make sure the shorter operand is extended the right way
3047 to match the longer operand. */
3048 primarg1 = fold_convert (get_signed_or_unsigned_type
3049 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3050
3051 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3052 return 1;
3053 }
3054
3055 return 0;
3056 }
3057 \f
3058 /* See if ARG is an expression that is either a comparison or is performing
3059 arithmetic on comparisons. The comparisons must only be comparing
3060 two different values, which will be stored in *CVAL1 and *CVAL2; if
3061 they are nonzero it means that some operands have already been found.
3062 No variables may be used anywhere else in the expression except in the
3063 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3064 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3065
3066 If this is true, return 1. Otherwise, return zero. */
3067
3068 static int
3069 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3070 {
3071 enum tree_code code = TREE_CODE (arg);
3072 enum tree_code_class class = TREE_CODE_CLASS (code);
3073
3074 /* We can handle some of the tcc_expression cases here. */
3075 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3076 class = tcc_unary;
3077 else if (class == tcc_expression
3078 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3079 || code == COMPOUND_EXPR))
3080 class = tcc_binary;
3081
3082 else if (class == tcc_expression && code == SAVE_EXPR
3083 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3084 {
3085 /* If we've already found a CVAL1 or CVAL2, this expression is
3086 too complex to handle. */
3087 if (*cval1 || *cval2)
3088 return 0;
3089
3090 class = tcc_unary;
3091 *save_p = 1;
3092 }
3093
3094 switch (class)
3095 {
3096 case tcc_unary:
3097 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3098
3099 case tcc_binary:
3100 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3101 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3102 cval1, cval2, save_p));
3103
3104 case tcc_constant:
3105 return 1;
3106
3107 case tcc_expression:
3108 if (code == COND_EXPR)
3109 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3110 cval1, cval2, save_p)
3111 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3112 cval1, cval2, save_p)
3113 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3114 cval1, cval2, save_p));
3115 return 0;
3116
3117 case tcc_comparison:
3118 /* First see if we can handle the first operand, then the second. For
3119 the second operand, we know *CVAL1 can't be zero. It must be that
3120 one side of the comparison is each of the values; test for the
3121 case where this isn't true by failing if the two operands
3122 are the same. */
3123
3124 if (operand_equal_p (TREE_OPERAND (arg, 0),
3125 TREE_OPERAND (arg, 1), 0))
3126 return 0;
3127
3128 if (*cval1 == 0)
3129 *cval1 = TREE_OPERAND (arg, 0);
3130 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3131 ;
3132 else if (*cval2 == 0)
3133 *cval2 = TREE_OPERAND (arg, 0);
3134 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3135 ;
3136 else
3137 return 0;
3138
3139 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3140 ;
3141 else if (*cval2 == 0)
3142 *cval2 = TREE_OPERAND (arg, 1);
3143 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3144 ;
3145 else
3146 return 0;
3147
3148 return 1;
3149
3150 default:
3151 return 0;
3152 }
3153 }
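/* Worked example (expository, not in the original source): for
   ARG == (a < b) || (a == b), twoval_comparison_p returns 1 and
   stores a in *CVAL1 and b in *CVAL2, since every comparison in the
   tree mentions only those two values.  For (a < b) || (c == d) it
   returns 0: four distinct comparison operands appear.  */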
3154 \f
3155 /* ARG is a tree that is known to contain just arithmetic operations and
3156 comparisons. Evaluate the operations in the tree substituting NEW0 for
3157 any occurrence of OLD0 as an operand of a comparison and likewise for
3158 NEW1 and OLD1. */
3159
3160 static tree
3161 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3162 {
3163 tree type = TREE_TYPE (arg);
3164 enum tree_code code = TREE_CODE (arg);
3165 enum tree_code_class class = TREE_CODE_CLASS (code);
3166
3167 /* We can handle some of the tcc_expression cases here. */
3168 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3169 class = tcc_unary;
3170 else if (class == tcc_expression
3171 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3172 class = tcc_binary;
3173
3174 switch (class)
3175 {
3176 case tcc_unary:
3177 return fold_build1 (code, type,
3178 eval_subst (TREE_OPERAND (arg, 0),
3179 old0, new0, old1, new1));
3180
3181 case tcc_binary:
3182 return fold_build2 (code, type,
3183 eval_subst (TREE_OPERAND (arg, 0),
3184 old0, new0, old1, new1),
3185 eval_subst (TREE_OPERAND (arg, 1),
3186 old0, new0, old1, new1));
3187
3188 case tcc_expression:
3189 switch (code)
3190 {
3191 case SAVE_EXPR:
3192 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3193
3194 case COMPOUND_EXPR:
3195 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3196
3197 case COND_EXPR:
3198 return fold_build3 (code, type,
3199 eval_subst (TREE_OPERAND (arg, 0),
3200 old0, new0, old1, new1),
3201 eval_subst (TREE_OPERAND (arg, 1),
3202 old0, new0, old1, new1),
3203 eval_subst (TREE_OPERAND (arg, 2),
3204 old0, new0, old1, new1));
3205 default:
3206 break;
3207 }
3208 /* Fall through - ??? */
3209
3210 case tcc_comparison:
3211 {
3212 tree arg0 = TREE_OPERAND (arg, 0);
3213 tree arg1 = TREE_OPERAND (arg, 1);
3214
3215 /* We need to check both for exact equality and tree equality. The
3216 former will be true if the operand has a side-effect. In that
3217 case, we know the operand occurred exactly once. */
3218
3219 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3220 arg0 = new0;
3221 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3222 arg0 = new1;
3223
3224 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3225 arg1 = new0;
3226 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3227 arg1 = new1;
3228
3229 return fold_build2 (code, type, arg0, arg1);
3230 }
3231
3232 default:
3233 return arg;
3234 }
3235 }
3236 \f
3237 /* Return a tree for the case when the result of an expression is RESULT
3238 converted to TYPE and OMITTED was previously an operand of the expression
3239 but is now not needed (e.g., we folded OMITTED * 0).
3240
3241 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3242 the conversion of RESULT to TYPE. */
3243
3244 tree
3245 omit_one_operand (tree type, tree result, tree omitted)
3246 {
3247 tree t = fold_convert (type, result);
3248
3249 if (TREE_SIDE_EFFECTS (omitted))
3250 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3251
3252 return non_lvalue (t);
3253 }
3254
3255 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3256
3257 static tree
3258 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3259 {
3260 tree t = fold_convert (type, result);
3261
3262 if (TREE_SIDE_EFFECTS (omitted))
3263 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3264
3265 return pedantic_non_lvalue (t);
3266 }
3267
3268 /* Return a tree for the case when the result of an expression is RESULT
3269 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3270 of the expression but are now not needed.
3271
3272 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3273 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3274 evaluated before OMITTED2. Otherwise, if neither has side effects,
3275 just do the conversion of RESULT to TYPE. */
3276
3277 tree
3278 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3279 {
3280 tree t = fold_convert (type, result);
3281
3282 if (TREE_SIDE_EFFECTS (omitted2))
3283 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3284 if (TREE_SIDE_EFFECTS (omitted1))
3285 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3286
3287 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3288 }
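/* Usage sketch (illustrative, not in the original source): when the
   folder rewrites f () * 0 to 0, a side-effecting call cannot simply
   be discarded, so

     omit_one_operand (type, integer_zero_node, call_to_f)

   (call_to_f being a hypothetical CALL_EXPR) produces the
   COMPOUND_EXPR (f (), 0), which evaluates the call and then yields
   the constant.  */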
3289
3290 \f
3291 /* Return a simplified tree node for the truth-negation of ARG. This
3292 never alters ARG itself. We assume that ARG is an operation that
3293 returns a truth value (0 or 1).
3294
3295 FIXME: one would think we would fold the result, but it causes
3296 problems with the dominator optimizer. */
3297
3298 tree
3299 fold_truth_not_expr (tree arg)
3300 {
3301 tree type = TREE_TYPE (arg);
3302 enum tree_code code = TREE_CODE (arg);
3303
3304 /* If this is a comparison, we can simply invert it, except for
3305 floating-point non-equality comparisons, in which case we just
3306 enclose a TRUTH_NOT_EXPR around what we have. */
3307
3308 if (TREE_CODE_CLASS (code) == tcc_comparison)
3309 {
3310 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3311 if (FLOAT_TYPE_P (op_type)
3312 && flag_trapping_math
3313 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3314 && code != NE_EXPR && code != EQ_EXPR)
3315 return NULL_TREE;
3316 else
3317 {
3318 code = invert_tree_comparison (code,
3319 HONOR_NANS (TYPE_MODE (op_type)));
3320 if (code == ERROR_MARK)
3321 return NULL_TREE;
3322 else
3323 return build2 (code, type,
3324 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3325 }
3326 }
3327
3328 switch (code)
3329 {
3330 case INTEGER_CST:
3331 return constant_boolean_node (integer_zerop (arg), type);
3332
3333 case TRUTH_AND_EXPR:
3334 return build2 (TRUTH_OR_EXPR, type,
3335 invert_truthvalue (TREE_OPERAND (arg, 0)),
3336 invert_truthvalue (TREE_OPERAND (arg, 1)));
3337
3338 case TRUTH_OR_EXPR:
3339 return build2 (TRUTH_AND_EXPR, type,
3340 invert_truthvalue (TREE_OPERAND (arg, 0)),
3341 invert_truthvalue (TREE_OPERAND (arg, 1)));
3342
3343 case TRUTH_XOR_EXPR:
3344 /* Here we can invert either operand. We invert the first operand
3345 unless the second operand is a TRUTH_NOT_EXPR in which case our
3346 result is the XOR of the first operand with the inside of the
3347 negation of the second operand. */
3348
3349 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3350 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3351 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3352 else
3353 return build2 (TRUTH_XOR_EXPR, type,
3354 invert_truthvalue (TREE_OPERAND (arg, 0)),
3355 TREE_OPERAND (arg, 1));
3356
3357 case TRUTH_ANDIF_EXPR:
3358 return build2 (TRUTH_ORIF_EXPR, type,
3359 invert_truthvalue (TREE_OPERAND (arg, 0)),
3360 invert_truthvalue (TREE_OPERAND (arg, 1)));
3361
3362 case TRUTH_ORIF_EXPR:
3363 return build2 (TRUTH_ANDIF_EXPR, type,
3364 invert_truthvalue (TREE_OPERAND (arg, 0)),
3365 invert_truthvalue (TREE_OPERAND (arg, 1)));
3366
3367 case TRUTH_NOT_EXPR:
3368 return TREE_OPERAND (arg, 0);
3369
3370 case COND_EXPR:
3371 {
3372 tree arg1 = TREE_OPERAND (arg, 1);
3373 tree arg2 = TREE_OPERAND (arg, 2);
3374 /* A COND_EXPR may have a throw as one operand, which
3375 then has void type. Just leave void operands
3376 as they are. */
3377 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3378 VOID_TYPE_P (TREE_TYPE (arg1))
3379 ? arg1 : invert_truthvalue (arg1),
3380 VOID_TYPE_P (TREE_TYPE (arg2))
3381 ? arg2 : invert_truthvalue (arg2));
3382 }
3383
3384 case COMPOUND_EXPR:
3385 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3386 invert_truthvalue (TREE_OPERAND (arg, 1)));
3387
3388 case NON_LVALUE_EXPR:
3389 return invert_truthvalue (TREE_OPERAND (arg, 0));
3390
3391 case NOP_EXPR:
3392 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3393 return build1 (TRUTH_NOT_EXPR, type, arg);
3394
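/* ... fall through ... */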
3395 case CONVERT_EXPR:
3396 case FLOAT_EXPR:
3397 return build1 (TREE_CODE (arg), type,
3398 invert_truthvalue (TREE_OPERAND (arg, 0)));
3399
3400 case BIT_AND_EXPR:
3401 if (!integer_onep (TREE_OPERAND (arg, 1)))
3402 break;
3403 return build2 (EQ_EXPR, type, arg,
3404 build_int_cst (type, 0));
3405
3406 case SAVE_EXPR:
3407 return build1 (TRUTH_NOT_EXPR, type, arg);
3408
3409 case CLEANUP_POINT_EXPR:
3410 return build1 (CLEANUP_POINT_EXPR, type,
3411 invert_truthvalue (TREE_OPERAND (arg, 0)));
3412
3413 default:
3414 break;
3415 }
3416
3417 return NULL_TREE;
3418 }
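/* Worked example (expository, not in the original source): the cases
   above implement De Morgan's laws and related identities, e.g.

     !(a && b)    -->  !a || !b
     !(a ? b : c) -->  a ? !b : !c
     !(x & 1)     -->  (x & 1) == 0

   each built as a fresh tree, leaving the original ARG untouched.  */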
3419
3420 /* Return a simplified tree node for the truth-negation of ARG, like
3421 fold_truth_not_expr, except that when no simplification is possible
3422 the result is ARG wrapped in a TRUTH_NOT_EXPR rather than NULL_TREE.
3423
3424 FIXME: one would think we would fold the result, but it causes
3425 problems with the dominator optimizer. */
3426
3427 tree
3428 invert_truthvalue (tree arg)
3429 {
3430 tree tem;
3431
3432 if (TREE_CODE (arg) == ERROR_MARK)
3433 return arg;
3434
3435 tem = fold_truth_not_expr (arg);
3436 if (!tem)
3437 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3438
3439 return tem;
3440 }
3441
3442 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3443 operands are another bit-wise operation with a common input. If so,
3444 distribute the bit operations to save an operation and possibly two if
3445 constants are involved. For example, convert
3446 (A | B) & (A | C) into A | (B & C)
3447 Further simplification will occur if B and C are constants.
3448
3449 If this optimization cannot be done, 0 will be returned. */
3450
3451 static tree
3452 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3453 {
3454 tree common;
3455 tree left, right;
3456
3457 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3458 || TREE_CODE (arg0) == code
3459 || (TREE_CODE (arg0) != BIT_AND_EXPR
3460 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3461 return 0;
3462
3463 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3464 {
3465 common = TREE_OPERAND (arg0, 0);
3466 left = TREE_OPERAND (arg0, 1);
3467 right = TREE_OPERAND (arg1, 1);
3468 }
3469 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3470 {
3471 common = TREE_OPERAND (arg0, 0);
3472 left = TREE_OPERAND (arg0, 1);
3473 right = TREE_OPERAND (arg1, 0);
3474 }
3475 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3476 {
3477 common = TREE_OPERAND (arg0, 1);
3478 left = TREE_OPERAND (arg0, 0);
3479 right = TREE_OPERAND (arg1, 1);
3480 }
3481 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3482 {
3483 common = TREE_OPERAND (arg0, 1);
3484 left = TREE_OPERAND (arg0, 0);
3485 right = TREE_OPERAND (arg1, 0);
3486 }
3487 else
3488 return 0;
3489
3490 return fold_build2 (TREE_CODE (arg0), type, common,
3491 fold_build2 (code, type, left, right));
3492 }
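/* Worked example (expository, not in the original source): with
   ARG0 == (x | 4) and ARG1 == (x | 6) under BIT_AND_EXPR, the common
   operand is x, so the result is x | (4 & 6), which folds further to
   x | 4 -- one bitwise operation instead of three.  */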
3493
3494 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3495 with code CODE. This optimization is unsafe. */
3496 static tree
3497 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3498 {
3499 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3500 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3501
3502 /* (A / C) +- (B / C) -> (A +- B) / C. */
3503 if (mul0 == mul1
3504 && operand_equal_p (TREE_OPERAND (arg0, 1),
3505 TREE_OPERAND (arg1, 1), 0))
3506 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3507 fold_build2 (code, type,
3508 TREE_OPERAND (arg0, 0),
3509 TREE_OPERAND (arg1, 0)),
3510 TREE_OPERAND (arg0, 1));
3511
3512 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3513 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3514 TREE_OPERAND (arg1, 0), 0)
3515 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3516 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3517 {
3518 REAL_VALUE_TYPE r0, r1;
3519 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3520 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3521 if (!mul0)
3522 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3523 if (!mul1)
3524 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3525 real_arithmetic (&r0, code, &r0, &r1);
3526 return fold_build2 (MULT_EXPR, type,
3527 TREE_OPERAND (arg0, 0),
3528 build_real (type, r0));
3529 }
3530
3531 return NULL_TREE;
3532 }
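/* Worked example (expository, not in the original source, and valid
   only because this transformation is explicitly unsafe math): by the
   second rule above, a/2.0 - a/4.0 becomes a * (1.0/2.0 - 1.0/4.0)
   == a * 0.25, trading two divisions for a single multiplication;
   here the constants happen to be exact in binary floating point.  */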
3533 \f
3534 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3535 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3536
3537 static tree
3538 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3539 int unsignedp)
3540 {
3541 tree result;
3542
3543 if (bitpos == 0)
3544 {
3545 tree size = TYPE_SIZE (TREE_TYPE (inner));
3546 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3547 || POINTER_TYPE_P (TREE_TYPE (inner)))
3548 && host_integerp (size, 0)
3549 && tree_low_cst (size, 0) == bitsize)
3550 return fold_convert (type, inner);
3551 }
3552
3553 result = build3 (BIT_FIELD_REF, type, inner,
3554 size_int (bitsize), bitsize_int (bitpos));
3555
3556 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3557
3558 return result;
3559 }
3560
3561 /* Optimize a bit-field compare.
3562
3563 There are two cases: First is a compare against a constant and the
3564 second is a comparison of two items where the fields are at the same
3565 bit position relative to the start of a chunk (byte, halfword, word)
3566 large enough to contain it. In these cases we can avoid the shift
3567 implicit in bitfield extractions.
3568
3569 For constants, we emit a compare of the shifted constant with the
3570 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3571 compared. For two fields at the same position, we do the ANDs with the
3572 similar mask and compare the result of the ANDs.
3573
3574 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3575 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3576 are the left and right operands of the comparison, respectively.
3577
3578 If the optimization described above can be done, we return the resulting
3579 tree. Otherwise we return zero. */
3580
3581 static tree
3582 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3583 tree lhs, tree rhs)
3584 {
3585 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3586 tree type = TREE_TYPE (lhs);
3587 tree signed_type, unsigned_type;
3588 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3589 enum machine_mode lmode, rmode, nmode;
3590 int lunsignedp, runsignedp;
3591 int lvolatilep = 0, rvolatilep = 0;
3592 tree linner, rinner = NULL_TREE;
3593 tree mask;
3594 tree offset;
3595
3596 /* Get all the information about the extractions being done. If the bit size
3597 is the same as the size of the underlying object, we aren't doing an
3598 extraction at all and so can do nothing. We also don't want to
3599 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3600 then will no longer be able to replace it. */
3601 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3602 &lunsignedp, &lvolatilep, false);
3603 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3604 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3605 return 0;
3606
3607 if (!const_p)
3608 {
3609 /* If this is not a constant, we can only do something if bit positions,
3610 sizes, and signedness are the same. */
3611 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3612 &runsignedp, &rvolatilep, false);
3613
3614 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3615 || lunsignedp != runsignedp || offset != 0
3616 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3617 return 0;
3618 }
3619
3620 /* See if we can find a mode to refer to this field. We should be able to,
3621 but fail if we can't. */
3622 nmode = get_best_mode (lbitsize, lbitpos,
3623 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3624 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3625 TYPE_ALIGN (TREE_TYPE (rinner))),
3626 word_mode, lvolatilep || rvolatilep);
3627 if (nmode == VOIDmode)
3628 return 0;
3629
3630 /* Set signed and unsigned types of the precision of this mode for the
3631 shifts below. */
3632 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3633 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3634
3635 /* Compute the bit position and size for the new reference and our offset
3636 within it. If the new reference is the same size as the original, we
3637 won't optimize anything, so return zero. */
3638 nbitsize = GET_MODE_BITSIZE (nmode);
3639 nbitpos = lbitpos & ~ (nbitsize - 1);
3640 lbitpos -= nbitpos;
3641 if (nbitsize == lbitsize)
3642 return 0;
3643
3644 if (BYTES_BIG_ENDIAN)
3645 lbitpos = nbitsize - lbitsize - lbitpos;
3646
3647 /* Make the mask to be used against the extracted field. */
3648 mask = build_int_cst_type (unsigned_type, -1);
3649 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3650 mask = const_binop (RSHIFT_EXPR, mask,
3651 size_int (nbitsize - lbitsize - lbitpos), 0);
3652
3653 if (! const_p)
3654 /* If not comparing with constant, just rework the comparison
3655 and return. */
3656 return fold_build2 (code, compare_type,
3657 fold_build2 (BIT_AND_EXPR, unsigned_type,
3658 make_bit_field_ref (linner,
3659 unsigned_type,
3660 nbitsize, nbitpos,
3661 1),
3662 mask),
3663 fold_build2 (BIT_AND_EXPR, unsigned_type,
3664 make_bit_field_ref (rinner,
3665 unsigned_type,
3666 nbitsize, nbitpos,
3667 1),
3668 mask));
3669
3670 /* Otherwise, we are handling the constant case. See if the constant is too
3671 big for the field. Warn and return a tree for 0 (false) if so. We do
3672 this not only for its own sake, but to avoid having to test for this
3673 error case below. If we didn't, we might generate wrong code.
3674
3675 For unsigned fields, the constant shifted right by the field length should
3676 be all zero. For signed fields, the high-order bits should agree with
3677 the sign bit. */
3678
3679 if (lunsignedp)
3680 {
3681 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3682 fold_convert (unsigned_type, rhs),
3683 size_int (lbitsize), 0)))
3684 {
3685 warning (0, "comparison is always %d due to width of bit-field",
3686 code == NE_EXPR);
3687 return constant_boolean_node (code == NE_EXPR, compare_type);
3688 }
3689 }
3690 else
3691 {
3692 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3693 size_int (lbitsize - 1), 0);
3694 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3695 {
3696 warning (0, "comparison is always %d due to width of bit-field",
3697 code == NE_EXPR);
3698 return constant_boolean_node (code == NE_EXPR, compare_type);
3699 }
3700 }
3701
3702 /* Single-bit compares should always be against zero. */
3703 if (lbitsize == 1 && ! integer_zerop (rhs))
3704 {
3705 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3706 rhs = build_int_cst (type, 0);
3707 }
3708
3709 /* Make a new bitfield reference, shift the constant over the
3710 appropriate number of bits and mask it with the computed mask
3711 (in case this was a signed field). If we changed it, make a new one. */
3712 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3713 if (lvolatilep)
3714 {
3715 TREE_SIDE_EFFECTS (lhs) = 1;
3716 TREE_THIS_VOLATILE (lhs) = 1;
3717 }
3718
3719 rhs = const_binop (BIT_AND_EXPR,
3720 const_binop (LSHIFT_EXPR,
3721 fold_convert (unsigned_type, rhs),
3722 size_int (lbitpos), 0),
3723 mask, 0);
3724
3725 return build2 (code, compare_type,
3726 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3727 rhs);
3728 }
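/* A minimal stand-alone sketch of the constant case above, kept under
   #if 0 since it is not compiled as part of this file.  The struct
   layout and little-endian bit numbering are assumptions made for the
   illustration only.  */
#if 0
#include <assert.h>
#include <string.h>

struct s { unsigned int pad : 3, f : 4, rest : 25; };

/* The folded form of "p->f == 5": fetch the containing word, AND it
   with the field mask, and compare against the constant shifted into
   the field's position.  */
static int
field_eq_5 (const struct s *p)
{
  unsigned int word, mask = ((1u << 4) - 1) << 3;
  memcpy (&word, p, sizeof word);
  return (word & mask) == (5u << 3);
}

int
main (void)
{
  struct s x = { 0, 5, 0 };
  assert (field_eq_5 (&x) == (x.f == 5));
  x.f = 6;
  assert (field_eq_5 (&x) == (x.f == 5));
  return 0;
}
#endif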
3729 \f
3730 /* Subroutine for fold_truthop: decode a field reference.
3731
3732 If EXP is a comparison reference, we return the innermost reference.
3733
3734 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3735 set to the starting bit number.
3736
3737 If the innermost field can be completely contained in a mode-sized
3738 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3739
3740 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3741 otherwise it is not changed.
3742
3743 *PUNSIGNEDP is set to the signedness of the field.
3744
3745 *PMASK is set to the mask used. This is either contained in a
3746 BIT_AND_EXPR or derived from the width of the field.
3747
3748 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3749
3750 Return 0 if this is not a component reference or is one that we can't
3751 do anything with. */
3752
3753 static tree
3754 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3755 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3756 int *punsignedp, int *pvolatilep,
3757 tree *pmask, tree *pand_mask)
3758 {
3759 tree outer_type = 0;
3760 tree and_mask = 0;
3761 tree mask, inner, offset;
3762 tree unsigned_type;
3763 unsigned int precision;
3764
3765 /* All the optimizations using this function assume integer fields.
3766 There are problems with FP fields since the type_for_size call
3767 below can fail for, e.g., XFmode. */
3768 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3769 return 0;
3770
3771 /* We are interested in the bare arrangement of bits, so strip everything
3772 that doesn't affect the machine mode. However, record the type of the
3773 outermost expression if it may matter below. */
3774 if (TREE_CODE (exp) == NOP_EXPR
3775 || TREE_CODE (exp) == CONVERT_EXPR
3776 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3777 outer_type = TREE_TYPE (exp);
3778 STRIP_NOPS (exp);
3779
3780 if (TREE_CODE (exp) == BIT_AND_EXPR)
3781 {
3782 and_mask = TREE_OPERAND (exp, 1);
3783 exp = TREE_OPERAND (exp, 0);
3784 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3785 if (TREE_CODE (and_mask) != INTEGER_CST)
3786 return 0;
3787 }
3788
3789 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3790 punsignedp, pvolatilep, false);
3791 if ((inner == exp && and_mask == 0)
3792 || *pbitsize < 0 || offset != 0
3793 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3794 return 0;
3795
3796 /* If the number of bits in the reference is the same as the bitsize of
3797 the outer type, then the outer type gives the signedness. Otherwise
3798 (in case of a small bitfield) the signedness is unchanged. */
3799 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3800 *punsignedp = TYPE_UNSIGNED (outer_type);
3801
3802 /* Compute the mask to access the bitfield. */
3803 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3804 precision = TYPE_PRECISION (unsigned_type);
3805
3806 mask = build_int_cst_type (unsigned_type, -1);
3807
3808 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3809 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3810
3811 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3812 if (and_mask != 0)
3813 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3814 fold_convert (unsigned_type, and_mask), mask);
3815
3816 *pmask = mask;
3817 *pand_mask = and_mask;
3818 return inner;
3819 }
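/* A small sketch of the mask computation above for a hypothetical
   5-bit field whose unsigned type has 8-bit precision, combined with
   an explicit BIT_AND_EXPR mask; kept under #if 0, not compiled
   here.  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned int precision = 8, bitsize = 5;
  /* All-ones shifted up and back down leaves BITSIZE low-order ones.  */
  unsigned int width_mask = (0xffu << (precision - bitsize)) & 0xffu;
  width_mask >>= precision - bitsize;
  assert (width_mask == 0x1f);
  /* Merge with the mask found in the BIT_AND_EXPR, as done for *PMASK.  */
  unsigned int and_mask = 0x19;
  assert ((width_mask & and_mask) == 0x19);
  return 0;
}
#endif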
3820
3821 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3822 bit positions. */
3823
3824 static int
3825 all_ones_mask_p (tree mask, int size)
3826 {
3827 tree type = TREE_TYPE (mask);
3828 unsigned int precision = TYPE_PRECISION (type);
3829 tree tmask;
3830
3831 tmask = build_int_cst_type (lang_hooks.types.signed_type (type), -1);
3832
3833 return
3834 tree_int_cst_equal (mask,
3835 const_binop (RSHIFT_EXPR,
3836 const_binop (LSHIFT_EXPR, tmask,
3837 size_int (precision - size),
3838 0),
3839 size_int (precision - size), 0));
3840 }
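/* The shift pair above in stand-alone form, kept under #if 0: a mask
   of SIZE low-order ones survives the round trip unchanged, anything
   else does not.  Unsigned shifts are used purely for illustration.  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned int precision = 32;
  int size = 6;
  unsigned int round_trip = (~0u << (precision - size)) >> (precision - size);
  assert (round_trip == 0x3fu);	/* six low-order ones */
  assert (round_trip != 0x2fu);	/* any other mask fails the test */
  return 0;
}
#endif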
3841
3842 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3843 represents the sign bit of EXP's type. If EXP represents a sign
3844 or zero extension, also test VAL against the unextended type.
3845 The return value is the (sub)expression whose sign bit is VAL,
3846 or NULL_TREE otherwise. */
3847
3848 static tree
3849 sign_bit_p (tree exp, tree val)
3850 {
3851 unsigned HOST_WIDE_INT mask_lo, lo;
3852 HOST_WIDE_INT mask_hi, hi;
3853 int width;
3854 tree t;
3855
3856 /* Tree EXP must have an integral type. */
3857 t = TREE_TYPE (exp);
3858 if (! INTEGRAL_TYPE_P (t))
3859 return NULL_TREE;
3860
3861 /* Tree VAL must be an integer constant. */
3862 if (TREE_CODE (val) != INTEGER_CST
3863 || TREE_OVERFLOW (val))
3864 return NULL_TREE;
3865
3866 width = TYPE_PRECISION (t);
3867 if (width > HOST_BITS_PER_WIDE_INT)
3868 {
3869 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3870 lo = 0;
3871
3872 mask_hi = ((unsigned HOST_WIDE_INT) -1
3873 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3874 mask_lo = -1;
3875 }
3876 else
3877 {
3878 hi = 0;
3879 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3880
3881 mask_hi = 0;
3882 mask_lo = ((unsigned HOST_WIDE_INT) -1
3883 >> (HOST_BITS_PER_WIDE_INT - width));
3884 }
3885
3886 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3887 treat VAL as if it were unsigned. */
3888 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3889 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3890 return exp;
3891
3892 /* Handle extension from a narrower type. */
3893 if (TREE_CODE (exp) == NOP_EXPR
3894 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3895 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3896
3897 return NULL_TREE;
3898 }
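/* A sketch of the narrow branch above, assuming a 64-bit
   HOST_WIDE_INT and a 32-bit type; kept under #if 0.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  int width = 32;
  uint64_t lo = (uint64_t) 1 << (width - 1);		/* the sign bit */
  uint64_t mask_lo = ~(uint64_t) 0 >> (64 - width);	/* bits of the type */
  uint64_t val = 0x80000000u;				/* 1 << 31 */
  /* VAL is exactly the sign bit of the 32-bit type ...  */
  assert ((val & mask_lo) == lo);
  /* ... and any other set bit disqualifies it.  */
  assert (((val | 1) & mask_lo) != lo);
  return 0;
}
#endif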
3899
3900 /* Subroutine for fold_truthop: determine if an operand is simple enough
3901 to be evaluated unconditionally. */
3902
3903 static int
3904 simple_operand_p (tree exp)
3905 {
3906 /* Strip any conversions that don't change the machine mode. */
3907 STRIP_NOPS (exp);
3908
3909 return (CONSTANT_CLASS_P (exp)
3910 || TREE_CODE (exp) == SSA_NAME
3911 || (DECL_P (exp)
3912 && ! TREE_ADDRESSABLE (exp)
3913 && ! TREE_THIS_VOLATILE (exp)
3914 && ! DECL_NONLOCAL (exp)
3915 /* Don't regard global variables as simple. They may be
3916 allocated in ways unknown to the compiler (shared memory,
3917 #pragma weak, etc). */
3918 && ! TREE_PUBLIC (exp)
3919 && ! DECL_EXTERNAL (exp)
3920 /* Loading a static variable is unduly expensive, but global
3921 registers aren't expensive. */
3922 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3923 }
3924 \f
3925 /* The following functions are subroutines to fold_range_test and allow it to
3926 try to change a logical combination of comparisons into a range test.
3927
3928 For example, both
3929 X == 2 || X == 3 || X == 4 || X == 5
3930 and
3931 X >= 2 && X <= 5
3932 are converted to
3933 (unsigned) (X - 2) <= 3
3934
3935 We describe each set of comparisons as being either inside or outside
3936 a range, using a variable named like IN_P, and then describe the
3937 range with a lower and upper bound. If one of the bounds is omitted,
3938 it represents either the highest or lowest value of the type.
3939
3940 In the comments below, we represent a range by two numbers in brackets
3941 preceded by a "+" to designate being inside that range, or a "-" to
3942 designate being outside that range, so the condition can be inverted by
3943 flipping the prefix. An omitted bound is represented by a "-". For
3944 example, "- [-, 10]" means being outside the range starting at the lowest
3945 possible value and ending at 10, in other words, being greater than 10.
3946 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3947 always false.
3948
3949 We set up things so that the missing bounds are handled in a consistent
3950 manner so neither a missing bound nor "true" and "false" need to be
3951 handled using a special case. */
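/* The example above, checked exhaustively over a small domain; a
   stand-alone sketch kept under #if 0.  The unsigned subtraction
   wraps negative values far above 3, so one comparison covers the
   whole range.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int x = -128; x <= 127; x++)
    {
      int chain = (x == 2 || x == 3 || x == 4 || x == 5);
      int range = ((unsigned) (x - 2) <= 3);
      assert (chain == range);
    }
  return 0;
}
#endif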
3952
3953 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3954 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3955 and UPPER1_P are nonzero if the respective argument is an upper bound
3956 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3957 must be specified for a comparison. ARG1 will be converted to ARG0's
3958 type if both are specified. */
3959
3960 static tree
3961 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3962 tree arg1, int upper1_p)
3963 {
3964 tree tem;
3965 int result;
3966 int sgn0, sgn1;
3967
3968 /* If neither arg represents infinity, do the normal operation.
3969 Else, if not a comparison, return infinity. Else handle the special
3970 comparison rules. Note that most of the cases below won't occur, but
3971 are handled for consistency. */
3972
3973 if (arg0 != 0 && arg1 != 0)
3974 {
3975 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3976 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3977 STRIP_NOPS (tem);
3978 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3979 }
3980
3981 if (TREE_CODE_CLASS (code) != tcc_comparison)
3982 return 0;
3983
3984 /* Set SGN[01] to -1 if ARG[01] is a missing lower bound, 1 if it is a
3985 missing upper bound, and 0 if it is a finite value. In real maths, we
3986 cannot assume open-ended ranges are the same. But this is computer
3987 arithmetic, where numbers are finite. We can therefore stand in for any
3988 unbounded end with a value Z greater in magnitude than any representable
3989 number, which permits us to treat unbounded ends as comparable. */
3990 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3991 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3992 switch (code)
3993 {
3994 case EQ_EXPR:
3995 result = sgn0 == sgn1;
3996 break;
3997 case NE_EXPR:
3998 result = sgn0 != sgn1;
3999 break;
4000 case LT_EXPR:
4001 result = sgn0 < sgn1;
4002 break;
4003 case LE_EXPR:
4004 result = sgn0 <= sgn1;
4005 break;
4006 case GT_EXPR:
4007 result = sgn0 > sgn1;
4008 break;
4009 case GE_EXPR:
4010 result = sgn0 >= sgn1;
4011 break;
4012 default:
4013 gcc_unreachable ();
4014 }
4015
4016 return constant_boolean_node (result, type);
4017 }
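/* A sketch of the SGN encoding above, kept under #if 0: a missing
   lower bound compares below everything, a missing upper bound above
   everything, and a finite value sits in between.  */
#if 0
#include <assert.h>

/* -1: missing lower bound, 0: finite value, 1: missing upper bound.  */
static int
bound_lt (int sgn0, int sgn1)
{
  return sgn0 < sgn1;
}

int
main (void)
{
  assert (bound_lt (-1, 0));	/* -infinity < any finite bound */
  assert (bound_lt (0, 1));	/* any finite bound < +infinity */
  assert (!bound_lt (1, 1));	/* two missing upper bounds are equal */
  return 0;
}
#endif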
4018 \f
4019 /* Given EXP, a logical expression, set the range it is testing into
4020 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4021 actually being tested. *PLOW and *PHIGH will be made of the same
4022 type as the returned expression. If EXP is not a comparison, we
4023 will most likely not be returning a useful value and range. Set
4024 *STRICT_OVERFLOW_P to true if the return value is only valid
4025 because signed overflow is undefined; otherwise, do not change
4026 *STRICT_OVERFLOW_P. */
4027
4028 static tree
4029 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4030 bool *strict_overflow_p)
4031 {
4032 enum tree_code code;
4033 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4034 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4035 int in_p, n_in_p;
4036 tree low, high, n_low, n_high;
4037
4038 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4039 and see if we can refine the range. Some of the cases below may not
4040 happen, but it doesn't seem worth worrying about this. We "continue"
4041 the outer loop when we've changed something; otherwise we "break"
4042 the switch, which will "break" the while. */
4043
4044 in_p = 0;
4045 low = high = build_int_cst (TREE_TYPE (exp), 0);
4046
4047 while (1)
4048 {
4049 code = TREE_CODE (exp);
4050 exp_type = TREE_TYPE (exp);
4051
4052 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4053 {
4054 if (TREE_OPERAND_LENGTH (exp) > 0)
4055 arg0 = TREE_OPERAND (exp, 0);
4056 if (TREE_CODE_CLASS (code) == tcc_comparison
4057 || TREE_CODE_CLASS (code) == tcc_unary
4058 || TREE_CODE_CLASS (code) == tcc_binary)
4059 arg0_type = TREE_TYPE (arg0);
4060 if (TREE_CODE_CLASS (code) == tcc_binary
4061 || TREE_CODE_CLASS (code) == tcc_comparison
4062 || (TREE_CODE_CLASS (code) == tcc_expression
4063 && TREE_OPERAND_LENGTH (exp) > 1))
4064 arg1 = TREE_OPERAND (exp, 1);
4065 }
4066
4067 switch (code)
4068 {
4069 case TRUTH_NOT_EXPR:
4070 in_p = ! in_p, exp = arg0;
4071 continue;
4072
4073 case EQ_EXPR: case NE_EXPR:
4074 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4075 /* We can only do something if the range is testing for zero
4076 and if the second operand is an integer constant. Note that
4077 saying something is "in" the range we make is done by
4078 complementing IN_P, since the initial state of being not equal
4079 to zero leaves IN_P clear; "out" is leaving it alone. */
4080 if (low == 0 || high == 0
4081 || ! integer_zerop (low) || ! integer_zerop (high)
4082 || TREE_CODE (arg1) != INTEGER_CST)
4083 break;
4084
4085 switch (code)
4086 {
4087 case NE_EXPR: /* - [c, c] */
4088 low = high = arg1;
4089 break;
4090 case EQ_EXPR: /* + [c, c] */
4091 in_p = ! in_p, low = high = arg1;
4092 break;
4093 case GT_EXPR: /* - [-, c] */
4094 low = 0, high = arg1;
4095 break;
4096 case GE_EXPR: /* + [c, -] */
4097 in_p = ! in_p, low = arg1, high = 0;
4098 break;
4099 case LT_EXPR: /* - [c, -] */
4100 low = arg1, high = 0;
4101 break;
4102 case LE_EXPR: /* + [-, c] */
4103 in_p = ! in_p, low = 0, high = arg1;
4104 break;
4105 default:
4106 gcc_unreachable ();
4107 }
4108
4109 /* If this is an unsigned comparison, we also know that EXP is
4110 greater than or equal to zero. We base the range tests we make
4111 on that fact, so we record it here so we can parse existing
4112 range tests. We test arg0_type since often the return type
4113 of, e.g. EQ_EXPR, is boolean. */
4114 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4115 {
4116 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4117 in_p, low, high, 1,
4118 build_int_cst (arg0_type, 0),
4119 NULL_TREE))
4120 break;
4121
4122 in_p = n_in_p, low = n_low, high = n_high;
4123
4124 /* If the high bound is missing, but we have a nonzero low
4125 bound, reverse the range so it goes from zero to the low bound
4126 minus 1. */
4127 if (high == 0 && low && ! integer_zerop (low))
4128 {
4129 in_p = ! in_p;
4130 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4131 integer_one_node, 0);
4132 low = build_int_cst (arg0_type, 0);
4133 }
4134 }
4135
4136 exp = arg0;
4137 continue;
4138
4139 case NEGATE_EXPR:
4140 /* (-x) IN [a,b] -> x in [-b, -a] */
4141 n_low = range_binop (MINUS_EXPR, exp_type,
4142 build_int_cst (exp_type, 0),
4143 0, high, 1);
4144 n_high = range_binop (MINUS_EXPR, exp_type,
4145 build_int_cst (exp_type, 0),
4146 0, low, 0);
4147 low = n_low, high = n_high;
4148 exp = arg0;
4149 continue;
4150
4151 case BIT_NOT_EXPR:
4152 /* ~ X -> -X - 1 */
4153 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4154 build_int_cst (exp_type, 1));
4155 continue;
4156
4157 case PLUS_EXPR: case MINUS_EXPR:
4158 if (TREE_CODE (arg1) != INTEGER_CST)
4159 break;
4160
4161 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4162 move a constant to the other side. */
4163 if (!TYPE_UNSIGNED (arg0_type)
4164 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4165 break;
4166
4167 /* If EXP is signed, any overflow in the computation is undefined,
4168 so we don't worry about it so long as our computations on
4169 the bounds don't overflow. For unsigned, overflow is defined
4170 and this is exactly the right thing. */
4171 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4172 arg0_type, low, 0, arg1, 0);
4173 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4174 arg0_type, high, 1, arg1, 0);
4175 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4176 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4177 break;
4178
4179 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4180 *strict_overflow_p = true;
4181
4182 /* Check for an unsigned range which has wrapped around the maximum
4183 value thus making n_high < n_low, and normalize it. */
4184 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4185 {
4186 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4187 integer_one_node, 0);
4188 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4189 integer_one_node, 0);
4190
4191 /* If the range is of the form +/- [ x+1, x ], we won't
4192 be able to normalize it. But then, it represents the
4193 whole range or the empty set, so make it
4194 +/- [ -, - ]. */
4195 if (tree_int_cst_equal (n_low, low)
4196 && tree_int_cst_equal (n_high, high))
4197 low = high = 0;
4198 else
4199 in_p = ! in_p;
4200 }
4201 else
4202 low = n_low, high = n_high;
4203
4204 exp = arg0;
4205 continue;
4206
4207 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4208 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4209 break;
4210
4211 if (! INTEGRAL_TYPE_P (arg0_type)
4212 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4213 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4214 break;
4215
4216 n_low = low, n_high = high;
4217
4218 if (n_low != 0)
4219 n_low = fold_convert (arg0_type, n_low);
4220
4221 if (n_high != 0)
4222 n_high = fold_convert (arg0_type, n_high);
4223
4224
4225 /* If we're converting ARG0 from an unsigned type to EXP's
4226 signed type, we will be doing the comparison as unsigned.
4227 The tests above have already verified that LOW and HIGH
4228 are both positive.
4229
4230 So we have to ensure that we will handle large unsigned
4231 values the same way that the current signed bounds treat
4232 negative values. */
4233
4234 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4235 {
4236 tree high_positive;
4237 tree equiv_type = lang_hooks.types.type_for_mode
4238 (TYPE_MODE (arg0_type), 1);
4239
4240 /* A range without an upper bound is, naturally, unbounded.
4241 Since convert would have cropped a very large value, use
4242 the max value for the destination type. */
4243 high_positive
4244 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4245 : TYPE_MAX_VALUE (arg0_type);
4246
4247 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4248 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4249 fold_convert (arg0_type,
4250 high_positive),
4251 build_int_cst (arg0_type, 1));
4252
4253 /* If the low bound is specified, "and" the range with the
4254 range for which the original unsigned value will be
4255 positive. */
4256 if (low != 0)
4257 {
4258 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4259 1, n_low, n_high, 1,
4260 fold_convert (arg0_type,
4261 integer_zero_node),
4262 high_positive))
4263 break;
4264
4265 in_p = (n_in_p == in_p);
4266 }
4267 else
4268 {
4269 /* Otherwise, "or" the range with the range of the input
4270 that will be interpreted as negative. */
4271 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4272 0, n_low, n_high, 1,
4273 fold_convert (arg0_type,
4274 integer_zero_node),
4275 high_positive))
4276 break;
4277
4278 in_p = (in_p != n_in_p);
4279 }
4280 }
4281
4282 exp = arg0;
4283 low = n_low, high = n_high;
4284 continue;
4285
4286 default:
4287 break;
4288 }
4289
4290 break;
4291 }
4292
4293 /* If EXP is a constant, we can evaluate whether this is true or false. */
4294 if (TREE_CODE (exp) == INTEGER_CST)
4295 {
4296 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4297 exp, 0, low, 0))
4298 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4299 exp, 1, high, 1)));
4300 low = high = 0;
4301 exp = 0;
4302 }
4303
4304 *pin_p = in_p, *plow = low, *phigh = high;
4305 return exp;
4306 }
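/* A sketch of the PLUS_EXPR case above, kept under #if 0: the
   constant moves across to the bounds, which is justified by signed
   overflow being undefined; the check stays within values where no
   overflow occurs.  */
#if 0
#include <assert.h>

int
main (void)
{
  /* "x + 10 <= 29" describes the range + [-, 29] on x + 10, which
     make_range turns into + [-, 19] on x itself.  */
  for (int x = -1000; x <= 1000; x++)
    assert ((x + 10 <= 29) == (x <= 19));
  return 0;
}
#endif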
4307 \f
4308 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4309 type, TYPE, return an expression to test if EXP is in (or out of, depending
4310 on IN_P) the range. Return 0 if the test couldn't be created. */
4311
4312 static tree
4313 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4314 {
4315 tree etype = TREE_TYPE (exp);
4316 tree value;
4317
4318 #ifdef HAVE_canonicalize_funcptr_for_compare
4319 /* Disable this optimization for function pointer expressions
4320 on targets that require function pointer canonicalization. */
4321 if (HAVE_canonicalize_funcptr_for_compare
4322 && TREE_CODE (etype) == POINTER_TYPE
4323 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4324 return NULL_TREE;
4325 #endif
4326
4327 if (! in_p)
4328 {
4329 value = build_range_check (type, exp, 1, low, high);
4330 if (value != 0)
4331 return invert_truthvalue (value);
4332
4333 return 0;
4334 }
4335
4336 if (low == 0 && high == 0)
4337 return build_int_cst (type, 1);
4338
4339 if (low == 0)
4340 return fold_build2 (LE_EXPR, type, exp,
4341 fold_convert (etype, high));
4342
4343 if (high == 0)
4344 return fold_build2 (GE_EXPR, type, exp,
4345 fold_convert (etype, low));
4346
4347 if (operand_equal_p (low, high, 0))
4348 return fold_build2 (EQ_EXPR, type, exp,
4349 fold_convert (etype, low));
4350
4351 if (integer_zerop (low))
4352 {
4353 if (! TYPE_UNSIGNED (etype))
4354 {
4355 etype = lang_hooks.types.unsigned_type (etype);
4356 high = fold_convert (etype, high);
4357 exp = fold_convert (etype, exp);
4358 }
4359 return build_range_check (type, exp, 1, 0, high);
4360 }
4361
4362 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4363 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4364 {
4365 unsigned HOST_WIDE_INT lo;
4366 HOST_WIDE_INT hi;
4367 int prec;
4368
4369 prec = TYPE_PRECISION (etype);
4370 if (prec <= HOST_BITS_PER_WIDE_INT)
4371 {
4372 hi = 0;
4373 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4374 }
4375 else
4376 {
4377 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4378 lo = (unsigned HOST_WIDE_INT) -1;
4379 }
4380
4381 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4382 {
4383 if (TYPE_UNSIGNED (etype))
4384 {
4385 etype = lang_hooks.types.signed_type (etype);
4386 exp = fold_convert (etype, exp);
4387 }
4388 return fold_build2 (GT_EXPR, type, exp,
4389 build_int_cst (etype, 0));
4390 }
4391 }
4392
4393 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4394 This requires wrap-around arithmetic for the type of the expression. */
4395 switch (TREE_CODE (etype))
4396 {
4397 case INTEGER_TYPE:
4398 /* There is no requirement that LOW be within the range of ETYPE
4399 if the latter is a subtype. It must, however, be within the base
4400 type of ETYPE. So be sure we do the subtraction in that type. */
4401 if (TREE_TYPE (etype))
4402 etype = TREE_TYPE (etype);
4403 break;
4404
4405 case ENUMERAL_TYPE:
4406 case BOOLEAN_TYPE:
4407 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4408 TYPE_UNSIGNED (etype));
4409 break;
4410
4411 default:
4412 break;
4413 }
4414
4415 /* If we don't have wrap-around arithmetic up front, try to force it. */
4416 if (TREE_CODE (etype) == INTEGER_TYPE
4417 && !TYPE_OVERFLOW_WRAPS (etype))
4418 {
4419 tree utype, minv, maxv;
4420
4421 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4422 for the type in question, as we rely on this here. */
4423 utype = lang_hooks.types.unsigned_type (etype);
4424 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4425 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4426 integer_one_node, 1);
4427 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4428
4429 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4430 minv, 1, maxv, 1)))
4431 etype = utype;
4432 else
4433 return 0;
4434 }
4435
4436 high = fold_convert (etype, high);
4437 low = fold_convert (etype, low);
4438 exp = fold_convert (etype, exp);
4439
4440 value = const_binop (MINUS_EXPR, high, low, 0);
4441
4442 if (value != 0 && !TREE_OVERFLOW (value))
4443 return build_range_check (type,
4444 fold_build2 (MINUS_EXPR, etype, exp, low),
4445 1, build_int_cst (etype, 0), value);
4446
4447 return 0;
4448 }
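/* A stand-alone check of the (c>=1) && (c<=127) special case above,
   kept under #if 0.  The conversion of values above 127 to signed
   char is assumed to wrap to negative values, as on the usual two's
   complement targets.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int i = 0; i <= 255; i++)
    {
      unsigned char c = (unsigned char) i;
      int range = (c >= 1 && c <= 127);
      int folded = ((signed char) c > 0);
      assert (range == folded);
    }
  return 0;
}
#endif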
4449 \f
4450 /* Return the predecessor of VAL in its type, handling the infinite case. */
4451
4452 static tree
4453 range_predecessor (tree val)
4454 {
4455 tree type = TREE_TYPE (val);
4456
4457 if (INTEGRAL_TYPE_P (type)
4458 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4459 return 0;
4460 else
4461 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4462 }
4463
4464 /* Return the successor of VAL in its type, handling the infinite case. */
4465
4466 static tree
4467 range_successor (tree val)
4468 {
4469 tree type = TREE_TYPE (val);
4470
4471 if (INTEGRAL_TYPE_P (type)
4472 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4473 return 0;
4474 else
4475 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4476 }
4477
4478 /* Given two ranges, see if we can merge them into one. Return 1 if we
4479 can, 0 if we can't. Set the output range into the specified parameters. */
4480
4481 static int
4482 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4483 tree high0, int in1_p, tree low1, tree high1)
4484 {
4485 int no_overlap;
4486 int subset;
4487 int temp;
4488 tree tem;
4489 int in_p;
4490 tree low, high;
4491 int lowequal = ((low0 == 0 && low1 == 0)
4492 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4493 low0, 0, low1, 0)));
4494 int highequal = ((high0 == 0 && high1 == 0)
4495 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4496 high0, 1, high1, 1)));
4497
4498 /* Make range 0 be the range that starts first, or ends last if they
4499 start at the same value. Swap them if that isn't already the case. */
4500 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4501 low0, 0, low1, 0))
4502 || (lowequal
4503 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4504 high1, 1, high0, 1))))
4505 {
4506 temp = in0_p, in0_p = in1_p, in1_p = temp;
4507 tem = low0, low0 = low1, low1 = tem;
4508 tem = high0, high0 = high1, high1 = tem;
4509 }
4510
4511 /* Now flag two cases, whether the ranges are disjoint or whether the
4512 second range is totally subsumed in the first. Note that the tests
4513 below are simplified by the ones above. */
4514 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4515 high0, 1, low1, 0));
4516 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4517 high1, 1, high0, 1));
4518
4519 /* We now have four cases, depending on whether we are including or
4520 excluding the two ranges. */
4521 if (in0_p && in1_p)
4522 {
4523 /* If they don't overlap, the result is false. If the second range
4524 is a subset it is the result. Otherwise, the range is from the start
4525 of the second to the end of the first. */
4526 if (no_overlap)
4527 in_p = 0, low = high = 0;
4528 else if (subset)
4529 in_p = 1, low = low1, high = high1;
4530 else
4531 in_p = 1, low = low1, high = high0;
4532 }
4533
4534 else if (in0_p && ! in1_p)
4535 {
4536 /* If they don't overlap, the result is the first range. If they are
4537 equal, the result is false. If the second range is a subset of the
4538 first, and the ranges begin at the same place, we go from just after
4539 the end of the second range to the end of the first. If the second
4540 range is not a subset of the first, or if it is a subset and both
4541 ranges end at the same place, the range starts at the start of the
4542 first range and ends just before the second range.
4543 Otherwise, we can't describe this as a single range. */
4544 if (no_overlap)
4545 in_p = 1, low = low0, high = high0;
4546 else if (lowequal && highequal)
4547 in_p = 0, low = high = 0;
4548 else if (subset && lowequal)
4549 {
4550 low = range_successor (high1);
4551 high = high0;
4552 in_p = (low != 0);
4553 }
4554 else if (! subset || highequal)
4555 {
4556 low = low0;
4557 high = range_predecessor (low1);
4558 in_p = (high != 0);
4559 }
4560 else
4561 return 0;
4562 }
4563
4564 else if (! in0_p && in1_p)
4565 {
4566 /* If they don't overlap, the result is the second range. If the second
4567 is a subset of the first, the result is false. Otherwise,
4568 the range starts just after the first range and ends at the
4569 end of the second. */
4570 if (no_overlap)
4571 in_p = 1, low = low1, high = high1;
4572 else if (subset || highequal)
4573 in_p = 0, low = high = 0;
4574 else
4575 {
4576 low = range_successor (high0);
4577 high = high1;
4578 in_p = (low != 0);
4579 }
4580 }
4581
4582 else
4583 {
4584 /* The case where we are excluding both ranges. Here the complex case
4585 is if they don't overlap. In that case, the only time we have a
4586 range is if they are adjacent. If the second is a subset of the
4587 first, the result is the first. Otherwise, the range to exclude
4588 starts at the beginning of the first range and ends at the end of the
4589 second. */
4590 if (no_overlap)
4591 {
4592 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4593 range_successor (high0),
4594 1, low1, 0)))
4595 in_p = 0, low = low0, high = high1;
4596 else
4597 {
4598 /* Canonicalize - [min, x] into - [-, x]. */
4599 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4600 switch (TREE_CODE (TREE_TYPE (low0)))
4601 {
4602 case ENUMERAL_TYPE:
4603 if (TYPE_PRECISION (TREE_TYPE (low0))
4604 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4605 break;
4606 /* FALLTHROUGH */
4607 case INTEGER_TYPE:
4608 if (tree_int_cst_equal (low0,
4609 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4610 low0 = 0;
4611 break;
4612 case POINTER_TYPE:
4613 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4614 && integer_zerop (low0))
4615 low0 = 0;
4616 break;
4617 default:
4618 break;
4619 }
4620
4621 /* Canonicalize - [x, max] into - [x, -]. */
4622 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4623 switch (TREE_CODE (TREE_TYPE (high1)))
4624 {
4625 case ENUMERAL_TYPE:
4626 if (TYPE_PRECISION (TREE_TYPE (high1))
4627 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4628 break;
4629 /* FALLTHROUGH */
4630 case INTEGER_TYPE:
4631 if (tree_int_cst_equal (high1,
4632 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4633 high1 = 0;
4634 break;
4635 case POINTER_TYPE:
4636 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4637 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4638 high1, 1,
4639 integer_one_node, 1)))
4640 high1 = 0;
4641 break;
4642 default:
4643 break;
4644 }
4645
4646 /* The ranges might be also adjacent between the maximum and
4647 minimum values of the given type. For
4648 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4649 return + [x + 1, y - 1]. */
4650 if (low0 == 0 && high1 == 0)
4651 {
4652 low = range_successor (high0);
4653 high = range_predecessor (low1);
4654 if (low == 0 || high == 0)
4655 return 0;
4656
4657 in_p = 1;
4658 }
4659 else
4660 return 0;
4661 }
4662 }
4663 else if (subset)
4664 in_p = 0, low = low0, high = high0;
4665 else
4666 in_p = 0, low = low0, high = high1;
4667 }
4668
4669 *pin_p = in_p, *plow = low, *phigh = high;
4670 return 1;
4671 }
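/* A sketch of one merge handled above, kept under #if 0: the
   adjacent single-point ranges + [2, 2] and + [3, 3] become the
   single range + [2, 3], which build_range_check then turns into a
   subtraction and one unsigned comparison.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int x = -100; x <= 100; x++)
    assert ((x == 2 || x == 3) == ((unsigned) (x - 2) <= 1));
  return 0;
}
#endif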
4672 \f
4673
4674 /* Subroutine of fold, looking inside expressions of the form
4675 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4676 of the COND_EXPR. This function is being used also to optimize
4677 A op B ? C : A, by reversing the comparison first.
4678
4679 Return a folded expression whose code is not a COND_EXPR
4680 anymore, or NULL_TREE if no folding opportunity is found. */
4681
4682 static tree
4683 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4684 {
4685 enum tree_code comp_code = TREE_CODE (arg0);
4686 tree arg00 = TREE_OPERAND (arg0, 0);
4687 tree arg01 = TREE_OPERAND (arg0, 1);
4688 tree arg1_type = TREE_TYPE (arg1);
4689 tree tem;
4690
4691 STRIP_NOPS (arg1);
4692 STRIP_NOPS (arg2);
4693
4694 /* If we have A op 0 ? A : -A, consider applying the following
4695 transformations:
4696
4697 A == 0? A : -A same as -A
4698 A != 0? A : -A same as A
4699 A >= 0? A : -A same as abs (A)
4700 A > 0? A : -A same as abs (A)
4701 A <= 0? A : -A same as -abs (A)
4702 A < 0? A : -A same as -abs (A)
4703
4704 None of these transformations work for modes with signed
4705 zeros. If A is +/-0, the first two transformations will
4706 change the sign of the result (from +0 to -0, or vice
4707 versa). The last four will fix the sign of the result,
4708 even though the original expressions could be positive or
4709 negative, depending on the sign of A.
4710
4711 Note that all these transformations are correct if A is
4712 NaN, since the two alternatives (A and -A) are also NaNs. */
4713 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4714 ? real_zerop (arg01)
4715 : integer_zerop (arg01))
4716 && ((TREE_CODE (arg2) == NEGATE_EXPR
4717 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4718 /* In the case that A is of the form X-Y, '-A' (arg2) may
4719 have already been folded to Y-X, check for that. */
4720 || (TREE_CODE (arg1) == MINUS_EXPR
4721 && TREE_CODE (arg2) == MINUS_EXPR
4722 && operand_equal_p (TREE_OPERAND (arg1, 0),
4723 TREE_OPERAND (arg2, 1), 0)
4724 && operand_equal_p (TREE_OPERAND (arg1, 1),
4725 TREE_OPERAND (arg2, 0), 0))))
4726 switch (comp_code)
4727 {
4728 case EQ_EXPR:
4729 case UNEQ_EXPR:
4730 tem = fold_convert (arg1_type, arg1);
4731 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4732 case NE_EXPR:
4733 case LTGT_EXPR:
4734 return pedantic_non_lvalue (fold_convert (type, arg1));
4735 case UNGE_EXPR:
4736 case UNGT_EXPR:
4737 if (flag_trapping_math)
4738 break;
4739 /* Fall through. */
4740 case GE_EXPR:
4741 case GT_EXPR:
4742 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4743 arg1 = fold_convert (lang_hooks.types.signed_type
4744 (TREE_TYPE (arg1)), arg1);
4745 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4746 return pedantic_non_lvalue (fold_convert (type, tem));
4747 case UNLE_EXPR:
4748 case UNLT_EXPR:
4749 if (flag_trapping_math)
4750 break;
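/* Fall through. */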
4751 case LE_EXPR:
4752 case LT_EXPR:
4753 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4754 arg1 = fold_convert (lang_hooks.types.signed_type
4755 (TREE_TYPE (arg1)), arg1);
4756 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4757 return negate_expr (fold_convert (type, tem));
4758 default:
4759 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4760 break;
4761 }
4762
4763 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4764 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4765 both transformations are correct when A is NaN: A != 0
4766 is then true, and A == 0 is false. */
4767
4768 if (integer_zerop (arg01) && integer_zerop (arg2))
4769 {
4770 if (comp_code == NE_EXPR)
4771 return pedantic_non_lvalue (fold_convert (type, arg1));
4772 else if (comp_code == EQ_EXPR)
4773 return build_int_cst (type, 0);
4774 }
4775
4776 /* Try some transformations of A op B ? A : B.
4777
4778 A == B? A : B same as B
4779 A != B? A : B same as A
4780 A >= B? A : B same as max (A, B)
4781 A > B? A : B same as max (B, A)
4782 A <= B? A : B same as min (A, B)
4783 A < B? A : B same as min (B, A)
4784
4785 As above, these transformations don't work in the presence
4786 of signed zeros. For example, if A and B are zeros of
4787 opposite sign, the first two transformations will change
4788 the sign of the result. In the last four, the original
4789 expressions give different results for (A=+0, B=-0) and
4790 (A=-0, B=+0), but the transformed expressions do not.
4791
4792 The first two transformations are correct if either A or B
4793 is a NaN. In the first transformation, the condition will
4794 be false, and B will indeed be chosen. In the case of the
4795 second transformation, the condition A != B will be true,
4796 and A will be chosen.
4797
4798 The conversions to max() and min() are not correct if B is
4799 a number and A is not. The conditions in the original
4800 expressions will be false, so all four give B. The min()
4801 and max() versions would give a NaN instead. */
4802 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4803 /* Avoid these transformations if the COND_EXPR may be used
4804 as an lvalue in the C++ front-end. PR c++/19199. */
4805 && (in_gimple_form
4806 || (strcmp (lang_hooks.name, "GNU C++") != 0
4807 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4808 || ! maybe_lvalue_p (arg1)
4809 || ! maybe_lvalue_p (arg2)))
4810 {
4811 tree comp_op0 = arg00;
4812 tree comp_op1 = arg01;
4813 tree comp_type = TREE_TYPE (comp_op0);
4814
4815 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4816 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4817 {
4818 comp_type = type;
4819 comp_op0 = arg1;
4820 comp_op1 = arg2;
4821 }
4822
4823 switch (comp_code)
4824 {
4825 case EQ_EXPR:
4826 return pedantic_non_lvalue (fold_convert (type, arg2));
4827 case NE_EXPR:
4828 return pedantic_non_lvalue (fold_convert (type, arg1));
4829 case LE_EXPR:
4830 case LT_EXPR:
4831 case UNLE_EXPR:
4832 case UNLT_EXPR:
4833 /* In C++ a ?: expression can be an lvalue, so put the
4834 operand which will be used if they are equal first
4835 so that we can convert this back to the
4836 corresponding COND_EXPR. */
4837 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4838 {
4839 comp_op0 = fold_convert (comp_type, comp_op0);
4840 comp_op1 = fold_convert (comp_type, comp_op1);
4841 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4842 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4843 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4844 return pedantic_non_lvalue (fold_convert (type, tem));
4845 }
4846 break;
4847 case GE_EXPR:
4848 case GT_EXPR:
4849 case UNGE_EXPR:
4850 case UNGT_EXPR:
4851 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4852 {
4853 comp_op0 = fold_convert (comp_type, comp_op0);
4854 comp_op1 = fold_convert (comp_type, comp_op1);
4855 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4856 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4857 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4858 return pedantic_non_lvalue (fold_convert (type, tem));
4859 }
4860 break;
4861 case UNEQ_EXPR:
4862 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4863 return pedantic_non_lvalue (fold_convert (type, arg2));
4864 break;
4865 case LTGT_EXPR:
4866 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4867 return pedantic_non_lvalue (fold_convert (type, arg1));
4868 break;
4869 default:
4870 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4871 break;
4872 }
4873 }
4874
4875 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4876 we might still be able to simplify this. For example,
4877 if C1 is one less or one more than C2, this might have started
4878 out as a MIN or MAX and been transformed by this function.
4879 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4880
4881 if (INTEGRAL_TYPE_P (type)
4882 && TREE_CODE (arg01) == INTEGER_CST
4883 && TREE_CODE (arg2) == INTEGER_CST)
4884 switch (comp_code)
4885 {
4886 case EQ_EXPR:
4887 /* We can replace A with C1 in this case. */
4888 arg1 = fold_convert (type, arg01);
4889 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4890
4891 case LT_EXPR:
4892 /* If C1 is C2 + 1, this is min(A, C2). */
4893 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4894 OEP_ONLY_CONST)
4895 && operand_equal_p (arg01,
4896 const_binop (PLUS_EXPR, arg2,
4897 build_int_cst (type, 1), 0),
4898 OEP_ONLY_CONST))
4899 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4900 type, arg1, arg2));
4901 break;
4902
4903 case LE_EXPR:
4904 /* If C1 is C2 - 1, this is min(A, C2). */
4905 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4906 OEP_ONLY_CONST)
4907 && operand_equal_p (arg01,
4908 const_binop (MINUS_EXPR, arg2,
4909 build_int_cst (type, 1), 0),
4910 OEP_ONLY_CONST))
4911 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4912 type, arg1, arg2));
4913 break;
4914
4915 case GT_EXPR:
4916 /* If C1 is C2 - 1, this is max(A, C2). */
4917 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4918 OEP_ONLY_CONST)
4919 && operand_equal_p (arg01,
4920 const_binop (MINUS_EXPR, arg2,
4921 build_int_cst (type, 1), 0),
4922 OEP_ONLY_CONST))
4923 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4924 type, arg1, arg2));
4925 break;
4926
4927 case GE_EXPR:
4928 /* If C1 is C2 + 1, this is max(A, C2). */
4929 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4930 OEP_ONLY_CONST)
4931 && operand_equal_p (arg01,
4932 const_binop (PLUS_EXPR, arg2,
4933 build_int_cst (type, 1), 0),
4934 OEP_ONLY_CONST))
4935 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4936 type, arg1, arg2));
4937 break;
4938 case NE_EXPR:
4939 break;
4940 default:
4941 gcc_unreachable ();
4942 }
4943
4944 return NULL_TREE;
4945 }
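/* Integer spot-checks of the A op 0 ? A : -A and A op B ? A : B
   tables above, kept under #if 0; for floating point the signed-zero
   and NaN caveats in the comments apply and these identities do not
   all hold.  */
#if 0
#include <assert.h>
#include <stdlib.h>

static int imin (int x, int y) { return x < y ? x : y; }
static int imax (int x, int y) { return x > y ? x : y; }

int
main (void)
{
  for (int a = -5; a <= 5; a++)
    {
      assert ((a > 0 ? a : -a) == abs (a));
      assert ((a >= 0 ? a : -a) == abs (a));
      assert ((a < 0 ? a : -a) == -abs (a));
      for (int b = -5; b <= 5; b++)
	{
	  assert ((a <= b ? a : b) == imin (a, b));
	  assert ((a >= b ? a : b) == imax (a, b));
	}
    }
  return 0;
}
#endif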
4946
4947
4948 \f
4949 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4950 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4951 #endif
4952
4953 /* EXP is some logical combination of boolean tests. See if we can
4954 merge it into some range test. Return the new tree if so. */
4955
4956 static tree
4957 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4958 {
4959 int or_op = (code == TRUTH_ORIF_EXPR
4960 || code == TRUTH_OR_EXPR);
4961 int in0_p, in1_p, in_p;
4962 tree low0, low1, low, high0, high1, high;
4963 bool strict_overflow_p = false;
4964 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4965 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4966 tree tem;
4967 const char * const warnmsg = G_("assuming signed overflow does not occur "
4968 "when simplifying range test");
4969
4970 /* If this is an OR operation, invert both sides; we will invert
4971 again at the end. */
4972 if (or_op)
4973 in0_p = ! in0_p, in1_p = ! in1_p;
4974
4975 /* If both expressions are the same, if we can merge the ranges, and we
4976 can build the range test, return it or it inverted. If one of the
4977 ranges is always true or always false, consider it to be the same
4978 expression as the other. */
4979 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4980 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4981 in1_p, low1, high1)
4982 && 0 != (tem = (build_range_check (type,
4983 lhs != 0 ? lhs
4984 : rhs != 0 ? rhs : integer_zero_node,
4985 in_p, low, high))))
4986 {
4987 if (strict_overflow_p)
4988 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4989 return or_op ? invert_truthvalue (tem) : tem;
4990 }
4991
4992 /* On machines where the branch cost is expensive, if this is a
4993 short-circuited branch and the underlying object on both sides
4994 is the same, make a non-short-circuit operation. */
4995 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4996 && lhs != 0 && rhs != 0
4997 && (code == TRUTH_ANDIF_EXPR
4998 || code == TRUTH_ORIF_EXPR)
4999 && operand_equal_p (lhs, rhs, 0))
5000 {
5001 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5002 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5003 which cases we can't do this. */
5004 if (simple_operand_p (lhs))
5005 return build2 (code == TRUTH_ANDIF_EXPR
5006 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5007 type, op0, op1);
5008
5009 else if (lang_hooks.decls.global_bindings_p () == 0
5010 && ! CONTAINS_PLACEHOLDER_P (lhs))
5011 {
5012 tree common = save_expr (lhs);
5013
5014 if (0 != (lhs = build_range_check (type, common,
5015 or_op ? ! in0_p : in0_p,
5016 low0, high0))
5017 && (0 != (rhs = build_range_check (type, common,
5018 or_op ? ! in1_p : in1_p,
5019 low1, high1))))
5020 {
5021 if (strict_overflow_p)
5022 fold_overflow_warning (warnmsg,
5023 WARN_STRICT_OVERFLOW_COMPARISON);
5024 return build2 (code == TRUTH_ANDIF_EXPR
5025 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5026 type, lhs, rhs);
5027 }
5028 }
5029 }
5030
5031 return 0;
5032 }
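/* A sketch of the non-short-circuit rewrite above, kept under #if 0:
   with a simple, side-effect-free operand, TRUTH_ANDIF_EXPR can
   become TRUTH_AND_EXPR, evaluating both subtests unconditionally
   and trading a branch for straight-line work.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int x = -100; x <= 100; x++)
    {
      int short_circuit = (x >= 2) && (x <= 5);
      int unconditional = (x >= 2) & (x <= 5);
      assert (short_circuit == unconditional);
    }
  return 0;
}
#endif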
5033 \f
5034 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5035 bit value. Arrange things so the extra bits will be set to zero if and
5036 only if C is sign-extended to its full width. If MASK is nonzero,
5037 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5038
5039 static tree
5040 unextend (tree c, int p, int unsignedp, tree mask)
5041 {
5042 tree type = TREE_TYPE (c);
5043 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5044 tree temp;
5045
5046 if (p == modesize || unsignedp)
5047 return c;
5048
5049 /* We work by getting just the sign bit into the low-order bit, then
5050 into the high-order bit, then sign-extend. We then XOR that value
5051 with C. */
5052 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5053 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5054
5055 /* We must use a signed type in order to get an arithmetic right shift.
5056 However, we must also avoid introducing accidental overflows, so that
5057 a subsequent call to integer_zerop will work. Hence we must
5058 do the type conversion here. At this point, the constant is either
5059 zero or one, and the conversion to a signed type can never overflow.
5060 We could get an overflow if this conversion is done anywhere else. */
5061 if (TYPE_UNSIGNED (type))
5062 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
5063
5064 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5065 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5066 if (mask != 0)
5067 temp = const_binop (BIT_AND_EXPR, temp,
5068 fold_convert (TREE_TYPE (c), mask), 0);
5069 /* If necessary, convert the type back to match the type of C. */
5070 if (TYPE_UNSIGNED (type))
5071 temp = fold_convert (type, temp);
5072
5073 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
5074 }
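/* The shift-and-XOR sequence above in stand-alone form for a 4-bit
   signed value inside a 32-bit mode, kept under #if 0.  An
   arithmetic right shift of signed values is assumed, which is what
   the conversion to a signed type in the code is for.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  int p = 4, modesize = 32;
  int32_t c = -3;			/* sign-extended 4-bit pattern 1101 */
  int32_t temp = (c >> (p - 1)) & 1;	/* the field's sign bit */
  temp = (int32_t) ((uint32_t) temp << (modesize - 1));
  temp >>= modesize - p - 1;		/* ones in bits P..MODESIZE-1 */
  uint32_t unextended = (uint32_t) (c ^ temp);
  /* The bits above the field are zero exactly because C was the sign
     extension of its low 4 bits.  */
  assert (unextended == 0xd);
  return 0;
}
#endif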
5075 \f
5076 /* Find ways of folding logical expressions of LHS and RHS:
5077 Try to merge two comparisons to the same innermost item.
5078 Look for range tests like "ch >= '0' && ch <= '9'".
5079 Look for combinations of simple terms on machines with expensive branches
5080 and evaluate the RHS unconditionally.
5081
5082 For example, if we have p->a == 2 && p->b == 4 and we can make an
5083 object large enough to span both A and B, we can do this with a comparison
5084 against the object ANDed with the a mask.
5085
5086 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5087 operations to do this with one comparison.
5088
5089 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5090 function and the one above.
5091
5092 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5093 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5094
5095 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5096 two operands.
5097
5098 We return the simplified tree or 0 if no optimization is possible. */
5099
5100 static tree
5101 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5102 {
5103 /* If this is the "or" of two comparisons, we can do something if
5104 the comparisons are NE_EXPR. If this is the "and", we can do something
5105 if the comparisons are EQ_EXPR. I.e.,
5106 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5107
5108 WANTED_CODE is this operation code. For single bit fields, we can
5109 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5110 comparison for one-bit fields. */
5111
5112 enum tree_code wanted_code;
5113 enum tree_code lcode, rcode;
5114 tree ll_arg, lr_arg, rl_arg, rr_arg;
5115 tree ll_inner, lr_inner, rl_inner, rr_inner;
5116 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5117 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5118 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5119 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5120 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5121 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5122 enum machine_mode lnmode, rnmode;
5123 tree ll_mask, lr_mask, rl_mask, rr_mask;
5124 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5125 tree l_const, r_const;
5126 tree lntype, rntype, result;
5127 int first_bit, end_bit;
5128 int volatilep;
5129 tree orig_lhs = lhs, orig_rhs = rhs;
5130 enum tree_code orig_code = code;
5131
5132 /* Start by getting the comparison codes. Fail if anything is volatile.
5133 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5134 it were surrounded with a NE_EXPR. */
5135
5136 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5137 return 0;
5138
5139 lcode = TREE_CODE (lhs);
5140 rcode = TREE_CODE (rhs);
5141
5142 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5143 {
5144 lhs = build2 (NE_EXPR, truth_type, lhs,
5145 build_int_cst (TREE_TYPE (lhs), 0));
5146 lcode = NE_EXPR;
5147 }
5148
5149 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5150 {
5151 rhs = build2 (NE_EXPR, truth_type, rhs,
5152 build_int_cst (TREE_TYPE (rhs), 0));
5153 rcode = NE_EXPR;
5154 }
5155
5156 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5157 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5158 return 0;
5159
5160 ll_arg = TREE_OPERAND (lhs, 0);
5161 lr_arg = TREE_OPERAND (lhs, 1);
5162 rl_arg = TREE_OPERAND (rhs, 0);
5163 rr_arg = TREE_OPERAND (rhs, 1);
5164
5165 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5166 if (simple_operand_p (ll_arg)
5167 && simple_operand_p (lr_arg))
5168 {
5169 tree result;
5170 if (operand_equal_p (ll_arg, rl_arg, 0)
5171 && operand_equal_p (lr_arg, rr_arg, 0))
5172 {
5173 result = combine_comparisons (code, lcode, rcode,
5174 truth_type, ll_arg, lr_arg);
5175 if (result)
5176 return result;
5177 }
5178 else if (operand_equal_p (ll_arg, rr_arg, 0)
5179 && operand_equal_p (lr_arg, rl_arg, 0))
5180 {
5181 result = combine_comparisons (code, lcode,
5182 swap_tree_comparison (rcode),
5183 truth_type, ll_arg, lr_arg);
5184 if (result)
5185 return result;
5186 }
5187 }
5188
5189 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5190 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5191
5192 /* If the RHS can be evaluated unconditionally and its operands are
5193 simple, it wins to evaluate the RHS unconditionally on machines
5194 with expensive branches. In this case, this isn't a comparison
5195 that can be merged. Avoid doing this if the RHS is a floating-point
5196 comparison since those can trap. */
5197
5198 if (BRANCH_COST >= 2
5199 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5200 && simple_operand_p (rl_arg)
5201 && simple_operand_p (rr_arg))
5202 {
5203 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5204 if (code == TRUTH_OR_EXPR
5205 && lcode == NE_EXPR && integer_zerop (lr_arg)
5206 && rcode == NE_EXPR && integer_zerop (rr_arg)
5207 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5208 return build2 (NE_EXPR, truth_type,
5209 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5210 ll_arg, rl_arg),
5211 build_int_cst (TREE_TYPE (ll_arg), 0));
5212
5213 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5214 if (code == TRUTH_AND_EXPR
5215 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5216 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5217 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5218 return build2 (EQ_EXPR, truth_type,
5219 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5220 ll_arg, rl_arg),
5221 build_int_cst (TREE_TYPE (ll_arg), 0));
5222
5223 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5224 {
5225 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5226 return build2 (code, truth_type, lhs, rhs);
5227 return NULL_TREE;
5228 }
5229 }
5230
5231 /* See if the comparisons can be merged. Then get all the parameters for
5232 each side. */
5233
5234 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5235 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5236 return 0;
5237
5238 volatilep = 0;
5239 ll_inner = decode_field_reference (ll_arg,
5240 &ll_bitsize, &ll_bitpos, &ll_mode,
5241 &ll_unsignedp, &volatilep, &ll_mask,
5242 &ll_and_mask);
5243 lr_inner = decode_field_reference (lr_arg,
5244 &lr_bitsize, &lr_bitpos, &lr_mode,
5245 &lr_unsignedp, &volatilep, &lr_mask,
5246 &lr_and_mask);
5247 rl_inner = decode_field_reference (rl_arg,
5248 &rl_bitsize, &rl_bitpos, &rl_mode,
5249 &rl_unsignedp, &volatilep, &rl_mask,
5250 &rl_and_mask);
5251 rr_inner = decode_field_reference (rr_arg,
5252 &rr_bitsize, &rr_bitpos, &rr_mode,
5253 &rr_unsignedp, &volatilep, &rr_mask,
5254 &rr_and_mask);
5255
5256 /* The inner operation on the lhs of each comparison must be the
5257 same if we are to be able to do anything.
5258 Then see if we have constants. If not, the same must be true for
5259 the rhs's. */
5260 if (volatilep || ll_inner == 0 || rl_inner == 0
5261 || ! operand_equal_p (ll_inner, rl_inner, 0))
5262 return 0;
5263
5264 if (TREE_CODE (lr_arg) == INTEGER_CST
5265 && TREE_CODE (rr_arg) == INTEGER_CST)
5266 l_const = lr_arg, r_const = rr_arg;
5267 else if (lr_inner == 0 || rr_inner == 0
5268 || ! operand_equal_p (lr_inner, rr_inner, 0))
5269 return 0;
5270 else
5271 l_const = r_const = 0;
5272
5273 /* If either comparison code is not correct for our logical operation,
5274 fail. However, we can convert a one-bit comparison against zero into
5275 the opposite comparison against that bit being set in the field. */
5276
5277 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5278 if (lcode != wanted_code)
5279 {
5280 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5281 {
5282 /* Make the left operand unsigned, since we are only interested
5283 in the value of one bit. Otherwise we are doing the wrong
5284 thing below. */
5285 ll_unsignedp = 1;
5286 l_const = ll_mask;
5287 }
5288 else
5289 return 0;
5290 }
5291
5292 /* This is analogous to the code for l_const above. */
5293 if (rcode != wanted_code)
5294 {
5295 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5296 {
5297 rl_unsignedp = 1;
5298 r_const = rl_mask;
5299 }
5300 else
5301 return 0;
5302 }
5303
5304 /* See if we can find a mode that contains both fields being compared on
5305 the left. If we can't, fail. Otherwise, update all constants and masks
5306 to be relative to a field of that size. */
5307 first_bit = MIN (ll_bitpos, rl_bitpos);
5308 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5309 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5310 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5311 volatilep);
5312 if (lnmode == VOIDmode)
5313 return 0;
5314
5315 lnbitsize = GET_MODE_BITSIZE (lnmode);
5316 lnbitpos = first_bit & ~ (lnbitsize - 1);
5317 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5318 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5319
5320 if (BYTES_BIG_ENDIAN)
5321 {
5322 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5323 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5324 }
5325
5326 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5327 size_int (xll_bitpos), 0);
5328 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5329 size_int (xrl_bitpos), 0);
5330
5331 if (l_const)
5332 {
5333 l_const = fold_convert (lntype, l_const);
5334 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5335 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5336 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5337 fold_build1 (BIT_NOT_EXPR,
5338 lntype, ll_mask),
5339 0)))
5340 {
5341 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5342
5343 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5344 }
5345 }
5346 if (r_const)
5347 {
5348 r_const = fold_convert (lntype, r_const);
5349 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5350 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5351 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5352 fold_build1 (BIT_NOT_EXPR,
5353 lntype, rl_mask),
5354 0)))
5355 {
5356 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5357
5358 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5359 }
5360 }
5361
5362 /* If the right sides are not constant, do the same for them. Also,
5363 disallow this optimization if a size or signedness mismatch occurs
5364 between the left and right sides. */
5365 if (l_const == 0)
5366 {
5367 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5368 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5369 /* Make sure the two fields on the right
5370 correspond to the left without being swapped. */
5371 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5372 return 0;
5373
5374 first_bit = MIN (lr_bitpos, rr_bitpos);
5375 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5376 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5377 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5378 volatilep);
5379 if (rnmode == VOIDmode)
5380 return 0;
5381
5382 rnbitsize = GET_MODE_BITSIZE (rnmode);
5383 rnbitpos = first_bit & ~ (rnbitsize - 1);
5384 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5385 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5386
5387 if (BYTES_BIG_ENDIAN)
5388 {
5389 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5390 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5391 }
5392
5393 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5394 size_int (xlr_bitpos), 0);
5395 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5396 size_int (xrr_bitpos), 0);
5397
5398 /* Make a mask that corresponds to both fields being compared.
5399 Do this for both items being compared. If the operands are the
5400 same size and the bits being compared are in the same position
5401 then we can do this by masking both and comparing the masked
5402 results. */
5403 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5404 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5405 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5406 {
5407 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5408 ll_unsignedp || rl_unsignedp);
5409 if (! all_ones_mask_p (ll_mask, lnbitsize))
5410 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5411
5412 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5413 lr_unsignedp || rr_unsignedp);
5414 if (! all_ones_mask_p (lr_mask, rnbitsize))
5415 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5416
5417 return build2 (wanted_code, truth_type, lhs, rhs);
5418 }
5419
5420 /* There is still another way we can do something: If both pairs of
5421 fields being compared are adjacent, we may be able to make a wider
5422 field containing them both.
5423
5424 Note that we still must mask the lhs/rhs expressions. Furthermore,
5425 the mask must be shifted to account for the shift done by
5426 make_bit_field_ref. */
5427 if ((ll_bitsize + ll_bitpos == rl_bitpos
5428 && lr_bitsize + lr_bitpos == rr_bitpos)
5429 || (ll_bitpos == rl_bitpos + rl_bitsize
5430 && lr_bitpos == rr_bitpos + rr_bitsize))
5431 {
5432 tree type;
5433
5434 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5435 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5436 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5437 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5438
5439 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5440 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5441 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5442 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5443
5444 /* Convert to the smaller type before masking out unwanted bits. */
5445 type = lntype;
5446 if (lntype != rntype)
5447 {
5448 if (lnbitsize > rnbitsize)
5449 {
5450 lhs = fold_convert (rntype, lhs);
5451 ll_mask = fold_convert (rntype, ll_mask);
5452 type = rntype;
5453 }
5454 else if (lnbitsize < rnbitsize)
5455 {
5456 rhs = fold_convert (lntype, rhs);
5457 lr_mask = fold_convert (lntype, lr_mask);
5458 type = lntype;
5459 }
5460 }
5461
5462 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5463 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5464
5465 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5466 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5467
5468 return build2 (wanted_code, truth_type, lhs, rhs);
5469 }
5470
5471 return 0;
5472 }
5473
5474 /* Handle the case of comparisons with constants. If there is something in
5475 common between the masks, those bits of the constants must be the same.
5476 If not, the condition is always false. Test for this to avoid generating
5477 incorrect code below. */
5478 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5479 if (! integer_zerop (result)
5480 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5481 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5482 {
5483 if (wanted_code == NE_EXPR)
5484 {
5485 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5486 return constant_boolean_node (true, truth_type);
5487 }
5488 else
5489 {
5490 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5491 return constant_boolean_node (false, truth_type);
5492 }
5493 }
5494
5495 /* Construct the expression we will return. First get the component
5496 reference we will make. Unless the mask is all ones the width of
5497 that field, perform the mask operation. Then compare with the
5498 merged constant. */
5499 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5500 ll_unsignedp || rl_unsignedp);
5501
5502 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5503 if (! all_ones_mask_p (ll_mask, lnbitsize))
5504 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5505
5506 return build2 (wanted_code, truth_type, result,
5507 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5508 }
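
/* Illustrative sketch, not part of fold-const.c: the effect of the
   bitfield-merging fold above, restated in plain C.  Two tests of
   adjacent bitfields become a single masked compare of the containing
   byte.  The union, the layout (little-endian, first field in the low
   bits) and the function name are assumptions for this demo only.  */
#include <assert.h>
#include <stdint.h>

union two_nibbles
{
  struct { unsigned a : 4; unsigned b : 4; } f;
  uint8_t raw;
};

static void
check_merged_bitfield_compare (union two_nibbles u)
{
  /* u.f.a == 3 && u.f.b == 5 merges into one full-byte compare:
     raw == (3 | 5 << 4) == 0x53.  */
  assert ((u.f.a == 3 && u.f.b == 5) == (u.raw == 0x53));
}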
5509 \f
5510 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5511 constant. */
5512
5513 static tree
5514 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5515 {
5516 tree arg0 = op0;
5517 enum tree_code op_code;
5518 tree comp_const = op1;
5519 tree minmax_const;
5520 int consts_equal, consts_lt;
5521 tree inner;
5522
5523 STRIP_SIGN_NOPS (arg0);
5524
5525 op_code = TREE_CODE (arg0);
5526 minmax_const = TREE_OPERAND (arg0, 1);
5527 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5528 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5529 inner = TREE_OPERAND (arg0, 0);
5530
5531 /* If something does not permit us to optimize, return NULL_TREE. */
5532 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5533 || TREE_CODE (comp_const) != INTEGER_CST
5534 || TREE_OVERFLOW (comp_const)
5535 || TREE_CODE (minmax_const) != INTEGER_CST
5536 || TREE_OVERFLOW (minmax_const))
5537 return NULL_TREE;
5538
5539 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5540 and GT_EXPR, doing the rest with recursive calls using logical
5541 simplifications. */
5542 switch (code)
5543 {
5544 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5545 {
5546 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5547 type, op0, op1);
5548 if (tem)
5549 return invert_truthvalue (tem);
5550 return NULL_TREE;
5551 }
5552
5553 case GE_EXPR:
5554 return
5555 fold_build2 (TRUTH_ORIF_EXPR, type,
5556 optimize_minmax_comparison
5557 (EQ_EXPR, type, arg0, comp_const),
5558 optimize_minmax_comparison
5559 (GT_EXPR, type, arg0, comp_const));
5560
5561 case EQ_EXPR:
5562 if (op_code == MAX_EXPR && consts_equal)
5563 /* MAX (X, 0) == 0 -> X <= 0 */
5564 return fold_build2 (LE_EXPR, type, inner, comp_const);
5565
5566 else if (op_code == MAX_EXPR && consts_lt)
5567 /* MAX (X, 0) == 5 -> X == 5 */
5568 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5569
5570 else if (op_code == MAX_EXPR)
5571 /* MAX (X, 0) == -1 -> false */
5572 return omit_one_operand (type, integer_zero_node, inner);
5573
5574 else if (consts_equal)
5575 /* MIN (X, 0) == 0 -> X >= 0 */
5576 return fold_build2 (GE_EXPR, type, inner, comp_const);
5577
5578 else if (consts_lt)
5579 /* MIN (X, 0) == 5 -> false */
5580 return omit_one_operand (type, integer_zero_node, inner);
5581
5582 else
5583 /* MIN (X, 0) == -1 -> X == -1 */
5584 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5585
5586 case GT_EXPR:
5587 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5588 /* MAX (X, 0) > 0 -> X > 0
5589 MAX (X, 0) > 5 -> X > 5 */
5590 return fold_build2 (GT_EXPR, type, inner, comp_const);
5591
5592 else if (op_code == MAX_EXPR)
5593 /* MAX (X, 0) > -1 -> true */
5594 return omit_one_operand (type, integer_one_node, inner);
5595
5596 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5597 /* MIN (X, 0) > 0 -> false
5598 MIN (X, 0) > 5 -> false */
5599 return omit_one_operand (type, integer_zero_node, inner);
5600
5601 else
5602 /* MIN (X, 0) > -1 -> X > -1 */
5603 return fold_build2 (GT_EXPR, type, inner, comp_const);
5604
5605 default:
5606 return NULL_TREE;
5607 }
5608 }
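
/* Illustrative sketch, not part of fold-const.c: the MIN/MAX
   comparison folds above, restated for plain ints.  The helper and
   function names are hypothetical.  */
#include <assert.h>

static int imax (int a, int b) { return a > b ? a : b; }
static int imin (int a, int b) { return a < b ? a : b; }

static void
check_minmax_folds (int x)
{
  /* MAX (X, 0) == 0  ->  X <= 0  */
  assert ((imax (x, 0) == 0) == (x <= 0));
  /* MAX (X, 0) == 5  ->  X == 5, since 0 < 5  */
  assert ((imax (x, 0) == 5) == (x == 5));
  /* MAX (X, 0) == -1  ->  false, the result is never negative  */
  assert (!(imax (x, 0) == -1));
  /* MIN (X, 0) == 0  ->  X >= 0  */
  assert ((imin (x, 0) == 0) == (x >= 0));
  /* MAX (X, 0) > -1  ->  true  */
  assert (imax (x, 0) > -1);
  /* MIN (X, 0) > -1  ->  X > -1  */
  assert ((imin (x, 0) > -1) == (x > -1));
}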
5609 \f
5610 /* T is an integer expression that is being multiplied, divided, or reduced
5611 modulo a constant C (CODE says which operation and what kind of divide
5612 or modulus). See if we can eliminate that operation by folding it with
5613 other operations already in T. WIDE_TYPE, if non-null, is a type that
5614 should be used for the computation if wider than our type.
5615
5616 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5617 (X * 2) + (Y * 4). We must, however, be assured that either the original
5618 expression would not overflow or that overflow is undefined for the type
5619 in the language in question.
5620
5621 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5622 the machine has a multiply-accumulate insn or that this is part of an
5623 addressing calculation.
5624
5625 If we return a non-null expression, it is an equivalent form of the
5626 original computation, but need not be in the original type.
5627
5628 We set *STRICT_OVERFLOW_P to true if the return value depends on
5629 signed overflow being undefined. Otherwise we do not change
5630 *STRICT_OVERFLOW_P. */
5631
5632 static tree
5633 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5634 bool *strict_overflow_p)
5635 {
5636 /* To avoid exponential search depth, refuse to allow recursion past
5637 three levels. Beyond that (1) it's highly unlikely that we'll find
5638 something interesting and (2) we've probably processed it before
5639 when we built the inner expression. */
5640
5641 static int depth;
5642 tree ret;
5643
5644 if (depth > 3)
5645 return NULL;
5646
5647 depth++;
5648 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5649 depth--;
5650
5651 return ret;
5652 }
5653
5654 static tree
5655 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5656 bool *strict_overflow_p)
5657 {
5658 tree type = TREE_TYPE (t);
5659 enum tree_code tcode = TREE_CODE (t);
5660 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5661 > GET_MODE_SIZE (TYPE_MODE (type)))
5662 ? wide_type : type);
5663 tree t1, t2;
5664 int same_p = tcode == code;
5665 tree op0 = NULL_TREE, op1 = NULL_TREE;
5666 bool sub_strict_overflow_p;
5667
5668 /* Don't deal with constants of zero here; they confuse the code below. */
5669 if (integer_zerop (c))
5670 return NULL_TREE;
5671
5672 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5673 op0 = TREE_OPERAND (t, 0);
5674
5675 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5676 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5677
5678 /* Note that we need not handle conditional operations here since fold
5679 already handles those cases. So just do arithmetic here. */
5680 switch (tcode)
5681 {
5682 case INTEGER_CST:
5683 /* For a constant, we can always simplify if we are a multiply
5684 or (for divide and modulus) if it is a multiple of our constant. */
5685 if (code == MULT_EXPR
5686 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5687 return const_binop (code, fold_convert (ctype, t),
5688 fold_convert (ctype, c), 0);
5689 break;
5690
5691 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5692 /* If op0 is an expression ... */
5693 if ((COMPARISON_CLASS_P (op0)
5694 || UNARY_CLASS_P (op0)
5695 || BINARY_CLASS_P (op0)
5696 || VL_EXP_CLASS_P (op0)
5697 || EXPRESSION_CLASS_P (op0))
5698 /* ... and is unsigned, and its type is smaller than ctype,
5699 then we cannot pass through as widening. */
5700 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5701 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5702 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5703 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5704 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5705 /* ... or this is a truncation (t is narrower than op0),
5706 then we cannot pass through this narrowing. */
5707 || (GET_MODE_SIZE (TYPE_MODE (type))
5708 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5709 /* ... or signedness changes for division or modulus,
5710 then we cannot pass through this conversion. */
5711 || (code != MULT_EXPR
5712 && (TYPE_UNSIGNED (ctype)
5713 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5714 break;
5715
5716 /* Pass the constant down and see if we can make a simplification. If
5717 we can, replace this expression with the inner simplification for
5718 possible later conversion to our or some other type. */
5719 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5720 && TREE_CODE (t2) == INTEGER_CST
5721 && !TREE_OVERFLOW (t2)
5722 && (0 != (t1 = extract_muldiv (op0, t2, code,
5723 code == MULT_EXPR
5724 ? ctype : NULL_TREE,
5725 strict_overflow_p))))
5726 return t1;
5727 break;
5728
5729 case ABS_EXPR:
5730 /* If widening the type changes it from signed to unsigned, then we
5731 must avoid building ABS_EXPR itself as unsigned. */
5732 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5733 {
5734 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5735 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5736 != 0)
5737 {
5738 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5739 return fold_convert (ctype, t1);
5740 }
5741 break;
5742 }
5743 /* FALLTHROUGH */
5744 case NEGATE_EXPR:
5745 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5746 != 0)
5747 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5748 break;
5749
5750 case MIN_EXPR: case MAX_EXPR:
5751 /* If widening the type changes the signedness, then we can't perform
5752 this optimization as that changes the result. */
5753 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5754 break;
5755
5756 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5757 sub_strict_overflow_p = false;
5758 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5759 &sub_strict_overflow_p)) != 0
5760 && (t2 = extract_muldiv (op1, c, code, wide_type,
5761 &sub_strict_overflow_p)) != 0)
5762 {
5763 if (tree_int_cst_sgn (c) < 0)
5764 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5765 if (sub_strict_overflow_p)
5766 *strict_overflow_p = true;
5767 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5768 fold_convert (ctype, t2));
5769 }
5770 break;
5771
5772 case LSHIFT_EXPR: case RSHIFT_EXPR:
5773 /* If the second operand is constant, this is a multiplication
5774 or floor division by a power of two, so we can treat it that
5775 way unless the multiplier or divisor overflows. Signed
5776 left-shift overflow is implementation-defined rather than
5777 undefined in C90, so do not convert signed left shift into
5778 multiplication. */
5779 if (TREE_CODE (op1) == INTEGER_CST
5780 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5781 /* const_binop may not detect overflow correctly,
5782 so check for it explicitly here. */
5783 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5784 && TREE_INT_CST_HIGH (op1) == 0
5785 && 0 != (t1 = fold_convert (ctype,
5786 const_binop (LSHIFT_EXPR,
5787 size_one_node,
5788 op1, 0)))
5789 && !TREE_OVERFLOW (t1))
5790 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5791 ? MULT_EXPR : FLOOR_DIV_EXPR,
5792 ctype, fold_convert (ctype, op0), t1),
5793 c, code, wide_type, strict_overflow_p);
5794 break;
5795
5796 case PLUS_EXPR: case MINUS_EXPR:
5797 /* See if we can eliminate the operation on both sides. If we can, we
5798 can return a new PLUS or MINUS. If we can't, the only remaining
5799 cases where we can do anything are if the second operand is a
5800 constant. */
5801 sub_strict_overflow_p = false;
5802 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5803 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5804 if (t1 != 0 && t2 != 0
5805 && (code == MULT_EXPR
5806 /* If not multiplication, we can only do this if both operands
5807 are divisible by c. */
5808 || (multiple_of_p (ctype, op0, c)
5809 && multiple_of_p (ctype, op1, c))))
5810 {
5811 if (sub_strict_overflow_p)
5812 *strict_overflow_p = true;
5813 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5814 fold_convert (ctype, t2));
5815 }
5816
5817 /* If this was a subtraction, negate OP1 and set it to be an addition.
5818 This simplifies the logic below. */
5819 if (tcode == MINUS_EXPR)
5820 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5821
5822 if (TREE_CODE (op1) != INTEGER_CST)
5823 break;
5824
5825 /* If either OP1 or C are negative, this optimization is not safe for
5826 some of the division and remainder types while for others we need
5827 to change the code. */
5828 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5829 {
5830 if (code == CEIL_DIV_EXPR)
5831 code = FLOOR_DIV_EXPR;
5832 else if (code == FLOOR_DIV_EXPR)
5833 code = CEIL_DIV_EXPR;
5834 else if (code != MULT_EXPR
5835 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5836 break;
5837 }
5838
5839 /* If this is a multiply, or a division/modulus where OP1 is a multiple
5840 of our constant, do the operation and verify it doesn't overflow. */
5841 if (code == MULT_EXPR
5842 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5843 {
5844 op1 = const_binop (code, fold_convert (ctype, op1),
5845 fold_convert (ctype, c), 0);
5846 /* We allow the constant to overflow with wrapping semantics. */
5847 if (op1 == 0
5848 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5849 break;
5850 }
5851 else
5852 break;
5853
5854 /* If we have an unsigned type that is not a sizetype, we cannot widen
5855 the operation since it will change the result if the original
5856 computation overflowed. */
5857 if (TYPE_UNSIGNED (ctype)
5858 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5859 && ctype != type)
5860 break;
5861
5862 /* If we were able to eliminate our operation from the first side,
5863 apply our operation to the second side and reform the PLUS. */
5864 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5865 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5866
5867 /* The last case is if we are a multiply. In that case, we can
5868 apply the distributive law to commute the multiply and addition
5869 if the multiplication of the constants doesn't overflow. */
5870 if (code == MULT_EXPR)
5871 return fold_build2 (tcode, ctype,
5872 fold_build2 (code, ctype,
5873 fold_convert (ctype, op0),
5874 fold_convert (ctype, c)),
5875 op1);
5876
5877 break;
5878
5879 case MULT_EXPR:
5880 /* We have a special case here if we are doing something like
5881 (C * 8) % 4 since we know that's zero. */
5882 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5883 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5884 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5885 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5886 return omit_one_operand (type, integer_zero_node, op0);
5887
5888 /* ... fall through ... */
5889
5890 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5891 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5892 /* If we can extract our operation from the LHS, do so and return a
5893 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5894 do something only if the second operand is a constant. */
5895 if (same_p
5896 && (t1 = extract_muldiv (op0, c, code, wide_type,
5897 strict_overflow_p)) != 0)
5898 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5899 fold_convert (ctype, op1));
5900 else if (tcode == MULT_EXPR && code == MULT_EXPR
5901 && (t1 = extract_muldiv (op1, c, code, wide_type,
5902 strict_overflow_p)) != 0)
5903 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5904 fold_convert (ctype, t1));
5905 else if (TREE_CODE (op1) != INTEGER_CST)
5906 return 0;
5907
5908 /* If these are the same operation types, we can associate them
5909 assuming no overflow. */
5910 if (tcode == code
5911 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5912 fold_convert (ctype, c), 0))
5913 && !TREE_OVERFLOW (t1))
5914 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5915
5916 /* If these operations "cancel" each other, we have the main
5917 optimizations of this pass, which occur when either constant is a
5918 multiple of the other, in which case we replace this with either an
5919 operation of CODE or TCODE.
5920
5921 If we have an unsigned type that is not a sizetype, we cannot do
5922 this since it will change the result if the original computation
5923 overflowed. */
5924 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5925 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5926 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5927 || (tcode == MULT_EXPR
5928 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5929 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5930 {
5931 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5932 {
5933 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5934 *strict_overflow_p = true;
5935 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5936 fold_convert (ctype,
5937 const_binop (TRUNC_DIV_EXPR,
5938 op1, c, 0)));
5939 }
5940 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5941 {
5942 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5943 *strict_overflow_p = true;
5944 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5945 fold_convert (ctype,
5946 const_binop (TRUNC_DIV_EXPR,
5947 c, op1, 0)));
5948 }
5949 }
5950 break;
5951
5952 default:
5953 break;
5954 }
5955
5956 return 0;
5957 }
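
/* Illustrative sketch, not part of fold-const.c: the arithmetic
   identities extract_muldiv relies on, checked on concrete values.
   Assumes none of the intermediate computations overflow, which is
   what the strict-overflow bookkeeping above guards.  */
#include <assert.h>

static void
check_extract_muldiv (long x, long y)
{
  /* (X * 8 + Y * 16) / 4  ->  X * 2 + Y * 4  */
  assert ((x * 8 + y * 16) / 4 == x * 2 + y * 4);
  /* (X + 7) * 4  ->  X * 4 + 28, canonicalized in the hope of a
     multiply-accumulate insn or an addressing mode  */
  assert ((x + 7) * 4 == x * 4 + 28);
  /* (X * 8) % 4  ->  0  */
  assert ((x * 8) % 4 == 0);
}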
5958 \f
5959 /* Return a node which has the indicated constant VALUE (either 0 or
5960 1), and is of the indicated TYPE. */
5961
5962 tree
5963 constant_boolean_node (int value, tree type)
5964 {
5965 if (type == integer_type_node)
5966 return value ? integer_one_node : integer_zero_node;
5967 else if (type == boolean_type_node)
5968 return value ? boolean_true_node : boolean_false_node;
5969 else
5970 return build_int_cst (type, value);
5971 }
5972
5973
5974 /* Return true if expr looks like an ARRAY_REF and set base and
5975 offset to the appropriate trees. If there is no offset,
5976 offset is set to NULL_TREE. Base will be canonicalized to
5977 something you can get the element type from using
5978 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5979 in bytes relative to the base. */
5980
5981 static bool
5982 extract_array_ref (tree expr, tree *base, tree *offset)
5983 {
5984 /* One canonical form is a PLUS_EXPR with the first
5985 argument being an ADDR_EXPR with a possible NOP_EXPR
5986 attached. */
5987 if (TREE_CODE (expr) == PLUS_EXPR)
5988 {
5989 tree op0 = TREE_OPERAND (expr, 0);
5990 tree inner_base, dummy1;
5991 /* Strip NOP_EXPRs here because the C frontends and/or
5992 folders may present us with (int *)&x.a + 4B. */
5993 STRIP_NOPS (op0);
5994 if (extract_array_ref (op0, &inner_base, &dummy1))
5995 {
5996 *base = inner_base;
5997 if (dummy1 == NULL_TREE)
5998 *offset = TREE_OPERAND (expr, 1);
5999 else
6000 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
6001 dummy1, TREE_OPERAND (expr, 1));
6002 return true;
6003 }
6004 }
6005 /* The other canonical form is an ADDR_EXPR of an ARRAY_REF,
6006 which we transform into an ADDR_EXPR with appropriate
6007 offset. For other arguments to the ADDR_EXPR we assume
6008 zero offset and as such do not care about the ADDR_EXPR
6009 type and strip possible nops from it. */
6010 else if (TREE_CODE (expr) == ADDR_EXPR)
6011 {
6012 tree op0 = TREE_OPERAND (expr, 0);
6013 if (TREE_CODE (op0) == ARRAY_REF)
6014 {
6015 tree idx = TREE_OPERAND (op0, 1);
6016 *base = TREE_OPERAND (op0, 0);
6017 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
6018 array_ref_element_size (op0));
6019 }
6020 else
6021 {
6022 /* Handle array-to-pointer decay as &a. */
6023 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
6024 *base = TREE_OPERAND (expr, 0);
6025 else
6026 *base = expr;
6027 *offset = NULL_TREE;
6028 }
6029 return true;
6030 }
6031 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
6032 else if (SSA_VAR_P (expr)
6033 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
6034 {
6035 *base = expr;
6036 *offset = NULL_TREE;
6037 return true;
6038 }
6039
6040 return false;
6041 }
6042
6043
6044 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6045 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6046 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6047 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6048 COND is the first argument to CODE; otherwise (as in the example
6049 given here), it is the second argument. TYPE is the type of the
6050 original expression. Return NULL_TREE if no simplification is
6051 possible. */
6052
6053 static tree
6054 fold_binary_op_with_conditional_arg (enum tree_code code,
6055 tree type, tree op0, tree op1,
6056 tree cond, tree arg, int cond_first_p)
6057 {
6058 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6059 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6060 tree test, true_value, false_value;
6061 tree lhs = NULL_TREE;
6062 tree rhs = NULL_TREE;
6063
6064 /* This transformation is only worthwhile if we don't have to wrap
6065 arg in a SAVE_EXPR, and the operation can be simplified on at least
6066 one of the branches once it's pushed inside the COND_EXPR. */
6067 if (!TREE_CONSTANT (arg))
6068 return NULL_TREE;
6069
6070 if (TREE_CODE (cond) == COND_EXPR)
6071 {
6072 test = TREE_OPERAND (cond, 0);
6073 true_value = TREE_OPERAND (cond, 1);
6074 false_value = TREE_OPERAND (cond, 2);
6075 /* If this operand throws an exception, then it does not make
6076 sense to try to perform a logical or arithmetic operation
6077 involving it. */
6078 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6079 lhs = true_value;
6080 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6081 rhs = false_value;
6082 }
6083 else
6084 {
6085 tree testtype = TREE_TYPE (cond);
6086 test = cond;
6087 true_value = constant_boolean_node (true, testtype);
6088 false_value = constant_boolean_node (false, testtype);
6089 }
6090
6091 arg = fold_convert (arg_type, arg);
6092 if (lhs == 0)
6093 {
6094 true_value = fold_convert (cond_type, true_value);
6095 if (cond_first_p)
6096 lhs = fold_build2 (code, type, true_value, arg);
6097 else
6098 lhs = fold_build2 (code, type, arg, true_value);
6099 }
6100 if (rhs == 0)
6101 {
6102 false_value = fold_convert (cond_type, false_value);
6103 if (cond_first_p)
6104 rhs = fold_build2 (code, type, false_value, arg);
6105 else
6106 rhs = fold_build2 (code, type, arg, false_value);
6107 }
6108
6109 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6110 return fold_convert (type, test);
6111 }
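
/* Illustrative sketch, not part of fold-const.c: the distribution
   performed above, restated on plain values.  The function name is
   hypothetical; the constant A mirrors the TREE_CONSTANT (arg) guard,
   and overflow corner cases are ignored.  */
#include <assert.h>

static void
check_cond_distribute (int b, int x, int y)
{
  const int a = 7;
  /* a + (b ? x : y)  ->  b ? (a + x) : (a + y)  */
  assert (a + (b ? x : y) == (b ? a + x : a + y));
  /* a + (x < y)  ->  (x < y) ? (a + 1) : (a + 0)  */
  assert (a + (x < y) == ((x < y) ? a + 1 : a + 0));
}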
6112
6113 \f
6114 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6115
6116 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6117 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6118 ADDEND is the same as X.
6119
6120 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6121 and finite. The problematic cases are when X is zero, and its mode
6122 has signed zeros. In the case of rounding towards -infinity,
6123 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6124 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6125
6126 static bool
6127 fold_real_zero_addition_p (tree type, tree addend, int negate)
6128 {
6129 if (!real_zerop (addend))
6130 return false;
6131
6132 /* Don't allow the fold with -fsignaling-nans. */
6133 if (HONOR_SNANS (TYPE_MODE (type)))
6134 return false;
6135
6136 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6137 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6138 return true;
6139
6140 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6141 if (TREE_CODE (addend) == REAL_CST
6142 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6143 negate = !negate;
6144
6145 /* The mode has signed zeros, and we have to honor their sign.
6146 In this situation, there is only one case we can return true for.
6147 X - 0 is the same as X unless rounding towards -infinity is
6148 supported. */
6149 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6150 }
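
/* Illustrative sketch, not part of fold-const.c: why X + 0.0 is not
   always X once signed zeros are honored.  Hypothetical standalone
   demo; assumes IEEE arithmetic in the default round-to-nearest mode
   and no -ffast-math.  */
#include <assert.h>
#include <math.h>

static void
check_signed_zero_addition (void)
{
  double neg_zero = -0.0;
  /* -0.0 + 0.0 is +0.0 in round-to-nearest, so the sign is lost:
     X + 0 is not an identity when X may be -0.0.  */
  assert (!signbit (neg_zero + 0.0));
  /* -0.0 - 0.0 keeps the sign, so X - 0 is safe unless rounding
     towards -infinity is in effect (there 0.0 - 0.0 would be -0.0).  */
  assert (signbit (neg_zero - 0.0));
}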
6151
6152 /* Subroutine of fold() that checks comparisons of built-in math
6153 functions against real constants.
6154
6155 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6156 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6157 is the type of the result and ARG0 and ARG1 are the operands of the
6158 comparison. ARG1 must be a TREE_REAL_CST.
6159
6160 The function returns the constant folded tree if a simplification
6161 can be made, and NULL_TREE otherwise. */
6162
6163 static tree
6164 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6165 tree type, tree arg0, tree arg1)
6166 {
6167 REAL_VALUE_TYPE c;
6168
6169 if (BUILTIN_SQRT_P (fcode))
6170 {
6171 tree arg = CALL_EXPR_ARG (arg0, 0);
6172 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6173
6174 c = TREE_REAL_CST (arg1);
6175 if (REAL_VALUE_NEGATIVE (c))
6176 {
6177 /* sqrt(x) ==, < or <= y is always false, if y is negative. */
6178 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6179 return omit_one_operand (type, integer_zero_node, arg);
6180
6181 /* sqrt(x) > y is always true, if y is negative and we
6182 don't care about NaNs, i.e. negative values of x. */
6183 if (code == NE_EXPR || !HONOR_NANS (mode))
6184 return omit_one_operand (type, integer_one_node, arg);
6185
6186 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6187 return fold_build2 (GE_EXPR, type, arg,
6188 build_real (TREE_TYPE (arg), dconst0));
6189 }
6190 else if (code == GT_EXPR || code == GE_EXPR)
6191 {
6192 REAL_VALUE_TYPE c2;
6193
6194 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6195 real_convert (&c2, mode, &c2);
6196
6197 if (REAL_VALUE_ISINF (c2))
6198 {
6199 /* sqrt(x) > y is x == +Inf, when y is very large. */
6200 if (HONOR_INFINITIES (mode))
6201 return fold_build2 (EQ_EXPR, type, arg,
6202 build_real (TREE_TYPE (arg), c2));
6203
6204 /* sqrt(x) > y is always false, when y is very large
6205 and we don't care about infinities. */
6206 return omit_one_operand (type, integer_zero_node, arg);
6207 }
6208
6209 /* sqrt(x) > c is the same as x > c*c. */
6210 return fold_build2 (code, type, arg,
6211 build_real (TREE_TYPE (arg), c2));
6212 }
6213 else if (code == LT_EXPR || code == LE_EXPR)
6214 {
6215 REAL_VALUE_TYPE c2;
6216
6217 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6218 real_convert (&c2, mode, &c2);
6219
6220 if (REAL_VALUE_ISINF (c2))
6221 {
6222 /* sqrt(x) < y is always true, when y is a very large
6223 value and we don't care about NaNs or Infinities. */
6224 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6225 return omit_one_operand (type, integer_one_node, arg);
6226
6227 /* sqrt(x) < y is x != +Inf when y is very large and we
6228 don't care about NaNs. */
6229 if (! HONOR_NANS (mode))
6230 return fold_build2 (NE_EXPR, type, arg,
6231 build_real (TREE_TYPE (arg), c2));
6232
6233 /* sqrt(x) < y is x >= 0 when y is very large and we
6234 don't care about Infinities. */
6235 if (! HONOR_INFINITIES (mode))
6236 return fold_build2 (GE_EXPR, type, arg,
6237 build_real (TREE_TYPE (arg), dconst0));
6238
6239 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6240 if (lang_hooks.decls.global_bindings_p () != 0
6241 || CONTAINS_PLACEHOLDER_P (arg))
6242 return NULL_TREE;
6243
6244 arg = save_expr (arg);
6245 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6246 fold_build2 (GE_EXPR, type, arg,
6247 build_real (TREE_TYPE (arg),
6248 dconst0)),
6249 fold_build2 (NE_EXPR, type, arg,
6250 build_real (TREE_TYPE (arg),
6251 c2)));
6252 }
6253
6254 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6255 if (! HONOR_NANS (mode))
6256 return fold_build2 (code, type, arg,
6257 build_real (TREE_TYPE (arg), c2));
6258
6259 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6260 if (lang_hooks.decls.global_bindings_p () == 0
6261 && ! CONTAINS_PLACEHOLDER_P (arg))
6262 {
6263 arg = save_expr (arg);
6264 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6265 fold_build2 (GE_EXPR, type, arg,
6266 build_real (TREE_TYPE (arg),
6267 dconst0)),
6268 fold_build2 (code, type, arg,
6269 build_real (TREE_TYPE (arg),
6270 c2)));
6271 }
6272 }
6273 }
6274
6275 return NULL_TREE;
6276 }
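
/* Illustrative sketch, not part of fold-const.c: the sqrt comparison
   folds, using a bound whose square is exact (3.0 * 3.0 == 9.0) so the
   rewrite can be checked directly.  Hypothetical demo name; x is
   assumed non-negative and finite.  */
#include <assert.h>
#include <math.h>

static void
check_sqrt_compare (double x)
{
  assert (x >= 0.0 && isfinite (x));
  /* sqrt(x) > c  ->  x > c*c: sqrt is monotonic and sqrt(9) == 3.  */
  assert ((sqrt (x) > 3.0) == (x > 9.0));
  /* A negative bound: sqrt(x) is never negative, so sqrt(x) <= -1 is
     false and sqrt(x) > -1 is true for any non-NaN operand.  */
  assert (!(sqrt (x) <= -1.0));
  assert (sqrt (x) > -1.0);
}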
6277
6278 /* Subroutine of fold() that optimizes comparisons against Infinities,
6279 either +Inf or -Inf.
6280
6281 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6282 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6283 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6284
6285 The function returns the constant folded tree if a simplification
6286 can be made, and NULL_TREE otherwise. */
6287
6288 static tree
6289 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6290 {
6291 enum machine_mode mode;
6292 REAL_VALUE_TYPE max;
6293 tree temp;
6294 bool neg;
6295
6296 mode = TYPE_MODE (TREE_TYPE (arg0));
6297
6298 /* For negative infinity swap the sense of the comparison. */
6299 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6300 if (neg)
6301 code = swap_tree_comparison (code);
6302
6303 switch (code)
6304 {
6305 case GT_EXPR:
6306 /* x > +Inf is always false, if we ignore sNaNs. */
6307 if (HONOR_SNANS (mode))
6308 return NULL_TREE;
6309 return omit_one_operand (type, integer_zero_node, arg0);
6310
6311 case LE_EXPR:
6312 /* x <= +Inf is always true, if we don't care about NaNs. */
6313 if (! HONOR_NANS (mode))
6314 return omit_one_operand (type, integer_one_node, arg0);
6315
6316 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6317 if (lang_hooks.decls.global_bindings_p () == 0
6318 && ! CONTAINS_PLACEHOLDER_P (arg0))
6319 {
6320 arg0 = save_expr (arg0);
6321 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6322 }
6323 break;
6324
6325 case EQ_EXPR:
6326 case GE_EXPR:
6327 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6328 real_maxval (&max, neg, mode);
6329 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6330 arg0, build_real (TREE_TYPE (arg0), max));
6331
6332 case LT_EXPR:
6333 /* x < +Inf is always equal to x <= DBL_MAX. */
6334 real_maxval (&max, neg, mode);
6335 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6336 arg0, build_real (TREE_TYPE (arg0), max));
6337
6338 case NE_EXPR:
6339 /* x != +Inf is always equal to !(x > DBL_MAX). */
6340 real_maxval (&max, neg, mode);
6341 if (! HONOR_NANS (mode))
6342 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6343 arg0, build_real (TREE_TYPE (arg0), max));
6344
6345 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6346 arg0, build_real (TREE_TYPE (arg0), max));
6347 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6348
6349 default:
6350 break;
6351 }
6352
6353 return NULL_TREE;
6354 }
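
/* Illustrative sketch, not part of fold-const.c: the Inf comparison
   folds, checked directly.  Hypothetical demo name; requires IEEE
   doubles (INFINITY and DBL_MAX).  */
#include <assert.h>
#include <float.h>
#include <math.h>

static void
check_inf_compare (double x)
{
  if (isnan (x))
    return;
  /* x == +Inf and x >= +Inf both reduce to x > DBL_MAX.  */
  assert ((x == INFINITY) == (x > DBL_MAX));
  assert ((x >= INFINITY) == (x > DBL_MAX));
  /* x < +Inf reduces to x <= DBL_MAX.  */
  assert ((x < INFINITY) == (x <= DBL_MAX));
  /* x <= +Inf is always true for non-NaN x.  */
  assert (x <= INFINITY);
}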
6355
6356 /* Subroutine of fold() that optimizes comparisons of a division by
6357 a nonzero integer constant against an integer constant, i.e.
6358 X/C1 op C2.
6359
6360 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6361 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6362 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6363
6364 The function returns the constant folded tree if a simplification
6365 can be made, and NULL_TREE otherwise. */
6366
6367 static tree
6368 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6369 {
6370 tree prod, tmp, hi, lo;
6371 tree arg00 = TREE_OPERAND (arg0, 0);
6372 tree arg01 = TREE_OPERAND (arg0, 1);
6373 unsigned HOST_WIDE_INT lpart;
6374 HOST_WIDE_INT hpart;
6375 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6376 bool neg_overflow;
6377 int overflow;
6378
6379 /* We have to do this the hard way to detect unsigned overflow.
6380 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6381 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6382 TREE_INT_CST_HIGH (arg01),
6383 TREE_INT_CST_LOW (arg1),
6384 TREE_INT_CST_HIGH (arg1),
6385 &lpart, &hpart, unsigned_p);
6386 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6387 -1, overflow);
6388 neg_overflow = false;
6389
6390 if (unsigned_p)
6391 {
6392 tmp = int_const_binop (MINUS_EXPR, arg01,
6393 build_int_cst (TREE_TYPE (arg01), 1), 0);
6394 lo = prod;
6395
6396 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6397 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6398 TREE_INT_CST_HIGH (prod),
6399 TREE_INT_CST_LOW (tmp),
6400 TREE_INT_CST_HIGH (tmp),
6401 &lpart, &hpart, unsigned_p);
6402 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6403 -1, overflow | TREE_OVERFLOW (prod));
6404 }
6405 else if (tree_int_cst_sgn (arg01) >= 0)
6406 {
6407 tmp = int_const_binop (MINUS_EXPR, arg01,
6408 build_int_cst (TREE_TYPE (arg01), 1), 0);
6409 switch (tree_int_cst_sgn (arg1))
6410 {
6411 case -1:
6412 neg_overflow = true;
6413 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6414 hi = prod;
6415 break;
6416
6417 case 0:
6418 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6419 hi = tmp;
6420 break;
6421
6422 case 1:
6423 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6424 lo = prod;
6425 break;
6426
6427 default:
6428 gcc_unreachable ();
6429 }
6430 }
6431 else
6432 {
6433 /* A negative divisor reverses the relational operators. */
6434 code = swap_tree_comparison (code);
6435
6436 tmp = int_const_binop (PLUS_EXPR, arg01,
6437 build_int_cst (TREE_TYPE (arg01), 1), 0);
6438 switch (tree_int_cst_sgn (arg1))
6439 {
6440 case -1:
6441 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6442 lo = prod;
6443 break;
6444
6445 case 0:
6446 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6447 lo = tmp;
6448 break;
6449
6450 case 1:
6451 neg_overflow = true;
6452 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6453 hi = prod;
6454 break;
6455
6456 default:
6457 gcc_unreachable ();
6458 }
6459 }
6460
6461 switch (code)
6462 {
6463 case EQ_EXPR:
6464 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6465 return omit_one_operand (type, integer_zero_node, arg00);
6466 if (TREE_OVERFLOW (hi))
6467 return fold_build2 (GE_EXPR, type, arg00, lo);
6468 if (TREE_OVERFLOW (lo))
6469 return fold_build2 (LE_EXPR, type, arg00, hi);
6470 return build_range_check (type, arg00, 1, lo, hi);
6471
6472 case NE_EXPR:
6473 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6474 return omit_one_operand (type, integer_one_node, arg00);
6475 if (TREE_OVERFLOW (hi))
6476 return fold_build2 (LT_EXPR, type, arg00, lo);
6477 if (TREE_OVERFLOW (lo))
6478 return fold_build2 (GT_EXPR, type, arg00, hi);
6479 return build_range_check (type, arg00, 0, lo, hi);
6480
6481 case LT_EXPR:
6482 if (TREE_OVERFLOW (lo))
6483 {
6484 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6485 return omit_one_operand (type, tmp, arg00);
6486 }
6487 return fold_build2 (LT_EXPR, type, arg00, lo);
6488
6489 case LE_EXPR:
6490 if (TREE_OVERFLOW (hi))
6491 {
6492 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6493 return omit_one_operand (type, tmp, arg00);
6494 }
6495 return fold_build2 (LE_EXPR, type, arg00, hi);
6496
6497 case GT_EXPR:
6498 if (TREE_OVERFLOW (hi))
6499 {
6500 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6501 return omit_one_operand (type, tmp, arg00);
6502 }
6503 return fold_build2 (GT_EXPR, type, arg00, hi);
6504
6505 case GE_EXPR:
6506 if (TREE_OVERFLOW (lo))
6507 {
6508 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6509 return omit_one_operand (type, tmp, arg00);
6510 }
6511 return fold_build2 (GE_EXPR, type, arg00, lo);
6512
6513 default:
6514 break;
6515 }
6516
6517 return NULL_TREE;
6518 }
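
/* Illustrative sketch, not part of fold-const.c: the X/C1 op C2
   rewrite as a range check, shown for unsigned division where
   truncation is a plain floor.  Hypothetical demo name.  */
#include <assert.h>

static void
check_div_compare (unsigned x)
{
  /* x / 4 == 3  ->  12 <= x && x <= 15  */
  assert ((x / 4 == 3) == (x >= 12 && x <= 15));
  /* x / 4 < 3   ->  x < 12  */
  assert ((x / 4 < 3) == (x < 12));
  /* x / 4 > 3   ->  x > 15  */
  assert ((x / 4 > 3) == (x > 15));
}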
6519
6520
6521 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6522 equality/inequality test, then return a simplified form of the test
6523 using a sign test. Otherwise return NULL. TYPE is the desired
6524 result type. */
6525
6526 static tree
6527 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6528 tree result_type)
6529 {
6530 /* If this is testing a single bit, we can optimize the test. */
6531 if ((code == NE_EXPR || code == EQ_EXPR)
6532 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6533 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6534 {
6535 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6536 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6537 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6538
6539 if (arg00 != NULL_TREE
6540 /* This is only a win if casting to a signed type is cheap,
6541 i.e. when arg00's type is not a partial mode. */
6542 && TYPE_PRECISION (TREE_TYPE (arg00))
6543 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6544 {
6545 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6546 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6547 result_type, fold_convert (stype, arg00),
6548 build_int_cst (stype, 0));
6549 }
6550 }
6551
6552 return NULL_TREE;
6553 }
6554
6555 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6556 equality/inequality test, then return a simplified form of
6557 the test using shifts and logical operations. Otherwise return
6558 NULL. TYPE is the desired result type. */
6559
6560 tree
6561 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6562 tree result_type)
6563 {
6564 /* If this is testing a single bit, we can optimize the test. */
6565 if ((code == NE_EXPR || code == EQ_EXPR)
6566 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6567 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6568 {
6569 tree inner = TREE_OPERAND (arg0, 0);
6570 tree type = TREE_TYPE (arg0);
6571 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6572 enum machine_mode operand_mode = TYPE_MODE (type);
6573 int ops_unsigned;
6574 tree signed_type, unsigned_type, intermediate_type;
6575 tree tem, one;
6576
6577 /* First, see if we can fold the single bit test into a sign-bit
6578 test. */
6579 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6580 result_type);
6581 if (tem)
6582 return tem;
6583
6584 /* Otherwise we have (A & C) != 0 where C is a single bit,
6585 convert that into ((A >> C2) & 1), where C2 = log2(C).
6586 Similarly for (A & C) == 0. */
6587
6588 /* If INNER is a right shift of a constant and it plus BITNUM does
6589 not overflow, adjust BITNUM and INNER. */
6590 if (TREE_CODE (inner) == RSHIFT_EXPR
6591 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6592 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6593 && bitnum < TYPE_PRECISION (type)
6594 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6595 bitnum - TYPE_PRECISION (type)))
6596 {
6597 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6598 inner = TREE_OPERAND (inner, 0);
6599 }
6600
6601 /* If we are going to be able to omit the AND below, we must do our
6602 operations as unsigned. If we must use the AND, we have a choice.
6603 Normally unsigned is faster, but for some machines signed is. */
6604 #ifdef LOAD_EXTEND_OP
6605 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6606 && !flag_syntax_only) ? 0 : 1;
6607 #else
6608 ops_unsigned = 1;
6609 #endif
6610
6611 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6612 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6613 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6614 inner = fold_convert (intermediate_type, inner);
6615
6616 if (bitnum != 0)
6617 inner = build2 (RSHIFT_EXPR, intermediate_type,
6618 inner, size_int (bitnum));
6619
6620 one = build_int_cst (intermediate_type, 1);
6621
6622 if (code == EQ_EXPR)
6623 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6624
6625 /* Put the AND last so it can combine with more things. */
6626 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6627
6628 /* Make sure to return the proper type. */
6629 inner = fold_convert (result_type, inner);
6630
6631 return inner;
6632 }
6633 return NULL_TREE;
6634 }
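
/* Illustrative sketch, not part of fold-const.c: the two single-bit
   rewrites above on a 32-bit int.  Hypothetical demo name; assumes
   the usual two's complement representation.  */
#include <assert.h>
#include <stdint.h>

static void
check_single_bit_tests (int32_t a)
{
  /* Sign-bit test: (A & 0x80000000) != 0  ->  A < 0.  */
  assert ((((uint32_t) a & 0x80000000u) != 0) == (a < 0));
  /* General test with C == 8, so C2 == log2 (C) == 3:
     (A & 8) != 0  ->  ((A >> 3) & 1) != 0.  */
  assert (((a & 8) != 0) == ((((uint32_t) a >> 3) & 1) != 0));
}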
6635
6636 /* Check whether we are allowed to reorder operands arg0 and arg1,
6637 such that the evaluation of arg1 occurs before arg0. */
6638
6639 static bool
6640 reorder_operands_p (tree arg0, tree arg1)
6641 {
6642 if (! flag_evaluation_order)
6643 return true;
6644 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6645 return true;
6646 return ! TREE_SIDE_EFFECTS (arg0)
6647 && ! TREE_SIDE_EFFECTS (arg1);
6648 }
6649
6650 /* Test whether it is preferable to swap two operands, ARG0 and
6651 ARG1, for example because ARG0 is an integer constant and ARG1
6652 isn't. If REORDER is true, only recommend swapping if we can
6653 evaluate the operands in reverse order. */
6654
6655 bool
6656 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6657 {
6658 STRIP_SIGN_NOPS (arg0);
6659 STRIP_SIGN_NOPS (arg1);
6660
6661 if (TREE_CODE (arg1) == INTEGER_CST)
6662 return 0;
6663 if (TREE_CODE (arg0) == INTEGER_CST)
6664 return 1;
6665
6666 if (TREE_CODE (arg1) == REAL_CST)
6667 return 0;
6668 if (TREE_CODE (arg0) == REAL_CST)
6669 return 1;
6670
6671 if (TREE_CODE (arg1) == COMPLEX_CST)
6672 return 0;
6673 if (TREE_CODE (arg0) == COMPLEX_CST)
6674 return 1;
6675
6676 if (TREE_CONSTANT (arg1))
6677 return 0;
6678 if (TREE_CONSTANT (arg0))
6679 return 1;
6680
6681 if (optimize_size)
6682 return 0;
6683
6684 if (reorder && flag_evaluation_order
6685 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6686 return 0;
6687
6688 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6689 for commutative and comparison operators. Ensuring a canonical
6690 form allows the optimizers to find additional redundancies without
6691 having to explicitly check for both orderings. */
6692 if (TREE_CODE (arg0) == SSA_NAME
6693 && TREE_CODE (arg1) == SSA_NAME
6694 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6695 return 1;
6696
6697 /* Put SSA_NAMEs last. */
6698 if (TREE_CODE (arg1) == SSA_NAME)
6699 return 0;
6700 if (TREE_CODE (arg0) == SSA_NAME)
6701 return 1;
6702
6703 /* Put variables last. */
6704 if (DECL_P (arg1))
6705 return 0;
6706 if (DECL_P (arg0))
6707 return 1;
6708
6709 return 0;
6710 }
6711
6712 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6713 ARG0 is extended to a wider type. */
6714
6715 static tree
6716 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6717 {
6718 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6719 tree arg1_unw;
6720 tree shorter_type, outer_type;
6721 tree min, max;
6722 bool above, below;
6723
6724 if (arg0_unw == arg0)
6725 return NULL_TREE;
6726 shorter_type = TREE_TYPE (arg0_unw);
6727
6728 #ifdef HAVE_canonicalize_funcptr_for_compare
6729 /* Disable this optimization if we're casting a function pointer
6730 type on targets that require function pointer canonicalization. */
6731 if (HAVE_canonicalize_funcptr_for_compare
6732 && TREE_CODE (shorter_type) == POINTER_TYPE
6733 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6734 return NULL_TREE;
6735 #endif
6736
6737 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6738 return NULL_TREE;
6739
6740 arg1_unw = get_unwidened (arg1, shorter_type);
6741
6742 /* If possible, express the comparison in the shorter mode. */
6743 if ((code == EQ_EXPR || code == NE_EXPR
6744 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6745 && (TREE_TYPE (arg1_unw) == shorter_type
6746 || (TREE_CODE (arg1_unw) == INTEGER_CST
6747 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6748 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6749 && int_fits_type_p (arg1_unw, shorter_type))))
6750 return fold_build2 (code, type, arg0_unw,
6751 fold_convert (shorter_type, arg1_unw));
6752
6753 if (TREE_CODE (arg1_unw) != INTEGER_CST
6754 || TREE_CODE (shorter_type) != INTEGER_TYPE
6755 || !int_fits_type_p (arg1_unw, shorter_type))
6756 return NULL_TREE;
6757
6758 /* If we are comparing with an integer that does not fit into the range
6759 of the shorter type, the result is known. */
6760 outer_type = TREE_TYPE (arg1_unw);
6761 min = lower_bound_in_type (outer_type, shorter_type);
6762 max = upper_bound_in_type (outer_type, shorter_type);
6763
6764 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6765 max, arg1_unw));
6766 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6767 arg1_unw, min));
6768
6769 switch (code)
6770 {
6771 case EQ_EXPR:
6772 if (above || below)
6773 return omit_one_operand (type, integer_zero_node, arg0);
6774 break;
6775
6776 case NE_EXPR:
6777 if (above || below)
6778 return omit_one_operand (type, integer_one_node, arg0);
6779 break;
6780
6781 case LT_EXPR:
6782 case LE_EXPR:
6783 if (above)
6784 return omit_one_operand (type, integer_one_node, arg0);
6785 else if (below)
6786 return omit_one_operand (type, integer_zero_node, arg0);
6787 break;
6788 case GT_EXPR:
6789 case GE_EXPR:
6790 if (above)
6791 return omit_one_operand (type, integer_zero_node, arg0);
6792 else if (below)
6793 return omit_one_operand (type, integer_one_node, arg0);
6794 break;
6795 default:
6796 break;
6797 }
6798
6799 return NULL_TREE;
6800 }
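
/* Illustrative sketch, not part of fold-const.c: comparing a widened
   value against a constant outside the narrow type's range has a
   known result; in range, the compare can be done in the short type.
   Hypothetical demo name.  */
#include <assert.h>
#include <stdint.h>

static void
check_widened_compare (int16_t s)
{
  int wide = s;             /* range [-32768, 32767] after widening */
  assert (wide < 100000);   /* always true: 100000 is above the max */
  assert (wide != -70000);  /* always true: -70000 is below the min */
  assert ((wide < 100) == (s < (int16_t) 100));
}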
6801
6802 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6803 ARG0 just the signedness is changed. */
6804
6805 static tree
6806 fold_sign_changed_comparison (enum tree_code code, tree type,
6807 tree arg0, tree arg1)
6808 {
6809 tree arg0_inner;
6810 tree inner_type, outer_type;
6811
6812 if (TREE_CODE (arg0) != NOP_EXPR
6813 && TREE_CODE (arg0) != CONVERT_EXPR)
6814 return NULL_TREE;
6815
6816 outer_type = TREE_TYPE (arg0);
6817 arg0_inner = TREE_OPERAND (arg0, 0);
6818 inner_type = TREE_TYPE (arg0_inner);
6819
6820 #ifdef HAVE_canonicalize_funcptr_for_compare
6821 /* Disable this optimization if we're casting a function pointer
6822 type on targets that require function pointer canonicalization. */
6823 if (HAVE_canonicalize_funcptr_for_compare
6824 && TREE_CODE (inner_type) == POINTER_TYPE
6825 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6826 return NULL_TREE;
6827 #endif
6828
6829 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6830 return NULL_TREE;
6831
6832 if (TREE_CODE (arg1) != INTEGER_CST
6833 && !((TREE_CODE (arg1) == NOP_EXPR
6834 || TREE_CODE (arg1) == CONVERT_EXPR)
6835 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6836 return NULL_TREE;
6837
6838 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6839 && code != NE_EXPR
6840 && code != EQ_EXPR)
6841 return NULL_TREE;
6842
6843 if (TREE_CODE (arg1) == INTEGER_CST)
6844 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6845 TREE_INT_CST_HIGH (arg1), 0,
6846 TREE_OVERFLOW (arg1));
6847 else
6848 arg1 = fold_convert (inner_type, arg1);
6849
6850 return fold_build2 (code, type, arg0_inner, arg1);
6851 }
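
/* Illustrative sketch, not part of fold-const.c: when a conversion
   changes only the signedness (same precision), an equality test can
   be done in the inner type with the constant converted instead.
   Hypothetical demo name; assumes GCC's modular conversion of
   out-of-range values.  */
#include <assert.h>
#include <stdint.h>

static void
check_sign_changed_compare (uint32_t u)
{
  /* ((int32_t) u == -5)  ->  (u == (uint32_t) -5)  */
  assert (((int32_t) u == -5) == (u == (uint32_t) -5));
}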
6852
6853 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6854 the step of the array. Reconstructs s and delta in the case of s * delta
6855 being an integer constant (and thus already folded).
6856 ADDR is the address. MULT is the multiplicative expression.
6857 If the function succeeds, the new address expression is returned. Otherwise
6858 NULL_TREE is returned. */
6859
6860 static tree
6861 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6862 {
6863 tree s, delta, step;
6864 tree ref = TREE_OPERAND (addr, 0), pref;
6865 tree ret, pos;
6866 tree itype;
6867 bool mdim = false;
6868
6869 /* Canonicalize op1 into a possibly non-constant delta
6870 and an INTEGER_CST s. */
6871 if (TREE_CODE (op1) == MULT_EXPR)
6872 {
6873 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6874
6875 STRIP_NOPS (arg0);
6876 STRIP_NOPS (arg1);
6877
6878 if (TREE_CODE (arg0) == INTEGER_CST)
6879 {
6880 s = arg0;
6881 delta = arg1;
6882 }
6883 else if (TREE_CODE (arg1) == INTEGER_CST)
6884 {
6885 s = arg1;
6886 delta = arg0;
6887 }
6888 else
6889 return NULL_TREE;
6890 }
6891 else if (TREE_CODE (op1) == INTEGER_CST)
6892 {
6893 delta = op1;
6894 s = NULL_TREE;
6895 }
6896 else
6897 {
6898 /* Pretend op1 is delta * 1. */
6899 delta = op1;
6900 s = integer_one_node;
6901 }
6902
6903 for (;; ref = TREE_OPERAND (ref, 0))
6904 {
6905 if (TREE_CODE (ref) == ARRAY_REF)
6906 {
6907 /* Remember if this was a multi-dimensional array. */
6908 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6909 mdim = true;
6910
6911 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6912 if (! itype)
6913 continue;
6914
6915 step = array_ref_element_size (ref);
6916 if (TREE_CODE (step) != INTEGER_CST)
6917 continue;
6918
6919 if (s)
6920 {
6921 if (! tree_int_cst_equal (step, s))
6922 continue;
6923 }
6924 else
6925 {
6926 /* Check whether delta is a multiple of step. */
6927 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6928 if (! tmp)
6929 continue;
6930 delta = tmp;
6931 }
6932
6933 /* Only fold here if we can verify we do not overflow one
6934 dimension of a multi-dimensional array. */
6935 if (mdim)
6936 {
6937 tree tmp;
6938
6939 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6940 || !INTEGRAL_TYPE_P (itype)
6941 || !TYPE_MAX_VALUE (itype)
6942 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
6943 continue;
6944
6945 tmp = fold_binary (code, itype,
6946 fold_convert (itype,
6947 TREE_OPERAND (ref, 1)),
6948 fold_convert (itype, delta));
6949 if (!tmp
6950 || TREE_CODE (tmp) != INTEGER_CST
6951 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
6952 continue;
6953 }
6954
6955 break;
6956 }
6957 else
6958 mdim = false;
6959
6960 if (!handled_component_p (ref))
6961 return NULL_TREE;
6962 }
6963
6964 /* We found a suitable array reference. So copy everything up to it,
6965 and replace the index. */
6966
6967 pref = TREE_OPERAND (addr, 0);
6968 ret = copy_node (pref);
6969 pos = ret;
6970
6971 while (pref != ref)
6972 {
6973 pref = TREE_OPERAND (pref, 0);
6974 TREE_OPERAND (pos, 0) = copy_node (pref);
6975 pos = TREE_OPERAND (pos, 0);
6976 }
6977
6978 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6979 fold_convert (itype,
6980 TREE_OPERAND (pos, 1)),
6981 fold_convert (itype, delta));
6982
6983 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6984 }
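
/* Illustrative sketch, not part of fold-const.c: the address rewrite
   &a[idx] + s * delta -> &a[idx + delta] when s is the element size.
   Hypothetical demo name; both indices stay inside the array, as the
   multi-dimensional overflow check above insists.  */
#include <assert.h>

static void
check_move_mult_to_index (void)
{
  int a[10];
  int idx = 2, delta = 3;
  /* (char *) &a[idx] + sizeof (int) * delta points at a[idx + delta].  */
  assert ((int *) ((char *) &a[idx] + sizeof (int) * delta)
          == &a[idx + delta]);
}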
6985
6986
6987 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6988 means A >= Y && A != MAX, but in this case we know that
6989 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6990
6991 static tree
6992 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6993 {
6994 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6995
6996 if (TREE_CODE (bound) == LT_EXPR)
6997 a = TREE_OPERAND (bound, 0);
6998 else if (TREE_CODE (bound) == GT_EXPR)
6999 a = TREE_OPERAND (bound, 1);
7000 else
7001 return NULL_TREE;
7002
7003 typea = TREE_TYPE (a);
7004 if (!INTEGRAL_TYPE_P (typea)
7005 && !POINTER_TYPE_P (typea))
7006 return NULL_TREE;
7007
7008 if (TREE_CODE (ineq) == LT_EXPR)
7009 {
7010 a1 = TREE_OPERAND (ineq, 1);
7011 y = TREE_OPERAND (ineq, 0);
7012 }
7013 else if (TREE_CODE (ineq) == GT_EXPR)
7014 {
7015 a1 = TREE_OPERAND (ineq, 0);
7016 y = TREE_OPERAND (ineq, 1);
7017 }
7018 else
7019 return NULL_TREE;
7020
7021 if (TREE_TYPE (a1) != typea)
7022 return NULL_TREE;
7023
7024 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
7025 if (!integer_onep (diff))
7026 return NULL_TREE;
7027
7028 return fold_build2 (GE_EXPR, type, a, y);
7029 }
7030
7031 /* Fold a sum or difference in which at least one operand is a multiplication.
7032 Returns the folded tree or NULL_TREE if no simplification could be made. */
7033
7034 static tree
7035 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7036 {
7037 tree arg00, arg01, arg10, arg11;
7038 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7039
7040 /* (A * C) +- (B * C) -> (A+-B) * C.
7041 (A * C) +- A -> A * (C+-1).
7042 We are most concerned about the case where C is a constant,
7043 but other combinations show up during loop reduction. Since
7044 it is not difficult, try all four possibilities. */
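/* Illustrative examples, assuming integer operands:
   x*4 + y*4 -> (x + y) * 4
   x*7 - x -> x * (7 - 1), i.e. x * 6
   and, through the power-of-two path below,
   x*12 + y*4 -> (x*3 + y) * 4. */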
7045
7046 if (TREE_CODE (arg0) == MULT_EXPR)
7047 {
7048 arg00 = TREE_OPERAND (arg0, 0);
7049 arg01 = TREE_OPERAND (arg0, 1);
7050 }
7051 else
7052 {
7053 arg00 = arg0;
7054 arg01 = build_one_cst (type);
7055 }
7056 if (TREE_CODE (arg1) == MULT_EXPR)
7057 {
7058 arg10 = TREE_OPERAND (arg1, 0);
7059 arg11 = TREE_OPERAND (arg1, 1);
7060 }
7061 else
7062 {
7063 arg10 = arg1;
7064 arg11 = build_one_cst (type);
7065 }
7066 same = NULL_TREE;
7067
7068 if (operand_equal_p (arg01, arg11, 0))
7069 same = arg01, alt0 = arg00, alt1 = arg10;
7070 else if (operand_equal_p (arg00, arg10, 0))
7071 same = arg00, alt0 = arg01, alt1 = arg11;
7072 else if (operand_equal_p (arg00, arg11, 0))
7073 same = arg00, alt0 = arg01, alt1 = arg10;
7074 else if (operand_equal_p (arg01, arg10, 0))
7075 same = arg01, alt0 = arg00, alt1 = arg11;
7076
7077 /* No identical multiplicands; see if we can find a common
7078 power-of-two factor in non-power-of-two multiplies. This
7079 can help in multi-dimensional array access. */
7080 else if (host_integerp (arg01, 0)
7081 && host_integerp (arg11, 0))
7082 {
7083 HOST_WIDE_INT int01, int11, tmp;
7084 bool swap = false;
7085 tree maybe_same;
7086 int01 = TREE_INT_CST_LOW (arg01);
7087 int11 = TREE_INT_CST_LOW (arg11);
7088
7089 /* Move min of absolute values to int11. */
7090 if ((int01 >= 0 ? int01 : -int01)
7091 < (int11 >= 0 ? int11 : -int11))
7092 {
7093 tmp = int01, int01 = int11, int11 = tmp;
7094 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7095 maybe_same = arg01;
7096 swap = true;
7097 }
7098 else
7099 maybe_same = arg11;
7100
7101 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7102 {
7103 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7104 build_int_cst (TREE_TYPE (arg00),
7105 int01 / int11));
7106 alt1 = arg10;
7107 same = maybe_same;
7108 if (swap)
7109 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7110 }
7111 }
7112
7113 if (same)
7114 return fold_build2 (MULT_EXPR, type,
7115 fold_build2 (code, type,
7116 fold_convert (type, alt0),
7117 fold_convert (type, alt1)),
7118 fold_convert (type, same));
7119
7120 return NULL_TREE;
7121 }
7122
7123 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7124 specified by EXPR into the buffer PTR of length LEN bytes.
7125 Return the number of bytes placed in the buffer, or zero
7126 upon failure. */
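/* For example, encoding the 32-bit constant 0x11223344 places the
   bytes 44 33 22 11 in the buffer on a little-endian target and
   11 22 33 44 on a big-endian one. */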
7127
7128 static int
7129 native_encode_int (tree expr, unsigned char *ptr, int len)
7130 {
7131 tree type = TREE_TYPE (expr);
7132 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7133 int byte, offset, word, words;
7134 unsigned char value;
7135
7136 if (total_bytes > len)
7137 return 0;
7138 words = total_bytes / UNITS_PER_WORD;
7139
7140 for (byte = 0; byte < total_bytes; byte++)
7141 {
7142 int bitpos = byte * BITS_PER_UNIT;
7143 if (bitpos < HOST_BITS_PER_WIDE_INT)
7144 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7145 else
7146 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7147 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7148
7149 if (total_bytes > UNITS_PER_WORD)
7150 {
7151 word = byte / UNITS_PER_WORD;
7152 if (WORDS_BIG_ENDIAN)
7153 word = (words - 1) - word;
7154 offset = word * UNITS_PER_WORD;
7155 if (BYTES_BIG_ENDIAN)
7156 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7157 else
7158 offset += byte % UNITS_PER_WORD;
7159 }
7160 else
7161 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7162 ptr[offset] = value;
7163 }
7164 return total_bytes;
7165 }
7166
7167
7168 /* Subroutine of native_encode_expr. Encode the REAL_CST
7169 specified by EXPR into the buffer PTR of length LEN bytes.
7170 Return the number of bytes placed in the buffer, or zero
7171 upon failure. */
7172
7173 static int
7174 native_encode_real (tree expr, unsigned char *ptr, int len)
7175 {
7176 tree type = TREE_TYPE (expr);
7177 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7178 int byte, offset, word, words, bitpos;
7179 unsigned char value;
7180
7181 /* There are always 32 bits in each long, no matter the size of
7182 the host's long. We handle floating point representations with
7183 up to 192 bits. */
7184 long tmp[6];
7185
7186 if (total_bytes > len)
7187 return 0;
7188 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7189
7190 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7191
7192 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7193 bitpos += BITS_PER_UNIT)
7194 {
7195 byte = (bitpos / BITS_PER_UNIT) & 3;
7196 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7197
7198 if (UNITS_PER_WORD < 4)
7199 {
7200 word = byte / UNITS_PER_WORD;
7201 if (WORDS_BIG_ENDIAN)
7202 word = (words - 1) - word;
7203 offset = word * UNITS_PER_WORD;
7204 if (BYTES_BIG_ENDIAN)
7205 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7206 else
7207 offset += byte % UNITS_PER_WORD;
7208 }
7209 else
7210 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7211 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7212 }
7213 return total_bytes;
7214 }
7215
7216 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7217 specified by EXPR into the buffer PTR of length LEN bytes.
7218 Return the number of bytes placed in the buffer, or zero
7219 upon failure. */
7220
7221 static int
7222 native_encode_complex (tree expr, unsigned char *ptr, int len)
7223 {
7224 int rsize, isize;
7225 tree part;
7226
7227 part = TREE_REALPART (expr);
7228 rsize = native_encode_expr (part, ptr, len);
7229 if (rsize == 0)
7230 return 0;
7231 part = TREE_IMAGPART (expr);
7232 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7233 if (isize != rsize)
7234 return 0;
7235 return rsize + isize;
7236 }
7237
7238
7239 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7240 specified by EXPR into the buffer PTR of length LEN bytes.
7241 Return the number of bytes placed in the buffer, or zero
7242 upon failure. */
7243
7244 static int
7245 native_encode_vector (tree expr, unsigned char *ptr, int len)
7246 {
7247 int i, size, offset, count;
7248 tree itype, elem, elements;
7249
7250 offset = 0;
7251 elements = TREE_VECTOR_CST_ELTS (expr);
7252 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7253 itype = TREE_TYPE (TREE_TYPE (expr));
7254 size = GET_MODE_SIZE (TYPE_MODE (itype));
7255 for (i = 0; i < count; i++)
7256 {
7257 if (elements)
7258 {
7259 elem = TREE_VALUE (elements);
7260 elements = TREE_CHAIN (elements);
7261 }
7262 else
7263 elem = NULL_TREE;
7264
7265 if (elem)
7266 {
7267 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7268 return 0;
7269 }
7270 else
7271 {
7272 if (offset + size > len)
7273 return 0;
7274 memset (ptr+offset, 0, size);
7275 }
7276 offset += size;
7277 }
7278 return offset;
7279 }
7280
7281
7282 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7283 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7284 buffer PTR of length LEN bytes. Return the number of bytes
7285 placed in the buffer, or zero upon failure. */
7286
7287 int
7288 native_encode_expr (tree expr, unsigned char *ptr, int len)
7289 {
7290 switch (TREE_CODE (expr))
7291 {
7292 case INTEGER_CST:
7293 return native_encode_int (expr, ptr, len);
7294
7295 case REAL_CST:
7296 return native_encode_real (expr, ptr, len);
7297
7298 case COMPLEX_CST:
7299 return native_encode_complex (expr, ptr, len);
7300
7301 case VECTOR_CST:
7302 return native_encode_vector (expr, ptr, len);
7303
7304 default:
7305 return 0;
7306 }
7307 }
7308
7309
7310 /* Subroutine of native_interpret_expr. Interpret the contents of
7311 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7312 If the buffer cannot be interpreted, return NULL_TREE. */
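/* This is the inverse of native_encode_int: interpreting a buffer
   that native_encode_int just filled for the same type yields an
   equal INTEGER_CST (illustrative round-trip property). */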
7313
7314 static tree
7315 native_interpret_int (tree type, unsigned char *ptr, int len)
7316 {
7317 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7318 int byte, offset, word, words;
7319 unsigned char value;
7320 unsigned HOST_WIDE_INT lo = 0;
7321 HOST_WIDE_INT hi = 0;
7322
7323 if (total_bytes > len)
7324 return NULL_TREE;
7325 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7326 return NULL_TREE;
7327 words = total_bytes / UNITS_PER_WORD;
7328
7329 for (byte = 0; byte < total_bytes; byte++)
7330 {
7331 int bitpos = byte * BITS_PER_UNIT;
7332 if (total_bytes > UNITS_PER_WORD)
7333 {
7334 word = byte / UNITS_PER_WORD;
7335 if (WORDS_BIG_ENDIAN)
7336 word = (words - 1) - word;
7337 offset = word * UNITS_PER_WORD;
7338 if (BYTES_BIG_ENDIAN)
7339 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7340 else
7341 offset += byte % UNITS_PER_WORD;
7342 }
7343 else
7344 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7345 value = ptr[offset];
7346
7347 if (bitpos < HOST_BITS_PER_WIDE_INT)
7348 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7349 else
7350 hi |= (unsigned HOST_WIDE_INT) value
7351 << (bitpos - HOST_BITS_PER_WIDE_INT);
7352 }
7353
7354 return build_int_cst_wide_type (type, lo, hi);
7355 }
7356
7357
7358 /* Subroutine of native_interpret_expr. Interpret the contents of
7359 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7360 If the buffer cannot be interpreted, return NULL_TREE. */
7361
7362 static tree
7363 native_interpret_real (tree type, unsigned char *ptr, int len)
7364 {
7365 enum machine_mode mode = TYPE_MODE (type);
7366 int total_bytes = GET_MODE_SIZE (mode);
7367 int byte, offset, word, words, bitpos;
7368 unsigned char value;
7369 /* There are always 32 bits in each long, no matter the size of
7370 the host's long. We handle floating point representations with
7371 up to 192 bits. */
7372 REAL_VALUE_TYPE r;
7373 long tmp[6];
7374
7375 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7376 if (total_bytes > len || total_bytes > 24)
7377 return NULL_TREE;
7378 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7379
7380 memset (tmp, 0, sizeof (tmp));
7381 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7382 bitpos += BITS_PER_UNIT)
7383 {
7384 byte = (bitpos / BITS_PER_UNIT) & 3;
7385 if (UNITS_PER_WORD < 4)
7386 {
7387 word = byte / UNITS_PER_WORD;
7388 if (WORDS_BIG_ENDIAN)
7389 word = (words - 1) - word;
7390 offset = word * UNITS_PER_WORD;
7391 if (BYTES_BIG_ENDIAN)
7392 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7393 else
7394 offset += byte % UNITS_PER_WORD;
7395 }
7396 else
7397 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7398 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7399
7400 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7401 }
7402
7403 real_from_target (&r, tmp, mode);
7404 return build_real (type, r);
7405 }
7406
7407
7408 /* Subroutine of native_interpret_expr. Interpret the contents of
7409 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7410 If the buffer cannot be interpreted, return NULL_TREE. */
7411
7412 static tree
7413 native_interpret_complex (tree type, unsigned char *ptr, int len)
7414 {
7415 tree etype, rpart, ipart;
7416 int size;
7417
7418 etype = TREE_TYPE (type);
7419 size = GET_MODE_SIZE (TYPE_MODE (etype));
7420 if (size * 2 > len)
7421 return NULL_TREE;
7422 rpart = native_interpret_expr (etype, ptr, size);
7423 if (!rpart)
7424 return NULL_TREE;
7425 ipart = native_interpret_expr (etype, ptr+size, size);
7426 if (!ipart)
7427 return NULL_TREE;
7428 return build_complex (type, rpart, ipart);
7429 }
7430
7431
7432 /* Subroutine of native_interpret_expr. Interpret the contents of
7433 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7434 If the buffer cannot be interpreted, return NULL_TREE. */
7435
7436 static tree
7437 native_interpret_vector (tree type, unsigned char *ptr, int len)
7438 {
7439 tree etype, elem, elements;
7440 int i, size, count;
7441
7442 etype = TREE_TYPE (type);
7443 size = GET_MODE_SIZE (TYPE_MODE (etype));
7444 count = TYPE_VECTOR_SUBPARTS (type);
7445 if (size * count > len)
7446 return NULL_TREE;
7447
7448 elements = NULL_TREE;
7449 for (i = count - 1; i >= 0; i--)
7450 {
7451 elem = native_interpret_expr (etype, ptr+(i*size), size);
7452 if (!elem)
7453 return NULL_TREE;
7454 elements = tree_cons (NULL_TREE, elem, elements);
7455 }
7456 return build_vector (type, elements);
7457 }
7458
7459
7460 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7461 the buffer PTR of length LEN as a constant of type TYPE. For
7462 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7463 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7464 return NULL_TREE. */
7465
7466 tree
7467 native_interpret_expr (tree type, unsigned char *ptr, int len)
7468 {
7469 switch (TREE_CODE (type))
7470 {
7471 case INTEGER_TYPE:
7472 case ENUMERAL_TYPE:
7473 case BOOLEAN_TYPE:
7474 return native_interpret_int (type, ptr, len);
7475
7476 case REAL_TYPE:
7477 return native_interpret_real (type, ptr, len);
7478
7479 case COMPLEX_TYPE:
7480 return native_interpret_complex (type, ptr, len);
7481
7482 case VECTOR_TYPE:
7483 return native_interpret_vector (type, ptr, len);
7484
7485 default:
7486 return NULL_TREE;
7487 }
7488 }
7489
7490
7491 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7492 TYPE at compile-time. If we're unable to perform the conversion
7493 return NULL_TREE. */
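/* For example, assuming a 32-bit int and IEEE single precision,
   VIEW_CONVERT_EXPR<int>(1.0f) folds to 1065353216 (0x3f800000):
   the REAL_CST is encoded into the buffer and the bytes are then
   re-read as an integer. */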
7494
7495 static tree
7496 fold_view_convert_expr (tree type, tree expr)
7497 {
7498 /* We support up to 512-bit values (for V8DFmode). */
7499 unsigned char buffer[64];
7500 int len;
7501
7502 /* Check that the host and target are sane. */
7503 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7504 return NULL_TREE;
7505
7506 len = native_encode_expr (expr, buffer, sizeof (buffer));
7507 if (len == 0)
7508 return NULL_TREE;
7509
7510 return native_interpret_expr (type, buffer, len);
7511 }
7512
7513
7514 /* Fold a unary expression of code CODE and type TYPE with operand
7515 OP0. Return the folded expression if folding is successful.
7516 Otherwise, return NULL_TREE. */
7517
7518 tree
7519 fold_unary (enum tree_code code, tree type, tree op0)
7520 {
7521 tree tem;
7522 tree arg0;
7523 enum tree_code_class kind = TREE_CODE_CLASS (code);
7524
7525 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7526 && TREE_CODE_LENGTH (code) == 1);
7527
7528 arg0 = op0;
7529 if (arg0)
7530 {
7531 if (code == NOP_EXPR || code == CONVERT_EXPR
7532 || code == FLOAT_EXPR || code == ABS_EXPR)
7533 {
7534 /* Don't use STRIP_NOPS, because signedness of argument type
7535 matters. */
7536 STRIP_SIGN_NOPS (arg0);
7537 }
7538 else
7539 {
7540 /* Strip any conversions that don't change the mode. This
7541 is safe for every expression, except for a comparison
7542 expression because its signedness is derived from its
7543 operands.
7544
7545 Note that this is done as an internal manipulation within
7546 the constant folder, in order to find the simplest
7547 representation of the arguments so that their form can be
7548 studied. In any case, the appropriate type conversions should
7549 be put back in the tree that comes out of the constant
7550 folder. */
7551 STRIP_NOPS (arg0);
7552 }
7553 }
7554
7555 if (TREE_CODE_CLASS (code) == tcc_unary)
7556 {
7557 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7558 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7559 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7560 else if (TREE_CODE (arg0) == COND_EXPR)
7561 {
7562 tree arg01 = TREE_OPERAND (arg0, 1);
7563 tree arg02 = TREE_OPERAND (arg0, 2);
7564 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7565 arg01 = fold_build1 (code, type, arg01);
7566 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7567 arg02 = fold_build1 (code, type, arg02);
7568 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7569 arg01, arg02);
7570
7571 /* If this was a conversion, and all we did was to move it
7572 inside the COND_EXPR, bring it back out. But leave it if
7573 it is a conversion from integer to integer and the
7574 result precision is no wider than a word since such a
7575 conversion is cheap and may be optimized away by combine,
7576 while it couldn't if it were outside the COND_EXPR. Then return
7577 so we don't get into an infinite recursion loop taking the
7578 conversion out and then back in. */
7579
7580 if ((code == NOP_EXPR || code == CONVERT_EXPR
7581 || code == NON_LVALUE_EXPR)
7582 && TREE_CODE (tem) == COND_EXPR
7583 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7584 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7585 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7586 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7587 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7588 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7589 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7590 && (INTEGRAL_TYPE_P
7591 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7592 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7593 || flag_syntax_only))
7594 tem = build1 (code, type,
7595 build3 (COND_EXPR,
7596 TREE_TYPE (TREE_OPERAND
7597 (TREE_OPERAND (tem, 1), 0)),
7598 TREE_OPERAND (tem, 0),
7599 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7600 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7601 return tem;
7602 }
7603 else if (COMPARISON_CLASS_P (arg0))
7604 {
7605 if (TREE_CODE (type) == BOOLEAN_TYPE)
7606 {
7607 arg0 = copy_node (arg0);
7608 TREE_TYPE (arg0) = type;
7609 return arg0;
7610 }
7611 else if (TREE_CODE (type) != INTEGER_TYPE)
7612 return fold_build3 (COND_EXPR, type, arg0,
7613 fold_build1 (code, type,
7614 integer_one_node),
7615 fold_build1 (code, type,
7616 integer_zero_node));
7617 }
7618 }
7619
7620 switch (code)
7621 {
7622 case NOP_EXPR:
7623 case FLOAT_EXPR:
7624 case CONVERT_EXPR:
7625 case FIX_TRUNC_EXPR:
7626 if (TREE_TYPE (op0) == type)
7627 return op0;
7628
7629 /* If we have (type) (a CMP b) and type is an integral type, return a
7630 new expression involving the new type. */
7631 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7632 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7633 TREE_OPERAND (op0, 1));
7634
7635 /* Handle cases of two conversions in a row. */
7636 if (TREE_CODE (op0) == NOP_EXPR
7637 || TREE_CODE (op0) == CONVERT_EXPR)
7638 {
7639 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7640 tree inter_type = TREE_TYPE (op0);
7641 int inside_int = INTEGRAL_TYPE_P (inside_type);
7642 int inside_ptr = POINTER_TYPE_P (inside_type);
7643 int inside_float = FLOAT_TYPE_P (inside_type);
7644 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7645 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7646 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7647 int inter_int = INTEGRAL_TYPE_P (inter_type);
7648 int inter_ptr = POINTER_TYPE_P (inter_type);
7649 int inter_float = FLOAT_TYPE_P (inter_type);
7650 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7651 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7652 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7653 int final_int = INTEGRAL_TYPE_P (type);
7654 int final_ptr = POINTER_TYPE_P (type);
7655 int final_float = FLOAT_TYPE_P (type);
7656 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7657 unsigned int final_prec = TYPE_PRECISION (type);
7658 int final_unsignedp = TYPE_UNSIGNED (type);
7659
7660 /* In addition to the cases of two conversions in a row
7661 handled below, if we are converting something to its own
7662 type via an object of identical or wider precision, neither
7663 conversion is needed. */
7664 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7665 && (((inter_int || inter_ptr) && final_int)
7666 || (inter_float && final_float))
7667 && inter_prec >= final_prec)
7668 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7669
7670 /* Likewise, if the intermediate and final types are either both
7671 float or both integer, we don't need the middle conversion if
7672 it is wider than the final type and doesn't change the signedness
7673 (for integers). Avoid this if the final type is a pointer
7674 since then we sometimes need the inner conversion. Likewise if
7675 the outer has a precision not equal to the size of its mode. */
7676 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7677 || (inter_float && inside_float)
7678 || (inter_vec && inside_vec))
7679 && inter_prec >= inside_prec
7680 && (inter_float || inter_vec
7681 || inter_unsignedp == inside_unsignedp)
7682 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7683 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7684 && ! final_ptr
7685 && (! final_vec || inter_prec == inside_prec))
7686 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7687
7688 /* If we have a sign-extension of a zero-extended value, we can
7689 replace that by a single zero-extension. */
7690 if (inside_int && inter_int && final_int
7691 && inside_prec < inter_prec && inter_prec < final_prec
7692 && inside_unsignedp && !inter_unsignedp)
7693 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7694
7695 /* Two conversions in a row are not needed unless:
7696 - some conversion is floating-point (overstrict for now), or
7697 - some conversion is a vector (overstrict for now), or
7698 - the intermediate type is narrower than both initial and
7699 final, or
7700 - the intermediate type and innermost type differ in signedness,
7701 and the outermost type is wider than the intermediate, or
7702 - the initial type is a pointer type and the precisions of the
7703 intermediate and final types differ, or
7704 - the final type is a pointer type and the precisions of the
7705 initial and intermediate types differ, or
7706 - the final type is a pointer type and the initial type is not, or
7707 - the initial type is a pointer to an array and the final type
7708 is not. */
7709 if (! inside_float && ! inter_float && ! final_float
7710 && ! inside_vec && ! inter_vec && ! final_vec
7711 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7712 && ! (inside_int && inter_int
7713 && inter_unsignedp != inside_unsignedp
7714 && inter_prec < final_prec)
7715 && ((inter_unsignedp && inter_prec > inside_prec)
7716 == (final_unsignedp && final_prec > inter_prec))
7717 && ! (inside_ptr && inter_prec != final_prec)
7718 && ! (final_ptr && inside_prec != inter_prec)
7719 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7720 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7721 && final_ptr == inside_ptr
7722 && ! (inside_ptr
7723 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7724 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7725 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7726 }
7727
7728 /* Handle (T *)&A.B.C for A being of type T and B and C
7729 living at offset zero. This occurs frequently in
7730 C++ upcasting and then accessing the base. */
7731 if (TREE_CODE (op0) == ADDR_EXPR
7732 && POINTER_TYPE_P (type)
7733 && handled_component_p (TREE_OPERAND (op0, 0)))
7734 {
7735 HOST_WIDE_INT bitsize, bitpos;
7736 tree offset;
7737 enum machine_mode mode;
7738 int unsignedp, volatilep;
7739 tree base = TREE_OPERAND (op0, 0);
7740 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7741 &mode, &unsignedp, &volatilep, false);
7742 /* If the reference was to a (constant) zero offset, we can use
7743 the address of the base if it has the same base type
7744 as the result type. */
7745 if (! offset && bitpos == 0
7746 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7747 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7748 return fold_convert (type, build_fold_addr_expr (base));
7749 }
7750
7751 if ((TREE_CODE (op0) == MODIFY_EXPR
7752 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7753 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7754 /* Detect assigning a bitfield. */
7755 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7756 && DECL_BIT_FIELD
7757 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7758 {
7759 /* Don't leave an assignment inside a conversion
7760 unless assigning a bitfield. */
7761 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7762 /* First do the assignment, then return converted constant. */
7763 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7764 TREE_NO_WARNING (tem) = 1;
7765 TREE_USED (tem) = 1;
7766 return tem;
7767 }
7768
7769 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7770 constant (if x has signed type, the sign bit cannot be set
7771 in c). This folds extension into the BIT_AND_EXPR. */
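/* E.g. with unsigned int x, (unsigned long)(x & 0xff) folds to
   (unsigned long)x & 0xff (illustrative). */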
7772 if (INTEGRAL_TYPE_P (type)
7773 && TREE_CODE (type) != BOOLEAN_TYPE
7774 && TREE_CODE (op0) == BIT_AND_EXPR
7775 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7776 {
7777 tree and = op0;
7778 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7779 int change = 0;
7780
7781 if (TYPE_UNSIGNED (TREE_TYPE (and))
7782 || (TYPE_PRECISION (type)
7783 <= TYPE_PRECISION (TREE_TYPE (and))))
7784 change = 1;
7785 else if (TYPE_PRECISION (TREE_TYPE (and1))
7786 <= HOST_BITS_PER_WIDE_INT
7787 && host_integerp (and1, 1))
7788 {
7789 unsigned HOST_WIDE_INT cst;
7790
7791 cst = tree_low_cst (and1, 1);
7792 cst &= (HOST_WIDE_INT) -1
7793 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7794 change = (cst == 0);
7795 #ifdef LOAD_EXTEND_OP
7796 if (change
7797 && !flag_syntax_only
7798 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7799 == ZERO_EXTEND))
7800 {
7801 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7802 and0 = fold_convert (uns, and0);
7803 and1 = fold_convert (uns, and1);
7804 }
7805 #endif
7806 }
7807 if (change)
7808 {
7809 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7810 TREE_INT_CST_HIGH (and1), 0,
7811 TREE_OVERFLOW (and1));
7812 return fold_build2 (BIT_AND_EXPR, type,
7813 fold_convert (type, and0), tem);
7814 }
7815 }
7816
7817 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7818 T2 being pointers to types of the same size. */
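/* Illustrative example: (unsigned int *)((int *)p + i) folds to
   (unsigned int *)p + i, since int and unsigned int have the
   same size. */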
7819 if (POINTER_TYPE_P (type)
7820 && BINARY_CLASS_P (arg0)
7821 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7822 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7823 {
7824 tree arg00 = TREE_OPERAND (arg0, 0);
7825 tree t0 = type;
7826 tree t1 = TREE_TYPE (arg00);
7827 tree tt0 = TREE_TYPE (t0);
7828 tree tt1 = TREE_TYPE (t1);
7829 tree s0 = TYPE_SIZE (tt0);
7830 tree s1 = TYPE_SIZE (tt1);
7831
7832 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7833 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7834 TREE_OPERAND (arg0, 1));
7835 }
7836
7837 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7838 of the same precision, and X is an integer type not narrower than
7839 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
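/* E.g. with int x, (int)~(unsigned int)x folds to ~x, the cast to
   unsigned int being precision-preserving (illustrative). */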
7840 if (INTEGRAL_TYPE_P (type)
7841 && TREE_CODE (op0) == BIT_NOT_EXPR
7842 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7843 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7844 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7845 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7846 {
7847 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7848 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7849 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7850 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7851 }
7852
7853 tem = fold_convert_const (code, type, op0);
7854 return tem ? tem : NULL_TREE;
7855
7856 case VIEW_CONVERT_EXPR:
7857 if (TREE_TYPE (op0) == type)
7858 return op0;
7859 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7860 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7861 return fold_view_convert_expr (type, op0);
7862
7863 case NEGATE_EXPR:
7864 tem = fold_negate_expr (arg0);
7865 if (tem)
7866 return fold_convert (type, tem);
7867 return NULL_TREE;
7868
7869 case ABS_EXPR:
7870 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7871 return fold_abs_const (arg0, type);
7872 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7873 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7874 /* Convert fabs((double)float) into (double)fabsf(float). */
7875 else if (TREE_CODE (arg0) == NOP_EXPR
7876 && TREE_CODE (type) == REAL_TYPE)
7877 {
7878 tree targ0 = strip_float_extensions (arg0);
7879 if (targ0 != arg0)
7880 return fold_convert (type, fold_build1 (ABS_EXPR,
7881 TREE_TYPE (targ0),
7882 targ0));
7883 }
7884 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7885 else if (TREE_CODE (arg0) == ABS_EXPR)
7886 return arg0;
7887 else if (tree_expr_nonnegative_p (arg0))
7888 return arg0;
7889
7890 /* Strip sign ops from argument. */
7891 if (TREE_CODE (type) == REAL_TYPE)
7892 {
7893 tem = fold_strip_sign_ops (arg0);
7894 if (tem)
7895 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7896 }
7897 return NULL_TREE;
7898
7899 case CONJ_EXPR:
7900 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7901 return fold_convert (type, arg0);
7902 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7903 {
7904 tree itype = TREE_TYPE (type);
7905 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7906 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7907 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7908 }
7909 if (TREE_CODE (arg0) == COMPLEX_CST)
7910 {
7911 tree itype = TREE_TYPE (type);
7912 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7913 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7914 return build_complex (type, rpart, negate_expr (ipart));
7915 }
7916 if (TREE_CODE (arg0) == CONJ_EXPR)
7917 return fold_convert (type, TREE_OPERAND (arg0, 0));
7918 return NULL_TREE;
7919
7920 case BIT_NOT_EXPR:
7921 if (TREE_CODE (arg0) == INTEGER_CST)
7922 return fold_not_const (arg0, type);
7923 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7924 return TREE_OPERAND (arg0, 0);
7925 /* Convert ~ (-A) to A - 1. */
7926 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7927 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7928 build_int_cst (type, 1));
7929 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7930 else if (INTEGRAL_TYPE_P (type)
7931 && ((TREE_CODE (arg0) == MINUS_EXPR
7932 && integer_onep (TREE_OPERAND (arg0, 1)))
7933 || (TREE_CODE (arg0) == PLUS_EXPR
7934 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7935 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7936 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7937 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7938 && (tem = fold_unary (BIT_NOT_EXPR, type,
7939 fold_convert (type,
7940 TREE_OPERAND (arg0, 0)))))
7941 return fold_build2 (BIT_XOR_EXPR, type, tem,
7942 fold_convert (type, TREE_OPERAND (arg0, 1)));
7943 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7944 && (tem = fold_unary (BIT_NOT_EXPR, type,
7945 fold_convert (type,
7946 TREE_OPERAND (arg0, 1)))))
7947 return fold_build2 (BIT_XOR_EXPR, type,
7948 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7949
7950 return NULL_TREE;
7951
7952 case TRUTH_NOT_EXPR:
7953 /* The argument to fold_truth_not_expr must have Boolean type. */
7954 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7955 arg0 = fold_convert (boolean_type_node, arg0);
7956
7957 /* Note that the operand of this must be an int
7958 and its values must be 0 or 1.
7959 ("true" is a fixed value perhaps depending on the language,
7960 but we don't handle values other than 1 correctly yet.) */
7961 tem = fold_truth_not_expr (arg0);
7962 if (!tem)
7963 return NULL_TREE;
7964 return fold_convert (type, tem);
7965
7966 case REALPART_EXPR:
7967 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7968 return fold_convert (type, arg0);
7969 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7970 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7971 TREE_OPERAND (arg0, 1));
7972 if (TREE_CODE (arg0) == COMPLEX_CST)
7973 return fold_convert (type, TREE_REALPART (arg0));
7974 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7975 {
7976 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7977 tem = fold_build2 (TREE_CODE (arg0), itype,
7978 fold_build1 (REALPART_EXPR, itype,
7979 TREE_OPERAND (arg0, 0)),
7980 fold_build1 (REALPART_EXPR, itype,
7981 TREE_OPERAND (arg0, 1)));
7982 return fold_convert (type, tem);
7983 }
7984 if (TREE_CODE (arg0) == CONJ_EXPR)
7985 {
7986 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7987 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7988 return fold_convert (type, tem);
7989 }
7990 if (TREE_CODE (arg0) == CALL_EXPR)
7991 {
7992 tree fn = get_callee_fndecl (arg0);
7993 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7994 switch (DECL_FUNCTION_CODE (fn))
7995 {
7996 CASE_FLT_FN (BUILT_IN_CEXPI):
7997 fn = mathfn_built_in (type, BUILT_IN_COS);
7998 if (fn)
7999 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8000 break;
8001
8002 default:
8003 break;
8004 }
8005 }
8006 return NULL_TREE;
8007
8008 case IMAGPART_EXPR:
8009 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8010 return fold_convert (type, integer_zero_node);
8011 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8012 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8013 TREE_OPERAND (arg0, 0));
8014 if (TREE_CODE (arg0) == COMPLEX_CST)
8015 return fold_convert (type, TREE_IMAGPART (arg0));
8016 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8017 {
8018 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8019 tem = fold_build2 (TREE_CODE (arg0), itype,
8020 fold_build1 (IMAGPART_EXPR, itype,
8021 TREE_OPERAND (arg0, 0)),
8022 fold_build1 (IMAGPART_EXPR, itype,
8023 TREE_OPERAND (arg0, 1)));
8024 return fold_convert (type, tem);
8025 }
8026 if (TREE_CODE (arg0) == CONJ_EXPR)
8027 {
8028 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8029 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8030 return fold_convert (type, negate_expr (tem));
8031 }
8032 if (TREE_CODE (arg0) == CALL_EXPR)
8033 {
8034 tree fn = get_callee_fndecl (arg0);
8035 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8036 switch (DECL_FUNCTION_CODE (fn))
8037 {
8038 CASE_FLT_FN (BUILT_IN_CEXPI):
8039 fn = mathfn_built_in (type, BUILT_IN_SIN);
8040 if (fn)
8041 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8042 break;
8043
8044 default:
8045 break;
8046 }
8047 }
8048 return NULL_TREE;
8049
8050 default:
8051 return NULL_TREE;
8052 } /* switch (code) */
8053 }
8054
8055 /* Fold a binary expression of code CODE and type TYPE with operands
8056 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8057 Return the folded expression if folding is successful. Otherwise,
8058 return NULL_TREE. */
8059
8060 static tree
8061 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8062 {
8063 enum tree_code compl_code;
8064
8065 if (code == MIN_EXPR)
8066 compl_code = MAX_EXPR;
8067 else if (code == MAX_EXPR)
8068 compl_code = MIN_EXPR;
8069 else
8070 gcc_unreachable ();
8071
8072 /* MIN (MAX (a, b), b) == b. */
8073 if (TREE_CODE (op0) == compl_code
8074 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8075 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8076
8077 /* MIN (MAX (b, a), b) == b. */
8078 if (TREE_CODE (op0) == compl_code
8079 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8080 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8081 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8082
8083 /* MIN (a, MAX (a, b)) == a. */
8084 if (TREE_CODE (op1) == compl_code
8085 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8086 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8087 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8088
8089 /* MIN (a, MAX (b, a)) == a. */
8090 if (TREE_CODE (op1) == compl_code
8091 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8092 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8093 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8094
8095 return NULL_TREE;
8096 }
8097
8098 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8099 by changing CODE to reduce the magnitude of constants involved in
8100 ARG0 of the comparison.
8101 Returns a canonicalized comparison tree if a simplification was
8102 possible, otherwise returns NULL_TREE.
8103 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8104 valid if signed overflow is undefined. */
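/* Illustrative examples: X + 2 <= Y becomes X + 1 < Y, and the
   constant-only form 5 <= Y becomes Y > 4 after swapping. */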
8105
8106 static tree
8107 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8108 tree arg0, tree arg1,
8109 bool *strict_overflow_p)
8110 {
8111 enum tree_code code0 = TREE_CODE (arg0);
8112 tree t, cst0 = NULL_TREE;
8113 int sgn0;
8114 bool swap = false;
8115
8116 /* Match A +- CST code arg1 and CST code arg1. */
8117 if (!(((code0 == MINUS_EXPR
8118 || code0 == PLUS_EXPR)
8119 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8120 || code0 == INTEGER_CST))
8121 return NULL_TREE;
8122
8123 /* Identify the constant in arg0 and its sign. */
8124 if (code0 == INTEGER_CST)
8125 cst0 = arg0;
8126 else
8127 cst0 = TREE_OPERAND (arg0, 1);
8128 sgn0 = tree_int_cst_sgn (cst0);
8129
8130 /* Overflowed constants and zero will cause problems. */
8131 if (integer_zerop (cst0)
8132 || TREE_OVERFLOW (cst0))
8133 return NULL_TREE;
8134
8135 /* See if we can reduce the magnitude of the constant in
8136 arg0 by changing the comparison code. */
8137 if (code0 == INTEGER_CST)
8138 {
8139 /* CST <= arg1 -> CST-1 < arg1. */
8140 if (code == LE_EXPR && sgn0 == 1)
8141 code = LT_EXPR;
8142 /* -CST < arg1 -> -CST-1 <= arg1. */
8143 else if (code == LT_EXPR && sgn0 == -1)
8144 code = LE_EXPR;
8145 /* CST > arg1 -> CST-1 >= arg1. */
8146 else if (code == GT_EXPR && sgn0 == 1)
8147 code = GE_EXPR;
8148 /* -CST >= arg1 -> -CST-1 > arg1. */
8149 else if (code == GE_EXPR && sgn0 == -1)
8150 code = GT_EXPR;
8151 else
8152 return NULL_TREE;
8153 /* arg1 code' CST' might be more canonical. */
8154 swap = true;
8155 }
8156 else
8157 {
8158 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8159 if (code == LT_EXPR
8160 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8161 code = LE_EXPR;
8162 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8163 else if (code == GT_EXPR
8164 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8165 code = GE_EXPR;
8166 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8167 else if (code == LE_EXPR
8168 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8169 code = LT_EXPR;
8170 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8171 else if (code == GE_EXPR
8172 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8173 code = GT_EXPR;
8174 else
8175 return NULL_TREE;
8176 *strict_overflow_p = true;
8177 }
8178
8179 /* Now build the constant reduced in magnitude. */
8180 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8181 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8182 if (code0 != INTEGER_CST)
8183 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8184
8185 /* If swapping might yield a more canonical form, do so. */
8186 if (swap)
8187 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8188 else
8189 return fold_build2 (code, type, t, arg1);
8190 }
8191
8192 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE, whose operand
8193 arithmetic has undefined overflow. Try to decrease the magnitude of
8194 constants involved by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR
8195 or vice versa, and put sole constants at the second argument position.
8196 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8197
8198 static tree
8199 maybe_canonicalize_comparison (enum tree_code code, tree type,
8200 tree arg0, tree arg1)
8201 {
8202 tree t;
8203 bool strict_overflow_p;
8204 const char * const warnmsg = G_("assuming signed overflow does not occur "
8205 "when reducing constant in comparison");
8206
8207 /* In principle pointers also have undefined overflow behavior,
8208 but that causes problems elsewhere. */
8209 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8210 || POINTER_TYPE_P (TREE_TYPE (arg0)))
8211 return NULL_TREE;
8212
8213 /* Try canonicalization by simplifying arg0. */
8214 strict_overflow_p = false;
8215 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8216 &strict_overflow_p);
8217 if (t)
8218 {
8219 if (strict_overflow_p)
8220 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8221 return t;
8222 }
8223
8224 /* Try canonicalization by simplifying arg1 using the swapped
8225 comparison. */
8226 code = swap_tree_comparison (code);
8227 strict_overflow_p = false;
8228 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8229 &strict_overflow_p);
8230 if (t && strict_overflow_p)
8231 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8232 return t;
8233 }
8234
8235 /* Subroutine of fold_binary. This routine performs all of the
8236 transformations that are common to the equality/inequality
8237 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8238 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8239 fold_binary should call fold_binary. Fold a comparison with
8240 tree code CODE and type TYPE with operands OP0 and OP1. Return
8241 the folded comparison or NULL_TREE. */
8242
8243 static tree
8244 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8245 {
8246 tree arg0, arg1, tem;
8247
8248 arg0 = op0;
8249 arg1 = op1;
8250
8251 STRIP_SIGN_NOPS (arg0);
8252 STRIP_SIGN_NOPS (arg1);
8253
8254 tem = fold_relational_const (code, type, arg0, arg1);
8255 if (tem != NULL_TREE)
8256 return tem;
8257
8258 /* If one arg is a real or integer constant, put it last. */
8259 if (tree_swap_operands_p (arg0, arg1, true))
8260 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8261
8262 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
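/* E.g. x + 5 < 10 becomes x < 5, and x - 5 < 10 becomes x < 15,
   assuming the signed arithmetic cannot overflow (illustrative). */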
8263 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8264 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8265 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8266 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8267 && (TREE_CODE (arg1) == INTEGER_CST
8268 && !TREE_OVERFLOW (arg1)))
8269 {
8270 tree const1 = TREE_OPERAND (arg0, 1);
8271 tree const2 = arg1;
8272 tree variable = TREE_OPERAND (arg0, 0);
8273 tree lhs;
8274 int lhs_add;
8275 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8276
8277 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8278 TREE_TYPE (arg1), const2, const1);
8279
8280 /* If the constant operation overflowed, this can be
8281 simplified to a comparison against INT_MAX/INT_MIN. */
8282 if (TREE_CODE (lhs) == INTEGER_CST
8283 && TREE_OVERFLOW (lhs))
8284 {
8285 int const1_sgn = tree_int_cst_sgn (const1);
8286 enum tree_code code2 = code;
8287
8288 /* Get the sign of the constant on the lhs if the
8289 operation were VARIABLE + CONST1. */
8290 if (TREE_CODE (arg0) == MINUS_EXPR)
8291 const1_sgn = -const1_sgn;
8292
8293 /* The sign of the constant determines if we overflowed
8294 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8295 Canonicalize to the INT_MIN overflow by swapping the comparison
8296 if necessary. */
8297 if (const1_sgn == -1)
8298 code2 = swap_tree_comparison (code);
8299
8300 /* We now can look at the canonicalized case
8301 VARIABLE + 1 CODE2 INT_MIN
8302 and decide on the result. */
8303 if (code2 == LT_EXPR
8304 || code2 == LE_EXPR
8305 || code2 == EQ_EXPR)
8306 return omit_one_operand (type, boolean_false_node, variable);
8307 else if (code2 == NE_EXPR
8308 || code2 == GE_EXPR
8309 || code2 == GT_EXPR)
8310 return omit_one_operand (type, boolean_true_node, variable);
8311 }
8312
8313 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8314 && (TREE_CODE (lhs) != INTEGER_CST
8315 || !TREE_OVERFLOW (lhs)))
8316 {
8317 fold_overflow_warning (("assuming signed overflow does not occur "
8318 "when changing X +- C1 cmp C2 to "
8319 "X cmp C1 +- C2"),
8320 WARN_STRICT_OVERFLOW_COMPARISON);
8321 return fold_build2 (code, type, variable, lhs);
8322 }
8323 }
8324
8325 /* For comparisons of pointers we can decompose them into a compile-time
8326 comparison of the base objects and the offsets into the object.
8327 This requires at least one operand being an ADDR_EXPR to do more
8328 than the operand_equal_p test below. */
8329 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8330 && (TREE_CODE (arg0) == ADDR_EXPR
8331 || TREE_CODE (arg1) == ADDR_EXPR))
8332 {
8333 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8334 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8335 enum machine_mode mode;
8336 int volatilep, unsignedp;
8337 bool indirect_base0 = false;
8338
8339 /* Get base and offset for the access. Strip ADDR_EXPR for
8340 get_inner_reference, but put it back by stripping INDIRECT_REF
8341 off the base object if possible. */
8342 base0 = arg0;
8343 if (TREE_CODE (arg0) == ADDR_EXPR)
8344 {
8345 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8346 &bitsize, &bitpos0, &offset0, &mode,
8347 &unsignedp, &volatilep, false);
8348 if (TREE_CODE (base0) == INDIRECT_REF)
8349 base0 = TREE_OPERAND (base0, 0);
8350 else
8351 indirect_base0 = true;
8352 }
8353
8354 base1 = arg1;
8355 if (TREE_CODE (arg1) == ADDR_EXPR)
8356 {
8357 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8358 &bitsize, &bitpos1, &offset1, &mode,
8359 &unsignedp, &volatilep, false);
8360 /* Make sure base1 is indirect or non-indirect exactly
8361 as base0 was handled above. */
8362 if (TREE_CODE (base1) == INDIRECT_REF
8363 && !indirect_base0)
8364 base1 = TREE_OPERAND (base1, 0);
8365 else if (!indirect_base0)
8366 base1 = NULL_TREE;
8367 }
8368 else if (indirect_base0)
8369 base1 = NULL_TREE;
8370
8371 /* If we have equivalent bases we might be able to simplify. */
8372 if (base0 && base1
8373 && operand_equal_p (base0, base1, 0))
8374 {
8375 /* We can fold this expression to a constant if the non-constant
8376 offset parts are equal. */
8377 if (offset0 == offset1
8378 || (offset0 && offset1
8379 && operand_equal_p (offset0, offset1, 0)))
8380 {
8381 switch (code)
8382 {
8383 case EQ_EXPR:
8384 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8385 case NE_EXPR:
8386 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8387 case LT_EXPR:
8388 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8389 case LE_EXPR:
8390 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8391 case GE_EXPR:
8392 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8393 case GT_EXPR:
8394 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8395 default:;
8396 }
8397 }
8398 /* We can simplify the comparison to a comparison of the variable
8399 offset parts if the constant offset parts are equal.
8400 Be careful to use signed size type here because otherwise we
8401 mess with array offsets in the wrong way. This is possible
8402 because pointer arithmetic is restricted to remain within an
8403 object and overflow on pointer differences is undefined as of
8404 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8405 else if (bitpos0 == bitpos1)
8406 {
8407 tree signed_size_type_node;
8408 signed_size_type_node = signed_type_for (size_type_node);
8409
8410 /* By converting to signed size type we cover middle-end pointer
8411 arithmetic, which operates on unsigned pointer types the width of
8412 size type, and ARRAY_REF offsets, which are properly sign or
8413 zero extended from their type in case it is narrower than
8414 size type. */
8415 if (offset0 == NULL_TREE)
8416 offset0 = build_int_cst (signed_size_type_node, 0);
8417 else
8418 offset0 = fold_convert (signed_size_type_node, offset0);
8419 if (offset1 == NULL_TREE)
8420 offset1 = build_int_cst (signed_size_type_node, 0);
8421 else
8422 offset1 = fold_convert (signed_size_type_node, offset1);
8423
8424 return fold_build2 (code, type, offset0, offset1);
8425 }
8426 }
8427 }
8428
8429 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8430 same object, then we can fold this to a comparison of the two offsets in
8431 signed size type. This is possible because pointer arithmetic is
8432 restricted to retain within an object and overflow on pointer differences
8433 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t.
8434
8435 We check flag_wrapv directly because pointer types are unsigned,
8436 and therefore TYPE_OVERFLOW_WRAPS returns true for them. That is
8437 normally what we want, to avoid certain odd overflow cases, but
8438 not here. */
8439 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8440 && !flag_wrapv
8441 && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0)))
8442 {
8443 tree base0, offset0, base1, offset1;
8444
8445 if (extract_array_ref (arg0, &base0, &offset0)
8446 && extract_array_ref (arg1, &base1, &offset1)
8447 && operand_equal_p (base0, base1, 0))
8448 {
8449 tree signed_size_type_node;
8450 signed_size_type_node = signed_type_for (size_type_node);
8451
8452 /* By converting to signed size type we cover middle-end pointer
8453 arithmetic, which operates on unsigned pointer types the width of
8454 size type, and ARRAY_REF offsets, which are properly sign or
8455 zero extended from their type in case it is narrower than
8456 size type. */
8457 if (offset0 == NULL_TREE)
8458 offset0 = build_int_cst (signed_size_type_node, 0);
8459 else
8460 offset0 = fold_convert (signed_size_type_node, offset0);
8461 if (offset1 == NULL_TREE)
8462 offset1 = build_int_cst (signed_size_type_node, 0);
8463 else
8464 offset1 = fold_convert (signed_size_type_node, offset1);
8465
8466 return fold_build2 (code, type, offset0, offset1);
8467 }
8468 }
8469
8470 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8471 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8472 the resulting offset is smaller in absolute value than the
8473 original one. */
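/* E.g. x + 10 < y + 2 becomes x < y + -8; the combined constant 8
   is smaller in absolute value than the original 10 (illustrative). */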
8474 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8475 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8476 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8477 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8478 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8479 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8480 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8481 {
8482 tree const1 = TREE_OPERAND (arg0, 1);
8483 tree const2 = TREE_OPERAND (arg1, 1);
8484 tree variable1 = TREE_OPERAND (arg0, 0);
8485 tree variable2 = TREE_OPERAND (arg1, 0);
8486 tree cst;
8487 const char * const warnmsg = G_("assuming signed overflow does not "
8488 "occur when combining constants around "
8489 "a comparison");
8490
8491 /* Put the constant on the side where it doesn't overflow and is
8492 of lower absolute value than before. */
8493 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8494 ? MINUS_EXPR : PLUS_EXPR,
8495 const2, const1, 0);
8496 if (!TREE_OVERFLOW (cst)
8497 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8498 {
8499 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8500 return fold_build2 (code, type,
8501 variable1,
8502 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8503 variable2, cst));
8504 }
8505
8506 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8507 ? MINUS_EXPR : PLUS_EXPR,
8508 const1, const2, 0);
8509 if (!TREE_OVERFLOW (cst)
8510 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8511 {
8512 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8513 return fold_build2 (code, type,
8514 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8515 variable1, cst),
8516 variable2);
8517 }
8518 }
8519
8520 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8521 signed arithmetic case. That form is created by the compiler
8522 often enough for folding it to be of value. One example is in
8523 computing loop trip counts after Operator Strength Reduction. */
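/* E.g. x * 4 > 0 becomes x > 0, while x * -4 > 0 becomes x < 0,
   the comparison sense being swapped for a negative multiplier
   (illustrative). */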
8524 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8525 && TREE_CODE (arg0) == MULT_EXPR
8526 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8527 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8528 && integer_zerop (arg1))
8529 {
8530 tree const1 = TREE_OPERAND (arg0, 1);
8531 tree const2 = arg1; /* zero */
8532 tree variable1 = TREE_OPERAND (arg0, 0);
8533 enum tree_code cmp_code = code;
8534
8535 gcc_assert (!integer_zerop (const1));
8536
8537 fold_overflow_warning (("assuming signed overflow does not occur when "
8538 "eliminating multiplication in comparison "
8539 "with zero"),
8540 WARN_STRICT_OVERFLOW_COMPARISON);
8541
8542 /* If const1 is negative we swap the sense of the comparison. */
8543 if (tree_int_cst_sgn (const1) < 0)
8544 cmp_code = swap_tree_comparison (cmp_code);
8545
8546 return fold_build2 (cmp_code, type, variable1, const2);
8547 }
8548
8549 tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
8550 if (tem)
8551 return tem;
8552
8553 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8554 {
8555 tree targ0 = strip_float_extensions (arg0);
8556 tree targ1 = strip_float_extensions (arg1);
8557 tree newtype = TREE_TYPE (targ0);
8558
8559 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8560 newtype = TREE_TYPE (targ1);
8561
8562 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8563 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8564 return fold_build2 (code, type, fold_convert (newtype, targ0),
8565 fold_convert (newtype, targ1));
8566
8567 /* (-a) CMP (-b) -> b CMP a */
8568 if (TREE_CODE (arg0) == NEGATE_EXPR
8569 && TREE_CODE (arg1) == NEGATE_EXPR)
8570 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8571 TREE_OPERAND (arg0, 0));
8572
8573 if (TREE_CODE (arg1) == REAL_CST)
8574 {
8575 REAL_VALUE_TYPE cst;
8576 cst = TREE_REAL_CST (arg1);
8577
8578 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8579 if (TREE_CODE (arg0) == NEGATE_EXPR)
8580 return fold_build2 (swap_tree_comparison (code), type,
8581 TREE_OPERAND (arg0, 0),
8582 build_real (TREE_TYPE (arg1),
8583 REAL_VALUE_NEGATE (cst)));
8584
8585 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8586 /* a CMP (-0) -> a CMP 0 */
8587 if (REAL_VALUE_MINUS_ZERO (cst))
8588 return fold_build2 (code, type, arg0,
8589 build_real (TREE_TYPE (arg1), dconst0));
8590
8591 /* x != NaN is always true, other ops are always false. */
8592 if (REAL_VALUE_ISNAN (cst)
8593 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8594 {
8595 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8596 return omit_one_operand (type, tem, arg0);
8597 }
8598
8599 /* Fold comparisons against infinity. */
8600 if (REAL_VALUE_ISINF (cst))
8601 {
8602 tem = fold_inf_compare (code, type, arg0, arg1);
8603 if (tem != NULL_TREE)
8604 return tem;
8605 }
8606 }
8607
8608 /* If this is a comparison of a real constant with a PLUS_EXPR
8609 or a MINUS_EXPR of a real constant and unsafe math optimizations
8610 are enabled, we can convert it into a comparison with a revised
8611 real constant, provided no overflow occurs. */
8612 if (flag_unsafe_math_optimizations
8613 && TREE_CODE (arg1) == REAL_CST
8614 && (TREE_CODE (arg0) == PLUS_EXPR
8615 || TREE_CODE (arg0) == MINUS_EXPR)
8616 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8617 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8618 ? MINUS_EXPR : PLUS_EXPR,
8619 arg1, TREE_OPERAND (arg0, 1), 0))
8620 && !TREE_OVERFLOW (tem))
8621 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8622
8623 /* Likewise, we can simplify a comparison of a real constant with
8624 a MINUS_EXPR whose first operand is also a real constant, i.e.
8625 (c1 - x) < c2 becomes x > c1-c2. */
8626 if (flag_unsafe_math_optimizations
8627 && TREE_CODE (arg1) == REAL_CST
8628 && TREE_CODE (arg0) == MINUS_EXPR
8629 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8630 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8631 arg1, 0))
8632 && !TREE_OVERFLOW (tem))
8633 return fold_build2 (swap_tree_comparison (code), type,
8634 TREE_OPERAND (arg0, 1), tem);
8635
8636 /* Fold comparisons against built-in math functions. */
8637 if (TREE_CODE (arg1) == REAL_CST
8638 && flag_unsafe_math_optimizations
8639 && ! flag_errno_math)
8640 {
8641 enum built_in_function fcode = builtin_mathfn_code (arg0);
8642
8643 if (fcode != END_BUILTINS)
8644 {
8645 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8646 if (tem != NULL_TREE)
8647 return tem;
8648 }
8649 }
8650 }
8651
8652 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8653 && (TREE_CODE (arg0) == NOP_EXPR
8654 || TREE_CODE (arg0) == CONVERT_EXPR))
8655 {
8656 /* If we are widening one operand of an integer comparison,
8657 see if the other operand is similarly being widened. Perhaps we
8658 can do the comparison in the narrower type. */
8659 tem = fold_widened_comparison (code, type, arg0, arg1);
8660 if (tem)
8661 return tem;
8662
8663 /* Or if we are changing signedness. */
8664 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8665 if (tem)
8666 return tem;
8667 }
8668
8669 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8670 constant, we can simplify it. */
8671 if (TREE_CODE (arg1) == INTEGER_CST
8672 && (TREE_CODE (arg0) == MIN_EXPR
8673 || TREE_CODE (arg0) == MAX_EXPR)
8674 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8675 {
8676 tem = optimize_minmax_comparison (code, type, op0, op1);
8677 if (tem)
8678 return tem;
8679 }
8680
8681 /* Simplify comparison of something with itself. (For IEEE
8682 floating-point, we can only do some of these simplifications.) */
8683 if (operand_equal_p (arg0, arg1, 0))
8684 {
8685 switch (code)
8686 {
8687 case EQ_EXPR:
8688 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8689 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8690 return constant_boolean_node (1, type);
8691 break;
8692
8693 case GE_EXPR:
8694 case LE_EXPR:
8695 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8696 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8697 return constant_boolean_node (1, type);
8698 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8699
8700 case NE_EXPR:
8701 /* For NE, we can only do this simplification if the type is
8702 integer or we don't honor IEEE floating-point NaNs. */
8703 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8704 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8705 break;
8706 /* ... fall through ... */
8707 case GT_EXPR:
8708 case LT_EXPR:
8709 return constant_boolean_node (0, type);
8710 default:
8711 gcc_unreachable ();
8712 }
8713 }
8714
8715 /* If we are comparing an expression that just has comparisons
8716 of two integer values, arithmetic expressions of those comparisons,
8717 and constants, we can simplify it. There are only three cases
8718 to check: the two values can either be equal, the first can be
8719 greater, or the second can be greater. Fold the expression for
8720 those three values. Since each value must be 0 or 1, we have
8721 eight possibilities, each of which corresponds to the constant 0
8722 or 1 or one of the six possible comparisons.
8723
8724 This handles common cases like (a > b) == 0 but also handles
8725 expressions like ((x > y) - (y > x)) > 0, which supposedly
8726 occur in macroized code. */
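     /* Annotation (illustrative example): for (a > b) == 0 the three
	substitutions below give high_result = 0, equal_result = 1 and
	low_result = 1, i.e. the 3-bit mask 3, which selects LE_EXPR,
	so the whole expression folds to a <= b.  */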
8727
8728 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8729 {
8730 tree cval1 = 0, cval2 = 0;
8731 int save_p = 0;
8732
8733 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8734 /* Don't handle degenerate cases here; they should already
8735 have been handled anyway. */
8736 && cval1 != 0 && cval2 != 0
8737 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8738 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8739 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8740 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8741 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8742 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8743 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8744 {
8745 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8746 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8747
8748 /* We can't just pass T to eval_subst in case cval1 or cval2
8749 was the same as ARG1. */
8750
8751 tree high_result
8752 = fold_build2 (code, type,
8753 eval_subst (arg0, cval1, maxval,
8754 cval2, minval),
8755 arg1);
8756 tree equal_result
8757 = fold_build2 (code, type,
8758 eval_subst (arg0, cval1, maxval,
8759 cval2, maxval),
8760 arg1);
8761 tree low_result
8762 = fold_build2 (code, type,
8763 eval_subst (arg0, cval1, minval,
8764 cval2, maxval),
8765 arg1);
8766
8767 /* All three of these results should be 0 or 1. Confirm they are.
8768 Then use those values to select the proper code to use. */
8769
8770 if (TREE_CODE (high_result) == INTEGER_CST
8771 && TREE_CODE (equal_result) == INTEGER_CST
8772 && TREE_CODE (low_result) == INTEGER_CST)
8773 {
8774 /* Make a 3-bit mask with the high-order bit being the
8775 value for `>', the next for `=', and the low for `<'. */
8776 switch ((integer_onep (high_result) * 4)
8777 + (integer_onep (equal_result) * 2)
8778 + integer_onep (low_result))
8779 {
8780 case 0:
8781 /* Always false. */
8782 return omit_one_operand (type, integer_zero_node, arg0);
8783 case 1:
8784 code = LT_EXPR;
8785 break;
8786 case 2:
8787 code = EQ_EXPR;
8788 break;
8789 case 3:
8790 code = LE_EXPR;
8791 break;
8792 case 4:
8793 code = GT_EXPR;
8794 break;
8795 case 5:
8796 code = NE_EXPR;
8797 break;
8798 case 6:
8799 code = GE_EXPR;
8800 break;
8801 case 7:
8802 /* Always true. */
8803 return omit_one_operand (type, integer_one_node, arg0);
8804 }
8805
8806 if (save_p)
8807 return save_expr (build2 (code, type, cval1, cval2));
8808 return fold_build2 (code, type, cval1, cval2);
8809 }
8810 }
8811 }
8812
8813 /* Fold a comparison of the address of COMPONENT_REFs with the same
8814 type and component to a comparison of the address of the base
8815 object. In short, &x->a OP &y->a to x OP y and
8816 &x->a OP &y.a to x OP &y */
8817 if (TREE_CODE (arg0) == ADDR_EXPR
8818 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8819 && TREE_CODE (arg1) == ADDR_EXPR
8820 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8821 {
8822 tree cref0 = TREE_OPERAND (arg0, 0);
8823 tree cref1 = TREE_OPERAND (arg1, 0);
8824 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8825 {
8826 tree op0 = TREE_OPERAND (cref0, 0);
8827 tree op1 = TREE_OPERAND (cref1, 0);
8828 return fold_build2 (code, type,
8829 build_fold_addr_expr (op0),
8830 build_fold_addr_expr (op1));
8831 }
8832 }
8833
8834 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8835 into a single range test. */
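      /* Annotation (illustrative example): for unsigned X, X/3 == 2 holds
	 exactly when 6 <= X && X <= 8, so fold_div_compare can replace the
	 division with a single range test of that form.  */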
8836 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8837 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8838 && TREE_CODE (arg1) == INTEGER_CST
8839 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8840 && !integer_zerop (TREE_OPERAND (arg0, 1))
8841 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8842 && !TREE_OVERFLOW (arg1))
8843 {
8844 tem = fold_div_compare (code, type, arg0, arg1);
8845 if (tem != NULL_TREE)
8846 return tem;
8847 }
8848
8849 /* Fold ~X op ~Y as Y op X. */
8850 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8851 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8852 {
8853 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8854 return fold_build2 (code, type,
8855 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
8856 TREE_OPERAND (arg0, 0));
8857 }
8858
8859 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
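      /* Annotation (illustrative example): ~X < 5 becomes X > ~5, i.e.
	 X > -6 in a signed type, since ~X = -X - 1 reverses the ordering.  */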
8860 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8861 && TREE_CODE (arg1) == INTEGER_CST)
8862 {
8863 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8864 return fold_build2 (swap_tree_comparison (code), type,
8865 TREE_OPERAND (arg0, 0),
8866 fold_build1 (BIT_NOT_EXPR, cmp_type,
8867 fold_convert (cmp_type, arg1)));
8868 }
8869
8870 return NULL_TREE;
8871 }
8872
8873
8874 /* Subroutine of fold_binary. Optimize complex multiplications of the
8875 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8876 argument EXPR represents the expression "z" of type TYPE. */
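/* Annotation: for z = a + b*i we have conj(z) = a - b*i, so z * conj(z)
   equals a*a + b*b with a zero imaginary part; that is exactly the
   COMPLEX_EXPR constructed below.  */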
8877
8878 static tree
8879 fold_mult_zconjz (tree type, tree expr)
8880 {
8881 tree itype = TREE_TYPE (type);
8882 tree rpart, ipart, tem;
8883
8884 if (TREE_CODE (expr) == COMPLEX_EXPR)
8885 {
8886 rpart = TREE_OPERAND (expr, 0);
8887 ipart = TREE_OPERAND (expr, 1);
8888 }
8889 else if (TREE_CODE (expr) == COMPLEX_CST)
8890 {
8891 rpart = TREE_REALPART (expr);
8892 ipart = TREE_IMAGPART (expr);
8893 }
8894 else
8895 {
8896 expr = save_expr (expr);
8897 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8898 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8899 }
8900
8901 rpart = save_expr (rpart);
8902 ipart = save_expr (ipart);
8903 tem = fold_build2 (PLUS_EXPR, itype,
8904 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8905 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8906 return fold_build2 (COMPLEX_EXPR, type, tem,
8907 fold_convert (itype, integer_zero_node));
8908 }
8909
8910
8911 /* Fold a binary expression of code CODE and type TYPE with operands
8912 OP0 and OP1. Return the folded expression if folding is
8913 successful. Otherwise, return NULL_TREE. */
8914
8915 tree
8916 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8917 {
8918 enum tree_code_class kind = TREE_CODE_CLASS (code);
8919 tree arg0, arg1, tem;
8920 tree t1 = NULL_TREE;
8921 bool strict_overflow_p;
8922
8923 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
8924 || IS_GIMPLE_STMT_CODE_CLASS (kind))
8925 && TREE_CODE_LENGTH (code) == 2
8926 && op0 != NULL_TREE
8927 && op1 != NULL_TREE);
8928
8929 arg0 = op0;
8930 arg1 = op1;
8931
8932 /* Strip any conversions that don't change the mode. This is
8933 safe for every expression, except for a comparison expression
8934 because its signedness is derived from its operands. So, in
8935 the latter case, only strip conversions that don't change the
8936 signedness.
8937
8938 Note that this is done as an internal manipulation within the
8939 constant folder, in order to find the simplest representation
8940 of the arguments so that their form can be studied. In any
8941 case, the appropriate type conversions should be put back in
8942 the tree that will get out of the constant folder. */
8943
8944 if (kind == tcc_comparison)
8945 {
8946 STRIP_SIGN_NOPS (arg0);
8947 STRIP_SIGN_NOPS (arg1);
8948 }
8949 else
8950 {
8951 STRIP_NOPS (arg0);
8952 STRIP_NOPS (arg1);
8953 }
8954
8955 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8956 constant but we can't do arithmetic on them. */
8957 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8958 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8959 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8960 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8961 {
8962 if (kind == tcc_binary)
8963 tem = const_binop (code, arg0, arg1, 0);
8964 else if (kind == tcc_comparison)
8965 tem = fold_relational_const (code, type, arg0, arg1);
8966 else
8967 tem = NULL_TREE;
8968
8969 if (tem != NULL_TREE)
8970 {
8971 if (TREE_TYPE (tem) != type)
8972 tem = fold_convert (type, tem);
8973 return tem;
8974 }
8975 }
8976
8977 /* If this is a commutative operation, and ARG0 is a constant, move it
8978 to ARG1 to reduce the number of tests below. */
8979 if (commutative_tree_code (code)
8980 && tree_swap_operands_p (arg0, arg1, true))
8981 return fold_build2 (code, type, op1, op0);
8982
8983 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8984
8985 First check for cases where an arithmetic operation is applied to a
8986 compound, conditional, or comparison operation. Push the arithmetic
8987 operation inside the compound or conditional to see if any folding
8988 can then be done. Convert comparison to conditional for this purpose.
8989 This also optimizes non-constant cases that used to be done in
8990 expand_expr.
8991
8992 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
8993 where one of the operands is a truth value and the other is a truth
8994 value or a BIT_AND_EXPR with the constant 1. In that case, the
8995 code below would make the expression more complex. Change it to a
8996 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8997 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8998
8999 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9000 || code == EQ_EXPR || code == NE_EXPR)
9001 && ((truth_value_p (TREE_CODE (arg0))
9002 && (truth_value_p (TREE_CODE (arg1))
9003 || (TREE_CODE (arg1) == BIT_AND_EXPR
9004 && integer_onep (TREE_OPERAND (arg1, 1)))))
9005 || (truth_value_p (TREE_CODE (arg1))
9006 && (truth_value_p (TREE_CODE (arg0))
9007 || (TREE_CODE (arg0) == BIT_AND_EXPR
9008 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9009 {
9010 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9011 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9012 : TRUTH_XOR_EXPR,
9013 boolean_type_node,
9014 fold_convert (boolean_type_node, arg0),
9015 fold_convert (boolean_type_node, arg1));
9016
9017 if (code == EQ_EXPR)
9018 tem = invert_truthvalue (tem);
9019
9020 return fold_convert (type, tem);
9021 }
9022
9023 if (TREE_CODE_CLASS (code) == tcc_binary
9024 || TREE_CODE_CLASS (code) == tcc_comparison)
9025 {
9026 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9027 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9028 fold_build2 (code, type,
9029 TREE_OPERAND (arg0, 1), op1));
9030 if (TREE_CODE (arg1) == COMPOUND_EXPR
9031 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9032 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9033 fold_build2 (code, type,
9034 op0, TREE_OPERAND (arg1, 1)));
9035
9036 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9037 {
9038 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9039 arg0, arg1,
9040 /*cond_first_p=*/1);
9041 if (tem != NULL_TREE)
9042 return tem;
9043 }
9044
9045 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9046 {
9047 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9048 arg1, arg0,
9049 /*cond_first_p=*/0);
9050 if (tem != NULL_TREE)
9051 return tem;
9052 }
9053 }
9054
9055 switch (code)
9056 {
9057 case PLUS_EXPR:
9058 /* A + (-B) -> A - B */
9059 if (TREE_CODE (arg1) == NEGATE_EXPR)
9060 return fold_build2 (MINUS_EXPR, type,
9061 fold_convert (type, arg0),
9062 fold_convert (type, TREE_OPERAND (arg1, 0)));
9063 /* (-A) + B -> B - A */
9064 if (TREE_CODE (arg0) == NEGATE_EXPR
9065 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9066 return fold_build2 (MINUS_EXPR, type,
9067 fold_convert (type, arg1),
9068 fold_convert (type, TREE_OPERAND (arg0, 0)));
9069 /* Convert ~A + 1 to -A. */
9070 if (INTEGRAL_TYPE_P (type)
9071 && TREE_CODE (arg0) == BIT_NOT_EXPR
9072 && integer_onep (arg1))
9073 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
9074
9075 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9076 same, or one of them being 1. */
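      /* Annotation (illustrative examples): A*3 + A*5 factors to A*8, and
	 A*7 + A (the C2 == 1 case) factors to A*8 as well, both via
	 fold_plusminus_mult_expr.  */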
9077 if ((TREE_CODE (arg0) == MULT_EXPR
9078 || TREE_CODE (arg1) == MULT_EXPR)
9079 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9080 {
9081 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9082 if (tem)
9083 return tem;
9084 }
9085
9086 if (! FLOAT_TYPE_P (type))
9087 {
9088 if (integer_zerop (arg1))
9089 return non_lvalue (fold_convert (type, arg0));
9090
9091 /* ~X + X is -1. */
9092 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9093 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9094 && !TYPE_OVERFLOW_TRAPS (type))
9095 {
9096 t1 = build_int_cst_type (type, -1);
9097 return omit_one_operand (type, t1, arg1);
9098 }
9099
9100 /* X + ~X is -1. */
9101 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9102 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9103 && !TYPE_OVERFLOW_TRAPS (type))
9104 {
9105 t1 = build_int_cst_type (type, -1);
9106 return omit_one_operand (type, t1, arg0);
9107 }
9108
9109 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9110 with a constant, and the two constants have no bits in common,
9111 we should treat this as a BIT_IOR_EXPR since this may produce more
9112 simplifications. */
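	  /* Annotation (illustrative example): (X & 0xf0) + (Y & 0x0f)
	     cannot carry between the two disjoint masks, so it is rewritten
	     as (X & 0xf0) | (Y & 0x0f) and handled by the BIT_IOR_EXPR
	     simplifications.  */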
9113 if (TREE_CODE (arg0) == BIT_AND_EXPR
9114 && TREE_CODE (arg1) == BIT_AND_EXPR
9115 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9116 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9117 && integer_zerop (const_binop (BIT_AND_EXPR,
9118 TREE_OPERAND (arg0, 1),
9119 TREE_OPERAND (arg1, 1), 0)))
9120 {
9121 code = BIT_IOR_EXPR;
9122 goto bit_ior;
9123 }
9124
9125 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9126 (plus (plus (mult) (mult)) (foo)) so that we can
9127 take advantage of the factoring cases below. */
9128 if (((TREE_CODE (arg0) == PLUS_EXPR
9129 || TREE_CODE (arg0) == MINUS_EXPR)
9130 && TREE_CODE (arg1) == MULT_EXPR)
9131 || ((TREE_CODE (arg1) == PLUS_EXPR
9132 || TREE_CODE (arg1) == MINUS_EXPR)
9133 && TREE_CODE (arg0) == MULT_EXPR))
9134 {
9135 tree parg0, parg1, parg, marg;
9136 enum tree_code pcode;
9137
9138 if (TREE_CODE (arg1) == MULT_EXPR)
9139 parg = arg0, marg = arg1;
9140 else
9141 parg = arg1, marg = arg0;
9142 pcode = TREE_CODE (parg);
9143 parg0 = TREE_OPERAND (parg, 0);
9144 parg1 = TREE_OPERAND (parg, 1);
9145 STRIP_NOPS (parg0);
9146 STRIP_NOPS (parg1);
9147
9148 if (TREE_CODE (parg0) == MULT_EXPR
9149 && TREE_CODE (parg1) != MULT_EXPR)
9150 return fold_build2 (pcode, type,
9151 fold_build2 (PLUS_EXPR, type,
9152 fold_convert (type, parg0),
9153 fold_convert (type, marg)),
9154 fold_convert (type, parg1));
9155 if (TREE_CODE (parg0) != MULT_EXPR
9156 && TREE_CODE (parg1) == MULT_EXPR)
9157 return fold_build2 (PLUS_EXPR, type,
9158 fold_convert (type, parg0),
9159 fold_build2 (pcode, type,
9160 fold_convert (type, marg),
9161 fold_convert (type,
9162 parg1)));
9163 }
9164
9165 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
9166 of the array. The loop optimizer sometimes produces this type of
9167 expression. */
9168 if (TREE_CODE (arg0) == ADDR_EXPR)
9169 {
9170 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
9171 if (tem)
9172 return fold_convert (type, tem);
9173 }
9174 else if (TREE_CODE (arg1) == ADDR_EXPR)
9175 {
9176 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
9177 if (tem)
9178 return fold_convert (type, tem);
9179 }
9180 }
9181 else
9182 {
9183 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9184 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9185 return non_lvalue (fold_convert (type, arg0));
9186
9187 /* Likewise if the operands are reversed. */
9188 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9189 return non_lvalue (fold_convert (type, arg1));
9190
9191 /* Convert X + -C into X - C. */
9192 if (TREE_CODE (arg1) == REAL_CST
9193 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9194 {
9195 tem = fold_negate_const (arg1, type);
9196 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9197 return fold_build2 (MINUS_EXPR, type,
9198 fold_convert (type, arg0),
9199 fold_convert (type, tem));
9200 }
9201
9202 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9203 to __complex__ ( x, y ). This is not the same for SNaNs or
9204 if signed zeros are involved. */
9205 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9206 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9207 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9208 {
9209 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9210 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9211 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9212 bool arg0rz = false, arg0iz = false;
9213 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9214 || (arg0i && (arg0iz = real_zerop (arg0i))))
9215 {
9216 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9217 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9218 if (arg0rz && arg1i && real_zerop (arg1i))
9219 {
9220 tree rp = arg1r ? arg1r
9221 : build1 (REALPART_EXPR, rtype, arg1);
9222 tree ip = arg0i ? arg0i
9223 : build1 (IMAGPART_EXPR, rtype, arg0);
9224 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9225 }
9226 else if (arg0iz && arg1r && real_zerop (arg1r))
9227 {
9228 tree rp = arg0r ? arg0r
9229 : build1 (REALPART_EXPR, rtype, arg0);
9230 tree ip = arg1i ? arg1i
9231 : build1 (IMAGPART_EXPR, rtype, arg1);
9232 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9233 }
9234 }
9235 }
9236
9237 if (flag_unsafe_math_optimizations
9238 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9239 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9240 && (tem = distribute_real_division (code, type, arg0, arg1)))
9241 return tem;
9242
9243 /* Convert x+x into x*2.0. */
9244 if (operand_equal_p (arg0, arg1, 0)
9245 && SCALAR_FLOAT_TYPE_P (type))
9246 return fold_build2 (MULT_EXPR, type, arg0,
9247 build_real (type, dconst2));
9248
9249 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
9250 if (flag_unsafe_math_optimizations
9251 && TREE_CODE (arg1) == PLUS_EXPR
9252 && TREE_CODE (arg0) != MULT_EXPR)
9253 {
9254 tree tree10 = TREE_OPERAND (arg1, 0);
9255 tree tree11 = TREE_OPERAND (arg1, 1);
9256 if (TREE_CODE (tree11) == MULT_EXPR
9257 && TREE_CODE (tree10) == MULT_EXPR)
9258 {
9259 tree tree0;
9260 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9261 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9262 }
9263 }
9264 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
9265 if (flag_unsafe_math_optimizations
9266 && TREE_CODE (arg0) == PLUS_EXPR
9267 && TREE_CODE (arg1) != MULT_EXPR)
9268 {
9269 tree tree00 = TREE_OPERAND (arg0, 0);
9270 tree tree01 = TREE_OPERAND (arg0, 1);
9271 if (TREE_CODE (tree01) == MULT_EXPR
9272 && TREE_CODE (tree00) == MULT_EXPR)
9273 {
9274 tree tree0;
9275 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9276 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9277 }
9278 }
9279 }
9280
9281 bit_rotate:
9282 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9283 is a rotate of A by C1 bits. */
9284 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9285 is a rotate of A by B bits. */
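  /* Annotation (illustrative example): for a 32-bit unsigned A,
     (A << 3) + (A >> 29) matches the first form and
     (A << B) + (A >> (32 - B)) matches the second; both become
     LROTATE_EXPR, i.e. a left-rotate of A.  */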
9286 {
9287 enum tree_code code0, code1;
9288 code0 = TREE_CODE (arg0);
9289 code1 = TREE_CODE (arg1);
9290 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9291 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9292 && operand_equal_p (TREE_OPERAND (arg0, 0),
9293 TREE_OPERAND (arg1, 0), 0)
9294 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9295 {
9296 tree tree01, tree11;
9297 enum tree_code code01, code11;
9298
9299 tree01 = TREE_OPERAND (arg0, 1);
9300 tree11 = TREE_OPERAND (arg1, 1);
9301 STRIP_NOPS (tree01);
9302 STRIP_NOPS (tree11);
9303 code01 = TREE_CODE (tree01);
9304 code11 = TREE_CODE (tree11);
9305 if (code01 == INTEGER_CST
9306 && code11 == INTEGER_CST
9307 && TREE_INT_CST_HIGH (tree01) == 0
9308 && TREE_INT_CST_HIGH (tree11) == 0
9309 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9310 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9311 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9312 code0 == LSHIFT_EXPR ? tree01 : tree11);
9313 else if (code11 == MINUS_EXPR)
9314 {
9315 tree tree110, tree111;
9316 tree110 = TREE_OPERAND (tree11, 0);
9317 tree111 = TREE_OPERAND (tree11, 1);
9318 STRIP_NOPS (tree110);
9319 STRIP_NOPS (tree111);
9320 if (TREE_CODE (tree110) == INTEGER_CST
9321 && 0 == compare_tree_int (tree110,
9322 TYPE_PRECISION
9323 (TREE_TYPE (TREE_OPERAND
9324 (arg0, 0))))
9325 && operand_equal_p (tree01, tree111, 0))
9326 return build2 ((code0 == LSHIFT_EXPR
9327 ? LROTATE_EXPR
9328 : RROTATE_EXPR),
9329 type, TREE_OPERAND (arg0, 0), tree01);
9330 }
9331 else if (code01 == MINUS_EXPR)
9332 {
9333 tree tree010, tree011;
9334 tree010 = TREE_OPERAND (tree01, 0);
9335 tree011 = TREE_OPERAND (tree01, 1);
9336 STRIP_NOPS (tree010);
9337 STRIP_NOPS (tree011);
9338 if (TREE_CODE (tree010) == INTEGER_CST
9339 && 0 == compare_tree_int (tree010,
9340 TYPE_PRECISION
9341 (TREE_TYPE (TREE_OPERAND
9342 (arg0, 0))))
9343 && operand_equal_p (tree11, tree011, 0))
9344 return build2 ((code0 != LSHIFT_EXPR
9345 ? LROTATE_EXPR
9346 : RROTATE_EXPR),
9347 type, TREE_OPERAND (arg0, 0), tree11);
9348 }
9349 }
9350 }
9351
9352 associate:
9353 /* In most languages, we can't associate operations on floats through
9354 parentheses. Rather than remember where the parentheses were, we
9355 don't associate floats at all, unless the user has specified
9356 -funsafe-math-optimizations. */
9357
9358 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9359 {
9360 tree var0, con0, lit0, minus_lit0;
9361 tree var1, con1, lit1, minus_lit1;
9362 bool ok = true;
9363
9364 /* Split both trees into variables, constants, and literals. Then
9365 associate each group together, the constants with literals,
9366 then the result with variables. This increases the chances of
9367 literals being recombined later and of generating relocatable
9368 expressions for the sum of a constant and literal. */
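	  /* Annotation (illustrative example): for unsigned x and y,
	     (x + 3) + (y + 5) splits into the variable parts x, y and the
	     literal parts 3, 5; re-associating each group rebuilds the
	     expression as (x + y) + 8.  */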
9369 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9370 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9371 code == MINUS_EXPR);
9372
9373 /* With undefined overflow we can only associate constants
9374 with one variable. */
9375 if ((POINTER_TYPE_P (type)
9376 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9377 && var0 && var1)
9378 {
9379 tree tmp0 = var0;
9380 tree tmp1 = var1;
9381
9382 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9383 tmp0 = TREE_OPERAND (tmp0, 0);
9384 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9385 tmp1 = TREE_OPERAND (tmp1, 0);
9386 /* The only case we can still associate with two variables
9387 is if they are the same, modulo negation. */
9388 if (!operand_equal_p (tmp0, tmp1, 0))
9389 ok = false;
9390 }
9391
9392 /* Only do something if we found more than two objects. Otherwise,
9393 nothing has changed and we risk infinite recursion. */
9394 if (ok
9395 && (2 < ((var0 != 0) + (var1 != 0)
9396 + (con0 != 0) + (con1 != 0)
9397 + (lit0 != 0) + (lit1 != 0)
9398 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9399 {
9400 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9401 if (code == MINUS_EXPR)
9402 code = PLUS_EXPR;
9403
9404 var0 = associate_trees (var0, var1, code, type);
9405 con0 = associate_trees (con0, con1, code, type);
9406 lit0 = associate_trees (lit0, lit1, code, type);
9407 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9408
9409 /* Preserve the MINUS_EXPR if the negative part of the literal is
9410 greater than the positive part. Otherwise, the multiplicative
9411 folding code (i.e. extract_muldiv) may be fooled when
9412 unsigned constants are subtracted, like in the following
9413 example: ((X*2 + 4) - 8U)/2. */
9414 if (minus_lit0 && lit0)
9415 {
9416 if (TREE_CODE (lit0) == INTEGER_CST
9417 && TREE_CODE (minus_lit0) == INTEGER_CST
9418 && tree_int_cst_lt (lit0, minus_lit0))
9419 {
9420 minus_lit0 = associate_trees (minus_lit0, lit0,
9421 MINUS_EXPR, type);
9422 lit0 = 0;
9423 }
9424 else
9425 {
9426 lit0 = associate_trees (lit0, minus_lit0,
9427 MINUS_EXPR, type);
9428 minus_lit0 = 0;
9429 }
9430 }
9431 if (minus_lit0)
9432 {
9433 if (con0 == 0)
9434 return fold_convert (type,
9435 associate_trees (var0, minus_lit0,
9436 MINUS_EXPR, type));
9437 else
9438 {
9439 con0 = associate_trees (con0, minus_lit0,
9440 MINUS_EXPR, type);
9441 return fold_convert (type,
9442 associate_trees (var0, con0,
9443 PLUS_EXPR, type));
9444 }
9445 }
9446
9447 con0 = associate_trees (con0, lit0, code, type);
9448 return fold_convert (type, associate_trees (var0, con0,
9449 code, type));
9450 }
9451 }
9452
9453 return NULL_TREE;
9454
9455 case MINUS_EXPR:
9456 /* A - (-B) -> A + B */
9457 if (TREE_CODE (arg1) == NEGATE_EXPR)
9458 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
9459 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9460 if (TREE_CODE (arg0) == NEGATE_EXPR
9461 && (FLOAT_TYPE_P (type)
9462 || INTEGRAL_TYPE_P (type))
9463 && negate_expr_p (arg1)
9464 && reorder_operands_p (arg0, arg1))
9465 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
9466 TREE_OPERAND (arg0, 0));
9467 /* Convert -A - 1 to ~A. */
9468 if (INTEGRAL_TYPE_P (type)
9469 && TREE_CODE (arg0) == NEGATE_EXPR
9470 && integer_onep (arg1)
9471 && !TYPE_OVERFLOW_TRAPS (type))
9472 return fold_build1 (BIT_NOT_EXPR, type,
9473 fold_convert (type, TREE_OPERAND (arg0, 0)));
9474
9475 /* Convert -1 - A to ~A. */
9476 if (INTEGRAL_TYPE_P (type)
9477 && integer_all_onesp (arg0))
9478 return fold_build1 (BIT_NOT_EXPR, type, op1);
9479
9480 if (! FLOAT_TYPE_P (type))
9481 {
9482 if (integer_zerop (arg0))
9483 return negate_expr (fold_convert (type, arg1));
9484 if (integer_zerop (arg1))
9485 return non_lvalue (fold_convert (type, arg0));
9486
9487 /* Fold A - (A & B) into ~B & A. */
9488 if (!TREE_SIDE_EFFECTS (arg0)
9489 && TREE_CODE (arg1) == BIT_AND_EXPR)
9490 {
9491 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9492 return fold_build2 (BIT_AND_EXPR, type,
9493 fold_build1 (BIT_NOT_EXPR, type,
9494 TREE_OPERAND (arg1, 0)),
9495 arg0);
9496 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9497 return fold_build2 (BIT_AND_EXPR, type,
9498 fold_build1 (BIT_NOT_EXPR, type,
9499 TREE_OPERAND (arg1, 1)),
9500 arg0);
9501 }
9502
9503 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9504 any power of 2 minus 1. */
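	  /* Annotation (illustrative example): with B == 7 (a power of 2
	     minus 1), (A & ~7) - (A & 7) becomes (A ^ 7) - 7.  */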
9505 if (TREE_CODE (arg0) == BIT_AND_EXPR
9506 && TREE_CODE (arg1) == BIT_AND_EXPR
9507 && operand_equal_p (TREE_OPERAND (arg0, 0),
9508 TREE_OPERAND (arg1, 0), 0))
9509 {
9510 tree mask0 = TREE_OPERAND (arg0, 1);
9511 tree mask1 = TREE_OPERAND (arg1, 1);
9512 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9513
9514 if (operand_equal_p (tem, mask1, 0))
9515 {
9516 tem = fold_build2 (BIT_XOR_EXPR, type,
9517 TREE_OPERAND (arg0, 0), mask1);
9518 return fold_build2 (MINUS_EXPR, type, tem, mask1);
9519 }
9520 }
9521 }
9522
9523 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9524 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9525 return non_lvalue (fold_convert (type, arg0));
9526
9527 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9528 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9529 (-ARG1 + ARG0) reduces to -ARG1. */
9530 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9531 return negate_expr (fold_convert (type, arg1));
9532
9533 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9534 __complex__ ( x, -y ). This is not the same for SNaNs or if
9535 signed zeros are involved. */
9536 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9537 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9538 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9539 {
9540 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9541 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9542 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9543 bool arg0rz = false, arg0iz = false;
9544 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9545 || (arg0i && (arg0iz = real_zerop (arg0i))))
9546 {
9547 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9548 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9549 if (arg0rz && arg1i && real_zerop (arg1i))
9550 {
9551 tree rp = fold_build1 (NEGATE_EXPR, rtype,
9552 arg1r ? arg1r
9553 : build1 (REALPART_EXPR, rtype, arg1));
9554 tree ip = arg0i ? arg0i
9555 : build1 (IMAGPART_EXPR, rtype, arg0);
9556 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9557 }
9558 else if (arg0iz && arg1r && real_zerop (arg1r))
9559 {
9560 tree rp = arg0r ? arg0r
9561 : build1 (REALPART_EXPR, rtype, arg0);
9562 tree ip = fold_build1 (NEGATE_EXPR, rtype,
9563 arg1i ? arg1i
9564 : build1 (IMAGPART_EXPR, rtype, arg1));
9565 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9566 }
9567 }
9568 }
9569
9570 /* Fold &x - &x. This can happen from &x.foo - &x.
9571 This is unsafe for certain floats even in non-IEEE formats.
9572 In IEEE, it is unsafe because it gives the wrong result for NaNs.
9573 Also note that operand_equal_p is always false if an operand
9574 is volatile. */
9575
9576 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9577 && operand_equal_p (arg0, arg1, 0))
9578 return fold_convert (type, integer_zero_node);
9579
9580 /* A - B -> A + (-B) if B is easily negatable. */
9581 if (negate_expr_p (arg1)
9582 && ((FLOAT_TYPE_P (type)
9583 /* Avoid this transformation if B is a positive REAL_CST. */
9584 && (TREE_CODE (arg1) != REAL_CST
9585 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9586 || INTEGRAL_TYPE_P (type)))
9587 return fold_build2 (PLUS_EXPR, type,
9588 fold_convert (type, arg0),
9589 fold_convert (type, negate_expr (arg1)));
9590
9591 /* Try folding difference of addresses. */
9592 {
9593 HOST_WIDE_INT diff;
9594
9595 if ((TREE_CODE (arg0) == ADDR_EXPR
9596 || TREE_CODE (arg1) == ADDR_EXPR)
9597 && ptr_difference_const (arg0, arg1, &diff))
9598 return build_int_cst_type (type, diff);
9599 }
9600
9601 /* Fold &a[i] - &a[j] to (i-j) * sizeof (a[0]). */
9602 if (TREE_CODE (arg0) == ADDR_EXPR
9603 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9604 && TREE_CODE (arg1) == ADDR_EXPR
9605 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9606 {
9607 tree aref0 = TREE_OPERAND (arg0, 0);
9608 tree aref1 = TREE_OPERAND (arg1, 0);
9609 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9610 TREE_OPERAND (aref1, 0), 0))
9611 {
9612 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9613 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9614 tree esz = array_ref_element_size (aref0);
9615 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9616 return fold_build2 (MULT_EXPR, type, diff,
9617 fold_convert (type, esz));
9618
9619 }
9620 }
9621
9622 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
9623 of the array. The loop optimizer sometimes produces this type of
9624 expression. */
9625 if (TREE_CODE (arg0) == ADDR_EXPR)
9626 {
9627 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9628 if (tem)
9629 return fold_convert (type, tem);
9630 }
9631
9632 if (flag_unsafe_math_optimizations
9633 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9634 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9635 && (tem = distribute_real_division (code, type, arg0, arg1)))
9636 return tem;
9637
9638 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9639 same, or one of them being 1. */
9640 if ((TREE_CODE (arg0) == MULT_EXPR
9641 || TREE_CODE (arg1) == MULT_EXPR)
9642 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9643 {
9644 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9645 if (tem)
9646 return tem;
9647 }
9648
9649 goto associate;
9650
9651 case MULT_EXPR:
9652 /* (-A) * (-B) -> A * B */
9653 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9654 return fold_build2 (MULT_EXPR, type,
9655 fold_convert (type, TREE_OPERAND (arg0, 0)),
9656 fold_convert (type, negate_expr (arg1)));
9657 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9658 return fold_build2 (MULT_EXPR, type,
9659 fold_convert (type, negate_expr (arg0)),
9660 fold_convert (type, TREE_OPERAND (arg1, 0)));
9661
9662 if (! FLOAT_TYPE_P (type))
9663 {
9664 if (integer_zerop (arg1))
9665 return omit_one_operand (type, arg1, arg0);
9666 if (integer_onep (arg1))
9667 return non_lvalue (fold_convert (type, arg0));
9668 /* Transform x * -1 into -x. */
9669 if (integer_all_onesp (arg1))
9670 return fold_convert (type, negate_expr (arg0));
9671 /* Transform x * -C into -x * C if x is easily negatable. */
9672 if (TREE_CODE (arg1) == INTEGER_CST
9673 && tree_int_cst_sgn (arg1) == -1
9674 && negate_expr_p (arg0)
9675 && (tem = negate_expr (arg1)) != arg1
9676 && !TREE_OVERFLOW (tem))
9677 return fold_build2 (MULT_EXPR, type,
9678 negate_expr (arg0), tem);
9679
9680 /* (a * (1 << b)) is (a << b) */
9681 if (TREE_CODE (arg1) == LSHIFT_EXPR
9682 && integer_onep (TREE_OPERAND (arg1, 0)))
9683 return fold_build2 (LSHIFT_EXPR, type, arg0,
9684 TREE_OPERAND (arg1, 1));
9685 if (TREE_CODE (arg0) == LSHIFT_EXPR
9686 && integer_onep (TREE_OPERAND (arg0, 0)))
9687 return fold_build2 (LSHIFT_EXPR, type, arg1,
9688 TREE_OPERAND (arg0, 1));
9689
9690 strict_overflow_p = false;
9691 if (TREE_CODE (arg1) == INTEGER_CST
9692 && 0 != (tem = extract_muldiv (op0,
9693 fold_convert (type, arg1),
9694 code, NULL_TREE,
9695 &strict_overflow_p)))
9696 {
9697 if (strict_overflow_p)
9698 fold_overflow_warning (("assuming signed overflow does not "
9699 "occur when simplifying "
9700 "multiplication"),
9701 WARN_STRICT_OVERFLOW_MISC);
9702 return fold_convert (type, tem);
9703 }
9704
9705 /* Optimize z * conj(z) for integer complex numbers. */
9706 if (TREE_CODE (arg0) == CONJ_EXPR
9707 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9708 return fold_mult_zconjz (type, arg1);
9709 if (TREE_CODE (arg1) == CONJ_EXPR
9710 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9711 return fold_mult_zconjz (type, arg0);
9712 }
9713 else
9714 {
9715 /* Maybe fold x * 0 to 0. The expressions aren't the same
9716 when x is NaN, since x * 0 is also NaN. Nor are they the
9717 same in modes with signed zeros, since multiplying a
9718 negative value by 0 gives -0, not +0. */
9719 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9720 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9721 && real_zerop (arg1))
9722 return omit_one_operand (type, arg1, arg0);
9723 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9724 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9725 && real_onep (arg1))
9726 return non_lvalue (fold_convert (type, arg0));
9727
9728 /* Transform x * -1.0 into -x. */
9729 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9730 && real_minus_onep (arg1))
9731 return fold_convert (type, negate_expr (arg0));
9732
9733 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9734 if (flag_unsafe_math_optimizations
9735 && TREE_CODE (arg0) == RDIV_EXPR
9736 && TREE_CODE (arg1) == REAL_CST
9737 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9738 {
9739 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9740 arg1, 0);
9741 if (tem)
9742 return fold_build2 (RDIV_EXPR, type, tem,
9743 TREE_OPERAND (arg0, 1));
9744 }
9745
9746 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9747 if (operand_equal_p (arg0, arg1, 0))
9748 {
9749 tree tem = fold_strip_sign_ops (arg0);
9750 if (tem != NULL_TREE)
9751 {
9752 tem = fold_convert (type, tem);
9753 return fold_build2 (MULT_EXPR, type, tem, tem);
9754 }
9755 }
9756
9757 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9758 This is not the same for NaNs or if signed zeros are
9759 involved. */
9760 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9761 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9762 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9763 && TREE_CODE (arg1) == COMPLEX_CST
9764 && real_zerop (TREE_REALPART (arg1)))
9765 {
9766 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9767 if (real_onep (TREE_IMAGPART (arg1)))
9768 return fold_build2 (COMPLEX_EXPR, type,
9769 negate_expr (fold_build1 (IMAGPART_EXPR,
9770 rtype, arg0)),
9771 fold_build1 (REALPART_EXPR, rtype, arg0));
9772 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9773 return fold_build2 (COMPLEX_EXPR, type,
9774 fold_build1 (IMAGPART_EXPR, rtype, arg0),
9775 negate_expr (fold_build1 (REALPART_EXPR,
9776 rtype, arg0)));
9777 }
9778
9779 /* Optimize z * conj(z) for floating point complex numbers.
9780 Guarded by flag_unsafe_math_optimizations as non-finite
9781 imaginary components don't produce scalar results. */
9782 if (flag_unsafe_math_optimizations
9783 && TREE_CODE (arg0) == CONJ_EXPR
9784 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9785 return fold_mult_zconjz (type, arg1);
9786 if (flag_unsafe_math_optimizations
9787 && TREE_CODE (arg1) == CONJ_EXPR
9788 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9789 return fold_mult_zconjz (type, arg0);
9790
9791 if (flag_unsafe_math_optimizations)
9792 {
9793 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9794 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9795
9796 /* Optimizations of root(...)*root(...). */
9797 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9798 {
9799 tree rootfn, arg;
9800 tree arg00 = CALL_EXPR_ARG (arg0, 0);
9801 tree arg10 = CALL_EXPR_ARG (arg1, 0);
9802
9803 /* Optimize sqrt(x)*sqrt(x) as x. */
9804 if (BUILTIN_SQRT_P (fcode0)
9805 && operand_equal_p (arg00, arg10, 0)
9806 && ! HONOR_SNANS (TYPE_MODE (type)))
9807 return arg00;
9808
9809 /* Optimize root(x)*root(y) as root(x*y). */
9810 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9811 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9812 return build_call_expr (rootfn, 1, arg);
9813 }
9814
9815 /* Optimize expN(x)*expN(y) as expN(x+y). */
9816 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9817 {
9818 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9819 tree arg = fold_build2 (PLUS_EXPR, type,
9820 CALL_EXPR_ARG (arg0, 0),
9821 CALL_EXPR_ARG (arg1, 0));
9822 return build_call_expr (expfn, 1, arg);
9823 }
9824
9825 /* Optimizations of pow(...)*pow(...). */
9826 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9827 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9828 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9829 {
9830 tree arg00 = CALL_EXPR_ARG (arg0, 0);
9831 tree arg01 = CALL_EXPR_ARG (arg0, 1);
9832 tree arg10 = CALL_EXPR_ARG (arg1, 0);
9833 tree arg11 = CALL_EXPR_ARG (arg1, 1);
9834
9835 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9836 if (operand_equal_p (arg01, arg11, 0))
9837 {
9838 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9839 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9840 return build_call_expr (powfn, 2, arg, arg01);
9841 }
9842
9843 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9844 if (operand_equal_p (arg00, arg10, 0))
9845 {
9846 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9847 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9848 return build_call_expr (powfn, 2, arg00, arg);
9849 }
9850 }
9851
9852 /* Optimize tan(x)*cos(x) as sin(x). */
9853 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9854 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9855 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9856 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9857 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9858 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9859 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
9860 CALL_EXPR_ARG (arg1, 0), 0))
9861 {
9862 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9863
9864 if (sinfn != NULL_TREE)
9865 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
9866 }
9867
9868 /* Optimize x*pow(x,c) as pow(x,c+1). */
9869 if (fcode1 == BUILT_IN_POW
9870 || fcode1 == BUILT_IN_POWF
9871 || fcode1 == BUILT_IN_POWL)
9872 {
9873 tree arg10 = CALL_EXPR_ARG (arg1, 0);
9874 tree arg11 = CALL_EXPR_ARG (arg1, 1);
9875 if (TREE_CODE (arg11) == REAL_CST
9876 && !TREE_OVERFLOW (arg11)
9877 && operand_equal_p (arg0, arg10, 0))
9878 {
9879 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
9880 REAL_VALUE_TYPE c;
9881 tree arg;
9882
9883 c = TREE_REAL_CST (arg11);
9884 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9885 arg = build_real (type, c);
9886 return build_call_expr (powfn, 2, arg0, arg);
9887 }
9888 }
9889
9890 /* Optimize pow(x,c)*x as pow(x,c+1). */
9891 if (fcode0 == BUILT_IN_POW
9892 || fcode0 == BUILT_IN_POWF
9893 || fcode0 == BUILT_IN_POWL)
9894 {
9895 tree arg00 = CALL_EXPR_ARG (arg0, 0);
9896 tree arg01 = CALL_EXPR_ARG (arg0, 1);
9897 if (TREE_CODE (arg01) == REAL_CST
9898 && !TREE_OVERFLOW (arg01)
9899 && operand_equal_p (arg1, arg00, 0))
9900 {
9901 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9902 REAL_VALUE_TYPE c;
9903 tree arg;
9904
9905 c = TREE_REAL_CST (arg01);
9906 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9907 arg = build_real (type, c);
9908 return build_call_expr (powfn, 2, arg1, arg);
9909 }
9910 }
9911
9912 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9913 if (! optimize_size
9914 && operand_equal_p (arg0, arg1, 0))
9915 {
9916 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9917
9918 if (powfn)
9919 {
9920 tree arg = build_real (type, dconst2);
9921 return build_call_expr (powfn, 2, arg0, arg);
9922 }
9923 }
9924 }
9925 }
9926 goto associate;
9927
9928 case BIT_IOR_EXPR:
9929 bit_ior:
9930 if (integer_all_onesp (arg1))
9931 return omit_one_operand (type, arg1, arg0);
9932 if (integer_zerop (arg1))
9933 return non_lvalue (fold_convert (type, arg0));
9934 if (operand_equal_p (arg0, arg1, 0))
9935 return non_lvalue (fold_convert (type, arg0));
9936
9937 /* ~X | X is -1. */
9938 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9939 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9940 {
9941 t1 = build_int_cst_type (type, -1);
9942 return omit_one_operand (type, t1, arg1);
9943 }
9944
9945 /* X | ~X is -1. */
9946 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9947 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9948 {
9949 t1 = build_int_cst_type (type, -1);
9950 return omit_one_operand (type, t1, arg0);
9951 }
9952
9953 /* Canonicalize (X & C1) | C2. */
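	  /* Annotation (illustrative examples of the three cases below):
	     (X & 3) | 7 folds to just 7, since 3 & 7 == 3; (X & ~2) | 2
	     becomes X | 2, since ~2 | 2 == ~0; and (X & 6) | 3 narrows C1
	     to C1 & ~C2, giving (X & 4) | 3.  */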
9954 if (TREE_CODE (arg0) == BIT_AND_EXPR
9955 && TREE_CODE (arg1) == INTEGER_CST
9956 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9957 {
9958 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9959 int width = TYPE_PRECISION (type);
9960 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9961 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9962 hi2 = TREE_INT_CST_HIGH (arg1);
9963 lo2 = TREE_INT_CST_LOW (arg1);
9964
9965 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9966 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9967 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9968
9969 if (width > HOST_BITS_PER_WIDE_INT)
9970 {
9971 mhi = (unsigned HOST_WIDE_INT) -1
9972 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9973 mlo = -1;
9974 }
9975 else
9976 {
9977 mhi = 0;
9978 mlo = (unsigned HOST_WIDE_INT) -1
9979 >> (HOST_BITS_PER_WIDE_INT - width);
9980 }
9981
9982 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9983 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9984 return fold_build2 (BIT_IOR_EXPR, type,
9985 TREE_OPERAND (arg0, 0), arg1);
9986
9987 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9988 hi1 &= mhi;
9989 lo1 &= mlo;
9990 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9991 return fold_build2 (BIT_IOR_EXPR, type,
9992 fold_build2 (BIT_AND_EXPR, type,
9993 TREE_OPERAND (arg0, 0),
9994 build_int_cst_wide (type,
9995 lo1 & ~lo2,
9996 hi1 & ~hi2)),
9997 arg1);
9998 }
9999
10000 /* (X & Y) | Y is (X, Y). */
10001 if (TREE_CODE (arg0) == BIT_AND_EXPR
10002 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10003 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10004 /* (X & Y) | X is (Y, X). */
10005 if (TREE_CODE (arg0) == BIT_AND_EXPR
10006 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10007 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10008 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10009 /* X | (X & Y) is (Y, X). */
10010 if (TREE_CODE (arg1) == BIT_AND_EXPR
10011 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10012 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10013 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10014 /* X | (Y & X) is (Y, X). */
10015 if (TREE_CODE (arg1) == BIT_AND_EXPR
10016 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10017 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10018 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10019
10020 t1 = distribute_bit_expr (code, type, arg0, arg1);
10021 if (t1 != NULL_TREE)
10022 return t1;
10023
10024 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10025
10026 This results in more efficient code for machines without a NAND
10027 instruction. Combine will canonicalize to the first form
10028 which will allow use of NAND instructions provided by the
10029 backend if they exist. */
10030 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10031 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10032 {
10033 return fold_build1 (BIT_NOT_EXPR, type,
10034 build2 (BIT_AND_EXPR, type,
10035 TREE_OPERAND (arg0, 0),
10036 TREE_OPERAND (arg1, 0)));
10037 }
10038
10039 /* See if this can be simplified into a rotate first. If that
10040 is unsuccessful, continue in the association code.
10041 goto bit_rotate;
10042
10043 case BIT_XOR_EXPR:
10044 if (integer_zerop (arg1))
10045 return non_lvalue (fold_convert (type, arg0));
10046 if (integer_all_onesp (arg1))
10047 return fold_build1 (BIT_NOT_EXPR, type, arg0);
10048 if (operand_equal_p (arg0, arg1, 0))
10049 return omit_one_operand (type, integer_zero_node, arg0);
10050
10051 /* ~X ^ X is -1. */
10052 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10053 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10054 {
10055 t1 = build_int_cst_type (type, -1);
10056 return omit_one_operand (type, t1, arg1);
10057 }
10058
10059 /* X ^ ~X is -1. */
10060 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10061 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10062 {
10063 t1 = build_int_cst_type (type, -1);
10064 return omit_one_operand (type, t1, arg0);
10065 }
10066
10067 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10068 with a constant, and the two constants have no bits in common,
10069 we should treat this as a BIT_IOR_EXPR since this may produce more
10070 simplifications. */
10071 if (TREE_CODE (arg0) == BIT_AND_EXPR
10072 && TREE_CODE (arg1) == BIT_AND_EXPR
10073 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10074 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10075 && integer_zerop (const_binop (BIT_AND_EXPR,
10076 TREE_OPERAND (arg0, 1),
10077 TREE_OPERAND (arg1, 1), 0)))
10078 {
10079 code = BIT_IOR_EXPR;
10080 goto bit_ior;
10081 }
10082
10083 /* (X | Y) ^ X -> Y & ~X. */
10084 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10085 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10086 {
10087 tree t2 = TREE_OPERAND (arg0, 1);
10088 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10089 arg1);
10090 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10091 fold_convert (type, t1));
10092 return t1;
10093 }
10094
10095 /* (Y | X) ^ X -> Y & ~X. */
10096 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10097 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10098 {
10099 tree t2 = TREE_OPERAND (arg0, 0);
10100 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10101 arg1);
10102 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10103 fold_convert (type, t1));
10104 return t1;
10105 }
10106
10107 /* X ^ (X | Y) -> Y & ~X. */
10108 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10109 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10110 {
10111 tree t2 = TREE_OPERAND (arg1, 1);
10112 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10113 arg0);
10114 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10115 fold_convert (type, t1));
10116 return t1;
10117 }
10118
10119 /* X ^ (Y | X) -> Y & ~X. */
10120 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10121 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10122 {
10123 tree t2 = TREE_OPERAND (arg1, 0);
10124 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10125 arg0);
10126 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10127 fold_convert (type, t1));
10128 return t1;
10129 }
10130
10131 /* Convert ~X ^ ~Y to X ^ Y. */
10132 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10133 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10134 return fold_build2 (code, type,
10135 fold_convert (type, TREE_OPERAND (arg0, 0)),
10136 fold_convert (type, TREE_OPERAND (arg1, 0)));
10137
10138 /* Convert ~X ^ C to X ^ ~C. */
10139 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10140 && TREE_CODE (arg1) == INTEGER_CST)
10141 return fold_build2 (code, type,
10142 fold_convert (type, TREE_OPERAND (arg0, 0)),
10143 fold_build1 (BIT_NOT_EXPR, type, arg1));
10144
10145 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10146 if (TREE_CODE (arg0) == BIT_AND_EXPR
10147 && integer_onep (TREE_OPERAND (arg0, 1))
10148 && integer_onep (arg1))
10149 return fold_build2 (EQ_EXPR, type, arg0,
10150 build_int_cst (TREE_TYPE (arg0), 0));
10151
10152 /* Fold (X & Y) ^ Y as ~X & Y. */
10153 if (TREE_CODE (arg0) == BIT_AND_EXPR
10154 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10155 {
10156 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10157 return fold_build2 (BIT_AND_EXPR, type,
10158 fold_build1 (BIT_NOT_EXPR, type, tem),
10159 fold_convert (type, arg1));
10160 }
10161 /* Fold (X & Y) ^ X as ~Y & X. */
10162 if (TREE_CODE (arg0) == BIT_AND_EXPR
10163 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10164 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10165 {
10166 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10167 return fold_build2 (BIT_AND_EXPR, type,
10168 fold_build1 (BIT_NOT_EXPR, type, tem),
10169 fold_convert (type, arg1));
10170 }
10171 /* Fold X ^ (X & Y) as X & ~Y. */
10172 if (TREE_CODE (arg1) == BIT_AND_EXPR
10173 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10174 {
10175 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10176 return fold_build2 (BIT_AND_EXPR, type,
10177 fold_convert (type, arg0),
10178 fold_build1 (BIT_NOT_EXPR, type, tem));
10179 }
10180 /* Fold X ^ (Y & X) as ~Y & X. */
10181 if (TREE_CODE (arg1) == BIT_AND_EXPR
10182 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10183 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10184 {
10185 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10186 return fold_build2 (BIT_AND_EXPR, type,
10187 fold_build1 (BIT_NOT_EXPR, type, tem),
10188 fold_convert (type, arg0));
10189 }
10190
10191 /* See if this can be simplified into a rotate first. If that
10192 is unsuccessful, continue in the association code.
10193 goto bit_rotate;
10194
10195 case BIT_AND_EXPR:
10196 if (integer_all_onesp (arg1))
10197 return non_lvalue (fold_convert (type, arg0));
10198 if (integer_zerop (arg1))
10199 return omit_one_operand (type, arg1, arg0);
10200 if (operand_equal_p (arg0, arg1, 0))
10201 return non_lvalue (fold_convert (type, arg0));
10202
10203 /* ~X & X is always zero. */
10204 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10205 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10206 return omit_one_operand (type, integer_zero_node, arg1);
10207
10208 /* X & ~X is always zero. */
10209 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10210 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10211 return omit_one_operand (type, integer_zero_node, arg0);
10212
10213 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
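      /* Annotation (illustrative example): (X | 5) & 3 is rewritten as
	 (X & 3) | (5 & 3), which further folds to (X & 3) | 1.  */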
10214 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10215 && TREE_CODE (arg1) == INTEGER_CST
10216 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10217 return fold_build2 (BIT_IOR_EXPR, type,
10218 fold_build2 (BIT_AND_EXPR, type,
10219 TREE_OPERAND (arg0, 0), arg1),
10220 fold_build2 (BIT_AND_EXPR, type,
10221 TREE_OPERAND (arg0, 1), arg1));
10222
10223 /* (X | Y) & Y is (X, Y). */
10224 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10225 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10226 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10227 /* (X | Y) & X is (Y, X). */
10228 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10229 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10230 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10231 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10232 /* X & (X | Y) is (Y, X). */
10233 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10234 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10235 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10236 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10237 /* X & (Y | X) is (Y, X). */
10238 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10239 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10240 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10241 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10242
10243 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10244 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10245 && integer_onep (TREE_OPERAND (arg0, 1))
10246 && integer_onep (arg1))
10247 {
10248 tem = TREE_OPERAND (arg0, 0);
10249 return fold_build2 (EQ_EXPR, type,
10250 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10251 build_int_cst (TREE_TYPE (tem), 1)),
10252 build_int_cst (TREE_TYPE (tem), 0));
10253 }
10254 /* Fold ~X & 1 as (X & 1) == 0. */
10255 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10256 && integer_onep (arg1))
10257 {
10258 tem = TREE_OPERAND (arg0, 0);
10259 return fold_build2 (EQ_EXPR, type,
10260 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10261 build_int_cst (TREE_TYPE (tem), 1)),
10262 build_int_cst (TREE_TYPE (tem), 0));
10263 }
10264
10265 /* Fold (X ^ Y) & Y as ~X & Y. */
10266 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10267 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10268 {
10269 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10270 return fold_build2 (BIT_AND_EXPR, type,
10271 fold_build1 (BIT_NOT_EXPR, type, tem),
10272 fold_convert (type, arg1));
10273 }
10274 /* Fold (X ^ Y) & X as ~Y & X. */
10275 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10276 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10277 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10278 {
10279 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10280 return fold_build2 (BIT_AND_EXPR, type,
10281 fold_build1 (BIT_NOT_EXPR, type, tem),
10282 fold_convert (type, arg1));
10283 }
10284 /* Fold X & (X ^ Y) as X & ~Y. */
10285 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10286 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10287 {
10288 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10289 return fold_build2 (BIT_AND_EXPR, type,
10290 fold_convert (type, arg0),
10291 fold_build1 (BIT_NOT_EXPR, type, tem));
10292 }
10293 /* Fold X & (Y ^ X) as ~Y & X. */
10294 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10295 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10296 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10297 {
10298 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10299 return fold_build2 (BIT_AND_EXPR, type,
10300 fold_build1 (BIT_NOT_EXPR, type, tem),
10301 fold_convert (type, arg0));
10302 }
10303
10304 t1 = distribute_bit_expr (code, type, arg0, arg1);
10305 if (t1 != NULL_TREE)
10306 return t1;
10307 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10308 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10309 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10310 {
10311 unsigned int prec
10312 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10313
10314 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10315 && (~TREE_INT_CST_LOW (arg1)
10316 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10317 return fold_convert (type, TREE_OPERAND (arg0, 0));
10318 }
10319
10320 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10321
10322 This results in more efficient code for machines without a NOR
10323 instruction. Combine will canonicalize to the first form
10324 which will allow use of NOR instructions provided by the
10325 backend if they exist. */
10326 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10327 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10328 {
10329 return fold_build1 (BIT_NOT_EXPR, type,
10330 build2 (BIT_IOR_EXPR, type,
10331 TREE_OPERAND (arg0, 0),
10332 TREE_OPERAND (arg1, 0)));
10333 }
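/* A source-level sketch of this De Morgan rewrite (illustration
   only):

     int f (int x, int y) { return ~x & ~y; }

   becomes

     int f (int x, int y) { return ~(x | y); }

   which a NOR-capable target can emit as a single instruction.  */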
10334
10335 goto associate;
10336
10337 case RDIV_EXPR:
10338 /* Don't touch a floating-point divide by zero unless the mode
10339 of the constant can represent infinity. */
10340 if (TREE_CODE (arg1) == REAL_CST
10341 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10342 && real_zerop (arg1))
10343 return NULL_TREE;
10344
10345 /* Optimize A / A to 1.0 if we don't care about
10346 NaNs or Infinities. Skip the transformation
10347 for non-real operands. */
10348 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10349 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10350 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10351 && operand_equal_p (arg0, arg1, 0))
10352 {
10353 tree r = build_real (TREE_TYPE (arg0), dconst1);
10354
10355 return omit_two_operands (type, r, arg0, arg1);
10356 }
10357
10358 /* The complex version of the above A / A optimization. */
10359 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10360 && operand_equal_p (arg0, arg1, 0))
10361 {
10362 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10363 if (! HONOR_NANS (TYPE_MODE (elem_type))
10364 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10365 {
10366 tree r = build_real (elem_type, dconst1);
10367 /* omit_two_operands will call fold_convert for us. */
10368 return omit_two_operands (type, r, arg0, arg1);
10369 }
10370 }
10371
10372 /* (-A) / (-B) -> A / B */
10373 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10374 return fold_build2 (RDIV_EXPR, type,
10375 TREE_OPERAND (arg0, 0),
10376 negate_expr (arg1));
10377 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10378 return fold_build2 (RDIV_EXPR, type,
10379 negate_expr (arg0),
10380 TREE_OPERAND (arg1, 0));
10381
10382 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
10383 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10384 && real_onep (arg1))
10385 return non_lvalue (fold_convert (type, arg0));
10386
10387 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
10388 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10389 && real_minus_onep (arg1))
10390 return non_lvalue (fold_convert (type, negate_expr (arg0)));
10391
10392 /* If ARG1 is a constant, we can convert this to a multiply by the
10393 reciprocal. This does not have the same rounding properties,
10394 so only do this if -funsafe-math-optimizations. We can actually
10395 always safely do it if ARG1 is a power of two, but it's hard to
10396 tell if it is or not in a portable manner. */
10397 if (TREE_CODE (arg1) == REAL_CST)
10398 {
10399 if (flag_unsafe_math_optimizations
10400 && 0 != (tem = const_binop (code, build_real (type, dconst1),
10401 arg1, 0)))
10402 return fold_build2 (MULT_EXPR, type, arg0, tem);
10403 /* Find the reciprocal if optimizing and the result is exact. */
10404 if (optimize)
10405 {
10406 REAL_VALUE_TYPE r;
10407 r = TREE_REAL_CST (arg1);
10408 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
10409 {
10410 tem = build_real (type, r);
10411 return fold_build2 (MULT_EXPR, type,
10412 fold_convert (type, arg0), tem);
10413 }
10414 }
10415 }
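/* Illustrative example (not GCC code): 4.0 has the exact
   reciprocal 0.25, so when optimizing

     double f (double x) { return x / 4.0; }

   folds to

     double f (double x) { return x * 0.25; }

   even without -funsafe-math-optimizations, since dividing by a
   power of two and multiplying by its exact reciprocal round
   identically.  */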
10416 /* Convert A/B/C to A/(B*C). */
10417 if (flag_unsafe_math_optimizations
10418 && TREE_CODE (arg0) == RDIV_EXPR)
10419 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10420 fold_build2 (MULT_EXPR, type,
10421 TREE_OPERAND (arg0, 1), arg1));
10422
10423 /* Convert A/(B/C) to (A/B)*C. */
10424 if (flag_unsafe_math_optimizations
10425 && TREE_CODE (arg1) == RDIV_EXPR)
10426 return fold_build2 (MULT_EXPR, type,
10427 fold_build2 (RDIV_EXPR, type, arg0,
10428 TREE_OPERAND (arg1, 0)),
10429 TREE_OPERAND (arg1, 1));
10430
10431 /* Convert C1/(X*C2) into (C1/C2)/X. */
10432 if (flag_unsafe_math_optimizations
10433 && TREE_CODE (arg1) == MULT_EXPR
10434 && TREE_CODE (arg0) == REAL_CST
10435 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10436 {
10437 tree tem = const_binop (RDIV_EXPR, arg0,
10438 TREE_OPERAND (arg1, 1), 0);
10439 if (tem)
10440 return fold_build2 (RDIV_EXPR, type, tem,
10441 TREE_OPERAND (arg1, 0));
10442 }
10443
10444 if (flag_unsafe_math_optimizations)
10445 {
10446 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10447 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10448
10449 /* Optimize sin(x)/cos(x) as tan(x). */
10450 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
10451 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
10452 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
10453 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10454 CALL_EXPR_ARG (arg1, 0), 0))
10455 {
10456 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10457
10458 if (tanfn != NULL_TREE)
10459 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10460 }
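/* Source-level illustration (requires
   -funsafe-math-optimizations):

     double f (double x) { return sin (x) / cos (x); }

   folds to

     double f (double x) { return tan (x); }

   The flag is needed because tan may round differently from the
   explicit division.  */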
10461
10462 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10463 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
10464 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
10465 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
10466 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10467 CALL_EXPR_ARG (arg1, 0), 0))
10468 {
10469 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10470
10471 if (tanfn != NULL_TREE)
10472 {
10473 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10474 return fold_build2 (RDIV_EXPR, type,
10475 build_real (type, dconst1), tmp);
10476 }
10477 }
10478
10479 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10480 NaNs or Infinities. */
10481 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
10482 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
10483 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
10484 {
10485 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10486 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10487
10488 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10489 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10490 && operand_equal_p (arg00, arg01, 0))
10491 {
10492 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10493
10494 if (cosfn != NULL_TREE)
10495 return build_call_expr (cosfn, 1, arg00);
10496 }
10497 }
10498
10499 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10500 NaNs or Infinities. */
10501 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
10502 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
10503 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
10504 {
10505 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10506 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10507
10508 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10509 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10510 && operand_equal_p (arg00, arg01, 0))
10511 {
10512 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10513
10514 if (cosfn != NULL_TREE)
10515 {
10516 tree tmp = build_call_expr (cosfn, 1, arg00);
10517 return fold_build2 (RDIV_EXPR, type,
10518 build_real (type, dconst1),
10519 tmp);
10520 }
10521 }
10522 }
10523
10524 /* Optimize pow(x,c)/x as pow(x,c-1). */
10525 if (fcode0 == BUILT_IN_POW
10526 || fcode0 == BUILT_IN_POWF
10527 || fcode0 == BUILT_IN_POWL)
10528 {
10529 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10530 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10531 if (TREE_CODE (arg01) == REAL_CST
10532 && !TREE_OVERFLOW (arg01)
10533 && operand_equal_p (arg1, arg00, 0))
10534 {
10535 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10536 REAL_VALUE_TYPE c;
10537 tree arg;
10538
10539 c = TREE_REAL_CST (arg01);
10540 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
10541 arg = build_real (type, c);
10542 return build_call_expr (powfn, 2, arg1, arg);
10543 }
10544 }
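/* Illustrative example (not GCC code; requires
   -funsafe-math-optimizations):

     double f (double x) { return pow (x, 3.0) / x; }

   folds to

     double f (double x) { return pow (x, 2.0); }  */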
10545
10546 /* Optimize x/expN(y) into x*expN(-y). */
10547 if (BUILTIN_EXPONENT_P (fcode1))
10548 {
10549 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10550 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
10551 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
10552 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10553 }
10554
10555 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10556 if (fcode1 == BUILT_IN_POW
10557 || fcode1 == BUILT_IN_POWF
10558 || fcode1 == BUILT_IN_POWL)
10559 {
10560 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10561 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10562 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10563 tree neg11 = fold_convert (type, negate_expr (arg11));
10564 arg1 = build_call_expr (powfn, 2, arg10, neg11);
10565 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10566 }
10567 }
10568 return NULL_TREE;
10569
10570 case TRUNC_DIV_EXPR:
10571 case FLOOR_DIV_EXPR:
10572 /* Simplify A / (B << N) where A and B are positive and B is
10573 a power of 2, to A >> (N + log2(B)). */
10574 strict_overflow_p = false;
10575 if (TREE_CODE (arg1) == LSHIFT_EXPR
10576 && (TYPE_UNSIGNED (type)
10577 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10578 {
10579 tree sval = TREE_OPERAND (arg1, 0);
10580 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10581 {
10582 tree sh_cnt = TREE_OPERAND (arg1, 1);
10583 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10584
10585 if (strict_overflow_p)
10586 fold_overflow_warning (("assuming signed overflow does not "
10587 "occur when simplifying A / (B << N)"),
10588 WARN_STRICT_OVERFLOW_MISC);
10589
10590 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10591 sh_cnt, build_int_cst (NULL_TREE, pow2));
10592 return fold_build2 (RSHIFT_EXPR, type,
10593 fold_convert (type, arg0), sh_cnt);
10594 }
10595 }
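/* A sketch of the rewrite at the source level (illustration
   only):

     unsigned f (unsigned x, int n) { return x / (4u << n); }

   becomes

     unsigned f (unsigned x, int n) { return x >> (n + 2); }

   since 4u << n equals 1u << (n + 2).  */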
10596 /* Fall thru */
10597
10598 case ROUND_DIV_EXPR:
10599 case CEIL_DIV_EXPR:
10600 case EXACT_DIV_EXPR:
10601 if (integer_onep (arg1))
10602 return non_lvalue (fold_convert (type, arg0));
10603 if (integer_zerop (arg1))
10604 return NULL_TREE;
10605 /* X / -1 is -X. */
10606 if (!TYPE_UNSIGNED (type)
10607 && TREE_CODE (arg1) == INTEGER_CST
10608 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10609 && TREE_INT_CST_HIGH (arg1) == -1)
10610 return fold_convert (type, negate_expr (arg0));
10611
10612 /* Convert -A / -B to A / B when the type is signed and overflow is
10613 undefined. */
10614 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10615 && TREE_CODE (arg0) == NEGATE_EXPR
10616 && negate_expr_p (arg1))
10617 {
10618 if (INTEGRAL_TYPE_P (type))
10619 fold_overflow_warning (("assuming signed overflow does not occur "
10620 "when distributing negation across "
10621 "division"),
10622 WARN_STRICT_OVERFLOW_MISC);
10623 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10624 negate_expr (arg1));
10625 }
10626 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10627 && TREE_CODE (arg1) == NEGATE_EXPR
10628 && negate_expr_p (arg0))
10629 {
10630 if (INTEGRAL_TYPE_P (type))
10631 fold_overflow_warning (("assuming signed overflow does not occur "
10632 "when distributing negation across "
10633 "division"),
10634 WARN_STRICT_OVERFLOW_MISC);
10635 return fold_build2 (code, type, negate_expr (arg0),
10636 TREE_OPERAND (arg1, 0));
10637 }
10638
10639 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10640 operation, EXACT_DIV_EXPR.
10641
10642 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10643 At one time others generated faster code, but it's not clear whether they
10644 still do after the last round of changes to the DIV code in expmed.c. */
10645 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10646 && multiple_of_p (type, arg0, arg1))
10647 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10648
10649 strict_overflow_p = false;
10650 if (TREE_CODE (arg1) == INTEGER_CST
10651 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10652 &strict_overflow_p)))
10653 {
10654 if (strict_overflow_p)
10655 fold_overflow_warning (("assuming signed overflow does not occur "
10656 "when simplifying division"),
10657 WARN_STRICT_OVERFLOW_MISC);
10658 return fold_convert (type, tem);
10659 }
10660
10661 return NULL_TREE;
10662
10663 case CEIL_MOD_EXPR:
10664 case FLOOR_MOD_EXPR:
10665 case ROUND_MOD_EXPR:
10666 case TRUNC_MOD_EXPR:
10667 /* X % 1 is always zero, but be sure to preserve any side
10668 effects in X. */
10669 if (integer_onep (arg1))
10670 return omit_one_operand (type, integer_zero_node, arg0);
10671
10672 /* For X % 0, return the expression unchanged so that we get the
10673 proper warnings and errors. */
10674 if (integer_zerop (arg1))
10675 return NULL_TREE;
10676
10677 /* 0 % X is always zero, but be sure to preserve any side
10678 effects in X. Place this after checking for X == 0. */
10679 if (integer_zerop (arg0))
10680 return omit_one_operand (type, integer_zero_node, arg1);
10681
10682 /* X % -1 is zero. */
10683 if (!TYPE_UNSIGNED (type)
10684 && TREE_CODE (arg1) == INTEGER_CST
10685 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10686 && TREE_INT_CST_HIGH (arg1) == -1)
10687 return omit_one_operand (type, integer_zero_node, arg0);
10688
10689 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10690 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10691 strict_overflow_p = false;
10692 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10693 && (TYPE_UNSIGNED (type)
10694 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10695 {
10696 tree c = arg1;
10697 /* Also optimize A % (C << N) where C is a power of 2,
10698 to A & ((C << N) - 1). */
10699 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10700 c = TREE_OPERAND (arg1, 0);
10701
10702 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10703 {
10704 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
10705 build_int_cst (TREE_TYPE (arg1), 1));
10706 if (strict_overflow_p)
10707 fold_overflow_warning (("assuming signed overflow does not "
10708 "occur when simplifying "
10709 "X % (power of two)"),
10710 WARN_STRICT_OVERFLOW_MISC);
10711 return fold_build2 (BIT_AND_EXPR, type,
10712 fold_convert (type, arg0),
10713 fold_convert (type, mask));
10714 }
10715 }
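/* For example (source-level illustration):

     unsigned f (unsigned x) { return x % 8; }

   folds to

     unsigned f (unsigned x) { return x & 7; }

   and likewise x % (8u << n) folds to x & ((8u << n) - 1).  */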
10716
10717 /* X % -C is the same as X % C. */
10718 if (code == TRUNC_MOD_EXPR
10719 && !TYPE_UNSIGNED (type)
10720 && TREE_CODE (arg1) == INTEGER_CST
10721 && !TREE_OVERFLOW (arg1)
10722 && TREE_INT_CST_HIGH (arg1) < 0
10723 && !TYPE_OVERFLOW_TRAPS (type)
10724 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10725 && !sign_bit_p (arg1, arg1))
10726 return fold_build2 (code, type, fold_convert (type, arg0),
10727 fold_convert (type, negate_expr (arg1)));
10728
10729 /* X % -Y is the same as X % Y. */
10730 if (code == TRUNC_MOD_EXPR
10731 && !TYPE_UNSIGNED (type)
10732 && TREE_CODE (arg1) == NEGATE_EXPR
10733 && !TYPE_OVERFLOW_TRAPS (type))
10734 return fold_build2 (code, type, fold_convert (type, arg0),
10735 fold_convert (type, TREE_OPERAND (arg1, 0)));
10736
10737 if (TREE_CODE (arg1) == INTEGER_CST
10738 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10739 &strict_overflow_p)))
10740 {
10741 if (strict_overflow_p)
10742 fold_overflow_warning (("assuming signed overflow does not occur "
10743 "when simplifying modulos"),
10744 WARN_STRICT_OVERFLOW_MISC);
10745 return fold_convert (type, tem);
10746 }
10747
10748 return NULL_TREE;
10749
10750 case LROTATE_EXPR:
10751 case RROTATE_EXPR:
10752 if (integer_all_onesp (arg0))
10753 return omit_one_operand (type, arg0, arg1);
10754 goto shift;
10755
10756 case RSHIFT_EXPR:
10757 /* Optimize -1 >> x for arithmetic right shifts. */
10758 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10759 return omit_one_operand (type, arg0, arg1);
10760 /* ... fall through ... */
10761
10762 case LSHIFT_EXPR:
10763 shift:
10764 if (integer_zerop (arg1))
10765 return non_lvalue (fold_convert (type, arg0));
10766 if (integer_zerop (arg0))
10767 return omit_one_operand (type, arg0, arg1);
10768
10769 /* Since negative shift count is not well-defined,
10770 don't try to compute it in the compiler. */
10771 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10772 return NULL_TREE;
10773
10774 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10775 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10776 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10777 && host_integerp (TREE_OPERAND (arg0, 1), false)
10778 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10779 {
10780 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10781 + TREE_INT_CST_LOW (arg1));
10782
10783 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10784 being well defined. */
10785 if (low >= TYPE_PRECISION (type))
10786 {
10787 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10788 low = low % TYPE_PRECISION (type);
10789 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10790 return build_int_cst (type, 0);
10791 else
10792 low = TYPE_PRECISION (type) - 1;
10793 }
10794
10795 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10796 build_int_cst (type, low));
10797 }
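/* Source-level sketch (illustration only):

     int f (int a) { return (a << 2) << 3; }

   folds to

     int f (int a) { return a << 5; }

   while (a << 30) << 3 on a 32-bit int folds to the constant 0,
   per the out-of-range handling above.  */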
10798
10799 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10800 into x & ((unsigned)-1 >> c) for unsigned types. */
10801 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10802 || (TYPE_UNSIGNED (type)
10803 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10804 && host_integerp (arg1, false)
10805 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10806 && host_integerp (TREE_OPERAND (arg0, 1), false)
10807 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10808 {
10809 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10810 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10811 tree lshift;
10812 tree arg00;
10813
10814 if (low0 == low1)
10815 {
10816 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10817
10818 lshift = build_int_cst (type, -1);
10819 lshift = int_const_binop (code, lshift, arg1, 0);
10820
10821 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
10822 }
10823 }
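/* Illustrative example (not GCC code):

     unsigned f (unsigned x) { return (x >> 3) << 3; }

   folds to the single masking operation

     unsigned f (unsigned x) { return x & ~7u; }

   which clears the low three bits directly.  */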
10824
10825 /* Rewrite an LROTATE_EXPR by a constant into an
10826 RROTATE_EXPR by a new constant. */
10827 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10828 {
10829 tree tem = build_int_cst (TREE_TYPE (arg1),
10830 GET_MODE_BITSIZE (TYPE_MODE (type)));
10831 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10832 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
10833 }
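/* Sketch (illustration only): on a 32-bit type a rotate left by 5
   becomes a rotate right by 32 - 5 = 27, so later code only has to
   handle one canonical rotate direction for constant counts.  */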
10834
10835 /* If we have a rotate of a bit operation with the rotate count and
10836 the second operand of the bit operation both constant,
10837 permute the two operations. */
10838 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10839 && (TREE_CODE (arg0) == BIT_AND_EXPR
10840 || TREE_CODE (arg0) == BIT_IOR_EXPR
10841 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10842 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10843 return fold_build2 (TREE_CODE (arg0), type,
10844 fold_build2 (code, type,
10845 TREE_OPERAND (arg0, 0), arg1),
10846 fold_build2 (code, type,
10847 TREE_OPERAND (arg0, 1), arg1));
10848
10849 /* Two consecutive rotates adding up to the width of the mode can
10850 be ignored. */
10851 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10852 && TREE_CODE (arg0) == RROTATE_EXPR
10853 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10854 && TREE_INT_CST_HIGH (arg1) == 0
10855 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10856 && ((TREE_INT_CST_LOW (arg1)
10857 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10858 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10859 return TREE_OPERAND (arg0, 0);
10860
10861 return NULL_TREE;
10862
10863 case MIN_EXPR:
10864 if (operand_equal_p (arg0, arg1, 0))
10865 return omit_one_operand (type, arg0, arg1);
10866 if (INTEGRAL_TYPE_P (type)
10867 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10868 return omit_one_operand (type, arg1, arg0);
10869 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10870 if (tem)
10871 return tem;
10872 goto associate;
10873
10874 case MAX_EXPR:
10875 if (operand_equal_p (arg0, arg1, 0))
10876 return omit_one_operand (type, arg0, arg1);
10877 if (INTEGRAL_TYPE_P (type)
10878 && TYPE_MAX_VALUE (type)
10879 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10880 return omit_one_operand (type, arg1, arg0);
10881 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10882 if (tem)
10883 return tem;
10884 goto associate;
10885
10886 case TRUTH_ANDIF_EXPR:
10887 /* Note that the operands of this must be ints
10888 and their values must be 0 or 1.
10889 ("true" is a fixed value perhaps depending on the language.) */
10890 /* If first arg is constant zero, return it. */
10891 if (integer_zerop (arg0))
10892 return fold_convert (type, arg0);
10893 case TRUTH_AND_EXPR:
10894 /* If either arg is constant true, drop it. */
10895 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10896 return non_lvalue (fold_convert (type, arg1));
10897 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10898 /* Preserve sequence points. */
10899 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10900 return non_lvalue (fold_convert (type, arg0));
10901 /* If second arg is constant zero, result is zero, but first arg
10902 must be evaluated. */
10903 if (integer_zerop (arg1))
10904 return omit_one_operand (type, arg1, arg0);
10905 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10906 case will be handled here. */
10907 if (integer_zerop (arg0))
10908 return omit_one_operand (type, arg0, arg1);
10909
10910 /* !X && X is always false. */
10911 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10912 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10913 return omit_one_operand (type, integer_zero_node, arg1);
10914 /* X && !X is always false. */
10915 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10916 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10917 return omit_one_operand (type, integer_zero_node, arg0);
10918
10919 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10920 means A >= Y && A != MAX, but in this case we know that
10921 A < X <= MAX. */
10922
10923 if (!TREE_SIDE_EFFECTS (arg0)
10924 && !TREE_SIDE_EFFECTS (arg1))
10925 {
10926 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10927 if (tem && !operand_equal_p (tem, arg0, 0))
10928 return fold_build2 (code, type, tem, arg1);
10929
10930 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10931 if (tem && !operand_equal_p (tem, arg1, 0))
10932 return fold_build2 (code, type, arg0, tem);
10933 }
10934
10935 truth_andor:
10936 /* We only do these simplifications if we are optimizing. */
10937 if (!optimize)
10938 return NULL_TREE;
10939
10940 /* Check for things like (A || B) && (A || C). We can convert this
10941 to A || (B && C). Note that either operator can be any of the four
10942 truth and/or operations and the transformation will still be
10943 valid. Also note that we only care about order for the
10944 ANDIF and ORIF operators. If B contains side effects, this
10945 might change the truth-value of A. */
10946 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10947 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10948 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10949 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10950 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10951 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10952 {
10953 tree a00 = TREE_OPERAND (arg0, 0);
10954 tree a01 = TREE_OPERAND (arg0, 1);
10955 tree a10 = TREE_OPERAND (arg1, 0);
10956 tree a11 = TREE_OPERAND (arg1, 1);
10957 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10958 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10959 && (code == TRUTH_AND_EXPR
10960 || code == TRUTH_OR_EXPR));
10961
10962 if (operand_equal_p (a00, a10, 0))
10963 return fold_build2 (TREE_CODE (arg0), type, a00,
10964 fold_build2 (code, type, a01, a11));
10965 else if (commutative && operand_equal_p (a00, a11, 0))
10966 return fold_build2 (TREE_CODE (arg0), type, a00,
10967 fold_build2 (code, type, a01, a10));
10968 else if (commutative && operand_equal_p (a01, a10, 0))
10969 return fold_build2 (TREE_CODE (arg0), type, a01,
10970 fold_build2 (code, type, a00, a11));
10971
10972 /* This case is tricky because we must either have commutative
10973 operators or else A10 must not have side-effects. */
10974
10975 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10976 && operand_equal_p (a01, a11, 0))
10977 return fold_build2 (TREE_CODE (arg0), type,
10978 fold_build2 (code, type, a00, a10),
10979 a01);
10980 }
10981
10982 /* See if we can build a range comparison. */
10983 if (0 != (tem = fold_range_test (code, type, op0, op1)))
10984 return tem;
10985
10986 /* Check for the possibility of merging component references. If our
10987 lhs is another similar operation, try to merge its rhs with our
10988 rhs. Then try to merge our lhs and rhs. */
10989 if (TREE_CODE (arg0) == code
10990 && 0 != (tem = fold_truthop (code, type,
10991 TREE_OPERAND (arg0, 1), arg1)))
10992 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10993
10994 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10995 return tem;
10996
10997 return NULL_TREE;
10998
10999 case TRUTH_ORIF_EXPR:
11000 /* Note that the operands of this must be ints
11001 and their values must be 0 or true.
11002 ("true" is a fixed value perhaps depending on the language.) */
11003 /* If first arg is constant true, return it. */
11004 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11005 return fold_convert (type, arg0);
11006 case TRUTH_OR_EXPR:
11007 /* If either arg is constant zero, drop it. */
11008 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11009 return non_lvalue (fold_convert (type, arg1));
11010 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11011 /* Preserve sequence points. */
11012 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11013 return non_lvalue (fold_convert (type, arg0));
11014 /* If second arg is constant true, result is true, but we must
11015 evaluate first arg. */
11016 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11017 return omit_one_operand (type, arg1, arg0);
11018 /* Likewise for first arg, but note this only occurs here for
11019 TRUTH_OR_EXPR. */
11020 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11021 return omit_one_operand (type, arg0, arg1);
11022
11023 /* !X || X is always true. */
11024 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11025 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11026 return omit_one_operand (type, integer_one_node, arg1);
11027 /* X || !X is always true. */
11028 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11029 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11030 return omit_one_operand (type, integer_one_node, arg0);
11031
11032 goto truth_andor;
11033
11034 case TRUTH_XOR_EXPR:
11035 /* If the second arg is constant zero, drop it. */
11036 if (integer_zerop (arg1))
11037 return non_lvalue (fold_convert (type, arg0));
11038 /* If the second arg is constant true, this is a logical inversion. */
11039 if (integer_onep (arg1))
11040 {
11041 /* Only call invert_truthvalue if operand is a truth value. */
11042 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11043 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11044 else
11045 tem = invert_truthvalue (arg0);
11046 return non_lvalue (fold_convert (type, tem));
11047 }
11048 /* Identical arguments cancel to zero. */
11049 if (operand_equal_p (arg0, arg1, 0))
11050 return omit_one_operand (type, integer_zero_node, arg0);
11051
11052 /* !X ^ X is always true. */
11053 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11054 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11055 return omit_one_operand (type, integer_one_node, arg1);
11056
11057 /* X ^ !X is always true. */
11058 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11059 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11060 return omit_one_operand (type, integer_one_node, arg0);
11061
11062 return NULL_TREE;
11063
11064 case EQ_EXPR:
11065 case NE_EXPR:
11066 tem = fold_comparison (code, type, op0, op1);
11067 if (tem != NULL_TREE)
11068 return tem;
11069
11070 /* bool_var != 0 becomes bool_var. */
11071 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11072 && code == NE_EXPR)
11073 return non_lvalue (fold_convert (type, arg0));
11074
11075 /* bool_var == 1 becomes bool_var. */
11076 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11077 && code == EQ_EXPR)
11078 return non_lvalue (fold_convert (type, arg0));
11079
11080 /* bool_var != 1 becomes !bool_var. */
11081 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11082 && code == NE_EXPR)
11083 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
11084
11085 /* bool_var == 0 becomes !bool_var. */
11086 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11087 && code == EQ_EXPR)
11088 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
11089
11090 /* If this is an equality comparison of the address of two non-weak,
11091 unaliased symbols neither of which are extern (since we do not
11092 have access to attributes for externs), then we know the result. */
11093 if (TREE_CODE (arg0) == ADDR_EXPR
11094 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11095 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11096 && ! lookup_attribute ("alias",
11097 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11098 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11099 && TREE_CODE (arg1) == ADDR_EXPR
11100 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11101 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11102 && ! lookup_attribute ("alias",
11103 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11104 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11105 {
11106 /* We know that we're looking at the address of two
11107 non-weak, unaliased, static _DECL nodes.
11108
11109 It is both wasteful and incorrect to call operand_equal_p
11110 to compare the two ADDR_EXPR nodes. It is wasteful in that
11111 all we need to do is test pointer equality for the arguments
11112 to the two ADDR_EXPR nodes. It is incorrect to use
11113 operand_equal_p as that function is NOT equivalent to a
11114 C equality test. It can in fact return false for two
11115 objects which would test as equal using the C equality
11116 operator. */
11117 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11118 return constant_boolean_node (equal
11119 ? code == EQ_EXPR : code != EQ_EXPR,
11120 type);
11121 }
11122
11123 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11124 a MINUS_EXPR of a constant, we can convert it into a comparison with
11125 a revised constant as long as no overflow occurs. */
11126 if (TREE_CODE (arg1) == INTEGER_CST
11127 && (TREE_CODE (arg0) == PLUS_EXPR
11128 || TREE_CODE (arg0) == MINUS_EXPR)
11129 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11130 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11131 ? MINUS_EXPR : PLUS_EXPR,
11132 fold_convert (TREE_TYPE (arg0), arg1),
11133 TREE_OPERAND (arg0, 1), 0))
11134 && !TREE_OVERFLOW (tem))
11135 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11136
11137 /* Similarly for a NEGATE_EXPR. */
11138 if (TREE_CODE (arg0) == NEGATE_EXPR
11139 && TREE_CODE (arg1) == INTEGER_CST
11140 && 0 != (tem = negate_expr (arg1))
11141 && TREE_CODE (tem) == INTEGER_CST
11142 && !TREE_OVERFLOW (tem))
11143 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11144
11145 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11146 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11147 && TREE_CODE (arg1) == INTEGER_CST
11148 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11149 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11150 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
11151 fold_convert (TREE_TYPE (arg0), arg1),
11152 TREE_OPERAND (arg0, 1)));
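/* For example (source-level illustration):

     int f (int x) { return (x ^ 5) == 3; }

   folds to

     int f (int x) { return x == 6; }

   since 5 ^ 3 == 6.  */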
11153
11154 /* Transform comparisons of the form X +- C CMP X. */
11155 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11156 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11157 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11158 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11159 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11160 {
11161 tree cst = TREE_OPERAND (arg0, 1);
11162
11163 if (code == EQ_EXPR
11164 && !integer_zerop (cst))
11165 return omit_two_operands (type, boolean_false_node,
11166 TREE_OPERAND (arg0, 0), arg1);
11167 else
11168 return omit_two_operands (type, boolean_true_node,
11169 TREE_OPERAND (arg0, 0), arg1);
11170 }
11171
11172 /* If we have X - Y == 0, we can convert that to X == Y and similarly
11173 for !=. Don't do this for ordered comparisons due to overflow. */
11174 if (TREE_CODE (arg0) == MINUS_EXPR
11175 && integer_zerop (arg1))
11176 return fold_build2 (code, type,
11177 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
11178
11179 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11180 if (TREE_CODE (arg0) == ABS_EXPR
11181 && (integer_zerop (arg1) || real_zerop (arg1)))
11182 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
11183
11184 /* If this is an EQ or NE comparison with zero and ARG0 is
11185 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11186 two operations, but the latter can be done in one less insn
11187 on machines that have only two-operand insns or on which a
11188 constant cannot be the first operand. */
11189 if (TREE_CODE (arg0) == BIT_AND_EXPR
11190 && integer_zerop (arg1))
11191 {
11192 tree arg00 = TREE_OPERAND (arg0, 0);
11193 tree arg01 = TREE_OPERAND (arg0, 1);
11194 if (TREE_CODE (arg00) == LSHIFT_EXPR
11195 && integer_onep (TREE_OPERAND (arg00, 0)))
11196 return
11197 fold_build2 (code, type,
11198 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11199 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
11200 arg01, TREE_OPERAND (arg00, 1)),
11201 fold_convert (TREE_TYPE (arg0),
11202 integer_one_node)),
11203 arg1);
11204 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
11205 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
11206 return
11207 fold_build2 (code, type,
11208 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11209 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
11210 arg00, TREE_OPERAND (arg01, 1)),
11211 fold_convert (TREE_TYPE (arg0),
11212 integer_one_node)),
11213 arg1);
11214 }
11215
11216 /* If this is an NE or EQ comparison of zero against the result of a
11217 signed MOD operation whose second operand is a power of 2, make
11218 the MOD operation unsigned since it is simpler and equivalent. */
11219 if (integer_zerop (arg1)
11220 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11221 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11222 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11223 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11224 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11225 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11226 {
11227 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
11228 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
11229 fold_convert (newtype,
11230 TREE_OPERAND (arg0, 0)),
11231 fold_convert (newtype,
11232 TREE_OPERAND (arg0, 1)));
11233
11234 return fold_build2 (code, type, newmod,
11235 fold_convert (newtype, arg1));
11236 }
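/* Illustrative example (not GCC code): with 32-bit int,

     int f (int x) { return x % 4 == 0; }

   is folded as if written

     int f (int x) { return (unsigned) x % 4 == 0; }

   which is equivalent because 2**32 is a multiple of 4, and the
   unsigned modulus can then become a simple mask.  */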
11237
11238 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11239 C1 is a valid shift constant, and C2 is a power of two, i.e.
11240 a single bit. */
11241 if (TREE_CODE (arg0) == BIT_AND_EXPR
11242 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11243 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11244 == INTEGER_CST
11245 && integer_pow2p (TREE_OPERAND (arg0, 1))
11246 && integer_zerop (arg1))
11247 {
11248 tree itype = TREE_TYPE (arg0);
11249 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
11250 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11251
11252 /* Check for a valid shift count. */
11253 if (TREE_INT_CST_HIGH (arg001) == 0
11254 && TREE_INT_CST_LOW (arg001) < prec)
11255 {
11256 tree arg01 = TREE_OPERAND (arg0, 1);
11257 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11258 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11259 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11260 can be rewritten as (X & (C2 << C1)) != 0. */
11261 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11262 {
11263 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
11264 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
11265 return fold_build2 (code, type, tem, arg1);
11266 }
11267 /* Otherwise, for signed (arithmetic) shifts,
11268 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11269 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11270 else if (!TYPE_UNSIGNED (itype))
11271 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11272 arg000, build_int_cst (itype, 0));
11273 /* Otherwise, for unsigned (logical) shifts,
11274 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11275 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11276 else
11277 return omit_one_operand (type,
11278 code == EQ_EXPR ? integer_one_node
11279 : integer_zero_node,
11280 arg000);
11281 }
11282 }
11283
11284 /* If this is an NE comparison of zero with an AND of one, remove the
11285 comparison since the AND will give the correct value. */
11286 if (code == NE_EXPR
11287 && integer_zerop (arg1)
11288 && TREE_CODE (arg0) == BIT_AND_EXPR
11289 && integer_onep (TREE_OPERAND (arg0, 1)))
11290 return fold_convert (type, arg0);
11291
11292 /* If we have (A & C) == C where C is a power of 2, convert this into
11293 (A & C) != 0. Similarly for NE_EXPR. */
11294 if (TREE_CODE (arg0) == BIT_AND_EXPR
11295 && integer_pow2p (TREE_OPERAND (arg0, 1))
11296 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11297 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11298 arg0, fold_convert (TREE_TYPE (arg0),
11299 integer_zero_node));
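/* Source-level sketch (illustration only):

     int f (int x) { return (x & 8) == 8; }

   folds to the plain bit test

     int f (int x) { return (x & 8) != 0; }

   valid because the constant 8 has a single bit set.  */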
11300
11301 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11302 bit, then fold the expression into A < 0 or A >= 0. */
11303 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
11304 if (tem)
11305 return tem;
11306
11307 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11308 Similarly for NE_EXPR. */
11309 if (TREE_CODE (arg0) == BIT_AND_EXPR
11310 && TREE_CODE (arg1) == INTEGER_CST
11311 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11312 {
11313 tree notc = fold_build1 (BIT_NOT_EXPR,
11314 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11315 TREE_OPERAND (arg0, 1));
11316 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11317 arg1, notc);
11318 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11319 if (integer_nonzerop (dandnotc))
11320 return omit_one_operand (type, rslt, arg0);
11321 }
11322
11323 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
11324 Similarly for NE_EXPR. */
11325 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11326 && TREE_CODE (arg1) == INTEGER_CST
11327 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11328 {
11329 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
11330 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11331 TREE_OPERAND (arg0, 1), notd);
11332 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11333 if (integer_nonzerop (candnotd))
11334 return omit_one_operand (type, rslt, arg0);
11335 }
11336
11337 /* If this is a comparison of a field, we may be able to simplify it. */
11338 if ((TREE_CODE (arg0) == COMPONENT_REF
11339 || TREE_CODE (arg0) == BIT_FIELD_REF)
11340 /* Handle the constant case even without -O
11341 to make sure the warnings are given. */
11342 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11343 {
11344 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
11345 if (t1)
11346 return t1;
11347 }
11348
11349 /* Optimize comparisons of strlen vs zero to a compare of the
11350 first character of the string vs zero. To wit,
11351 strlen(ptr) == 0 => *ptr == 0
11352 strlen(ptr) != 0 => *ptr != 0
11353 Other cases should reduce to one of these two (or a constant)
11354 due to the return value of strlen being unsigned. */
11355 if (TREE_CODE (arg0) == CALL_EXPR
11356 && integer_zerop (arg1))
11357 {
11358 tree fndecl = get_callee_fndecl (arg0);
11359
11360 if (fndecl
11361 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
11362 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
11363 && call_expr_nargs (arg0) == 1
11364 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
11365 {
11366 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
11367 return fold_build2 (code, type, iref,
11368 build_int_cst (TREE_TYPE (iref), 0));
11369 }
11370 }
11371
11372 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11373 of X. Similarly fold (X >> C) == 0 into X >= 0. */
11374 if (TREE_CODE (arg0) == RSHIFT_EXPR
11375 && integer_zerop (arg1)
11376 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11377 {
11378 tree arg00 = TREE_OPERAND (arg0, 0);
11379 tree arg01 = TREE_OPERAND (arg0, 1);
11380 tree itype = TREE_TYPE (arg00);
11381 if (TREE_INT_CST_HIGH (arg01) == 0
11382 && TREE_INT_CST_LOW (arg01)
11383 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
11384 {
11385 if (TYPE_UNSIGNED (itype))
11386 {
11387 itype = lang_hooks.types.signed_type (itype);
11388 arg00 = fold_convert (itype, arg00);
11389 }
11390 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11391 type, arg00, build_int_cst (itype, 0));
11392 }
11393 }
11394
11395 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
11396 if (integer_zerop (arg1)
11397 && TREE_CODE (arg0) == BIT_XOR_EXPR)
11398 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11399 TREE_OPERAND (arg0, 1));
11400
11401 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
11402 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11403 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11404 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11405 build_int_cst (TREE_TYPE (arg1), 0));
11406 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
11407 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11408 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11409 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11410 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
11411 build_int_cst (TREE_TYPE (arg1), 0));
11412
11413 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
11414 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11415 && TREE_CODE (arg1) == INTEGER_CST
11416 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11417 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11418 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
11419 TREE_OPERAND (arg0, 1), arg1));
11420
11421 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11422 (X & C) == 0 when C is a single bit. */
11423 if (TREE_CODE (arg0) == BIT_AND_EXPR
11424 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11425 && integer_zerop (arg1)
11426 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11427 {
11428 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11429 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11430 TREE_OPERAND (arg0, 1));
11431 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11432 type, tem, arg1);
11433 }
11434
11435 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11436 constant C is a power of two, i.e. a single bit. */
11437 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11438 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11439 && integer_zerop (arg1)
11440 && integer_pow2p (TREE_OPERAND (arg0, 1))
11441 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11442 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11443 {
11444 tree arg00 = TREE_OPERAND (arg0, 0);
11445 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11446 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11447 }
11448
11449 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11450 when C is a power of two, i.e. a single bit. */
11451 if (TREE_CODE (arg0) == BIT_AND_EXPR
11452 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11453 && integer_zerop (arg1)
11454 && integer_pow2p (TREE_OPERAND (arg0, 1))
11455 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11456 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11457 {
11458 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11459 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
11460 arg000, TREE_OPERAND (arg0, 1));
11461 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11462 tem, build_int_cst (TREE_TYPE (tem), 0));
11463 }
11464
11465 if (integer_zerop (arg1)
11466 && tree_expr_nonzero_p (arg0))
11467 {
11468 tree res = constant_boolean_node (code == NE_EXPR, type);
11469 return omit_one_operand (type, res, arg0);
11470 }
11471
11472 /* Fold -X op -Y as X op Y, where op is eq/ne. */
11473 if (TREE_CODE (arg0) == NEGATE_EXPR
11474 && TREE_CODE (arg1) == NEGATE_EXPR)
11475 return fold_build2 (code, type,
11476 TREE_OPERAND (arg0, 0),
11477 TREE_OPERAND (arg1, 0));
11478
11479 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
11480 if (TREE_CODE (arg0) == BIT_AND_EXPR
11481 && TREE_CODE (arg1) == BIT_AND_EXPR)
11482 {
11483 tree arg00 = TREE_OPERAND (arg0, 0);
11484 tree arg01 = TREE_OPERAND (arg0, 1);
11485 tree arg10 = TREE_OPERAND (arg1, 0);
11486 tree arg11 = TREE_OPERAND (arg1, 1);
11487 tree itype = TREE_TYPE (arg0);
11488
11489 if (operand_equal_p (arg01, arg11, 0))
11490 return fold_build2 (code, type,
11491 fold_build2 (BIT_AND_EXPR, itype,
11492 fold_build2 (BIT_XOR_EXPR, itype,
11493 arg00, arg10),
11494 arg01),
11495 build_int_cst (itype, 0));
11496
11497 if (operand_equal_p (arg01, arg10, 0))
11498 return fold_build2 (code, type,
11499 fold_build2 (BIT_AND_EXPR, itype,
11500 fold_build2 (BIT_XOR_EXPR, itype,
11501 arg00, arg11),
11502 arg01),
11503 build_int_cst (itype, 0));
11504
11505 if (operand_equal_p (arg00, arg11, 0))
11506 return fold_build2 (code, type,
11507 fold_build2 (BIT_AND_EXPR, itype,
11508 fold_build2 (BIT_XOR_EXPR, itype,
11509 arg01, arg10),
11510 arg00),
11511 build_int_cst (itype, 0));
11512
11513 if (operand_equal_p (arg00, arg10, 0))
11514 return fold_build2 (code, type,
11515 fold_build2 (BIT_AND_EXPR, itype,
11516 fold_build2 (BIT_XOR_EXPR, itype,
11517 arg01, arg11),
11518 arg00),
11519 build_int_cst (itype, 0));
11520 }
11521
11522 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11523 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11524 {
11525 tree arg00 = TREE_OPERAND (arg0, 0);
11526 tree arg01 = TREE_OPERAND (arg0, 1);
11527 tree arg10 = TREE_OPERAND (arg1, 0);
11528 tree arg11 = TREE_OPERAND (arg1, 1);
11529 tree itype = TREE_TYPE (arg0);
11530
11531 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11532 operand_equal_p guarantees no side-effects so we don't need
11533 to use omit_one_operand on Z. */
11534 if (operand_equal_p (arg01, arg11, 0))
11535 return fold_build2 (code, type, arg00, arg10);
11536 if (operand_equal_p (arg01, arg10, 0))
11537 return fold_build2 (code, type, arg00, arg11);
11538 if (operand_equal_p (arg00, arg11, 0))
11539 return fold_build2 (code, type, arg01, arg10);
11540 if (operand_equal_p (arg00, arg10, 0))
11541 return fold_build2 (code, type, arg01, arg11);
11542
11543 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11544 if (TREE_CODE (arg01) == INTEGER_CST
11545 && TREE_CODE (arg11) == INTEGER_CST)
11546 return fold_build2 (code, type,
11547 fold_build2 (BIT_XOR_EXPR, itype, arg00,
11548 fold_build2 (BIT_XOR_EXPR, itype,
11549 arg01, arg11)),
11550 arg10);
11551 }
11552
11553 /* Attempt to simplify equality/inequality comparisons of complex
11554 values. Only lower the comparison if the result is known or
11555 can be simplified to a single scalar comparison. */
11556 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11557 || TREE_CODE (arg0) == COMPLEX_CST)
11558 && (TREE_CODE (arg1) == COMPLEX_EXPR
11559 || TREE_CODE (arg1) == COMPLEX_CST))
11560 {
11561 tree real0, imag0, real1, imag1;
11562 tree rcond, icond;
11563
11564 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11565 {
11566 real0 = TREE_OPERAND (arg0, 0);
11567 imag0 = TREE_OPERAND (arg0, 1);
11568 }
11569 else
11570 {
11571 real0 = TREE_REALPART (arg0);
11572 imag0 = TREE_IMAGPART (arg0);
11573 }
11574
11575 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11576 {
11577 real1 = TREE_OPERAND (arg1, 0);
11578 imag1 = TREE_OPERAND (arg1, 1);
11579 }
11580 else
11581 {
11582 real1 = TREE_REALPART (arg1);
11583 imag1 = TREE_IMAGPART (arg1);
11584 }
11585
11586 rcond = fold_binary (code, type, real0, real1);
11587 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11588 {
11589 if (integer_zerop (rcond))
11590 {
11591 if (code == EQ_EXPR)
11592 return omit_two_operands (type, boolean_false_node,
11593 imag0, imag1);
11594 return fold_build2 (NE_EXPR, type, imag0, imag1);
11595 }
11596 else
11597 {
11598 if (code == NE_EXPR)
11599 return omit_two_operands (type, boolean_true_node,
11600 imag0, imag1);
11601 return fold_build2 (EQ_EXPR, type, imag0, imag1);
11602 }
11603 }
11604
11605 icond = fold_binary (code, type, imag0, imag1);
11606 if (icond && TREE_CODE (icond) == INTEGER_CST)
11607 {
11608 if (integer_zerop (icond))
11609 {
11610 if (code == EQ_EXPR)
11611 return omit_two_operands (type, boolean_false_node,
11612 real0, real1);
11613 return fold_build2 (NE_EXPR, type, real0, real1);
11614 }
11615 else
11616 {
11617 if (code == NE_EXPR)
11618 return omit_two_operands (type, boolean_true_node,
11619 real0, real1);
11620 return fold_build2 (EQ_EXPR, type, real0, real1);
11621 }
11622 }
11623 }
11624
11625 return NULL_TREE;
11626
11627 case LT_EXPR:
11628 case GT_EXPR:
11629 case LE_EXPR:
11630 case GE_EXPR:
11631 tem = fold_comparison (code, type, op0, op1);
11632 if (tem != NULL_TREE)
11633 return tem;
11634
11635 /* Transform comparisons of the form X +- C CMP X. */
11636 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11637 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11638 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11639 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
11640 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11641 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11642 {
11643 tree arg01 = TREE_OPERAND (arg0, 1);
11644 enum tree_code code0 = TREE_CODE (arg0);
11645 int is_positive;
11646
11647 if (TREE_CODE (arg01) == REAL_CST)
11648 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11649 else
11650 is_positive = tree_int_cst_sgn (arg01);
11651
11652 /* (X - c) > X becomes false. */
11653 if (code == GT_EXPR
11654 && ((code0 == MINUS_EXPR && is_positive >= 0)
11655 || (code0 == PLUS_EXPR && is_positive <= 0)))
11656 {
11657 if (TREE_CODE (arg01) == INTEGER_CST
11658 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11659 fold_overflow_warning (("assuming signed overflow does not "
11660 "occur when assuming that (X - c) > X "
11661 "is always false"),
11662 WARN_STRICT_OVERFLOW_ALL);
11663 return constant_boolean_node (0, type);
11664 }
11665
11666 /* Likewise (X + c) < X becomes false. */
11667 if (code == LT_EXPR
11668 && ((code0 == PLUS_EXPR && is_positive >= 0)
11669 || (code0 == MINUS_EXPR && is_positive <= 0)))
11670 {
11671 if (TREE_CODE (arg01) == INTEGER_CST
11672 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11673 fold_overflow_warning (("assuming signed overflow does not "
11674 "occur when assuming that "
11675 "(X + c) < X is always false"),
11676 WARN_STRICT_OVERFLOW_ALL);
11677 return constant_boolean_node (0, type);
11678 }
11679
11680 /* Convert (X - c) <= X to true. */
11681 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11682 && code == LE_EXPR
11683 && ((code0 == MINUS_EXPR && is_positive >= 0)
11684 || (code0 == PLUS_EXPR && is_positive <= 0)))
11685 {
11686 if (TREE_CODE (arg01) == INTEGER_CST
11687 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11688 fold_overflow_warning (("assuming signed overflow does not "
11689 "occur when assuming that "
11690 "(X - c) <= X is always true"),
11691 WARN_STRICT_OVERFLOW_ALL);
11692 return constant_boolean_node (1, type);
11693 }
11694
11695 /* Convert (X + c) >= X to true. */
11696 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11697 && code == GE_EXPR
11698 && ((code0 == PLUS_EXPR && is_positive >= 0)
11699 || (code0 == MINUS_EXPR && is_positive <= 0)))
11700 {
11701 if (TREE_CODE (arg01) == INTEGER_CST
11702 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11703 fold_overflow_warning (("assuming signed overflow does not "
11704 "occur when assuming that "
11705 "(X + c) >= X is always true"),
11706 WARN_STRICT_OVERFLOW_ALL);
11707 return constant_boolean_node (1, type);
11708 }
11709
11710 if (TREE_CODE (arg01) == INTEGER_CST)
11711 {
11712 /* Convert X + c > X and X - c < X to true for integers. */
11713 if (code == GT_EXPR
11714 && ((code0 == PLUS_EXPR && is_positive > 0)
11715 || (code0 == MINUS_EXPR && is_positive < 0)))
11716 {
11717 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11718 fold_overflow_warning (("assuming signed overflow does "
11719 "not occur when assuming that "
11720 "(X + c) > X is always true"),
11721 WARN_STRICT_OVERFLOW_ALL);
11722 return constant_boolean_node (1, type);
11723 }
11724
11725 if (code == LT_EXPR
11726 && ((code0 == MINUS_EXPR && is_positive > 0)
11727 || (code0 == PLUS_EXPR && is_positive < 0)))
11728 {
11729 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11730 fold_overflow_warning (("assuming signed overflow does "
11731 "not occur when assuming that "
11732 "(X - c) < X is always true"),
11733 WARN_STRICT_OVERFLOW_ALL);
11734 return constant_boolean_node (1, type);
11735 }
11736
11737 /* Convert X + c <= X and X - c >= X to false for integers. */
11738 if (code == LE_EXPR
11739 && ((code0 == PLUS_EXPR && is_positive > 0)
11740 || (code0 == MINUS_EXPR && is_positive < 0)))
11741 {
11742 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11743 fold_overflow_warning (("assuming signed overflow does "
11744 "not occur when assuming that "
11745 "(X + c) <= X is always false"),
11746 WARN_STRICT_OVERFLOW_ALL);
11747 return constant_boolean_node (0, type);
11748 }
11749
11750 if (code == GE_EXPR
11751 && ((code0 == MINUS_EXPR && is_positive > 0)
11752 || (code0 == PLUS_EXPR && is_positive < 0)))
11753 {
11754 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11755 fold_overflow_warning (("assuming signed overflow does "
11756 "not occur when assuming that "
11757 "(X - c) >= X is always false"),
11758 WARN_STRICT_OVERFLOW_ALL);
11759 return constant_boolean_node (0, type);
11760 }
11761 }
11762 }
11763
11764 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11765 This transformation affects the cases which are handled in later
11766 optimizations involving comparisons with non-negative constants. */
11767 if (TREE_CODE (arg1) == INTEGER_CST
11768 && TREE_CODE (arg0) != INTEGER_CST
11769 && tree_int_cst_sgn (arg1) > 0)
11770 {
11771 if (code == GE_EXPR)
11772 {
11773 arg1 = const_binop (MINUS_EXPR, arg1,
11774 build_int_cst (TREE_TYPE (arg1), 1), 0);
11775 return fold_build2 (GT_EXPR, type, arg0,
11776 fold_convert (TREE_TYPE (arg0), arg1));
11777 }
11778 if (code == LT_EXPR)
11779 {
11780 arg1 = const_binop (MINUS_EXPR, arg1,
11781 build_int_cst (TREE_TYPE (arg1), 1), 0);
11782 return fold_build2 (LE_EXPR, type, arg0,
11783 fold_convert (TREE_TYPE (arg0), arg1));
11784 }
11785 }
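/* For example (illustration only): X >= 5 becomes X > 4 and X < 5
   becomes X <= 4, so the later constant-comparison cases only need
   to recognize the GT and LE forms.  */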
11786
11787 /* Comparisons with the highest or lowest possible integer of
11788 the specified precision will have known values. */
11789 {
11790 tree arg1_type = TREE_TYPE (arg1);
11791 unsigned int width = TYPE_PRECISION (arg1_type);
11792
11793 if (TREE_CODE (arg1) == INTEGER_CST
11794 && !TREE_OVERFLOW (arg1)
11795 && width <= 2 * HOST_BITS_PER_WIDE_INT
11796 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
11797 {
11798 HOST_WIDE_INT signed_max_hi;
11799 unsigned HOST_WIDE_INT signed_max_lo;
11800 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11801
11802 if (width <= HOST_BITS_PER_WIDE_INT)
11803 {
11804 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11805 - 1;
11806 signed_max_hi = 0;
11807 max_hi = 0;
11808
11809 if (TYPE_UNSIGNED (arg1_type))
11810 {
11811 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11812 min_lo = 0;
11813 min_hi = 0;
11814 }
11815 else
11816 {
11817 max_lo = signed_max_lo;
11818 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11819 min_hi = -1;
11820 }
11821 }
11822 else
11823 {
11824 width -= HOST_BITS_PER_WIDE_INT;
11825 signed_max_lo = -1;
11826 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11827 - 1;
11828 max_lo = -1;
11829 min_lo = 0;
11830
11831 if (TYPE_UNSIGNED (arg1_type))
11832 {
11833 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11834 min_hi = 0;
11835 }
11836 else
11837 {
11838 max_hi = signed_max_hi;
11839 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11840 }
11841 }
11842
11843 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11844 && TREE_INT_CST_LOW (arg1) == max_lo)
11845 switch (code)
11846 {
11847 case GT_EXPR:
11848 return omit_one_operand (type, integer_zero_node, arg0);
11849
11850 case GE_EXPR:
11851 return fold_build2 (EQ_EXPR, type, op0, op1);
11852
11853 case LE_EXPR:
11854 return omit_one_operand (type, integer_one_node, arg0);
11855
11856 case LT_EXPR:
11857 return fold_build2 (NE_EXPR, type, op0, op1);
11858
11859 /* The GE_EXPR and LT_EXPR cases above are not normally
11860 reached because of previous transformations. */
11861
11862 default:
11863 break;
11864 }
11865 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11866 == max_hi
11867 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11868 switch (code)
11869 {
11870 case GT_EXPR:
11871 arg1 = const_binop (PLUS_EXPR, arg1,
11872 build_int_cst (TREE_TYPE (arg1), 1), 0);
11873 return fold_build2 (EQ_EXPR, type,
11874 fold_convert (TREE_TYPE (arg1), arg0),
11875 arg1);
11876 case LE_EXPR:
11877 arg1 = const_binop (PLUS_EXPR, arg1,
11878 build_int_cst (TREE_TYPE (arg1), 1), 0);
11879 return fold_build2 (NE_EXPR, type,
11880 fold_convert (TREE_TYPE (arg1), arg0),
11881 arg1);
11882 default:
11883 break;
11884 }
11885 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11886 == min_hi
11887 && TREE_INT_CST_LOW (arg1) == min_lo)
11888 switch (code)
11889 {
11890 case LT_EXPR:
11891 return omit_one_operand (type, integer_zero_node, arg0);
11892
11893 case LE_EXPR:
11894 return fold_build2 (EQ_EXPR, type, op0, op1);
11895
11896 case GE_EXPR:
11897 return omit_one_operand (type, integer_one_node, arg0);
11898
11899 case GT_EXPR:
11900 return fold_build2 (NE_EXPR, type, op0, op1);
11901
11902 default:
11903 break;
11904 }
11905 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11906 == min_hi
11907 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11908 switch (code)
11909 {
11910 case GE_EXPR:
11911 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11912 return fold_build2 (NE_EXPR, type,
11913 fold_convert (TREE_TYPE (arg1), arg0),
11914 arg1);
11915 case LT_EXPR:
11916 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11917 return fold_build2 (EQ_EXPR, type,
11918 fold_convert (TREE_TYPE (arg1), arg0),
11919 arg1);
11920 default:
11921 break;
11922 }
11923
11924 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
11925 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11926 && TYPE_UNSIGNED (arg1_type)
11927 /* We will flip the signedness of the comparison operator
11928 associated with the mode of arg1, so the sign bit is
11929 specified by this mode. Check that arg1 is the signed
11930 max associated with this sign bit. */
11931 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
11932 /* signed_type does not work on pointer types. */
11933 && INTEGRAL_TYPE_P (arg1_type))
11934 {
11935 	      /* The following case also applies to X < signed_max+1
11936 		 and X >= signed_max+1 because of previous transformations.  */
11937 if (code == LE_EXPR || code == GT_EXPR)
11938 {
11939 tree st;
11940 st = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11941 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
11942 type, fold_convert (st, arg0),
11943 build_int_cst (st, 0));
11944 }
11945 }
11946 }
11947 }
11948
11949 /* If we are comparing an ABS_EXPR with a constant, we can
11950 convert all the cases into explicit comparisons, but they may
11951 well not be faster than doing the ABS and one comparison.
11952 But ABS (X) <= C is a range comparison, which becomes a subtraction
11953 and a comparison, and is probably faster. */
11954 if (code == LE_EXPR
11955 && TREE_CODE (arg1) == INTEGER_CST
11956 && TREE_CODE (arg0) == ABS_EXPR
11957 && ! TREE_SIDE_EFFECTS (arg0)
11958 && (0 != (tem = negate_expr (arg1)))
11959 && TREE_CODE (tem) == INTEGER_CST
11960 && !TREE_OVERFLOW (tem))
11961 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11962 build2 (GE_EXPR, type,
11963 TREE_OPERAND (arg0, 0), tem),
11964 build2 (LE_EXPR, type,
11965 TREE_OPERAND (arg0, 0), arg1));
11966
11967 /* Convert ABS_EXPR<x> >= 0 to true. */
11968 strict_overflow_p = false;
11969 if (code == GE_EXPR
11970 && (integer_zerop (arg1)
11971 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11972 && real_zerop (arg1)))
11973 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11974 {
11975 if (strict_overflow_p)
11976 fold_overflow_warning (("assuming signed overflow does not occur "
11977 "when simplifying comparison of "
11978 "absolute value and zero"),
11979 WARN_STRICT_OVERFLOW_CONDITIONAL);
11980 return omit_one_operand (type, integer_one_node, arg0);
11981 }
11982
11983 /* Convert ABS_EXPR<x> < 0 to false. */
11984 strict_overflow_p = false;
11985 if (code == LT_EXPR
11986 && (integer_zerop (arg1) || real_zerop (arg1))
11987 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11988 {
11989 if (strict_overflow_p)
11990 fold_overflow_warning (("assuming signed overflow does not occur "
11991 "when simplifying comparison of "
11992 "absolute value and zero"),
11993 WARN_STRICT_OVERFLOW_CONDITIONAL);
11994 return omit_one_operand (type, integer_zero_node, arg0);
11995 }
11996
11997 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11998 and similarly for >= into !=. */
11999 if ((code == LT_EXPR || code == GE_EXPR)
12000 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12001 && TREE_CODE (arg1) == LSHIFT_EXPR
12002 && integer_onep (TREE_OPERAND (arg1, 0)))
12003 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12004 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12005 TREE_OPERAND (arg1, 1)),
12006 build_int_cst (TREE_TYPE (arg0), 0));
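      /* Worked example (editorial, not in the original source): for
	 "unsigned x, y", "x < (1u << y)" becomes "(x >> y) == 0" and
	 "x >= (1u << y)" becomes "(x >> y) != 0"; X is below 2**Y
	 exactly when its bits at or above position Y are all zero.  */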
12007
12008 if ((code == LT_EXPR || code == GE_EXPR)
12009 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12010 && (TREE_CODE (arg1) == NOP_EXPR
12011 || TREE_CODE (arg1) == CONVERT_EXPR)
12012 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12013 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12014 return
12015 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12016 fold_convert (TREE_TYPE (arg0),
12017 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12018 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12019 1))),
12020 build_int_cst (TREE_TYPE (arg0), 0));
12021
12022 return NULL_TREE;
12023
12024 case UNORDERED_EXPR:
12025 case ORDERED_EXPR:
12026 case UNLT_EXPR:
12027 case UNLE_EXPR:
12028 case UNGT_EXPR:
12029 case UNGE_EXPR:
12030 case UNEQ_EXPR:
12031 case LTGT_EXPR:
12032 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12033 {
12034 t1 = fold_relational_const (code, type, arg0, arg1);
12035 if (t1 != NULL_TREE)
12036 return t1;
12037 }
12038
12039 /* If the first operand is NaN, the result is constant. */
12040 if (TREE_CODE (arg0) == REAL_CST
12041 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12042 && (code != LTGT_EXPR || ! flag_trapping_math))
12043 {
12044 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12045 ? integer_zero_node
12046 : integer_one_node;
12047 return omit_one_operand (type, t1, arg1);
12048 }
12049
12050 /* If the second operand is NaN, the result is constant. */
12051 if (TREE_CODE (arg1) == REAL_CST
12052 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12053 && (code != LTGT_EXPR || ! flag_trapping_math))
12054 {
12055 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12056 ? integer_zero_node
12057 : integer_one_node;
12058 return omit_one_operand (type, t1, arg0);
12059 }
12060
12061 /* Simplify unordered comparison of something with itself. */
12062 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12063 && operand_equal_p (arg0, arg1, 0))
12064 return constant_boolean_node (1, type);
12065
12066 if (code == LTGT_EXPR
12067 && !flag_trapping_math
12068 && operand_equal_p (arg0, arg1, 0))
12069 return constant_boolean_node (0, type);
12070
12071 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12072 {
12073 tree targ0 = strip_float_extensions (arg0);
12074 tree targ1 = strip_float_extensions (arg1);
12075 tree newtype = TREE_TYPE (targ0);
12076
12077 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12078 newtype = TREE_TYPE (targ1);
12079
12080 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12081 return fold_build2 (code, type, fold_convert (newtype, targ0),
12082 fold_convert (newtype, targ1));
12083 }
12084
12085 return NULL_TREE;
12086
12087 case COMPOUND_EXPR:
12088 /* When pedantic, a compound expression can be neither an lvalue
12089 nor an integer constant expression. */
12090 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12091 return NULL_TREE;
12092       /* Don't let (0, 0) be a null pointer constant.  */
12093 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12094 : fold_convert (type, arg1);
12095 return pedantic_non_lvalue (tem);
12096
12097 case COMPLEX_EXPR:
12098 if ((TREE_CODE (arg0) == REAL_CST
12099 && TREE_CODE (arg1) == REAL_CST)
12100 || (TREE_CODE (arg0) == INTEGER_CST
12101 && TREE_CODE (arg1) == INTEGER_CST))
12102 return build_complex (type, arg0, arg1);
12103 return NULL_TREE;
12104
12105 case ASSERT_EXPR:
12106 /* An ASSERT_EXPR should never be passed to fold_binary. */
12107 gcc_unreachable ();
12108
12109 default:
12110 return NULL_TREE;
12111 } /* switch (code) */
12112 }
12113
12114 /* Callback for walk_tree, looking for a LABEL_EXPR.  Returns *TP if
12115    it is a LABEL_EXPR; otherwise returns NULL_TREE.  Does not descend
12116    into the sub-tree of a GOTO_EXPR.  */
12117
12118 static tree
12119 contains_label_1 (tree *tp,
12120 int *walk_subtrees,
12121 void *data ATTRIBUTE_UNUSED)
12122 {
12123 switch (TREE_CODE (*tp))
12124 {
12125 case LABEL_EXPR:
12126 return *tp;
12127 case GOTO_EXPR:
12128 *walk_subtrees = 0;
12129 /* no break */
12130 default:
12131 return NULL_TREE;
12132 }
12133 }
12134
12135 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
12136    accessible from outside the sub-tree.  Returns true if such a label
12137    is found and false otherwise.  */
12138
12139 static bool
12140 contains_label_p (tree st)
12141 {
12142   return (walk_tree (&st, contains_label_1, NULL, NULL) != NULL_TREE);
12143 }
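/* Editorial sketch, not part of the original source: how a caller can
   guard against discarding a dead branch that is still a jump target.
   "dead_arm" is a hypothetical tree.  */
#if 0
static bool
safe_to_discard_p (tree dead_arm)
{
  /* walk_tree stops at GOTO_EXPR operands because contains_label_1
     clears *walk_subtrees there, so only labels defined inside the
     branch itself are reported.  */
  return !contains_label_p (dead_arm);
}
#endif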
12144
12145 /* Fold a ternary expression of code CODE and type TYPE with operands
12146 OP0, OP1, and OP2. Return the folded expression if folding is
12147 successful. Otherwise, return NULL_TREE. */
12148
12149 tree
12150 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
12151 {
12152 tree tem;
12153 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
12154 enum tree_code_class kind = TREE_CODE_CLASS (code);
12155
12156 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12157 && TREE_CODE_LENGTH (code) == 3);
12158
12159 /* Strip any conversions that don't change the mode. This is safe
12160 for every expression, except for a comparison expression because
12161 its signedness is derived from its operands. So, in the latter
12162 case, only strip conversions that don't change the signedness.
12163
12164 Note that this is done as an internal manipulation within the
12165 constant folder, in order to find the simplest representation of
12166 the arguments so that their form can be studied. In any cases,
12167 the appropriate type conversions should be put back in the tree
12168 that will get out of the constant folder. */
12169 if (op0)
12170 {
12171 arg0 = op0;
12172 STRIP_NOPS (arg0);
12173 }
12174
12175 if (op1)
12176 {
12177 arg1 = op1;
12178 STRIP_NOPS (arg1);
12179 }
12180
12181 switch (code)
12182 {
12183 case COMPONENT_REF:
12184 if (TREE_CODE (arg0) == CONSTRUCTOR
12185 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12186 {
12187 unsigned HOST_WIDE_INT idx;
12188 tree field, value;
12189 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12190 if (field == arg1)
12191 return value;
12192 }
12193 return NULL_TREE;
12194
12195 case COND_EXPR:
12196 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12197 so all simple results must be passed through pedantic_non_lvalue. */
12198 if (TREE_CODE (arg0) == INTEGER_CST)
12199 {
12200 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12201 tem = integer_zerop (arg0) ? op2 : op1;
12202 /* Only optimize constant conditions when the selected branch
12203 has the same type as the COND_EXPR. This avoids optimizing
12204 away "c ? x : throw", where the throw has a void type.
12205 	     Also avoid discarding an operand that contains a label.  */
12206 if ((!TREE_SIDE_EFFECTS (unused_op)
12207 || !contains_label_p (unused_op))
12208 && (! VOID_TYPE_P (TREE_TYPE (tem))
12209 || VOID_TYPE_P (type)))
12210 return pedantic_non_lvalue (tem);
12211 return NULL_TREE;
12212 }
12213 if (operand_equal_p (arg1, op2, 0))
12214 return pedantic_omit_one_operand (type, arg1, arg0);
12215
12216 /* If we have A op B ? A : C, we may be able to convert this to a
12217 simpler expression, depending on the operation and the values
12218 of B and C. Signed zeros prevent all of these transformations,
12219 for reasons given above each one.
12220
12221 Also try swapping the arguments and inverting the conditional. */
12222 if (COMPARISON_CLASS_P (arg0)
12223 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12224 arg1, TREE_OPERAND (arg0, 1))
12225 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
12226 {
12227 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
12228 if (tem)
12229 return tem;
12230 }
12231
12232 if (COMPARISON_CLASS_P (arg0)
12233 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12234 op2,
12235 TREE_OPERAND (arg0, 1))
12236 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
12237 {
12238 tem = fold_truth_not_expr (arg0);
12239 if (tem && COMPARISON_CLASS_P (tem))
12240 {
12241 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
12242 if (tem)
12243 return tem;
12244 }
12245 }
12246
12247 /* If the second operand is simpler than the third, swap them
12248 since that produces better jump optimization results. */
12249 if (truth_value_p (TREE_CODE (arg0))
12250 && tree_swap_operands_p (op1, op2, false))
12251 {
12252 /* See if this can be inverted. If it can't, possibly because
12253 it was a floating-point inequality comparison, don't do
12254 anything. */
12255 tem = fold_truth_not_expr (arg0);
12256 if (tem)
12257 return fold_build3 (code, type, tem, op2, op1);
12258 }
12259
12260 /* Convert A ? 1 : 0 to simply A. */
12261 if (integer_onep (op1)
12262 && integer_zerop (op2)
12263 /* If we try to convert OP0 to our type, the
12264 call to fold will try to move the conversion inside
12265 a COND, which will recurse. In that case, the COND_EXPR
12266 is probably the best choice, so leave it alone. */
12267 && type == TREE_TYPE (arg0))
12268 return pedantic_non_lvalue (arg0);
12269
12270 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12271 over COND_EXPR in cases such as floating point comparisons. */
12272 if (integer_zerop (op1)
12273 && integer_onep (op2)
12274 && truth_value_p (TREE_CODE (arg0)))
12275 return pedantic_non_lvalue (fold_convert (type,
12276 invert_truthvalue (arg0)));
12277
12278 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12279 if (TREE_CODE (arg0) == LT_EXPR
12280 && integer_zerop (TREE_OPERAND (arg0, 1))
12281 && integer_zerop (op2)
12282 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12283 {
12284 /* sign_bit_p only checks ARG1 bits within A's precision.
12285 If <sign bit of A> has wider type than A, bits outside
12286 of A's precision in <sign bit of A> need to be checked.
12287 	     If they are all 0, this optimization needs to be done
12288 	     in the unsigned variant of A's type; if they are all 1,
12289 	     in the signed variant; otherwise this can't be done.  */
12290 if (TYPE_PRECISION (TREE_TYPE (tem))
12291 < TYPE_PRECISION (TREE_TYPE (arg1))
12292 && TYPE_PRECISION (TREE_TYPE (tem))
12293 < TYPE_PRECISION (type))
12294 {
12295 unsigned HOST_WIDE_INT mask_lo;
12296 HOST_WIDE_INT mask_hi;
12297 int inner_width, outer_width;
12298 tree tem_type;
12299
12300 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12301 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12302 if (outer_width > TYPE_PRECISION (type))
12303 outer_width = TYPE_PRECISION (type);
12304
12305 if (outer_width > HOST_BITS_PER_WIDE_INT)
12306 {
12307 mask_hi = ((unsigned HOST_WIDE_INT) -1
12308 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
12309 mask_lo = -1;
12310 }
12311 else
12312 {
12313 mask_hi = 0;
12314 mask_lo = ((unsigned HOST_WIDE_INT) -1
12315 >> (HOST_BITS_PER_WIDE_INT - outer_width));
12316 }
12317 if (inner_width > HOST_BITS_PER_WIDE_INT)
12318 {
12319 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
12320 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12321 mask_lo = 0;
12322 }
12323 else
12324 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
12325 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12326
12327 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
12328 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
12329 {
12330 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
12331 tem = fold_convert (tem_type, tem);
12332 }
12333 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
12334 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
12335 {
12336 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
12337 tem = fold_convert (tem_type, tem);
12338 }
12339 else
12340 tem = NULL;
12341 }
12342
12343 if (tem)
12344 return fold_convert (type,
12345 fold_build2 (BIT_AND_EXPR,
12346 TREE_TYPE (tem), tem,
12347 fold_convert (TREE_TYPE (tem),
12348 arg1)));
12349 }
12350
12351 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12352 already handled above. */
12353 if (TREE_CODE (arg0) == BIT_AND_EXPR
12354 && integer_onep (TREE_OPERAND (arg0, 1))
12355 && integer_zerop (op2)
12356 && integer_pow2p (arg1))
12357 {
12358 tree tem = TREE_OPERAND (arg0, 0);
12359 STRIP_NOPS (tem);
12360 if (TREE_CODE (tem) == RSHIFT_EXPR
12361 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
12362 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
12363 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
12364 return fold_build2 (BIT_AND_EXPR, type,
12365 TREE_OPERAND (tem, 0), arg1);
12366 }
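	  /* Worked example (editorial, not in the original source): with
	     "int a", "(a >> 3) & 1 ? 8 : 0" tests exactly bit 3 of A,
	     so it folds to "a & 8".  */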
12367
12368 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12369 is probably obsolete because the first operand should be a
12370 truth value (that's why we have the two cases above), but let's
12371 leave it in until we can confirm this for all front-ends. */
12372 if (integer_zerop (op2)
12373 && TREE_CODE (arg0) == NE_EXPR
12374 && integer_zerop (TREE_OPERAND (arg0, 1))
12375 && integer_pow2p (arg1)
12376 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12377 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12378 arg1, OEP_ONLY_CONST))
12379 return pedantic_non_lvalue (fold_convert (type,
12380 TREE_OPERAND (arg0, 0)));
12381
12382 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12383 if (integer_zerop (op2)
12384 && truth_value_p (TREE_CODE (arg0))
12385 && truth_value_p (TREE_CODE (arg1)))
12386 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12387 fold_convert (type, arg0),
12388 arg1);
12389
12390 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12391 if (integer_onep (op2)
12392 && truth_value_p (TREE_CODE (arg0))
12393 && truth_value_p (TREE_CODE (arg1)))
12394 {
12395 /* Only perform transformation if ARG0 is easily inverted. */
12396 tem = fold_truth_not_expr (arg0);
12397 if (tem)
12398 return fold_build2 (TRUTH_ORIF_EXPR, type,
12399 fold_convert (type, tem),
12400 arg1);
12401 }
12402
12403 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12404 if (integer_zerop (arg1)
12405 && truth_value_p (TREE_CODE (arg0))
12406 && truth_value_p (TREE_CODE (op2)))
12407 {
12408 /* Only perform transformation if ARG0 is easily inverted. */
12409 tem = fold_truth_not_expr (arg0);
12410 if (tem)
12411 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12412 fold_convert (type, tem),
12413 op2);
12414 }
12415
12416 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12417 if (integer_onep (arg1)
12418 && truth_value_p (TREE_CODE (arg0))
12419 && truth_value_p (TREE_CODE (op2)))
12420 return fold_build2 (TRUTH_ORIF_EXPR, type,
12421 fold_convert (type, arg0),
12422 op2);
12423
12424 return NULL_TREE;
12425
12426 case CALL_EXPR:
12427 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12428 of fold_ternary on them. */
12429 gcc_unreachable ();
12430
12431 case BIT_FIELD_REF:
12432 if ((TREE_CODE (arg0) == VECTOR_CST
12433 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
12434 && type == TREE_TYPE (TREE_TYPE (arg0))
12435 && host_integerp (arg1, 1)
12436 && host_integerp (op2, 1))
12437 {
12438 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
12439 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
12440
12441 if (width != 0
12442 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
12443 && (idx % width) == 0
12444 && (idx = idx / width)
12445 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
12446 {
12447 tree elements = NULL_TREE;
12448
12449 if (TREE_CODE (arg0) == VECTOR_CST)
12450 elements = TREE_VECTOR_CST_ELTS (arg0);
12451 else
12452 {
12453 unsigned HOST_WIDE_INT idx;
12454 tree value;
12455
12456 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
12457 elements = tree_cons (NULL_TREE, value, elements);
12458 }
12459 while (idx-- > 0 && elements)
12460 elements = TREE_CHAIN (elements);
12461 if (elements)
12462 return TREE_VALUE (elements);
12463 else
12464 return fold_convert (type, integer_zero_node);
12465 }
12466 }
12467 return NULL_TREE;
12468
12469 default:
12470 return NULL_TREE;
12471 } /* switch (code) */
12472 }
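/* Editorial sketch, not part of the original source: the "A ? 1 : 0"
   fold above as seen from a caller.  Assumes COND is a truth value
   whose type is an integral type.  */
#if 0
static tree
cond_fold_example (tree cond)
{
  /* Folds back to plain COND because op1 is 1, op2 is 0, and the
     requested type matches TREE_TYPE (cond).  */
  return fold_build3 (COND_EXPR, TREE_TYPE (cond), cond,
		      build_int_cst (TREE_TYPE (cond), 1),
		      build_int_cst (TREE_TYPE (cond), 0));
}
#endif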
12473
12474 /* Perform constant folding and related simplification of EXPR.
12475 The related simplifications include x*1 => x, x*0 => 0, etc.,
12476 and application of the associative law.
12477 NOP_EXPR conversions may be removed freely (as long as we
12478 are careful not to change the type of the overall expression).
12479 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12480 but we can constant-fold them if they have constant operands. */
12481
12482 #ifdef ENABLE_FOLD_CHECKING
12483 # define fold(x) fold_1 (x)
12484 static tree fold_1 (tree);
12485 static
12486 #endif
12487 tree
12488 fold (tree expr)
12489 {
12490 const tree t = expr;
12491 enum tree_code code = TREE_CODE (t);
12492 enum tree_code_class kind = TREE_CODE_CLASS (code);
12493 tree tem;
12494
12495 /* Return right away if a constant. */
12496 if (kind == tcc_constant)
12497 return t;
12498
12499 /* CALL_EXPR-like objects with variable numbers of operands are
12500 treated specially. */
12501 if (kind == tcc_vl_exp)
12502 {
12503 if (code == CALL_EXPR)
12504 {
12505 tem = fold_call_expr (expr, false);
12506 return tem ? tem : expr;
12507 }
12508 return expr;
12509 }
12510
12511 if (IS_EXPR_CODE_CLASS (kind)
12512 || IS_GIMPLE_STMT_CODE_CLASS (kind))
12513 {
12514 tree type = TREE_TYPE (t);
12515 tree op0, op1, op2;
12516
12517 switch (TREE_CODE_LENGTH (code))
12518 {
12519 case 1:
12520 op0 = TREE_OPERAND (t, 0);
12521 tem = fold_unary (code, type, op0);
12522 return tem ? tem : expr;
12523 case 2:
12524 op0 = TREE_OPERAND (t, 0);
12525 op1 = TREE_OPERAND (t, 1);
12526 tem = fold_binary (code, type, op0, op1);
12527 return tem ? tem : expr;
12528 case 3:
12529 op0 = TREE_OPERAND (t, 0);
12530 op1 = TREE_OPERAND (t, 1);
12531 op2 = TREE_OPERAND (t, 2);
12532 tem = fold_ternary (code, type, op0, op1, op2);
12533 return tem ? tem : expr;
12534 default:
12535 break;
12536 }
12537 }
12538
12539 switch (code)
12540 {
12541 case CONST_DECL:
12542 return fold (DECL_INITIAL (t));
12543
12544 default:
12545 return t;
12546 } /* switch (code) */
12547 }
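/* Editorial sketch, not part of the original source: fold never returns
   NULL; it hands back either a simplified tree or the input itself.  */
#if 0
static tree
fold_example (void)
{
  /* Build "2 + 3" unfolded, then let fold collapse it to the
     INTEGER_CST 5 via fold_binary.  */
  tree sum = build2 (PLUS_EXPR, integer_type_node,
		     build_int_cst (integer_type_node, 2),
		     build_int_cst (integer_type_node, 3));
  return fold (sum);
}
#endif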
12548
12549 #ifdef ENABLE_FOLD_CHECKING
12550 #undef fold
12551
12552 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
12553 static void fold_check_failed (tree, tree);
12554 void print_fold_checksum (tree);
12555
12556 /* When --enable-checking=fold, compute a digest of EXPR before
12557    and after the actual fold call to verify that fold did not
12558    accidentally change the original expression.  */
12559
12560 tree
12561 fold (tree expr)
12562 {
12563 tree ret;
12564 struct md5_ctx ctx;
12565 unsigned char checksum_before[16], checksum_after[16];
12566 htab_t ht;
12567
12568 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12569 md5_init_ctx (&ctx);
12570 fold_checksum_tree (expr, &ctx, ht);
12571 md5_finish_ctx (&ctx, checksum_before);
12572 htab_empty (ht);
12573
12574 ret = fold_1 (expr);
12575
12576 md5_init_ctx (&ctx);
12577 fold_checksum_tree (expr, &ctx, ht);
12578 md5_finish_ctx (&ctx, checksum_after);
12579 htab_delete (ht);
12580
12581 if (memcmp (checksum_before, checksum_after, 16))
12582 fold_check_failed (expr, ret);
12583
12584 return ret;
12585 }
12586
12587 void
12588 print_fold_checksum (tree expr)
12589 {
12590 struct md5_ctx ctx;
12591 unsigned char checksum[16], cnt;
12592 htab_t ht;
12593
12594 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12595 md5_init_ctx (&ctx);
12596 fold_checksum_tree (expr, &ctx, ht);
12597 md5_finish_ctx (&ctx, checksum);
12598 htab_delete (ht);
12599 for (cnt = 0; cnt < 16; ++cnt)
12600 fprintf (stderr, "%02x", checksum[cnt]);
12601 putc ('\n', stderr);
12602 }
12603
12604 static void
12605 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
12606 {
12607 internal_error ("fold check: original tree changed by fold");
12608 }
12609
12610 static void
12611 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
12612 {
12613 void **slot;
12614 enum tree_code code;
12615 struct tree_function_decl buf;
12616 int i, len;
12617
12618 recursive_label:
12619
12620 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
12621 <= sizeof (struct tree_function_decl))
12622 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
12623 if (expr == NULL)
12624 return;
12625 slot = htab_find_slot (ht, expr, INSERT);
12626 if (*slot != NULL)
12627 return;
12628 *slot = expr;
12629 code = TREE_CODE (expr);
12630 if (TREE_CODE_CLASS (code) == tcc_declaration
12631 && DECL_ASSEMBLER_NAME_SET_P (expr))
12632 {
12633 /* Allow DECL_ASSEMBLER_NAME to be modified. */
12634 memcpy ((char *) &buf, expr, tree_size (expr));
12635 expr = (tree) &buf;
12636 SET_DECL_ASSEMBLER_NAME (expr, NULL);
12637 }
12638 else if (TREE_CODE_CLASS (code) == tcc_type
12639 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
12640 || TYPE_CACHED_VALUES_P (expr)
12641 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
12642 {
12643 /* Allow these fields to be modified. */
12644 memcpy ((char *) &buf, expr, tree_size (expr));
12645 expr = (tree) &buf;
12646 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
12647 TYPE_POINTER_TO (expr) = NULL;
12648 TYPE_REFERENCE_TO (expr) = NULL;
12649 if (TYPE_CACHED_VALUES_P (expr))
12650 {
12651 TYPE_CACHED_VALUES_P (expr) = 0;
12652 TYPE_CACHED_VALUES (expr) = NULL;
12653 }
12654 }
12655 md5_process_bytes (expr, tree_size (expr), ctx);
12656 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12657 if (TREE_CODE_CLASS (code) != tcc_type
12658 && TREE_CODE_CLASS (code) != tcc_declaration
12659 && code != TREE_LIST)
12660 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12661 switch (TREE_CODE_CLASS (code))
12662 {
12663 case tcc_constant:
12664 switch (code)
12665 {
12666 case STRING_CST:
12667 md5_process_bytes (TREE_STRING_POINTER (expr),
12668 TREE_STRING_LENGTH (expr), ctx);
12669 break;
12670 case COMPLEX_CST:
12671 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12672 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12673 break;
12674 case VECTOR_CST:
12675 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
12676 break;
12677 default:
12678 break;
12679 }
12680 break;
12681 case tcc_exceptional:
12682 switch (code)
12683 {
12684 case TREE_LIST:
12685 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12686 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12687 expr = TREE_CHAIN (expr);
12688 goto recursive_label;
12689 break;
12690 case TREE_VEC:
12691 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12692 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12693 break;
12694 default:
12695 break;
12696 }
12697 break;
12698 case tcc_expression:
12699 case tcc_reference:
12700 case tcc_comparison:
12701 case tcc_unary:
12702 case tcc_binary:
12703 case tcc_statement:
12704 case tcc_vl_exp:
12705 len = TREE_OPERAND_LENGTH (expr);
12706 for (i = 0; i < len; ++i)
12707 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12708 break;
12709 case tcc_declaration:
12710 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12711 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12712 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12713 {
12714 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12715 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12716 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12717 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12718 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12719 }
12720 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
12721 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
12722
12723 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12724 {
12725 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12726 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12727 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
12728 }
12729 break;
12730 case tcc_type:
12731 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12732 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12733 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12734 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12735 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12736 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12737 if (INTEGRAL_TYPE_P (expr)
12738 || SCALAR_FLOAT_TYPE_P (expr))
12739 {
12740 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12741 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12742 }
12743 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12744 if (TREE_CODE (expr) == RECORD_TYPE
12745 || TREE_CODE (expr) == UNION_TYPE
12746 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12747 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12748 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12749 break;
12750 default:
12751 break;
12752 }
12753 }
12754
12755 #endif
12756
12757 /* Fold a unary tree expression with code CODE of type TYPE with an
12758 operand OP0. Return a folded expression if successful. Otherwise,
12759 return a tree expression with code CODE of type TYPE with an
12760 operand OP0. */
12761
12762 tree
12763 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12764 {
12765 tree tem;
12766 #ifdef ENABLE_FOLD_CHECKING
12767 unsigned char checksum_before[16], checksum_after[16];
12768 struct md5_ctx ctx;
12769 htab_t ht;
12770
12771 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12772 md5_init_ctx (&ctx);
12773 fold_checksum_tree (op0, &ctx, ht);
12774 md5_finish_ctx (&ctx, checksum_before);
12775 htab_empty (ht);
12776 #endif
12777
12778 tem = fold_unary (code, type, op0);
12779 if (!tem)
12780 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
12781
12782 #ifdef ENABLE_FOLD_CHECKING
12783 md5_init_ctx (&ctx);
12784 fold_checksum_tree (op0, &ctx, ht);
12785 md5_finish_ctx (&ctx, checksum_after);
12786 htab_delete (ht);
12787
12788 if (memcmp (checksum_before, checksum_after, 16))
12789 fold_check_failed (op0, tem);
12790 #endif
12791 return tem;
12792 }
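/* Editorial sketch, not part of the original source: fold_build1 (the
   macro wrapping fold_build1_stat) folds eagerly, so constant operands
   never produce a NEGATE_EXPR node.  */
#if 0
static tree
fold_build1_example (tree op)
{
  /* For a constant OP this returns the negated constant; otherwise a
     fresh NEGATE_EXPR of the same type.  */
  return fold_build1 (NEGATE_EXPR, TREE_TYPE (op), op);
}
#endif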
12793
12794 /* Fold a binary tree expression with code CODE of type TYPE with
12795 operands OP0 and OP1. Return a folded expression if successful.
12796 Otherwise, return a tree expression with code CODE of type TYPE
12797 with operands OP0 and OP1. */
12798
12799 tree
12800 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
12801 MEM_STAT_DECL)
12802 {
12803 tree tem;
12804 #ifdef ENABLE_FOLD_CHECKING
12805 unsigned char checksum_before_op0[16],
12806 checksum_before_op1[16],
12807 checksum_after_op0[16],
12808 checksum_after_op1[16];
12809 struct md5_ctx ctx;
12810 htab_t ht;
12811
12812 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12813 md5_init_ctx (&ctx);
12814 fold_checksum_tree (op0, &ctx, ht);
12815 md5_finish_ctx (&ctx, checksum_before_op0);
12816 htab_empty (ht);
12817
12818 md5_init_ctx (&ctx);
12819 fold_checksum_tree (op1, &ctx, ht);
12820 md5_finish_ctx (&ctx, checksum_before_op1);
12821 htab_empty (ht);
12822 #endif
12823
12824 tem = fold_binary (code, type, op0, op1);
12825 if (!tem)
12826 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
12827
12828 #ifdef ENABLE_FOLD_CHECKING
12829 md5_init_ctx (&ctx);
12830 fold_checksum_tree (op0, &ctx, ht);
12831 md5_finish_ctx (&ctx, checksum_after_op0);
12832 htab_empty (ht);
12833
12834 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12835 fold_check_failed (op0, tem);
12836
12837 md5_init_ctx (&ctx);
12838 fold_checksum_tree (op1, &ctx, ht);
12839 md5_finish_ctx (&ctx, checksum_after_op1);
12840 htab_delete (ht);
12841
12842 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12843 fold_check_failed (op1, tem);
12844 #endif
12845 return tem;
12846 }
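/* Editorial sketch, not part of the original source: the binary
   counterpart; constant arithmetic is folded at build time.  */
#if 0
static tree
fold_build2_example (void)
{
  /* Yields the INTEGER_CST 3 directly; no PLUS_EXPR is allocated.  */
  return fold_build2 (PLUS_EXPR, integer_type_node,
		      build_int_cst (integer_type_node, 1),
		      build_int_cst (integer_type_node, 2));
}
#endif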
12847
12848 /* Fold a ternary tree expression with code CODE of type TYPE with
12849 operands OP0, OP1, and OP2. Return a folded expression if
12850 successful. Otherwise, return a tree expression with code CODE of
12851 type TYPE with operands OP0, OP1, and OP2. */
12852
12853 tree
12854 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
12855 MEM_STAT_DECL)
12856 {
12857 tree tem;
12858 #ifdef ENABLE_FOLD_CHECKING
12859 unsigned char checksum_before_op0[16],
12860 checksum_before_op1[16],
12861 checksum_before_op2[16],
12862 checksum_after_op0[16],
12863 checksum_after_op1[16],
12864 checksum_after_op2[16];
12865 struct md5_ctx ctx;
12866 htab_t ht;
12867
12868 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12869 md5_init_ctx (&ctx);
12870 fold_checksum_tree (op0, &ctx, ht);
12871 md5_finish_ctx (&ctx, checksum_before_op0);
12872 htab_empty (ht);
12873
12874 md5_init_ctx (&ctx);
12875 fold_checksum_tree (op1, &ctx, ht);
12876 md5_finish_ctx (&ctx, checksum_before_op1);
12877 htab_empty (ht);
12878
12879 md5_init_ctx (&ctx);
12880 fold_checksum_tree (op2, &ctx, ht);
12881 md5_finish_ctx (&ctx, checksum_before_op2);
12882 htab_empty (ht);
12883 #endif
12884
12885 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12886 tem = fold_ternary (code, type, op0, op1, op2);
12887 if (!tem)
12888 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
12889
12890 #ifdef ENABLE_FOLD_CHECKING
12891 md5_init_ctx (&ctx);
12892 fold_checksum_tree (op0, &ctx, ht);
12893 md5_finish_ctx (&ctx, checksum_after_op0);
12894 htab_empty (ht);
12895
12896 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12897 fold_check_failed (op0, tem);
12898
12899 md5_init_ctx (&ctx);
12900 fold_checksum_tree (op1, &ctx, ht);
12901 md5_finish_ctx (&ctx, checksum_after_op1);
12902 htab_empty (ht);
12903
12904 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12905 fold_check_failed (op1, tem);
12906
12907 md5_init_ctx (&ctx);
12908 fold_checksum_tree (op2, &ctx, ht);
12909 md5_finish_ctx (&ctx, checksum_after_op2);
12910 htab_delete (ht);
12911
12912 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12913 fold_check_failed (op2, tem);
12914 #endif
12915 return tem;
12916 }
12917
12918 /* Fold a CALL_EXPR expression of type TYPE whose callee is FN and whose
12919    NARGS arguments are in ARGARRAY, with a null static chain.
12920 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12921 of type TYPE from the given operands as constructed by build_call_array. */
12922
12923 tree
12924 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
12925 {
12926 tree tem;
12927 #ifdef ENABLE_FOLD_CHECKING
12928 unsigned char checksum_before_fn[16],
12929 checksum_before_arglist[16],
12930 checksum_after_fn[16],
12931 checksum_after_arglist[16];
12932 struct md5_ctx ctx;
12933 htab_t ht;
12934 int i;
12935
12936 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12937 md5_init_ctx (&ctx);
12938 fold_checksum_tree (fn, &ctx, ht);
12939 md5_finish_ctx (&ctx, checksum_before_fn);
12940 htab_empty (ht);
12941
12942 md5_init_ctx (&ctx);
12943 for (i = 0; i < nargs; i++)
12944 fold_checksum_tree (argarray[i], &ctx, ht);
12945 md5_finish_ctx (&ctx, checksum_before_arglist);
12946 htab_empty (ht);
12947 #endif
12948
12949 tem = fold_builtin_call_array (type, fn, nargs, argarray);
12950
12951 #ifdef ENABLE_FOLD_CHECKING
12952 md5_init_ctx (&ctx);
12953 fold_checksum_tree (fn, &ctx, ht);
12954 md5_finish_ctx (&ctx, checksum_after_fn);
12955 htab_empty (ht);
12956
12957 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12958 fold_check_failed (fn, tem);
12959
12960 md5_init_ctx (&ctx);
12961 for (i = 0; i < nargs; i++)
12962 fold_checksum_tree (argarray[i], &ctx, ht);
12963 md5_finish_ctx (&ctx, checksum_after_arglist);
12964 htab_delete (ht);
12965
12966 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12967 fold_check_failed (NULL_TREE, tem);
12968 #endif
12969 return tem;
12970 }
12971
12972 /* Perform constant folding and related simplification of initializer
12973    expressions.  These behave identically to "fold_buildN" but ignore
12974    potential run-time traps and exceptions that fold must preserve.  */
12975
12976 #define START_FOLD_INIT \
12977 int saved_signaling_nans = flag_signaling_nans;\
12978 int saved_trapping_math = flag_trapping_math;\
12979 int saved_rounding_math = flag_rounding_math;\
12980 int saved_trapv = flag_trapv;\
12981 int saved_folding_initializer = folding_initializer;\
12982 flag_signaling_nans = 0;\
12983 flag_trapping_math = 0;\
12984 flag_rounding_math = 0;\
12985 flag_trapv = 0;\
12986 folding_initializer = 1;
12987
12988 #define END_FOLD_INIT \
12989 flag_signaling_nans = saved_signaling_nans;\
12990 flag_trapping_math = saved_trapping_math;\
12991 flag_rounding_math = saved_rounding_math;\
12992 flag_trapv = saved_trapv;\
12993 folding_initializer = saved_folding_initializer;
12994
12995 tree
12996 fold_build1_initializer (enum tree_code code, tree type, tree op)
12997 {
12998 tree result;
12999 START_FOLD_INIT;
13000
13001 result = fold_build1 (code, type, op);
13002
13003 END_FOLD_INIT;
13004 return result;
13005 }
13006
13007 tree
13008 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
13009 {
13010 tree result;
13011 START_FOLD_INIT;
13012
13013 result = fold_build2 (code, type, op0, op1);
13014
13015 END_FOLD_INIT;
13016 return result;
13017 }
13018
13019 tree
13020 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
13021 tree op2)
13022 {
13023 tree result;
13024 START_FOLD_INIT;
13025
13026 result = fold_build3 (code, type, op0, op1, op2);
13027
13028 END_FOLD_INIT;
13029 return result;
13030 }
13031
13032 tree
13033 fold_build_call_array_initializer (tree type, tree fn,
13034 int nargs, tree *argarray)
13035 {
13036 tree result;
13037 START_FOLD_INIT;
13038
13039 result = fold_build_call_array (type, fn, nargs, argarray);
13040
13041 END_FOLD_INIT;
13042 return result;
13043 }
13044
13045 #undef START_FOLD_INIT
13046 #undef END_FOLD_INIT
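/* Editorial sketch, not part of the original source: a front end
   folding inside a static initializer.  LHS and RHS are hypothetical
   operand trees.  */
#if 0
static tree
fold_initializer_example (tree lhs, tree rhs)
{
  /* Under -ftrapv a plain fold_build2 must preserve the trapping
     addition; within an initializer the wrappers above clear
     flag_trapv (and friends) around the fold, so the sum may be
     folded outright.  */
  return fold_build2_initializer (PLUS_EXPR, TREE_TYPE (lhs), lhs, rhs);
}
#endif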
13047
13048 /* Determine if the first argument is a multiple of the second argument.
13049    Return 0 if it is not, or if we cannot easily determine it to be one.
13050
13051 An example of the sort of thing we care about (at this point; this routine
13052 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13053 fold cases do now) is discovering that
13054
13055 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13056
13057 is a multiple of
13058
13059 SAVE_EXPR (J * 8)
13060
13061 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13062
13063 This code also handles discovering that
13064
13065 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13066
13067 is a multiple of 8 so we don't have to worry about dealing with a
13068 possible remainder.
13069
13070 Note that we *look* inside a SAVE_EXPR only to determine how it was
13071 calculated; it is not safe for fold to do much of anything else with the
13072 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13073 at run time. For example, the latter example above *cannot* be implemented
13074 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13075 evaluation time of the original SAVE_EXPR is not necessarily the same at
13076 the time the new expression is evaluated. The only optimization of this
13077 sort that would be valid is changing
13078
13079 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13080
13081 divided by 8 to
13082
13083 SAVE_EXPR (I) * SAVE_EXPR (J)
13084
13085 (where the same SAVE_EXPR (J) is used in the original and the
13086 transformed version). */
13087
13088 int
13089 multiple_of_p (tree type, tree top, tree bottom)
13090 {
13091 if (operand_equal_p (top, bottom, 0))
13092 return 1;
13093
13094 if (TREE_CODE (type) != INTEGER_TYPE)
13095 return 0;
13096
13097 switch (TREE_CODE (top))
13098 {
13099 case BIT_AND_EXPR:
13100       /* Bitwise AND is only handled for a power-of-two BOTTOM: if either
13101          operand of the AND is a multiple of BOTTOM, then so is TOP.  */
13102 if (!integer_pow2p (bottom))
13103 return 0;
13104 /* FALLTHRU */
13105
13106 case MULT_EXPR:
13107 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13108 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13109
13110 case PLUS_EXPR:
13111 case MINUS_EXPR:
13112 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13113 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13114
13115 case LSHIFT_EXPR:
13116 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13117 {
13118 tree op1, t1;
13119
13120 op1 = TREE_OPERAND (top, 1);
13121 /* const_binop may not detect overflow correctly,
13122 so check for it explicitly here. */
13123 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
13124 > TREE_INT_CST_LOW (op1)
13125 && TREE_INT_CST_HIGH (op1) == 0
13126 && 0 != (t1 = fold_convert (type,
13127 const_binop (LSHIFT_EXPR,
13128 size_one_node,
13129 op1, 0)))
13130 && !TREE_OVERFLOW (t1))
13131 return multiple_of_p (type, t1, bottom);
13132 }
13133 return 0;
13134
13135 case NOP_EXPR:
13136       /* Can't handle conversions from a non-integral or wider integral type.  */
13137 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13138 || (TYPE_PRECISION (type)
13139 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13140 return 0;
13141
13142       /* ... fall through ...  */
13143
13144 case SAVE_EXPR:
13145 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13146
13147 case INTEGER_CST:
13148 if (TREE_CODE (bottom) != INTEGER_CST
13149 || integer_zerop (bottom)
13150 || (TYPE_UNSIGNED (type)
13151 && (tree_int_cst_sgn (top) < 0
13152 || tree_int_cst_sgn (bottom) < 0)))
13153 return 0;
13154 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
13155 top, bottom, 0));
13156
13157 default:
13158 return 0;
13159 }
13160 }
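/* Editorial sketch, not part of the original source: exercising
   multiple_of_p on constants.  */
#if 0
static int
multiple_of_p_example (void)
{
  tree top = build_int_cst (integer_type_node, 24);
  tree bottom = build_int_cst (integer_type_node, 8);
  /* 24 % 8 == 0, so this returns 1; with TOP = 20 it would return 0
     via the INTEGER_CST case's TRUNC_MOD_EXPR test.  */
  return multiple_of_p (integer_type_node, top, bottom);
}
#endif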
13161
13162 /* Return true if `t' is known to be non-negative. If the return
13163 value is based on the assumption that signed overflow is undefined,
13164 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13165 *STRICT_OVERFLOW_P. */
13166
13167 bool
13168 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
13169 {
13170 if (t == error_mark_node)
13171 return false;
13172
13173 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13174 return true;
13175
13176 switch (TREE_CODE (t))
13177 {
13178 case SSA_NAME:
13179 /* Query VRP to see if it has recorded any information about
13180 the range of this object. */
13181 return ssa_name_nonnegative_p (t);
13182
13183 case ABS_EXPR:
13184 /* We can't return 1 if flag_wrapv is set because
13185 ABS_EXPR<INT_MIN> = INT_MIN. */
13186 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13187 return true;
13188 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
13189 {
13190 *strict_overflow_p = true;
13191 return true;
13192 }
13193 break;
13194
13195 case INTEGER_CST:
13196 return tree_int_cst_sgn (t) >= 0;
13197
13198 case REAL_CST:
13199 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13200
13201 case PLUS_EXPR:
13202 if (FLOAT_TYPE_P (TREE_TYPE (t)))
13203 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13204 strict_overflow_p)
13205 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13206 strict_overflow_p));
13207
13208 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13209 both unsigned and at least 2 bits shorter than the result. */
13210 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
13211 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
13212 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
13213 {
13214 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
13215 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
13216 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13217 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13218 {
13219 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13220 TYPE_PRECISION (inner2)) + 1;
13221 return prec < TYPE_PRECISION (TREE_TYPE (t));
13222 }
13223 }
13224 break;
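      /* Worked example (editorial, not in the original source): for
	 "unsigned char a, b", "(int) a + (int) b" is at most 255 + 255
	 = 510, which fits in 9 bits, so the 32-bit signed sum can
	 never be negative.  */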
13225
13226 case MULT_EXPR:
13227 if (FLOAT_TYPE_P (TREE_TYPE (t)))
13228 {
13229 /* x * x for floating point x is always non-negative. */
13230 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
13231 return true;
13232 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13233 strict_overflow_p)
13234 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13235 strict_overflow_p));
13236 }
13237
13238 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13239        both unsigned and their combined precision is less than the result's.  */
13240 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
13241 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
13242 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
13243 {
13244 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
13245 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
13246 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13247 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13248 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
13249 < TYPE_PRECISION (TREE_TYPE (t));
13250 }
13251 return false;
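      /* Worked example (editorial, not in the original source): with
	 32-bit int, "unsigned short" operands fail the test (16 + 16
	 is not less than 32, and 65535 * 65535 indeed overflows into
	 the sign bit), while "unsigned char" operands pass (8 + 8 < 32,
	 product at most 65025).  */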
13252
13253 case BIT_AND_EXPR:
13254 case MAX_EXPR:
13255 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13256 strict_overflow_p)
13257 || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13258 strict_overflow_p));
13259
13260 case BIT_IOR_EXPR:
13261 case BIT_XOR_EXPR:
13262 case MIN_EXPR:
13263 case RDIV_EXPR:
13264 case TRUNC_DIV_EXPR:
13265 case CEIL_DIV_EXPR:
13266 case FLOOR_DIV_EXPR:
13267 case ROUND_DIV_EXPR:
13268 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13269 strict_overflow_p)
13270 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13271 strict_overflow_p));
13272
13273 case TRUNC_MOD_EXPR:
13274 case CEIL_MOD_EXPR:
13275 case FLOOR_MOD_EXPR:
13276 case ROUND_MOD_EXPR:
13277 case SAVE_EXPR:
13278 case NON_LVALUE_EXPR:
13279 case FLOAT_EXPR:
13280 case FIX_TRUNC_EXPR:
13281 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13282 strict_overflow_p);
13283
13284 case COMPOUND_EXPR:
13285 case MODIFY_EXPR:
13286 case GIMPLE_MODIFY_STMT:
13287 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13288 strict_overflow_p);
13289
13290 case BIND_EXPR:
13291 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
13292 strict_overflow_p);
13293
13294 case COND_EXPR:
13295 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13296 strict_overflow_p)
13297 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
13298 strict_overflow_p));
13299
13300 case NOP_EXPR:
13301 {
13302 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
13303 tree outer_type = TREE_TYPE (t);
13304
13305 if (TREE_CODE (outer_type) == REAL_TYPE)
13306 {
13307 if (TREE_CODE (inner_type) == REAL_TYPE)
13308 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13309 strict_overflow_p);
13310 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13311 {
13312 if (TYPE_UNSIGNED (inner_type))
13313 return true;
13314 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13315 strict_overflow_p);
13316 }
13317 }
13318 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
13319 {
13320 if (TREE_CODE (inner_type) == REAL_TYPE)
13321 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t,0),
13322 strict_overflow_p);
13323 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13324 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13325 && TYPE_UNSIGNED (inner_type);
13326 }
13327 }
13328 break;
13329
13330 case TARGET_EXPR:
13331 {
13332 tree temp = TARGET_EXPR_SLOT (t);
13333 t = TARGET_EXPR_INITIAL (t);
13334
13335 /* If the initializer is non-void, then it's a normal expression
13336 that will be assigned to the slot. */
13337 if (!VOID_TYPE_P (t))
13338 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
13339
13340 /* Otherwise, the initializer sets the slot in some way. One common
13341 way is an assignment statement at the end of the initializer. */
13342 while (1)
13343 {
13344 if (TREE_CODE (t) == BIND_EXPR)
13345 t = expr_last (BIND_EXPR_BODY (t));
13346 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13347 || TREE_CODE (t) == TRY_CATCH_EXPR)
13348 t = expr_last (TREE_OPERAND (t, 0));
13349 else if (TREE_CODE (t) == STATEMENT_LIST)
13350 t = expr_last (t);
13351 else
13352 break;
13353 }
13354 if ((TREE_CODE (t) == MODIFY_EXPR
13355 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
13356 && GENERIC_TREE_OPERAND (t, 0) == temp)
13357 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13358 strict_overflow_p);
13359
13360 return false;
13361 }
13362
13363 case CALL_EXPR:
13364 {
13365 tree fndecl = get_callee_fndecl (t);
13366 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
13367 switch (DECL_FUNCTION_CODE (fndecl))
13368 {
13369 CASE_FLT_FN (BUILT_IN_ACOS):
13370 CASE_FLT_FN (BUILT_IN_ACOSH):
13371 CASE_FLT_FN (BUILT_IN_CABS):
13372 CASE_FLT_FN (BUILT_IN_COSH):
13373 CASE_FLT_FN (BUILT_IN_ERFC):
13374 CASE_FLT_FN (BUILT_IN_EXP):
13375 CASE_FLT_FN (BUILT_IN_EXP10):
13376 CASE_FLT_FN (BUILT_IN_EXP2):
13377 CASE_FLT_FN (BUILT_IN_FABS):
13378 CASE_FLT_FN (BUILT_IN_FDIM):
13379 CASE_FLT_FN (BUILT_IN_HYPOT):
13380 CASE_FLT_FN (BUILT_IN_POW10):
13381 CASE_INT_FN (BUILT_IN_FFS):
13382 CASE_INT_FN (BUILT_IN_PARITY):
13383 CASE_INT_FN (BUILT_IN_POPCOUNT):
13384 case BUILT_IN_BSWAP32:
13385 case BUILT_IN_BSWAP64:
13386 /* Always true. */
13387 return true;
13388
13389 CASE_FLT_FN (BUILT_IN_SQRT):
13390 /* sqrt(-0.0) is -0.0. */
13391 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
13392 return true;
13393 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13394 strict_overflow_p);
13395
13396 CASE_FLT_FN (BUILT_IN_ASINH):
13397 CASE_FLT_FN (BUILT_IN_ATAN):
13398 CASE_FLT_FN (BUILT_IN_ATANH):
13399 CASE_FLT_FN (BUILT_IN_CBRT):
13400 CASE_FLT_FN (BUILT_IN_CEIL):
13401 CASE_FLT_FN (BUILT_IN_ERF):
13402 CASE_FLT_FN (BUILT_IN_EXPM1):
13403 CASE_FLT_FN (BUILT_IN_FLOOR):
13404 CASE_FLT_FN (BUILT_IN_FMOD):
13405 CASE_FLT_FN (BUILT_IN_FREXP):
13406 CASE_FLT_FN (BUILT_IN_LCEIL):
13407 CASE_FLT_FN (BUILT_IN_LDEXP):
13408 CASE_FLT_FN (BUILT_IN_LFLOOR):
13409 CASE_FLT_FN (BUILT_IN_LLCEIL):
13410 CASE_FLT_FN (BUILT_IN_LLFLOOR):
13411 CASE_FLT_FN (BUILT_IN_LLRINT):
13412 CASE_FLT_FN (BUILT_IN_LLROUND):
13413 CASE_FLT_FN (BUILT_IN_LRINT):
13414 CASE_FLT_FN (BUILT_IN_LROUND):
13415 CASE_FLT_FN (BUILT_IN_MODF):
13416 CASE_FLT_FN (BUILT_IN_NEARBYINT):
13417 CASE_FLT_FN (BUILT_IN_RINT):
13418 CASE_FLT_FN (BUILT_IN_ROUND):
13419 CASE_FLT_FN (BUILT_IN_SCALB):
13420 CASE_FLT_FN (BUILT_IN_SCALBLN):
13421 CASE_FLT_FN (BUILT_IN_SCALBN):
13422 CASE_FLT_FN (BUILT_IN_SIGNBIT):
13423 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
13424 CASE_FLT_FN (BUILT_IN_SINH):
13425 CASE_FLT_FN (BUILT_IN_TANH):
13426 CASE_FLT_FN (BUILT_IN_TRUNC):
13427 /* True if the 1st argument is nonnegative. */
13428 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13429 strict_overflow_p);
13430
13431 CASE_FLT_FN (BUILT_IN_FMAX):
13432 /* True if the 1st OR 2nd arguments are nonnegative. */
13433 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13434 strict_overflow_p)
13435 || (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13436 strict_overflow_p)));
13437
13438 CASE_FLT_FN (BUILT_IN_FMIN):
13439 /* True if the 1st AND 2nd arguments are nonnegative. */
13440 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13441 strict_overflow_p)
13442 && (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13443 strict_overflow_p)));
13444
13445 CASE_FLT_FN (BUILT_IN_COPYSIGN):
13446 /* True if the 2nd argument is nonnegative. */
13447 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13448 strict_overflow_p);
13449
13450 CASE_FLT_FN (BUILT_IN_POWI):
13451 /* True if the 1st argument is nonnegative or the second
13452 argument is an even integer. */
13453 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == INTEGER_CST)
13454 {
13455 tree arg1 = CALL_EXPR_ARG (t, 1);
13456 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
13457 return true;
13458 }
13459 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13460 strict_overflow_p);
13461
13462 CASE_FLT_FN (BUILT_IN_POW):
13463 /* True if the 1st argument is nonnegative or the second
13464 argument is an even integer valued real. */
13465 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == REAL_CST)
13466 {
13467 REAL_VALUE_TYPE c;
13468 HOST_WIDE_INT n;
13469
13470 c = TREE_REAL_CST (CALL_EXPR_ARG (t, 1));
13471 n = real_to_integer (&c);
13472 if ((n & 1) == 0)
13473 {
13474 REAL_VALUE_TYPE cint;
13475 real_from_integer (&cint, VOIDmode, n,
13476 n < 0 ? -1 : 0, 0);
13477 if (real_identical (&c, &cint))
13478 return true;
13479 }
13480 }
13481 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13482 strict_overflow_p);
13483
13484 default:
13485 break;
13486 }
13487 }
13488
13489 /* ... fall through ... */
13490
13491 default:
13492 if (truth_value_p (TREE_CODE (t)))
13493 /* Truth values evaluate to 0 or 1, which is nonnegative. */
13494 return true;
13495 }
13496
13497 /* We don't know sign of `t', so be conservative and return false. */
13498 return false;
13499 }
13500
13501 /* Return true if `t' is known to be non-negative. Handle warnings
13502 about undefined signed overflow. */
13503
13504 bool
13505 tree_expr_nonnegative_p (tree t)
13506 {
13507 bool ret, strict_overflow_p;
13508
13509 strict_overflow_p = false;
13510 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13511 if (strict_overflow_p)
13512 fold_overflow_warning (("assuming signed overflow does not occur when "
13513 "determining that expression is always "
13514 "non-negative"),
13515 WARN_STRICT_OVERFLOW_MISC);
13516 return ret;
13517 }
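/* Editorial sketch, not part of the original source: querying
   nonnegativity through the warning-aware wrapper.  X is a
   hypothetical signed integer operand.  */
#if 0
static bool
nonnegative_example (tree x)
{
  /* ABS_EXPR<x> is nonnegative only if signed overflow is undefined
     (ABS_EXPR<INT_MIN> == INT_MIN under -fwrapv); when that assumption
     is used, tree_expr_nonnegative_p emits the strict-overflow
     warning on our behalf.  */
  return tree_expr_nonnegative_p (fold_build1 (ABS_EXPR,
					       TREE_TYPE (x), x));
}
#endif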
13518
13519 /* Return true when T is an address and is known to be nonzero.
13520 For floating point we further ensure that T is not denormal.
13521    Similar logic is present in nonzero_address in rtlanal.c.
13522
13523 If the return value is based on the assumption that signed overflow
13524 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13525 change *STRICT_OVERFLOW_P. */
13526
13527 bool
13528 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13529 {
13530 tree type = TREE_TYPE (t);
13531 bool sub_strict_overflow_p;
13532
13533 /* Doing something useful for floating point would need more work. */
13534 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
13535 return false;
13536
13537 switch (TREE_CODE (t))
13538 {
13539 case SSA_NAME:
13540 /* Query VRP to see if it has recorded any information about
13541 the range of this object. */
13542 return ssa_name_nonzero_p (t);
13543
13544 case ABS_EXPR:
13545 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13546 strict_overflow_p);
13547
13548 case INTEGER_CST:
13549 return !integer_zerop (t);
13550
13551 case PLUS_EXPR:
13552 if (TYPE_OVERFLOW_UNDEFINED (type))
13553 {
13554       /* In the presence of negative values it is hard
13555          to say anything definite.  */
13556 sub_strict_overflow_p = false;
13557 if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13558 &sub_strict_overflow_p)
13559 || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13560 &sub_strict_overflow_p))
13561 return false;
13562       /* Both operands are nonnegative, so the sum is nonzero as soon as
13563          one of them is strictly positive.  We don't set *STRICT_OVERFLOW_P
13564          here: on a twos-complement machine with 0 <= a, b <= 2^(n-1)-1 we
13565          have a + b <= 2^n - 2, so the sum wraps to zero only if a == b == 0.  */
13566 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13567 strict_overflow_p)
13568 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13569 strict_overflow_p));
13570 }
13571 break;
13572
13573 case MULT_EXPR:
13574 if (TYPE_OVERFLOW_UNDEFINED (type))
13575 {
13576 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13577 strict_overflow_p)
13578 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13579 strict_overflow_p))
13580 {
13581 *strict_overflow_p = true;
13582 return true;
13583 }
13584 }
13585 break;
13586
13587 case NOP_EXPR:
13588 {
13589 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
13590 tree outer_type = TREE_TYPE (t);
13591
13592 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13593 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13594 strict_overflow_p));
13595 }
13596 break;
13597
13598 case ADDR_EXPR:
13599 {
13600 tree base = get_base_address (TREE_OPERAND (t, 0));
13601
13602 if (!base)
13603 return false;
13604
13605 /* Weak declarations may link to NULL. */
13606 if (VAR_OR_FUNCTION_DECL_P (base))
13607 return !DECL_WEAK (base);
13608
13609 /* Constants are never weak. */
13610 if (CONSTANT_CLASS_P (base))
13611 return true;
13612
13613 return false;
13614 }
13615
13616 case COND_EXPR:
13617 sub_strict_overflow_p = false;
13618 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13619 &sub_strict_overflow_p)
13620 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13621 &sub_strict_overflow_p))
13622 {
13623 if (sub_strict_overflow_p)
13624 *strict_overflow_p = true;
13625 return true;
13626 }
13627 break;
13628
13629 case MIN_EXPR:
13630 sub_strict_overflow_p = false;
13631 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13632 &sub_strict_overflow_p)
13633 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13634 &sub_strict_overflow_p))
13635 {
13636 if (sub_strict_overflow_p)
13637 *strict_overflow_p = true;
13638 }
13639 break;
13640
13641 case MAX_EXPR:
13642 sub_strict_overflow_p = false;
13643 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13644 &sub_strict_overflow_p))
13645 {
13646 if (sub_strict_overflow_p)
13647 *strict_overflow_p = true;
13648
13649 /* When both operands are nonzero, then MAX must be too. */
13650 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13651 strict_overflow_p))
13652 return true;
13653
13654 /* MAX where operand 0 is positive is positive. */
13655 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13656 strict_overflow_p);
13657 }
13658 /* MAX where operand 1 is positive is positive. */
13659 else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13660 &sub_strict_overflow_p)
13661 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13662 &sub_strict_overflow_p))
13663 {
13664 if (sub_strict_overflow_p)
13665 *strict_overflow_p = true;
13666 return true;
13667 }
13668 break;
13669
13670 case COMPOUND_EXPR:
13671 case MODIFY_EXPR:
13672 case GIMPLE_MODIFY_STMT:
13673 case BIND_EXPR:
13674 return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13675 strict_overflow_p);
13676
13677 case SAVE_EXPR:
13678 case NON_LVALUE_EXPR:
13679 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13680 strict_overflow_p);
13681
13682 case BIT_IOR_EXPR:
13683 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13684 strict_overflow_p)
13685 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13686 strict_overflow_p));
13687
13688 case CALL_EXPR:
13689 return alloca_call_p (t);
13690
13691 default:
13692 break;
13693 }
13694 return false;
13695 }
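/* Editorial illustration (not part of the original source): two of the
   cases above, spelled out on concrete trees.  For ADDR_EXPR, the address
   of an ordinary variable or function is known nonzero, since only weak
   declarations may resolve to a null address:

     int x;   ...   &x is nonzero, because DECL_WEAK (x) is 0.

   For MULT_EXPR in a type with undefined overflow, both factors being
   nonzero implies the product is nonzero only under the no-wrapping
   assumption, which is why that case sets *STRICT_OVERFLOW_P.  */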
13696
13697 /* Return true when T is known to be nonzero.  Handle warnings
13698    about undefined signed overflow.  */
13699
13700 bool
13701 tree_expr_nonzero_p (tree t)
13702 {
13703 bool ret, strict_overflow_p;
13704
13705 strict_overflow_p = false;
13706 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
13707 if (strict_overflow_p)
13708 fold_overflow_warning (("assuming signed overflow does not occur when "
13709 "determining that expression is always "
13710 "non-zero"),
13711 WARN_STRICT_OVERFLOW_MISC);
13712 return ret;
13713 }
13714
13715 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13716 attempt to fold the expression to a constant without modifying TYPE,
13717 OP0 or OP1.
13718
13719    If the expression can be simplified to a constant, then return
13720    the constant.  If the expression cannot be simplified to a
13721    constant, then return NULL_TREE.
13722
13723 tree
13724 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13725 {
13726 tree tem = fold_binary (code, type, op0, op1);
13727 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13728 }
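/* Editorial sketch (illustrative usage, not part of the original source):

     tree a = build_int_cst (integer_type_node, 2);
     tree b = build_int_cst (integer_type_node, 3);
     tree c = fold_binary_to_constant (PLUS_EXPR, integer_type_node, a, b);

   Here fold_binary folds 2 + 3 into an INTEGER_CST 5, which satisfies
   TREE_CONSTANT and is returned in C.  Had either operand been, say, a
   VAR_DECL, no constant could be produced and C would be NULL_TREE.  */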
13729
13730 /* Given the components of a unary expression CODE, TYPE and OP0,
13731 attempt to fold the expression to a constant without modifying
13732 TYPE or OP0.
13733
13734    If the expression can be simplified to a constant, then return
13735    the constant.  If the expression cannot be simplified to a
13736    constant, then return NULL_TREE.
13737
13738 tree
13739 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13740 {
13741 tree tem = fold_unary (code, type, op0);
13742 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13743 }
13744
13745 /* If EXP represents referencing an element in a constant string
13746 (either via pointer arithmetic or array indexing), return the
13747 tree representing the value accessed, otherwise return NULL. */
13748
13749 tree
13750 fold_read_from_constant_string (tree exp)
13751 {
13752 if ((TREE_CODE (exp) == INDIRECT_REF
13753 || TREE_CODE (exp) == ARRAY_REF)
13754 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13755 {
13756 tree exp1 = TREE_OPERAND (exp, 0);
13757 tree index;
13758 tree string;
13759
13760 if (TREE_CODE (exp) == INDIRECT_REF)
13761 string = string_constant (exp1, &index);
13762 else
13763 {
13764 tree low_bound = array_ref_low_bound (exp);
13765 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
13766
13767 /* Optimize the special-case of a zero lower bound.
13768
13769 We convert the low_bound to sizetype to avoid some problems
13770 with constant folding. (E.g. suppose the lower bound is 1,
13771          and its mode is QI.  Without the conversion, (ARRAY
13772          +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13773          +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
13774 if (! integer_zerop (low_bound))
13775 index = size_diffop (index, fold_convert (sizetype, low_bound));
13776
13777 string = exp1;
13778 }
13779
13780 if (string
13781 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13782 && TREE_CODE (string) == STRING_CST
13783 && TREE_CODE (index) == INTEGER_CST
13784 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13785 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13786 == MODE_INT)
13787 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13788 return fold_convert (TREE_TYPE (exp),
13789 build_int_cst (NULL_TREE,
13790 (TREE_STRING_POINTER (string)
13791 [TREE_INT_CST_LOW (index)])));
13792 }
13793 return NULL;
13794 }
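/* Editorial worked example (not part of the original source): for the C
   expression "abc"[1] the front end builds an ARRAY_REF of a STRING_CST.
   The checks above hold (the element type has a single-byte integer mode
   and the index 1 is below TREE_STRING_LENGTH), so the reference folds
   to the character constant 'b', i.e. TREE_STRING_POINTER ("abc")[1].  */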
13795
13796 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13797 an integer constant or real constant.
13798
13799 TYPE is the type of the result. */
13800
13801 static tree
13802 fold_negate_const (tree arg0, tree type)
13803 {
13804 tree t = NULL_TREE;
13805
13806 switch (TREE_CODE (arg0))
13807 {
13808 case INTEGER_CST:
13809 {
13810 unsigned HOST_WIDE_INT low;
13811 HOST_WIDE_INT high;
13812 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13813 TREE_INT_CST_HIGH (arg0),
13814 &low, &high);
13815 t = force_fit_type_double (type, low, high, 1,
13816 (overflow | TREE_OVERFLOW (arg0))
13817 && !TYPE_UNSIGNED (type));
13818 break;
13819 }
13820
13821 case REAL_CST:
13822 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13823 break;
13824
13825 default:
13826 gcc_unreachable ();
13827 }
13828
13829 return t;
13830 }
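/* Editorial note (not part of the original source): for a 32-bit signed
   int, negating INT_MIN (-2147483648) is not representable; neg_double
   reports overflow and force_fit_type_double therefore returns the
   wrapped INTEGER_CST -2147483648 with TREE_OVERFLOW set.  For unsigned
   types the TYPE_UNSIGNED test above suppresses the overflow flag.  */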
13831
13832 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13833 an integer constant or real constant.
13834
13835 TYPE is the type of the result. */
13836
13837 tree
13838 fold_abs_const (tree arg0, tree type)
13839 {
13840 tree t = NULL_TREE;
13841
13842 switch (TREE_CODE (arg0))
13843 {
13844 case INTEGER_CST:
13845 /* If the value is unsigned, then the absolute value is
13846 the same as the ordinary value. */
13847 if (TYPE_UNSIGNED (type))
13848 t = arg0;
13849 /* Similarly, if the value is non-negative. */
13850 else if (INT_CST_LT (integer_minus_one_node, arg0))
13851 t = arg0;
13852 /* If the value is negative, then the absolute value is
13853 its negation. */
13854 else
13855 {
13856 unsigned HOST_WIDE_INT low;
13857 HOST_WIDE_INT high;
13858 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13859 TREE_INT_CST_HIGH (arg0),
13860 &low, &high);
13861 t = force_fit_type_double (type, low, high, -1,
13862 overflow | TREE_OVERFLOW (arg0));
13863 }
13864 break;
13865
13866 case REAL_CST:
13867 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13868 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13869 else
13870 t = arg0;
13871 break;
13872
13873 default:
13874 gcc_unreachable ();
13875 }
13876
13877 return t;
13878 }
13879
13880 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13881 constant. TYPE is the type of the result. */
13882
13883 static tree
13884 fold_not_const (tree arg0, tree type)
13885 {
13886 tree t = NULL_TREE;
13887
13888 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13889
13890 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
13891 ~TREE_INT_CST_HIGH (arg0), 0,
13892 TREE_OVERFLOW (arg0));
13893
13894 return t;
13895 }
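/* Editorial note (not part of the original source): the double-word
   complement above inverts both halves independently; e.g. ~0 starts
   from low == 0 and high == 0, producing all-ones words, which
   force_fit_type_double then truncates to the precision of TYPE
   (yielding, say, 0xff for an 8-bit unsigned type).  */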
13896
13897 /* Given CODE, a relational operator, the target type, TYPE and two
13898 constant operands OP0 and OP1, return the result of the
13899 relational operation. If the result is not a compile time
13900 constant, then return NULL_TREE. */
13901
13902 static tree
13903 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13904 {
13905 int result, invert;
13906
13907 /* From here on, the only cases we handle are when the result is
13908 known to be a constant. */
13909
13910 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13911 {
13912 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13913 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13914
13915 /* Handle the cases where either operand is a NaN. */
13916 if (real_isnan (c0) || real_isnan (c1))
13917 {
13918 switch (code)
13919 {
13920 case EQ_EXPR:
13921 case ORDERED_EXPR:
13922 result = 0;
13923 break;
13924
13925 case NE_EXPR:
13926 case UNORDERED_EXPR:
13927 case UNLT_EXPR:
13928 case UNLE_EXPR:
13929 case UNGT_EXPR:
13930 case UNGE_EXPR:
13931 case UNEQ_EXPR:
13932 result = 1;
13933 break;
13934
13935 case LT_EXPR:
13936 case LE_EXPR:
13937 case GT_EXPR:
13938 case GE_EXPR:
13939 case LTGT_EXPR:
13940 if (flag_trapping_math)
13941 return NULL_TREE;
13942 result = 0;
13943 break;
13944
13945 default:
13946 gcc_unreachable ();
13947 }
13948
13949 return constant_boolean_node (result, type);
13950 }
13951
13952 return constant_boolean_node (real_compare (code, c0, c1), type);
13953 }
13954
13955 /* Handle equality/inequality of complex constants. */
13956 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13957 {
13958 tree rcond = fold_relational_const (code, type,
13959 TREE_REALPART (op0),
13960 TREE_REALPART (op1));
13961 tree icond = fold_relational_const (code, type,
13962 TREE_IMAGPART (op0),
13963 TREE_IMAGPART (op1));
13964 if (code == EQ_EXPR)
13965 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13966 else if (code == NE_EXPR)
13967 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13968 else
13969 return NULL_TREE;
13970 }
13971
13972 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13973
13974 To compute GT, swap the arguments and do LT.
13975 To compute GE, do LT and invert the result.
13976 To compute LE, swap the arguments, do LT and invert the result.
13977 To compute NE, do EQ and invert the result.
13978
13979 Therefore, the code below must handle only EQ and LT. */
13980
13981 if (code == LE_EXPR || code == GT_EXPR)
13982 {
13983 tree tem = op0;
13984 op0 = op1;
13985 op1 = tem;
13986 code = swap_tree_comparison (code);
13987 }
13988
13989   /* Note that it is safe to invert for real values here because we
13990      have already handled the one case in which it matters.  */
13991
13992 invert = 0;
13993 if (code == NE_EXPR || code == GE_EXPR)
13994 {
13995 invert = 1;
13996 code = invert_tree_comparison (code, false);
13997 }
13998
13999   /* Compute a result for LT or EQ if args permit;
14000      otherwise return NULL_TREE.  */
14001 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14002 {
14003 if (code == EQ_EXPR)
14004 result = tree_int_cst_equal (op0, op1);
14005 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
14006 result = INT_CST_LT_UNSIGNED (op0, op1);
14007 else
14008 result = INT_CST_LT (op0, op1);
14009 }
14010 else
14011 return NULL_TREE;
14012
14013 if (invert)
14014 result ^= 1;
14015 return constant_boolean_node (result, type);
14016 }
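/* Editorial worked example (not part of the original source): folding
   3 >= 5 on INTEGER_CSTs takes the GE path: INVERT is set and the code
   becomes LT, INT_CST_LT (3, 5) yields 1, and the final inversion gives
   0, i.e. constant false.  Folding 3 > 5 instead swaps the operands
   first (GT -> LT), computing INT_CST_LT (5, 3) == 0 directly.  */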
14017
14018 /* Build a cleanup point expression containing EXPR, with type TYPE.
14019    Don't build a cleanup point expression for an EXPR that has no side
14020    effects.  */
14021
14022 tree
14023 fold_build_cleanup_point_expr (tree type, tree expr)
14024 {
14025 /* If the expression does not have side effects then we don't have to wrap
14026 it with a cleanup point expression. */
14027 if (!TREE_SIDE_EFFECTS (expr))
14028 return expr;
14029
14030   /* If the expression is a return, check whether the expression inside the
14031      return, or the right hand side of the modify expression inside the
14032      return, has side effects.  If either of them has none, we don't need to
14033      wrap the expression in a cleanup point expression.  Note we don't check
14034      the left hand side of the modify because it should always be a return decl.  */
14035 if (TREE_CODE (expr) == RETURN_EXPR)
14036 {
14037 tree op = TREE_OPERAND (expr, 0);
14038 if (!op || !TREE_SIDE_EFFECTS (op))
14039 return expr;
14040 op = TREE_OPERAND (op, 1);
14041 if (!TREE_SIDE_EFFECTS (op))
14042 return expr;
14043 }
14044
14045 return build1 (CLEANUP_POINT_EXPR, type, expr);
14046 }
14047
14048 /* Build an expression for the address of T. Folds away INDIRECT_REF to
14049 avoid confusing the gimplify process. */
14050
14051 tree
14052 build_fold_addr_expr_with_type (tree t, tree ptrtype)
14053 {
14054 /* The size of the object is not relevant when talking about its address. */
14055 if (TREE_CODE (t) == WITH_SIZE_EXPR)
14056 t = TREE_OPERAND (t, 0);
14057
14058 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
14059 if (TREE_CODE (t) == INDIRECT_REF
14060 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
14061 {
14062 t = TREE_OPERAND (t, 0);
14063 if (TREE_TYPE (t) != ptrtype)
14064 t = build1 (NOP_EXPR, ptrtype, t);
14065 }
14066 else
14067 {
14068 tree base = t;
14069
14070 while (handled_component_p (base))
14071 base = TREE_OPERAND (base, 0);
14072 if (DECL_P (base))
14073 TREE_ADDRESSABLE (base) = 1;
14074
14075 t = build1 (ADDR_EXPR, ptrtype, t);
14076 }
14077
14078 return t;
14079 }
14080
14081 tree
14082 build_fold_addr_expr (tree t)
14083 {
14084 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
14085 }
14086
14087 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14088 of an indirection through OP0, or NULL_TREE if no simplification is
14089 possible. */
14090
14091 tree
14092 fold_indirect_ref_1 (tree type, tree op0)
14093 {
14094 tree sub = op0;
14095 tree subtype;
14096
14097 STRIP_NOPS (sub);
14098 subtype = TREE_TYPE (sub);
14099 if (!POINTER_TYPE_P (subtype))
14100 return NULL_TREE;
14101
14102 if (TREE_CODE (sub) == ADDR_EXPR)
14103 {
14104 tree op = TREE_OPERAND (sub, 0);
14105 tree optype = TREE_TYPE (op);
14106 /* *&CONST_DECL -> to the value of the const decl. */
14107 if (TREE_CODE (op) == CONST_DECL)
14108 return DECL_INITIAL (op);
14109 /* *&p => p; make sure to handle *&"str"[cst] here. */
14110 if (type == optype)
14111 {
14112 tree fop = fold_read_from_constant_string (op);
14113 if (fop)
14114 return fop;
14115 else
14116 return op;
14117 }
14118 /* *(foo *)&fooarray => fooarray[0] */
14119 else if (TREE_CODE (optype) == ARRAY_TYPE
14120 && type == TREE_TYPE (optype))
14121 {
14122 tree type_domain = TYPE_DOMAIN (optype);
14123 tree min_val = size_zero_node;
14124 if (type_domain && TYPE_MIN_VALUE (type_domain))
14125 min_val = TYPE_MIN_VALUE (type_domain);
14126 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
14127 }
14128 /* *(foo *)&complexfoo => __real__ complexfoo */
14129 else if (TREE_CODE (optype) == COMPLEX_TYPE
14130 && type == TREE_TYPE (optype))
14131 return fold_build1 (REALPART_EXPR, type, op);
14132 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14133 else if (TREE_CODE (optype) == VECTOR_TYPE
14134 && type == TREE_TYPE (optype))
14135 {
14136 tree part_width = TYPE_SIZE (type);
14137 tree index = bitsize_int (0);
14138 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
14139 }
14140 }
14141
14142 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14143 if (TREE_CODE (sub) == PLUS_EXPR
14144 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14145 {
14146 tree op00 = TREE_OPERAND (sub, 0);
14147 tree op01 = TREE_OPERAND (sub, 1);
14148 tree op00type;
14149
14150 STRIP_NOPS (op00);
14151 op00type = TREE_TYPE (op00);
14152 if (TREE_CODE (op00) == ADDR_EXPR
14153 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
14154 && type == TREE_TYPE (TREE_TYPE (op00type)))
14155 {
14156 tree size = TYPE_SIZE_UNIT (type);
14157 if (tree_int_cst_equal (size, op01))
14158 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
14159 }
14160 }
14161
14162 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14163 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14164 && type == TREE_TYPE (TREE_TYPE (subtype)))
14165 {
14166 tree type_domain;
14167 tree min_val = size_zero_node;
14168 sub = build_fold_indirect_ref (sub);
14169 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14170 if (type_domain && TYPE_MIN_VALUE (type_domain))
14171 min_val = TYPE_MIN_VALUE (type_domain);
14172 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
14173 }
14174
14175 return NULL_TREE;
14176 }
14177
14178 /* Builds an expression for an indirection through T, simplifying some
14179 cases. */
14180
14181 tree
14182 build_fold_indirect_ref (tree t)
14183 {
14184 tree type = TREE_TYPE (TREE_TYPE (t));
14185 tree sub = fold_indirect_ref_1 (type, t);
14186
14187 if (sub)
14188 return sub;
14189 else
14190 return build1 (INDIRECT_REF, type, t);
14191 }
14192
14193 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14194
14195 tree
14196 fold_indirect_ref (tree t)
14197 {
14198 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
14199
14200 if (sub)
14201 return sub;
14202 else
14203 return t;
14204 }
14205
14206 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14207 whose result is ignored. The type of the returned tree need not be
14208 the same as the original expression. */
14209
14210 tree
14211 fold_ignored_result (tree t)
14212 {
14213 if (!TREE_SIDE_EFFECTS (t))
14214 return integer_zero_node;
14215
14216 for (;;)
14217 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14218 {
14219 case tcc_unary:
14220 t = TREE_OPERAND (t, 0);
14221 break;
14222
14223 case tcc_binary:
14224 case tcc_comparison:
14225 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14226 t = TREE_OPERAND (t, 0);
14227 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14228 t = TREE_OPERAND (t, 1);
14229 else
14230 return t;
14231 break;
14232
14233 case tcc_expression:
14234 switch (TREE_CODE (t))
14235 {
14236 case COMPOUND_EXPR:
14237 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14238 return t;
14239 t = TREE_OPERAND (t, 0);
14240 break;
14241
14242 case COND_EXPR:
14243 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14244 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14245 return t;
14246 t = TREE_OPERAND (t, 0);
14247 break;
14248
14249 default:
14250 return t;
14251 }
14252 break;
14253
14254 default:
14255 return t;
14256 }
14257 }
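/* Editorial sketch (not part of the original source): with the result
   ignored, an expression such as x + f () keeps only the side-effecting
   operand: the tcc_binary case drops the side-effect-free x and loops on
   f ().  A pure computation like a * b has no side effects at all and is
   replaced by integer_zero_node at the top of the function.  */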
14258
14259 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
14260 This can only be applied to objects of a sizetype. */
14261
14262 tree
14263 round_up (tree value, int divisor)
14264 {
14265 tree div = NULL_TREE;
14266
14267 gcc_assert (divisor > 0);
14268 if (divisor == 1)
14269 return value;
14270
14271   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
14272      have to do anything.  Only perform this check when VALUE is not a
14273      constant, because for a constant the check is more expensive than
14274      simply doing the rounding.  */
14275 if (TREE_CODE (value) != INTEGER_CST)
14276 {
14277 div = build_int_cst (TREE_TYPE (value), divisor);
14278
14279 if (multiple_of_p (TREE_TYPE (value), value, div))
14280 return value;
14281 }
14282
14283 /* If divisor is a power of two, simplify this to bit manipulation. */
14284 if (divisor == (divisor & -divisor))
14285 {
14286 if (TREE_CODE (value) == INTEGER_CST)
14287 {
14288 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
14289 unsigned HOST_WIDE_INT high;
14290 bool overflow_p;
14291
14292 if ((low & (divisor - 1)) == 0)
14293 return value;
14294
14295 overflow_p = TREE_OVERFLOW (value);
14296 high = TREE_INT_CST_HIGH (value);
14297 low &= ~(divisor - 1);
14298 low += divisor;
14299 if (low == 0)
14300 {
14301 high++;
14302 if (high == 0)
14303 overflow_p = true;
14304 }
14305
14306 return force_fit_type_double (TREE_TYPE (value), low, high,
14307 -1, overflow_p);
14308 }
14309 else
14310 {
14311 tree t;
14312
14313 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14314 value = size_binop (PLUS_EXPR, value, t);
14315 t = build_int_cst (TREE_TYPE (value), -divisor);
14316 value = size_binop (BIT_AND_EXPR, value, t);
14317 }
14318 }
14319 else
14320 {
14321 if (!div)
14322 div = build_int_cst (TREE_TYPE (value), divisor);
14323 value = size_binop (CEIL_DIV_EXPR, value, div);
14324 value = size_binop (MULT_EXPR, value, div);
14325 }
14326
14327 return value;
14328 }
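/* Editorial worked example (not part of the original source): for a
   power-of-two divisor the non-constant path above computes
   (value + divisor - 1) & -divisor.  With value == 37 and divisor == 8:
   37 + 7 == 44 and 44 & -8 == 44 & ~7 == 40, the next multiple of 8.
   An already-aligned value is unchanged: 40 + 7 == 47, 47 & ~7 == 40.  */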
14329
14330 /* Likewise, but round down. */
14331
14332 tree
14333 round_down (tree value, int divisor)
14334 {
14335 tree div = NULL_TREE;
14336
14337 gcc_assert (divisor > 0);
14338 if (divisor == 1)
14339 return value;
14340
14341   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
14342      have to do anything.  Only perform this check when VALUE is not a
14343      constant, because for a constant the check is more expensive than
14344      simply doing the rounding.  */
14345 if (TREE_CODE (value) != INTEGER_CST)
14346 {
14347 div = build_int_cst (TREE_TYPE (value), divisor);
14348
14349 if (multiple_of_p (TREE_TYPE (value), value, div))
14350 return value;
14351 }
14352
14353 /* If divisor is a power of two, simplify this to bit manipulation. */
14354 if (divisor == (divisor & -divisor))
14355 {
14356 tree t;
14357
14358 t = build_int_cst (TREE_TYPE (value), -divisor);
14359 value = size_binop (BIT_AND_EXPR, value, t);
14360 }
14361 else
14362 {
14363 if (!div)
14364 div = build_int_cst (TREE_TYPE (value), divisor);
14365 value = size_binop (FLOOR_DIV_EXPR, value, div);
14366 value = size_binop (MULT_EXPR, value, div);
14367 }
14368
14369 return value;
14370 }
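/* Editorial note (not part of the original source): the power-of-two
   case of round_down is just value & -divisor, e.g. 44 & -8 == 40,
   clearing the low log2 (divisor) bits.  */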
14371
14372 /* Returns a pointer to the base of the object addressed by EXP and
14373    extracts the information about the offset of the access, storing it
14374    in PBITPOS and POFFSET.  */
14375
14376 static tree
14377 split_address_to_core_and_offset (tree exp,
14378 HOST_WIDE_INT *pbitpos, tree *poffset)
14379 {
14380 tree core;
14381 enum machine_mode mode;
14382 int unsignedp, volatilep;
14383 HOST_WIDE_INT bitsize;
14384
14385 if (TREE_CODE (exp) == ADDR_EXPR)
14386 {
14387 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14388 poffset, &mode, &unsignedp, &volatilep,
14389 false);
14390 core = build_fold_addr_expr (core);
14391 }
14392 else
14393 {
14394 core = exp;
14395 *pbitpos = 0;
14396 *poffset = NULL_TREE;
14397 }
14398
14399 return core;
14400 }
14401
14402 /* Returns true if addresses of E1 and E2 differ by a constant, false
14403 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14404
14405 bool
14406 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14407 {
14408 tree core1, core2;
14409 HOST_WIDE_INT bitpos1, bitpos2;
14410 tree toffset1, toffset2, tdiff, type;
14411
14412 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14413 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14414
14415 if (bitpos1 % BITS_PER_UNIT != 0
14416 || bitpos2 % BITS_PER_UNIT != 0
14417 || !operand_equal_p (core1, core2, 0))
14418 return false;
14419
14420 if (toffset1 && toffset2)
14421 {
14422 type = TREE_TYPE (toffset1);
14423 if (type != TREE_TYPE (toffset2))
14424 toffset2 = fold_convert (type, toffset2);
14425
14426 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14427 if (!cst_and_fits_in_hwi (tdiff))
14428 return false;
14429
14430 *diff = int_cst_value (tdiff);
14431 }
14432 else if (toffset1 || toffset2)
14433 {
14434 /* If only one of the offsets is non-constant, the difference cannot
14435 be a constant. */
14436 return false;
14437 }
14438 else
14439 *diff = 0;
14440
14441 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14442 return true;
14443 }
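/* Editorial sketch (not part of the original source): for e1 == &a[3]
   and e2 == &a[1] both addresses share the core &a, the variable offsets
   are absent, and the bit positions differ by
   2 * sizeof (a[0]) * BITS_PER_UNIT, so *DIFF is set to 2 * sizeof (a[0])
   and true is returned.  Addresses based on distinct objects fail
   operand_equal_p and the function returns false.  */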
14444
14445 /* Simplify the floating point expression EXP when the sign of the
14446 result is not significant. Return NULL_TREE if no simplification
14447 is possible. */
14448
14449 tree
14450 fold_strip_sign_ops (tree exp)
14451 {
14452 tree arg0, arg1;
14453
14454 switch (TREE_CODE (exp))
14455 {
14456 case ABS_EXPR:
14457 case NEGATE_EXPR:
14458 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
14459 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
14460
14461 case MULT_EXPR:
14462 case RDIV_EXPR:
14463 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
14464 return NULL_TREE;
14465 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
14466 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14467 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
14468 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
14469 arg0 ? arg0 : TREE_OPERAND (exp, 0),
14470 arg1 ? arg1 : TREE_OPERAND (exp, 1));
14471 break;
14472
14473 case COMPOUND_EXPR:
14474 arg0 = TREE_OPERAND (exp, 0);
14475 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14476 if (arg1)
14477 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
14478 break;
14479
14480 case COND_EXPR:
14481 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14482 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
14483 if (arg0 || arg1)
14484 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
14485 arg0 ? arg0 : TREE_OPERAND (exp, 1),
14486 arg1 ? arg1 : TREE_OPERAND (exp, 2));
14487 break;
14488
14489 case CALL_EXPR:
14490 {
14491 const enum built_in_function fcode = builtin_mathfn_code (exp);
14492 switch (fcode)
14493 {
14494 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14495 /* Strip copysign function call, return the 1st argument. */
14496 arg0 = CALL_EXPR_ARG (exp, 0);
14497 arg1 = CALL_EXPR_ARG (exp, 1);
14498 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
14499
14500 default:
14501 /* Strip sign ops from the argument of "odd" math functions. */
14502 if (negate_mathfn_p (fcode))
14503 {
14504 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
14505 if (arg0)
14506 return build_call_expr (get_callee_fndecl (exp), 1, arg0);
14507 }
14508 break;
14509 }
14510 }
14511 break;
14512
14513 default:
14514 break;
14515 }
14516 return NULL_TREE;
14517 }
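/* Editorial sketch (not part of the original source): when a caller
   knows the sign of the result is irrelevant, e.g. inside fabs (...),
   this routine rewrites -x * y as x * y (MULT_EXPR case), reduces
   copysign (x, y) to x while preserving any side effects of y
   (omit_one_operand), and for an odd function such as sin turns
   sin (-x) into sin (x).  */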